Update vendored deps
parent 73f3dde770
commit 7a950b49eb
@@ -7,6 +7,7 @@
# without these options will be part of the same program.

import("//build/config/c++/c++.gni")
import("//build/config/nacl/config.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/toolchain/toolchain.gni")
import("//build_overrides/build.gni")

@@ -16,6 +17,9 @@ config("absl_component_build") {
defines = [ "ABSL_CONSUME_DLL" ]
}

assert(!is_nacl || is_nacl_saigo,
"base must not be built in most nacl toolchains")

component("absl") {
public_deps = [ ":absl_component_deps" ]
if (is_component_build) {

@@ -57,6 +61,7 @@ group("absl_component_deps") {
"//third_party/abseil-cpp/absl/base",
"//third_party/abseil-cpp/absl/base:config",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/cleanup",
"//third_party/abseil-cpp/absl/container:btree",
"//third_party/abseil-cpp/absl/container:fixed_array",
"//third_party/abseil-cpp/absl/container:flat_hash_map",

@@ -67,21 +72,26 @@ group("absl_component_deps") {
"//third_party/abseil-cpp/absl/debugging:failure_signal_handler",
"//third_party/abseil-cpp/absl/debugging:stacktrace",
"//third_party/abseil-cpp/absl/debugging:symbolize",
"//third_party/abseil-cpp/absl/functional:any_invocable",
"//third_party/abseil-cpp/absl/functional:bind_front",
"//third_party/abseil-cpp/absl/functional:function_ref",
"//third_party/abseil-cpp/absl/hash",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/meta:type_traits",
"//third_party/abseil-cpp/absl/numeric:bits",
"//third_party/abseil-cpp/absl/numeric:int128",
"//third_party/abseil-cpp/absl/random",
"//third_party/abseil-cpp/absl/status",
"//third_party/abseil-cpp/absl/status:statusor",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/strings:cord",
"//third_party/abseil-cpp/absl/strings:str_format",
"//third_party/abseil-cpp/absl/synchronization",
"//third_party/abseil-cpp/absl/time",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/abseil-cpp/absl/types:span",
"//third_party/abseil-cpp/absl/types:variant",
"//third_party/abseil-cpp/absl/utility",
]

# The following dependencies currently don't build with NaCl.

@@ -141,18 +151,10 @@ config("absl_default_cflags_cc") {
"-Wno-sign-conversion",
"-Wstring-conversion",
]
if (!is_nacl && !use_xcode_clang) {
if (!is_nacl) {
cflags_cc += [ "-Wbitfield-enum-conversion" ]
}
}
if (is_win) {
cflags_cc += [
"/wd4005", # macro-redefinition
"/wd4018", # sign-compare
"/wd4068", # unknown pragma
"/wd4702", # unreachable code
]
}
}

config("absl_test_cflags_cc") {

@@ -185,22 +187,31 @@ if (build_with_chromium) {
"absl/algorithm:algorithm_test",
"absl/algorithm:container_test",
"absl/base:config_test",
"absl/base:prefetch_test",
"absl/cleanup:cleanup_test",
"absl/container:inlined_vector_test",
"absl/container:node_slot_policy_test",
"absl/container:sample_element_size_test",
"absl/functional:any_invocable_test",
"absl/hash:hash_test",
"absl/hash:low_level_hash_test",
"absl/memory:memory_test",
"absl/meta:type_traits_test",
"absl/profiling:exponential_biased_test",
"absl/profiling:periodic_sampler_test",
"absl/status:statusor_test",
"absl/strings:ascii_test",
"absl/strings:cord_rep_btree_test",
"absl/strings:cord_buffer_test",
"absl/strings:cord_data_edge_test",
"absl/strings:cord_rep_btree_navigator_test",
"absl/strings:cord_rep_btree_reader_test",
"absl/strings:cord_rep_consume_test",
"absl/strings:cord_rep_btree_test",
"absl/strings:cord_rep_crc_test",
"absl/strings:cordz_functions_test",
"absl/strings:cordz_info_statistics_test",
"absl/strings:cordz_info_test",
"absl/strings:cordz_test",
"absl/strings:cordz_update_scope_test",
"absl/strings:cord_rep_btree_navigator_test",
"absl/strings:cordz_update_tracker_test",
"absl/strings:match_test",
"absl/strings:str_replace_test",

@@ -14,11 +14,8 @@ set(ABSL_INTERNAL_DLL_FILES
"base/internal/cycleclock.cc"
"base/internal/cycleclock.h"
"base/internal/direct_mmap.h"
"base/internal/dynamic_annotations.h"
"base/internal/endian.h"
"base/internal/errno_saver.h"
"base/internal/exponential_biased.cc"
"base/internal/exponential_biased.h"
"base/internal/fast_type_id.h"
"base/internal/hide_ptr.h"
"base/internal/identity.h"

@@ -28,8 +25,7 @@ set(ABSL_INTERNAL_DLL_FILES
"base/internal/low_level_alloc.h"
"base/internal/low_level_scheduling.h"
"base/internal/per_thread_tls.h"
"base/internal/periodic_sampler.cc"
"base/internal/periodic_sampler.h"
"base/internal/prefetch.h"
"base/internal/pretty_function.h"
"base/internal/raw_logging.cc"
"base/internal/raw_logging.h"

@@ -44,7 +40,6 @@ set(ABSL_INTERNAL_DLL_FILES
"base/internal/spinlock_wait.h"
"base/internal/sysinfo.cc"
"base/internal/sysinfo.h"
"base/internal/thread_annotations.h"
"base/internal/thread_identity.cc"
"base/internal/thread_identity.h"
"base/internal/throw_delegate.cc"

@@ -82,10 +77,9 @@ set(ABSL_INTERNAL_DLL_FILES
"container/internal/hashtablez_sampler.cc"
"container/internal/hashtablez_sampler.h"
"container/internal/hashtablez_sampler_force_weak_definition.cc"
"container/internal/have_sse.h"
"container/internal/inlined_vector.h"
"container/internal/layout.h"
"container/internal/node_hash_policy.h"
"container/internal/node_slot_policy.h"
"container/internal/raw_hash_map.h"
"container/internal/raw_hash_set.cc"
"container/internal/raw_hash_set.h"

@@ -95,7 +89,6 @@ set(ABSL_INTERNAL_DLL_FILES
"debugging/failure_signal_handler.cc"
"debugging/failure_signal_handler.h"
"debugging/leak_check.h"
"debugging/leak_check_disable.cc"
"debugging/stacktrace.cc"
"debugging/stacktrace.h"
"debugging/symbolize.cc"

@@ -114,9 +107,11 @@ set(ABSL_INTERNAL_DLL_FILES
"debugging/internal/symbolize.h"
"debugging/internal/vdso_support.cc"
"debugging/internal/vdso_support.h"
"functional/any_invocable.h"
"functional/internal/front_binder.h"
"functional/bind_front.h"
"functional/function_ref.h"
"functional/internal/any_invocable.h"
"functional/internal/function_ref.h"
"hash/hash.h"
"hash/internal/city.h"

@@ -133,6 +128,11 @@ set(ABSL_INTERNAL_DLL_FILES
"numeric/int128.h"
"numeric/internal/bits.h"
"numeric/internal/representation.h"
"profiling/internal/exponential_biased.cc"
"profiling/internal/exponential_biased.h"
"profiling/internal/periodic_sampler.cc"
"profiling/internal/periodic_sampler.h"
"profiling/internal/sample_recorder.h"
"random/bernoulli_distribution.h"
"random/beta_distribution.h"
"random/bit_gen_ref.h"

@@ -195,22 +195,29 @@ set(ABSL_INTERNAL_DLL_FILES
"strings/charconv.h"
"strings/cord.cc"
"strings/cord.h"
"strings/cord_analysis.cc"
"strings/cord_analysis.h"
"strings/cord_buffer.cc"
"strings/cord_buffer.h"
"strings/escaping.cc"
"strings/escaping.h"
"strings/internal/charconv_bigint.cc"
"strings/internal/charconv_bigint.h"
"strings/internal/charconv_parse.cc"
"strings/internal/charconv_parse.h"
"strings/internal/cord_data_edge.h"
"strings/internal/cord_internal.cc"
"strings/internal/cord_internal.h"
"strings/internal/cord_rep_consume.h"
"strings/internal/cord_rep_consume.cc"
"strings/internal/cord_rep_btree.cc"
"strings/internal/cord_rep_btree.h"
"strings/internal/cord_rep_btree_navigator.cc"
"strings/internal/cord_rep_btree_navigator.h"
"strings/internal/cord_rep_btree_reader.cc"
"strings/internal/cord_rep_btree_reader.h"
"strings/internal/cord_rep_crc.cc"
"strings/internal/cord_rep_crc.h"
"strings/internal/cord_rep_consume.h"
"strings/internal/cord_rep_consume.cc"
"strings/internal/cord_rep_flat.h"
"strings/internal/cord_rep_ring.cc"
"strings/internal/cord_rep_ring.h"

@@ -340,6 +347,7 @@ set(ABSL_INTERNAL_DLL_FILES
"types/internal/span.h"
"types/variant.h"
"utility/utility.h"
"debugging/leak_check.cc"
)

set(ABSL_INTERNAL_DLL_TARGETS

@@ -350,7 +358,6 @@ set(ABSL_INTERNAL_DLL_TARGETS
"debugging_internal"
"demangle_internal"
"leak_check"
"leak_check_disable"
"stack_consumption"
"debugging"
"hash"

@@ -381,6 +388,7 @@ set(ABSL_INTERNAL_DLL_TARGETS
"kernel_timeout_internal"
"synchronization"
"thread_pool"
"any_invocable"
"bind_front"
"function_ref"
"atomic_hook"

@@ -450,13 +458,13 @@ set(ABSL_INTERNAL_DLL_TARGETS
"hashtablez_sampler"
"hashtable_debug"
"hashtable_debug_hooks"
"have_sse"
"node_hash_policy"
"node_slot_policy"
"raw_hash_map"
"container_common"
"raw_hash_set"
"layout"
"tracked"
"sample_recorder"
)

function(absl_internal_dll_contains)

@@ -40,7 +40,8 @@ endif()
# LINKOPTS: List of link options
# PUBLIC: Add this so that this library will be exported under absl::
# Also in IDE, target will appear in Abseil folder while non PUBLIC will be in Abseil/internal.
# TESTONLY: When added, this target will only be built if BUILD_TESTING=ON.
# TESTONLY: When added, this target will only be built if both
# BUILD_TESTING=ON and ABSL_BUILD_TESTING=ON.
#
# Note:
# By default, absl_cc_library will always create a library named absl_${NAME},

@@ -82,7 +83,8 @@ function(absl_cc_library)
${ARGN}
)

if(ABSL_CC_LIB_TESTONLY AND NOT BUILD_TESTING)
if(NOT ABSL_CC_LIB_PUBLIC AND ABSL_CC_LIB_TESTONLY AND
NOT (BUILD_TESTING AND ABSL_BUILD_TESTING))
return()
endif()
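
For illustration only (this note is editorial, not part of the change, and the target name is hypothetical): the new gate above means a non-PUBLIC `TESTONLY` library such as the sketch below is only generated when both `BUILD_TESTING` and `ABSL_BUILD_TESTING` are ON, while PUBLIC targets keep being built regardless.

```cmake
# Hypothetical test-only helper: because it is TESTONLY and not PUBLIC, it is
# skipped unless BUILD_TESTING and ABSL_BUILD_TESTING are both ON.
absl_cc_library(
  NAME
    example_test_helper
  HDRS
    "internal/example_test_helper.h"
  COPTS
    ${ABSL_DEFAULT_COPTS}
  DEPS
    absl::config
  TESTONLY
)
```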

@@ -168,6 +170,7 @@ function(absl_cc_library)
set(PC_CFLAGS "${PC_CFLAGS} ${cflag}")
endif()
endforeach()
string(REPLACE ";" " " PC_LINKOPTS "${ABSL_CC_LIB_LINKOPTS}")
FILE(GENERATE OUTPUT "${CMAKE_BINARY_DIR}/lib/pkgconfig/absl_${_NAME}.pc" CONTENT "\
prefix=${CMAKE_INSTALL_PREFIX}\n\
exec_prefix=\${prefix}\n\

@@ -179,7 +182,7 @@ Description: Abseil ${_NAME} library\n\
URL: https://abseil.io/\n\
Version: ${PC_VERSION}\n\
Requires:${PC_DEPS}\n\
Libs: -L\${libdir} $<JOIN:${ABSL_CC_LIB_LINKOPTS}, > $<$<NOT:$<BOOL:${ABSL_CC_LIB_IS_INTERFACE}>>:-labsl_${_NAME}>\n\
Libs: -L\${libdir} ${PC_LINKOPTS} $<$<NOT:$<BOOL:${ABSL_CC_LIB_IS_INTERFACE}>>:-labsl_${_NAME}>\n\
Cflags: -I\${includedir}${PC_CFLAGS}\n")
INSTALL(FILES "${CMAKE_BINARY_DIR}/lib/pkgconfig/absl_${_NAME}.pc"
DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")

@@ -364,7 +367,7 @@ endfunction()
# GTest::gtest_main
# )
function(absl_cc_test)
if(NOT BUILD_TESTING)
if(NOT (BUILD_TESTING AND ABSL_BUILD_TESTING))
return()
endif()

@@ -20,8 +20,10 @@ googletest framework
### Step-by-Step Instructions

1. If you want to build the Abseil tests, integrate the Abseil dependency
[Google Test](https://github.com/google/googletest) into your CMake project. To disable Abseil tests, you have to pass
`-DBUILD_TESTING=OFF` when configuring your project with CMake.
[Google Test](https://github.com/google/googletest) into your CMake
project. To disable Abseil tests, you have to pass either
`-DBUILD_TESTING=OFF` or `-DABSL_BUILD_TESTING=OFF` when configuring your
project with CMake.

2. Download Abseil and copy it into a subdirectory in your CMake project or add
Abseil as a [git submodule](https://git-scm.com/docs/git-submodule) in your

@@ -91,7 +93,8 @@ setting a consistent `CMAKE_CXX_STANDARD` that is sufficiently high.

### Running Abseil Tests with CMake

Use the `-DBUILD_TESTING=ON` flag to run Abseil tests.
Use the `-DABSL_BUILD_TESTING=ON` flag to run Abseil tests. Note that
BUILD_TESTING must also be on (the default).

You will need to provide Abseil with a Googletest dependency. There are two
options for how to do this:

@@ -109,7 +112,7 @@ For example, to run just the Abseil tests, you could use this script:
cd path/to/abseil-cpp
mkdir build
cd build
cmake -DBUILD_TESTING=ON -DABSL_USE_GOOGLETEST_HEAD=ON ..
cmake -DABSL_BUILD_TESTING=ON -DABSL_USE_GOOGLETEST_HEAD=ON ..
make -j
ctest
```

@@ -175,7 +178,7 @@ cmake --build /temporary/build/abseil-cpp --target install

## Google Test Options

`-DBUILD_TESTING=ON` must be set to enable testing
`-DABSL_BUILD_TESTING=ON` must be set to enable testing

- Have Abseil download and build Google Test for you: `-DABSL_USE_EXTERNAL_GOOGLETEST=OFF` (default)
- Download and build latest Google Test: `-DABSL_USE_GOOGLETEST_HEAD=ON`
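
As a worked illustration (editorial, not text from the upstream README; it simply combines flags that appear elsewhere in this change, such as in the CI script further down), building and running the tests against a GoogleTest provided or installed by the surrounding environment might look like:

```
cmake -DABSL_BUILD_TESTING=ON -DABSL_USE_EXTERNAL_GOOGLETEST=ON -DABSL_FIND_GOOGLETEST=ON ..
make -j
ctest --output-on-failure
```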
@@ -22,4 +22,4 @@ add_executable(simple simple.cc)

find_package(absl REQUIRED)

target_link_libraries(simple absl::strings)
target_link_libraries(simple absl::strings absl::config)

@@ -14,8 +14,17 @@
// limitations under the License.

#include <iostream>
#include "absl/base/config.h"
#include "absl/strings/substitute.h"

#if !defined(ABSL_LTS_RELEASE_VERSION) || ABSL_LTS_RELEASE_VERSION != 99998877
#error ABSL_LTS_RELEASE_VERSION is not set correctly.
#endif

#if !defined(ABSL_LTS_RELEASE_PATCH_LEVEL) || ABSL_LTS_RELEASE_PATCH_LEVEL != 0
#error ABSL_LTS_RELEASE_PATCH_LEVEL is not set correctly.
#endif

int main(int argc, char** argv) {
for (int i = 0; i < argc; ++i) {
std::cout << absl::Substitute("Arg $0: $1\n", i, argv[i]);

@@ -55,10 +55,10 @@ cmake "${absl_dir}" \
-DABSL_USE_EXTERNAL_GOOGLETEST=ON \
-DABSL_FIND_GOOGLETEST=ON \
-DCMAKE_BUILD_TYPE=Release \
-DBUILD_TESTING=ON \
-DABSL_BUILD_TESTING=ON \
-DBUILD_SHARED_LIBS="${build_shared_libs}"
make -j $(nproc)
ctest -j $(nproc)
ctest -j $(nproc) --output-on-failure
make install
ldconfig
popd

@@ -46,10 +46,6 @@ if (POLICY CMP0091)
cmake_policy(SET CMP0091 NEW)
endif (POLICY CMP0091)

# Set BUILD_TESTING to OFF by default.
# This must come before the project() and include(CTest) lines.
OPTION(BUILD_TESTING "Build tests" OFF)

project(absl LANGUAGES CXX)
include(CTest)

@@ -111,8 +107,11 @@ find_package(Threads REQUIRED)

include(CMakeDependentOption)

option(ABSL_BUILD_TESTING
"If ON, Abseil will build all of Abseil's own tests." OFF)

option(ABSL_USE_EXTERNAL_GOOGLETEST
"If ON, Abseil will assume that the targets for GoogleTest are already provided by the including project. This makes sense when Abseil is used with add_subproject." OFF)
"If ON, Abseil will assume that the targets for GoogleTest are already provided by the including project. This makes sense when Abseil is used with add_subdirectory." OFF)

cmake_dependent_option(ABSL_FIND_GOOGLETEST
"If ON, Abseil will use find_package(GTest) rather than assuming that GoogleTest is already provided by the including project."

@@ -130,13 +129,19 @@ set(ABSL_LOCAL_GOOGLETEST_DIR "/usr/src/googletest" CACHE PATH
"If ABSL_USE_GOOGLETEST_HEAD is OFF and ABSL_GOOGLETEST_URL is not set, specifies the directory of a local GoogleTest checkout."
)

if(BUILD_TESTING)
if(BUILD_TESTING AND ABSL_BUILD_TESTING)
## check targets
if (ABSL_USE_EXTERNAL_GOOGLETEST)
if (ABSL_FIND_GOOGLETEST)
find_package(GTest REQUIRED)
elseif(NOT TARGET GTest::gtest)
if(TARGET gtest)
# When Google Test is included directly rather than through find_package, the aliases are missing.
add_library(GTest::gtest ALIAS gtest)
add_library(GTest::gtest_main ALIAS gtest_main)
add_library(GTest::gmock ALIAS gmock)
add_library(GTest::gmock_main ALIAS gmock_main)
else()
if (NOT TARGET gtest AND NOT TARGET GTest::gtest)
message(FATAL_ERROR "ABSL_USE_EXTERNAL_GOOGLETEST is ON and ABSL_FIND_GOOGLETEST is OFF, which means that the top-level project must build the Google Test project. However, the target gtest was not found.")
endif()
endif()

@@ -146,7 +151,7 @@ if(BUILD_TESTING)
message(FATAL_ERROR "Do not set both ABSL_USE_GOOGLETEST_HEAD and ABSL_GOOGLETEST_DOWNLOAD_URL")
endif()
if(ABSL_USE_GOOGLETEST_HEAD)
set(absl_gtest_download_url "https://github.com/google/googletest/archive/master.zip")
set(absl_gtest_download_url "https://github.com/google/googletest/archive/main.zip")
elseif(ABSL_GOOGLETEST_DOWNLOAD_URL)
set(absl_gtest_download_url ${ABSL_GOOGLETEST_DOWNLOAD_URL})
endif()

@@ -158,24 +163,10 @@ if(BUILD_TESTING)
include(CMake/Googletest/DownloadGTest.cmake)
endif()

if (NOT ABSL_FIND_GOOGLETEST)
# When Google Test is included directly rather than through find_package, the aliases are missing.
add_library(GTest::gtest_main ALIAS gtest_main)
add_library(GTest::gtest ALIAS gtest)
add_library(GTest::gmock ALIAS gmock)
endif()

check_target(GTest::gtest)
check_target(GTest::gtest_main)
check_target(GTest::gmock)
check_target(GTest::gmock_main)

list(APPEND ABSL_TEST_COMMON_LIBRARIES
GTest::gtest_main
GTest::gtest
GTest::gmock
${CMAKE_THREAD_LIBS_INIT}
)
endif()

add_subdirectory(absl)

@@ -1,3 +1,2 @@
danilchap@chromium.org
kwiberg@chromium.org
mbonadei@chromium.org

@@ -4,14 +4,15 @@ URL: https://github.com/abseil/abseil-cpp
License: Apache 2.0
License File: LICENSE
Version: 0
Revision: 637722af3a60c17915d3325604a0435ee92a41b4
Revision: 4bbdb026899fea9f882a95cbd7d6a4adaf49b2dd
Security Critical: yes

Description:
This directory contains the source code of Abseil for C++. This can be used by
Chromium, subject to the guidance at https://chromium-cpp.appspot.com/; it can
be used without restriction by Chromium's dependencies, except that objects
compiled into Chromium itself cannot use anything relying on
Chromium, subject to the guidance at
https://chromium.googlesource.com/chromium/src/+/main/styleguide/c++/c++-features.md;
it can be used without restriction by Chromium's dependencies, except that
objects compiled into Chromium itself cannot use anything relying on
absl::base_internal::FastTypeId (see https://crbug.com/1096380).

How to update Abseil:

@@ -33,4 +34,4 @@ Local Modifications:
* Patches from //third_party/abseil-cpp/patches have been applied.

* Increment this number to silence presubmits about modifying files in
third_party when regenerating absl .def files: 1
third_party when regenerating absl .def files: 2

@@ -92,6 +92,9 @@ Abseil contains the following C++ library components:
available within C++14 and C++17 versions of the C++ `<type_traits>` library.
* [`numeric`](absl/numeric/)
<br /> The `numeric` library contains C++11-compatible 128-bit integers.
* [`profiling`](absl/profiling/)
<br /> The `profiling` library contains utility code for profiling C++
entities. It is currently a private dependency of other Abseil libraries.
* [`status`](absl/status/)
<br /> The `status` contains abstractions for error handling, specifically
`absl::Status` and `absl::StatusOr<T>`.

@@ -20,33 +20,42 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

# GoogleTest/GoogleMock framework. Used by most unit-tests.
http_archive(
name = "com_google_googletest", # 2021-07-09T13:28:13Z
sha256 = "12ef65654dc01ab40f6f33f9d02c04f2097d2cd9fbe48dc6001b29543583b0ad",
strip_prefix = "googletest-8d51ffdfab10b3fba636ae69bc03da4b54f8c235",
name = "com_google_googletest", # 2022-06-16T20:18:32Z
sha256 = "a1d3123179024258f9c399d45da3e0b09c4aaf8d2c041466ce5b4793a8929f23",
strip_prefix = "googletest-86add13493e5c881d7e4ba77fb91c1f57752b3a4",
# Keep this URL in sync with ABSL_GOOGLETEST_COMMIT in ci/cmake_common.sh.
urls = ["https://github.com/google/googletest/archive/8d51ffdfab10b3fba636ae69bc03da4b54f8c235.zip"],
urls = ["https://github.com/google/googletest/archive/86add13493e5c881d7e4ba77fb91c1f57752b3a4.zip"],
)

# RE2 (the regular expression library used by GoogleTest)
# Note this must use a commit from the `abseil` branch of the RE2 project.
# https://github.com/google/re2/tree/abseil
http_archive(
name = "com_googlesource_code_re2",
sha256 = "0a890c2aa0bb05b2ce906a15efb520d0f5ad4c7d37b8db959c43772802991887",
strip_prefix = "re2-a427f10b9fb4622dd6d8643032600aa1b50fbd12",
urls = ["https://github.com/google/re2/archive/a427f10b9fb4622dd6d8643032600aa1b50fbd12.zip"], # 2022-06-09
)

# Google benchmark.
http_archive(
name = "com_github_google_benchmark", # 2021-07-01T09:02:54Z
sha256 = "1cb4b97a90aa1fd9c8e412a6bc29fc13fc140162a4a0db3811af40befd8c9ea5",
strip_prefix = "benchmark-e451e50e9b8af453f076dec10bd6890847f1624e",
urls = ["https://github.com/google/benchmark/archive/e451e50e9b8af453f076dec10bd6890847f1624e.zip"],
name = "com_github_google_benchmark", # 2021-09-20T09:19:51Z
sha256 = "62e2f2e6d8a744d67e4bbc212fcfd06647080de4253c97ad5c6749e09faf2cb0",
strip_prefix = "benchmark-0baacde3618ca617da95375e0af13ce1baadea47",
urls = ["https://github.com/google/benchmark/archive/0baacde3618ca617da95375e0af13ce1baadea47.zip"],
)

# C++ rules for Bazel.
# Bazel Skylib.
http_archive(
name = "rules_cc", # 2021-06-07T16:41:49Z
sha256 = "b295cad8c5899e371dde175079c0a2cdc0151f5127acc92366a8c986beb95c76",
strip_prefix = "rules_cc-daf6ace7cfeacd6a83e9ff2ed659f416537b6c74",
urls = ["https://github.com/bazelbuild/rules_cc/archive/daf6ace7cfeacd6a83e9ff2ed659f416537b6c74.zip"],
name = "bazel_skylib",
urls = ["https://github.com/bazelbuild/bazel-skylib/releases/download/1.2.1/bazel-skylib-1.2.1.tar.gz"],
sha256 = "f7be3474d42aae265405a592bb7da8e171919d74c16f082a5457840f06054728",
)

# Bazel platform rules.
http_archive(
name = "platforms",
sha256 = "b601beaf841244de5c5a50d2b2eddd34839788000fa1be4260ce6603ca0d8eb7",
strip_prefix = "platforms-98939346da932eef0b54cf808622f5bb0928f00b",
urls = ["https://github.com/bazelbuild/platforms/archive/98939346da932eef0b54cf808622f5bb0928f00b.zip"],
sha256 = "a879ea428c6d56ab0ec18224f976515948822451473a80d06c2e50af0bbe5121",
strip_prefix = "platforms-da5541f26b7de1dc8e04c075c99df5351742a4a2",
urls = ["https://github.com/bazelbuild/platforms/archive/da5541f26b7de1dc8e04c075c99df5351742a4a2.zip"], # 2022-05-27
)

@@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

load("@bazel_skylib//lib:selects.bzl", "selects")

package(default_visibility = ["//visibility:public"])

licenses(["notice"])

@@ -64,9 +66,52 @@ config_setting(
)

config_setting(
name = "wasm",
name = "cpu_wasm",
values = {
"cpu": "wasm",
},
visibility = [":__subpackages__"],
)

config_setting(
name = "cpu_wasm32",
values = {
"cpu": "wasm32",
},
visibility = [":__subpackages__"],
)

config_setting(
name = "platforms_wasm32",
constraint_values = [
"@platforms//cpu:wasm32",
],
visibility = [":__subpackages__"],
)

config_setting(
name = "platforms_wasm64",
constraint_values = [
"@platforms//cpu:wasm64",
],
visibility = [":__subpackages__"],
)

selects.config_setting_group(
name = "wasm",
match_any = [
":cpu_wasm",
":cpu_wasm32",
":platforms_wasm32",
":platforms_wasm64",
],
visibility = [":__subpackages__"],
)

config_setting(
name = "fuchsia",
values = {
"cpu": "fuchsia",
},
visibility = [":__subpackages__"],
)

@@ -25,6 +25,7 @@ add_subdirectory(hash)
add_subdirectory(memory)
add_subdirectory(meta)
add_subdirectory(numeric)
add_subdirectory(profiling)
add_subdirectory(random)
add_subdirectory(status)
add_subdirectory(strings)

@@ -14,7 +14,6 @@
# limitations under the License.
#

load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test")
load(
"//absl:copts/configure_copts.bzl",
"ABSL_DEFAULT_COPTS",

@@ -44,6 +43,7 @@ cc_test(
linkopts = ABSL_DEFAULT_LINKOPTS,
deps = [
":algorithm",
"//absl/base:config",
"@com_google_googletest//:gtest_main",
],
)

@@ -23,6 +23,7 @@ absl_source_set("algorithm_test") {
sources = [ "algorithm_test.cc" ]
deps = [
":algorithm",
"//third_party/abseil-cpp/absl/base:config",
"//third_party/googletest:gtest",
"//third_party/googletest:gmock",
]

@@ -35,6 +35,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::algorithm
absl::config
GTest::gmock_main
)

@@ -20,6 +20,7 @@

#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "absl/base/config.h"

namespace {

@@ -50,7 +51,15 @@ TEST(EqualTest, EmptyRange) {
std::vector<int> empty1;
std::vector<int> empty2;

// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=105705
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wnonnull"
#endif
EXPECT_FALSE(absl::equal(v1.begin(), v1.end(), empty1.begin(), empty1.end()));
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
#pragma GCC diagnostic pop
#endif
EXPECT_FALSE(absl::equal(empty1.begin(), empty1.end(), v1.begin(), v1.end()));
EXPECT_TRUE(
absl::equal(empty1.begin(), empty1.end(), empty2.begin(), empty2.end()));

@@ -166,7 +166,7 @@ container_algorithm_internal::ContainerDifferenceType<const C> c_distance(
// c_all_of()
//
// Container-based version of the <algorithm> `std::all_of()` function to
// test a condition on all elements within a container.
// test if all elements within a container satisfy a condition.
template <typename C, typename Pred>
bool c_all_of(const C& c, Pred&& pred) {
return std::all_of(container_algorithm_internal::c_begin(c),

@@ -14,7 +14,6 @@
# limitations under the License.
#

load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test")
load(
"//absl:copts/configure_copts.bzl",
"ABSL_DEFAULT_COPTS",

@@ -76,6 +75,7 @@ cc_library(
":atomic_hook",
":config",
":core_headers",
":errno_saver",
":log_severity",
],
)

@@ -115,9 +115,6 @@ cc_library(

cc_library(
name = "dynamic_annotations",
srcs = [
"internal/dynamic_annotations.h",
],
hdrs = [
"dynamic_annotations.h",
],

@@ -131,9 +128,6 @@ cc_library(

cc_library(
name = "core_headers",
srcs = [
"internal/thread_annotations.h",
],
hdrs = [
"attributes.h",
"const_init.h",

@@ -158,7 +152,9 @@ cc_library(
"internal/direct_mmap.h",
"internal/low_level_alloc.h",
],
copts = ABSL_DEFAULT_COPTS,
copts = ABSL_DEFAULT_COPTS + select({
"//conditions:default": [],
}),
linkopts = select({
"//absl:msvc_compiler": [],
"//absl:clang-cl_compiler": [],

@@ -433,6 +429,9 @@ cc_test(
srcs = ["spinlock_test_common.cc"],
copts = ABSL_TEST_COPTS,
linkopts = ABSL_DEFAULT_LINKOPTS,
tags = [
"no_test_wasm",
],
deps = [
":base",
":base_internal",

@@ -558,6 +557,7 @@ cc_test(
linkopts = ABSL_DEFAULT_LINKOPTS,
tags = [
"no_test_ios_x86_64",
"no_test_wasm",
],
deps = [
":malloc_internal",

@@ -571,6 +571,9 @@ cc_test(
srcs = ["internal/thread_identity_test.cc"],
copts = ABSL_TEST_COPTS,
linkopts = ABSL_DEFAULT_LINKOPTS,
tags = [
"no_test_wasm",
],
deps = [
":base",
":core_headers",

@@ -593,75 +596,6 @@ cc_test(
],
)

cc_library(
name = "exponential_biased",
srcs = ["internal/exponential_biased.cc"],
hdrs = ["internal/exponential_biased.h"],
linkopts = ABSL_DEFAULT_LINKOPTS,
visibility = [
"//absl:__subpackages__",
],
deps = [
":config",
":core_headers",
],
)

cc_test(
name = "exponential_biased_test",
size = "small",
srcs = ["internal/exponential_biased_test.cc"],
copts = ABSL_TEST_COPTS,
linkopts = ABSL_DEFAULT_LINKOPTS,
visibility = ["//visibility:private"],
deps = [
":exponential_biased",
"//absl/strings",
"@com_google_googletest//:gtest_main",
],
)

cc_library(
name = "periodic_sampler",
srcs = ["internal/periodic_sampler.cc"],
hdrs = ["internal/periodic_sampler.h"],
copts = ABSL_DEFAULT_COPTS,
linkopts = ABSL_DEFAULT_LINKOPTS,
deps = [
":core_headers",
":exponential_biased",
],
)

cc_test(
name = "periodic_sampler_test",
size = "small",
srcs = ["internal/periodic_sampler_test.cc"],
copts = ABSL_TEST_COPTS,
linkopts = ABSL_DEFAULT_LINKOPTS,
visibility = ["//visibility:private"],
deps = [
":core_headers",
":periodic_sampler",
"@com_google_googletest//:gtest_main",
],
)

cc_binary(
name = "periodic_sampler_benchmark",
testonly = 1,
srcs = ["internal/periodic_sampler_benchmark.cc"],
copts = ABSL_TEST_COPTS,
linkopts = ABSL_DEFAULT_LINKOPTS,
tags = ["benchmark"],
visibility = ["//visibility:private"],
deps = [
":core_headers",
":periodic_sampler",
"@com_github_google_benchmark//:benchmark_main",
],
)

cc_library(
name = "scoped_set_env",
testonly = 1,

@@ -772,6 +706,31 @@ cc_test(
],
)

cc_library(
name = "prefetch",
hdrs = ["internal/prefetch.h"],
copts = ABSL_DEFAULT_COPTS,
linkopts = ABSL_DEFAULT_LINKOPTS,
visibility = [
"//absl:__subpackages__",
],
deps = [
":config",
],
)

cc_test(
name = "prefetch_test",
size = "small",
srcs = ["internal/prefetch_test.cc"],
copts = ABSL_TEST_COPTS,
linkopts = ABSL_DEFAULT_LINKOPTS,
deps = [
":prefetch",
"@com_google_googletest//:gtest_main",
],
)

cc_test(
name = "unique_small_name_test",
size = "small",

@@ -35,6 +35,7 @@ absl_source_set("raw_logging_internal") {
":atomic_hook",
":config",
":core_headers",
":errno_saver",
":log_severity",
]
visibility = [ "//third_party/abseil-cpp/absl/*" ]

@@ -231,44 +232,19 @@ absl_source_set("endian") {
]
}

absl_source_set("exponential_biased") {
sources = [ "internal/exponential_biased.cc" ]
public = [ "internal/exponential_biased.h" ]
public_deps = [
":config",
":core_headers",
]
visibility = [ "//third_party/abseil-cpp/absl/*" ]
}

absl_source_set("periodic_sampler") {
sources = [ "internal/periodic_sampler.cc" ]
public = [ "internal/periodic_sampler.h" ]
public_deps = [
":core_headers",
":exponential_biased",
]
}

absl_source_set("scoped_set_env") {
testonly = true
public = [ "internal/scoped_set_env.h" ]
sources = [ "internal/scoped_set_env.cc" ]
public_deps = [
":config",
]
deps = [
":raw_logging_internal",
]
public_deps = [ ":config" ]
deps = [ ":raw_logging_internal" ]
visibility = [ "//third_party/abseil-cpp/absl/*" ]
}

absl_source_set("strerror") {
sources = [ "internal/strerror.cc" ]
public = [ "internal/strerror.h" ]
public_deps = [
":config",
]
public_deps = [ ":config" ]
deps = [
":core_headers",
":errno_saver",

@@ -282,6 +258,21 @@ absl_source_set("fast_type_id") {
visibility = [ "//third_party/abseil-cpp/absl/*" ]
}

absl_source_set("prefetch") {
public = [ "internal/prefetch.h" ]
deps = [ ":config" ]
visibility = [ "//third_party/abseil-cpp/absl/*" ]
}

absl_source_set("prefetch_test") {
testonly = true
sources = [ "internal/prefetch_test.cc" ]
deps = [
":prefetch",
"//third_party/googletest:gtest",
]
}

absl_source_set("config_test") {
testonly = true
sources = [ "config_test.cc" ]

@@ -16,6 +16,7 @@

find_library(LIBRT rt)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
atomic_hook

@@ -28,6 +29,7 @@ absl_cc_library(
${ABSL_DEFAULT_COPTS}
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
errno_saver

@@ -52,6 +54,7 @@ absl_cc_library(
${ABSL_DEFAULT_COPTS}
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
raw_logging_internal

@@ -63,11 +66,13 @@ absl_cc_library(
absl::atomic_hook
absl::config
absl::core_headers
absl::errno_saver
absl::log_severity
COPTS
${ABSL_DEFAULT_COPTS}
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
spinlock_wait

@@ -104,8 +109,6 @@ absl_cc_library(
dynamic_annotations
HDRS
"dynamic_annotations.h"
SRCS
"internal/dynamic_annotations.h"
COPTS
${ABSL_DEFAULT_COPTS}
DEPS

@@ -123,7 +126,6 @@ absl_cc_library(
"optimization.h"
"port.h"
"thread_annotations.h"
"internal/thread_annotations.h"
COPTS
${ABSL_DEFAULT_COPTS}
DEPS

@@ -131,6 +133,7 @@ absl_cc_library(
PUBLIC
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
malloc_internal

@@ -151,6 +154,7 @@ absl_cc_library(
Threads::Threads
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
base_internal

@@ -207,6 +211,7 @@ absl_cc_library(
PUBLIC
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
throw_delegate

@@ -221,6 +226,7 @@ absl_cc_library(
absl::raw_logging_internal
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
exception_testing

@@ -234,6 +240,7 @@ absl_cc_library(
TESTONLY
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
pretty_function

@@ -243,6 +250,7 @@ absl_cc_library(
${ABSL_DEFAULT_COPTS}
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
exception_safety_testing

@@ -276,6 +284,7 @@ absl_cc_test(
GTest::gtest_main
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
atomic_hook_test_helper

@@ -375,6 +384,7 @@ absl_cc_test(
GTest::gtest_main
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
spinlock_test_common

@@ -409,6 +419,7 @@ absl_cc_test(
GTest::gtest_main
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
endian

@@ -519,60 +530,7 @@ absl_cc_test(
GTest::gtest_main
)

absl_cc_library(
NAME
exponential_biased
SRCS
"internal/exponential_biased.cc"
HDRS
"internal/exponential_biased.h"
COPTS
${ABSL_DEFAULT_COPTS}
DEPS
absl::config
absl::core_headers
)

absl_cc_test(
NAME
exponential_biased_test
SRCS
"internal/exponential_biased_test.cc"
COPTS
${ABSL_TEST_COPTS}
DEPS
absl::exponential_biased
absl::strings
GTest::gmock_main
)

absl_cc_library(
NAME
periodic_sampler
SRCS
"internal/periodic_sampler.cc"
HDRS
"internal/periodic_sampler.h"
COPTS
${ABSL_DEFAULT_COPTS}
DEPS
absl::core_headers
absl::exponential_biased
)

absl_cc_test(
NAME
periodic_sampler_test
SRCS
"internal/periodic_sampler_test.cc"
COPTS
${ABSL_TEST_COPTS}
DEPS
absl::core_headers
absl::periodic_sampler
GTest::gmock_main
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
scoped_set_env

@@ -624,6 +582,7 @@ absl_cc_test(
GTest::gtest_main
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
strerror

@@ -655,6 +614,7 @@ absl_cc_test(
GTest::gtest_main
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
fast_type_id

@@ -680,6 +640,32 @@ absl_cc_test(
GTest::gtest_main
)

# Internal-only target, do not depend on directly.
absl_cc_library(
NAME
prefetch
HDRS
"internal/prefetch.h"
COPTS
${ABSL_DEFAULT_COPTS}
LINKOPTS
${ABSL_DEFAULT_LINKOPTS}
DEPS
absl::config
)

absl_cc_test(
NAME
prefetch_test
SRCS
"internal/prefetch_test.cc"
COPTS
${ABSL_TEST_COPTS}
DEPS
absl::prefetch
GTest::gtest_main
)

absl_cc_test(
NAME
optimization_test

@@ -138,7 +138,8 @@
// step, presumably because Windows doesn't use ELF binaries.
#if (ABSL_HAVE_ATTRIBUTE(weak) || \
(defined(__GNUC__) && !defined(__clang__))) && \
(!defined(_WIN32) || __clang_major__ < 9) && !defined(__MINGW32__)
(!defined(_WIN32) || (defined(__clang__) && __clang_major__ >= 9)) && \
!defined(__MINGW32__)
#undef ABSL_ATTRIBUTE_WEAK
#define ABSL_ATTRIBUTE_WEAK __attribute__((weak))
#define ABSL_HAVE_ATTRIBUTE_WEAK 1

@@ -212,6 +213,9 @@
// https://gcc.gnu.org/gcc-4.8/changes.html
#if ABSL_HAVE_ATTRIBUTE(no_sanitize_address)
#define ABSL_ATTRIBUTE_NO_SANITIZE_ADDRESS __attribute__((no_sanitize_address))
#elif defined(_MSC_VER) && _MSC_VER >= 1928
// https://docs.microsoft.com/en-us/cpp/cpp/no-sanitize-address
#define ABSL_ATTRIBUTE_NO_SANITIZE_ADDRESS __declspec(no_sanitize_address)
#else
#define ABSL_ATTRIBUTE_NO_SANITIZE_ADDRESS
#endif

@@ -311,15 +315,22 @@
__attribute__((section(#name))) __attribute__((noinline))
#endif


// ABSL_ATTRIBUTE_SECTION_VARIABLE
//
// Tells the compiler/linker to put a given variable into a section and define
// `__start_ ## name` and `__stop_ ## name` symbols to bracket the section.
// This functionality is supported by GNU linker.
#ifndef ABSL_ATTRIBUTE_SECTION_VARIABLE
#ifdef _AIX
// __attribute__((section(#name))) on AIX is achived by using the `.csect` psudo
// op which includes an additional integer as part of its syntax indcating
// alignment. If data fall under different alignments then you might get a
// compilation error indicating a `Section type conflict`.
#define ABSL_ATTRIBUTE_SECTION_VARIABLE(name)
#else
#define ABSL_ATTRIBUTE_SECTION_VARIABLE(name) __attribute__((section(#name)))
#endif
#endif

// ABSL_DECLARE_ATTRIBUTE_SECTION_VARS
//

@@ -392,6 +403,9 @@
//
// Tells the compiler to warn about unused results.
//
// For code or headers that are assured to only build with C++17 and up, prefer
// just using the standard `[[nodiscard]]` directly over this macro.
//
// When annotating a function, it must appear as the first part of the
// declaration or definition. The compiler will warn if the return value from
// such a function is unused:

@@ -418,9 +432,10 @@
// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=66425
//
// Note: past advice was to place the macro after the argument list.
#if ABSL_HAVE_ATTRIBUTE(nodiscard)
#define ABSL_MUST_USE_RESULT [[nodiscard]]
#elif defined(__clang__) && ABSL_HAVE_ATTRIBUTE(warn_unused_result)
//
// TODO(b/176172494): Use ABSL_HAVE_CPP_ATTRIBUTE(nodiscard) when all code is
// compliant with the stricter [[nodiscard]].
#if defined(__clang__) && ABSL_HAVE_ATTRIBUTE(warn_unused_result)
#define ABSL_MUST_USE_RESULT __attribute__((warn_unused_result))
#else
#define ABSL_MUST_USE_RESULT

@@ -548,13 +563,19 @@
// ABSL_ATTRIBUTE_PACKED
//
// Instructs the compiler not to use natural alignment for a tagged data
// structure, but instead to reduce its alignment to 1. This attribute can
// either be applied to members of a structure or to a structure in its
// entirety. Applying this attribute (judiciously) to a structure in its
// entirety to optimize the memory footprint of very commonly-used structs is
// fine. Do not apply this attribute to a structure in its entirety if the
// purpose is to control the offsets of the members in the structure. Instead,
// apply this attribute only to structure members that need it.
// structure, but instead to reduce its alignment to 1.
//
// Therefore, DO NOT APPLY THIS ATTRIBUTE TO STRUCTS CONTAINING ATOMICS. Doing
// so can cause atomic variables to be mis-aligned and silently violate
// atomicity on x86.
//
// This attribute can either be applied to members of a structure or to a
// structure in its entirety. Applying this attribute (judiciously) to a
// structure in its entirety to optimize the memory footprint of very
// commonly-used structs is fine. Do not apply this attribute to a structure in
// its entirety if the purpose is to control the offsets of the members in the
// structure. Instead, apply this attribute only to structure members that need
// it.
//
// When applying ABSL_ATTRIBUTE_PACKED only to specific structure members the
// natural alignment of structure members not annotated is preserved. Aligned

@@ -628,6 +649,9 @@
// declarations. The macro argument is used as a custom diagnostic message (e.g.
// suggestion of a better alternative).
//
// For code or headers that are assured to only build with C++14 and up, prefer
// just using the standard `[[deprecated("message")]]` directly over this macro.
//
// Examples:
//
// class ABSL_DEPRECATED("Use Bar instead") Foo {...};

@@ -638,14 +662,17 @@
// ABSL_DEPRECATED("Use DoThat() instead")
// void DoThis();
//
// enum FooEnum {
// kBar ABSL_DEPRECATED("Use kBaz instead"),
// };
//
// Every usage of a deprecated entity will trigger a warning when compiled with
// clang's `-Wdeprecated-declarations` option. This option is turned off by
// default, but the warnings will be reported by clang-tidy.
#if defined(__clang__) && defined(__cplusplus) && __cplusplus >= 201103L
// GCC/Clang's `-Wdeprecated-declarations` option. Google's production toolchain
// turns this warning off by default, instead relying on clang-tidy to report
// new uses of deprecated code.
#if ABSL_HAVE_ATTRIBUTE(deprecated)
#define ABSL_DEPRECATED(message) __attribute__((deprecated(message)))
#endif

#ifndef ABSL_DEPRECATED
#else
#define ABSL_DEPRECATED(message)
#endif

@@ -655,9 +682,18 @@
// not compile (on supported platforms) unless the variable has a constant
// initializer. This is useful for variables with static and thread storage
// duration, because it guarantees that they will not suffer from the so-called
// "static init order fiasco". Prefer to put this attribute on the most visible
// declaration of the variable, if there's more than one, because code that
// accesses the variable can then use the attribute for optimization.
// "static init order fiasco".
//
// This attribute must be placed on the initializing declaration of the
// variable. Some compilers will give a -Wmissing-constinit warning when this
// attribute is placed on some other declaration but missing from the
// initializing declaration.
//
// In some cases (notably with thread_local variables), `ABSL_CONST_INIT` can
// also be used in a non-initializing declaration to tell the compiler that a
// variable is already initialized, reducing overhead that would otherwise be
// incurred by a hidden guard variable. Thus annotating all declarations with
// this attribute is recommended to potentially enhance optimization.
//
// Example:
//

@@ -666,14 +702,19 @@
// ABSL_CONST_INIT static MyType my_var;
// };
//
// MyType MyClass::my_var = MakeMyType(...);
// ABSL_CONST_INIT MyType MyClass::my_var = MakeMyType(...);
//
// For code or headers that are assured to only build with C++20 and up, prefer
// just using the standard `constinit` keyword directly over this macro.
//
// Note that this attribute is redundant if the variable is declared constexpr.
#if ABSL_HAVE_CPP_ATTRIBUTE(clang::require_constant_initialization)
#if defined(__cpp_constinit) && __cpp_constinit >= 201907L
#define ABSL_CONST_INIT constinit
#elif ABSL_HAVE_CPP_ATTRIBUTE(clang::require_constant_initialization)
#define ABSL_CONST_INIT [[clang::require_constant_initialization]]
#else
#define ABSL_CONST_INIT
#endif // ABSL_HAVE_CPP_ATTRIBUTE(clang::require_constant_initialization)
#endif

// ABSL_ATTRIBUTE_PURE_FUNCTION
//

@ -29,6 +29,10 @@
|
|||
#include <type_traits>
|
||||
#include <utility>
|
||||
|
||||
#if defined(__cpp_lib_bit_cast) && __cpp_lib_bit_cast >= 201806L
|
||||
#include <bit> // For std::bit_cast.
|
||||
#endif // defined(__cpp_lib_bit_cast) && __cpp_lib_bit_cast >= 201806L
|
||||
|
||||
#include "absl/base/internal/identity.h"
|
||||
#include "absl/base/macros.h"
|
||||
#include "absl/meta/type_traits.h"
|
||||
|
@ -36,19 +40,6 @@
|
|||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
|
||||
namespace internal_casts {
|
||||
|
||||
template <class Dest, class Source>
|
||||
struct is_bitcastable
|
||||
: std::integral_constant<
|
||||
bool,
|
||||
sizeof(Dest) == sizeof(Source) &&
|
||||
type_traits_internal::is_trivially_copyable<Source>::value &&
|
||||
type_traits_internal::is_trivially_copyable<Dest>::value &&
|
||||
std::is_default_constructible<Dest>::value> {};
|
||||
|
||||
} // namespace internal_casts
|
||||
|
||||
// implicit_cast()
|
||||
//
|
||||
// Performs an implicit conversion between types following the language
|
||||
|
@ -105,81 +96,83 @@ constexpr To implicit_cast(typename absl::internal::identity_t<To> to) {
|
|||
|
||||
// bit_cast()
|
||||
//
|
||||
// Performs a bitwise cast on a type without changing the underlying bit
|
||||
// representation of that type's value. The two types must be of the same size
|
||||
// and both types must be trivially copyable. As with most casts, use with
|
||||
// caution. A `bit_cast()` might be needed when you need to temporarily treat a
|
||||
// type as some other type, such as in the following cases:
|
||||
// Creates a value of the new type `Dest` whose representation is the same as
|
||||
// that of the argument, which is of (deduced) type `Source` (a "bitwise cast";
|
||||
// every bit in the value representation of the result is equal to the
|
||||
// corresponding bit in the object representation of the source). Source and
|
||||
// destination types must be of the same size, and both types must be trivially
|
||||
// copyable.
|
||||
//
|
||||
// * Serialization (casting temporarily to `char *` for those purposes is
|
||||
// always allowed by the C++ standard)
|
||||
// * Managing the individual bits of a type within mathematical operations
|
||||
// that are not normally accessible through that type
|
||||
// * Casting non-pointer types to pointer types (casting the other way is
|
||||
// allowed by `reinterpret_cast()` but round-trips cannot occur the other
|
||||
// way).
|
||||
//
|
||||
// Example:
|
||||
// As with most casts, use with caution. A `bit_cast()` might be needed when you
|
||||
// need to treat a value as the value of some other type, for example, to access
|
||||
// the individual bits of an object which are not normally accessible through
|
||||
// the object's type, such as for working with the binary representation of a
|
||||
// floating point value:
|
||||
//
|
||||
// float f = 3.14159265358979;
|
||||
// int i = bit_cast<int32_t>(f);
|
||||
// int i = bit_cast<int>(f);
|
||||
// // i = 0x40490fdb
|
||||
//
|
||||
// Casting non-pointer types to pointer types and then dereferencing them
|
||||
// traditionally produces undefined behavior.
|
||||
// Reinterpreting and accessing a value directly as a different type (as shown
|
||||
// below) usually results in undefined behavior.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// // WRONG
|
||||
// float f = 3.14159265358979; // WRONG
|
||||
// int i = * reinterpret_cast<int*>(&f); // WRONG
|
||||
// float f = 3.14159265358979;
|
||||
// int i = reinterpret_cast<int&>(f); // Wrong
|
||||
// int j = *reinterpret_cast<int*>(&f); // Equally wrong
|
||||
// int k = *bit_cast<int*>(&f); // Equally wrong
|
||||
//
|
||||
// The address-casting method produces undefined behavior according to the ISO
|
||||
// C++ specification section [basic.lval]. Roughly, this section says: if an
|
||||
// object in memory has one type, and a program accesses it with a different
|
||||
// type, the result is undefined behavior for most values of "different type".
|
||||
// Reinterpret-casting results in undefined behavior according to the ISO C++
|
||||
// specification, section [basic.lval]. Roughly, this section says: if an object
|
||||
// in memory has one type, and a program accesses it with a different type, the
|
||||
// result is undefined behavior for most "different type".
|
||||
//
|
||||
// Using bit_cast on a pointer and then dereferencing it is no better than using
|
||||
// reinterpret_cast. You should only use bit_cast on the value itself.
|
||||
//
|
||||
// Such casting results in type punning: holding an object in memory of one type
|
||||
// and reading its bits back using a different type. A `bit_cast()` avoids this
|
||||
// issue by implementing its casts using `memcpy()`, which avoids introducing
|
||||
// this undefined behavior.
|
||||
// issue by copying the object representation to a new value, which avoids
|
||||
// introducing this undefined behavior (since the original value is never
|
||||
// accessed in the wrong way).
|
||||
//
|
||||
// NOTE: The requirements here are more strict than the bit_cast of standard
|
||||
// proposal p0476 due to the need for workarounds and lack of intrinsics.
|
||||
// Specifically, this implementation also requires `Dest` to be
|
||||
// default-constructible.
|
||||
template <
|
||||
typename Dest, typename Source,
|
||||
typename std::enable_if<internal_casts::is_bitcastable<Dest, Source>::value,
|
||||
// The requirements of `absl::bit_cast` are more strict than that of
|
||||
// `std::bit_cast` unless compiler support is available. Specifically, without
|
||||
// compiler support, this implementation also requires `Dest` to be
|
||||
// default-constructible. In C++20, `absl::bit_cast` is replaced by
|
||||
// `std::bit_cast`.
|
||||
#if defined(__cpp_lib_bit_cast) && __cpp_lib_bit_cast >= 201806L
|
||||
|
||||
using std::bit_cast;
|
||||
|
||||
#else // defined(__cpp_lib_bit_cast) && __cpp_lib_bit_cast >= 201806L
|
||||
|
||||
template <typename Dest, typename Source,
|
||||
typename std::enable_if<
|
||||
sizeof(Dest) == sizeof(Source) &&
|
||||
type_traits_internal::is_trivially_copyable<Source>::value &&
|
||||
type_traits_internal::is_trivially_copyable<Dest>::value
|
||||
#if !ABSL_HAVE_BUILTIN(__builtin_bit_cast)
|
||||
&& std::is_default_constructible<Dest>::value
|
||||
#endif // !ABSL_HAVE_BUILTIN(__builtin_bit_cast)
|
||||
,
|
||||
int>::type = 0>
|
||||
#if ABSL_HAVE_BUILTIN(__builtin_bit_cast)
|
||||
inline constexpr Dest bit_cast(const Source& source) {
|
||||
return __builtin_bit_cast(Dest, source);
|
||||
}
|
||||
#else // ABSL_HAVE_BUILTIN(__builtin_bit_cast)
|
||||
inline Dest bit_cast(const Source& source) {
|
||||
Dest dest;
|
||||
memcpy(static_cast<void*>(std::addressof(dest)),
|
||||
static_cast<const void*>(std::addressof(source)), sizeof(dest));
|
||||
return dest;
|
||||
}
|
||||
#endif // ABSL_HAVE_BUILTIN(__builtin_bit_cast)
|
||||
|
||||
// NOTE: This overload is only picked if the requirements of bit_cast are
|
||||
// not met. It is therefore UB, but is provided temporarily as previous
|
||||
// versions of this function template were unchecked. Do not use this in
|
||||
// new code.
|
||||
template <
|
||||
typename Dest, typename Source,
|
||||
typename std::enable_if<
|
||||
!internal_casts::is_bitcastable<Dest, Source>::value,
|
||||
int>::type = 0>
|
||||
ABSL_DEPRECATED(
|
||||
"absl::bit_cast type requirements were violated. Update the types "
|
||||
"being used such that they are the same size and are both "
|
||||
"TriviallyCopyable.")
|
||||
inline Dest bit_cast(const Source& source) {
|
||||
static_assert(sizeof(Dest) == sizeof(Source),
|
||||
"Source and destination types should have equal sizes.");
|
||||
|
||||
Dest dest;
|
||||
memcpy(&dest, &source, sizeof(dest));
|
||||
return dest;
|
||||
}
|
||||
#endif // defined(__cpp_lib_bit_cast) && __cpp_lib_bit_cast >= 201806L
|
||||
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
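A minimal usage sketch of the value-based cast described above (the helper name is hypothetical):

  #include <cstdint>
  #include "absl/base/casts.h"

  uint32_t FloatBits(float f) {
    // OK: same size, both types trivially copyable.
    return absl::bit_cast<uint32_t>(f);
    // Still wrong: applying bit_cast to a pointer and dereferencing it is just
    // reinterpret_cast in disguise:  int k = *absl::bit_cast<int*>(&f);
  }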
|
||||
|
|
|
@ -56,6 +56,25 @@
|
|||
#include <cstddef>
|
||||
#endif // __cplusplus
|
||||
|
||||
// ABSL_INTERNAL_CPLUSPLUS_LANG
|
||||
//
|
||||
// MSVC does not set the value of __cplusplus correctly, but instead uses
|
||||
// _MSVC_LANG as a stand-in.
|
||||
// https://docs.microsoft.com/en-us/cpp/preprocessor/predefined-macros
|
||||
//
|
||||
// However, there are reports that MSVC even sets _MSVC_LANG incorrectly at
|
||||
// times, for example:
|
||||
// https://github.com/microsoft/vscode-cpptools/issues/1770
|
||||
// https://reviews.llvm.org/D70996
|
||||
//
|
||||
// For this reason, this symbol is considered INTERNAL and code outside of
|
||||
// Abseil must not use it.
|
||||
#if defined(_MSVC_LANG)
|
||||
#define ABSL_INTERNAL_CPLUSPLUS_LANG _MSVC_LANG
|
||||
#elif defined(__cplusplus)
|
||||
#define ABSL_INTERNAL_CPLUSPLUS_LANG __cplusplus
|
||||
#endif
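A sketch of how a language-version macro like this is typically consumed (illustrative only; the symbol is internal, so code outside Abseil should test __cplusplus or _MSVC_LANG directly):

  #if defined(ABSL_INTERNAL_CPLUSPLUS_LANG) && \
      ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L
  // C++17-or-later path, e.g. aliasing std::invoke (see invoke.h later in this change).
  #else
  // C++11/14 fallback path.
  #endif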
|
||||
|
||||
#if defined(__APPLE__)
|
||||
// Included for TARGET_OS_IPHONE, __IPHONE_OS_VERSION_MIN_REQUIRED,
|
||||
// __IPHONE_8_0.
|
||||
|
@ -66,6 +85,35 @@
|
|||
#include "absl/base/options.h"
|
||||
#include "absl/base/policy_checks.h"
|
||||
|
||||
// Abseil long-term support (LTS) releases will define
|
||||
// `ABSL_LTS_RELEASE_VERSION` to the integer representing the date string of the
|
||||
// LTS release version, and will define `ABSL_LTS_RELEASE_PATCH_LEVEL` to the
|
||||
// integer representing the patch-level for that release.
|
||||
//
|
||||
// For example, for LTS release version "20300401.2", this would give us
|
||||
// ABSL_LTS_RELEASE_VERSION == 20300401 && ABSL_LTS_RELEASE_PATCH_LEVEL == 2
|
||||
//
|
||||
// These symbols will not be defined in non-LTS code.
|
||||
//
|
||||
// Abseil recommends that clients live-at-head. Therefore, if you are using
|
||||
// these symbols to assert a minimum version requirement, we recommend you do it
|
||||
// as
|
||||
//
|
||||
// #if defined(ABSL_LTS_RELEASE_VERSION) && ABSL_LTS_RELEASE_VERSION < 20300401
|
||||
// #error Project foo requires Abseil LTS version >= 20300401
|
||||
// #endif
|
||||
//
|
||||
// The `defined(ABSL_LTS_RELEASE_VERSION)` part of the check excludes
|
||||
// live-at-head clients from the minimum version assertion.
|
||||
//
|
||||
// See https://abseil.io/about/releases for more information on Abseil release
|
||||
// management.
|
||||
//
|
||||
// LTS releases can be obtained from
|
||||
// https://github.com/abseil/abseil-cpp/releases.
|
||||
#undef ABSL_LTS_RELEASE_VERSION
|
||||
#undef ABSL_LTS_RELEASE_PATCH_LEVEL
|
||||
|
||||
// Helper macro to convert a CPP variable to a string literal.
|
||||
#define ABSL_INTERNAL_DO_TOKEN_STR(x) #x
|
||||
#define ABSL_INTERNAL_TOKEN_STR(x) ABSL_INTERNAL_DO_TOKEN_STR(x)
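The two-level expansion is what makes the macro stringize a macro's value rather than its name; a tiny illustration with a hypothetical macro:

  #define FOO bar
  // ABSL_INTERNAL_TOKEN_STR(FOO) expands to "bar", whereas
  // ABSL_INTERNAL_DO_TOKEN_STR(FOO) would expand to "FOO".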
|
||||
|
@ -154,12 +202,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
#define ABSL_HAVE_BUILTIN(x) 0
|
||||
#endif
|
||||
|
||||
#if defined(__is_identifier)
|
||||
#define ABSL_INTERNAL_HAS_KEYWORD(x) !(__is_identifier(x))
|
||||
#else
|
||||
#define ABSL_INTERNAL_HAS_KEYWORD(x) 0
|
||||
#endif
|
||||
|
||||
#ifdef __has_feature
|
||||
#define ABSL_HAVE_FEATURE(f) __has_feature(f)
|
||||
#else
|
||||
|
@ -183,11 +225,12 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
#endif
|
||||
|
||||
// ABSL_HAVE_TLS is defined to 1 when __thread should be supported.
|
||||
// We assume __thread is supported on Linux when compiled with Clang or compiled
|
||||
// against libstdc++ with _GLIBCXX_HAVE_TLS defined.
|
||||
// We assume __thread is supported on Linux or Asylo when compiled with Clang or
|
||||
// compiled against libstdc++ with _GLIBCXX_HAVE_TLS defined.
|
||||
#ifdef ABSL_HAVE_TLS
|
||||
#error ABSL_HAVE_TLS cannot be directly set
|
||||
#elif defined(__linux__) && (defined(__clang__) || defined(_GLIBCXX_HAVE_TLS))
|
||||
#elif (defined(__linux__) || defined(__ASYLO__)) && \
|
||||
(defined(__clang__) || defined(_GLIBCXX_HAVE_TLS))
|
||||
#define ABSL_HAVE_TLS 1
|
||||
#endif
|
||||
|
||||
|
@ -214,33 +257,22 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
//
|
||||
// Checks whether `std::is_trivially_copy_assignable<T>` is supported.
|
||||
|
||||
// Notes: Clang with libc++ supports these features, as does gcc >= 5.1 with
|
||||
// either libc++ or libstdc++, and Visual Studio (but not NVCC).
|
||||
// Notes: Clang with libc++ supports these features, as does gcc >= 7.4 with
|
||||
// libstdc++, or gcc >= 8.2 with libc++, and Visual Studio (but not NVCC).
|
||||
#if defined(ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE)
|
||||
#error ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE cannot be directly set
|
||||
#elif defined(ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE)
|
||||
#error ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE cannot be directly set
|
||||
#elif (defined(__clang__) && defined(_LIBCPP_VERSION)) || \
|
||||
(!defined(__clang__) && ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(7, 4) && \
|
||||
(defined(_LIBCPP_VERSION) || defined(__GLIBCXX__))) || \
|
||||
(!defined(__clang__) && \
|
||||
((ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(7, 4) && defined(__GLIBCXX__)) || \
|
||||
(ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(8, 2) && \
|
||||
defined(_LIBCPP_VERSION)))) || \
|
||||
(defined(_MSC_VER) && !defined(__NVCC__))
|
||||
#define ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE 1
|
||||
#define ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE 1
|
||||
#endif
|
||||
|
||||
// ABSL_HAVE_SOURCE_LOCATION_CURRENT
|
||||
//
|
||||
// Indicates whether `absl::SourceLocation::current()` will return useful
|
||||
// information in some contexts.
|
||||
#ifndef ABSL_HAVE_SOURCE_LOCATION_CURRENT
|
||||
#if ABSL_INTERNAL_HAS_KEYWORD(__builtin_LINE) && \
|
||||
ABSL_INTERNAL_HAS_KEYWORD(__builtin_FILE)
|
||||
#define ABSL_HAVE_SOURCE_LOCATION_CURRENT 1
|
||||
#elif ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(5, 0)
|
||||
#define ABSL_HAVE_SOURCE_LOCATION_CURRENT 1
|
||||
#endif
|
||||
#endif
|
||||
|
||||
// ABSL_HAVE_THREAD_LOCAL
|
||||
//
|
||||
// Checks whether C++11's `thread_local` storage duration specifier is
|
||||
|
@ -380,9 +412,11 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
#ifdef ABSL_HAVE_MMAP
|
||||
#error ABSL_HAVE_MMAP cannot be directly set
|
||||
#elif defined(__linux__) || defined(__APPLE__) || defined(__FreeBSD__) || \
|
||||
defined(__ros__) || defined(__native_client__) || defined(__asmjs__) || \
|
||||
defined(__wasm__) || defined(__Fuchsia__) || defined(__sun) || \
|
||||
defined(__ASYLO__) || defined(__myriad2__)
|
||||
defined(_AIX) || defined(__ros__) || defined(__native_client__) || \
|
||||
defined(__asmjs__) || defined(__wasm__) || defined(__Fuchsia__) || \
|
||||
defined(__sun) || defined(__ASYLO__) || defined(__myriad2__) || \
|
||||
defined(__HAIKU__) || defined(__OpenBSD__) || defined(__NetBSD__) || \
|
||||
defined(__QNX__)
|
||||
#define ABSL_HAVE_MMAP 1
|
||||
#endif
|
||||
|
||||
|
@ -393,7 +427,8 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
#ifdef ABSL_HAVE_PTHREAD_GETSCHEDPARAM
|
||||
#error ABSL_HAVE_PTHREAD_GETSCHEDPARAM cannot be directly set
|
||||
#elif defined(__linux__) || defined(__APPLE__) || defined(__FreeBSD__) || \
|
||||
defined(__ros__)
|
||||
defined(_AIX) || defined(__ros__) || defined(__OpenBSD__) || \
|
||||
defined(__NetBSD__)
|
||||
#define ABSL_HAVE_PTHREAD_GETSCHEDPARAM 1
|
||||
#endif
|
||||
|
||||
|
@ -488,14 +523,33 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
#error "absl endian detection needs to be set up for your compiler"
|
||||
#endif
|
||||
|
||||
// macOS 10.13 and iOS 10.11 don't let you use <any>, <optional>, or <variant>
|
||||
// even though the headers exist and are publicly noted to work. See
|
||||
// https://github.com/abseil/abseil-cpp/issues/207 and
|
||||
// macOS < 10.13 and iOS < 11 don't let you use <any>, <optional>, or <variant>
|
||||
// even though the headers exist and are publicly noted to work, because the
|
||||
// libc++ shared library shipped on the system doesn't have the requisite
|
||||
// exported symbols. See https://github.com/abseil/abseil-cpp/issues/207 and
|
||||
// https://developer.apple.com/documentation/xcode_release_notes/xcode_10_release_notes
|
||||
//
|
||||
// libc++ spells out the availability requirements in the file
|
||||
// llvm-project/libcxx/include/__config via the #define
|
||||
// _LIBCPP_AVAILABILITY_BAD_OPTIONAL_ACCESS.
|
||||
//
|
||||
// Unfortunately, Apple initially mis-stated the requirements as macOS < 10.14
|
||||
// and iOS < 12 in the libc++ headers. This was corrected by
|
||||
// https://github.com/llvm/llvm-project/commit/7fb40e1569dd66292b647f4501b85517e9247953
|
||||
// which subsequently made it into the XCode 12.5 release. We need to match the
|
||||
// old (incorrect) conditions when built with old XCode, but can use the
|
||||
// corrected earlier versions with new XCode.
|
||||
#if defined(__APPLE__) && defined(_LIBCPP_VERSION) && \
|
||||
((_LIBCPP_VERSION >= 11000 && /* XCode 12.5 or later: */ \
|
||||
((defined(__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__) && \
|
||||
__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ < 101300) || \
|
||||
(defined(__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__) && \
|
||||
__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__ < 110000) || \
|
||||
(defined(__ENVIRONMENT_WATCH_OS_VERSION_MIN_REQUIRED__) && \
|
||||
__ENVIRONMENT_WATCH_OS_VERSION_MIN_REQUIRED__ < 40000) || \
|
||||
(defined(__ENVIRONMENT_TV_OS_VERSION_MIN_REQUIRED__) && \
|
||||
__ENVIRONMENT_TV_OS_VERSION_MIN_REQUIRED__ < 110000))) || \
|
||||
(_LIBCPP_VERSION < 11000 && /* Pre-XCode 12.5: */ \
|
||||
((defined(__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__) && \
|
||||
__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ < 101400) || \
|
||||
(defined(__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__) && \
|
||||
|
@ -503,7 +557,7 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
(defined(__ENVIRONMENT_WATCH_OS_VERSION_MIN_REQUIRED__) && \
|
||||
__ENVIRONMENT_WATCH_OS_VERSION_MIN_REQUIRED__ < 50000) || \
|
||||
(defined(__ENVIRONMENT_TV_OS_VERSION_MIN_REQUIRED__) && \
|
||||
__ENVIRONMENT_TV_OS_VERSION_MIN_REQUIRED__ < 120000))
|
||||
__ENVIRONMENT_TV_OS_VERSION_MIN_REQUIRED__ < 120000))))
|
||||
#define ABSL_INTERNAL_APPLE_CXX17_TYPES_UNAVAILABLE 1
|
||||
#else
|
||||
#define ABSL_INTERNAL_APPLE_CXX17_TYPES_UNAVAILABLE 0
|
||||
|
@ -673,8 +727,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
#endif
|
||||
#endif
|
||||
|
||||
#undef ABSL_INTERNAL_HAS_KEYWORD
|
||||
|
||||
// ABSL_DLL
|
||||
//
|
||||
// When building Abseil as a DLL, this macro expands to `__declspec(dllexport)`
|
||||
|
@ -700,8 +752,6 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
// a compiler instrumentation module and a run-time library.
|
||||
#ifdef ABSL_HAVE_MEMORY_SANITIZER
|
||||
#error "ABSL_HAVE_MEMORY_SANITIZER cannot be directly set."
|
||||
#elif defined(__SANITIZE_MEMORY__)
|
||||
#define ABSL_HAVE_MEMORY_SANITIZER 1
|
||||
#elif !defined(__native_client__) && ABSL_HAVE_FEATURE(memory_sanitizer)
|
||||
#define ABSL_HAVE_MEMORY_SANITIZER 1
|
||||
#endif
|
||||
|
@ -728,6 +778,45 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
#define ABSL_HAVE_ADDRESS_SANITIZER 1
|
||||
#endif
|
||||
|
||||
// ABSL_HAVE_HWADDRESS_SANITIZER
|
||||
//
|
||||
// Hardware-Assisted AddressSanitizer (or HWASAN) is a memory error detector
// that is even faster than ASan and can use CPU features like ARM TBI, Intel
// LAM, or AMD UAI.
|
||||
#ifdef ABSL_HAVE_HWADDRESS_SANITIZER
|
||||
#error "ABSL_HAVE_HWADDRESS_SANITIZER cannot be directly set."
|
||||
#elif defined(__SANITIZE_HWADDRESS__)
|
||||
#define ABSL_HAVE_HWADDRESS_SANITIZER 1
|
||||
#elif ABSL_HAVE_FEATURE(hwaddress_sanitizer)
|
||||
#define ABSL_HAVE_HWADDRESS_SANITIZER 1
|
||||
#endif
|
||||
|
||||
// ABSL_HAVE_LEAK_SANITIZER
|
||||
//
|
||||
// LeakSanitizer (or lsan) is a detector of memory leaks.
|
||||
// https://clang.llvm.org/docs/LeakSanitizer.html
|
||||
// https://github.com/google/sanitizers/wiki/AddressSanitizerLeakSanitizer
|
||||
//
|
||||
// The macro ABSL_HAVE_LEAK_SANITIZER can be used to detect at compile-time
|
||||
// whether the LeakSanitizer is potentially available. However, just because the
|
||||
// LeakSanitizer is available does not mean it is active. Use the
|
||||
// always-available run-time interface in //absl/debugging/leak_check.h for
|
||||
// interacting with LeakSanitizer.
|
||||
#ifdef ABSL_HAVE_LEAK_SANITIZER
|
||||
#error "ABSL_HAVE_LEAK_SANITIZER cannot be directly set."
|
||||
#elif defined(LEAK_SANITIZER)
|
||||
// GCC provides no method for detecting the presence of the standalone
|
||||
// LeakSanitizer (-fsanitize=leak), so GCC users of -fsanitize=leak should also
|
||||
// use -DLEAK_SANITIZER.
|
||||
#define ABSL_HAVE_LEAK_SANITIZER 1
|
||||
// Clang standalone LeakSanitizer (-fsanitize=leak)
|
||||
#elif ABSL_HAVE_FEATURE(leak_sanitizer)
|
||||
#define ABSL_HAVE_LEAK_SANITIZER 1
|
||||
#elif defined(ABSL_HAVE_ADDRESS_SANITIZER)
|
||||
// GCC or Clang using the LeakSanitizer integrated into AddressSanitizer.
|
||||
#define ABSL_HAVE_LEAK_SANITIZER 1
|
||||
#endif
|
||||
|
||||
// ABSL_HAVE_CLASS_TEMPLATE_ARGUMENT_DEDUCTION
|
||||
//
|
||||
// Class template argument deduction is a language feature added in C++17.
|
||||
|
@ -737,4 +826,88 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
|
|||
#define ABSL_HAVE_CLASS_TEMPLATE_ARGUMENT_DEDUCTION 1
|
||||
#endif
|
||||
|
||||
// ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
|
||||
//
|
||||
// Prior to C++17, static constexpr variables defined in classes required a
|
||||
// separate definition outside of the class body, for example:
|
||||
//
|
||||
// class Foo {
|
||||
// static constexpr int kBar = 0;
|
||||
// };
|
||||
// constexpr int Foo::kBar;
|
||||
//
|
||||
// In C++17, these variables defined in classes are considered inline variables,
|
||||
// and the extra declaration is redundant. Since some compilers warn on the
|
||||
// extra declarations, ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL can be used
|
||||
// to conditionally ignore them:
|
||||
//
|
||||
// #ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
|
||||
// constexpr int Foo::kBar;
|
||||
// #endif
|
||||
#if defined(ABSL_INTERNAL_CPLUSPLUS_LANG) && \
|
||||
ABSL_INTERNAL_CPLUSPLUS_LANG < 201703L
|
||||
#define ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL 1
|
||||
#endif
|
||||
|
||||
// `ABSL_INTERNAL_HAS_RTTI` determines whether abseil is being compiled with
|
||||
// RTTI support.
|
||||
#ifdef ABSL_INTERNAL_HAS_RTTI
|
||||
#error ABSL_INTERNAL_HAS_RTTI cannot be directly set
|
||||
#elif !defined(__GNUC__) || defined(__GXX_RTTI)
|
||||
#define ABSL_INTERNAL_HAS_RTTI 1
|
||||
#endif // !defined(__GNUC__) || defined(__GXX_RTTI)
|
||||
|
||||
// ABSL_INTERNAL_HAVE_SSE is used for compile-time detection of SSE support.
|
||||
// See https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html for an overview of
|
||||
// which architectures support the various x86 instruction sets.
|
||||
#ifdef ABSL_INTERNAL_HAVE_SSE
|
||||
#error ABSL_INTERNAL_HAVE_SSE cannot be directly set
|
||||
#elif defined(__SSE__)
|
||||
#define ABSL_INTERNAL_HAVE_SSE 1
|
||||
#elif defined(_M_X64) || (defined(_M_IX86_FP) && _M_IX86_FP >= 1)
|
||||
// MSVC only defines _M_IX86_FP for x86 32-bit code, and _M_IX86_FP >= 1
|
||||
// indicates that at least SSE was targeted with the /arch:SSE option.
|
||||
// All x86-64 processors support SSE, so support can be assumed.
|
||||
// https://docs.microsoft.com/en-us/cpp/preprocessor/predefined-macros
|
||||
#define ABSL_INTERNAL_HAVE_SSE 1
|
||||
#endif
|
||||
|
||||
// ABSL_INTERNAL_HAVE_SSE2 is used for compile-time detection of SSE2 support.
|
||||
// See https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html for an overview of
|
||||
// which architectures support the various x86 instruction sets.
|
||||
#ifdef ABSL_INTERNAL_HAVE_SSE2
|
||||
#error ABSL_INTERNAL_HAVE_SSE2 cannot be directly set
|
||||
#elif defined(__SSE2__)
|
||||
#define ABSL_INTERNAL_HAVE_SSE2 1
|
||||
#elif defined(_M_X64) || (defined(_M_IX86_FP) && _M_IX86_FP >= 2)
|
||||
// MSVC only defines _M_IX86_FP for x86 32-bit code, and _M_IX86_FP >= 2
|
||||
// indicates that at least SSE2 was targeted with the /arch:SSE2 option.
|
||||
// All x86-64 processors support SSE2, so support can be assumed.
|
||||
// https://docs.microsoft.com/en-us/cpp/preprocessor/predefined-macros
|
||||
#define ABSL_INTERNAL_HAVE_SSE2 1
|
||||
#endif
|
||||
|
||||
// ABSL_INTERNAL_HAVE_SSSE3 is used for compile-time detection of SSSE3 support.
|
||||
// See https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html for an overview of
|
||||
// which architectures support the various x86 instruction sets.
|
||||
//
|
||||
// MSVC does not have a mode that targets SSSE3 at compile-time. To use SSSE3
// with MSVC, either assume that the code will only ever run on CPUs that
// support SSSE3, or use __cpuid() to detect support at runtime and fall back
// to a non-SSSE3 implementation when SSSE3 is unsupported by the CPU.
|
||||
#ifdef ABSL_INTERNAL_HAVE_SSSE3
|
||||
#error ABSL_INTERNAL_HAVE_SSSE3 cannot be directly set
|
||||
#elif defined(__SSSE3__)
|
||||
#define ABSL_INTERNAL_HAVE_SSSE3 1
|
||||
#endif
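A hedged sketch of the guard-plus-fallback pattern the comment above describes (the function is hypothetical):

  #ifdef ABSL_INTERNAL_HAVE_SSSE3
  #include <tmmintrin.h>
  __m128i ShuffleBytes(__m128i v, __m128i mask) {
    return _mm_shuffle_epi8(v, mask);  // SSSE3 instruction
  }
  #else
  // Portable fallback, used when SSSE3 was not enabled at compile time (or,
  // on MSVC, chosen at runtime after a __cpuid() capability check).
  #endif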
|
||||
|
||||
// ABSL_INTERNAL_HAVE_ARM_NEON is used for compile-time detection of NEON (ARM
|
||||
// SIMD).
|
||||
#ifdef ABSL_INTERNAL_HAVE_ARM_NEON
|
||||
#error ABSL_INTERNAL_HAVE_ARM_NEON cannot be directly set
|
||||
#elif defined(__ARM_NEON)
|
||||
#define ABSL_INTERNAL_HAVE_ARM_NEON 1
|
||||
#endif
|
||||
|
||||
#endif // ABSL_BASE_CONFIG_H_
|
||||
|
|
|
@ -430,31 +430,6 @@ ABSL_NAMESPACE_END
|
|||
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
#ifdef ABSL_HAVE_THREAD_SANITIZER
|
||||
ABSL_INTERNAL_BEGIN_EXTERN_C
|
||||
int RunningOnValgrind();
|
||||
double ValgrindSlowdown();
|
||||
ABSL_INTERNAL_END_EXTERN_C
|
||||
#else
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace base_internal {
|
||||
ABSL_DEPRECATED(
|
||||
"Don't use this interface. It is misleading and is being deleted.")
|
||||
ABSL_ATTRIBUTE_ALWAYS_INLINE inline int RunningOnValgrind() { return 0; }
|
||||
ABSL_DEPRECATED(
|
||||
"Don't use this interface. It is misleading and is being deleted.")
|
||||
ABSL_ATTRIBUTE_ALWAYS_INLINE inline double ValgrindSlowdown() { return 1.0; }
|
||||
} // namespace base_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
using absl::base_internal::RunningOnValgrind;
|
||||
using absl::base_internal::ValgrindSlowdown;
|
||||
#endif
|
||||
#endif
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Address sanitizer annotations
|
||||
|
||||
|
|
|
@ -701,7 +701,10 @@ struct BasicGuaranteeWithExtraContracts : public NonNegative {
|
|||
|
||||
static constexpr int kExceptionSentinel = 9999;
|
||||
};
|
||||
|
||||
#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
|
||||
constexpr int BasicGuaranteeWithExtraContracts::kExceptionSentinel;
|
||||
#endif
|
||||
|
||||
TEST(ExceptionCheckTest, BasicGuaranteeWithExtraContracts) {
|
||||
auto tester_with_val =
|
||||
|
|
|
@ -25,6 +25,8 @@
|
|||
#include <atomic>
|
||||
#include <chrono> // NOLINT(build/c++11)
|
||||
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/base/config.h"
|
||||
#include "absl/base/internal/unscaledcycleclock.h"
|
||||
|
||||
namespace absl {
|
||||
|
@ -33,44 +35,20 @@ namespace base_internal {
|
|||
|
||||
#if ABSL_USE_UNSCALED_CYCLECLOCK
|
||||
|
||||
namespace {
|
||||
|
||||
#ifdef NDEBUG
|
||||
#ifdef ABSL_INTERNAL_UNSCALED_CYCLECLOCK_FREQUENCY_IS_CPU_FREQUENCY
|
||||
// Not debug mode and the UnscaledCycleClock frequency is the CPU
|
||||
// frequency. Scale the CycleClock to prevent overflow if someone
|
||||
// tries to represent the time as cycles since the Unix epoch.
|
||||
static constexpr int32_t kShift = 1;
|
||||
#else
|
||||
// Not debug mode and the UnscaledCycleClock isn't operating at the
|
||||
// raw CPU frequency. There is no need to do any scaling, so don't
|
||||
// needlessly sacrifice precision.
|
||||
static constexpr int32_t kShift = 0;
|
||||
#endif
|
||||
#else
|
||||
// In debug mode use a different shift to discourage depending on a
|
||||
// particular shift value.
|
||||
static constexpr int32_t kShift = 2;
|
||||
#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
|
||||
constexpr int32_t CycleClock::kShift;
|
||||
constexpr double CycleClock::kFrequencyScale;
|
||||
#endif
|
||||
|
||||
static constexpr double kFrequencyScale = 1.0 / (1 << kShift);
|
||||
static std::atomic<CycleClockSourceFunc> cycle_clock_source;
|
||||
ABSL_CONST_INIT std::atomic<CycleClockSourceFunc>
|
||||
CycleClock::cycle_clock_source_{nullptr};
|
||||
|
||||
CycleClockSourceFunc LoadCycleClockSource() {
|
||||
// Optimize for the common case (no callback) by first doing a relaxed load;
|
||||
// this is significantly faster on non-x86 platforms.
|
||||
if (cycle_clock_source.load(std::memory_order_relaxed) == nullptr) {
|
||||
return nullptr;
|
||||
}
|
||||
// This corresponds to the store(std::memory_order_release) in
|
||||
// CycleClockSource::Register, and makes sure that any updates made prior to
|
||||
// registering the callback are visible to this thread before the callback is
|
||||
// invoked.
|
||||
return cycle_clock_source.load(std::memory_order_acquire);
|
||||
void CycleClockSource::Register(CycleClockSourceFunc source) {
|
||||
// Corresponds to the load(std::memory_order_acquire) in LoadCycleClockSource.
|
||||
CycleClock::cycle_clock_source_.store(source, std::memory_order_release);
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
#ifdef _WIN32
|
||||
int64_t CycleClock::Now() {
|
||||
auto fn = LoadCycleClockSource();
|
||||
if (fn == nullptr) {
|
||||
|
@ -78,15 +56,7 @@ int64_t CycleClock::Now() {
|
|||
}
|
||||
return fn() >> kShift;
|
||||
}
|
||||
|
||||
double CycleClock::Frequency() {
|
||||
return kFrequencyScale * base_internal::UnscaledCycleClock::Frequency();
|
||||
}
|
||||
|
||||
void CycleClockSource::Register(CycleClockSourceFunc source) {
|
||||
// Corresponds to the load(std::memory_order_acquire) in LoadCycleClockSource.
|
||||
cycle_clock_source.store(source, std::memory_order_release);
|
||||
}
|
||||
#endif
|
||||
|
||||
#else
|
||||
|
||||
|
|
|
@ -42,14 +42,19 @@
|
|||
#ifndef ABSL_BASE_INTERNAL_CYCLECLOCK_H_
|
||||
#define ABSL_BASE_INTERNAL_CYCLECLOCK_H_
|
||||
|
||||
#include <atomic>
|
||||
#include <cstdint>
|
||||
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/base/config.h"
|
||||
#include "absl/base/internal/unscaledcycleclock.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace base_internal {
|
||||
|
||||
using CycleClockSourceFunc = int64_t (*)();
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// CycleClock
|
||||
// -----------------------------------------------------------------------------
|
||||
|
@ -68,12 +73,37 @@ class CycleClock {
|
|||
static double Frequency();
|
||||
|
||||
private:
|
||||
#if ABSL_USE_UNSCALED_CYCLECLOCK
|
||||
static CycleClockSourceFunc LoadCycleClockSource();
|
||||
|
||||
#ifdef NDEBUG
|
||||
#ifdef ABSL_INTERNAL_UNSCALED_CYCLECLOCK_FREQUENCY_IS_CPU_FREQUENCY
|
||||
// Not debug mode and the UnscaledCycleClock frequency is the CPU
|
||||
// frequency. Scale the CycleClock to prevent overflow if someone
|
||||
// tries to represent the time as cycles since the Unix epoch.
|
||||
static constexpr int32_t kShift = 1;
|
||||
#else
|
||||
// Not debug mode and the UnscaledCycleClock isn't operating at the
|
||||
// raw CPU frequency. There is no need to do any scaling, so don't
|
||||
// needlessly sacrifice precision.
|
||||
static constexpr int32_t kShift = 0;
|
||||
#endif
|
||||
#else // NDEBUG
|
||||
// In debug mode use a different shift to discourage depending on a
|
||||
// particular shift value.
|
||||
static constexpr int32_t kShift = 2;
|
||||
#endif // NDEBUG
|
||||
|
||||
static constexpr double kFrequencyScale = 1.0 / (1 << kShift);
|
||||
ABSL_CONST_INIT static std::atomic<CycleClockSourceFunc> cycle_clock_source_;
|
||||
#endif  // ABSL_USE_UNSCALED_CYCLECLOCK
|
||||
|
||||
CycleClock() = delete; // no instances
|
||||
CycleClock(const CycleClock&) = delete;
|
||||
CycleClock& operator=(const CycleClock&) = delete;
|
||||
};
|
||||
|
||||
using CycleClockSourceFunc = int64_t (*)();
|
||||
friend class CycleClockSource;
|
||||
};
|
||||
|
||||
class CycleClockSource {
|
||||
private:
|
||||
|
@ -87,6 +117,41 @@ class CycleClockSource {
|
|||
static void Register(CycleClockSourceFunc source);
|
||||
};
|
||||
|
||||
#if ABSL_USE_UNSCALED_CYCLECLOCK
|
||||
|
||||
inline CycleClockSourceFunc CycleClock::LoadCycleClockSource() {
|
||||
#if !defined(__x86_64__)
|
||||
// Optimize for the common case (no callback) by first doing a relaxed load;
|
||||
// this is significantly faster on non-x86 platforms.
|
||||
if (cycle_clock_source_.load(std::memory_order_relaxed) == nullptr) {
|
||||
return nullptr;
|
||||
}
|
||||
#endif // !defined(__x86_64__)
|
||||
|
||||
// This corresponds to the store(std::memory_order_release) in
|
||||
// CycleClockSource::Register, and makes sure that any updates made prior to
|
||||
// registering the callback are visible to this thread before the callback
|
||||
// is invoked.
|
||||
return cycle_clock_source_.load(std::memory_order_acquire);
|
||||
}
|
||||
|
||||
// Accessing globals in inlined code in Windows DLLs is problematic.
|
||||
#ifndef _WIN32
|
||||
inline int64_t CycleClock::Now() {
|
||||
auto fn = LoadCycleClockSource();
|
||||
if (fn == nullptr) {
|
||||
return base_internal::UnscaledCycleClock::Now() >> kShift;
|
||||
}
|
||||
return fn() >> kShift;
|
||||
}
|
||||
#endif
|
||||
|
||||
inline double CycleClock::Frequency() {
|
||||
return kFrequencyScale * base_internal::UnscaledCycleClock::Frequency();
|
||||
}
|
||||
|
||||
#endif // ABSL_USE_UNSCALED_CYCLECLOCK
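The relaxed-then-acquire load paired with a release store is a general pattern for rarely registered callbacks; a self-contained sketch of the idea (not Abseil API):

  #include <atomic>
  #include <cstdint>

  using Source = int64_t (*)();
  std::atomic<Source> g_source{nullptr};

  void RegisterSource(Source fn) {
    // Publishes fn and everything written before this call.
    g_source.store(fn, std::memory_order_release);
  }

  int64_t ReadClock(int64_t fallback) {
    // Cheap relaxed check for the common "no callback" case...
    if (g_source.load(std::memory_order_relaxed) == nullptr) return fallback;
    // ...then an acquire load so writes made before RegisterSource() ran are
    // visible when the callback is invoked.
    Source fn = g_source.load(std::memory_order_acquire);
    return fn != nullptr ? fn() : fallback;
  }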
|
||||
|
||||
} // namespace base_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
|
||||
#include "absl/base/config.h"
|
||||
|
||||
#if ABSL_HAVE_MMAP
|
||||
#ifdef ABSL_HAVE_MMAP
|
||||
|
||||
#include <sys/mman.h>
|
||||
|
||||
|
@ -41,13 +41,13 @@
|
|||
|
||||
#ifdef __mips__
|
||||
// Include definitions of the ABI currently in use.
|
||||
#ifdef __BIONIC__
|
||||
#if defined(__BIONIC__) || !defined(__GLIBC__)
|
||||
// Android doesn't have sgidefs.h, but does have asm/sgidefs.h, which has the
|
||||
// definitions we need.
|
||||
#include <asm/sgidefs.h>
|
||||
#else
|
||||
#include <sgidefs.h>
|
||||
#endif // __BIONIC__
|
||||
#endif // __BIONIC__ || !__GLIBC__
|
||||
#endif // __mips__
|
||||
|
||||
// SYS_mmap and SYS_munmap are not defined in Android.
|
||||
|
|
|
@ -16,16 +16,9 @@
|
|||
#ifndef ABSL_BASE_INTERNAL_ENDIAN_H_
|
||||
#define ABSL_BASE_INTERNAL_ENDIAN_H_
|
||||
|
||||
// The following guarantees declaration of the byte swap functions
|
||||
#ifdef _MSC_VER
|
||||
#include <stdlib.h> // NOLINT(build/include)
|
||||
#elif defined(__FreeBSD__)
|
||||
#include <sys/endian.h>
|
||||
#elif defined(__GLIBC__)
|
||||
#include <byteswap.h> // IWYU pragma: export
|
||||
#endif
|
||||
|
||||
#include <cstdint>
|
||||
#include <cstdlib>
|
||||
|
||||
#include "absl/base/casts.h"
|
||||
#include "absl/base/config.h"
|
||||
#include "absl/base/internal/unaligned_access.h"
|
||||
|
@ -34,47 +27,11 @@
|
|||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
|
||||
// Use compiler byte-swapping intrinsics if they are available. 32-bit
|
||||
// and 64-bit versions are available in Clang and GCC as of GCC 4.3.0.
|
||||
// The 16-bit version is available in Clang and GCC only as of GCC 4.8.0.
|
||||
// For simplicity, we enable them all only for GCC 4.8.0 or later.
|
||||
#if defined(__clang__) || \
|
||||
(defined(__GNUC__) && \
|
||||
((__GNUC__ == 4 && __GNUC_MINOR__ >= 8) || __GNUC__ >= 5))
|
||||
inline uint64_t gbswap_64(uint64_t host_int) {
|
||||
#if ABSL_HAVE_BUILTIN(__builtin_bswap64) || defined(__GNUC__)
|
||||
return __builtin_bswap64(host_int);
|
||||
}
|
||||
inline uint32_t gbswap_32(uint32_t host_int) {
|
||||
return __builtin_bswap32(host_int);
|
||||
}
|
||||
inline uint16_t gbswap_16(uint16_t host_int) {
|
||||
return __builtin_bswap16(host_int);
|
||||
}
|
||||
|
||||
#elif defined(_MSC_VER)
|
||||
inline uint64_t gbswap_64(uint64_t host_int) {
|
||||
return _byteswap_uint64(host_int);
|
||||
}
|
||||
inline uint32_t gbswap_32(uint32_t host_int) {
|
||||
return _byteswap_ulong(host_int);
|
||||
}
|
||||
inline uint16_t gbswap_16(uint16_t host_int) {
|
||||
return _byteswap_ushort(host_int);
|
||||
}
|
||||
|
||||
#else
|
||||
inline uint64_t gbswap_64(uint64_t host_int) {
|
||||
#if defined(__GNUC__) && defined(__x86_64__) && !defined(__APPLE__)
|
||||
// Adapted from /usr/include/byteswap.h. Not available on Mac.
|
||||
if (__builtin_constant_p(host_int)) {
|
||||
return __bswap_constant_64(host_int);
|
||||
} else {
|
||||
uint64_t result;
|
||||
__asm__("bswap %0" : "=r"(result) : "0"(host_int));
|
||||
return result;
|
||||
}
|
||||
#elif defined(__GLIBC__)
|
||||
return bswap_64(host_int);
|
||||
#else
|
||||
return (((host_int & uint64_t{0xFF}) << 56) |
|
||||
((host_int & uint64_t{0xFF00}) << 40) |
|
||||
|
@ -84,12 +41,14 @@ inline uint64_t gbswap_64(uint64_t host_int) {
|
|||
((host_int & uint64_t{0xFF0000000000}) >> 24) |
|
||||
((host_int & uint64_t{0xFF000000000000}) >> 40) |
|
||||
((host_int & uint64_t{0xFF00000000000000}) >> 56));
|
||||
#endif // bswap_64
|
||||
#endif
|
||||
}
|
||||
|
||||
inline uint32_t gbswap_32(uint32_t host_int) {
|
||||
#if defined(__GLIBC__)
|
||||
return bswap_32(host_int);
|
||||
#if ABSL_HAVE_BUILTIN(__builtin_bswap32) || defined(__GNUC__)
|
||||
return __builtin_bswap32(host_int);
|
||||
#elif defined(_MSC_VER)
|
||||
return _byteswap_ulong(host_int);
|
||||
#else
|
||||
return (((host_int & uint32_t{0xFF}) << 24) |
|
||||
((host_int & uint32_t{0xFF00}) << 8) |
|
||||
|
@ -99,33 +58,29 @@ inline uint32_t gbswap_32(uint32_t host_int) {
|
|||
}
|
||||
|
||||
inline uint16_t gbswap_16(uint16_t host_int) {
|
||||
#if defined(__GLIBC__)
|
||||
return bswap_16(host_int);
|
||||
#if ABSL_HAVE_BUILTIN(__builtin_bswap16) || defined(__GNUC__)
|
||||
return __builtin_bswap16(host_int);
|
||||
#elif defined(_MSC_VER)
|
||||
return _byteswap_ushort(host_int);
|
||||
#else
|
||||
return (((host_int & uint16_t{0xFF}) << 8) |
|
||||
((host_int & uint16_t{0xFF00}) >> 8));
|
||||
#endif
|
||||
}
|
||||
|
||||
#endif // intrinsics available
|
||||
|
||||
#ifdef ABSL_IS_LITTLE_ENDIAN
|
||||
|
||||
// Definitions for ntohl etc. that don't require us to include
|
||||
// netinet/in.h. We wrap gbswap_32 and gbswap_16 in functions rather
|
||||
// than just #defining them because in debug mode, gcc doesn't
|
||||
// correctly handle the (rather involved) definitions of bswap_32.
|
||||
// gcc guarantees that inline functions are as fast as macros, so
|
||||
// this isn't a performance hit.
|
||||
// Portable definitions for htonl (host-to-network) and friends on little-endian
|
||||
// architectures.
|
||||
inline uint16_t ghtons(uint16_t x) { return gbswap_16(x); }
|
||||
inline uint32_t ghtonl(uint32_t x) { return gbswap_32(x); }
|
||||
inline uint64_t ghtonll(uint64_t x) { return gbswap_64(x); }
|
||||
|
||||
#elif defined ABSL_IS_BIG_ENDIAN
|
||||
|
||||
// These definitions are simpler on big-endian machines
|
||||
// These are functions instead of macros to avoid self-assignment warnings
|
||||
// on calls such as "i = ghtnol(i);". This also provides type checking.
|
||||
// Portable definitions for htonl (host-to-network) etc on big-endian
|
||||
// architectures. These definitions are simpler since the host byte order is the
|
||||
// same as network byte order.
|
||||
inline uint16_t ghtons(uint16_t x) { return x; }
|
||||
inline uint32_t ghtonl(uint32_t x) { return x; }
|
||||
inline uint64_t ghtonll(uint64_t x) { return x; }
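For illustration, a typical use of these helpers is serializing an integer in network (big-endian) byte order; the function below is hypothetical:

  #include <cstdint>
  #include <cstring>
  #include "absl/base/internal/endian.h"

  void PutLengthPrefix(char* out, uint32_t len) {
    // Byte swap on little-endian hosts, no-op on big-endian hosts.
    const uint32_t wire = absl::ghtonl(len);
    memcpy(out, &wire, sizeof(wire));
  }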
|
||||
|
|
|
@ -28,8 +28,10 @@ struct FastTypeTag {
|
|||
constexpr static char dummy_var = 0;
|
||||
};
|
||||
|
||||
#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
|
||||
template <typename Type>
|
||||
constexpr char FastTypeTag<Type>::dummy_var;
|
||||
#endif
|
||||
|
||||
// FastTypeId<Type>() evaluates at compile/link-time to a unique pointer for the
|
||||
// passed-in type. These are meant to be good match for keys into maps or
|
||||
|
|
|
@ -14,6 +14,8 @@
|
|||
//
|
||||
// absl::base_internal::invoke(f, args...) is an implementation of
|
||||
// INVOKE(f, args...) from section [func.require] of the C++ standard.
|
||||
// When compiled as C++17 and later versions, it is implemented as an alias of
|
||||
// std::invoke.
|
||||
//
|
||||
// [func.require]
|
||||
// Define INVOKE (f, t1, t2, ..., tN) as follows:
|
||||
|
@ -35,6 +37,26 @@
|
|||
#ifndef ABSL_BASE_INTERNAL_INVOKE_H_
|
||||
#define ABSL_BASE_INTERNAL_INVOKE_H_
|
||||
|
||||
#include "absl/base/config.h"
|
||||
|
||||
#if ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L
|
||||
|
||||
#include <functional>
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace base_internal {
|
||||
|
||||
using std::invoke;
|
||||
using std::invoke_result_t;
|
||||
using std::is_invocable_r;
|
||||
|
||||
} // namespace base_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#else // ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L
|
||||
|
||||
#include <algorithm>
|
||||
#include <type_traits>
|
||||
#include <utility>
|
||||
|
@ -80,8 +102,18 @@ struct MemFunAndRef : StrippedAccept<MemFunAndRef> {
|
|||
static decltype((std::declval<Obj>().*
|
||||
std::declval<MemFun>())(std::declval<Args>()...))
|
||||
Invoke(MemFun&& mem_fun, Obj&& obj, Args&&... args) {
|
||||
// Ignore bogus GCC warnings on this line.
|
||||
// See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=101436 for a similar example.
|
||||
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(11, 0)
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Warray-bounds"
|
||||
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
|
||||
#endif
|
||||
return (std::forward<Obj>(obj).*
|
||||
std::forward<MemFun>(mem_fun))(std::forward<Args>(args)...);
|
||||
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(11, 0)
|
||||
#pragma GCC diagnostic pop
|
||||
#endif
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -180,8 +212,30 @@ invoke_result_t<F, Args...> invoke(F&& f, Args&&... args) {
|
|||
return Invoker<F, Args...>::type::Invoke(std::forward<F>(f),
|
||||
std::forward<Args>(args)...);
|
||||
}
|
||||
|
||||
template <typename AlwaysVoid, typename, typename, typename...>
|
||||
struct IsInvocableRImpl : std::false_type {};
|
||||
|
||||
template <typename R, typename F, typename... Args>
|
||||
struct IsInvocableRImpl<
|
||||
absl::void_t<absl::base_internal::invoke_result_t<F, Args...> >, R, F,
|
||||
Args...>
|
||||
: std::integral_constant<
|
||||
bool,
|
||||
std::is_convertible<absl::base_internal::invoke_result_t<F, Args...>,
|
||||
R>::value ||
|
||||
std::is_void<R>::value> {};
|
||||
|
||||
// Type trait whose member `value` is true if invoking `F` with `Args` is valid,
|
||||
// and either the return type is convertible to `R`, or `R` is void.
|
||||
// C++11-compatible version of `std::is_invocable_r`.
|
||||
template <typename R, typename F, typename... Args>
|
||||
using is_invocable_r = IsInvocableRImpl<void, R, F, Args...>;
|
||||
|
||||
} // namespace base_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_INTERNAL_CPLUSPLUS_LANG >= 201703L
|
||||
|
||||
#endif // ABSL_BASE_INTERNAL_INVOKE_H_
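A small sketch of the INVOKE semantics provided here (hypothetical types; behaves the same whether the C++17 aliases or the C++11 fallback is selected):

  #include "absl/base/internal/invoke.h"

  struct Widget {
    int size() const { return 42; }
  };

  int UseInvoke() {
    Widget w;
    // Pointers to members are callable, just like with std::invoke.
    return absl::base_internal::invoke(&Widget::size, w);  // 42
  }

  static_assert(absl::base_internal::is_invocable_r<
                    int, decltype(&Widget::size), Widget&>::value,
                "invoking a member pointer on Widget& yields int");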
|
||||
|
|
|
@ -86,7 +86,7 @@ static void Test(bool use_new_arena, bool call_malloc_hook, int n) {
|
|||
AllocMap::iterator it;
|
||||
BlockDesc block_desc;
|
||||
int rnd;
|
||||
LowLevelAlloc::Arena *arena = 0;
|
||||
LowLevelAlloc::Arena *arena = nullptr;
|
||||
if (use_new_arena) {
|
||||
int32_t flags = call_malloc_hook ? LowLevelAlloc::kCallMallocHook : 0;
|
||||
arena = LowLevelAlloc::NewArena(flags);
|
||||
|
@ -101,9 +101,8 @@ static void Test(bool use_new_arena, bool call_malloc_hook, int n) {
|
|||
case 0: // coin came up heads: add a block
|
||||
using_low_level_alloc = true;
|
||||
block_desc.len = rand() & 0x3fff;
|
||||
block_desc.ptr =
|
||||
reinterpret_cast<char *>(
|
||||
arena == 0
|
||||
block_desc.ptr = reinterpret_cast<char *>(
|
||||
arena == nullptr
|
||||
? LowLevelAlloc::Alloc(block_desc.len)
|
||||
: LowLevelAlloc::AllocWithArena(block_desc.len, arena));
|
||||
using_low_level_alloc = false;
|
||||
|
|
|
@ -0,0 +1,138 @@
|
|||
// Copyright 2022 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#ifndef ABSL_BASE_INTERNAL_PREFETCH_H_
|
||||
#define ABSL_BASE_INTERNAL_PREFETCH_H_
|
||||
|
||||
#include "absl/base/config.h"
|
||||
|
||||
#ifdef __SSE__
|
||||
#include <xmmintrin.h>
|
||||
#endif
|
||||
|
||||
#if defined(_MSC_VER) && defined(ABSL_INTERNAL_HAVE_SSE)
|
||||
#include <intrin.h>
|
||||
#pragma intrinsic(_mm_prefetch)
|
||||
#endif
|
||||
|
||||
// Compatibility wrappers around __builtin_prefetch, to prefetch data
|
||||
// for read if supported by the toolchain.
|
||||
|
||||
// Move data into the cache before it is read, or "prefetch" it.
|
||||
//
|
||||
// The value of `addr` is the address of the memory to prefetch. If
|
||||
// the target and compiler support it, data prefetch instructions are
|
||||
// generated. If the prefetch is done some time before the memory is
|
||||
// read, it may be in the cache by the time the read occurs.
|
||||
//
|
||||
// The function names specify the temporal locality heuristic applied,
|
||||
// using the names of Intel prefetch instructions:
|
||||
//
|
||||
// T0 - high degree of temporal locality; data should be left in as
|
||||
// many levels of the cache possible
|
||||
// T1 - moderate degree of temporal locality
|
||||
// T2 - low degree of temporal locality
|
||||
// Nta - no temporal locality, data need not be left in the cache
|
||||
// after the read
|
||||
//
|
||||
// Incorrect or gratuitous use of these functions can degrade
|
||||
// performance, so use them only when representative benchmarks show
|
||||
// an improvement.
|
||||
//
|
||||
// Example usage:
|
||||
//
|
||||
// absl::base_internal::PrefetchT0(addr);
|
||||
//
|
||||
// Currently, the different prefetch calls behave on some Intel
|
||||
// architectures as follows:
|
||||
//
|
||||
// SNB..SKL SKX
|
||||
// PrefetchT0() L1/L2/L3 L1/L2
|
||||
// PrefetchT1() L2/L3 L2
|
||||
// PrefetchT2() L2/L3 L2
|
||||
// PrefetchNta() L1/--/L3 L1*
|
||||
//
|
||||
// * On SKX PrefetchNta() will bring the line into L1 but will evict
|
||||
// from L3 cache. This might result in surprising behavior.
|
||||
//
|
||||
// SNB = Sandy Bridge, SKL = Skylake, SKX = Skylake Xeon.
|
||||
//
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace base_internal {
|
||||
|
||||
void PrefetchT0(const void* addr);
|
||||
void PrefetchT1(const void* addr);
|
||||
void PrefetchT2(const void* addr);
|
||||
void PrefetchNta(const void* addr);
|
||||
|
||||
// Implementation details follow.
|
||||
|
||||
#if ABSL_HAVE_BUILTIN(__builtin_prefetch) || defined(__GNUC__)
|
||||
|
||||
#define ABSL_INTERNAL_HAVE_PREFETCH 1
|
||||
|
||||
// See __builtin_prefetch:
|
||||
// https://gcc.gnu.org/onlinedocs/gcc/Other-Builtins.html.
|
||||
//
|
||||
// These functions speculatively load for read only. This is
|
||||
// safe for all currently supported platforms. However, prefetch for
|
||||
// store may have problems depending on the target platform.
|
||||
//
|
||||
inline void PrefetchT0(const void* addr) {
|
||||
// Note: this uses prefetcht0 on Intel.
|
||||
__builtin_prefetch(addr, 0, 3);
|
||||
}
|
||||
inline void PrefetchT1(const void* addr) {
|
||||
// Note: this uses prefetcht1 on Intel.
|
||||
__builtin_prefetch(addr, 0, 2);
|
||||
}
|
||||
inline void PrefetchT2(const void* addr) {
|
||||
// Note: this uses prefetcht2 on Intel.
|
||||
__builtin_prefetch(addr, 0, 1);
|
||||
}
|
||||
inline void PrefetchNta(const void* addr) {
|
||||
// Note: this uses prefetchnta on Intel.
|
||||
__builtin_prefetch(addr, 0, 0);
|
||||
}
|
||||
|
||||
#elif defined(ABSL_INTERNAL_HAVE_SSE)
|
||||
|
||||
#define ABSL_INTERNAL_HAVE_PREFETCH 1
|
||||
|
||||
inline void PrefetchT0(const void* addr) {
|
||||
_mm_prefetch(reinterpret_cast<const char*>(addr), _MM_HINT_T0);
|
||||
}
|
||||
inline void PrefetchT1(const void* addr) {
|
||||
_mm_prefetch(reinterpret_cast<const char*>(addr), _MM_HINT_T1);
|
||||
}
|
||||
inline void PrefetchT2(const void* addr) {
|
||||
_mm_prefetch(reinterpret_cast<const char*>(addr), _MM_HINT_T2);
|
||||
}
|
||||
inline void PrefetchNta(const void* addr) {
|
||||
_mm_prefetch(reinterpret_cast<const char*>(addr), _MM_HINT_NTA);
|
||||
}
|
||||
|
||||
#else
|
||||
inline void PrefetchT0(const void*) {}
|
||||
inline void PrefetchT1(const void*) {}
|
||||
inline void PrefetchT2(const void*) {}
|
||||
inline void PrefetchNta(const void*) {}
|
||||
#endif
|
||||
|
||||
} // namespace base_internal
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_BASE_INTERNAL_PREFETCH_H_
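A hedged usage sketch, prefetching a few elements ahead in a streaming loop (hypothetical function; worthwhile only if benchmarks confirm a win, per the note above):

  #include <cstddef>
  #include <cstdint>
  #include "absl/base/internal/prefetch.h"

  int64_t Sum(const int64_t* data, size_t n) {
    int64_t total = 0;
    for (size_t i = 0; i < n; ++i) {
      // Hint that data[i + 8] will be read soon, with high temporal locality.
      if (i + 8 < n) absl::base_internal::PrefetchT0(&data[i + 8]);
      total += data[i];
    }
    return total;
  }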
|
|
@ -0,0 +1,43 @@
|
|||
// Copyright 2022 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/base/internal/prefetch.h"
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
|
||||
namespace {
|
||||
|
||||
int number = 42;
|
||||
|
||||
TEST(Prefetch, TemporalLocalityNone) {
|
||||
absl::base_internal::PrefetchNta(&number);
|
||||
EXPECT_EQ(number, 42);
|
||||
}
|
||||
|
||||
TEST(Prefetch, TemporalLocalityLow) {
|
||||
absl::base_internal::PrefetchT2(&number);
|
||||
EXPECT_EQ(number, 42);
|
||||
}
|
||||
|
||||
TEST(Prefetch, TemporalLocalityMedium) {
|
||||
absl::base_internal::PrefetchT1(&number);
|
||||
EXPECT_EQ(number, 42);
|
||||
}
|
||||
|
||||
TEST(Prefetch, TemporalLocalityHigh) {
|
||||
absl::base_internal::PrefetchT0(&number);
|
||||
EXPECT_EQ(number, 42);
|
||||
}
|
||||
|
||||
} // namespace
|
|
@ -14,15 +14,17 @@
|
|||
|
||||
#include "absl/base/internal/raw_logging.h"
|
||||
|
||||
#include <stddef.h>
|
||||
#include <cstdarg>
|
||||
#include <cstddef>
|
||||
#include <cstdio>
|
||||
#include <cstdlib>
|
||||
#include <cstring>
|
||||
#include <string>
|
||||
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/base/config.h"
|
||||
#include "absl/base/internal/atomic_hook.h"
|
||||
#include "absl/base/internal/errno_saver.h"
|
||||
#include "absl/base/log_severity.h"
|
||||
|
||||
// We know how to perform low-level writes to stderr in POSIX and Windows. For
|
||||
|
@ -37,7 +39,7 @@
|
|||
// this, consider moving both to config.h instead.
|
||||
#if defined(__linux__) || defined(__APPLE__) || defined(__FreeBSD__) || \
|
||||
defined(__Fuchsia__) || defined(__native_client__) || \
|
||||
defined(__EMSCRIPTEN__) || defined(__ASYLO__)
|
||||
defined(__OpenBSD__) || defined(__EMSCRIPTEN__) || defined(__ASYLO__)
|
||||
|
||||
#include <unistd.h>
|
||||
|
||||
|
@ -50,7 +52,8 @@
|
|||
// ABSL_HAVE_SYSCALL_WRITE is defined when the platform provides the syscall
|
||||
// syscall(SYS_write, /*int*/ fd, /*char* */ buf, /*size_t*/ len);
|
||||
// for low level operations that want to avoid libc.
|
||||
#if (defined(__linux__) || defined(__FreeBSD__)) && !defined(__ANDROID__)
|
||||
#if (defined(__linux__) || defined(__FreeBSD__) || defined(__OpenBSD__)) && \
|
||||
!defined(__ANDROID__)
|
||||
#include <sys/syscall.h>
|
||||
#define ABSL_HAVE_SYSCALL_WRITE 1
|
||||
#define ABSL_LOW_LEVEL_WRITE_SUPPORTED 1
|
||||
|
@ -76,13 +79,6 @@ namespace {
|
|||
// Explicitly `#error` out when not `ABSL_LOW_LEVEL_WRITE_SUPPORTED`, except for
|
||||
// a selected set of platforms for which we expect not to be able to raw log.
|
||||
|
||||
ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES
|
||||
absl::base_internal::AtomicHook<LogPrefixHook>
|
||||
log_prefix_hook;
|
||||
ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES
|
||||
absl::base_internal::AtomicHook<AbortHook>
|
||||
abort_hook;
|
||||
|
||||
#ifdef ABSL_LOW_LEVEL_WRITE_SUPPORTED
|
||||
constexpr char kTruncated[] = " ... (message truncated)\n";
|
||||
|
||||
|
@ -130,6 +126,18 @@ bool DoRawLog(char** buf, int* size, const char* format, ...) {
|
|||
return true;
|
||||
}
|
||||
|
||||
bool DefaultLogFilterAndPrefix(absl::LogSeverity, const char* file, int line,
|
||||
char** buf, int* buf_size) {
|
||||
DoRawLog(buf, buf_size, "[%s : %d] RAW: ", file, line);
|
||||
return true;
|
||||
}
|
||||
|
||||
ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES
|
||||
absl::base_internal::AtomicHook<LogFilterAndPrefixHook>
|
||||
log_filter_and_prefix_hook(DefaultLogFilterAndPrefix);
|
||||
ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES
|
||||
absl::base_internal::AtomicHook<AbortHook> abort_hook;
|
||||
|
||||
void RawLogVA(absl::LogSeverity severity, const char* file, int line,
|
||||
const char* format, va_list ap) ABSL_PRINTF_ATTRIBUTE(4, 0);
|
||||
void RawLogVA(absl::LogSeverity severity, const char* file, int line,
|
||||
|
@ -150,14 +158,7 @@ void RawLogVA(absl::LogSeverity severity, const char* file, int line,
|
|||
}
|
||||
#endif
|
||||
|
||||
auto log_prefix_hook_ptr = log_prefix_hook.Load();
|
||||
if (log_prefix_hook_ptr) {
|
||||
enabled = log_prefix_hook_ptr(severity, file, line, &buf, &size);
|
||||
} else {
|
||||
if (enabled) {
|
||||
DoRawLog(&buf, &size, "[%s : %d] RAW: ", file, line);
|
||||
}
|
||||
}
|
||||
enabled = log_filter_and_prefix_hook(severity, file, line, &buf, &size);
|
||||
const char* const prefix_end = buf;
|
||||
|
||||
#ifdef ABSL_LOW_LEVEL_WRITE_SUPPORTED
|
||||
|
@ -168,11 +169,12 @@ void RawLogVA(absl::LogSeverity severity, const char* file, int line,
|
|||
} else {
|
||||
DoRawLog(&buf, &size, "%s", kTruncated);
|
||||
}
|
||||
SafeWriteToStderr(buffer, strlen(buffer));
|
||||
AsyncSignalSafeWriteToStderr(buffer, strlen(buffer));
|
||||
}
|
||||
#else
|
||||
static_cast<void>(format);
|
||||
static_cast<void>(ap);
|
||||
static_cast<void>(enabled);
|
||||
#endif
|
||||
|
||||
// Abort the process after logging a FATAL message, even if the output itself
|
||||
|
@ -195,8 +197,11 @@ void DefaultInternalLog(absl::LogSeverity severity, const char* file, int line,
|
|||
|
||||
} // namespace
|
||||
|
||||
void SafeWriteToStderr(const char *s, size_t len) {
|
||||
void AsyncSignalSafeWriteToStderr(const char* s, size_t len) {
|
||||
absl::base_internal::ErrnoSaver errno_saver;
|
||||
#if defined(ABSL_HAVE_SYSCALL_WRITE)
|
||||
// We prefer calling write via `syscall` to minimize the risk of libc doing
|
||||
// something "helpful".
|
||||
syscall(SYS_write, STDERR_FILENO, s, len);
|
||||
#elif defined(ABSL_HAVE_POSIX_WRITE)
|
||||
write(STDERR_FILENO, s, len);
|
||||
|
@ -229,7 +234,9 @@ ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES ABSL_DLL
|
|||
absl::base_internal::AtomicHook<InternalLogFunction>
|
||||
internal_log_function(DefaultInternalLog);
|
||||
|
||||
void RegisterLogPrefixHook(LogPrefixHook func) { log_prefix_hook.Store(func); }
|
||||
void RegisterLogFilterAndPrefixHook(LogFilterAndPrefixHook func) {
|
||||
log_filter_and_prefix_hook.Store(func);
|
||||
}
|
||||
|
||||
void RegisterAbortHook(AbortHook func) { abort_hook.Store(func); }
|
||||
|
||||
|
|
|
@ -109,12 +109,9 @@ namespace raw_logging_internal {
|
|||
void RawLog(absl::LogSeverity severity, const char* file, int line,
|
||||
const char* format, ...) ABSL_PRINTF_ATTRIBUTE(4, 5);
|
||||
|
||||
// Writes the provided buffer directly to stderr, in a safe, low-level manner.
|
||||
//
|
||||
// In POSIX this means calling write(), which is async-signal safe and does
|
||||
// not malloc. If the platform supports the SYS_write syscall, we invoke that
|
||||
// directly to side-step any libc interception.
|
||||
void SafeWriteToStderr(const char *s, size_t len);
|
||||
// Writes the provided buffer directly to stderr, in a signal-safe, low-level
|
||||
// manner.
|
||||
void AsyncSignalSafeWriteToStderr(const char* s, size_t len);
|
||||
|
||||
// compile-time function to get the "base" filename, that is, the part of
|
||||
// a filename after the last "/" or "\" path separator. The search starts at
|
||||
|
@ -148,11 +145,12 @@ bool RawLoggingFullySupported();
|
|||
// 'severity' is the severity level of the message being written.
|
||||
// 'file' and 'line' are the file and line number where the ABSL_RAW_LOG macro
|
||||
// was located.
|
||||
// 'buffer' and 'buf_size' are pointers to the buffer and buffer size. If the
|
||||
// hook writes a prefix, it must increment *buffer and decrement *buf_size
|
||||
// 'buf' and 'buf_size' are pointers to the buffer and buffer size. If the
|
||||
// hook writes a prefix, it must increment *buf and decrement *buf_size
|
||||
// accordingly.
|
||||
using LogPrefixHook = bool (*)(absl::LogSeverity severity, const char* file,
|
||||
int line, char** buffer, int* buf_size);
|
||||
using LogFilterAndPrefixHook = bool (*)(absl::LogSeverity severity,
|
||||
const char* file, int line, char** buf,
|
||||
int* buf_size);
|
||||
|
||||
// Function type for a raw_logging customization hook called to abort a process
|
||||
// when a FATAL message is logged. If the provided AbortHook() returns, the
|
||||
|
@ -162,7 +160,10 @@ using LogPrefixHook = bool (*)(absl::LogSeverity severity, const char* file,
|
|||
// was located.
|
||||
// The NUL-terminated logged message lives in the buffer between 'buf_start'
|
||||
// and 'buf_end'. 'prefix_end' points to the first non-prefix character of the
|
||||
// buffer (as written by the LogPrefixHook.)
|
||||
// buffer (as written by the LogFilterAndPrefixHook.)
|
||||
//
|
||||
// The lifetime of the filename and message buffers will not end while the
|
||||
// process remains alive.
|
||||
using AbortHook = void (*)(const char* file, int line, const char* buf_start,
|
||||
const char* prefix_end, const char* buf_end);
|
||||
|
||||
|
@ -184,7 +185,7 @@ ABSL_INTERNAL_ATOMIC_HOOK_ATTRIBUTES ABSL_DLL extern base_internal::AtomicHook<
|
|||
//
|
||||
// These functions are safe to call at any point during initialization; they do
|
||||
// not block or malloc, and are async-signal safe.
|
||||
void RegisterLogPrefixHook(LogPrefixHook func);
|
||||
void RegisterLogFilterAndPrefixHook(LogFilterAndPrefixHook func);
|
||||
void RegisterAbortHook(AbortHook func);
|
||||
void RegisterInternalLogFunction(InternalLogFunction func);
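A hedged sketch of a LogFilterAndPrefixHook matching the signature above (hypothetical hook that drops anything below WARNING and writes its own prefix):

  #include <cstdio>
  #include "absl/base/internal/raw_logging.h"
  #include "absl/base/log_severity.h"

  bool MyFilterAndPrefix(absl::LogSeverity severity, const char* file, int line,
                         char** buf, int* buf_size) {
    if (severity < absl::LogSeverity::kWarning) return false;  // filter out
    // The hook must advance *buf and shrink *buf_size by what it writes.
    int n = snprintf(*buf, static_cast<size_t>(*buf_size), "[%s:%d] ", file, line);
    if (n > 0 && n < *buf_size) {
      *buf += n;
      *buf_size -= n;
    }
    return true;  // keep logging this message
  }
  // Registered early during initialization:
  //   absl::raw_logging_internal::RegisterLogFilterAndPrefixHook(MyFilterAndPrefix);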
|
||||
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
#include <limits>
|
||||
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/base/config.h"
|
||||
#include "absl/base/internal/atomic_hook.h"
|
||||
#include "absl/base/internal/cycleclock.h"
|
||||
#include "absl/base/internal/spinlock_wait.h"
|
||||
|
@ -66,12 +67,14 @@ void RegisterSpinLockProfiler(void (*fn)(const void *contendedlock,
|
|||
submit_profile_data.Store(fn);
|
||||
}
|
||||
|
||||
#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
|
||||
// Static member variable definitions.
|
||||
constexpr uint32_t SpinLock::kSpinLockHeld;
|
||||
constexpr uint32_t SpinLock::kSpinLockCooperative;
|
||||
constexpr uint32_t SpinLock::kSpinLockDisabledScheduling;
|
||||
constexpr uint32_t SpinLock::kSpinLockSleeper;
|
||||
constexpr uint32_t SpinLock::kWaitTimeMask;
|
||||
#endif
|
||||
|
||||
// Uncommon constructors.
|
||||
SpinLock::SpinLock(base_internal::SchedulingMode mode)
|
||||
|
|
|
@ -16,13 +16,15 @@
|
|||
|
||||
// Most users requiring mutual exclusion should use Mutex.
|
||||
// SpinLock is provided for use in two situations:
|
||||
// - for use in code that Mutex itself depends on
|
||||
// - for use by Abseil internal code that Mutex itself depends on
|
||||
// - for async signal safety (see below)
|
||||
|
||||
// SpinLock is async signal safe. If a spinlock is used within a signal
|
||||
// handler, all code that acquires the lock must ensure that the signal cannot
|
||||
// arrive while they are holding the lock. Typically, this is done by blocking
|
||||
// the signal.
|
||||
//
|
||||
// Threads waiting on a SpinLock may be woken in an arbitrary order.
|
||||
|
||||
#ifndef ABSL_BASE_INTERNAL_SPINLOCK_H_
|
||||
#define ABSL_BASE_INTERNAL_SPINLOCK_H_
|
||||
|
@ -118,6 +120,14 @@ class ABSL_LOCKABLE SpinLock {
|
|||
return (lockword_.load(std::memory_order_relaxed) & kSpinLockHeld) != 0;
|
||||
}
|
||||
|
||||
// Return immediately if this thread holds the SpinLock exclusively.
|
||||
// Otherwise, report an error by crashing with a diagnostic.
|
||||
inline void AssertHeld() const ABSL_ASSERT_EXCLUSIVE_LOCK() {
|
||||
if (!IsHeld()) {
|
||||
ABSL_RAW_LOG(FATAL, "thread should hold the lock on SpinLock");
|
||||
}
|
||||
}
|
||||
|
||||
protected:
|
||||
// These should not be exported except for testing.
|
||||
|
||||
|
|
|
@ -57,13 +57,10 @@ static_assert(sizeof(std::atomic<uint32_t>) == sizeof(int),
|
|||
extern "C" {
|
||||
|
||||
ABSL_ATTRIBUTE_WEAK void ABSL_INTERNAL_C_SYMBOL(AbslInternalSpinLockDelay)(
|
||||
std::atomic<uint32_t> *w, uint32_t value, int loop,
|
||||
std::atomic<uint32_t> *w, uint32_t value, int,
|
||||
absl::base_internal::SchedulingMode) {
|
||||
absl::base_internal::ErrnoSaver errno_saver;
|
||||
struct timespec tm;
|
||||
tm.tv_sec = 0;
|
||||
tm.tv_nsec = absl::base_internal::SpinLockSuggestedDelayNS(loop);
|
||||
syscall(SYS_futex, w, FUTEX_WAIT | FUTEX_PRIVATE_FLAG, value, &tm);
|
||||
syscall(SYS_futex, w, FUTEX_WAIT | FUTEX_PRIVATE_FLAG, value, nullptr);
|
||||
}
|
||||
|
||||
ABSL_ATTRIBUTE_WEAK void ABSL_INTERNAL_C_SYMBOL(AbslInternalSpinLockWake)(
|
||||
|
|
|
@ -39,6 +39,8 @@ struct SpinLockWaitTransition {
|
|||
// satisfying 0<=i<n && trans[i].done, atomically make the transition,
|
||||
// then return the old value of *w. Make any other atomic transitions
|
||||
// where !trans[i].done, but continue waiting.
|
||||
//
|
||||
// Wakeups for threads blocked on SpinLockWait do not respect priorities.
|
||||
uint32_t SpinLockWait(std::atomic<uint32_t> *w, int n,
|
||||
const SpinLockWaitTransition trans[],
|
||||
SchedulingMode scheduling_mode);
|
||||
|
|
|
@ -124,13 +124,14 @@ int Win32NumCPUs() {
|
|||
|
||||
} // namespace
|
||||
|
||||
|
||||
static int GetNumCPUs() {
|
||||
#if defined(__myriad2__)
|
||||
return 1;
|
||||
#elif defined(_WIN32)
|
||||
const unsigned hardware_concurrency = Win32NumCPUs();
|
||||
return hardware_concurrency ? hardware_concurrency : 1;
|
||||
#elif defined(_AIX)
|
||||
return sysconf(_SC_NPROCESSORS_ONLN);
|
||||
#else
|
||||
// Other possibilities:
|
||||
// - Read /sys/devices/system/cpu/online and use cpumask_parse()
|
||||
|
|
|
@@ -37,29 +37,6 @@ TEST(SysinfoTest, NumCPUs) {
      << "NumCPUs() should not have the default value of 0";
}

// Ensure that NominalCPUFrequency returns a reasonable value, or 1.00 on
// platforms where the CPU frequency is not available through sysfs.
//
// POWER is particularly problematic here; some Linux kernels expose the CPU
// frequency, while others do not. Since we can't predict a priori what a given
// machine is going to do, just disable this test on POWER on Linux.
#if !(defined(__linux) && (defined(__ppc64__) || defined(__PPC64__)))
TEST(SysinfoTest, NominalCPUFrequency) {
// Linux only exposes the CPU frequency on certain architectures, and
// Emscripten doesn't expose it at all.
#if defined(__linux__) && \
    (defined(__aarch64__) || defined(__hppa__) || defined(__mips__) || \
     defined(__riscv) || defined(__s390x__)) || \
    defined(__EMSCRIPTEN__)
  EXPECT_EQ(NominalCPUFrequency(), 1.0)
      << "CPU frequency detection was fixed! Please update unittest.";
#else
  EXPECT_GE(NominalCPUFrequency(), 1000.0)
      << "NominalCPUFrequency() did not return a reasonable value";
#endif
}
#endif

TEST(SysinfoTest, GetTID) {
  EXPECT_EQ(GetTID(), GetTID()); // Basic compile and equality test.
#ifdef __native_client__

@@ -14,7 +14,7 @@

#include "absl/base/internal/thread_identity.h"

#ifndef _WIN32
#if !defined(_WIN32) || defined(__MINGW32__)
#include <pthread.h>
#include <signal.h>
#endif

@@ -56,6 +56,7 @@ void AllocateThreadIdentityKey(ThreadIdentityReclaimerFunction reclaimer) {
// *different* instances of this ptr.
// Apple platforms have the visibility attribute, but issue a compile warning
// that protected visibility is unsupported.
ABSL_CONST_INIT // Must come before __attribute__((visibility("protected")))
#if ABSL_HAVE_ATTRIBUTE(visibility) && !defined(__APPLE__)
__attribute__((visibility("protected")))
#endif // ABSL_HAVE_ATTRIBUTE(visibility) && !defined(__APPLE__)

@@ -24,8 +24,13 @@
#ifdef __GLIBC__
#include <sys/platform/ppc.h>
#elif defined(__FreeBSD__)
#include <sys/sysctl.h>
// clang-format off
// This order does actually matter =(.
#include <sys/types.h>
#include <sys/sysctl.h>
// clang-format on

#include "absl/base/call_once.h"
#endif
#endif

@@ -49,12 +54,6 @@ double UnscaledCycleClock::Frequency() {

#elif defined(__x86_64__)

int64_t UnscaledCycleClock::Now() {
  uint64_t low, high;
  __asm__ volatile("rdtsc" : "=a"(low), "=d"(high));
  return (high << 32) | low;
}

double UnscaledCycleClock::Frequency() {
  return base_internal::NominalCPUFrequency();
}
@@ -87,6 +86,10 @@ int64_t UnscaledCycleClock::Now() {
double UnscaledCycleClock::Frequency() {
#ifdef __GLIBC__
  return __ppc_get_timebase_freq();
#elif defined(_AIX)
  // This is the same constant value as returned by
  // __ppc_get_timebase_freq().
  return static_cast<double>(512000000);
#elif defined(__FreeBSD__)
  static once_flag init_timebase_frequency_once;
  static double timebase_frequency = 0.0;
@@ -119,6 +122,18 @@ double UnscaledCycleClock::Frequency() {
  return aarch64_timer_frequency;
}

#elif defined(__riscv)

int64_t UnscaledCycleClock::Now() {
  int64_t virtual_timer_value;
  asm volatile("rdcycle %0" : "=r"(virtual_timer_value));
  return virtual_timer_value;
}

double UnscaledCycleClock::Frequency() {
  return base_internal::NominalCPUFrequency();
}

#elif defined(_M_IX86) || defined(_M_X64)

#pragma intrinsic(__rdtsc)

@@ -46,8 +46,8 @@

// The following platforms have an implementation of a hardware counter.
#if defined(__i386__) || defined(__x86_64__) || defined(__aarch64__) || \
    defined(__powerpc__) || defined(__ppc__) || \
    defined(_M_IX86) || defined(_M_X64)
    defined(__powerpc__) || defined(__ppc__) || defined(__riscv) || \
    defined(_M_IX86) || (defined(_M_X64) && !defined(_M_ARM64EC))
#define ABSL_HAVE_UNSCALED_CYCLECLOCK_IMPLEMENTATION 1
#else
#define ABSL_HAVE_UNSCALED_CYCLECLOCK_IMPLEMENTATION 0
@@ -59,8 +59,7 @@
// CycleClock that runs at atleast 1 MHz. We've found some Android
// ARM64 devices where this is not the case, so we disable it by
// default on Android ARM64.
#if defined(__native_client__) || \
    (defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE) || \
#if defined(__native_client__) || (defined(__APPLE__)) || \
    (defined(__ANDROID__) && defined(__aarch64__))
#define ABSL_USE_UNSCALED_CYCLECLOCK_DEFAULT 0
#else
@@ -80,7 +79,7 @@

// This macro can be used to test if UnscaledCycleClock::Frequency()
// is NominalCPUFrequency() on a particular platform.
#if (defined(__i386__) || defined(__x86_64__) || \
#if (defined(__i386__) || defined(__x86_64__) || defined(__riscv) || \
     defined(_M_IX86) || defined(_M_X64))
#define ABSL_INTERNAL_UNSCALED_CYCLECLOCK_FREQUENCY_IS_CPU_FREQUENCY
#endif
@@ -115,6 +114,16 @@ class UnscaledCycleClock {
  friend class base_internal::UnscaledCycleClockWrapperForInitializeFrequency;
};

#if defined(__x86_64__)

inline int64_t UnscaledCycleClock::Now() {
  uint64_t low, high;
  __asm__ volatile("rdtsc" : "=a"(low), "=d"(high));
  return (high << 32) | low;
}

#endif

} // namespace base_internal
ABSL_NAMESPACE_END
} // namespace absl

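Note (illustrative only, not part of this diff): the inlined x86_64 Now() above is a plain rdtsc read. A standalone sketch of how such readings are typically turned into a duration, where cpu_hz stands in for the nominal frequency and is an assumption:

#include <cstdint>

// Mirrors the inline Now() shown above (x86_64 GCC/Clang only).
inline uint64_t ReadTsc() {
  uint64_t low, high;
  __asm__ volatile("rdtsc" : "=a"(low), "=d"(high));
  return (high << 32) | low;
}

double ElapsedSeconds(uint64_t start_ticks, uint64_t end_ticks, double cpu_hz) {
  return static_cast<double>(end_ticks - start_ticks) / cpu_hz;
}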
@@ -31,6 +31,14 @@ namespace {

int Function(int a, int b) { return a - b; }

void VoidFunction(int& a, int& b) {
  a += b;
  b = a - b;
  a -= b;
}

int ZeroArgFunction() { return -1937; }

int Sink(std::unique_ptr<int> p) {
  return *p;
}
@@ -223,6 +231,100 @@ TEST(InvokeTest, SfinaeFriendly) {
  EXPECT_THAT(CallMaybeWithArg(Factory), ::testing::Pointee(42));
}

TEST(IsInvocableRTest, CallableExactMatch) {
  static_assert(
      base_internal::is_invocable_r<int, decltype(Function), int, int>::value,
      "Should be true for exact match of types on a free function");
}

TEST(IsInvocableRTest, CallableArgumentConversionMatch) {
  static_assert(
      base_internal::is_invocable_r<int, decltype(Function), char, int>::value,
      "Should be true for convertible argument type");
}

TEST(IsInvocableRTest, CallableReturnConversionMatch) {
  static_assert(base_internal::is_invocable_r<double, decltype(Function), int,
                                              int>::value,
                "Should be true for convertible return type");
}

TEST(IsInvocableRTest, CallableReturnVoid) {
  static_assert(base_internal::is_invocable_r<void, decltype(VoidFunction),
                                              int&, int&>::value,
                "Should be true for void expected and actual return types");
  static_assert(
      base_internal::is_invocable_r<void, decltype(Function), int, int>::value,
      "Should be true for void expected and non-void actual return types");
}

TEST(IsInvocableRTest, CallableRefQualifierMismatch) {
  static_assert(!base_internal::is_invocable_r<void, decltype(VoidFunction),
                                               int&, const int&>::value,
                "Should be false for reference constness mismatch");
  static_assert(!base_internal::is_invocable_r<void, decltype(VoidFunction),
                                               int&&, int&>::value,
                "Should be false for reference value category mismatch");
}

TEST(IsInvocableRTest, CallableArgumentTypeMismatch) {
  static_assert(!base_internal::is_invocable_r<int, decltype(Function),
                                               std::string, int>::value,
                "Should be false for argument type mismatch");
}

TEST(IsInvocableRTest, CallableReturnTypeMismatch) {
  static_assert(!base_internal::is_invocable_r<std::string, decltype(Function),
                                               int, int>::value,
                "Should be false for return type mismatch");
}

TEST(IsInvocableRTest, CallableTooFewArgs) {
  static_assert(
      !base_internal::is_invocable_r<int, decltype(Function), int>::value,
      "Should be false for too few arguments");
}

TEST(IsInvocableRTest, CallableTooManyArgs) {
  static_assert(!base_internal::is_invocable_r<int, decltype(Function), int,
                                               int, int>::value,
                "Should be false for too many arguments");
}

TEST(IsInvocableRTest, MemberFunctionAndReference) {
  static_assert(base_internal::is_invocable_r<int, decltype(&Class::Method),
                                              Class&, int, int>::value,
                "Should be true for exact match of types on a member function "
                "and class reference");
}

TEST(IsInvocableRTest, MemberFunctionAndPointer) {
  static_assert(base_internal::is_invocable_r<int, decltype(&Class::Method),
                                              Class*, int, int>::value,
                "Should be true for exact match of types on a member function "
                "and class pointer");
}

TEST(IsInvocableRTest, DataMemberAndReference) {
  static_assert(base_internal::is_invocable_r<int, decltype(&Class::member),
                                              Class&>::value,
                "Should be true for exact match of types on a data member and "
                "class reference");
}

TEST(IsInvocableRTest, DataMemberAndPointer) {
  static_assert(base_internal::is_invocable_r<int, decltype(&Class::member),
                                              Class*>::value,
                "Should be true for exact match of types on a data member and "
                "class pointer");
}

TEST(IsInvocableRTest, CallableZeroArgs) {
  static_assert(
      base_internal::is_invocable_r<int, decltype(ZeroArgFunction)>::value,
      "Should be true for exact match for a zero-arg free function");
}

} // namespace
} // namespace base_internal
ABSL_NAMESPACE_END

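Note (for context, not part of this diff): the backported trait checks the same contract as C++17's std::is_invocable_r, "callable with these arguments, yielding a result convertible to R". A minimal standard-library sketch of the cases exercised above:

#include <string>
#include <type_traits>

int Sub(int a, int b) { return a - b; }

// Requires C++17 for std::is_invocable_r.
static_assert(std::is_invocable_r<int, decltype(Sub), int, int>::value,
              "exact match");
static_assert(std::is_invocable_r<double, decltype(Sub), char, int>::value,
              "convertible argument and return types");
static_assert(!std::is_invocable_r<std::string, decltype(Sub), int, int>::value,
              "return type mismatch");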
@@ -16,6 +16,8 @@

#include <ostream>

#include "absl/base/attributes.h"

namespace absl {
ABSL_NAMESPACE_BEGIN

@@ -23,5 +25,31 @@ std::ostream& operator<<(std::ostream& os, absl::LogSeverity s) {
  if (s == absl::NormalizeLogSeverity(s)) return os << absl::LogSeverityName(s);
  return os << "absl::LogSeverity(" << static_cast<int>(s) << ")";
}

std::ostream& operator<<(std::ostream& os, absl::LogSeverityAtLeast s) {
  switch (s) {
    case absl::LogSeverityAtLeast::kInfo:
    case absl::LogSeverityAtLeast::kWarning:
    case absl::LogSeverityAtLeast::kError:
    case absl::LogSeverityAtLeast::kFatal:
      return os << ">=" << static_cast<absl::LogSeverity>(s);
    case absl::LogSeverityAtLeast::kInfinity:
      return os << "INFINITY";
  }
  return os;
}

std::ostream& operator<<(std::ostream& os, absl::LogSeverityAtMost s) {
  switch (s) {
    case absl::LogSeverityAtMost::kInfo:
    case absl::LogSeverityAtMost::kWarning:
    case absl::LogSeverityAtMost::kError:
    case absl::LogSeverityAtMost::kFatal:
      return os << "<=" << static_cast<absl::LogSeverity>(s);
    case absl::LogSeverityAtMost::kNegativeInfinity:
      return os << "NEGATIVE_INFINITY";
  }
  return os;
}
ABSL_NAMESPACE_END
} // namespace absl

@@ -115,6 +115,57 @@ constexpr absl::LogSeverity NormalizeLogSeverity(int s) {
// unspecified; do not rely on it.
std::ostream& operator<<(std::ostream& os, absl::LogSeverity s);

// Enums representing a lower bound for LogSeverity. APIs that only operate on
// messages of at least a certain level (for example, `SetMinLogLevel()`) use
// this type to specify that level. absl::LogSeverityAtLeast::kInfinity is
// a level above all threshold levels and therefore no log message will
// ever meet this threshold.
enum class LogSeverityAtLeast : int {
  kInfo = static_cast<int>(absl::LogSeverity::kInfo),
  kWarning = static_cast<int>(absl::LogSeverity::kWarning),
  kError = static_cast<int>(absl::LogSeverity::kError),
  kFatal = static_cast<int>(absl::LogSeverity::kFatal),
  kInfinity = 1000,
};

std::ostream& operator<<(std::ostream& os, absl::LogSeverityAtLeast s);

// Enums representing an upper bound for LogSeverity. APIs that only operate on
// messages of at most a certain level (for example, buffer all messages at or
// below a certain level) use this type to specify that level.
// absl::LogSeverityAtMost::kNegativeInfinity is a level below all threshold
// levels and therefore will exclude all log messages.
enum class LogSeverityAtMost : int {
  kNegativeInfinity = -1000,
  kInfo = static_cast<int>(absl::LogSeverity::kInfo),
  kWarning = static_cast<int>(absl::LogSeverity::kWarning),
  kError = static_cast<int>(absl::LogSeverity::kError),
  kFatal = static_cast<int>(absl::LogSeverity::kFatal),
};

std::ostream& operator<<(std::ostream& os, absl::LogSeverityAtMost s);

#define COMPOP(op1, op2, T)                                         \
  constexpr bool operator op1(absl::T lhs, absl::LogSeverity rhs) { \
    return static_cast<absl::LogSeverity>(lhs) op1 rhs;             \
  }                                                                 \
  constexpr bool operator op2(absl::LogSeverity lhs, absl::T rhs) { \
    return lhs op2 static_cast<absl::LogSeverity>(rhs);             \
  }

// Comparisons between `LogSeverity` and `LogSeverityAtLeast`/
// `LogSeverityAtMost` are only supported in one direction.
// Valid checks are:
//   LogSeverity >= LogSeverityAtLeast
//   LogSeverity < LogSeverityAtLeast
//   LogSeverity <= LogSeverityAtMost
//   LogSeverity > LogSeverityAtMost
COMPOP(>, <, LogSeverityAtLeast)
COMPOP(<=, >=, LogSeverityAtLeast)
COMPOP(<, >, LogSeverityAtMost)
COMPOP(>=, <=, LogSeverityAtMost)
#undef COMPOP

ABSL_NAMESPACE_END
} // namespace absl

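Note (usage sketch, not part of this diff): the COMPOP overloads above only allow comparisons in the documented directions, with the message severity on the left for ">=":

#include "absl/base/log_severity.h"

bool ShouldLog(absl::LogSeverity msg, absl::LogSeverityAtLeast threshold) {
  // Valid per the table above: LogSeverity >= LogSeverityAtLeast.
  // No message satisfies a threshold of LogSeverityAtLeast::kInfinity.
  return msg >= threshold;
}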
@@ -35,7 +35,8 @@ using ::testing::IsTrue;
using ::testing::TestWithParam;
using ::testing::Values;

std::string StreamHelper(absl::LogSeverity value) {
template <typename T>
std::string StreamHelper(T value) {
  std::ostringstream stream;
  stream << value;
  return stream.str();
@@ -201,4 +202,44 @@ TEST_P(UnparseFlagToOtherIntegerTest, ReturnsExpectedValueAndRoundTrips) {
              IsTrue());
  EXPECT_THAT(reparsed_value, Eq(to_unparse));
}

TEST(LogThresholdTest, LogSeverityAtLeastTest) {
  EXPECT_LT(absl::LogSeverity::kError, absl::LogSeverityAtLeast::kFatal);
  EXPECT_GT(absl::LogSeverityAtLeast::kError, absl::LogSeverity::kInfo);

  EXPECT_LE(absl::LogSeverityAtLeast::kInfo, absl::LogSeverity::kError);
  EXPECT_GE(absl::LogSeverity::kError, absl::LogSeverityAtLeast::kInfo);
}

TEST(LogThresholdTest, LogSeverityAtMostTest) {
  EXPECT_GT(absl::LogSeverity::kError, absl::LogSeverityAtMost::kWarning);
  EXPECT_LT(absl::LogSeverityAtMost::kError, absl::LogSeverity::kFatal);

  EXPECT_GE(absl::LogSeverityAtMost::kFatal, absl::LogSeverity::kError);
  EXPECT_LE(absl::LogSeverity::kWarning, absl::LogSeverityAtMost::kError);
}

TEST(LogThresholdTest, Extremes) {
  EXPECT_LT(absl::LogSeverity::kFatal, absl::LogSeverityAtLeast::kInfinity);
  EXPECT_GT(absl::LogSeverity::kInfo,
            absl::LogSeverityAtMost::kNegativeInfinity);
}

TEST(LogThresholdTest, Output) {
  EXPECT_THAT(StreamHelper(absl::LogSeverityAtLeast::kInfo), Eq(">=INFO"));
  EXPECT_THAT(StreamHelper(absl::LogSeverityAtLeast::kWarning),
              Eq(">=WARNING"));
  EXPECT_THAT(StreamHelper(absl::LogSeverityAtLeast::kError), Eq(">=ERROR"));
  EXPECT_THAT(StreamHelper(absl::LogSeverityAtLeast::kFatal), Eq(">=FATAL"));
  EXPECT_THAT(StreamHelper(absl::LogSeverityAtLeast::kInfinity),
              Eq("INFINITY"));

  EXPECT_THAT(StreamHelper(absl::LogSeverityAtMost::kInfo), Eq("<=INFO"));
  EXPECT_THAT(StreamHelper(absl::LogSeverityAtMost::kWarning), Eq("<=WARNING"));
  EXPECT_THAT(StreamHelper(absl::LogSeverityAtMost::kError), Eq("<=ERROR"));
  EXPECT_THAT(StreamHelper(absl::LogSeverityAtMost::kFatal), Eq("<=FATAL"));
  EXPECT_THAT(StreamHelper(absl::LogSeverityAtMost::kNegativeInfinity),
              Eq("NEGATIVE_INFINITY"));
}

} // namespace

@@ -181,35 +181,43 @@
#define ABSL_PREDICT_TRUE(x) (x)
#endif

// ABSL_INTERNAL_ASSUME(cond)
// ABSL_ASSUME(cond)
//
// Informs the compiler that a condition is always true and that it can assume
// it to be true for optimization purposes. The call has undefined behavior if
// the condition is false.
// it to be true for optimization purposes.
//
// WARNING: If the condition is false, the program can produce undefined and
// potentially dangerous behavior.
//
// In !NDEBUG mode, the condition is checked with an assert().
// NOTE: The expression must not have side effects, as it will only be evaluated
// in some compilation modes and not others.
//
// NOTE: The expression must not have side effects, as it may only be evaluated
// in some compilation modes and not others. Some compilers may issue a warning
// if the compiler cannot prove the expression has no side effects. For example,
// the expression should not use a function call since the compiler cannot prove
// that a function call does not have side effects.
//
// Example:
//
//   int x = ...;
//   ABSL_INTERNAL_ASSUME(x >= 0);
//   ABSL_ASSUME(x >= 0);
//   // The compiler can optimize the division to a simple right shift using the
//   // assumption specified above.
//   int y = x / 16;
//
#if !defined(NDEBUG)
#define ABSL_INTERNAL_ASSUME(cond) assert(cond)
#define ABSL_ASSUME(cond) assert(cond)
#elif ABSL_HAVE_BUILTIN(__builtin_assume)
#define ABSL_INTERNAL_ASSUME(cond) __builtin_assume(cond)
#define ABSL_ASSUME(cond) __builtin_assume(cond)
#elif defined(__GNUC__) || ABSL_HAVE_BUILTIN(__builtin_unreachable)
#define ABSL_INTERNAL_ASSUME(cond)        \
#define ABSL_ASSUME(cond)                 \
  do {                                    \
    if (!(cond)) __builtin_unreachable(); \
  } while (0)
#elif defined(_MSC_VER)
#define ABSL_INTERNAL_ASSUME(cond) __assume(cond)
#define ABSL_ASSUME(cond) __assume(cond)
#else
#define ABSL_INTERNAL_ASSUME(cond)      \
#define ABSL_ASSUME(cond)               \
  do {                                  \
    static_cast<void>(false && (cond)); \
  } while (0)

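Note (usage sketch, not part of this diff): after the rename, callers use ABSL_ASSUME directly. Assuming the macro is exported from absl/base/optimization.h, the shift optimization documented above looks like:

#include "absl/base/optimization.h"

int DivideBy16(int x) {
  ABSL_ASSUME(x >= 0);  // assert() in !NDEBUG builds, pure optimizer hint otherwise
  return x / 16;        // can now be lowered to a simple right shift
}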
@@ -100,7 +100,7 @@
// User code should not inspect this macro. To check in the preprocessor if
// absl::any is a typedef of std::any, use the feature macro ABSL_USES_STD_ANY.

#define ABSL_OPTION_USE_STD_ANY 2
#define ABSL_OPTION_USE_STD_ANY 0


// ABSL_OPTION_USE_STD_OPTIONAL

@@ -127,7 +127,7 @@
// absl::optional is a typedef of std::optional, use the feature macro
// ABSL_USES_STD_OPTIONAL.

#define ABSL_OPTION_USE_STD_OPTIONAL 2
#define ABSL_OPTION_USE_STD_OPTIONAL 0


// ABSL_OPTION_USE_STD_STRING_VIEW

@@ -154,7 +154,7 @@
// absl::string_view is a typedef of std::string_view, use the feature macro
// ABSL_USES_STD_STRING_VIEW.

#define ABSL_OPTION_USE_STD_STRING_VIEW 2
#define ABSL_OPTION_USE_STD_STRING_VIEW 0

// ABSL_OPTION_USE_STD_VARIANT
//

@@ -180,7 +180,7 @@
// absl::variant is a typedef of std::variant, use the feature macro
// ABSL_USES_STD_VARIANT.

#define ABSL_OPTION_USE_STD_VARIANT 2
#define ABSL_OPTION_USE_STD_VARIANT 0


// ABSL_OPTION_USE_INLINE_NAMESPACE

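Note (for context, not part of this diff): with the ABSL_OPTION_USE_STD_* knobs pinned to 0, the absl types are Abseil's own implementations, and code that cares should test the ABSL_USES_STD_* feature macros rather than the option macros, as the comments above direct. A small sketch:

#include "absl/types/optional.h"

absl::optional<int> MaybeAnswer(bool have_answer) {
  // ABSL_USES_STD_OPTIONAL would be defined if absl::optional aliased
  // std::optional; with the option forced to 0 it is Abseil's own type.
  if (!have_answer) return absl::nullopt;
  return 42;
}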
@@ -152,8 +152,8 @@

// ABSL_LOCKS_EXCLUDED()
//
// Documents the locks acquired in the body of the function. These locks
// cannot be held when calling this function (as Abseil's `Mutex` locks are
// Documents the locks that cannot be held by callers of this function, as they
// might be acquired by this function (Abseil's `Mutex` locks are
// non-reentrant).
#if ABSL_HAVE_ATTRIBUTE(locks_excluded)
#define ABSL_LOCKS_EXCLUDED(...) __attribute__((locks_excluded(__VA_ARGS__)))

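Note (usage sketch, not part of this diff): the reworded comment describes the caller-side contract. A typical annotated method looks like:

#include "absl/base/thread_annotations.h"
#include "absl/synchronization/mutex.h"

class Counter {
 public:
  // Callers must not hold mu_; this method acquires it itself, and Abseil's
  // Mutex is non-reentrant.
  void Increment() ABSL_LOCKS_EXCLUDED(mu_) {
    absl::MutexLock lock(&mu_);
    ++value_;
  }

 private:
  absl::Mutex mu_;
  int value_ ABSL_GUARDED_BY(mu_) = 0;
};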
@ -12,7 +12,6 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test")
|
||||
load(
|
||||
"//absl:copts/configure_copts.bzl",
|
||||
"ABSL_DEFAULT_COPTS",
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
cleanup_internal
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
# limitations under the License.
|
||||
#
|
||||
|
||||
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test")
|
||||
load(
|
||||
"//absl:copts/configure_copts.bzl",
|
||||
"ABSL_DEFAULT_COPTS",
|
||||
|
@ -218,11 +217,6 @@ cc_test(
|
|||
],
|
||||
)
|
||||
|
||||
NOTEST_TAGS_NONMOBILE = [
|
||||
"no_test_darwin_x86_64",
|
||||
"no_test_loonix",
|
||||
]
|
||||
|
||||
NOTEST_TAGS_MOBILE = [
|
||||
"no_test_android_arm",
|
||||
"no_test_android_arm64",
|
||||
|
@ -230,8 +224,6 @@ NOTEST_TAGS_MOBILE = [
|
|||
"no_test_ios_x86_64",
|
||||
]
|
||||
|
||||
NOTEST_TAGS = NOTEST_TAGS_MOBILE + NOTEST_TAGS_NONMOBILE
|
||||
|
||||
cc_library(
|
||||
name = "flat_hash_map",
|
||||
hdrs = ["flat_hash_map.h"],
|
||||
|
@ -242,6 +234,7 @@ cc_library(
|
|||
":hash_function_defaults",
|
||||
":raw_hash_map",
|
||||
"//absl/algorithm:container",
|
||||
"//absl/base:core_headers",
|
||||
"//absl/memory",
|
||||
],
|
||||
)
|
||||
|
@ -251,7 +244,7 @@ cc_test(
|
|||
srcs = ["flat_hash_map_test.cc"],
|
||||
copts = ABSL_TEST_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = NOTEST_TAGS_NONMOBILE,
|
||||
tags = ["no_test_loonix"],
|
||||
deps = [
|
||||
":flat_hash_map",
|
||||
":hash_generator_testing",
|
||||
|
@ -285,7 +278,7 @@ cc_test(
|
|||
srcs = ["flat_hash_set_test.cc"],
|
||||
copts = ABSL_TEST_COPTS + ["-DUNORDERED_SET_CXX17"],
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = NOTEST_TAGS_NONMOBILE,
|
||||
tags = ["no_test_loonix"],
|
||||
deps = [
|
||||
":flat_hash_set",
|
||||
":hash_generator_testing",
|
||||
|
@ -308,9 +301,10 @@ cc_library(
|
|||
deps = [
|
||||
":container_memory",
|
||||
":hash_function_defaults",
|
||||
":node_hash_policy",
|
||||
":node_slot_policy",
|
||||
":raw_hash_map",
|
||||
"//absl/algorithm:container",
|
||||
"//absl/base:core_headers",
|
||||
"//absl/memory",
|
||||
],
|
||||
)
|
||||
|
@ -320,7 +314,7 @@ cc_test(
|
|||
srcs = ["node_hash_map_test.cc"],
|
||||
copts = ABSL_TEST_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = NOTEST_TAGS_NONMOBILE,
|
||||
tags = ["no_test_loonix"],
|
||||
deps = [
|
||||
":hash_generator_testing",
|
||||
":node_hash_map",
|
||||
|
@ -340,9 +334,10 @@ cc_library(
|
|||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
deps = [
|
||||
":hash_function_defaults",
|
||||
":node_hash_policy",
|
||||
":node_slot_policy",
|
||||
":raw_hash_set",
|
||||
"//absl/algorithm:container",
|
||||
"//absl/base:core_headers",
|
||||
"//absl/memory",
|
||||
],
|
||||
)
|
||||
|
@ -352,7 +347,7 @@ cc_test(
|
|||
srcs = ["node_hash_set_test.cc"],
|
||||
copts = ABSL_TEST_COPTS + ["-DUNORDERED_SET_CXX17"],
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = NOTEST_TAGS_NONMOBILE,
|
||||
tags = ["no_test_loonix"],
|
||||
deps = [
|
||||
":node_hash_set",
|
||||
":unordered_set_constructor_test",
|
||||
|
@ -381,7 +376,7 @@ cc_test(
|
|||
srcs = ["internal/container_memory_test.cc"],
|
||||
copts = ABSL_TEST_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = NOTEST_TAGS_NONMOBILE,
|
||||
tags = ["no_test_loonix"],
|
||||
deps = [
|
||||
":container_memory",
|
||||
":test_instance_tracker",
|
||||
|
@ -408,7 +403,7 @@ cc_test(
|
|||
srcs = ["internal/hash_function_defaults_test.cc"],
|
||||
copts = ABSL_TEST_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = NOTEST_TAGS,
|
||||
tags = NOTEST_TAGS_MOBILE + ["no_test_loonix"],
|
||||
deps = [
|
||||
":hash_function_defaults",
|
||||
"//absl/hash",
|
||||
|
@ -507,12 +502,13 @@ cc_library(
|
|||
copts = ABSL_DEFAULT_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
deps = [
|
||||
":have_sse",
|
||||
"//absl/base",
|
||||
"//absl/base:config",
|
||||
"//absl/base:core_headers",
|
||||
"//absl/base:exponential_biased",
|
||||
"//absl/debugging:stacktrace",
|
||||
"//absl/memory",
|
||||
"//absl/profiling:exponential_biased",
|
||||
"//absl/profiling:sample_recorder",
|
||||
"//absl/synchronization",
|
||||
"//absl/utility",
|
||||
],
|
||||
|
@ -522,10 +518,14 @@ cc_test(
|
|||
name = "hashtablez_sampler_test",
|
||||
srcs = ["internal/hashtablez_sampler_test.cc"],
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = [
|
||||
"no_test_wasm",
|
||||
],
|
||||
deps = [
|
||||
":hashtablez_sampler",
|
||||
":have_sse",
|
||||
"//absl/base:config",
|
||||
"//absl/base:core_headers",
|
||||
"//absl/profiling:sample_recorder",
|
||||
"//absl/synchronization",
|
||||
"//absl/synchronization:thread_pool",
|
||||
"//absl/time",
|
||||
|
@ -534,21 +534,21 @@ cc_test(
|
|||
)
|
||||
|
||||
cc_library(
|
||||
name = "node_hash_policy",
|
||||
hdrs = ["internal/node_hash_policy.h"],
|
||||
name = "node_slot_policy",
|
||||
hdrs = ["internal/node_slot_policy.h"],
|
||||
copts = ABSL_DEFAULT_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
deps = ["//absl/base:config"],
|
||||
)
|
||||
|
||||
cc_test(
|
||||
name = "node_hash_policy_test",
|
||||
srcs = ["internal/node_hash_policy_test.cc"],
|
||||
name = "node_slot_policy_test",
|
||||
srcs = ["internal/node_slot_policy_test.cc"],
|
||||
copts = ABSL_TEST_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
deps = [
|
||||
":hash_policy_traits",
|
||||
":node_hash_policy",
|
||||
":node_slot_policy",
|
||||
"@com_google_googletest//:gtest_main",
|
||||
],
|
||||
)
|
||||
|
@ -565,14 +565,6 @@ cc_library(
|
|||
],
|
||||
)
|
||||
|
||||
cc_library(
|
||||
name = "have_sse",
|
||||
hdrs = ["internal/have_sse.h"],
|
||||
copts = ABSL_DEFAULT_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
visibility = ["//visibility:private"],
|
||||
)
|
||||
|
||||
cc_library(
|
||||
name = "common",
|
||||
hdrs = ["internal/common.h"],
|
||||
|
@ -597,10 +589,10 @@ cc_library(
|
|||
":hash_policy_traits",
|
||||
":hashtable_debug_hooks",
|
||||
":hashtablez_sampler",
|
||||
":have_sse",
|
||||
"//absl/base:config",
|
||||
"//absl/base:core_headers",
|
||||
"//absl/base:endian",
|
||||
"//absl/base:prefetch",
|
||||
"//absl/memory",
|
||||
"//absl/meta:type_traits",
|
||||
"//absl/numeric:bits",
|
||||
|
@ -613,7 +605,11 @@ cc_test(
|
|||
srcs = ["internal/raw_hash_set_test.cc"],
|
||||
copts = ABSL_TEST_COPTS,
|
||||
linkstatic = 1,
|
||||
tags = NOTEST_TAGS,
|
||||
tags = NOTEST_TAGS_MOBILE + [
|
||||
"no_test_loonix",
|
||||
# TODO(b/237097643): investigate race and remove
|
||||
"noarm_gemu",
|
||||
],
|
||||
deps = [
|
||||
":container_memory",
|
||||
":hash_function_defaults",
|
||||
|
@ -623,6 +619,7 @@ cc_test(
|
|||
"//absl/base",
|
||||
"//absl/base:config",
|
||||
"//absl/base:core_headers",
|
||||
"//absl/base:prefetch",
|
||||
"//absl/base:raw_logging_internal",
|
||||
"//absl/strings",
|
||||
"@com_google_googletest//:gtest_main",
|
||||
|
@ -703,7 +700,7 @@ cc_test(
|
|||
srcs = ["internal/layout_test.cc"],
|
||||
copts = ABSL_TEST_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = NOTEST_TAGS,
|
||||
tags = NOTEST_TAGS_MOBILE + ["no_test_loonix"],
|
||||
visibility = ["//visibility:private"],
|
||||
deps = [
|
||||
":layout",
|
||||
|
@ -850,7 +847,7 @@ cc_test(
|
|||
srcs = ["internal/unordered_set_test.cc"],
|
||||
copts = ABSL_TEST_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = NOTEST_TAGS_NONMOBILE,
|
||||
tags = ["no_test_loonix"],
|
||||
deps = [
|
||||
":unordered_set_constructor_test",
|
||||
":unordered_set_lookup_test",
|
||||
|
@ -865,7 +862,7 @@ cc_test(
|
|||
srcs = ["internal/unordered_map_test.cc"],
|
||||
copts = ABSL_TEST_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = NOTEST_TAGS_NONMOBILE,
|
||||
tags = ["no_test_loonix"],
|
||||
deps = [
|
||||
":unordered_map_constructor_test",
|
||||
":unordered_map_lookup_test",
|
||||
|
@ -875,6 +872,22 @@ cc_test(
|
|||
],
|
||||
)
|
||||
|
||||
cc_test(
|
||||
name = "sample_element_size_test",
|
||||
srcs = ["sample_element_size_test.cc"],
|
||||
copts = ABSL_TEST_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
tags = ["no_test_loonix"],
|
||||
visibility = ["//visibility:private"],
|
||||
deps = [
|
||||
":flat_hash_map",
|
||||
":flat_hash_set",
|
||||
":node_hash_map",
|
||||
":node_hash_set",
|
||||
"@com_google_googletest//:gtest_main",
|
||||
],
|
||||
)
|
||||
|
||||
cc_library(
|
||||
name = "btree",
|
||||
srcs = [
|
||||
|
@ -894,6 +907,7 @@ cc_library(
|
|||
":container_memory",
|
||||
":layout",
|
||||
"//absl/base:core_headers",
|
||||
"//absl/base:raw_logging_internal",
|
||||
"//absl/base:throw_delegate",
|
||||
"//absl/memory",
|
||||
"//absl/meta:type_traits",
|
||||
|
@ -929,6 +943,10 @@ cc_test(
|
|||
copts = ABSL_TEST_COPTS,
|
||||
linkopts = ABSL_DEFAULT_LINKOPTS,
|
||||
shard_count = 10,
|
||||
tags = [
|
||||
"no_test_ios",
|
||||
"no_test_wasm",
|
||||
],
|
||||
visibility = ["//visibility:private"],
|
||||
deps = [
|
||||
":btree",
|
||||
|
|
|
@ -66,6 +66,7 @@ absl_source_set("flat_hash_map") {
|
|||
":hash_function_defaults",
|
||||
":raw_hash_map",
|
||||
"//third_party/abseil-cpp/absl/algorithm:container",
|
||||
"//third_party/abseil-cpp/absl/base:core_headers",
|
||||
"//third_party/abseil-cpp/absl/memory",
|
||||
]
|
||||
}
|
||||
|
@ -87,9 +88,10 @@ absl_source_set("node_hash_map") {
|
|||
deps = [
|
||||
":container_memory",
|
||||
":hash_function_defaults",
|
||||
":node_hash_policy",
|
||||
":node_slot_policy",
|
||||
":raw_hash_map",
|
||||
"//third_party/abseil-cpp/absl/algorithm:container",
|
||||
"//third_party/abseil-cpp/absl/base:core_headers",
|
||||
"//third_party/abseil-cpp/absl/memory",
|
||||
]
|
||||
}
|
||||
|
@ -99,9 +101,10 @@ absl_source_set("node_hash_set") {
|
|||
deps = [
|
||||
":container_memory",
|
||||
":hash_function_defaults",
|
||||
":node_hash_policy",
|
||||
":node_slot_policy",
|
||||
":raw_hash_set",
|
||||
"//third_party/abseil-cpp/absl/algorithm:container",
|
||||
"//third_party/abseil-cpp/absl/base:core_headers",
|
||||
"//third_party/abseil-cpp/absl/memory",
|
||||
]
|
||||
}
|
||||
|
@ -169,22 +172,34 @@ absl_source_set("hashtablez_sampler") {
|
|||
"internal/hashtablez_sampler_force_weak_definition.cc",
|
||||
]
|
||||
deps = [
|
||||
":have_sse",
|
||||
"//third_party/abseil-cpp/absl/base",
|
||||
"//third_party/abseil-cpp/absl/base:config",
|
||||
"//third_party/abseil-cpp/absl/base:core_headers",
|
||||
"//third_party/abseil-cpp/absl/base:exponential_biased",
|
||||
"//third_party/abseil-cpp/absl/debugging:stacktrace",
|
||||
"//third_party/abseil-cpp/absl/memory",
|
||||
"//third_party/abseil-cpp/absl/profiling:exponential_biased",
|
||||
"//third_party/abseil-cpp/absl/profiling:sample_recorder",
|
||||
"//third_party/abseil-cpp/absl/synchronization",
|
||||
"//third_party/abseil-cpp/absl/utility",
|
||||
]
|
||||
}
|
||||
|
||||
absl_source_set("node_hash_policy") {
|
||||
public = [ "internal/node_hash_policy.h" ]
|
||||
absl_source_set("node_slot_policy") {
|
||||
public = [ "internal/node_slot_policy.h" ]
|
||||
deps = [ "//third_party/abseil-cpp/absl/base:config" ]
|
||||
}
|
||||
|
||||
absl_source_set("node_slot_policy_test") {
|
||||
testonly = true
|
||||
sources = [ "internal/node_slot_policy_test.cc" ]
|
||||
deps = [
|
||||
":hash_policy_traits",
|
||||
":node_slot_policy",
|
||||
"//third_party/googletest:gmock",
|
||||
"//third_party/googletest:gtest",
|
||||
]
|
||||
}
|
||||
|
||||
absl_source_set("raw_hash_map") {
|
||||
public = [ "internal/raw_hash_map.h" ]
|
||||
deps = [
|
||||
|
@ -194,11 +209,6 @@ absl_source_set("raw_hash_map") {
|
|||
]
|
||||
}
|
||||
|
||||
absl_source_set("have_sse") {
|
||||
public = [ "internal/have_sse.h" ]
|
||||
visibility = [ ":*" ]
|
||||
}
|
||||
|
||||
absl_source_set("common") {
|
||||
public = [ "internal/common.h" ]
|
||||
deps = [
|
||||
|
@ -217,10 +227,10 @@ absl_source_set("raw_hash_set") {
|
|||
":hash_policy_traits",
|
||||
":hashtable_debug_hooks",
|
||||
":hashtablez_sampler",
|
||||
":have_sse",
|
||||
"//third_party/abseil-cpp/absl/base:config",
|
||||
"//third_party/abseil-cpp/absl/base:core_headers",
|
||||
"//third_party/abseil-cpp/absl/base:endian",
|
||||
"//third_party/abseil-cpp/absl/base:prefetch",
|
||||
"//third_party/abseil-cpp/absl/memory",
|
||||
"//third_party/abseil-cpp/absl/meta:type_traits",
|
||||
"//third_party/abseil-cpp/absl/numeric:bits",
|
||||
|
@ -325,6 +335,18 @@ absl_source_set("unordered_set_modifiers_test") {
|
|||
]
|
||||
}
|
||||
|
||||
absl_source_set("sample_element_size_test") {
|
||||
testonly = true
|
||||
public = [ "sample_element_size_test.cc" ]
|
||||
deps = [
|
||||
":flat_hash_map",
|
||||
":flat_hash_set",
|
||||
":node_hash_map",
|
||||
":node_hash_set",
|
||||
"//third_party/googletest:gtest",
|
||||
]
|
||||
}
|
||||
|
||||
absl_source_set("btree") {
|
||||
sources = [
|
||||
"internal/btree.h",
|
||||
|
@ -340,6 +362,7 @@ absl_source_set("btree") {
|
|||
":container_memory",
|
||||
":layout",
|
||||
"//third_party/abseil-cpp/absl/base:core_headers",
|
||||
"//third_party/abseil-cpp/absl/base:raw_logging_internal",
|
||||
"//third_party/abseil-cpp/absl/base:throw_delegate",
|
||||
"//third_party/abseil-cpp/absl/memory",
|
||||
"//third_party/abseil-cpp/absl/meta:type_traits",
|
||||
|
@ -369,7 +392,7 @@ absl_source_set("inlined_vector_test") {
|
|||
"//third_party/abseil-cpp/absl/hash:hash_testing",
|
||||
"//third_party/abseil-cpp/absl/memory",
|
||||
"//third_party/abseil-cpp/absl/strings",
|
||||
"//third_party/googletest:gtest",
|
||||
"//third_party/googletest:gmock",
|
||||
"//third_party/googletest:gtest",
|
||||
]
|
||||
}
|
||||
|
|
|
@ -35,12 +35,14 @@ absl_cc_library(
|
|||
absl::core_headers
|
||||
absl::layout
|
||||
absl::memory
|
||||
absl::raw_logging_internal
|
||||
absl::strings
|
||||
absl::throw_delegate
|
||||
absl::type_traits
|
||||
absl::utility
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
btree_test_common
|
||||
|
@ -83,6 +85,7 @@ absl_cc_test(
|
|||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
compressed_tuple
|
||||
|
@ -161,6 +164,7 @@ absl_cc_test(
|
|||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
inlined_vector_internal
|
||||
|
@ -193,6 +197,7 @@ absl_cc_library(
|
|||
PUBLIC
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
counting_allocator
|
||||
|
@ -239,6 +244,7 @@ absl_cc_test(
|
|||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
test_instance_tracker
|
||||
|
@ -274,6 +280,7 @@ absl_cc_library(
|
|||
${ABSL_DEFAULT_COPTS}
|
||||
DEPS
|
||||
absl::container_memory
|
||||
absl::core_headers
|
||||
absl::hash_function_defaults
|
||||
absl::raw_hash_map
|
||||
absl::algorithm_container
|
||||
|
@ -347,8 +354,9 @@ absl_cc_library(
|
|||
${ABSL_DEFAULT_COPTS}
|
||||
DEPS
|
||||
absl::container_memory
|
||||
absl::core_headers
|
||||
absl::hash_function_defaults
|
||||
absl::node_hash_policy
|
||||
absl::node_slot_policy
|
||||
absl::raw_hash_map
|
||||
absl::algorithm_container
|
||||
absl::memory
|
||||
|
@ -381,8 +389,9 @@ absl_cc_library(
|
|||
COPTS
|
||||
${ABSL_DEFAULT_COPTS}
|
||||
DEPS
|
||||
absl::core_headers
|
||||
absl::hash_function_defaults
|
||||
absl::node_hash_policy
|
||||
absl::node_slot_policy
|
||||
absl::raw_hash_set
|
||||
absl::algorithm_container
|
||||
absl::memory
|
||||
|
@ -407,6 +416,7 @@ absl_cc_test(
|
|||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
container_memory
|
||||
|
@ -436,6 +446,7 @@ absl_cc_test(
|
|||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
hash_function_defaults
|
||||
|
@ -468,6 +479,7 @@ absl_cc_test(
|
|||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
hash_generator_testing
|
||||
|
@ -485,6 +497,7 @@ absl_cc_library(
|
|||
TESTONLY
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
hash_policy_testing
|
||||
|
@ -510,6 +523,7 @@ absl_cc_test(
|
|||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
hash_policy_traits
|
||||
|
@ -534,6 +548,7 @@ absl_cc_test(
|
|||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
hashtablez_sampler
|
||||
|
@ -546,8 +561,9 @@ absl_cc_library(
|
|||
${ABSL_DEFAULT_COPTS}
|
||||
DEPS
|
||||
absl::base
|
||||
absl::config
|
||||
absl::exponential_biased
|
||||
absl::have_sse
|
||||
absl::sample_recorder
|
||||
absl::synchronization
|
||||
)
|
||||
|
||||
|
@ -559,11 +575,12 @@ absl_cc_test(
|
|||
COPTS
|
||||
${ABSL_TEST_COPTS}
|
||||
DEPS
|
||||
absl::config
|
||||
absl::hashtablez_sampler
|
||||
absl::have_sse
|
||||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
hashtable_debug
|
||||
|
@ -575,6 +592,7 @@ absl_cc_library(
|
|||
absl::hashtable_debug_hooks
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
hashtable_debug_hooks
|
||||
|
@ -587,20 +605,12 @@ absl_cc_library(
|
|||
PUBLIC
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
have_sse
|
||||
node_slot_policy
|
||||
HDRS
|
||||
"internal/have_sse.h"
|
||||
COPTS
|
||||
${ABSL_DEFAULT_COPTS}
|
||||
)
|
||||
|
||||
absl_cc_library(
|
||||
NAME
|
||||
node_hash_policy
|
||||
HDRS
|
||||
"internal/node_hash_policy.h"
|
||||
"internal/node_slot_policy.h"
|
||||
COPTS
|
||||
${ABSL_DEFAULT_COPTS}
|
||||
DEPS
|
||||
|
@ -610,17 +620,18 @@ absl_cc_library(
|
|||
|
||||
absl_cc_test(
|
||||
NAME
|
||||
node_hash_policy_test
|
||||
node_slot_policy_test
|
||||
SRCS
|
||||
"internal/node_hash_policy_test.cc"
|
||||
"internal/node_slot_policy_test.cc"
|
||||
COPTS
|
||||
${ABSL_TEST_COPTS}
|
||||
DEPS
|
||||
absl::hash_policy_traits
|
||||
absl::node_hash_policy
|
||||
absl::node_slot_policy
|
||||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
raw_hash_map
|
||||
|
@ -635,6 +646,7 @@ absl_cc_library(
|
|||
PUBLIC
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
container_common
|
||||
|
@ -646,6 +658,7 @@ absl_cc_library(
|
|||
absl::type_traits
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
raw_hash_set
|
||||
|
@ -665,10 +678,10 @@ absl_cc_library(
|
|||
absl::endian
|
||||
absl::hash_policy_traits
|
||||
absl::hashtable_debug_hooks
|
||||
absl::have_sse
|
||||
absl::memory
|
||||
absl::meta
|
||||
absl::optional
|
||||
absl::prefetch
|
||||
absl::utility
|
||||
absl::hashtablez_sampler
|
||||
PUBLIC
|
||||
|
@ -690,6 +703,7 @@ absl_cc_test(
|
|||
absl::base
|
||||
absl::config
|
||||
absl::core_headers
|
||||
absl::prefetch
|
||||
absl::raw_logging_internal
|
||||
absl::strings
|
||||
GTest::gmock_main
|
||||
|
@ -709,6 +723,7 @@ absl_cc_test(
|
|||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
layout
|
||||
|
@ -742,6 +757,7 @@ absl_cc_test(
|
|||
GTest::gmock_main
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
tracked
|
||||
|
@ -754,6 +770,7 @@ absl_cc_library(
|
|||
TESTONLY
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
unordered_map_constructor_test
|
||||
|
@ -768,6 +785,7 @@ absl_cc_library(
|
|||
TESTONLY
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
unordered_map_lookup_test
|
||||
|
@ -782,6 +800,7 @@ absl_cc_library(
|
|||
TESTONLY
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
unordered_map_members_test
|
||||
|
@ -795,6 +814,7 @@ absl_cc_library(
|
|||
TESTONLY
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
unordered_map_modifiers_test
|
||||
|
@ -809,6 +829,7 @@ absl_cc_library(
|
|||
TESTONLY
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
unordered_set_constructor_test
|
||||
|
@ -823,6 +844,7 @@ absl_cc_library(
|
|||
TESTONLY
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
unordered_set_lookup_test
|
||||
|
@ -837,6 +859,7 @@ absl_cc_library(
|
|||
TESTONLY
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
unordered_set_members_test
|
||||
|
@ -850,6 +873,7 @@ absl_cc_library(
|
|||
TESTONLY
|
||||
)
|
||||
|
||||
# Internal-only target, do not depend on directly.
|
||||
absl_cc_library(
|
||||
NAME
|
||||
unordered_set_modifiers_test
|
||||
|
@ -893,3 +917,18 @@ absl_cc_test(
|
|||
absl::unordered_map_modifiers_test
|
||||
GTest::gmock_main
|
||||
)
|
||||
|
||||
absl_cc_test(
|
||||
NAME
|
||||
sample_element_size_test
|
||||
SRCS
|
||||
"sample_element_size_test.cc"
|
||||
COPTS
|
||||
${ABSL_TEST_COPTS}
|
||||
DEPS
|
||||
absl::flat_hash_map
|
||||
absl::flat_hash_set
|
||||
absl::node_hash_map
|
||||
absl::node_hash_set
|
||||
GTest::gmock_main
|
||||
)
|
||||
|
|
|
@ -153,9 +153,9 @@ void BM_FullLookup(benchmark::State& state) {
|
|||
BM_LookupImpl<T>(state, true);
|
||||
}
|
||||
|
||||
// Benchmark deletion of values from a container.
|
||||
// Benchmark erasing values from a container.
|
||||
template <typename T>
|
||||
void BM_Delete(benchmark::State& state) {
|
||||
void BM_Erase(benchmark::State& state) {
|
||||
using V = typename remove_pair_const<typename T::value_type>::type;
|
||||
typename KeyOfValue<typename T::key_type, V>::type key_of_value;
|
||||
std::vector<V> values = GenerateValues<V>(kBenchmarkValues);
|
||||
|
@ -180,9 +180,9 @@ void BM_Delete(benchmark::State& state) {
|
|||
}
|
||||
}
|
||||
|
||||
// Benchmark deletion of multiple values from a container.
|
||||
// Benchmark erasing multiple values from a container.
|
||||
template <typename T>
|
||||
void BM_DeleteRange(benchmark::State& state) {
|
||||
void BM_EraseRange(benchmark::State& state) {
|
||||
using V = typename remove_pair_const<typename T::value_type>::type;
|
||||
typename KeyOfValue<typename T::key_type, V>::type key_of_value;
|
||||
std::vector<V> values = GenerateValues<V>(kBenchmarkValues);
|
||||
|
@@ -222,6 +222,40 @@ void BM_DeleteRange(benchmark::State& state) {
  }
}

// Predicate that erases every other element. We can't use a lambda because
// C++11 doesn't support generic lambdas.
// TODO(b/207389011): consider adding benchmarks that remove different fractions
// of keys (e.g. 10%, 90%).
struct EraseIfPred {
  uint64_t i = 0;
  template <typename T>
  bool operator()(const T&) {
    return ++i % 2;
  }
};

// Benchmark erasing multiple values from a container with a predicate.
template <typename T>
void BM_EraseIf(benchmark::State& state) {
  using V = typename remove_pair_const<typename T::value_type>::type;
  std::vector<V> values = GenerateValues<V>(kBenchmarkValues);

  // Removes half of the keys per batch.
  const int batch_size = (kBenchmarkValues + 1) / 2;
  EraseIfPred pred;
  while (state.KeepRunningBatch(batch_size)) {
    state.PauseTiming();
    {
      T container(values.begin(), values.end());
      state.ResumeTiming();
      erase_if(container, pred);
      benchmark::DoNotOptimize(container);
      state.PauseTiming();
    }
    state.ResumeTiming();
  }
}

// Benchmark steady-state insert (into first half of range) and remove (from
// second half of range), treating the container approximately like a queue with
// log-time access for all elements. This benchmark does not test the case where

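Note (aside, not part of this diff): EraseIfPred exists only because this benchmark still compiles as C++11; under C++14 the same "erase every other element" predicate could be a generic lambda with an init-capture:

#include <cstdint>

auto erase_every_other = [i = uint64_t{0}](const auto&) mutable {
  return (++i % 2) != 0;
};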
@ -477,14 +511,14 @@ BTREE_TYPES(Time);
|
|||
void BM_##type##_##func(benchmark::State& state) { BM_##func<type>(state); } \
|
||||
BENCHMARK(BM_##type##_##func)
|
||||
|
||||
#define MY_BENCHMARK3(type) \
|
||||
#define MY_BENCHMARK3_STL(type) \
|
||||
MY_BENCHMARK4(type, Insert); \
|
||||
MY_BENCHMARK4(type, InsertSorted); \
|
||||
MY_BENCHMARK4(type, InsertSmall); \
|
||||
MY_BENCHMARK4(type, Lookup); \
|
||||
MY_BENCHMARK4(type, FullLookup); \
|
||||
MY_BENCHMARK4(type, Delete); \
|
||||
MY_BENCHMARK4(type, DeleteRange); \
|
||||
MY_BENCHMARK4(type, Erase); \
|
||||
MY_BENCHMARK4(type, EraseRange); \
|
||||
MY_BENCHMARK4(type, QueueAddRem); \
|
||||
MY_BENCHMARK4(type, MixedAddRem); \
|
||||
MY_BENCHMARK4(type, Fifo); \
|
||||
|
@ -492,9 +526,13 @@ BTREE_TYPES(Time);
|
|||
MY_BENCHMARK4(type, InsertRangeRandom); \
|
||||
MY_BENCHMARK4(type, InsertRangeSorted)
|
||||
|
||||
#define MY_BENCHMARK3(type) \
|
||||
MY_BENCHMARK4(type, EraseIf); \
|
||||
MY_BENCHMARK3_STL(type)
|
||||
|
||||
#define MY_BENCHMARK2_SUPPORTS_MULTI_ONLY(type) \
|
||||
MY_BENCHMARK3(stl_##type); \
|
||||
MY_BENCHMARK3(stl_unordered_##type); \
|
||||
MY_BENCHMARK3_STL(stl_##type); \
|
||||
MY_BENCHMARK3_STL(stl_unordered_##type); \
|
||||
MY_BENCHMARK3(btree_256_##type)
|
||||
|
||||
#define MY_BENCHMARK2(type) \
|
||||
|
@ -684,12 +722,12 @@ double ContainerInfo(const btree_map<int, BigTypePtr<Size>>& b) {
|
|||
btree_set<BigTypePtr<SIZE>>; \
|
||||
using btree_256_map_size##SIZE##copies##SIZE##ptr = \
|
||||
btree_map<int, BigTypePtr<SIZE>>; \
|
||||
MY_BENCHMARK3(stl_set_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3(stl_unordered_set_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3_STL(stl_set_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3_STL(stl_unordered_set_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3(flat_hash_set_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3(btree_256_set_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3(stl_map_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3(stl_unordered_map_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3_STL(stl_map_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3_STL(stl_unordered_map_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3(flat_hash_map_size##SIZE##copies##SIZE##ptr); \
|
||||
MY_BENCHMARK3(btree_256_map_size##SIZE##copies##SIZE##ptr)
|
||||
|
||||
|
|
|
@ -35,14 +35,17 @@
|
|||
//
|
||||
// However, these types should not be considered drop-in replacements for
|
||||
// `std::map` and `std::multimap` as there are some API differences, which are
|
||||
// noted in this header file.
|
||||
// noted in this header file. The most consequential differences with respect to
|
||||
// migrating to b-tree from the STL types are listed in the next paragraph.
|
||||
// Other API differences are minor.
|
||||
//
|
||||
// Importantly, insertions and deletions may invalidate outstanding iterators,
|
||||
// pointers, and references to elements. Such invalidations are typically only
|
||||
// an issue if insertion and deletion operations are interleaved with the use of
|
||||
// more than one iterator, pointer, or reference simultaneously. For this
|
||||
// reason, `insert()` and `erase()` return a valid iterator at the current
|
||||
// position.
|
||||
// position. Another important difference is that key-types must be
|
||||
// copy-constructible.
|
||||
|
||||
#ifndef ABSL_CONTAINER_BTREE_MAP_H_
|
||||
#define ABSL_CONTAINER_BTREE_MAP_H_
|
||||
|
@ -53,6 +56,14 @@
|
|||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
|
||||
namespace container_internal {
|
||||
|
||||
template <typename Key, typename Data, typename Compare, typename Alloc,
|
||||
int TargetNodeSize, bool IsMulti>
|
||||
struct map_params;
|
||||
|
||||
} // namespace container_internal
|
||||
|
||||
// absl::btree_map<>
|
||||
//
|
||||
// An `absl::btree_map<K, V>` is an ordered associative container of
|
||||
|
@ -74,7 +85,7 @@ class btree_map
|
|||
: public container_internal::btree_map_container<
|
||||
container_internal::btree<container_internal::map_params<
|
||||
Key, Value, Compare, Alloc, /*TargetNodeSize=*/256,
|
||||
/*Multi=*/false>>> {
|
||||
/*IsMulti=*/false>>> {
|
||||
using Base = typename btree_map::btree_map_container;
|
||||
|
||||
public:
|
||||
|
@ -366,8 +377,8 @@ class btree_map
|
|||
// Determines whether an element comparing equal to the given `key` exists
|
||||
// within the `btree_map`, returning `true` if so or `false` otherwise.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the map is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the map has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::contains;
|
||||
|
||||
// btree_map::count()
|
||||
|
@ -378,8 +389,8 @@ class btree_map
|
|||
// the `btree_map`. Note that this function will return either `1` or `0`
|
||||
// since duplicate elements are not allowed within a `btree_map`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the map is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the map has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::count;
|
||||
|
||||
// btree_map::equal_range()
|
||||
|
@ -395,10 +406,34 @@ class btree_map
|
|||
//
|
||||
// Finds an element with the passed `key` within the `btree_map`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the map is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the map has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::find;
|
||||
|
||||
// btree_map::lower_bound()
|
||||
//
|
||||
// template <typename K> iterator lower_bound(const K& key):
|
||||
// template <typename K> const_iterator lower_bound(const K& key) const:
|
||||
//
|
||||
// Finds the first element with a key that is not less than `key` within the
|
||||
// `btree_map`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the map has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::lower_bound;
|
||||
|
||||
// btree_map::upper_bound()
|
||||
//
|
||||
// template <typename K> iterator upper_bound(const K& key):
|
||||
// template <typename K> const_iterator upper_bound(const K& key) const:
|
||||
//
|
||||
// Finds the first element with a key that is greater than `key` within the
|
||||
// `btree_map`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the map has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::upper_bound;
|
||||
|
||||
// btree_map::operator[]()
|
||||
//
|
||||
// Returns a reference to the value mapped to the passed key within the
|
||||
|
@ -443,15 +478,11 @@ void swap(btree_map<K, V, C, A> &x, btree_map<K, V, C, A> &y) {
|
|||
// absl::erase_if(absl::btree_map<>, Pred)
|
||||
//
|
||||
// Erases all elements that satisfy the predicate pred from the container.
|
||||
// Returns the number of erased elements.
|
||||
template <typename K, typename V, typename C, typename A, typename Pred>
|
||||
void erase_if(btree_map<K, V, C, A> &map, Pred pred) {
|
||||
for (auto it = map.begin(); it != map.end();) {
|
||||
if (pred(*it)) {
|
||||
it = map.erase(it);
|
||||
} else {
|
||||
++it;
|
||||
}
|
||||
}
|
||||
typename btree_map<K, V, C, A>::size_type erase_if(
|
||||
btree_map<K, V, C, A> &map, Pred pred) {
|
||||
return container_internal::btree_access::erase_if(map, std::move(pred));
|
||||
}
|
||||
|
||||
// absl::btree_multimap
|
||||
|
@ -476,7 +507,7 @@ class btree_multimap
|
|||
: public container_internal::btree_multimap_container<
|
||||
container_internal::btree<container_internal::map_params<
|
||||
Key, Value, Compare, Alloc, /*TargetNodeSize=*/256,
|
||||
/*Multi=*/true>>> {
|
||||
/*IsMulti=*/true>>> {
|
||||
using Base = typename btree_multimap::btree_multimap_container;
|
||||
|
||||
public:
|
||||
|
@ -669,9 +700,8 @@ class btree_multimap
|
|||
|
||||
// btree_multimap::merge()
|
||||
//
|
||||
// Extracts elements from a given `source` btree_multimap into this
|
||||
// `btree_multimap`. If the destination `btree_multimap` already contains an
|
||||
// element with an equivalent key, that element is not extracted.
|
||||
// Extracts all elements from a given `source` btree_multimap into this
|
||||
// `btree_multimap`.
|
||||
using Base::merge;
|
||||
|
||||
// btree_multimap::swap(btree_multimap& other)
|
||||
|
@ -691,8 +721,8 @@ class btree_multimap
|
|||
// Determines whether an element comparing equal to the given `key` exists
|
||||
// within the `btree_multimap`, returning `true` if so or `false` otherwise.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the map is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the map has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::contains;
|
||||
|
||||
// btree_multimap::count()
|
||||
|
@ -702,8 +732,8 @@ class btree_multimap
|
|||
// Returns the number of elements comparing equal to the given `key` within
|
||||
// the `btree_multimap`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the map is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the map has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::count;
|
||||
|
||||
// btree_multimap::equal_range()
|
||||
|
@ -720,10 +750,34 @@ class btree_multimap
|
|||
//
|
||||
// Finds an element with the passed `key` within the `btree_multimap`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the map is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the map has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::find;
|
||||
|
||||
// btree_multimap::lower_bound()
|
||||
//
|
||||
// template <typename K> iterator lower_bound(const K& key):
|
||||
// template <typename K> const_iterator lower_bound(const K& key) const:
|
||||
//
|
||||
// Finds the first element with a key that is not less than `key` within the
|
||||
// `btree_multimap`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the map has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::lower_bound;
|
||||
|
||||
// btree_multimap::upper_bound()
|
||||
//
|
||||
// template <typename K> iterator upper_bound(const K& key):
|
||||
// template <typename K> const_iterator upper_bound(const K& key) const:
|
||||
//
|
||||
// Finds the first element with a key that is greater than `key` within the
|
||||
// `btree_multimap`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the map has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::upper_bound;
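To make the lower_bound/upper_bound wording above concrete, a small sketch with illustrative values (not taken from the header):

    absl::btree_multimap<int, int> m = {{1, 10}, {2, 20}, {2, 21}, {3, 30}};
    auto lo = m.lower_bound(2);  // first element whose key is not less than 2
    auto hi = m.upper_bound(2);  // first element whose key is greater than 2
    // [lo, hi) covers the two entries with key 2, the same range as m.equal_range(2).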
|
||||
|
||||
// btree_multimap::get_allocator()
|
||||
//
|
||||
// Returns the allocator function associated with this `btree_multimap`.
|
||||
|
@ -751,17 +805,46 @@ void swap(btree_multimap<K, V, C, A> &x, btree_multimap<K, V, C, A> &y) {
// absl::erase_if(absl::btree_multimap<>, Pred)
//
// Erases all elements that satisfy the predicate pred from the container.
// Returns the number of erased elements.
template <typename K, typename V, typename C, typename A, typename Pred>
void erase_if(btree_multimap<K, V, C, A> &map, Pred pred) {
  for (auto it = map.begin(); it != map.end();) {
    if (pred(*it)) {
      it = map.erase(it);
    } else {
      ++it;
    }
  }
typename btree_multimap<K, V, C, A>::size_type erase_if(
    btree_multimap<K, V, C, A> &map, Pred pred) {
  return container_internal::btree_access::erase_if(map, std::move(pred));
}

namespace container_internal {
|
||||
|
||||
// A parameters structure for holding the type parameters for a btree_map.
|
||||
// Compare and Alloc should be nothrow copy-constructible.
|
||||
template <typename Key, typename Data, typename Compare, typename Alloc,
|
||||
int TargetNodeSize, bool IsMulti>
|
||||
struct map_params : common_params<Key, Compare, Alloc, TargetNodeSize, IsMulti,
|
||||
/*IsMap=*/true, map_slot_policy<Key, Data>> {
|
||||
using super_type = typename map_params::common_params;
|
||||
using mapped_type = Data;
|
||||
// This type allows us to move keys when it is safe to do so. It is safe
|
||||
// for maps in which value_type and mutable_value_type are layout compatible.
|
||||
using slot_policy = typename super_type::slot_policy;
|
||||
using slot_type = typename super_type::slot_type;
|
||||
using value_type = typename super_type::value_type;
|
||||
using init_type = typename super_type::init_type;
|
||||
|
||||
template <typename V>
|
||||
static auto key(const V &value) -> decltype(value.first) {
|
||||
return value.first;
|
||||
}
|
||||
static const Key &key(const slot_type *s) { return slot_policy::key(s); }
|
||||
static const Key &key(slot_type *s) { return slot_policy::key(s); }
|
||||
// For use in node handle.
|
||||
static auto mutable_key(slot_type *s)
|
||||
-> decltype(slot_policy::mutable_key(s)) {
|
||||
return slot_policy::mutable_key(s);
|
||||
}
|
||||
static mapped_type &value(value_type *value) { return value->second; }
|
||||
};
|
||||
|
||||
} // namespace container_internal
|
||||
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
@ -35,7 +35,9 @@
|
|||
//
|
||||
// However, these types should not be considered drop-in replacements for
|
||||
// `std::set` and `std::multiset` as there are some API differences, which are
|
||||
// noted in this header file.
|
||||
// noted in this header file. The most consequential differences with respect to
|
||||
// migrating to b-tree from the STL types are listed in the next paragraph.
|
||||
// Other API differences are minor.
|
||||
//
|
||||
// Importantly, insertions and deletions may invalidate outstanding iterators,
|
||||
// pointers, and references to elements. Such invalidations are typically only
|
||||
|
@ -53,6 +55,17 @@
|
|||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
|
||||
namespace container_internal {
|
||||
|
||||
template <typename Key>
|
||||
struct set_slot_policy;
|
||||
|
||||
template <typename Key, typename Compare, typename Alloc, int TargetNodeSize,
|
||||
bool IsMulti>
|
||||
struct set_params;
|
||||
|
||||
} // namespace container_internal
|
||||
|
||||
// absl::btree_set<>
|
||||
//
|
||||
// An `absl::btree_set<K>` is an ordered associative container of unique key
|
||||
|
@ -74,7 +87,7 @@ class btree_set
|
|||
: public container_internal::btree_set_container<
|
||||
container_internal::btree<container_internal::set_params<
|
||||
Key, Compare, Alloc, /*TargetNodeSize=*/256,
|
||||
/*Multi=*/false>>> {
|
||||
/*IsMulti=*/false>>> {
|
||||
using Base = typename btree_set::btree_set_container;
|
||||
|
||||
public:
|
||||
|
@ -300,8 +313,8 @@ class btree_set
|
|||
// Determines whether an element comparing equal to the given `key` exists
|
||||
// within the `btree_set`, returning `true` if so or `false` otherwise.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the set is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the set has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::contains;
|
||||
|
||||
// btree_set::count()
|
||||
|
@ -312,8 +325,8 @@ class btree_set
|
|||
// the `btree_set`. Note that this function will return either `1` or `0`
|
||||
// since duplicate elements are not allowed within a `btree_set`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the set is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the set has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::count;
|
||||
|
||||
// btree_set::equal_range()
|
||||
|
@ -330,10 +343,32 @@ class btree_set
|
|||
//
|
||||
// Finds an element with the passed `key` within the `btree_set`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the set is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the set has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::find;
|
||||
|
||||
// btree_set::lower_bound()
|
||||
//
|
||||
// template <typename K> iterator lower_bound(const K& key):
|
||||
// template <typename K> const_iterator lower_bound(const K& key) const:
|
||||
//
|
||||
// Finds the first element that is not less than `key` within the `btree_set`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the set has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::lower_bound;
|
||||
|
||||
// btree_set::upper_bound()
|
||||
//
|
||||
// template <typename K> iterator upper_bound(const K& key):
|
||||
// template <typename K> const_iterator upper_bound(const K& key) const:
|
||||
//
|
||||
// Finds the first element that is greater than `key` within the `btree_set`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the set has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::upper_bound;
|
||||
|
||||
// btree_set::get_allocator()
|
||||
//
|
||||
// Returns the allocator function associated with this `btree_set`.
|
||||
|
@ -363,15 +398,11 @@ void swap(btree_set<K, C, A> &x, btree_set<K, C, A> &y) {
// absl::erase_if(absl::btree_set<>, Pred)
//
// Erases all elements that satisfy the predicate pred from the container.
// Returns the number of erased elements.
template <typename K, typename C, typename A, typename Pred>
void erase_if(btree_set<K, C, A> &set, Pred pred) {
  for (auto it = set.begin(); it != set.end();) {
    if (pred(*it)) {
      it = set.erase(it);
    } else {
      ++it;
    }
  }
typename btree_set<K, C, A>::size_type erase_if(btree_set<K, C, A> &set,
                                                Pred pred) {
  return container_internal::btree_access::erase_if(set, std::move(pred));
}

// absl::btree_multiset<>
|
||||
|
@ -396,7 +427,7 @@ class btree_multiset
|
|||
: public container_internal::btree_multiset_container<
|
||||
container_internal::btree<container_internal::set_params<
|
||||
Key, Compare, Alloc, /*TargetNodeSize=*/256,
|
||||
/*Multi=*/true>>> {
|
||||
/*IsMulti=*/true>>> {
|
||||
using Base = typename btree_multiset::btree_multiset_container;
|
||||
|
||||
public:
|
||||
|
@ -582,9 +613,8 @@ class btree_multiset
|
|||
|
||||
// btree_multiset::merge()
|
||||
//
|
||||
// Extracts elements from a given `source` btree_multiset into this
|
||||
// `btree_multiset`. If the destination `btree_multiset` already contains an
|
||||
// element with an equivalent key, that element is not extracted.
|
||||
// Extracts all elements from a given `source` btree_multiset into this
|
||||
// `btree_multiset`.
|
||||
using Base::merge;
|
||||
|
||||
// btree_multiset::swap(btree_multiset& other)
|
||||
|
@ -604,8 +634,8 @@ class btree_multiset
|
|||
// Determines whether an element comparing equal to the given `key` exists
|
||||
// within the `btree_multiset`, returning `true` if so or `false` otherwise.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the set is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the set has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::contains;
|
||||
|
||||
// btree_multiset::count()
|
||||
|
@ -615,8 +645,8 @@ class btree_multiset
|
|||
// Returns the number of elements comparing equal to the given `key` within
|
||||
// the `btree_multiset`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the set is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the set has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::count;
|
||||
|
||||
// btree_multiset::equal_range()
|
||||
|
@ -633,10 +663,34 @@ class btree_multiset
|
|||
//
|
||||
// Finds an element with the passed `key` within the `btree_multiset`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the set is provided a
|
||||
// compatible heterogeneous comparator.
|
||||
// Supports heterogeneous lookup, provided that the set has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::find;
|
||||
|
||||
// btree_multiset::lower_bound()
|
||||
//
|
||||
// template <typename K> iterator lower_bound(const K& key):
|
||||
// template <typename K> const_iterator lower_bound(const K& key) const:
|
||||
//
|
||||
// Finds the first element that is not less than `key` within the
|
||||
// `btree_multiset`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the set has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::lower_bound;
|
||||
|
||||
// btree_multiset::upper_bound()
|
||||
//
|
||||
// template <typename K> iterator upper_bound(const K& key):
|
||||
// template <typename K> const_iterator upper_bound(const K& key) const:
|
||||
//
|
||||
// Finds the first element that is greater than `key` within the
|
||||
// `btree_multiset`.
|
||||
//
|
||||
// Supports heterogeneous lookup, provided that the set has a compatible
|
||||
// heterogeneous comparator.
|
||||
using Base::upper_bound;
|
||||
|
||||
// btree_multiset::get_allocator()
|
||||
//
|
||||
// Returns the allocator function associated with this `btree_multiset`.
|
||||
|
@ -666,17 +720,73 @@ void swap(btree_multiset<K, C, A> &x, btree_multiset<K, C, A> &y) {
// absl::erase_if(absl::btree_multiset<>, Pred)
//
// Erases all elements that satisfy the predicate pred from the container.
// Returns the number of erased elements.
template <typename K, typename C, typename A, typename Pred>
void erase_if(btree_multiset<K, C, A> &set, Pred pred) {
  for (auto it = set.begin(); it != set.end();) {
    if (pred(*it)) {
      it = set.erase(it);
    } else {
      ++it;
    }
  }
typename btree_multiset<K, C, A>::size_type erase_if(
    btree_multiset<K, C, A> & set, Pred pred) {
  return container_internal::btree_access::erase_if(set, std::move(pred));
}

namespace container_internal {
|
||||
|
||||
// This type implements the necessary functions from the
|
||||
// absl::container_internal::slot_type interface for btree_(multi)set.
|
||||
template <typename Key>
|
||||
struct set_slot_policy {
|
||||
using slot_type = Key;
|
||||
using value_type = Key;
|
||||
using mutable_value_type = Key;
|
||||
|
||||
static value_type &element(slot_type *slot) { return *slot; }
|
||||
static const value_type &element(const slot_type *slot) { return *slot; }
|
||||
|
||||
template <typename Alloc, class... Args>
|
||||
static void construct(Alloc *alloc, slot_type *slot, Args &&...args) {
|
||||
absl::allocator_traits<Alloc>::construct(*alloc, slot,
|
||||
std::forward<Args>(args)...);
|
||||
}
|
||||
|
||||
template <typename Alloc>
|
||||
static void construct(Alloc *alloc, slot_type *slot, slot_type *other) {
|
||||
absl::allocator_traits<Alloc>::construct(*alloc, slot, std::move(*other));
|
||||
}
|
||||
|
||||
template <typename Alloc>
|
||||
static void construct(Alloc *alloc, slot_type *slot, const slot_type *other) {
|
||||
absl::allocator_traits<Alloc>::construct(*alloc, slot, *other);
|
||||
}
|
||||
|
||||
template <typename Alloc>
|
||||
static void destroy(Alloc *alloc, slot_type *slot) {
|
||||
absl::allocator_traits<Alloc>::destroy(*alloc, slot);
|
||||
}
|
||||
|
||||
template <typename Alloc>
|
||||
static void transfer(Alloc *alloc, slot_type *new_slot, slot_type *old_slot) {
|
||||
construct(alloc, new_slot, old_slot);
|
||||
destroy(alloc, old_slot);
|
||||
}
|
||||
};
|
||||
|
||||
// A parameters structure for holding the type parameters for a btree_set.
|
||||
// Compare and Alloc should be nothrow copy-constructible.
|
||||
template <typename Key, typename Compare, typename Alloc, int TargetNodeSize,
|
||||
bool IsMulti>
|
||||
struct set_params : common_params<Key, Compare, Alloc, TargetNodeSize, IsMulti,
|
||||
/*IsMap=*/false, set_slot_policy<Key>> {
|
||||
using value_type = Key;
|
||||
using slot_type = typename set_params::common_params::slot_type;
|
||||
|
||||
template <typename V>
|
||||
static const V &key(const V &value) {
|
||||
return value;
|
||||
}
|
||||
static const Key &key(const slot_type *slot) { return *slot; }
|
||||
static const Key &key(slot_type *slot) { return *slot; }
|
||||
};
|
||||
|
||||
} // namespace container_internal
|
||||
|
||||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
@ -14,10 +14,14 @@
|
|||
|
||||
#include "absl/container/btree_test.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <array>
|
||||
#include <cstdint>
|
||||
#include <functional>
|
||||
#include <limits>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <numeric>
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
#include <type_traits>
|
||||
|
@ -1212,6 +1216,11 @@ class BtreeNodePeer {
|
|||
constexpr static bool UsesLinearNodeSearch() {
|
||||
return btree_node<typename Btree::params_type>::use_linear_search::value;
|
||||
}
|
||||
|
||||
template <typename Btree>
|
||||
constexpr static bool UsesGenerations() {
|
||||
return Btree::params_type::kEnableGenerations;
|
||||
}
|
||||
};
|
||||
|
||||
namespace {
|
||||
|
@ -1285,7 +1294,7 @@ TEST(Btree, BtreeMapCanHoldMoveOnlyTypes) {
|
|||
|
||||
std::unique_ptr<std::string> &v = m["A"];
|
||||
EXPECT_TRUE(v == nullptr);
|
||||
v.reset(new std::string("X"));
|
||||
v = absl::make_unique<std::string>("X");
|
||||
|
||||
auto iter = m.find("A");
|
||||
EXPECT_EQ("X", *iter->second);
|
||||
|
@ -1344,38 +1353,34 @@ TEST(Btree, InitializerListInsert) {
|
|||
EXPECT_EQ(++it, range.second);
|
||||
}
|
||||
|
||||
template <typename Compare, typename K>
|
||||
void AssertKeyCompareToAdapted() {
|
||||
using Adapted = typename key_compare_to_adapter<Compare>::type;
|
||||
static_assert(!std::is_same<Adapted, Compare>::value,
|
||||
"key_compare_to_adapter should have adapted this comparator.");
|
||||
template <typename Compare, typename Key>
|
||||
void AssertKeyCompareStringAdapted() {
|
||||
using Adapted = typename key_compare_adapter<Compare, Key>::type;
|
||||
static_assert(
|
||||
std::is_same<absl::weak_ordering,
|
||||
absl::result_of_t<Adapted(const K &, const K &)>>::value,
|
||||
"Adapted comparator should be a key-compare-to comparator.");
|
||||
std::is_same<Adapted, StringBtreeDefaultLess>::value ||
|
||||
std::is_same<Adapted, StringBtreeDefaultGreater>::value,
|
||||
"key_compare_adapter should have string-adapted this comparator.");
|
||||
}
|
||||
template <typename Compare, typename K>
|
||||
void AssertKeyCompareToNotAdapted() {
|
||||
using Unadapted = typename key_compare_to_adapter<Compare>::type;
|
||||
template <typename Compare, typename Key>
|
||||
void AssertKeyCompareNotStringAdapted() {
|
||||
using Adapted = typename key_compare_adapter<Compare, Key>::type;
|
||||
static_assert(
|
||||
std::is_same<Unadapted, Compare>::value,
|
||||
"key_compare_to_adapter shouldn't have adapted this comparator.");
|
||||
static_assert(
|
||||
std::is_same<bool,
|
||||
absl::result_of_t<Unadapted(const K &, const K &)>>::value,
|
||||
"Un-adapted comparator should return bool.");
|
||||
!std::is_same<Adapted, StringBtreeDefaultLess>::value &&
|
||||
!std::is_same<Adapted, StringBtreeDefaultGreater>::value,
|
||||
"key_compare_adapter shouldn't have string-adapted this comparator.");
|
||||
}
|
||||
|
||||
TEST(Btree, KeyCompareToAdapter) {
|
||||
AssertKeyCompareToAdapted<std::less<std::string>, std::string>();
|
||||
AssertKeyCompareToAdapted<std::greater<std::string>, std::string>();
|
||||
AssertKeyCompareToAdapted<std::less<absl::string_view>, absl::string_view>();
|
||||
AssertKeyCompareToAdapted<std::greater<absl::string_view>,
|
||||
TEST(Btree, KeyCompareAdapter) {
|
||||
AssertKeyCompareStringAdapted<std::less<std::string>, std::string>();
|
||||
AssertKeyCompareStringAdapted<std::greater<std::string>, std::string>();
|
||||
AssertKeyCompareStringAdapted<std::less<absl::string_view>,
|
||||
absl::string_view>();
|
||||
AssertKeyCompareToAdapted<std::less<absl::Cord>, absl::Cord>();
|
||||
AssertKeyCompareToAdapted<std::greater<absl::Cord>, absl::Cord>();
|
||||
AssertKeyCompareToNotAdapted<std::less<int>, int>();
|
||||
AssertKeyCompareToNotAdapted<std::greater<int>, int>();
|
||||
AssertKeyCompareStringAdapted<std::greater<absl::string_view>,
|
||||
absl::string_view>();
|
||||
AssertKeyCompareStringAdapted<std::less<absl::Cord>, absl::Cord>();
|
||||
AssertKeyCompareStringAdapted<std::greater<absl::Cord>, absl::Cord>();
|
||||
AssertKeyCompareNotStringAdapted<std::less<int>, int>();
|
||||
AssertKeyCompareNotStringAdapted<std::greater<int>, int>();
|
||||
}
|
||||
|
||||
TEST(Btree, RValueInsert) {
|
||||
|
@ -1425,11 +1430,19 @@ TEST(Btree, RValueInsert) {
|
|||
EXPECT_EQ(tracker.swaps(), 0);
|
||||
}
|
||||
|
||||
// A btree set with a specific number of values per node.
|
||||
template <typename Cmp>
|
||||
struct CheckedCompareOptedOutCmp : Cmp, BtreeTestOnlyCheckedCompareOptOutBase {
|
||||
using Cmp::Cmp;
|
||||
CheckedCompareOptedOutCmp() {}
|
||||
CheckedCompareOptedOutCmp(Cmp cmp) : Cmp(std::move(cmp)) {} // NOLINT
|
||||
};
|
||||
|
||||
// A btree set with a specific number of values per node. Opt out of
|
||||
// checked_compare so that we can expect exact numbers of comparisons.
|
||||
template <typename Key, int TargetValuesPerNode, typename Cmp = std::less<Key>>
|
||||
class SizedBtreeSet
|
||||
: public btree_set_container<btree<
|
||||
set_params<Key, Cmp, std::allocator<Key>,
|
||||
set_params<Key, CheckedCompareOptedOutCmp<Cmp>, std::allocator<Key>,
|
||||
BtreeNodePeer::GetTargetNodeSize<Key>(TargetValuesPerNode),
|
||||
/*Multi=*/false>>> {
|
||||
using Base = typename SizedBtreeSet::btree_set_container;
|
||||
|
@ -1473,8 +1486,10 @@ TEST(Btree, MovesComparisonsCopiesSwapsTracking) {
|
|||
EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<decltype(set61)>(), 61);
|
||||
EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<decltype(set100)>(), 100);
|
||||
if (sizeof(void *) == 8) {
|
||||
EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<absl::btree_set<int32_t>>(),
|
||||
BtreeNodePeer::GetNumSlotsPerNode<decltype(set61)>());
|
||||
EXPECT_EQ(
|
||||
BtreeNodePeer::GetNumSlotsPerNode<absl::btree_set<int32_t>>(),
|
||||
// When we have generations, there is one fewer slot.
|
||||
BtreeNodePeer::UsesGenerations<absl::btree_set<int32_t>>() ? 60 : 61);
|
||||
}
|
||||
|
||||
// Test key insertion/deletion in random order.
|
||||
|
@ -1528,8 +1543,10 @@ TEST(Btree, MovesComparisonsCopiesSwapsTrackingThreeWayCompare) {
|
|||
EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<decltype(set61)>(), 61);
|
||||
EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<decltype(set100)>(), 100);
|
||||
if (sizeof(void *) == 8) {
|
||||
EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<absl::btree_set<int32_t>>(),
|
||||
BtreeNodePeer::GetNumSlotsPerNode<decltype(set61)>());
|
||||
EXPECT_EQ(
|
||||
BtreeNodePeer::GetNumSlotsPerNode<absl::btree_set<int32_t>>(),
|
||||
// When we have generations, there is one fewer slot.
|
||||
BtreeNodePeer::UsesGenerations<absl::btree_set<int32_t>>() ? 60 : 61);
|
||||
}
|
||||
|
||||
// Test key insertion/deletion in random order.
|
||||
|
@ -1748,6 +1765,22 @@ TEST(Btree, ValueComp) {
|
|||
EXPECT_FALSE(m2.value_comp()(std::make_pair("b", 0), std::make_pair("a", 0)));
|
||||
}
|
||||
|
||||
// Test that we have the protected members from the std::map::value_compare API.
|
||||
// See https://en.cppreference.com/w/cpp/container/map/value_compare.
|
||||
TEST(Btree, MapValueCompProtected) {
|
||||
struct key_compare {
|
||||
bool operator()(int l, int r) const { return l < r; }
|
||||
int id;
|
||||
};
|
||||
using value_compare = absl::btree_map<int, int, key_compare>::value_compare;
|
||||
struct value_comp_child : public value_compare {
|
||||
explicit value_comp_child(key_compare kc) : value_compare(kc) {}
|
||||
int GetId() const { return comp.id; }
|
||||
};
|
||||
value_comp_child c(key_compare{10});
|
||||
EXPECT_EQ(c.GetId(), 10);
|
||||
}
|
||||
|
||||
TEST(Btree, DefaultConstruction) {
|
||||
absl::btree_set<int> s;
|
||||
absl::btree_map<int, int> m;
|
||||
|
@ -2297,7 +2330,9 @@ TEST(Btree, TryEmplaceWithHintWorks) {
|
|||
};
|
||||
using Cmp = decltype(cmp);
|
||||
|
||||
absl::btree_map<int, int, Cmp> m(cmp);
|
||||
// Use a map that is opted out of key_compare being adapted so we can expect
|
||||
// strict comparison call limits.
|
||||
absl::btree_map<int, int, CheckedCompareOptedOutCmp<Cmp>> m(cmp);
|
||||
for (int i = 0; i < 128; ++i) {
|
||||
m.emplace(i, i);
|
||||
}
|
||||
|
@ -2452,23 +2487,28 @@ TEST(Btree, EraseIf) {
|
|||
// Test that erase_if works with all the container types and supports lambdas.
|
||||
{
|
||||
absl::btree_set<int> s = {1, 3, 5, 6, 100};
|
||||
erase_if(s, [](int k) { return k > 3; });
|
||||
EXPECT_EQ(erase_if(s, [](int k) { return k > 3; }), 3);
|
||||
EXPECT_THAT(s, ElementsAre(1, 3));
|
||||
}
|
||||
{
|
||||
absl::btree_multiset<int> s = {1, 3, 3, 5, 6, 6, 100};
|
||||
erase_if(s, [](int k) { return k <= 3; });
|
||||
EXPECT_EQ(erase_if(s, [](int k) { return k <= 3; }), 3);
|
||||
EXPECT_THAT(s, ElementsAre(5, 6, 6, 100));
|
||||
}
|
||||
{
|
||||
absl::btree_map<int, int> m = {{1, 1}, {3, 3}, {6, 6}, {100, 100}};
|
||||
erase_if(m, [](std::pair<const int, int> kv) { return kv.first > 3; });
|
||||
EXPECT_EQ(
|
||||
erase_if(m, [](std::pair<const int, int> kv) { return kv.first > 3; }),
|
||||
2);
|
||||
EXPECT_THAT(m, ElementsAre(Pair(1, 1), Pair(3, 3)));
|
||||
}
|
||||
{
|
||||
absl::btree_multimap<int, int> m = {{1, 1}, {3, 3}, {3, 6},
|
||||
{6, 6}, {6, 7}, {100, 6}};
|
||||
erase_if(m, [](std::pair<const int, int> kv) { return kv.second == 6; });
|
||||
EXPECT_EQ(
|
||||
erase_if(m,
|
||||
[](std::pair<const int, int> kv) { return kv.second == 6; }),
|
||||
3);
|
||||
EXPECT_THAT(m, ElementsAre(Pair(1, 1), Pair(3, 3), Pair(6, 7)));
|
||||
}
|
||||
// Test that erasing all elements from a large set works and test support for
|
||||
|
@ -2476,15 +2516,29 @@ TEST(Btree, EraseIf) {
|
|||
{
|
||||
absl::btree_set<int> s;
|
||||
for (int i = 0; i < 1000; ++i) s.insert(2 * i);
|
||||
erase_if(s, IsEven);
|
||||
EXPECT_EQ(erase_if(s, IsEven), 1000);
|
||||
EXPECT_THAT(s, IsEmpty());
|
||||
}
|
||||
// Test that erase_if also accepts the predicate as a function pointer.
|
||||
{
|
||||
absl::btree_set<int> s = {1, 3, 5, 6, 100};
|
||||
erase_if(s, &IsEven);
|
||||
EXPECT_EQ(erase_if(s, &IsEven), 2);
|
||||
EXPECT_THAT(s, ElementsAre(1, 3, 5));
|
||||
}
|
||||
// Test that erase_if invokes the predicate once per element.
|
||||
{
|
||||
absl::btree_set<int> s;
|
||||
for (int i = 0; i < 1000; ++i) s.insert(i);
|
||||
int pred_calls = 0;
|
||||
EXPECT_EQ(erase_if(s,
|
||||
[&pred_calls](int k) {
|
||||
++pred_calls;
|
||||
return k % 2;
|
||||
}),
|
||||
500);
|
||||
EXPECT_THAT(s, SizeIs(500));
|
||||
EXPECT_EQ(pred_calls, 1000);
|
||||
}
|
||||
}
|
||||
|
||||
TEST(Btree, InsertOrAssign) {
|
||||
|
@ -2948,6 +3002,252 @@ TEST(Btree, ConstructImplicitlyWithUnadaptedComparator) {
|
|||
absl::btree_set<MultiKey, MultiKeyComp> set = {{}, MultiKeyComp{}};
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
TEST(Btree, InvalidComparatorsCaught) {
|
||||
{
|
||||
struct ZeroAlwaysLessCmp {
|
||||
bool operator()(int lhs, int rhs) const {
|
||||
if (lhs == 0) return true;
|
||||
return lhs < rhs;
|
||||
}
|
||||
};
|
||||
absl::btree_set<int, ZeroAlwaysLessCmp> set;
|
||||
EXPECT_DEATH(set.insert({0, 1, 2}), "is_self_equivalent");
|
||||
}
|
||||
{
|
||||
struct ThreeWayAlwaysLessCmp {
|
||||
absl::weak_ordering operator()(int, int) const {
|
||||
return absl::weak_ordering::less;
|
||||
}
|
||||
};
|
||||
absl::btree_set<int, ThreeWayAlwaysLessCmp> set;
|
||||
EXPECT_DEATH(set.insert({0, 1, 2}), "is_self_equivalent");
|
||||
}
|
||||
{
|
||||
struct SumGreaterZeroCmp {
|
||||
bool operator()(int lhs, int rhs) const {
|
||||
// First, do equivalence correctly - so we can test later condition.
|
||||
if (lhs == rhs) return false;
|
||||
return lhs + rhs > 0;
|
||||
}
|
||||
};
|
||||
absl::btree_set<int, SumGreaterZeroCmp> set;
|
||||
// Note: '!' only needs to be escaped when it's the first character.
|
||||
EXPECT_DEATH(set.insert({0, 1, 2}),
|
||||
R"regex(\!lhs_comp_rhs \|\| !comp\(\)\(rhs, lhs\))regex");
|
||||
}
|
||||
{
|
||||
struct ThreeWaySumGreaterZeroCmp {
|
||||
absl::weak_ordering operator()(int lhs, int rhs) const {
|
||||
// First, do equivalence correctly - so we can test later condition.
|
||||
if (lhs == rhs) return absl::weak_ordering::equivalent;
|
||||
|
||||
if (lhs + rhs > 0) return absl::weak_ordering::less;
|
||||
if (lhs + rhs == 0) return absl::weak_ordering::equivalent;
|
||||
return absl::weak_ordering::greater;
|
||||
}
|
||||
};
|
||||
absl::btree_set<int, ThreeWaySumGreaterZeroCmp> set;
|
||||
EXPECT_DEATH(set.insert({0, 1, 2}), "lhs_comp_rhs < 0 -> rhs_comp_lhs > 0");
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
#ifndef _MSC_VER
|
||||
// This test crashes on MSVC.
|
||||
TEST(Btree, InvalidIteratorUse) {
|
||||
if (!BtreeNodePeer::UsesGenerations<absl::btree_set<int>>())
|
||||
GTEST_SKIP() << "Generation validation for iterators is disabled.";
|
||||
|
||||
{
|
||||
absl::btree_set<int> set;
|
||||
for (int i = 0; i < 10; ++i) set.insert(i);
|
||||
auto it = set.begin();
|
||||
set.erase(it++);
|
||||
EXPECT_DEATH(set.erase(it++), "invalidated iterator");
|
||||
}
|
||||
{
|
||||
absl::btree_set<int> set;
|
||||
for (int i = 0; i < 10; ++i) set.insert(i);
|
||||
auto it = set.insert(20).first;
|
||||
set.insert(30);
|
||||
EXPECT_DEATH(*it, "invalidated iterator");
|
||||
}
|
||||
{
|
||||
absl::btree_set<int> set;
|
||||
for (int i = 0; i < 10000; ++i) set.insert(i);
|
||||
auto it = set.find(5000);
|
||||
ASSERT_NE(it, set.end());
|
||||
set.erase(1);
|
||||
EXPECT_DEATH(*it, "invalidated iterator");
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
class OnlyConstructibleByAllocator {
|
||||
explicit OnlyConstructibleByAllocator(int i) : i_(i) {}
|
||||
|
||||
public:
|
||||
OnlyConstructibleByAllocator(const OnlyConstructibleByAllocator &other)
|
||||
: i_(other.i_) {}
|
||||
OnlyConstructibleByAllocator &operator=(
|
||||
const OnlyConstructibleByAllocator &other) {
|
||||
i_ = other.i_;
|
||||
return *this;
|
||||
}
|
||||
int Get() const { return i_; }
|
||||
bool operator==(int i) const { return i_ == i; }
|
||||
|
||||
private:
|
||||
template <typename T>
|
||||
friend class OnlyConstructibleAllocator;
|
||||
|
||||
int i_;
|
||||
};
|
||||
|
||||
template <typename T = OnlyConstructibleByAllocator>
|
||||
class OnlyConstructibleAllocator : public std::allocator<T> {
|
||||
public:
|
||||
OnlyConstructibleAllocator() = default;
|
||||
template <class U>
|
||||
explicit OnlyConstructibleAllocator(const OnlyConstructibleAllocator<U> &) {}
|
||||
|
||||
void construct(OnlyConstructibleByAllocator *p, int i) {
|
||||
new (p) OnlyConstructibleByAllocator(i);
|
||||
}
|
||||
template <typename Pair>
|
||||
void construct(Pair *p, const int i) {
|
||||
OnlyConstructibleByAllocator only(i);
|
||||
new (p) Pair(std::move(only), i);
|
||||
}
|
||||
|
||||
template <class U>
|
||||
struct rebind {
|
||||
using other = OnlyConstructibleAllocator<U>;
|
||||
};
|
||||
};
|
||||
|
||||
struct OnlyConstructibleByAllocatorComp {
|
||||
using is_transparent = void;
|
||||
bool operator()(OnlyConstructibleByAllocator a,
|
||||
OnlyConstructibleByAllocator b) const {
|
||||
return a.Get() < b.Get();
|
||||
}
|
||||
bool operator()(int a, OnlyConstructibleByAllocator b) const {
|
||||
return a < b.Get();
|
||||
}
|
||||
bool operator()(OnlyConstructibleByAllocator a, int b) const {
|
||||
return a.Get() < b;
|
||||
}
|
||||
};
|
||||
|
||||
TEST(Btree, OnlyConstructibleByAllocatorType) {
|
||||
const std::array<int, 2> arr = {3, 4};
|
||||
{
|
||||
absl::btree_set<OnlyConstructibleByAllocator,
|
||||
OnlyConstructibleByAllocatorComp,
|
||||
OnlyConstructibleAllocator<>>
|
||||
set;
|
||||
set.emplace(1);
|
||||
set.emplace_hint(set.end(), 2);
|
||||
set.insert(arr.begin(), arr.end());
|
||||
EXPECT_THAT(set, ElementsAre(1, 2, 3, 4));
|
||||
}
|
||||
{
|
||||
absl::btree_multiset<OnlyConstructibleByAllocator,
|
||||
OnlyConstructibleByAllocatorComp,
|
||||
OnlyConstructibleAllocator<>>
|
||||
set;
|
||||
set.emplace(1);
|
||||
set.emplace_hint(set.end(), 2);
|
||||
// TODO(ezb): fix insert_multi to allow this to compile.
|
||||
// set.insert(arr.begin(), arr.end());
|
||||
EXPECT_THAT(set, ElementsAre(1, 2));
|
||||
}
|
||||
{
|
||||
absl::btree_map<OnlyConstructibleByAllocator, int,
|
||||
OnlyConstructibleByAllocatorComp,
|
||||
OnlyConstructibleAllocator<>>
|
||||
map;
|
||||
map.emplace(1);
|
||||
map.emplace_hint(map.end(), 2);
|
||||
map.insert(arr.begin(), arr.end());
|
||||
EXPECT_THAT(map,
|
||||
ElementsAre(Pair(1, 1), Pair(2, 2), Pair(3, 3), Pair(4, 4)));
|
||||
}
|
||||
{
|
||||
absl::btree_multimap<OnlyConstructibleByAllocator, int,
|
||||
OnlyConstructibleByAllocatorComp,
|
||||
OnlyConstructibleAllocator<>>
|
||||
map;
|
||||
map.emplace(1);
|
||||
map.emplace_hint(map.end(), 2);
|
||||
// TODO(ezb): fix insert_multi to allow this to compile.
|
||||
// map.insert(arr.begin(), arr.end());
|
||||
EXPECT_THAT(map, ElementsAre(Pair(1, 1), Pair(2, 2)));
|
||||
}
|
||||
}
|
||||
|
||||
class NotAssignable {
|
||||
public:
|
||||
explicit NotAssignable(int i) : i_(i) {}
|
||||
NotAssignable(const NotAssignable &other) : i_(other.i_) {}
|
||||
NotAssignable &operator=(NotAssignable &&other) = delete;
|
||||
int Get() const { return i_; }
|
||||
bool operator==(int i) const { return i_ == i; }
|
||||
friend bool operator<(NotAssignable a, NotAssignable b) {
|
||||
return a.i_ < b.i_;
|
||||
}
|
||||
|
||||
private:
|
||||
int i_;
|
||||
};
|
||||
|
||||
TEST(Btree, NotAssignableType) {
|
||||
{
|
||||
absl::btree_set<NotAssignable> set;
|
||||
set.emplace(1);
|
||||
set.emplace_hint(set.end(), 2);
|
||||
set.insert(NotAssignable(3));
|
||||
set.insert(set.end(), NotAssignable(4));
|
||||
EXPECT_THAT(set, ElementsAre(1, 2, 3, 4));
|
||||
set.erase(set.begin());
|
||||
EXPECT_THAT(set, ElementsAre(2, 3, 4));
|
||||
}
|
||||
{
|
||||
absl::btree_multiset<NotAssignable> set;
|
||||
set.emplace(1);
|
||||
set.emplace_hint(set.end(), 2);
|
||||
set.insert(NotAssignable(2));
|
||||
set.insert(set.end(), NotAssignable(3));
|
||||
EXPECT_THAT(set, ElementsAre(1, 2, 2, 3));
|
||||
set.erase(set.begin());
|
||||
EXPECT_THAT(set, ElementsAre(2, 2, 3));
|
||||
}
|
||||
{
|
||||
absl::btree_map<NotAssignable, int> map;
|
||||
map.emplace(NotAssignable(1), 1);
|
||||
map.emplace_hint(map.end(), NotAssignable(2), 2);
|
||||
map.insert({NotAssignable(3), 3});
|
||||
map.insert(map.end(), {NotAssignable(4), 4});
|
||||
EXPECT_THAT(map,
|
||||
ElementsAre(Pair(1, 1), Pair(2, 2), Pair(3, 3), Pair(4, 4)));
|
||||
map.erase(map.begin());
|
||||
EXPECT_THAT(map, ElementsAre(Pair(2, 2), Pair(3, 3), Pair(4, 4)));
|
||||
}
|
||||
{
|
||||
absl::btree_multimap<NotAssignable, int> map;
|
||||
map.emplace(NotAssignable(1), 1);
|
||||
map.emplace_hint(map.end(), NotAssignable(2), 2);
|
||||
map.insert({NotAssignable(2), 3});
|
||||
map.insert(map.end(), {NotAssignable(3), 3});
|
||||
EXPECT_THAT(map,
|
||||
ElementsAre(Pair(1, 1), Pair(2, 2), Pair(2, 3), Pair(3, 3)));
|
||||
map.erase(map.begin());
|
||||
EXPECT_THAT(map, ElementsAre(Pair(2, 2), Pair(2, 3), Pair(3, 3)));
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
@ -489,12 +489,14 @@ class FixedArray {
|
|||
Storage storage_;
|
||||
};
|
||||
|
||||
#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
|
||||
template <typename T, size_t N, typename A>
|
||||
constexpr size_t FixedArray<T, N, A>::kInlineBytesDefault;
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
constexpr typename FixedArray<T, N, A>::size_type
|
||||
FixedArray<T, N, A>::inline_elements;
|
||||
#endif
|
||||
|
||||
template <typename T, size_t N, typename A>
|
||||
void FixedArray<T, N, A>::NonEmptyInlinedStorage::AnnotateConstruct(
|
||||
|
|
|||
#include <utility>
|
||||
|
||||
#include "absl/algorithm/container.h"
|
||||
#include "absl/base/macros.h"
|
||||
#include "absl/container/internal/container_memory.h"
|
||||
#include "absl/container/internal/hash_function_defaults.h" // IWYU pragma: export
|
||||
#include "absl/container/internal/raw_hash_map.h" // IWYU pragma: export
|
||||
|
@ -75,6 +76,10 @@ struct FlatHashMapPolicy;
|
|||
// absl/hash/hash.h for information on extending Abseil hashing to user-defined
|
||||
// types.
|
||||
//
|
||||
// Using `absl::flat_hash_map` at interface boundaries in dynamically loaded
|
||||
// libraries (e.g. .dll, .so) is unsupported due to the way `absl::Hash` values may
|
||||
// be randomized across dynamically loaded libraries.
|
||||
//
|
||||
// NOTE: A `flat_hash_map` stores its value types directly inside its
|
||||
// implementation array to avoid memory indirection. Because a `flat_hash_map`
|
||||
// is designed to move data when rehashed, map values will not retain pointer
|
||||
|
@ -356,8 +361,8 @@ class flat_hash_map : public absl::container_internal::raw_hash_map<
|
|||
// `flat_hash_map`.
|
||||
//
|
||||
// iterator try_emplace(const_iterator hint,
|
||||
// const init_type& k, Args&&... args):
|
||||
// iterator try_emplace(const_iterator hint, init_type&& k, Args&&... args):
|
||||
// const key_type& k, Args&&... args):
|
||||
// iterator try_emplace(const_iterator hint, key_type&& k, Args&&... args):
|
||||
//
|
||||
// Inserts (via copy or move) the element of the specified key into the
|
||||
// `flat_hash_map` using the position of `hint` as a non-binding suggestion
|
||||
|
@ -541,10 +546,12 @@ class flat_hash_map : public absl::container_internal::raw_hash_map<
// erase_if(flat_hash_map<>, Pred)
//
// Erases all elements that satisfy the predicate `pred` from the container `c`.
// Returns the number of erased elements.
template <typename K, typename V, typename H, typename E, typename A,
          typename Predicate>
void erase_if(flat_hash_map<K, V, H, E, A>& c, Predicate pred) {
  container_internal::EraseIf(pred, &c);
typename flat_hash_map<K, V, H, E, A>::size_type erase_if(
    flat_hash_map<K, V, H, E, A>& c, Predicate pred) {
  return container_internal::EraseIf(pred, &c);
}
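A short usage sketch for the updated signature (the container contents are illustrative; the counting behavior matches C++20 std::erase_if):

    absl::flat_hash_map<std::string, int> ages = {{"ann", 17}, {"bob", 42}};
    auto removed =
        absl::erase_if(ages, [](const auto &kv) { return kv.second < 18; });
    // removed == 1; only the "bob" entry remains.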
|
||||
|
||||
namespace container_internal {
|
||||
|
|
|
@ -236,33 +236,36 @@ TEST(FlatHashMap, EraseIf) {
|
|||
// Erase all elements.
|
||||
{
|
||||
flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
|
||||
erase_if(s, [](std::pair<const int, int>) { return true; });
|
||||
EXPECT_EQ(erase_if(s, [](std::pair<const int, int>) { return true; }), 5);
|
||||
EXPECT_THAT(s, IsEmpty());
|
||||
}
|
||||
// Erase no elements.
|
||||
{
|
||||
flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
|
||||
erase_if(s, [](std::pair<const int, int>) { return false; });
|
||||
EXPECT_EQ(erase_if(s, [](std::pair<const int, int>) { return false; }), 0);
|
||||
EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(2, 2), Pair(3, 3),
|
||||
Pair(4, 4), Pair(5, 5)));
|
||||
}
|
||||
// Erase specific elements.
|
||||
{
|
||||
flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
|
||||
erase_if(s,
|
||||
[](std::pair<const int, int> kvp) { return kvp.first % 2 == 1; });
|
||||
EXPECT_EQ(erase_if(s,
|
||||
[](std::pair<const int, int> kvp) {
|
||||
return kvp.first % 2 == 1;
|
||||
}),
|
||||
3);
|
||||
EXPECT_THAT(s, UnorderedElementsAre(Pair(2, 2), Pair(4, 4)));
|
||||
}
|
||||
// Predicate is function reference.
|
||||
{
|
||||
flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
|
||||
erase_if(s, FirstIsEven);
|
||||
EXPECT_EQ(erase_if(s, FirstIsEven), 2);
|
||||
EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(3, 3), Pair(5, 5)));
|
||||
}
|
||||
// Predicate is function pointer.
|
||||
{
|
||||
flat_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
|
||||
erase_if(s, &FirstIsEven);
|
||||
EXPECT_EQ(erase_if(s, &FirstIsEven), 2);
|
||||
EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(3, 3), Pair(5, 5)));
|
||||
}
|
||||
}
|
||||
@ -67,11 +67,15 @@ struct FlatHashSetPolicy;
|
|||
//
|
||||
// By default, `flat_hash_set` uses the `absl::Hash` hashing framework. All
|
||||
// fundamental and Abseil types that support the `absl::Hash` framework have a
|
||||
// compatible equality operator for comparing insertions into `flat_hash_map`.
|
||||
// compatible equality operator for comparing insertions into `flat_hash_set`.
|
||||
// If your type is not yet supported by the `absl::Hash` framework, see
|
||||
// absl/hash/hash.h for information on extending Abseil hashing to user-defined
|
||||
// types.
|
||||
//
|
||||
// Using `absl::flat_hash_set` at interface boundaries in dynamically loaded
|
||||
// libraries (e.g. .dll, .so) is unsupported due to the way `absl::Hash` values may
|
||||
// be randomized across dynamically loaded libraries.
|
||||
//
|
||||
// NOTE: A `flat_hash_set` stores its keys directly inside its implementation
|
||||
// array to avoid memory indirection. Because a `flat_hash_set` is designed to
|
||||
// move data when rehashed, set keys will not retain pointer stability. If you
|
||||
|
@ -106,7 +110,7 @@ class flat_hash_set
|
|||
public:
|
||||
// Constructors and Assignment Operators
|
||||
//
|
||||
// A flat_hash_set supports the same overload set as `std::unordered_map`
|
||||
// A flat_hash_set supports the same overload set as `std::unordered_set`
|
||||
// for construction and assignment:
|
||||
//
|
||||
// * Default constructor
|
||||
|
@ -173,7 +177,7 @@ class flat_hash_set
|
|||
// available within the `flat_hash_set`.
|
||||
//
|
||||
// NOTE: this member function is particular to `absl::flat_hash_set` and is
|
||||
// not provided in the `std::unordered_map` API.
|
||||
// not provided in the `std::unordered_set` API.
|
||||
using Base::capacity;
|
||||
|
||||
// flat_hash_set::empty()
|
||||
|
@ -332,7 +336,7 @@ class flat_hash_set
|
|||
// flat_hash_set::swap(flat_hash_set& other)
|
||||
//
|
||||
// Exchanges the contents of this `flat_hash_set` with those of the `other`
|
||||
// flat hash map, avoiding invocation of any move, copy, or swap operations on
|
||||
// flat hash set, avoiding invocation of any move, copy, or swap operations on
|
||||
// individual elements.
|
||||
//
|
||||
// All iterators and references on the `flat_hash_set` remain valid, excepting
|
||||
|
@ -340,7 +344,7 @@ class flat_hash_set
|
|||
//
|
||||
// `swap()` requires that the flat hash set's hashing and key equivalence
|
||||
// functions be Swappable, and are exchanged using unqualified calls to
|
||||
// non-member `swap()`. If the map's allocator has
|
||||
// non-member `swap()`. If the set's allocator has
|
||||
// `std::allocator_traits<allocator_type>::propagate_on_container_swap::value`
|
||||
// set to `true`, the allocators are also exchanged using an unqualified call
|
||||
// to non-member `swap()`; otherwise, the allocators are not swapped.
|
||||
|
@ -395,14 +399,14 @@ class flat_hash_set
|
|||
// flat_hash_set::bucket_count()
|
||||
//
|
||||
// Returns the number of "buckets" within the `flat_hash_set`. Note that
|
||||
// because a flat hash map contains all elements within its internal storage,
|
||||
// because a flat hash set contains all elements within its internal storage,
|
||||
// this value simply equals the current capacity of the `flat_hash_set`.
|
||||
using Base::bucket_count;
|
||||
|
||||
// flat_hash_set::load_factor()
|
||||
//
|
||||
// Returns the current load factor of the `flat_hash_set` (the average number
|
||||
// of slots occupied with a value within the hash map).
|
||||
// of slots occupied with a value within the hash set).
|
||||
using Base::load_factor;
|
||||
|
||||
// flat_hash_set::max_load_factor()
|
||||
|
@ -443,9 +447,11 @@ class flat_hash_set
// erase_if(flat_hash_set<>, Pred)
//
// Erases all elements that satisfy the predicate `pred` from the container `c`.
// Returns the number of erased elements.
template <typename T, typename H, typename E, typename A, typename Predicate>
void erase_if(flat_hash_set<T, H, E, A>& c, Predicate pred) {
  container_internal::EraseIf(pred, &c);
typename flat_hash_set<T, H, E, A>::size_type erase_if(
    flat_hash_set<T, H, E, A>& c, Predicate pred) {
  return container_internal::EraseIf(pred, &c);
}

namespace container_internal {
|
||||
|
|
|
@ -143,31 +143,31 @@ TEST(FlatHashSet, EraseIf) {
|
|||
// Erase all elements.
|
||||
{
|
||||
flat_hash_set<int> s = {1, 2, 3, 4, 5};
|
||||
erase_if(s, [](int) { return true; });
|
||||
EXPECT_EQ(erase_if(s, [](int) { return true; }), 5);
|
||||
EXPECT_THAT(s, IsEmpty());
|
||||
}
|
||||
// Erase no elements.
|
||||
{
|
||||
flat_hash_set<int> s = {1, 2, 3, 4, 5};
|
||||
erase_if(s, [](int) { return false; });
|
||||
EXPECT_EQ(erase_if(s, [](int) { return false; }), 0);
|
||||
EXPECT_THAT(s, UnorderedElementsAre(1, 2, 3, 4, 5));
|
||||
}
|
||||
// Erase specific elements.
|
||||
{
|
||||
flat_hash_set<int> s = {1, 2, 3, 4, 5};
|
||||
erase_if(s, [](int k) { return k % 2 == 1; });
|
||||
EXPECT_EQ(erase_if(s, [](int k) { return k % 2 == 1; }), 3);
|
||||
EXPECT_THAT(s, UnorderedElementsAre(2, 4));
|
||||
}
|
||||
// Predicate is function reference.
|
||||
{
|
||||
flat_hash_set<int> s = {1, 2, 3, 4, 5};
|
||||
erase_if(s, IsEven);
|
||||
EXPECT_EQ(erase_if(s, IsEven), 2);
|
||||
EXPECT_THAT(s, UnorderedElementsAre(1, 3, 5));
|
||||
}
|
||||
// Predicate is function pointer.
|
||||
{
|
||||
flat_hash_set<int> s = {1, 2, 3, 4, 5};
|
||||
erase_if(s, &IsEven);
|
||||
EXPECT_EQ(erase_if(s, &IsEven), 2);
|
||||
EXPECT_THAT(s, UnorderedElementsAre(1, 3, 5));
|
||||
}
|
||||
}
|
||||
@ -36,7 +36,6 @@
|
|||
#define ABSL_CONTAINER_INLINED_VECTOR_H_
|
||||
|
||||
#include <algorithm>
|
||||
#include <cassert>
|
||||
#include <cstddef>
|
||||
#include <cstdlib>
|
||||
#include <cstring>
|
||||
|
@ -72,37 +71,43 @@ class InlinedVector {
|
|||
|
||||
using Storage = inlined_vector_internal::Storage<T, N, A>;
|
||||
|
||||
using AllocatorTraits = typename Storage::AllocatorTraits;
|
||||
using RValueReference = typename Storage::RValueReference;
|
||||
using MoveIterator = typename Storage::MoveIterator;
|
||||
using IsMemcpyOk = typename Storage::IsMemcpyOk;
|
||||
template <typename TheA>
|
||||
using AllocatorTraits = inlined_vector_internal::AllocatorTraits<TheA>;
|
||||
template <typename TheA>
|
||||
using MoveIterator = inlined_vector_internal::MoveIterator<TheA>;
|
||||
template <typename TheA>
|
||||
using IsMemcpyOk = inlined_vector_internal::IsMemcpyOk<TheA>;
|
||||
|
||||
template <typename Iterator>
|
||||
template <typename TheA, typename Iterator>
|
||||
using IteratorValueAdapter =
|
||||
typename Storage::template IteratorValueAdapter<Iterator>;
|
||||
using CopyValueAdapter = typename Storage::CopyValueAdapter;
|
||||
using DefaultValueAdapter = typename Storage::DefaultValueAdapter;
|
||||
inlined_vector_internal::IteratorValueAdapter<TheA, Iterator>;
|
||||
template <typename TheA>
|
||||
using CopyValueAdapter = inlined_vector_internal::CopyValueAdapter<TheA>;
|
||||
template <typename TheA>
|
||||
using DefaultValueAdapter =
|
||||
inlined_vector_internal::DefaultValueAdapter<TheA>;
|
||||
|
||||
template <typename Iterator>
|
||||
using EnableIfAtLeastForwardIterator = absl::enable_if_t<
|
||||
inlined_vector_internal::IsAtLeastForwardIterator<Iterator>::value>;
|
||||
inlined_vector_internal::IsAtLeastForwardIterator<Iterator>::value, int>;
|
||||
template <typename Iterator>
|
||||
using DisableIfAtLeastForwardIterator = absl::enable_if_t<
|
||||
!inlined_vector_internal::IsAtLeastForwardIterator<Iterator>::value>;
|
||||
!inlined_vector_internal::IsAtLeastForwardIterator<Iterator>::value, int>;
|
||||
|
||||
public:
|
||||
using allocator_type = typename Storage::allocator_type;
|
||||
using value_type = typename Storage::value_type;
|
||||
using pointer = typename Storage::pointer;
|
||||
using const_pointer = typename Storage::const_pointer;
|
||||
using size_type = typename Storage::size_type;
|
||||
using difference_type = typename Storage::difference_type;
|
||||
using reference = typename Storage::reference;
|
||||
using const_reference = typename Storage::const_reference;
|
||||
using iterator = typename Storage::iterator;
|
||||
using const_iterator = typename Storage::const_iterator;
|
||||
using reverse_iterator = typename Storage::reverse_iterator;
|
||||
using const_reverse_iterator = typename Storage::const_reverse_iterator;
|
||||
using allocator_type = A;
|
||||
using value_type = inlined_vector_internal::ValueType<A>;
|
||||
using pointer = inlined_vector_internal::Pointer<A>;
|
||||
using const_pointer = inlined_vector_internal::ConstPointer<A>;
|
||||
using size_type = inlined_vector_internal::SizeType<A>;
|
||||
using difference_type = inlined_vector_internal::DifferenceType<A>;
|
||||
using reference = inlined_vector_internal::Reference<A>;
|
||||
using const_reference = inlined_vector_internal::ConstReference<A>;
|
||||
using iterator = inlined_vector_internal::Iterator<A>;
|
||||
using const_iterator = inlined_vector_internal::ConstIterator<A>;
|
||||
using reverse_iterator = inlined_vector_internal::ReverseIterator<A>;
|
||||
using const_reverse_iterator =
|
||||
inlined_vector_internal::ConstReverseIterator<A>;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// InlinedVector Constructors and Destructor
|
||||
|
@ -111,28 +116,28 @@ class InlinedVector {
|
|||
// Creates an empty inlined vector with a value-initialized allocator.
|
||||
InlinedVector() noexcept(noexcept(allocator_type())) : storage_() {}
|
||||
|
||||
// Creates an empty inlined vector with a copy of `alloc`.
|
||||
explicit InlinedVector(const allocator_type& alloc) noexcept
|
||||
: storage_(alloc) {}
|
||||
// Creates an empty inlined vector with a copy of `allocator`.
|
||||
explicit InlinedVector(const allocator_type& allocator) noexcept
|
||||
: storage_(allocator) {}
|
||||
|
||||
// Creates an inlined vector with `n` copies of `value_type()`.
|
||||
explicit InlinedVector(size_type n,
|
||||
const allocator_type& alloc = allocator_type())
|
||||
: storage_(alloc) {
|
||||
storage_.Initialize(DefaultValueAdapter(), n);
|
||||
const allocator_type& allocator = allocator_type())
|
||||
: storage_(allocator) {
|
||||
storage_.Initialize(DefaultValueAdapter<A>(), n);
|
||||
}
|
||||
|
||||
// Creates an inlined vector with `n` copies of `v`.
|
||||
InlinedVector(size_type n, const_reference v,
|
||||
const allocator_type& alloc = allocator_type())
|
||||
: storage_(alloc) {
|
||||
storage_.Initialize(CopyValueAdapter(v), n);
|
||||
const allocator_type& allocator = allocator_type())
|
||||
: storage_(allocator) {
|
||||
storage_.Initialize(CopyValueAdapter<A>(std::addressof(v)), n);
|
||||
}
|
||||
|
||||
// Creates an inlined vector with copies of the elements of `list`.
|
||||
InlinedVector(std::initializer_list<value_type> list,
|
||||
const allocator_type& alloc = allocator_type())
|
||||
: InlinedVector(list.begin(), list.end(), alloc) {}
|
||||
const allocator_type& allocator = allocator_type())
|
||||
: InlinedVector(list.begin(), list.end(), allocator) {}
|
||||
|
||||
// Creates an inlined vector with elements constructed from the provided
|
||||
// forward iterator range [`first`, `last`).
|
||||
|
@ -141,35 +146,36 @@ class InlinedVector {
|
|||
// this constructor with two integral arguments and a call to the above
|
||||
// `InlinedVector(size_type, const_reference)` constructor.
|
||||
template <typename ForwardIterator,
|
||||
EnableIfAtLeastForwardIterator<ForwardIterator>* = nullptr>
|
||||
EnableIfAtLeastForwardIterator<ForwardIterator> = 0>
|
||||
InlinedVector(ForwardIterator first, ForwardIterator last,
|
||||
const allocator_type& alloc = allocator_type())
|
||||
: storage_(alloc) {
|
||||
storage_.Initialize(IteratorValueAdapter<ForwardIterator>(first),
|
||||
std::distance(first, last));
|
||||
const allocator_type& allocator = allocator_type())
|
||||
: storage_(allocator) {
|
||||
storage_.Initialize(IteratorValueAdapter<A, ForwardIterator>(first),
|
||||
static_cast<size_t>(std::distance(first, last)));
|
||||
}
|
||||
|
||||
// Creates an inlined vector with elements constructed from the provided input
|
||||
// iterator range [`first`, `last`).
|
||||
template <typename InputIterator,
|
||||
DisableIfAtLeastForwardIterator<InputIterator>* = nullptr>
|
||||
DisableIfAtLeastForwardIterator<InputIterator> = 0>
|
||||
InlinedVector(InputIterator first, InputIterator last,
|
||||
const allocator_type& alloc = allocator_type())
|
||||
: storage_(alloc) {
|
||||
const allocator_type& allocator = allocator_type())
|
||||
: storage_(allocator) {
|
||||
std::copy(first, last, std::back_inserter(*this));
|
||||
}
|
||||
|
||||
// Creates an inlined vector by copying the contents of `other` using
|
||||
// `other`'s allocator.
|
||||
InlinedVector(const InlinedVector& other)
|
||||
: InlinedVector(other, *other.storage_.GetAllocPtr()) {}
|
||||
: InlinedVector(other, other.storage_.GetAllocator()) {}
|
||||
|
||||
// Creates an inlined vector by copying the contents of `other` using `alloc`.
|
||||
InlinedVector(const InlinedVector& other, const allocator_type& alloc)
|
||||
: storage_(alloc) {
|
||||
// Creates an inlined vector by copying the contents of `other` using the
|
||||
// provided `allocator`.
|
||||
InlinedVector(const InlinedVector& other, const allocator_type& allocator)
|
||||
: storage_(allocator) {
|
||||
if (other.empty()) {
|
||||
// Empty; nothing to do.
|
||||
} else if (IsMemcpyOk::value && !other.storage_.GetIsAllocated()) {
|
||||
} else if (IsMemcpyOk<A>::value && !other.storage_.GetIsAllocated()) {
|
||||
// Memcpy-able and do not need allocation.
|
||||
storage_.MemcpyFrom(other.storage_);
|
||||
} else {
|
||||
|
@ -194,23 +200,23 @@ class InlinedVector {
|
|||
InlinedVector(InlinedVector&& other) noexcept(
|
||||
absl::allocator_is_nothrow<allocator_type>::value ||
|
||||
std::is_nothrow_move_constructible<value_type>::value)
|
||||
: storage_(*other.storage_.GetAllocPtr()) {
|
||||
if (IsMemcpyOk::value) {
|
||||
: storage_(other.storage_.GetAllocator()) {
|
||||
if (IsMemcpyOk<A>::value) {
|
||||
storage_.MemcpyFrom(other.storage_);
|
||||
|
||||
other.storage_.SetInlinedSize(0);
|
||||
} else if (other.storage_.GetIsAllocated()) {
|
||||
storage_.SetAllocatedData(other.storage_.GetAllocatedData(),
|
||||
other.storage_.GetAllocatedCapacity());
|
||||
storage_.SetAllocation({other.storage_.GetAllocatedData(),
|
||||
other.storage_.GetAllocatedCapacity()});
|
||||
storage_.SetAllocatedSize(other.storage_.GetSize());
|
||||
|
||||
other.storage_.SetInlinedSize(0);
|
||||
} else {
|
||||
IteratorValueAdapter<MoveIterator> other_values(
|
||||
MoveIterator(other.storage_.GetInlinedData()));
|
||||
IteratorValueAdapter<A, MoveIterator<A>> other_values(
|
||||
MoveIterator<A>(other.storage_.GetInlinedData()));
|
||||
|
||||
inlined_vector_internal::ConstructElements(
|
||||
storage_.GetAllocPtr(), storage_.GetInlinedData(), &other_values,
|
||||
inlined_vector_internal::ConstructElements<A>(
|
||||
storage_.GetAllocator(), storage_.GetInlinedData(), other_values,
|
||||
other.storage_.GetSize());
|
||||
|
||||
storage_.SetInlinedSize(other.storage_.GetSize());
|
||||
|
@ -218,29 +224,31 @@ class InlinedVector {
|
|||
}
|
||||
|
||||
// Creates an inlined vector by moving in the contents of `other` with a copy
|
||||
// of `alloc`.
|
||||
// of `allocator`.
|
||||
//
|
||||
// NOTE: if `other`'s allocator is not equal to `alloc`, even if `other`
|
||||
// NOTE: if `other`'s allocator is not equal to `allocator`, even if `other`
|
||||
// contains allocated memory, this move constructor will still allocate. Since
|
||||
// allocation is performed, this constructor can only be `noexcept` if the
|
||||
// specified allocator is also `noexcept`.
|
||||
InlinedVector(InlinedVector&& other, const allocator_type& alloc) noexcept(
|
||||
absl::allocator_is_nothrow<allocator_type>::value)
|
||||
: storage_(alloc) {
|
||||
if (IsMemcpyOk::value) {
|
||||
InlinedVector(
|
||||
InlinedVector&& other,
|
||||
const allocator_type&
|
||||
allocator) noexcept(absl::allocator_is_nothrow<allocator_type>::value)
|
||||
: storage_(allocator) {
|
||||
if (IsMemcpyOk<A>::value) {
|
||||
storage_.MemcpyFrom(other.storage_);
|
||||
|
||||
other.storage_.SetInlinedSize(0);
|
||||
} else if ((*storage_.GetAllocPtr() == *other.storage_.GetAllocPtr()) &&
|
||||
} else if ((storage_.GetAllocator() == other.storage_.GetAllocator()) &&
|
||||
other.storage_.GetIsAllocated()) {
|
||||
storage_.SetAllocatedData(other.storage_.GetAllocatedData(),
|
||||
other.storage_.GetAllocatedCapacity());
|
||||
storage_.SetAllocation({other.storage_.GetAllocatedData(),
|
||||
other.storage_.GetAllocatedCapacity()});
|
||||
storage_.SetAllocatedSize(other.storage_.GetSize());
|
||||
|
||||
other.storage_.SetInlinedSize(0);
|
||||
} else {
|
||||
storage_.Initialize(
|
||||
IteratorValueAdapter<MoveIterator>(MoveIterator(other.data())),
|
||||
storage_.Initialize(IteratorValueAdapter<A, MoveIterator<A>>(
|
||||
MoveIterator<A>(other.data())),
|
||||
other.size());
|
||||
}
|
||||
}
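A brief caller-side sketch of the allocator-extended move overload described in the note above (MyAlloc is a hypothetical allocator type used only for illustration):

    MyAlloc<int> other_alloc;
    absl::InlinedVector<int, 4, MyAlloc<int>> src = {1, 2, 3, 4, 5};
    // If other_alloc compares unequal to src.get_allocator(), the elements are
    // moved one-by-one into storage obtained from other_alloc, so this may allocate.
    absl::InlinedVector<int, 4, MyAlloc<int>> dst(std::move(src), other_alloc);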
|
||||
|
@ -442,7 +450,7 @@ class InlinedVector {
|
|||
// `InlinedVector::get_allocator()`
|
||||
//
|
||||
// Returns a copy of the inlined vector's allocator.
|
||||
allocator_type get_allocator() const { return *storage_.GetAllocPtr(); }
|
||||
allocator_type get_allocator() const { return storage_.GetAllocator(); }
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// InlinedVector Member Mutators
|
||||
|
@ -476,16 +484,16 @@ class InlinedVector {
|
|||
// unspecified state.
|
||||
InlinedVector& operator=(InlinedVector&& other) {
|
||||
if (ABSL_PREDICT_TRUE(this != std::addressof(other))) {
|
||||
if (IsMemcpyOk::value || other.storage_.GetIsAllocated()) {
|
||||
inlined_vector_internal::DestroyElements(storage_.GetAllocPtr(), data(),
|
||||
size());
|
||||
if (IsMemcpyOk<A>::value || other.storage_.GetIsAllocated()) {
|
||||
inlined_vector_internal::DestroyAdapter<A>::DestroyElements(
|
||||
storage_.GetAllocator(), data(), size());
|
||||
storage_.DeallocateIfAllocated();
|
||||
storage_.MemcpyFrom(other.storage_);
|
||||
|
||||
other.storage_.SetInlinedSize(0);
|
||||
} else {
|
||||
storage_.Assign(IteratorValueAdapter<MoveIterator>(
|
||||
MoveIterator(other.storage_.GetInlinedData())),
|
||||
storage_.Assign(IteratorValueAdapter<A, MoveIterator<A>>(
|
||||
MoveIterator<A>(other.storage_.GetInlinedData())),
|
||||
other.size());
|
||||
}
|
||||
}
|
||||
|
@ -497,7 +505,7 @@ class InlinedVector {
|
|||
//
|
||||
// Replaces the contents of the inlined vector with `n` copies of `v`.
|
||||
void assign(size_type n, const_reference v) {
|
||||
storage_.Assign(CopyValueAdapter(v), n);
|
||||
storage_.Assign(CopyValueAdapter<A>(std::addressof(v)), n);
|
||||
}
|
||||
|
||||
// Overload of `InlinedVector::assign(...)` that replaces the contents of the
|
||||
|
@ -511,10 +519,10 @@ class InlinedVector {
|
|||
//
|
||||
// NOTE: this overload is for iterators that are "forward" category or better.
|
||||
template <typename ForwardIterator,
|
||||
EnableIfAtLeastForwardIterator<ForwardIterator>* = nullptr>
|
||||
EnableIfAtLeastForwardIterator<ForwardIterator> = 0>
|
||||
void assign(ForwardIterator first, ForwardIterator last) {
|
||||
storage_.Assign(IteratorValueAdapter<ForwardIterator>(first),
|
||||
std::distance(first, last));
|
||||
storage_.Assign(IteratorValueAdapter<A, ForwardIterator>(first),
|
||||
static_cast<size_t>(std::distance(first, last)));
|
||||
}
|
||||
|
||||
// Overload of `InlinedVector::assign(...)` to replace the contents of the
|
||||
|
@ -522,7 +530,7 @@ class InlinedVector {
|
|||
//
|
||||
// NOTE: this overload is for iterators that are "input" category.
|
||||
template <typename InputIterator,
|
||||
DisableIfAtLeastForwardIterator<InputIterator>* = nullptr>
|
||||
DisableIfAtLeastForwardIterator<InputIterator> = 0>
|
||||
void assign(InputIterator first, InputIterator last) {
|
||||
size_type i = 0;
|
||||
for (; i < size() && first != last; ++i, static_cast<void>(++first)) {
|
||||
|
@ -541,7 +549,7 @@ class InlinedVector {
|
|||
// is larger than `size()`, new elements are value-initialized.
|
||||
void resize(size_type n) {
|
||||
ABSL_HARDENING_ASSERT(n <= max_size());
|
||||
storage_.Resize(DefaultValueAdapter(), n);
|
||||
storage_.Resize(DefaultValueAdapter<A>(), n);
|
||||
}
|
||||
|
||||
// Overload of `InlinedVector::resize(...)` that resizes the inlined vector to
|
||||
|
@ -551,7 +559,7 @@ class InlinedVector {
|
|||
// is larger than `size()`, new elements are copied-constructed from `v`.
|
||||
void resize(size_type n, const_reference v) {
|
||||
ABSL_HARDENING_ASSERT(n <= max_size());
|
||||
storage_.Resize(CopyValueAdapter(v), n);
|
||||
storage_.Resize(CopyValueAdapter<A>(std::addressof(v)), n);
|
||||
}
|
||||
|
||||
// `InlinedVector::insert(...)`
|
||||
|
@ -564,7 +572,7 @@ class InlinedVector {
|
|||
|
||||
// Overload of `InlinedVector::insert(...)` that inserts `v` at `pos` using
|
||||
// move semantics, returning an `iterator` to the newly inserted element.
|
||||
iterator insert(const_iterator pos, RValueReference v) {
|
||||
iterator insert(const_iterator pos, value_type&& v) {
|
||||
return emplace(pos, std::move(v));
|
||||
}
|
||||
|
||||
|
@ -577,7 +585,20 @@ class InlinedVector {

if (ABSL_PREDICT_TRUE(n != 0)) {
value_type dealias = v;
return storage_.Insert(pos, CopyValueAdapter(dealias), n);
// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=102329#c2
// It appears that GCC thinks that since `pos` is a const pointer and may
// point to uninitialized memory at this point, a warning should be
// issued. But `pos` is actually only used to compute an array index to
// write to.
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif
return storage_.Insert(pos, CopyValueAdapter<A>(std::addressof(dealias)),
n);
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
} else {
return const_cast<iterator>(pos);
}
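A stand-alone illustration of the suppression pattern used in that hunk (the function and warning site here are hypothetical, not from the diff):

#include <cstdio>

// The pragmas are emitted only for GCC (Clang also defines __GNUC__, hence
// the !defined(__clang__) guard) and are scoped by push/pop so the warning
// stays enabled everywhere else.
int ReadOrZero(const int* p, bool have_value) {
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif
  int value = have_value ? *p : 0;  // spot where a false positive might fire
#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
  return value;
}

int main() {
  int x = 7;
  std::printf("%d\n", ReadOrZero(&x, true));
}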
|
@ -596,14 +617,15 @@ class InlinedVector {
|
|||
//
|
||||
// NOTE: this overload is for iterators that are "forward" category or better.
|
||||
template <typename ForwardIterator,
|
||||
EnableIfAtLeastForwardIterator<ForwardIterator>* = nullptr>
|
||||
EnableIfAtLeastForwardIterator<ForwardIterator> = 0>
|
||||
iterator insert(const_iterator pos, ForwardIterator first,
|
||||
ForwardIterator last) {
|
||||
ABSL_HARDENING_ASSERT(pos >= begin());
|
||||
ABSL_HARDENING_ASSERT(pos <= end());
|
||||
|
||||
if (ABSL_PREDICT_TRUE(first != last)) {
|
||||
return storage_.Insert(pos, IteratorValueAdapter<ForwardIterator>(first),
|
||||
return storage_.Insert(pos,
|
||||
IteratorValueAdapter<A, ForwardIterator>(first),
|
||||
std::distance(first, last));
|
||||
} else {
|
||||
return const_cast<iterator>(pos);
|
||||
|
@ -616,7 +638,7 @@ class InlinedVector {
|
|||
//
|
||||
// NOTE: this overload is for iterators that are "input" category.
|
||||
template <typename InputIterator,
|
||||
DisableIfAtLeastForwardIterator<InputIterator>* = nullptr>
|
||||
DisableIfAtLeastForwardIterator<InputIterator> = 0>
|
||||
iterator insert(const_iterator pos, InputIterator first, InputIterator last) {
|
||||
ABSL_HARDENING_ASSERT(pos >= begin());
|
||||
ABSL_HARDENING_ASSERT(pos <= end());
|
||||
|
@ -640,8 +662,8 @@ class InlinedVector {
|
|||
|
||||
value_type dealias(std::forward<Args>(args)...);
|
||||
return storage_.Insert(pos,
|
||||
IteratorValueAdapter<MoveIterator>(
|
||||
MoveIterator(std::addressof(dealias))),
|
||||
IteratorValueAdapter<A, MoveIterator<A>>(
|
||||
MoveIterator<A>(std::addressof(dealias))),
|
||||
1);
|
||||
}
|
||||
|
||||
|
@ -661,7 +683,7 @@ class InlinedVector {
|
|||
|
||||
// Overload of `InlinedVector::push_back(...)` for inserting `v` at `end()`
|
||||
// using move semantics.
|
||||
void push_back(RValueReference v) {
|
||||
void push_back(value_type&& v) {
|
||||
static_cast<void>(emplace_back(std::move(v)));
|
||||
}
|
||||
|
||||
|
@ -671,7 +693,7 @@ class InlinedVector {
|
|||
void pop_back() noexcept {
|
||||
ABSL_HARDENING_ASSERT(!empty());
|
||||
|
||||
AllocatorTraits::destroy(*storage_.GetAllocPtr(), data() + (size() - 1));
|
||||
AllocatorTraits<A>::destroy(storage_.GetAllocator(), data() + (size() - 1));
|
||||
storage_.SubtractSize(1);
|
||||
}
|
||||
|
||||
|
@ -710,8 +732,8 @@ class InlinedVector {
|
|||
// Destroys all elements in the inlined vector, setting the size to `0` and
|
||||
// deallocating any held memory.
|
||||
void clear() noexcept {
|
||||
inlined_vector_internal::DestroyElements(storage_.GetAllocPtr(), data(),
|
||||
size());
|
||||
inlined_vector_internal::DestroyAdapter<A>::DestroyElements(
|
||||
storage_.GetAllocator(), data(), size());
|
||||
storage_.DeallocateIfAllocated();
|
||||
|
||||
storage_.SetInlinedSize(0);
|
||||
|
@ -724,15 +746,12 @@ class InlinedVector {

// `InlinedVector::shrink_to_fit()`
//
// Reduces memory usage by freeing unused memory. After being called, calls to
// `capacity()` will be equal to `max(N, size())`.
// Attempts to reduce memory usage by moving elements to (or keeping elements
// in) the smallest available buffer sufficient for containing `size()`
// elements.
//
// If `size() <= N` and the inlined vector contains allocated memory, the
// elements will all be moved to the inlined space and the allocated memory
// will be deallocated.
//
// If `size() > N` and `size() < capacity()`, the elements will be moved to a
// smaller allocation.
// If `size()` is sufficiently small, the elements will be moved into (or kept
// in) the inlined space.
void shrink_to_fit() {
if (storage_.GetIsAllocated()) {
storage_.ShrinkToFit();
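A short usage sketch of the behavior documented in that comment (element type and counts chosen arbitrarily for illustration):

#include <cstdio>

#include "absl/container/inlined_vector.h"

int main() {
  absl::InlinedVector<int, 8> v;
  for (int i = 0; i < 100; ++i) v.push_back(i);  // spills to the heap
  v.resize(4);                                   // size() is now <= N
  std::printf("before shrink: capacity=%zu\n", v.capacity());
  v.shrink_to_fit();  // per the comment above, storage moves back inline
  std::printf("after shrink:  capacity=%zu\n", v.capacity());
}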
|
|
@ -1545,7 +1545,7 @@ TYPED_TEST_P(InstanceTest, InitializerListAssign) {
|
|||
}
|
||||
}
|
||||
|
||||
REGISTER_TYPED_TEST_CASE_P(InstanceTest, Swap, CountConstructorsDestructors,
|
||||
REGISTER_TYPED_TEST_SUITE_P(InstanceTest, Swap, CountConstructorsDestructors,
|
||||
CountConstructorsDestructorsOnCopyConstruction,
|
||||
CountConstructorsDestructorsOnMoveConstruction,
|
||||
CountConstructorsDestructorsOnAssignment,
|
||||
|
@ -1555,7 +1555,8 @@ REGISTER_TYPED_TEST_CASE_P(InstanceTest, Swap, CountConstructorsDestructors,
|
|||
|
||||
using InstanceTypes =
|
||||
::testing::Types<CopyableOnlyInstance, CopyableMovableInstance>;
|
||||
INSTANTIATE_TYPED_TEST_CASE_P(InstanceTestOnTypes, InstanceTest, InstanceTypes);
|
||||
INSTANTIATE_TYPED_TEST_SUITE_P(InstanceTestOnTypes, InstanceTest,
|
||||
InstanceTypes);
|
||||
|
||||
TEST(DynamicVec, DynamicVecCompiles) {
|
||||
DynamicVec v;
|
||||
|
|
File diff suppressed because it is too large
|
@ -44,8 +44,8 @@ class btree_container {
|
|||
// transparent case.
|
||||
template <class K>
|
||||
using key_arg =
|
||||
typename KeyArg<IsTransparent<typename Tree::key_compare>::value>::
|
||||
template type<K, typename Tree::key_type>;
|
||||
typename KeyArg<params_type::kIsKeyCompareTransparent>::template type<
|
||||
K, typename Tree::key_type>;
|
||||
|
||||
public:
|
||||
using key_type = typename Tree::key_type;
|
||||
|
@ -166,9 +166,10 @@ class btree_container {

// Extract routines.
node_type extract(iterator position) {
// Use Move instead of Transfer, because the rebalancing code expects to
// have a valid object to scribble metadata bits on top of.
auto node = CommonAccess::Move<node_type>(get_allocator(), position.slot());
// Use Construct instead of Transfer because the rebalancing code will
// destroy the slot later.
auto node =
CommonAccess::Construct<node_type>(get_allocator(), position.slot());
erase(position);
return node;
}
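For context, a small usage sketch of the node-handle API this hunk touches, using absl::btree_set (extract detaches an element without copying it; insert re-attaches the node):

#include <cstdio>
#include <utility>

#include "absl/container/btree_set.h"

int main() {
  absl::btree_set<int> a = {1, 2, 3};
  absl::btree_set<int> b;
  auto node = a.extract(a.find(2));  // `a` no longer contains 2
  b.insert(std::move(node));         // `b` now owns the extracted element
  std::printf("a.size()=%zu b.count(2)=%zu\n", a.size(), b.count(2));
}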
@ -228,6 +229,7 @@ class btree_container {
|
|||
}
|
||||
|
||||
protected:
|
||||
friend struct btree_access;
|
||||
Tree tree_;
|
||||
};
|
||||
|
||||
|
@ -290,8 +292,11 @@ class btree_set_container : public btree_container<Tree> {
|
|||
}
|
||||
template <typename... Args>
|
||||
std::pair<iterator, bool> emplace(Args &&... args) {
|
||||
init_type v(std::forward<Args>(args)...);
|
||||
return this->tree_.insert_unique(params_type::key(v), std::move(v));
|
||||
// Use a node handle to manage a temp slot.
|
||||
auto node = CommonAccess::Construct<node_type>(this->get_allocator(),
|
||||
std::forward<Args>(args)...);
|
||||
auto *slot = CommonAccess::GetSlot(node);
|
||||
return this->tree_.insert_unique(params_type::key(slot), slot);
|
||||
}
|
||||
iterator insert(const_iterator hint, const value_type &v) {
|
||||
return this->tree_
|
||||
|
@ -305,9 +310,12 @@ class btree_set_container : public btree_container<Tree> {
|
|||
}
|
||||
template <typename... Args>
|
||||
iterator emplace_hint(const_iterator hint, Args &&... args) {
|
||||
init_type v(std::forward<Args>(args)...);
|
||||
// Use a node handle to manage a temp slot.
|
||||
auto node = CommonAccess::Construct<node_type>(this->get_allocator(),
|
||||
std::forward<Args>(args)...);
|
||||
auto *slot = CommonAccess::GetSlot(node);
|
||||
return this->tree_
|
||||
.insert_hint_unique(iterator(hint), params_type::key(v), std::move(v))
|
||||
.insert_hint_unique(iterator(hint), params_type::key(slot), slot)
|
||||
.first;
|
||||
}
|
||||
template <typename InputIterator>
|
||||
|
@ -536,6 +544,7 @@ class btree_multiset_container : public btree_container<Tree> {
|
|||
using params_type = typename Tree::params_type;
|
||||
using init_type = typename params_type::init_type;
|
||||
using is_key_compare_to = typename params_type::is_key_compare_to;
|
||||
friend class BtreeNodePeer;
|
||||
|
||||
template <class K>
|
||||
using key_arg = typename super_type::template key_arg<K>;
|
||||
|
@ -596,12 +605,18 @@ class btree_multiset_container : public btree_container<Tree> {
|
|||
}
|
||||
template <typename... Args>
|
||||
iterator emplace(Args &&... args) {
|
||||
return this->tree_.insert_multi(init_type(std::forward<Args>(args)...));
|
||||
// Use a node handle to manage a temp slot.
|
||||
auto node = CommonAccess::Construct<node_type>(this->get_allocator(),
|
||||
std::forward<Args>(args)...);
|
||||
return this->tree_.insert_multi(CommonAccess::GetSlot(node));
|
||||
}
|
||||
template <typename... Args>
|
||||
iterator emplace_hint(const_iterator hint, Args &&... args) {
|
||||
return this->tree_.insert_hint_multi(
|
||||
iterator(hint), init_type(std::forward<Args>(args)...));
|
||||
// Use a node handle to manage a temp slot.
|
||||
auto node = CommonAccess::Construct<node_type>(this->get_allocator(),
|
||||
std::forward<Args>(args)...);
|
||||
return this->tree_.insert_hint_multi(iterator(hint),
|
||||
CommonAccess::GetSlot(node));
|
||||
}
|
||||
iterator insert(node_type &&node) {
|
||||
if (!node) return this->end();
|
||||
|
@ -667,6 +682,7 @@ template <typename Tree>
|
|||
class btree_multimap_container : public btree_multiset_container<Tree> {
|
||||
using super_type = btree_multiset_container<Tree>;
|
||||
using params_type = typename Tree::params_type;
|
||||
friend class BtreeNodePeer;
|
||||
|
||||
public:
|
||||
using mapped_type = typename params_type::mapped_type;
|
||||
|
|
|
@ -84,10 +84,11 @@ class node_handle_base {
|
|||
PolicyTraits::transfer(alloc(), slot(), s);
|
||||
}
|
||||
|
||||
struct move_tag_t {};
|
||||
node_handle_base(move_tag_t, const allocator_type& a, slot_type* s)
|
||||
struct construct_tag_t {};
|
||||
template <typename... Args>
|
||||
node_handle_base(construct_tag_t, const allocator_type& a, Args&&... args)
|
||||
: alloc_(a) {
|
||||
PolicyTraits::construct(alloc(), slot(), s);
|
||||
PolicyTraits::construct(alloc(), slot(), std::forward<Args>(args)...);
|
||||
}
|
||||
|
||||
void destroy() {
|
||||
|
@ -186,8 +187,8 @@ struct CommonAccess {
|
|||
}
|
||||
|
||||
template <typename T, typename... Args>
|
||||
static T Move(Args&&... args) {
|
||||
return T(typename T::move_tag_t{}, std::forward<Args>(args)...);
|
||||
static T Construct(Args&&... args) {
|
||||
return T(typename T::construct_tag_t{}, std::forward<Args>(args)...);
|
||||
}
|
||||
};
|
||||
|
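A hedged sketch (not the Abseil types) of the construct_tag_t idiom introduced above: a tag selects the "construct the payload in place from these arguments" constructor, and the arguments are perfectly forwarded into placement new.

#include <new>
#include <string>
#include <utility>

template <typename T>
class Handle {
 public:
  struct construct_tag_t {};

  template <typename... Args>
  Handle(construct_tag_t, Args&&... args) : engaged_(true) {
    ::new (static_cast<void*>(storage_)) T(std::forward<Args>(args)...);
  }
  Handle(const Handle&) = delete;
  Handle& operator=(const Handle&) = delete;
  ~Handle() {
    if (engaged_) reinterpret_cast<T*>(storage_)->~T();
  }

  T& get() { return *reinterpret_cast<T*>(storage_); }

 private:
  alignas(T) unsigned char storage_[sizeof(T)];
  bool engaged_ = false;
};

int main() {
  Handle<std::string> h(Handle<std::string>::construct_tag_t{}, 3, 'x');
  return h.get() == "xxx" ? 0 : 1;
}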
||||
|
|
|
@ -403,6 +403,16 @@ TEST(CompressedTupleTest, EmptyFinalClass) {
|
|||
}
|
||||
#endif
|
||||
|
||||
// TODO(b/214288561): enable this test.
|
||||
TEST(CompressedTupleTest, DISABLED_NestedEbo) {
|
||||
struct Empty1 {};
|
||||
struct Empty2 {};
|
||||
CompressedTuple<Empty1, CompressedTuple<Empty2>, int> x;
|
||||
CompressedTuple<Empty1, Empty2, int> y;
|
||||
// Currently fails with sizeof(x) == 8, sizeof(y) == 4.
|
||||
EXPECT_EQ(sizeof(x), sizeof(y));
|
||||
}
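The disabled test above is about nested empty-base optimization; as a stand-alone illustration of why an empty type stored as a member (or inside a non-empty wrapper) costs space while an empty base can be free:

#include <cstdio>

struct Empty {};

struct AsBase : Empty {
  int v;
};
struct AsMember {
  Empty e;  // still needs a distinct address
  int v;
};

int main() {
  std::printf("sizeof(AsBase)=%zu sizeof(AsMember)=%zu\n", sizeof(AsBase),
              sizeof(AsMember));  // typically 4 vs 8
  return 0;
}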
|
||||
|
||||
} // namespace
|
||||
} // namespace container_internal
|
||||
ABSL_NAMESPACE_END
|
||||
|
|
|
@ -174,7 +174,7 @@ decltype(std::declval<F>()(std::declval<T>())) WithConstructed(
|
|||
//
|
||||
// 2. auto a = PairArgs(args...);
|
||||
// std::pair<F, S> p(std::piecewise_construct,
|
||||
// std::move(p.first), std::move(p.second));
|
||||
// std::move(a.first), std::move(a.second));
|
||||
inline std::pair<std::tuple<>, std::tuple<>> PairArgs() { return {}; }
|
||||
template <class F, class S>
|
||||
std::pair<std::tuple<F&&>, std::tuple<S&&>> PairArgs(F&& f, S&& s) {
|
||||
|
@ -402,6 +402,15 @@ struct map_slot_policy {
|
|||
}
|
||||
}
|
||||
|
||||
// Construct this slot by copying from another slot.
|
||||
template <class Allocator>
|
||||
static void construct(Allocator* alloc, slot_type* slot,
|
||||
const slot_type* other) {
|
||||
emplace(slot);
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &slot->value,
|
||||
other->value);
|
||||
}
|
||||
|
||||
template <class Allocator>
|
||||
static void destroy(Allocator* alloc, slot_type* slot) {
|
||||
if (kMutableKeys::value) {
|
||||
|
@ -424,33 +433,6 @@ struct map_slot_policy {
|
|||
}
|
||||
destroy(alloc, old_slot);
|
||||
}
|
||||
|
||||
template <class Allocator>
|
||||
static void swap(Allocator* alloc, slot_type* a, slot_type* b) {
|
||||
if (kMutableKeys::value) {
|
||||
using std::swap;
|
||||
swap(a->mutable_value, b->mutable_value);
|
||||
} else {
|
||||
value_type tmp = std::move(a->value);
|
||||
absl::allocator_traits<Allocator>::destroy(*alloc, &a->value);
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &a->value,
|
||||
std::move(b->value));
|
||||
absl::allocator_traits<Allocator>::destroy(*alloc, &b->value);
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &b->value,
|
||||
std::move(tmp));
|
||||
}
|
||||
}
|
||||
|
||||
template <class Allocator>
|
||||
static void move(Allocator* alloc, slot_type* src, slot_type* dest) {
|
||||
if (kMutableKeys::value) {
|
||||
dest->mutable_value = std::move(src->mutable_value);
|
||||
} else {
|
||||
absl::allocator_traits<Allocator>::destroy(*alloc, &dest->value);
|
||||
absl::allocator_traits<Allocator>::construct(*alloc, &dest->value,
|
||||
std::move(src->value));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace container_internal
|
||||
|
|
|
@ -80,7 +80,15 @@ class CountingAllocator {
|
|||
template <typename U>
|
||||
void destroy(U* p) {
|
||||
Allocator allocator;
|
||||
// Ignore GCC warning bug.
|
||||
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wuse-after-free"
|
||||
#endif
|
||||
AllocatorTraits::destroy(allocator, p);
|
||||
#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0)
|
||||
#pragma GCC diagnostic pop
|
||||
#endif
|
||||
if (instance_count_ != nullptr) {
|
||||
*instance_count_ -= 1;
|
||||
}
|
||||
|
|
|
@ -310,7 +310,7 @@ struct StringLikeTest : public ::testing::Test {
|
|||
hash_default_hash<typename T::first_type> hash;
|
||||
};
|
||||
|
||||
TYPED_TEST_CASE_P(StringLikeTest);
|
||||
TYPED_TEST_SUITE_P(StringLikeTest);
|
||||
|
||||
TYPED_TEST_P(StringLikeTest, Eq) {
|
||||
EXPECT_TRUE(this->eq(this->a1, this->b1));
|
||||
|
|
|
@ -21,49 +21,55 @@
|
|||
#include <limits>
|
||||
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/base/internal/exponential_biased.h"
|
||||
#include "absl/container/internal/have_sse.h"
|
||||
#include "absl/base/config.h"
|
||||
#include "absl/debugging/stacktrace.h"
|
||||
#include "absl/memory/memory.h"
|
||||
#include "absl/profiling/internal/exponential_biased.h"
|
||||
#include "absl/profiling/internal/sample_recorder.h"
|
||||
#include "absl/synchronization/mutex.h"
|
||||
#include "absl/utility/utility.h"
|
||||
|
||||
namespace absl {
|
||||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
|
||||
constexpr int HashtablezInfo::kMaxStackDepth;
|
||||
#endif
|
||||
|
||||
namespace {
|
||||
ABSL_CONST_INIT std::atomic<bool> g_hashtablez_enabled{
|
||||
false
|
||||
};
|
||||
ABSL_CONST_INIT std::atomic<int32_t> g_hashtablez_sample_parameter{1 << 10};
|
||||
ABSL_CONST_INIT std::atomic<int32_t> g_hashtablez_max_samples{1 << 20};
|
||||
std::atomic<HashtablezConfigListener> g_hashtablez_config_listener{nullptr};
|
||||
|
||||
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
ABSL_PER_THREAD_TLS_KEYWORD absl::base_internal::ExponentialBiased
|
||||
ABSL_PER_THREAD_TLS_KEYWORD absl::profiling_internal::ExponentialBiased
|
||||
g_exponential_biased_generator;
|
||||
#endif
|
||||
|
||||
void TriggerHashtablezConfigListener() {
|
||||
auto* listener = g_hashtablez_config_listener.load(std::memory_order_acquire);
|
||||
if (listener != nullptr) listener();
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
ABSL_PER_THREAD_TLS_KEYWORD int64_t global_next_sample = 0;
|
||||
ABSL_PER_THREAD_TLS_KEYWORD SamplingState global_next_sample = {0, 0};
|
||||
#endif // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
|
||||
HashtablezSampler& HashtablezSampler::Global() {
|
||||
HashtablezSampler& GlobalHashtablezSampler() {
|
||||
static auto* sampler = new HashtablezSampler();
|
||||
return *sampler;
|
||||
}
|
||||
|
||||
HashtablezSampler::DisposeCallback HashtablezSampler::SetDisposeCallback(
|
||||
DisposeCallback f) {
|
||||
return dispose_.exchange(f, std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
HashtablezInfo::HashtablezInfo() { PrepareForSampling(); }
|
||||
HashtablezInfo::HashtablezInfo() = default;
|
||||
HashtablezInfo::~HashtablezInfo() = default;
|
||||
|
||||
void HashtablezInfo::PrepareForSampling() {
|
||||
void HashtablezInfo::PrepareForSampling(int64_t stride,
|
||||
size_t inline_element_size_value) {
|
||||
capacity.store(0, std::memory_order_relaxed);
|
||||
size.store(0, std::memory_order_relaxed);
|
||||
num_erases.store(0, std::memory_order_relaxed);
|
||||
|
@ -73,100 +79,16 @@ void HashtablezInfo::PrepareForSampling() {
|
|||
hashes_bitwise_or.store(0, std::memory_order_relaxed);
|
||||
hashes_bitwise_and.store(~size_t{}, std::memory_order_relaxed);
|
||||
hashes_bitwise_xor.store(0, std::memory_order_relaxed);
|
||||
max_reserve.store(0, std::memory_order_relaxed);
|
||||
|
||||
create_time = absl::Now();
|
||||
weight = stride;
|
||||
// The inliner makes hardcoded skip_count difficult (especially when combined
|
||||
// with LTO). We use the ability to exclude stacks by regex when encoding
|
||||
// instead.
|
||||
depth = absl::GetStackTrace(stack, HashtablezInfo::kMaxStackDepth,
|
||||
/* skip_count= */ 0);
|
||||
dead = nullptr;
|
||||
}
|
||||
|
||||
HashtablezSampler::HashtablezSampler()
|
||||
: dropped_samples_(0), size_estimate_(0), all_(nullptr), dispose_(nullptr) {
|
||||
absl::MutexLock l(&graveyard_.init_mu);
|
||||
graveyard_.dead = &graveyard_;
|
||||
}
|
||||
|
||||
HashtablezSampler::~HashtablezSampler() {
|
||||
HashtablezInfo* s = all_.load(std::memory_order_acquire);
|
||||
while (s != nullptr) {
|
||||
HashtablezInfo* next = s->next;
|
||||
delete s;
|
||||
s = next;
|
||||
}
|
||||
}
|
||||
|
||||
void HashtablezSampler::PushNew(HashtablezInfo* sample) {
|
||||
sample->next = all_.load(std::memory_order_relaxed);
|
||||
while (!all_.compare_exchange_weak(sample->next, sample,
|
||||
std::memory_order_release,
|
||||
std::memory_order_relaxed)) {
|
||||
}
|
||||
}
|
||||
|
||||
void HashtablezSampler::PushDead(HashtablezInfo* sample) {
|
||||
if (auto* dispose = dispose_.load(std::memory_order_relaxed)) {
|
||||
dispose(*sample);
|
||||
}
|
||||
|
||||
absl::MutexLock graveyard_lock(&graveyard_.init_mu);
|
||||
absl::MutexLock sample_lock(&sample->init_mu);
|
||||
sample->dead = graveyard_.dead;
|
||||
graveyard_.dead = sample;
|
||||
}
|
||||
|
||||
HashtablezInfo* HashtablezSampler::PopDead() {
|
||||
absl::MutexLock graveyard_lock(&graveyard_.init_mu);
|
||||
|
||||
// The list is circular, so eventually it collapses down to
|
||||
// graveyard_.dead == &graveyard_
|
||||
// when it is empty.
|
||||
HashtablezInfo* sample = graveyard_.dead;
|
||||
if (sample == &graveyard_) return nullptr;
|
||||
|
||||
absl::MutexLock sample_lock(&sample->init_mu);
|
||||
graveyard_.dead = sample->dead;
|
||||
sample->PrepareForSampling();
|
||||
return sample;
|
||||
}
|
||||
|
||||
HashtablezInfo* HashtablezSampler::Register() {
|
||||
int64_t size = size_estimate_.fetch_add(1, std::memory_order_relaxed);
|
||||
if (size > g_hashtablez_max_samples.load(std::memory_order_relaxed)) {
|
||||
size_estimate_.fetch_sub(1, std::memory_order_relaxed);
|
||||
dropped_samples_.fetch_add(1, std::memory_order_relaxed);
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
HashtablezInfo* sample = PopDead();
|
||||
if (sample == nullptr) {
|
||||
// Resurrection failed. Hire a new warlock.
|
||||
sample = new HashtablezInfo();
|
||||
PushNew(sample);
|
||||
}
|
||||
|
||||
return sample;
|
||||
}
|
||||
|
||||
void HashtablezSampler::Unregister(HashtablezInfo* sample) {
|
||||
PushDead(sample);
|
||||
size_estimate_.fetch_sub(1, std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
int64_t HashtablezSampler::Iterate(
|
||||
const std::function<void(const HashtablezInfo& stack)>& f) {
|
||||
HashtablezInfo* s = all_.load(std::memory_order_acquire);
|
||||
while (s != nullptr) {
|
||||
absl::MutexLock l(&s->init_mu);
|
||||
if (s->dead == nullptr) {
|
||||
f(*s);
|
||||
}
|
||||
s = s->next;
|
||||
}
|
||||
|
||||
return dropped_samples_.load(std::memory_order_relaxed);
|
||||
inline_element_size = inline_element_size_value;
|
||||
}
|
||||
|
||||
static bool ShouldForceSampling() {
|
||||
|
@ -189,21 +111,32 @@ static bool ShouldForceSampling() {
|
|||
return state == kForce;
|
||||
}
|
||||
|
||||
HashtablezInfo* SampleSlow(int64_t* next_sample) {
|
||||
HashtablezInfo* SampleSlow(SamplingState& next_sample,
|
||||
size_t inline_element_size) {
|
||||
if (ABSL_PREDICT_FALSE(ShouldForceSampling())) {
|
||||
*next_sample = 1;
|
||||
return HashtablezSampler::Global().Register();
|
||||
next_sample.next_sample = 1;
|
||||
const int64_t old_stride = exchange(next_sample.sample_stride, 1);
|
||||
HashtablezInfo* result =
|
||||
GlobalHashtablezSampler().Register(old_stride, inline_element_size);
|
||||
return result;
|
||||
}
|
||||
|
||||
#if !defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
*next_sample = std::numeric_limits<int64_t>::max();
|
||||
next_sample = {
|
||||
std::numeric_limits<int64_t>::max(),
|
||||
std::numeric_limits<int64_t>::max(),
|
||||
};
|
||||
return nullptr;
|
||||
#else
|
||||
bool first = *next_sample < 0;
|
||||
*next_sample = g_exponential_biased_generator.GetStride(
|
||||
bool first = next_sample.next_sample < 0;
|
||||
|
||||
const int64_t next_stride = g_exponential_biased_generator.GetStride(
|
||||
g_hashtablez_sample_parameter.load(std::memory_order_relaxed));
|
||||
|
||||
next_sample.next_sample = next_stride;
|
||||
const int64_t old_stride = exchange(next_sample.sample_stride, next_stride);
|
||||
// Small values of interval are equivalent to just sampling next time.
|
||||
ABSL_ASSERT(*next_sample >= 1);
|
||||
ABSL_ASSERT(next_stride >= 1);
|
||||
|
||||
// g_hashtablez_enabled can be dynamically flipped, we need to set a threshold
|
||||
// low enough that we will start sampling in a reasonable time, so we just use
|
||||
|
@ -213,16 +146,16 @@ HashtablezInfo* SampleSlow(int64_t* next_sample) {
|
|||
// We will only be negative on our first count, so we should just retry in
|
||||
// that case.
|
||||
if (first) {
|
||||
if (ABSL_PREDICT_TRUE(--*next_sample > 0)) return nullptr;
|
||||
return SampleSlow(next_sample);
|
||||
if (ABSL_PREDICT_TRUE(--next_sample.next_sample > 0)) return nullptr;
|
||||
return SampleSlow(next_sample, inline_element_size);
|
||||
}
|
||||
|
||||
return HashtablezSampler::Global().Register();
|
||||
return GlobalHashtablezSampler().Register(old_stride, inline_element_size);
|
||||
#endif
|
||||
}
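A hedged, stand-alone sketch (all names invented) of the per-thread countdown sampling that SampleSlow implements above: decrement a counter on every event, and only when it reaches zero take a sample and draw a fresh geometric stride. The real code additionally hands the elapsed stride back so each sample can be weighted.

#include <cstdint>
#include <cstdio>
#include <random>

struct LocalSamplingState {
  int64_t next_sample = 0;    // events remaining until the next sample
  int64_t sample_stride = 0;  // stride drawn for the current interval
};

thread_local LocalSamplingState tls_state;

int64_t DrawStride(int64_t mean) {
  thread_local std::mt19937_64 rng{std::random_device{}()};
  std::geometric_distribution<int64_t> dist(1.0 / static_cast<double>(mean));
  return dist(rng) + 1;  // a stride is always at least 1
}

bool ShouldSample(int64_t mean_interval) {
  if (--tls_state.next_sample > 0) return false;  // the common fast path
  tls_state.sample_stride = DrawStride(mean_interval);
  tls_state.next_sample = tls_state.sample_stride;
  return true;
}

int main() {
  int64_t sampled = 0;
  for (int i = 0; i < 1000000; ++i) sampled += ShouldSample(1024) ? 1 : 0;
  std::printf("sampled %lld of 1000000 events\n",
              static_cast<long long>(sampled));
}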
|
||||
|
||||
void UnsampleSlow(HashtablezInfo* info) {
|
||||
HashtablezSampler::Global().Unregister(info);
|
||||
GlobalHashtablezSampler().Unregister(info);
|
||||
}
|
||||
|
||||
void RecordInsertSlow(HashtablezInfo* info, size_t hash,
|
||||
|
@ -230,7 +163,7 @@ void RecordInsertSlow(HashtablezInfo* info, size_t hash,
|
|||
// SwissTables probe in groups of 16, so scale this to count items probes and
|
||||
// not offset from desired.
|
||||
size_t probe_length = distance_from_desired;
|
||||
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
|
||||
#ifdef ABSL_INTERNAL_HAVE_SSE2
|
||||
probe_length /= 16;
|
||||
#else
|
||||
probe_length /= 8;
|
||||
|
@ -247,11 +180,33 @@ void RecordInsertSlow(HashtablezInfo* info, size_t hash,
|
|||
info->size.fetch_add(1, std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
void SetHashtablezConfigListener(HashtablezConfigListener l) {
|
||||
g_hashtablez_config_listener.store(l, std::memory_order_release);
|
||||
}
|
||||
|
||||
bool IsHashtablezEnabled() {
|
||||
return g_hashtablez_enabled.load(std::memory_order_acquire);
|
||||
}
|
||||
|
||||
void SetHashtablezEnabled(bool enabled) {
|
||||
SetHashtablezEnabledInternal(enabled);
|
||||
TriggerHashtablezConfigListener();
|
||||
}
|
||||
|
||||
void SetHashtablezEnabledInternal(bool enabled) {
|
||||
g_hashtablez_enabled.store(enabled, std::memory_order_release);
|
||||
}
|
||||
|
||||
int32_t GetHashtablezSampleParameter() {
|
||||
return g_hashtablez_sample_parameter.load(std::memory_order_acquire);
|
||||
}
|
||||
|
||||
void SetHashtablezSampleParameter(int32_t rate) {
|
||||
SetHashtablezSampleParameterInternal(rate);
|
||||
TriggerHashtablezConfigListener();
|
||||
}
|
||||
|
||||
void SetHashtablezSampleParameterInternal(int32_t rate) {
|
||||
if (rate > 0) {
|
||||
g_hashtablez_sample_parameter.store(rate, std::memory_order_release);
|
||||
} else {
|
||||
|
@ -260,9 +215,18 @@ void SetHashtablezSampleParameter(int32_t rate) {
|
|||
}
|
||||
}
|
||||
|
||||
int32_t GetHashtablezMaxSamples() {
|
||||
return GlobalHashtablezSampler().GetMaxSamples();
|
||||
}
|
||||
|
||||
void SetHashtablezMaxSamples(int32_t max) {
|
||||
SetHashtablezMaxSamplesInternal(max);
|
||||
TriggerHashtablezConfigListener();
|
||||
}
|
||||
|
||||
void SetHashtablezMaxSamplesInternal(int32_t max) {
|
||||
if (max > 0) {
|
||||
g_hashtablez_max_samples.store(max, std::memory_order_release);
|
||||
GlobalHashtablezSampler().SetMaxSamples(max);
|
||||
} else {
|
||||
ABSL_RAW_LOG(ERROR, "Invalid hashtablez max samples: %lld",
|
||||
static_cast<long long>(max)); // NOLINT(runtime/int)
|
||||
|
|
|
@ -44,9 +44,10 @@
|
|||
#include <memory>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/base/config.h"
|
||||
#include "absl/base/internal/per_thread_tls.h"
|
||||
#include "absl/base/optimization.h"
|
||||
#include "absl/container/internal/have_sse.h"
|
||||
#include "absl/profiling/internal/sample_recorder.h"
|
||||
#include "absl/synchronization/mutex.h"
|
||||
#include "absl/utility/utility.h"
|
||||
|
||||
|
@ -57,7 +58,7 @@ namespace container_internal {
|
|||
// Stores information about a sampled hashtable. All mutations to this *must*
|
||||
// be made through `Record*` functions below. All reads from this *must* only
|
||||
// occur in the callback to `HashtablezSampler::Iterate`.
|
||||
struct HashtablezInfo {
|
||||
struct HashtablezInfo : public profiling_internal::Sample<HashtablezInfo> {
|
||||
// Constructs the object but does not fill in any fields.
|
||||
HashtablezInfo();
|
||||
~HashtablezInfo();
|
||||
|
@ -66,7 +67,8 @@ struct HashtablezInfo {
|
|||
|
||||
// Puts the object into a clean state, fills in the logically `const` members,
|
||||
// blocking for any readers that are currently sampling the object.
|
||||
void PrepareForSampling() ABSL_EXCLUSIVE_LOCKS_REQUIRED(init_mu);
|
||||
void PrepareForSampling(int64_t stride, size_t inline_element_size_value)
|
||||
ABSL_EXCLUSIVE_LOCKS_REQUIRED(init_mu);
|
||||
|
||||
// These fields are mutated by the various Record* APIs and need to be
|
||||
// thread-safe.
|
||||
|
@ -79,28 +81,22 @@ struct HashtablezInfo {
|
|||
std::atomic<size_t> hashes_bitwise_or;
|
||||
std::atomic<size_t> hashes_bitwise_and;
|
||||
std::atomic<size_t> hashes_bitwise_xor;
|
||||
|
||||
// `HashtablezSampler` maintains intrusive linked lists for all samples. See
|
||||
// comments on `HashtablezSampler::all_` for details on these. `init_mu`
|
||||
// guards the ability to restore the sample to a pristine state. This
|
||||
// prevents races with sampling and resurrecting an object.
|
||||
absl::Mutex init_mu;
|
||||
HashtablezInfo* next;
|
||||
HashtablezInfo* dead ABSL_GUARDED_BY(init_mu);
|
||||
std::atomic<size_t> max_reserve;
|
||||
|
||||
// All of the fields below are set by `PrepareForSampling`, they must not be
|
||||
// mutated in `Record*` functions. They are logically `const` in that sense.
|
||||
// These are guarded by init_mu, but that is not externalized to clients, who
|
||||
// can only read them during `HashtablezSampler::Iterate` which will hold the
|
||||
// lock.
|
||||
// These are guarded by init_mu, but that is not externalized to clients,
|
||||
// which can read them only during `SampleRecorder::Iterate` which will hold
|
||||
// the lock.
|
||||
static constexpr int kMaxStackDepth = 64;
|
||||
absl::Time create_time;
|
||||
int32_t depth;
|
||||
void* stack[kMaxStackDepth];
|
||||
size_t inline_element_size; // How big is the slot?
|
||||
};
|
||||
|
||||
inline void RecordRehashSlow(HashtablezInfo* info, size_t total_probe_length) {
|
||||
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
|
||||
#ifdef ABSL_INTERNAL_HAVE_SSE2
|
||||
total_probe_length /= 16;
|
||||
#else
|
||||
total_probe_length /= 8;
|
||||
|
@ -114,6 +110,18 @@ inline void RecordRehashSlow(HashtablezInfo* info, size_t total_probe_length) {
|
|||
std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
inline void RecordReservationSlow(HashtablezInfo* info,
|
||||
size_t target_capacity) {
|
||||
info->max_reserve.store(
|
||||
(std::max)(info->max_reserve.load(std::memory_order_relaxed),
|
||||
target_capacity),
|
||||
std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
inline void RecordClearedReservationSlow(HashtablezInfo* info) {
|
||||
info->max_reserve.store(0, std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
inline void RecordStorageChangedSlow(HashtablezInfo* info, size_t size,
|
||||
size_t capacity) {
|
||||
info->size.store(size, std::memory_order_relaxed);
|
||||
|
@ -137,7 +145,15 @@ inline void RecordEraseSlow(HashtablezInfo* info) {
|
|||
std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
HashtablezInfo* SampleSlow(int64_t* next_sample);
|
||||
struct SamplingState {
|
||||
int64_t next_sample;
|
||||
// When we make a sampling decision, we record that distance so we can weight
|
||||
// each sample.
|
||||
int64_t sample_stride;
|
||||
};
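The sample_stride comment above is the key to interpreting the data: a sample taken once every `stride` events stands in for roughly `stride` events, so summing the recorded strides estimates the total. A tiny sketch with made-up numbers:

#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  std::vector<int64_t> sample_weights = {1024, 980, 1103, 1010};  // made up
  int64_t estimated_total = 0;
  for (int64_t w : sample_weights) estimated_total += w;
  std::printf("estimated events ~= %lld\n",
              static_cast<long long>(estimated_total));
}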
|
||||
|
||||
HashtablezInfo* SampleSlow(SamplingState& next_sample,
|
||||
size_t inline_element_size);
|
||||
void UnsampleSlow(HashtablezInfo* info);
|
||||
|
||||
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
|
@ -177,6 +193,16 @@ class HashtablezInfoHandle {
|
|||
RecordRehashSlow(info_, total_probe_length);
|
||||
}
|
||||
|
||||
inline void RecordReservation(size_t target_capacity) {
|
||||
if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
|
||||
RecordReservationSlow(info_, target_capacity);
|
||||
}
|
||||
|
||||
inline void RecordClearedReservation() {
|
||||
if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
|
||||
RecordClearedReservationSlow(info_);
|
||||
}
|
||||
|
||||
inline void RecordInsert(size_t hash, size_t distance_from_desired) {
|
||||
if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
|
||||
RecordInsertSlow(info_, hash, distance_from_desired);
|
||||
|
@ -206,6 +232,8 @@ class HashtablezInfoHandle {
|
|||
|
||||
inline void RecordStorageChanged(size_t /*size*/, size_t /*capacity*/) {}
|
||||
inline void RecordRehash(size_t /*total_probe_length*/) {}
|
||||
inline void RecordReservation(size_t /*target_capacity*/) {}
|
||||
inline void RecordClearedReservation() {}
|
||||
inline void RecordInsert(size_t /*hash*/, size_t /*distance_from_desired*/) {}
|
||||
inline void RecordErase() {}
|
||||
|
||||
|
@ -215,98 +243,47 @@ class HashtablezInfoHandle {
|
|||
#endif // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
|
||||
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
extern ABSL_PER_THREAD_TLS_KEYWORD int64_t global_next_sample;
|
||||
extern ABSL_PER_THREAD_TLS_KEYWORD SamplingState global_next_sample;
|
||||
#endif // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
|
||||
// Returns an RAII sampling handle that manages registration and unregistration
|
||||
// with the global sampler.
|
||||
inline HashtablezInfoHandle Sample() {
|
||||
inline HashtablezInfoHandle Sample(
|
||||
size_t inline_element_size ABSL_ATTRIBUTE_UNUSED) {
|
||||
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
if (ABSL_PREDICT_TRUE(--global_next_sample > 0)) {
|
||||
if (ABSL_PREDICT_TRUE(--global_next_sample.next_sample > 0)) {
|
||||
return HashtablezInfoHandle(nullptr);
|
||||
}
|
||||
return HashtablezInfoHandle(SampleSlow(&global_next_sample));
|
||||
return HashtablezInfoHandle(
|
||||
SampleSlow(global_next_sample, inline_element_size));
|
||||
#else
|
||||
return HashtablezInfoHandle(nullptr);
|
||||
#endif // !ABSL_PER_THREAD_TLS
|
||||
}
|
||||
|
||||
// Holds samples and their associated stack traces with a soft limit of
|
||||
// `SetHashtablezMaxSamples()`.
|
||||
//
|
||||
// Thread safe.
|
||||
class HashtablezSampler {
|
||||
public:
|
||||
// Returns a global Sampler.
|
||||
static HashtablezSampler& Global();
|
||||
using HashtablezSampler =
|
||||
::absl::profiling_internal::SampleRecorder<HashtablezInfo>;
|
||||
|
||||
HashtablezSampler();
|
||||
~HashtablezSampler();
|
||||
// Returns a global Sampler.
|
||||
HashtablezSampler& GlobalHashtablezSampler();
|
||||
|
||||
// Registers for sampling. Returns an opaque registration info.
|
||||
HashtablezInfo* Register();
|
||||
|
||||
// Unregisters the sample.
|
||||
void Unregister(HashtablezInfo* sample);
|
||||
|
||||
// The dispose callback will be called on all samples the moment they are
|
||||
// being unregistered. Only affects samples that are unregistered after the
|
||||
// callback has been set.
|
||||
// Returns the previous callback.
|
||||
using DisposeCallback = void (*)(const HashtablezInfo&);
|
||||
DisposeCallback SetDisposeCallback(DisposeCallback f);
|
||||
|
||||
// Iterates over all the registered `StackInfo`s, returning the number of
|
||||
// samples that have been dropped.
|
||||
int64_t Iterate(const std::function<void(const HashtablezInfo& stack)>& f);
|
||||
|
||||
private:
|
||||
void PushNew(HashtablezInfo* sample);
|
||||
void PushDead(HashtablezInfo* sample);
|
||||
HashtablezInfo* PopDead();
|
||||
|
||||
std::atomic<size_t> dropped_samples_;
|
||||
std::atomic<size_t> size_estimate_;
|
||||
|
||||
// Intrusive lock free linked lists for tracking samples.
|
||||
//
|
||||
// `all_` records all samples (they are never removed from this list) and is
|
||||
// terminated with a `nullptr`.
|
||||
//
|
||||
// `graveyard_.dead` is a circular linked list. When it is empty,
|
||||
// `graveyard_.dead == &graveyard`. The list is circular so that
|
||||
// every item on it (even the last) has a non-null dead pointer. This allows
|
||||
// `Iterate` to determine if a given sample is live or dead using only
|
||||
// information on the sample itself.
|
||||
//
|
||||
// For example, nodes [A, B, C, D, E] with [A, C, E] alive and [B, D] dead
|
||||
// looks like this (G is the Graveyard):
|
||||
//
|
||||
// +---+ +---+ +---+ +---+ +---+
|
||||
// all -->| A |--->| B |--->| C |--->| D |--->| E |
|
||||
// | | | | | | | | | |
|
||||
// +---+ | | +->| |-+ | | +->| |-+ | |
|
||||
// | G | +---+ | +---+ | +---+ | +---+ | +---+
|
||||
// | | | | | |
|
||||
// | | --------+ +--------+ |
|
||||
// +---+ |
|
||||
// ^ |
|
||||
// +--------------------------------------+
|
||||
//
|
||||
std::atomic<HashtablezInfo*> all_;
|
||||
HashtablezInfo graveyard_;
|
||||
|
||||
std::atomic<DisposeCallback> dispose_;
|
||||
};
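The graveyard diagram above boils down to a self-referential circular list. A minimal sketch (not the Abseil code) of the same idea: the head points at itself when empty, so every dead node carries a non-null pointer and liveness is a single null check.

#include <cstdio>

struct Node {
  Node* dead = nullptr;  // nullptr means "live"; non-null means "in graveyard"
  int id = 0;
};

struct Graveyard {
  Node head;
  Graveyard() { head.dead = &head; }  // empty list points at itself

  void Push(Node* n) {
    n->dead = head.dead;
    head.dead = n;
  }
  Node* Pop() {
    Node* n = head.dead;
    if (n == &head) return nullptr;  // collapsed back to the empty state
    head.dead = n->dead;
    n->dead = nullptr;  // the node is live again
    return n;
  }
};

int main() {
  Graveyard g;
  Node a, b;
  a.id = 1;
  b.id = 2;
  g.Push(&a);
  g.Push(&b);
  while (Node* n = g.Pop()) std::printf("revived node %d\n", n->id);
}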
|
||||
using HashtablezConfigListener = void (*)();
|
||||
void SetHashtablezConfigListener(HashtablezConfigListener l);
|
||||
|
||||
// Enables or disables sampling for Swiss tables.
|
||||
bool IsHashtablezEnabled();
|
||||
void SetHashtablezEnabled(bool enabled);
|
||||
void SetHashtablezEnabledInternal(bool enabled);
|
||||
|
||||
// Sets the rate at which Swiss tables will be sampled.
|
||||
int32_t GetHashtablezSampleParameter();
|
||||
void SetHashtablezSampleParameter(int32_t rate);
|
||||
void SetHashtablezSampleParameterInternal(int32_t rate);
|
||||
|
||||
// Sets a soft max for the number of samples that will be kept.
|
||||
int32_t GetHashtablezMaxSamples();
|
||||
void SetHashtablezMaxSamples(int32_t max);
|
||||
void SetHashtablezMaxSamplesInternal(int32_t max);
|
||||
|
||||
// Configuration override.
|
||||
// This allows process-wide sampling without depending on order of
|
||||
|
|
|
@ -21,7 +21,8 @@
|
|||
#include "gmock/gmock.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/container/internal/have_sse.h"
|
||||
#include "absl/base/config.h"
|
||||
#include "absl/profiling/internal/sample_recorder.h"
|
||||
#include "absl/synchronization/blocking_counter.h"
|
||||
#include "absl/synchronization/internal/thread_pool.h"
|
||||
#include "absl/synchronization/mutex.h"
|
||||
|
@ -29,7 +30,7 @@
|
|||
#include "absl/time/clock.h"
|
||||
#include "absl/time/time.h"
|
||||
|
||||
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
|
||||
#ifdef ABSL_INTERNAL_HAVE_SSE2
|
||||
constexpr int kProbeLength = 16;
|
||||
#else
|
||||
constexpr int kProbeLength = 8;
|
||||
|
@ -69,7 +70,9 @@ std::vector<size_t> GetSizes(HashtablezSampler* s) {
|
|||
}
|
||||
|
||||
HashtablezInfo* Register(HashtablezSampler* s, size_t size) {
|
||||
auto* info = s->Register();
|
||||
const int64_t test_stride = 123;
|
||||
const size_t test_element_size = 17;
|
||||
auto* info = s->Register(test_stride, test_element_size);
|
||||
assert(info != nullptr);
|
||||
info->size.store(size);
|
||||
return info;
|
||||
|
@ -77,9 +80,11 @@ HashtablezInfo* Register(HashtablezSampler* s, size_t size) {
|
|||
|
||||
TEST(HashtablezInfoTest, PrepareForSampling) {
|
||||
absl::Time test_start = absl::Now();
|
||||
const int64_t test_stride = 123;
|
||||
const size_t test_element_size = 17;
|
||||
HashtablezInfo info;
|
||||
absl::MutexLock l(&info.init_mu);
|
||||
info.PrepareForSampling();
|
||||
info.PrepareForSampling(test_stride, test_element_size);
|
||||
|
||||
EXPECT_EQ(info.capacity.load(), 0);
|
||||
EXPECT_EQ(info.size.load(), 0);
|
||||
|
@ -90,7 +95,10 @@ TEST(HashtablezInfoTest, PrepareForSampling) {
|
|||
EXPECT_EQ(info.hashes_bitwise_or.load(), 0);
|
||||
EXPECT_EQ(info.hashes_bitwise_and.load(), ~size_t{});
|
||||
EXPECT_EQ(info.hashes_bitwise_xor.load(), 0);
|
||||
EXPECT_EQ(info.max_reserve.load(), 0);
|
||||
EXPECT_GE(info.create_time, test_start);
|
||||
EXPECT_EQ(info.weight, test_stride);
|
||||
EXPECT_EQ(info.inline_element_size, test_element_size);
|
||||
|
||||
info.capacity.store(1, std::memory_order_relaxed);
|
||||
info.size.store(1, std::memory_order_relaxed);
|
||||
|
@ -100,9 +108,10 @@ TEST(HashtablezInfoTest, PrepareForSampling) {
|
|||
info.hashes_bitwise_or.store(1, std::memory_order_relaxed);
|
||||
info.hashes_bitwise_and.store(1, std::memory_order_relaxed);
|
||||
info.hashes_bitwise_xor.store(1, std::memory_order_relaxed);
|
||||
info.max_reserve.store(1, std::memory_order_relaxed);
|
||||
info.create_time = test_start - absl::Hours(20);
|
||||
|
||||
info.PrepareForSampling();
|
||||
info.PrepareForSampling(test_stride * 2, test_element_size);
|
||||
EXPECT_EQ(info.capacity.load(), 0);
|
||||
EXPECT_EQ(info.size.load(), 0);
|
||||
EXPECT_EQ(info.num_erases.load(), 0);
|
||||
|
@ -112,13 +121,18 @@ TEST(HashtablezInfoTest, PrepareForSampling) {
|
|||
EXPECT_EQ(info.hashes_bitwise_or.load(), 0);
|
||||
EXPECT_EQ(info.hashes_bitwise_and.load(), ~size_t{});
|
||||
EXPECT_EQ(info.hashes_bitwise_xor.load(), 0);
|
||||
EXPECT_EQ(info.max_reserve.load(), 0);
|
||||
EXPECT_EQ(info.weight, 2 * test_stride);
|
||||
EXPECT_EQ(info.inline_element_size, test_element_size);
|
||||
EXPECT_GE(info.create_time, test_start);
|
||||
}
|
||||
|
||||
TEST(HashtablezInfoTest, RecordStorageChanged) {
|
||||
HashtablezInfo info;
|
||||
absl::MutexLock l(&info.init_mu);
|
||||
info.PrepareForSampling();
|
||||
const int64_t test_stride = 21;
|
||||
const size_t test_element_size = 19;
|
||||
info.PrepareForSampling(test_stride, test_element_size);
|
||||
RecordStorageChangedSlow(&info, 17, 47);
|
||||
EXPECT_EQ(info.size.load(), 17);
|
||||
EXPECT_EQ(info.capacity.load(), 47);
|
||||
|
@ -130,7 +144,9 @@ TEST(HashtablezInfoTest, RecordStorageChanged) {
|
|||
TEST(HashtablezInfoTest, RecordInsert) {
|
||||
HashtablezInfo info;
|
||||
absl::MutexLock l(&info.init_mu);
|
||||
info.PrepareForSampling();
|
||||
const int64_t test_stride = 25;
|
||||
const size_t test_element_size = 23;
|
||||
info.PrepareForSampling(test_stride, test_element_size);
|
||||
EXPECT_EQ(info.max_probe_length.load(), 0);
|
||||
RecordInsertSlow(&info, 0x0000FF00, 6 * kProbeLength);
|
||||
EXPECT_EQ(info.max_probe_length.load(), 6);
|
||||
|
@ -150,9 +166,11 @@ TEST(HashtablezInfoTest, RecordInsert) {
|
|||
}
|
||||
|
||||
TEST(HashtablezInfoTest, RecordErase) {
|
||||
const int64_t test_stride = 31;
|
||||
const size_t test_element_size = 29;
|
||||
HashtablezInfo info;
|
||||
absl::MutexLock l(&info.init_mu);
|
||||
info.PrepareForSampling();
|
||||
info.PrepareForSampling(test_stride, test_element_size);
|
||||
EXPECT_EQ(info.num_erases.load(), 0);
|
||||
EXPECT_EQ(info.size.load(), 0);
|
||||
RecordInsertSlow(&info, 0x0000FF00, 6 * kProbeLength);
|
||||
|
@ -160,12 +178,15 @@ TEST(HashtablezInfoTest, RecordErase) {
|
|||
RecordEraseSlow(&info);
|
||||
EXPECT_EQ(info.size.load(), 0);
|
||||
EXPECT_EQ(info.num_erases.load(), 1);
|
||||
EXPECT_EQ(info.inline_element_size, test_element_size);
|
||||
}
|
||||
|
||||
TEST(HashtablezInfoTest, RecordRehash) {
|
||||
const int64_t test_stride = 33;
|
||||
const size_t test_element_size = 31;
|
||||
HashtablezInfo info;
|
||||
absl::MutexLock l(&info.init_mu);
|
||||
info.PrepareForSampling();
|
||||
info.PrepareForSampling(test_stride, test_element_size);
|
||||
RecordInsertSlow(&info, 0x1, 0);
|
||||
RecordInsertSlow(&info, 0x2, kProbeLength);
|
||||
RecordInsertSlow(&info, 0x4, kProbeLength);
|
||||
|
@ -184,43 +205,67 @@ TEST(HashtablezInfoTest, RecordRehash) {
|
|||
EXPECT_EQ(info.total_probe_length.load(), 3);
|
||||
EXPECT_EQ(info.num_erases.load(), 0);
|
||||
EXPECT_EQ(info.num_rehashes.load(), 1);
|
||||
EXPECT_EQ(info.inline_element_size, test_element_size);
|
||||
}
|
||||
|
||||
TEST(HashtablezInfoTest, RecordReservation) {
|
||||
HashtablezInfo info;
|
||||
absl::MutexLock l(&info.init_mu);
|
||||
const int64_t test_stride = 35;
|
||||
const size_t test_element_size = 33;
|
||||
info.PrepareForSampling(test_stride, test_element_size);
|
||||
RecordReservationSlow(&info, 3);
|
||||
EXPECT_EQ(info.max_reserve.load(), 3);
|
||||
|
||||
RecordReservationSlow(&info, 2);
|
||||
// High watermark does not change
|
||||
EXPECT_EQ(info.max_reserve.load(), 3);
|
||||
|
||||
RecordReservationSlow(&info, 10);
|
||||
// High watermark does change
|
||||
EXPECT_EQ(info.max_reserve.load(), 10);
|
||||
}
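The test above exercises a high-watermark that only moves upward. As a hedged alternative to the relaxed load/max/store used by RecordReservationSlow, the same invariant can be kept with a compare-exchange loop:

#include <atomic>
#include <cstddef>
#include <cstdio>

void UpdateMax(std::atomic<size_t>& watermark, size_t candidate) {
  size_t current = watermark.load(std::memory_order_relaxed);
  while (candidate > current &&
         !watermark.compare_exchange_weak(current, candidate,
                                          std::memory_order_relaxed)) {
    // compare_exchange_weak reloaded `current`; retry until the store sticks.
  }
}

int main() {
  std::atomic<size_t> max_reserve{0};
  UpdateMax(max_reserve, 3);
  UpdateMax(max_reserve, 2);   // below the watermark: no change
  UpdateMax(max_reserve, 10);  // above the watermark: raises it
  std::printf("max_reserve=%zu\n", max_reserve.load());
}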
|
||||
|
||||
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
|
||||
TEST(HashtablezSamplerTest, SmallSampleParameter) {
|
||||
const size_t test_element_size = 31;
|
||||
SetHashtablezEnabled(true);
|
||||
SetHashtablezSampleParameter(100);
|
||||
|
||||
for (int i = 0; i < 1000; ++i) {
|
||||
int64_t next_sample = 0;
|
||||
HashtablezInfo* sample = SampleSlow(&next_sample);
|
||||
EXPECT_GT(next_sample, 0);
|
||||
SamplingState next_sample = {0, 0};
|
||||
HashtablezInfo* sample = SampleSlow(next_sample, test_element_size);
|
||||
EXPECT_GT(next_sample.next_sample, 0);
|
||||
EXPECT_EQ(next_sample.next_sample, next_sample.sample_stride);
|
||||
EXPECT_NE(sample, nullptr);
|
||||
UnsampleSlow(sample);
|
||||
}
|
||||
}
|
||||
|
||||
TEST(HashtablezSamplerTest, LargeSampleParameter) {
|
||||
const size_t test_element_size = 31;
|
||||
SetHashtablezEnabled(true);
|
||||
SetHashtablezSampleParameter(std::numeric_limits<int32_t>::max());
|
||||
|
||||
for (int i = 0; i < 1000; ++i) {
|
||||
int64_t next_sample = 0;
|
||||
HashtablezInfo* sample = SampleSlow(&next_sample);
|
||||
EXPECT_GT(next_sample, 0);
|
||||
SamplingState next_sample = {0, 0};
|
||||
HashtablezInfo* sample = SampleSlow(next_sample, test_element_size);
|
||||
EXPECT_GT(next_sample.next_sample, 0);
|
||||
EXPECT_EQ(next_sample.next_sample, next_sample.sample_stride);
|
||||
EXPECT_NE(sample, nullptr);
|
||||
UnsampleSlow(sample);
|
||||
}
|
||||
}
|
||||
|
||||
TEST(HashtablezSamplerTest, Sample) {
|
||||
const size_t test_element_size = 31;
|
||||
SetHashtablezEnabled(true);
|
||||
SetHashtablezSampleParameter(100);
|
||||
int64_t num_sampled = 0;
|
||||
int64_t total = 0;
|
||||
double sample_rate = 0.0;
|
||||
for (int i = 0; i < 1000000; ++i) {
|
||||
HashtablezInfoHandle h = Sample();
|
||||
HashtablezInfoHandle h = Sample(test_element_size);
|
||||
++total;
|
||||
if (HashtablezInfoHandlePeer::IsSampled(h)) {
|
||||
++num_sampled;
|
||||
|
@ -232,14 +277,17 @@ TEST(HashtablezSamplerTest, Sample) {
|
|||
}
|
||||
|
||||
TEST(HashtablezSamplerTest, Handle) {
|
||||
auto& sampler = HashtablezSampler::Global();
|
||||
HashtablezInfoHandle h(sampler.Register());
|
||||
auto& sampler = GlobalHashtablezSampler();
|
||||
const int64_t test_stride = 41;
|
||||
const size_t test_element_size = 39;
|
||||
HashtablezInfoHandle h(sampler.Register(test_stride, test_element_size));
|
||||
auto* info = HashtablezInfoHandlePeer::GetInfo(&h);
|
||||
info->hashes_bitwise_and.store(0x12345678, std::memory_order_relaxed);
|
||||
|
||||
bool found = false;
|
||||
sampler.Iterate([&](const HashtablezInfo& h) {
|
||||
if (&h == info) {
|
||||
EXPECT_EQ(h.weight, test_stride);
|
||||
EXPECT_EQ(h.hashes_bitwise_and.load(), 0x12345678);
|
||||
found = true;
|
||||
}
|
||||
|
@ -305,18 +353,20 @@ TEST(HashtablezSamplerTest, MultiThreaded) {
|
|||
ThreadPool pool(10);
|
||||
|
||||
for (int i = 0; i < 10; ++i) {
|
||||
pool.Schedule([&sampler, &stop]() {
|
||||
const int64_t sampling_stride = 11 + i % 3;
|
||||
const size_t elt_size = 10 + i % 2;
|
||||
pool.Schedule([&sampler, &stop, sampling_stride, elt_size]() {
|
||||
std::random_device rd;
|
||||
std::mt19937 gen(rd());
|
||||
|
||||
std::vector<HashtablezInfo*> infoz;
|
||||
while (!stop.HasBeenNotified()) {
|
||||
if (infoz.empty()) {
|
||||
infoz.push_back(sampler.Register());
|
||||
infoz.push_back(sampler.Register(sampling_stride, elt_size));
|
||||
}
|
||||
switch (std::uniform_int_distribution<>(0, 2)(gen)) {
|
||||
case 0: {
|
||||
infoz.push_back(sampler.Register());
|
||||
infoz.push_back(sampler.Register(sampling_stride, elt_size));
|
||||
break;
|
||||
}
|
||||
case 1: {
|
||||
|
@ -325,6 +375,7 @@ TEST(HashtablezSamplerTest, MultiThreaded) {
|
|||
HashtablezInfo* info = infoz[p];
|
||||
infoz[p] = infoz.back();
|
||||
infoz.pop_back();
|
||||
EXPECT_EQ(info->weight, sampling_stride);
|
||||
sampler.Unregister(info);
|
||||
break;
|
||||
}
|
||||
|
|
|
@ -1,50 +0,0 @@
|
|||
// Copyright 2018 The Abseil Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// https://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// Shared config probing for SSE instructions used in Swiss tables.
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_HAVE_SSE_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_HAVE_SSE_H_
|
||||
|
||||
#ifndef ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
|
||||
#if defined(__SSE2__) || \
|
||||
(defined(_MSC_VER) && \
|
||||
(defined(_M_X64) || (defined(_M_IX86) && _M_IX86_FP >= 2)))
|
||||
#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 1
|
||||
#else
|
||||
#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2 0
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifndef ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3
|
||||
#ifdef __SSSE3__
|
||||
#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 1
|
||||
#else
|
||||
#define ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 0
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3 && \
|
||||
!ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
|
||||
#error "Bad configuration!"
|
||||
#endif
|
||||
|
||||
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSE2
|
||||
#include <emmintrin.h>
|
||||
#endif
|
||||
|
||||
#if ABSL_INTERNAL_RAW_HASH_SET_HAVE_SSSE3
|
||||
#include <tmmintrin.h>
|
||||
#endif
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_HAVE_SSE_H_
|
File diff suppressed because it is too large
|
@ -1350,7 +1350,13 @@ TEST(Layout, CustomAlignment) {
TEST(Layout, OverAligned) {
constexpr size_t M = alignof(max_align_t);
constexpr Layout<unsigned char, Aligned<unsigned char, 2 * M>> x(1, 3);
#ifdef __GNUC__
// Using __attribute__ ((aligned ())) instead of alignas to bypass a gcc bug:
// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=89357
__attribute__((aligned(2 * M))) unsigned char p[x.AllocSize()];
#else
alignas(2 * M) unsigned char p[x.AllocSize()];
#endif
EXPECT_EQ(2 * M + 3, x.AllocSize());
EXPECT_THAT(x.Pointers(p), Tuple(p + 0, p + 2 * M));
}
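A stand-alone illustration of the over-alignment being tested above (no Layout involved, sizes chosen arbitrarily): request twice the alignment of max_align_t and confirm the buffer address honors it, using the same GCC attribute workaround.

#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
  constexpr std::size_t kAlign = 2 * alignof(std::max_align_t);
#ifdef __GNUC__
  // Mirrors the workaround above for the GCC alignas-on-arrays bug.
  __attribute__((aligned(2 * alignof(std::max_align_t)))) unsigned char buf[64];
#else
  alignas(kAlign) unsigned char buf[64];
#endif
  std::printf("aligned to %zu: %s\n", kAlign,
              reinterpret_cast<std::uintptr_t>(buf) % kAlign == 0 ? "yes"
                                                                  : "no");
}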
|
|
@ -30,8 +30,8 @@
|
|||
// It may also optionally define `value()` and `apply()`. For documentation on
|
||||
// these, see hash_policy_traits.h.
|
||||
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_NODE_HASH_POLICY_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_NODE_HASH_POLICY_H_
|
||||
#ifndef ABSL_CONTAINER_INTERNAL_NODE_SLOT_POLICY_H_
|
||||
#define ABSL_CONTAINER_INTERNAL_NODE_SLOT_POLICY_H_
|
||||
|
||||
#include <cassert>
|
||||
#include <cstddef>
|
||||
|
@ -46,7 +46,7 @@ ABSL_NAMESPACE_BEGIN
|
|||
namespace container_internal {
|
||||
|
||||
template <class Reference, class Policy>
|
||||
struct node_hash_policy {
|
||||
struct node_slot_policy {
|
||||
static_assert(std::is_lvalue_reference<Reference>::value, "");
|
||||
|
||||
using slot_type = typename std::remove_cv<
|
||||
|
@ -89,4 +89,4 @@ struct node_hash_policy {
|
|||
ABSL_NAMESPACE_END
|
||||
} // namespace absl
|
||||
|
||||
#endif // ABSL_CONTAINER_INTERNAL_NODE_HASH_POLICY_H_
|
||||
#endif // ABSL_CONTAINER_INTERNAL_NODE_SLOT_POLICY_H_
|
|
@ -12,7 +12,7 @@
|
|||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "absl/container/internal/node_hash_policy.h"
|
||||
#include "absl/container/internal/node_slot_policy.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
|
@ -27,7 +27,7 @@ namespace {
|
|||
|
||||
using ::testing::Pointee;
|
||||
|
||||
struct Policy : node_hash_policy<int&, Policy> {
|
||||
struct Policy : node_slot_policy<int&, Policy> {
|
||||
using key_type = int;
|
||||
using init_type = int;
|
||||
|
|
@ -23,13 +23,17 @@ namespace absl {
|
|||
ABSL_NAMESPACE_BEGIN
|
||||
namespace container_internal {
|
||||
|
||||
// A single block of empty control bytes for tables without any slots allocated.
|
||||
// This enables removing a branch in the hot path of find().
|
||||
alignas(16) ABSL_CONST_INIT ABSL_DLL const ctrl_t kEmptyGroup[16] = {
|
||||
ctrl_t::kSentinel, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty,
|
||||
ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty,
|
||||
ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty,
|
||||
ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty, ctrl_t::kEmpty};
|
||||
|
||||
#ifdef ABSL_INTERNAL_NEED_REDUNDANT_CONSTEXPR_DECL
|
||||
constexpr size_t Group::kWidth;
|
||||
#endif
|
||||
|
||||
// Returns "random" seed.
|
||||
inline size_t RandomSeed() {
|
||||
|
|
File diff suppressed because it is too large
|
@ -330,33 +330,42 @@ void BM_Group_Match(benchmark::State& state) {
|
|||
h2_t h = 1;
|
||||
for (auto _ : state) {
|
||||
::benchmark::DoNotOptimize(h);
|
||||
::benchmark::DoNotOptimize(g);
|
||||
::benchmark::DoNotOptimize(g.Match(h));
|
||||
}
|
||||
}
|
||||
BENCHMARK(BM_Group_Match);
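The benchmark changes in this file add DoNotOptimize on the inputs as well as the result. An illustrative stand-alone benchmark (not part of the diff) showing that pattern, which keeps the compiler from hoisting or deleting the measured expression:

#include "benchmark/benchmark.h"

static void BM_Add(benchmark::State& state) {
  int a = 1;
  int b = 2;
  for (auto _ : state) {
    benchmark::DoNotOptimize(a);
    benchmark::DoNotOptimize(b);
    benchmark::DoNotOptimize(a + b);
  }
}
BENCHMARK(BM_Add);
BENCHMARK_MAIN();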
|
||||
|
||||
void BM_Group_MatchEmpty(benchmark::State& state) {
|
||||
void BM_Group_MaskEmpty(benchmark::State& state) {
|
||||
std::array<ctrl_t, Group::kWidth> group;
|
||||
Iota(group.begin(), group.end(), -4);
|
||||
Group g{group.data()};
|
||||
for (auto _ : state) ::benchmark::DoNotOptimize(g.MatchEmpty());
|
||||
for (auto _ : state) {
|
||||
::benchmark::DoNotOptimize(g);
|
||||
::benchmark::DoNotOptimize(g.MaskEmpty());
|
||||
}
|
||||
}
|
||||
BENCHMARK(BM_Group_MatchEmpty);
|
||||
BENCHMARK(BM_Group_MaskEmpty);
|
||||
|
||||
void BM_Group_MatchEmptyOrDeleted(benchmark::State& state) {
|
||||
void BM_Group_MaskEmptyOrDeleted(benchmark::State& state) {
|
||||
std::array<ctrl_t, Group::kWidth> group;
|
||||
Iota(group.begin(), group.end(), -4);
|
||||
Group g{group.data()};
|
||||
for (auto _ : state) ::benchmark::DoNotOptimize(g.MatchEmptyOrDeleted());
|
||||
for (auto _ : state) {
|
||||
::benchmark::DoNotOptimize(g);
|
||||
::benchmark::DoNotOptimize(g.MaskEmptyOrDeleted());
|
||||
}
|
||||
}
|
||||
BENCHMARK(BM_Group_MatchEmptyOrDeleted);
|
||||
BENCHMARK(BM_Group_MaskEmptyOrDeleted);
|
||||
|
||||
void BM_Group_CountLeadingEmptyOrDeleted(benchmark::State& state) {
|
||||
std::array<ctrl_t, Group::kWidth> group;
|
||||
Iota(group.begin(), group.end(), -2);
|
||||
Group g{group.data()};
|
||||
for (auto _ : state)
|
||||
for (auto _ : state) {
|
||||
::benchmark::DoNotOptimize(g);
|
||||
::benchmark::DoNotOptimize(g.CountLeadingEmptyOrDeleted());
|
||||
}
|
||||
}
|
||||
BENCHMARK(BM_Group_CountLeadingEmptyOrDeleted);
|
||||
|
||||
|
@@ -364,7 +373,10 @@ void BM_Group_MatchFirstEmptyOrDeleted(benchmark::State& state) {
  std::array<ctrl_t, Group::kWidth> group;
  Iota(group.begin(), group.end(), -2);
  Group g{group.data()};
  for (auto _ : state) ::benchmark::DoNotOptimize(*g.MatchEmptyOrDeleted());
  for (auto _ : state) {
    ::benchmark::DoNotOptimize(g);
    ::benchmark::DoNotOptimize(g.MaskEmptyOrDeleted().LowestBitSet());
  }
}
BENCHMARK(BM_Group_MatchFirstEmptyOrDeleted);
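A recurring edit in these benchmark hunks is wrapping the loop body in a block and adding ::benchmark::DoNotOptimize(g) before the measured call. As commonly done with Google Benchmark, this keeps the compiler from hoisting or constant-folding the work out of the timed loop. A minimal generic sketch of the pattern (not the Abseil benchmark itself):

#include <benchmark/benchmark.h>

static int Square(int x) { return x * x; }

static void BM_Square(benchmark::State& state) {
  int input = 42;
  for (auto _ : state) {
    // Re-introduce the input as "unknown" each iteration so the compiler
    // cannot hoist the call out of the timed loop...
    benchmark::DoNotOptimize(input);
    // ...and keep the result observable so the call is not dead-code
    // eliminated.
    benchmark::DoNotOptimize(Square(input));
  }
}
BENCHMARK(BM_Square);
BENCHMARK_MAIN();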
@@ -31,6 +31,7 @@
#include "absl/base/attributes.h"
#include "absl/base/config.h"
#include "absl/base/internal/cycleclock.h"
#include "absl/base/internal/prefetch.h"
#include "absl/base/internal/raw_logging.h"
#include "absl/container/internal/container_memory.h"
#include "absl/container/internal/hash_function_defaults.h"
@@ -194,35 +195,39 @@ TEST(Group, Match) {
  }
}

TEST(Group, MatchEmpty) {
TEST(Group, MaskEmpty) {
  if (Group::kWidth == 16) {
    ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kDeleted, CtrlT(3),
                      ctrl_t::kEmpty, CtrlT(5), ctrl_t::kSentinel, CtrlT(7),
                      CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1),
                      CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)};
    EXPECT_THAT(Group{group}.MatchEmpty(), ElementsAre(0, 4));
    EXPECT_THAT(Group{group}.MaskEmpty().LowestBitSet(), 0);
    EXPECT_THAT(Group{group}.MaskEmpty().HighestBitSet(), 4);
  } else if (Group::kWidth == 8) {
    ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), CtrlT(2),
                      ctrl_t::kDeleted, CtrlT(2), CtrlT(1),
                      ctrl_t::kSentinel, CtrlT(1)};
    EXPECT_THAT(Group{group}.MatchEmpty(), ElementsAre(0));
    EXPECT_THAT(Group{group}.MaskEmpty().LowestBitSet(), 0);
    EXPECT_THAT(Group{group}.MaskEmpty().HighestBitSet(), 0);
  } else {
    FAIL() << "No test coverage for Group::kWidth==" << Group::kWidth;
  }
}

TEST(Group, MatchEmptyOrDeleted) {
TEST(Group, MaskEmptyOrDeleted) {
  if (Group::kWidth == 16) {
    ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kDeleted, CtrlT(3),
                      ctrl_t::kEmpty, CtrlT(5), ctrl_t::kSentinel, CtrlT(7),
    ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), ctrl_t::kEmpty, CtrlT(3),
                      ctrl_t::kDeleted, CtrlT(5), ctrl_t::kSentinel, CtrlT(7),
                      CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1),
                      CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)};
    EXPECT_THAT(Group{group}.MatchEmptyOrDeleted(), ElementsAre(0, 2, 4));
    EXPECT_THAT(Group{group}.MaskEmptyOrDeleted().LowestBitSet(), 0);
    EXPECT_THAT(Group{group}.MaskEmptyOrDeleted().HighestBitSet(), 4);
  } else if (Group::kWidth == 8) {
    ctrl_t group[] = {ctrl_t::kEmpty, CtrlT(1), CtrlT(2),
                      ctrl_t::kDeleted, CtrlT(2), CtrlT(1),
                      ctrl_t::kSentinel, CtrlT(1)};
    EXPECT_THAT(Group{group}.MatchEmptyOrDeleted(), ElementsAre(0, 3));
    EXPECT_THAT(Group{group}.MaskEmptyOrDeleted().LowestBitSet(), 0);
    EXPECT_THAT(Group{group}.MaskEmptyOrDeleted().HighestBitSet(), 3);
  } else {
    FAIL() << "No test coverage for Group::kWidth==" << Group::kWidth;
  }
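These test and benchmark hunks track an upstream rename from Group::Match* to Group::Mask*: the old result was checked as a sequence of matching indices (ElementsAre(0, 4)), while the new result is a bitmask queried through LowestBitSet() and HighestBitSet(). A rough sketch of such a bitmask wrapper, assuming C++20 <bit> and a hypothetical ToyBitMask name (this is not Abseil's real BitMask type):

#include <bit>
#include <cstdint>

// Toy 16-bit mask where bit i is set when control byte i matched a predicate.
class ToyBitMask {
 public:
  explicit ToyBitMask(uint16_t bits) : bits_(bits) {}

  explicit operator bool() const { return bits_ != 0; }  // "any match?"

  // Index of the first matching slot; meaningful only for a non-empty mask.
  int LowestBitSet() const { return std::countr_zero(bits_); }

  // Index of the last matching slot.
  int HighestBitSet() const { return 15 - std::countl_zero(bits_); }

 private:
  uint16_t bits_;
};

// For the 16-wide test above, the "empty" positions are 0 and 4, i.e. a mask
// of 0b0000000000010001: ToyBitMask(0b10001).LowestBitSet() == 0 and
// ToyBitMask(0b10001).HighestBitSet() == 4, matching the expectations.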
@@ -1244,7 +1249,7 @@ ExpectedStats XorSeedExpectedStats() {
    case 16:
      if (kRandomizesInserts) {
        return {0.1,
                1.0,
                2.0,
                {{0.95, 0.1}},
                {{0.95, 0}, {0.99, 1}, {0.999, 8}, {0.9999, 15}}};
      } else {
@@ -1258,6 +1263,7 @@ ExpectedStats XorSeedExpectedStats() {
  return {};
}

// TODO(b/80415403): Figure out why this test is so flaky, esp. on MSVC
TEST(Table, DISABLED_EnsureNonQuadraticTopNXorSeedByProbeSeqLength) {
  ProbeStatsPerSize stats;
  std::vector<size_t> sizes = {Group::kWidth << 5, Group::kWidth << 10};
@@ -1330,17 +1336,17 @@ ExpectedStats LinearTransformExpectedStats() {
                {{0.95, 0.3}},
                {{0.95, 0}, {0.99, 1}, {0.999, 8}, {0.9999, 15}}};
      } else {
        return {0.15,
                0.5,
                {{0.95, 0.3}},
                {{0.95, 0}, {0.99, 3}, {0.999, 15}, {0.9999, 25}}};
        return {0.4,
                0.6,
                {{0.95, 0.5}},
                {{0.95, 1}, {0.99, 14}, {0.999, 23}, {0.9999, 26}}};
      }
    case 16:
      if (kRandomizesInserts) {
        return {0.1,
                0.4,
                {{0.95, 0.3}},
                {{0.95, 0}, {0.99, 1}, {0.999, 8}, {0.9999, 15}}};
                {{0.95, 1}, {0.99, 2}, {0.999, 9}, {0.9999, 15}}};
      } else {
        return {0.05,
                0.2,
@@ -1352,6 +1358,7 @@ ExpectedStats LinearTransformExpectedStats() {
  return {};
}

// TODO(b/80415403): Figure out why this test is so flaky.
TEST(Table, DISABLED_EnsureNonQuadraticTopNLinearTransformByProbeSeqLength) {
  ProbeStatsPerSize stats;
  std::vector<size_t> sizes = {Group::kWidth << 5, Group::kWidth << 10};
@@ -2028,7 +2035,7 @@ TEST(TableDeathTest, EraseOfEndAsserts) {

  IntTable t;
  // Extra simple "regexp" as regexp support is highly varied across platforms.
  constexpr char kDeathMsg[] = "Invalid operation on iterator";
  constexpr char kDeathMsg[] = "erase.. called on invalid iterator";
  EXPECT_DEATH_IF_SUPPORTED(t.erase(t.end()), kDeathMsg);
}
@@ -2038,7 +2045,7 @@ TEST(RawHashSamplerTest, Sample) {
  SetHashtablezEnabled(true);
  SetHashtablezSampleParameter(100);

  auto& sampler = HashtablezSampler::Global();
  auto& sampler = GlobalHashtablezSampler();
  size_t start_size = 0;
  std::unordered_set<const HashtablezInfo*> preexisting_info;
  start_size += sampler.Iterate([&](const HashtablezInfo& info) {
@@ -2049,16 +2056,33 @@ TEST(RawHashSamplerTest, Sample) {
  std::vector<IntTable> tables;
  for (int i = 0; i < 1000000; ++i) {
    tables.emplace_back();

    const bool do_reserve = (i % 10 > 5);
    const bool do_rehash = !do_reserve && (i % 10 > 0);

    if (do_reserve) {
      // Don't reserve on all tables.
      tables.back().reserve(10 * (i % 10));
    }

    tables.back().insert(1);
    tables.back().insert(i % 5);

    if (do_rehash) {
      // Rehash some other tables.
      tables.back().rehash(10 * (i % 10));
    }
  }
  size_t end_size = 0;
  std::unordered_map<size_t, int> observed_checksums;
  std::unordered_map<ssize_t, int> reservations;
  end_size += sampler.Iterate([&](const HashtablezInfo& info) {
    if (preexisting_info.count(&info) == 0) {
      observed_checksums[info.hashes_bitwise_xor.load(
          std::memory_order_relaxed)]++;
      reservations[info.max_reserve.load(std::memory_order_relaxed)]++;
    }
    EXPECT_EQ(info.inline_element_size, sizeof(int64_t));
    ++end_size;
  });
@@ -2068,6 +2092,15 @@ TEST(RawHashSamplerTest, Sample) {
  for (const auto& [_, count] : observed_checksums) {
    EXPECT_NEAR((100 * count) / static_cast<double>(tables.size()), 0.2, 0.05);
  }

  EXPECT_EQ(reservations.size(), 10);
  for (const auto& [reservation, count] : reservations) {
    EXPECT_GE(reservation, 0);
    EXPECT_LT(reservation, 100);

    EXPECT_NEAR((100 * count) / static_cast<double>(tables.size()), 0.1, 0.05)
        << reservation;
  }
}
#endif  // ABSL_INTERNAL_HASHTABLEZ_SAMPLE
@@ -2076,7 +2109,7 @@ TEST(RawHashSamplerTest, DoNotSampleCustomAllocators) {
  SetHashtablezEnabled(true);
  SetHashtablezSampleParameter(100);

  auto& sampler = HashtablezSampler::Global();
  auto& sampler = GlobalHashtablezSampler();
  size_t start_size = 0;
  start_size += sampler.Iterate([&](const HashtablezInfo&) { ++start_size; });
@@ -476,7 +476,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentOnSelf) {
  // containers in unspecified state (and in practice it causes a memory leak
  // according to heap-checker!).

REGISTER_TYPED_TEST_CASE_P(
REGISTER_TYPED_TEST_SUITE_P(
    ConstructorTest, NoArgs, BucketCount, BucketCountHash, BucketCountHashEqual,
    BucketCountHashEqualAlloc, BucketCountAlloc, BucketCountHashAlloc, Alloc,
    InputIteratorBucketHashEqualAlloc, InputIteratorBucketAlloc,
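The remaining hunks in these typed-test headers are the mechanical googletest rename from the deprecated REGISTER_TYPED_TEST_CASE_P to REGISTER_TYPED_TEST_SUITE_P. For reference, a minimal self-contained registration in the new spelling (the fixture and test names here are invented for illustration):

#include "gtest/gtest.h"

template <class T>
class SmallTest : public ::testing::Test {};

TYPED_TEST_SUITE_P(SmallTest);

TYPED_TEST_P(SmallTest, DefaultConstructed) {
  TypeParam value{};
  (void)value;  // Nothing to check; this only demonstrates registration.
}

// Old spelling: REGISTER_TYPED_TEST_CASE_P(SmallTest, DefaultConstructed);
REGISTER_TYPED_TEST_SUITE_P(SmallTest, DefaultConstructed);

// The instantiation macro was renamed the same way
// (INSTANTIATE_TYPED_TEST_CASE_P -> INSTANTIATE_TYPED_TEST_SUITE_P).
using MyTypes = ::testing::Types<int, double>;
INSTANTIATE_TYPED_TEST_SUITE_P(My, SmallTest, MyTypes);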
@@ -107,7 +107,7 @@ TYPED_TEST_P(LookupTest, EqualRange) {
  }
}

REGISTER_TYPED_TEST_CASE_P(LookupTest, At, OperatorBracket, Count, Find,
REGISTER_TYPED_TEST_SUITE_P(LookupTest, At, OperatorBracket, Count, Find,
                            EqualRange);

}  // namespace container_internal
@@ -297,11 +297,12 @@ TYPED_TEST_P(ModifiersTest, Swap) {
// TODO(alkis): Write tests for extract.
// TODO(alkis): Write tests for merge.

REGISTER_TYPED_TEST_CASE_P(ModifiersTest, Clear, Insert, InsertHint,
REGISTER_TYPED_TEST_SUITE_P(ModifiersTest, Clear, Insert, InsertHint,
                            InsertRange, InsertWithinCapacity,
                            InsertRangeWithinCapacity, InsertOrAssign,
                            InsertOrAssignHint, Emplace, EmplaceHint, TryEmplace,
                            TryEmplaceHint, Erase, EraseRange, EraseKey, Swap);
                            InsertOrAssignHint, Emplace, EmplaceHint,
                            TryEmplace, TryEmplaceHint, Erase, EraseRange,
                            EraseKey, Swap);

template <typename Type>
struct is_unique_ptr : std::false_type {};
@@ -478,7 +478,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentOnSelf) {
  EXPECT_THAT(keys(m), ::testing::UnorderedElementsAreArray(values));
}

REGISTER_TYPED_TEST_CASE_P(
REGISTER_TYPED_TEST_SUITE_P(
    ConstructorTest, NoArgs, BucketCount, BucketCountHash, BucketCountHashEqual,
    BucketCountHashEqualAlloc, BucketCountAlloc, BucketCountHashAlloc, Alloc,
    InputIteratorBucketHashEqualAlloc, InputIteratorBucketAlloc,
@@ -82,7 +82,7 @@ TYPED_TEST_P(LookupTest, EqualRange) {
  }
}

REGISTER_TYPED_TEST_CASE_P(LookupTest, Count, Find, EqualRange);
REGISTER_TYPED_TEST_SUITE_P(LookupTest, Count, Find, EqualRange);

}  // namespace container_internal
ABSL_NAMESPACE_END
@@ -209,7 +209,7 @@ TYPED_TEST_P(ModifiersTest, Swap) {
// TODO(alkis): Write tests for extract.
// TODO(alkis): Write tests for merge.

REGISTER_TYPED_TEST_CASE_P(ModifiersTest, Clear, Insert, InsertHint,
REGISTER_TYPED_TEST_SUITE_P(ModifiersTest, Clear, Insert, InsertHint,
                            InsertRange, InsertWithinCapacity,
                            InsertRangeWithinCapacity, Emplace, EmplaceHint,
                            Erase, EraseRange, EraseKey, Swap);
@@ -41,9 +41,10 @@
#include <utility>

#include "absl/algorithm/container.h"
#include "absl/base/macros.h"
#include "absl/container/internal/container_memory.h"
#include "absl/container/internal/hash_function_defaults.h"  // IWYU pragma: export
#include "absl/container/internal/node_hash_policy.h"
#include "absl/container/internal/node_slot_policy.h"
#include "absl/container/internal/raw_hash_map.h"  // IWYU pragma: export
#include "absl/memory/memory.h"
@@ -77,6 +78,10 @@ class NodeHashMapPolicy;
// absl/hash/hash.h for information on extending Abseil hashing to user-defined
// types.
//
// Using `absl::node_hash_map` at interface boundaries in dynamically loaded
// libraries (e.g. .dll, .so) is unsupported due to the way `absl::Hash` values
// may be randomized across dynamically loaded libraries.
//
// Example:
//
//   // Create a node hash map of three strings (that map to strings)
@@ -347,8 +352,8 @@ class node_hash_map
  // `node_hash_map`.
  //
  //   iterator try_emplace(const_iterator hint,
  //                        const init_type& k, Args&&... args):
  //   iterator try_emplace(const_iterator hint, init_type&& k, Args&&... args):
  //                        const key_type& k, Args&&... args):
  //   iterator try_emplace(const_iterator hint, key_type&& k, Args&&... args):
  //
  // Inserts (via copy or move) the element of the specified key into the
  // `node_hash_map` using the position of `hint` as a non-binding suggestion
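The hunk above only corrects the documented overload signatures (the key parameter is spelled key_type rather than init_type); the behavior of try_emplace is unchanged. A short usage sketch of the hinted overload, assuming a plain absl::node_hash_map<std::string, std::string>:

#include <string>

#include "absl/container/node_hash_map.h"

void TryEmplaceWithHint() {
  absl::node_hash_map<std::string, std::string> m;
  m.try_emplace("bear", "grizzly");

  // The hint is a non-binding suggestion for where to start the lookup.
  // If the key already exists, nothing is inserted and the mapped value is
  // left untouched; the returned iterator points at the existing element.
  auto hint = m.find("bear");
  auto it = m.try_emplace(hint, "bear", "polar");
  // it->second is still "grizzly".
  (void)it;

  // With a new key, the element is constructed in place and inserted.
  m.try_emplace(m.end(), "cat", "tabby");
}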
@@ -525,17 +530,19 @@ class node_hash_map
// erase_if(node_hash_map<>, Pred)
//
// Erases all elements that satisfy the predicate `pred` from the container `c`.
// Returns the number of erased elements.
template <typename K, typename V, typename H, typename E, typename A,
          typename Predicate>
void erase_if(node_hash_map<K, V, H, E, A>& c, Predicate pred) {
  container_internal::EraseIf(pred, &c);
typename node_hash_map<K, V, H, E, A>::size_type erase_if(
    node_hash_map<K, V, H, E, A>& c, Predicate pred) {
  return container_internal::EraseIf(pred, &c);
}

namespace container_internal {

template <class Key, class Value>
class NodeHashMapPolicy
    : public absl::container_internal::node_hash_policy<
    : public absl::container_internal::node_slot_policy<
          std::pair<const Key, Value>&, NodeHashMapPolicy<Key, Value>> {
  using value_type = std::pair<const Key, Value>;
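With this change absl::erase_if returns the number of erased elements (the same convention as C++20 std::erase_if) instead of void; the node_hash_set overload further down gets the identical treatment. A brief usage sketch of the new return value:

#include <cstddef>
#include <utility>

#include "absl/container/node_hash_map.h"

void EraseOddKeys() {
  absl::node_hash_map<int, int> m = {{1, 10}, {2, 20}, {3, 30}, {4, 40}};

  // The predicate sees each element as a (const key, value) pair.
  std::size_t removed =
      absl::erase_if(m, [](const std::pair<const int, int>& kv) {
        return kv.first % 2 == 1;
      });
  // removed == 2; m now holds {2, 20} and {4, 40}.
  (void)removed;
}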
@@ -223,33 +223,36 @@ TEST(NodeHashMap, EraseIf) {
  // Erase all elements.
  {
    node_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    erase_if(s, [](std::pair<const int, int>) { return true; });
    EXPECT_EQ(erase_if(s, [](std::pair<const int, int>) { return true; }), 5);
    EXPECT_THAT(s, IsEmpty());
  }
  // Erase no elements.
  {
    node_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    erase_if(s, [](std::pair<const int, int>) { return false; });
    EXPECT_EQ(erase_if(s, [](std::pair<const int, int>) { return false; }), 0);
    EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(2, 2), Pair(3, 3),
                                        Pair(4, 4), Pair(5, 5)));
  }
  // Erase specific elements.
  {
    node_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    erase_if(s,
             [](std::pair<const int, int> kvp) { return kvp.first % 2 == 1; });
    EXPECT_EQ(erase_if(s,
                       [](std::pair<const int, int> kvp) {
                         return kvp.first % 2 == 1;
                       }),
              3);
    EXPECT_THAT(s, UnorderedElementsAre(Pair(2, 2), Pair(4, 4)));
  }
  // Predicate is function reference.
  {
    node_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    erase_if(s, FirstIsEven);
    EXPECT_EQ(erase_if(s, FirstIsEven), 2);
    EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(3, 3), Pair(5, 5)));
  }
  // Predicate is function pointer.
  {
    node_hash_map<int, int> s = {{1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}};
    erase_if(s, &FirstIsEven);
    EXPECT_EQ(erase_if(s, &FirstIsEven), 2);
    EXPECT_THAT(s, UnorderedElementsAre(Pair(1, 1), Pair(3, 3), Pair(5, 5)));
  }
}
@@ -38,8 +38,9 @@
#include <type_traits>

#include "absl/algorithm/container.h"
#include "absl/base/macros.h"
#include "absl/container/internal/hash_function_defaults.h"  // IWYU pragma: export
#include "absl/container/internal/node_hash_policy.h"
#include "absl/container/internal/node_slot_policy.h"
#include "absl/container/internal/raw_hash_set.h"  // IWYU pragma: export
#include "absl/memory/memory.h"
@@ -73,6 +74,10 @@ struct NodeHashSetPolicy;
// absl/hash/hash.h for information on extending Abseil hashing to user-defined
// types.
//
// Using `absl::node_hash_set` at interface boundaries in dynamically loaded
// libraries (e.g. .dll, .so) is unsupported due to the way `absl::Hash` values
// may be randomized across dynamically loaded libraries.
//
// Example:
//
//   // Create a node hash set of three strings
@@ -433,16 +438,18 @@ class node_hash_set
// erase_if(node_hash_set<>, Pred)
//
// Erases all elements that satisfy the predicate `pred` from the container `c`.
// Returns the number of erased elements.
template <typename T, typename H, typename E, typename A, typename Predicate>
void erase_if(node_hash_set<T, H, E, A>& c, Predicate pred) {
  container_internal::EraseIf(pred, &c);
typename node_hash_set<T, H, E, A>::size_type erase_if(
    node_hash_set<T, H, E, A>& c, Predicate pred) {
  return container_internal::EraseIf(pred, &c);
}

namespace container_internal {

template <class T>
struct NodeHashSetPolicy
    : absl::container_internal::node_hash_policy<T&, NodeHashSetPolicy<T>> {
    : absl::container_internal::node_slot_policy<T&, NodeHashSetPolicy<T>> {
  using key_type = T;
  using init_type = T;
  using constant_iterators = std::true_type;
@@ -108,31 +108,31 @@ TEST(NodeHashSet, EraseIf) {
  // Erase all elements.
  {
    node_hash_set<int> s = {1, 2, 3, 4, 5};
    erase_if(s, [](int) { return true; });
    EXPECT_EQ(erase_if(s, [](int) { return true; }), 5);
    EXPECT_THAT(s, IsEmpty());
  }
  // Erase no elements.
  {
    node_hash_set<int> s = {1, 2, 3, 4, 5};
    erase_if(s, [](int) { return false; });
    EXPECT_EQ(erase_if(s, [](int) { return false; }), 0);
    EXPECT_THAT(s, UnorderedElementsAre(1, 2, 3, 4, 5));
  }
  // Erase specific elements.
  {
    node_hash_set<int> s = {1, 2, 3, 4, 5};
    erase_if(s, [](int k) { return k % 2 == 1; });
    EXPECT_EQ(erase_if(s, [](int k) { return k % 2 == 1; }), 3);
    EXPECT_THAT(s, UnorderedElementsAre(2, 4));
  }
  // Predicate is function reference.
  {
    node_hash_set<int> s = {1, 2, 3, 4, 5};
    erase_if(s, IsEven);
    EXPECT_EQ(erase_if(s, IsEven), 2);
    EXPECT_THAT(s, UnorderedElementsAre(1, 3, 5));
  }
  // Predicate is function pointer.
  {
    node_hash_set<int> s = {1, 2, 3, 4, 5};
    erase_if(s, &IsEven);
    EXPECT_EQ(erase_if(s, &IsEven), 2);
    EXPECT_THAT(s, UnorderedElementsAre(1, 3, 5));
  }
}
@@ -0,0 +1,114 @@
// Copyright 2018 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "absl/container/flat_hash_map.h"
#include "absl/container/flat_hash_set.h"
#include "absl/container/node_hash_map.h"
#include "absl/container/node_hash_set.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {
namespace {

#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
// Create some tables of type `Table`, then look at all the new
// `HashtablezInfo`s to make sure that the `inline_element_size ==
// expected_element_size`.  The `inline_element_size` is the amount of memory
// allocated for each slot of a hash table, that is `sizeof(slot_type)`.  Add
// the new `HashtablezInfo`s to `preexisting_info`.  Store all the new tables
// into `tables`.
template <class Table>
void TestInlineElementSize(
    HashtablezSampler& sampler,
    // clang-tidy gives a false positive on this declaration.  This unordered
    // set cannot be flat_hash_set, however, since that would introduce a mutex
    // deadlock.
    std::unordered_set<const HashtablezInfo*>& preexisting_info,  // NOLINT
    std::vector<Table>& tables, const typename Table::value_type& elt,
    size_t expected_element_size) {
  for (int i = 0; i < 10; ++i) {
    // We create a new table and must store it somewhere so that when we store
    // a pointer to the resulting `HashtablezInfo` into `preexisting_info`
    // we aren't storing a dangling pointer.
    tables.emplace_back();
    // We must insert an element to get a hashtablez to instantiate.
    tables.back().insert(elt);
  }
  size_t new_count = 0;
  sampler.Iterate([&](const HashtablezInfo& info) {
    if (preexisting_info.insert(&info).second) {
      EXPECT_EQ(info.inline_element_size, expected_element_size);
      ++new_count;
    }
  });
  // Make sure we actually did get a new hashtablez.
  EXPECT_GT(new_count, 0);
}

struct bigstruct {
  char a[1000];
  friend bool operator==(const bigstruct& x, const bigstruct& y) {
    return memcmp(x.a, y.a, sizeof(x.a)) == 0;
  }
  template <typename H>
  friend H AbslHashValue(H h, const bigstruct& c) {
    return H::combine_contiguous(std::move(h), c.a, sizeof(c.a));
  }
};
#endif

TEST(FlatHashMap, SampleElementSize) {
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
  // Enable sampling even if the prod default is off.
  SetHashtablezEnabled(true);
  SetHashtablezSampleParameter(1);

  auto& sampler = GlobalHashtablezSampler();
  std::vector<flat_hash_map<int, bigstruct>> flat_map_tables;
  std::vector<flat_hash_set<bigstruct>> flat_set_tables;
  std::vector<node_hash_map<int, bigstruct>> node_map_tables;
  std::vector<node_hash_set<bigstruct>> node_set_tables;

  // It takes thousands of new tables after changing the sampling parameters
  // before you actually get some instrumentation.  And you must actually
  // put something into those tables.
  for (int i = 0; i < 10000; ++i) {
    flat_map_tables.emplace_back();
    flat_map_tables.back()[i] = bigstruct{};
  }

  // clang-tidy gives a false positive on this declaration.  This unordered set
  // cannot be a flat_hash_set, however, since that would introduce a mutex
  // deadlock.
  std::unordered_set<const HashtablezInfo*> preexisting_info;  // NOLINT
  sampler.Iterate(
      [&](const HashtablezInfo& info) { preexisting_info.insert(&info); });
  TestInlineElementSize(sampler, preexisting_info, flat_map_tables,
                        {0, bigstruct{}}, sizeof(int) + sizeof(bigstruct));
  TestInlineElementSize(sampler, preexisting_info, node_map_tables,
                        {0, bigstruct{}}, sizeof(void*));
  TestInlineElementSize(sampler, preexisting_info, flat_set_tables,  //
                        bigstruct{}, sizeof(bigstruct));
  TestInlineElementSize(sampler, preexisting_info, node_set_tables,  //
                        bigstruct{}, sizeof(void*));
#endif
}

}  // namespace
}  // namespace container_internal
ABSL_NAMESPACE_END
}  // namespace absl
Some files were not shown because too many files have changed in this diff