diff --git a/cmake/aws-sdk.cmake b/cmake/aws-sdk.cmake
index 0d5f46e..2eb0589 100644
--- a/cmake/aws-sdk.cmake
+++ b/cmake/aws-sdk.cmake
@@ -10,7 +10,7 @@ ExternalProject_Add(
     libawscpp-download
     PREFIX "vendor/libawscpp-download"
     GIT_REPOSITORY "https://github.com/aws/aws-sdk-cpp.git"
-    GIT_TAG "1.9.370"
+    GIT_TAG "1.11.414"
     TIMEOUT 10
     LIST_SEPARATOR "|"
     CMAKE_ARGS
diff --git a/cmake/tbb.cmake b/cmake/tbb.cmake
index b9d4a87..29f8fa1 100755
--- a/cmake/tbb.cmake
+++ b/cmake/tbb.cmake
@@ -8,27 +8,32 @@ find_package(Git REQUIRED)
 # Get rapidjson
 ExternalProject_Add(
     tbb_src
-    PREFIX "vendor/intel/tbb"
-    GIT_REPOSITORY "https://github.com/wjakob/tbb.git"
-    GIT_TAG b066defc0229a1e92d7a200eb3fe0f7e35945d95
+    PREFIX "vendor/intel"
+    GIT_REPOSITORY "https://github.com/oneapi-src/oneTBB.git"
+    GIT_TAG 2a7e0dbe46855b75497355ed3808c31af14a35b6
     TIMEOUT 10
     BUILD_COMMAND make
     UPDATE_COMMAND "" # to prevent rebuilding everytime
-    INSTALL_COMMAND ""
     CMAKE_ARGS
-        -DCMAKE_INSTALL_PREFIX=${CMAKE_BINARY_DIR}/vendor/tbb_cpp
+        -DTBB_TEST=OFF
+        -DCMAKE_INSTALL_PREFIX=${CMAKE_BINARY_DIR}/vendor/intel
         -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
         -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
         -DCMAKE_CXX_FLAGS=${CMAKE_CXX_FLAGS}
         -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
 )
 
-# Prepare json
-ExternalProject_Get_Property(tbb_src source_dir)
-ExternalProject_Get_Property(tbb_src binary_dir)
+# Prepare tbb
+# ExternalProject_Get_Property(tbb_src source_dir)
+# ExternalProject_Get_Property(tbb_src binary_dir)
+ExternalProject_Get_Property(tbb_src install_dir)
 
-set(TBB_INCLUDE_DIR ${source_dir}/include)
-set(TBB_LIBRARY_PATH ${binary_dir}/libtbb.so)
+set(TBB_INCLUDE_DIR ${install_dir}/include)
+if (CMAKE_BUILD_TYPE STREQUAL "Debug")
+    set(TBB_LIBRARY_PATH ${install_dir}/lib/libtbb_debug.so)
+else()
+    set(TBB_LIBRARY_PATH ${install_dir}/lib/libtbb.so)
+endif()
 
 file(MAKE_DIRECTORY ${TBB_INCLUDE_DIR})
diff --git a/tools/conversion/btrtocsv.cpp b/tools/conversion/btrtocsv.cpp
index 4d29b82..d43109a 100644
--- a/tools/conversion/btrtocsv.cpp
+++ b/tools/conversion/btrtocsv.cpp
@@ -12,8 +12,8 @@
 #include "gflags/gflags.h"
 #include "yaml-cpp/yaml.h"
 #include "spdlog/spdlog.h"
-#include "tbb/parallel_for.h"
-#include "tbb/task_scheduler_init.h"
+#include "oneapi/tbb/global_control.h"
+#include "oneapi/tbb/parallel_for.h"
 // ------------------------------------------------------------------------------
 // Btrfiles library
 #include "btrfiles.hpp"
@@ -105,9 +105,9 @@ int main(int argc, char **argv)
     // This seems necessary to be
     SchemePool::refresh();
 
-    // Init TBB TODO: is that actually still necessary ?
-    tbb::task_scheduler_init init(FLAGS_threads); // NOLINT(cppcoreguidelines-narrowing-conversions)
-
+    // Init TBB
+    oneapi::tbb::global_control global_limit(
+        oneapi::tbb::global_control::max_allowed_parallelism, FLAGS_threads);
     // Open output file
     auto csvstream = std::ofstream(FLAGS_csv);
     csvstream << std::setprecision(32);
@@ -124,7 +124,7 @@ int main(int argc, char **argv)
     // Prepare the readers
     std::vector> readers(file_metadata->num_columns);
     std::vector>> compressed_data(file_metadata->num_columns);
-    tbb::parallel_for(u32(0), file_metadata->num_columns, [&](u32 column_i) {
+    oneapi::tbb::parallel_for(u32(0), file_metadata->num_columns, [&](u32 column_i) {
         compressed_data[column_i].resize(file_metadata->parts[column_i].num_parts);
         for (u32 part_i = 0; part_i < file_metadata->parts[column_i].num_parts; part_i++) {
             auto path = btr_dir / ("column" + std::to_string(column_i) + "_part" + std::to_string(part_i));
diff --git a/tools/conversion/csvtobtr.cpp b/tools/conversion/csvtobtr.cpp
index 786e68d..2a45ee8 100644
--- a/tools/conversion/csvtobtr.cpp
+++ b/tools/conversion/csvtobtr.cpp
@@ -14,8 +14,8 @@
 #include
 #include
 #include
-#include <tbb/parallel_for.h>
-#include <tbb/task_scheduler_init.h>
+#include <oneapi/tbb/global_control.h>
+#include <oneapi/tbb/parallel_for.h>
 // ------------------------------------------------------------------------------
 // Btr internal includes
 #include "common/Utils.hpp"
@@ -72,8 +72,9 @@ int main(int argc, char **argv)
     // This seems necessary to be
     SchemePool::refresh();
 
-    // Init TBB TODO: is that actually still necessary ?
-    tbb::task_scheduler_init init(FLAGS_threads);
+    // Init TBB
+    oneapi::tbb::global_control global_limit(
+        oneapi::tbb::global_control::max_allowed_parallelism, FLAGS_threads);
 
     // Load schema
     const auto schema = YAML::LoadFile(FLAGS_yaml);
diff --git a/tools/conversion/decompression-speed.cpp b/tools/conversion/decompression-speed.cpp
index 87ea0f8..15e61dc 100644
--- a/tools/conversion/decompression-speed.cpp
+++ b/tools/conversion/decompression-speed.cpp
@@ -2,11 +2,11 @@
 #include
 #include
 #include
-#include
 // -------------------------------------------------------------------------------------
 #include "gflags/gflags.h"
-#include "tbb/parallel_for.h"
-#include "tbb/task_scheduler_init.h"
+#include "oneapi/tbb/global_control.h"
+#include "oneapi/tbb/parallel_for.h"
+#include
 // -------------------------------------------------------------------------------------
 #include "common/PerfEvent.hpp"
 #include "common/Utils.hpp"
@@ -108,7 +108,8 @@ int main(int argc, char **argv) {
     } else {
         threads = FLAGS_threads;
     }
-    tbb::task_scheduler_init init(threads);
+    oneapi::tbb::global_control global_limit(
+        oneapi::tbb::global_control::max_allowed_parallelism, threads);
 
     // Read the metadata
     std::vector raw_file_metadata;
diff --git a/tools/playground/generate_s3_data.cpp b/tools/playground/generate_s3_data.cpp
index 1b9d9dc..9b5cac8 100644
--- a/tools/playground/generate_s3_data.cpp
+++ b/tools/playground/generate_s3_data.cpp
@@ -5,8 +5,8 @@
 #include
 #include
 // -------------------------------------------------------------------------------------
-#include <tbb/parallel_for.h>
-#include <tbb/task_scheduler_init.h>
+#include <oneapi/tbb/global_control.h>
+#include <oneapi/tbb/parallel_for.h>
 // -------------------------------------------------------------------------------------
 #include
 #include
@@ -101,7 +101,7 @@ static void generate_and_upload_multipart(const Aws::S3Crt::S3CrtClient& s3_client
     /* Upload parts */
     size_t num_parts = (object_size + part_size - 1) / part_size;
     std::vector completed_parts(num_parts);
-    tbb::parallel_for(size_t(1), num_parts + 1, [&](size_t part_number) {
+    oneapi::tbb::parallel_for(size_t(1), num_parts + 1, [&](size_t part_number) {
         auto sstream = std::make_shared();
         generate_data(sstream, std::min(part_size, static_cast<size_t>(object_size)));
         auto [success, etag] = upload_part(s3_client, bucket, key, upload_id, part_number, sstream);
diff --git a/tools/playground/playground.cpp b/tools/playground/playground.cpp
index c369c13..062a7d1 100755
--- a/tools/playground/playground.cpp
+++ b/tools/playground/playground.cpp
@@ -10,6 +10,10 @@
 #include /* time */
 #include
 #include
+#include
+#include
+#include
+#include
 // -------------------------------------------------------------------------------------
 #include "headers/codecfactory.h"
 #include "headers/deltautil.h"
diff --git a/tools/playground/rle.cpp b/tools/playground/rle.cpp
index f774459..8f7f82e 100755
--- a/tools/playground/rle.cpp
+++ b/tools/playground/rle.cpp
@@ -14,7 +14,8 @@ int main() {
     std::srand(std::time(nullptr));
     // -------------------------------------------------------------------------------------
     using namespace FastPForLib;
-    IntegerCODEC &codec = *CODECFactory::getFromName("simdfastpfor256");
+    CODECFactory factory;
+    IntegerCODEC &codec = *factory.getFromName("simdfastpfor256");
     size_t N = 1000 * 1000;
     std::vector<uint32_t> rle_input(N);
     for ( uint32_t i = 0; i < N; i++ ) {
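
The change repeated across these tools is the oneTBB migration: tbb::task_scheduler_init was removed in oneTBB 2021, and a oneapi::tbb::global_control object now caps worker-thread parallelism for as long as it stays in scope. Below is a minimal self-contained sketch of that pattern, not code from this repository; the thread cap of 8 and the loop body are arbitrary placeholders, and it assumes oneTBB is installed (build with e.g. g++ demo.cpp -ltbb).

#include <iostream>
#include <oneapi/tbb/global_control.h>
#include <oneapi/tbb/parallel_for.h>

int main() {
    // Cap TBB worker threads for as long as `limit` is alive (RAII).
    oneapi::tbb::global_control limit(
        oneapi::tbb::global_control::max_allowed_parallelism, 8);

    // Any TBB algorithm launched while the limit is alive honors the cap.
    oneapi::tbb::parallel_for(0, 1000, [](int i) {
        volatile long x = static_cast<long>(i) * i; // stand-in for real per-item work
        (void)x;
    });

    std::cout << "parallel_for finished under the thread cap\n";
    return 0;
}

Unlike the old scheduler handle, global_control is a process-wide soft limit rather than a per-arena setting, which is why each converted tool can simply construct one near the top of main() and let scope manage its lifetime.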