Skip to content

[TMVA][SOFIE] Benchmark of models with ONNXRuntime #236

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 10 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
101 changes: 101 additions & 0 deletions cmake/FindONNXRuntime.cmake
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
#[============================================================================[

Author: Federico Sossai (fsossai), 2021
Description: CMake script for finding the ONNXRuntime library.
Usage: The user must provide the directory in which ONNXRuntime is installed
       together with the one in which it has been built by setting the
       following variables:
       ONNXRuntime_SRC (e.g. /home/repo/onnxruntime) [MANDATORY]
       ONNXRuntime_BUILD (e.g. /home/repo/onnxruntime/build)
       If ONNXRuntime_BUILD is not set ${ONNXRuntime_SRC}/build is assumed
       as default value.

Result Variables
^^^^^^^^^^^^^^^^

This module defines the following variables::

ONNXRuntime_FOUND           - True if ONNXRuntime was found
ONNXRuntime_INCLUDE_DIRS    - include directories for ONNXRuntime
ONNXRuntime_LIBRARIES       - link against this library to use ONNXRuntime
ONNXRuntime_VERSION_STRING  - Full version of ONNXRuntime (e.g. 1.8.0)
ONNXRuntime_VERSION_MAJOR   - The major version of the ONNXRuntime implementation
ONNXRuntime_VERSION_MINOR   - The minor version of the ONNXRuntime implementation
ONNXRuntime_VERSION_PATCH   - The patch version of the ONNXRuntime implementation
ONNXRuntime_BUILD_TYPE      - Describes whether the current build is
                              Debug, MinSizeRel, Release or RelWithDebInfo

The module will also define two cache variables::

ONNXRuntime_OS           - Operating system that the library has been built for
ONNXRuntime_INCLUDE_DIR  - Identical to ONNXRuntime_INCLUDE_DIRS
ONNXRuntime_LIBRARY      - Identical to ONNXRuntime_LIBRARIES

#]============================================================================]

# NOTE(review): this module probes the ONNXRuntime *build tree* layout
# (<build>/<OS>/<config>/libonnxruntime.so). The probe is .so-based and thus
# effectively Linux-only even though several OS sub-directories are scanned;
# extend the library file name per platform if Windows/macOS support is needed.

if(ONNXRuntime_SRC)

  # Default the build directory to <src>/build when the user did not set it.
  if(NOT ONNXRuntime_BUILD)
    set(ONNXRuntime_BUILD "${ONNXRuntime_SRC}/build")
    message(STATUS "Assuming ${ONNXRuntime_BUILD} as the build directory for ONNXRuntime")
  endif()

  # Setting ONNXRuntime_OS: first OS sub-directory found in the build tree wins.
  set(all_os "Linux" "Windows" "MacOS" "iOS" "Android")
  foreach(os IN LISTS all_os)
    if(IS_DIRECTORY "${ONNXRuntime_BUILD}/${os}")
      set(ONNXRuntime_OS ${os})
      break()
    endif()
  endforeach()

  if(NOT ONNXRuntime_OS)
    message(FATAL_ERROR "ONNXRuntime: no suitable operating system found in the build directory")
  endif()

  # Setting ONNXRuntime_BUILD_TYPE: first configuration sub-directory found wins.
  set(all_types "Debug" "MinSizeRel" "Release" "RelWithDebInfo")
  foreach(type IN LISTS all_types)
    if(IS_DIRECTORY "${ONNXRuntime_BUILD}/${ONNXRuntime_OS}/${type}")
      set(ONNXRuntime_BUILD_TYPE ${type})
      break()
    endif()
  endforeach()

  # Setting ONNXRuntime_LIBRARIES. Note: this is the *directory* containing
  # libonnxruntime.so, intended for use with (target_)link_directories().
  if(EXISTS "${ONNXRuntime_BUILD}/${ONNXRuntime_OS}/${ONNXRuntime_BUILD_TYPE}/libonnxruntime.so")
    set(ONNXRuntime_LIBRARIES "${ONNXRuntime_BUILD}/${ONNXRuntime_OS}/${ONNXRuntime_BUILD_TYPE}")
    set(ONNXRuntime_LIBRARY "${ONNXRuntime_LIBRARIES}")

    # Setting ONNXRuntime_VERSION_* from the versioned shared-object name,
    # e.g. libonnxruntime.so.1.8.0 -> 1.8.0.
    # file(GLOB) takes plain globbing expressions; the stray "REGEX" keyword
    # in the original call was itself interpreted as a glob pattern.
    file(GLOB match "${ONNXRuntime_LIBRARIES}/libonnxruntime.so.*")
    foreach(fname IN LISTS match)
      get_filename_component(fname "${fname}" NAME)
      string(REGEX MATCH "[0-9][0-9.]+" ONNXRuntime_VERSION_STRING "${fname}")
      string(REPLACE "." ";" version "${ONNXRuntime_VERSION_STRING}")
      list(LENGTH version version_length)
      # Only accept a fully-qualified major.minor.patch version.
      if(version_length GREATER 2)
        list(GET version 0 ONNXRuntime_VERSION_MAJOR)
        list(GET version 1 ONNXRuntime_VERSION_MINOR)
        list(GET version 2 ONNXRuntime_VERSION_PATCH)
        break()
      endif()
    endforeach()
  endif()

  # Setting ONNXRuntime_INCLUDE_DIRS: headers live in the source tree.
  if(IS_DIRECTORY "${ONNXRuntime_SRC}/include")
    set(ONNXRuntime_INCLUDE_DIRS "${ONNXRuntime_SRC}/include")
    set(ONNXRuntime_INCLUDE_DIR "${ONNXRuntime_INCLUDE_DIRS}")
  endif()

endif()

# find_package_handle_standard_args() is provided by this standard module;
# without the include() the call below is an unknown command on a clean run.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(
  ONNXRuntime
  FOUND_VAR ONNXRuntime_FOUND
  REQUIRED_VARS ONNXRuntime_LIBRARIES ONNXRuntime_INCLUDE_DIRS
  VERSION_VAR ONNXRuntime_VERSION_STRING
)
1 change: 1 addition & 0 deletions root/tmva/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
# Benchmarks for the TMVA machine-learning toolkit.
add_subdirectory(tmva)
# SOFIE benchmarks (generated-code inference, with optional ONNXRuntime comparison).
add_subdirectory(sofie)
99 changes: 99 additions & 0 deletions root/tmva/sofie/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
# @author Federico Sossai (fsossai)
#
# Configures two Google-Benchmark executables from the .cxx.in templates in
# this directory: one running ONNXRuntime on the .onnx models found on disk
# (only if ONNXRuntime is available) and one running the SOFIE-generated
# inference headers. One BENCHMARK_CAPTURE line is emitted per model.

if(ROOT_tmva_FOUND AND ROOT_tmva-sofie_FOUND)

# Checking that all required models exist.
# ONNX_MODELS_DIR may be set by the user; defaults to ./input_models.
if (NOT ONNX_MODELS_DIR)
set(ONNX_MODELS_DIR input_models)
endif()
file(GLOB ONNX_MODELS "${ONNX_MODELS_DIR}/*.onnx")

# Copying every ONNX model in the input directory to the build directory,
# so the benchmark binary can open them via the same relative path.
set(out_dir ${CMAKE_CURRENT_BINARY_DIR}/${ONNX_MODELS_DIR})
file(MAKE_DIRECTORY ${out_dir})
foreach(model ${ONNX_MODELS})
get_filename_component(fname ${model} NAME)
configure_file(${model} ${out_dir}/${fname} COPYONLY)
endforeach()

# Looking for ONNXRuntime; the comparison benchmark is skipped when absent.
find_package(ONNXRuntime QUIET)
if(ONNXRuntime_FOUND)
message(STATUS "Found ONNXRuntime (build type: ${ONNXRuntime_BUILD_TYPE}, version: ${ONNXRuntime_VERSION_STRING})")

# Configuring ONNXRuntimeInference_Template.cxx.in.
# @1/@2/@3 are private placeholders substituted per model: @1 = capture name,
# @2 = quoted model path, @3 = trailing ';' (substituted only after the list
# is joined, so that ';' does not clash with CMake's list separator).
set(FUNC_NAME "BM_ONNXRuntime_Inference")
set(CAPTURE_STR "BENCHMARK_CAPTURE(${FUNC_NAME}, @1,\t@2)@3")
set(HEAD_COMMENT "Automatically configured by CMake")
set(ALL_CAPTURES "")
foreach(model ${ONNX_MODELS})
get_filename_component(fname ${model} NAME)
get_filename_component(fname_we ${model} NAME_WE)
string(REPLACE "@1" ${fname_we} cap ${CAPTURE_STR})
string(REPLACE "@2" "\"${ONNX_MODELS_DIR}/${fname}\"" cap ${cap})
list(APPEND ALL_CAPTURES ${cap})
endforeach()
string(REPLACE ";" "\n" BENCHMARK_CAPTURES "${ALL_CAPTURES}") # String[] -> String
string(REPLACE "@3" ";" BENCHMARK_CAPTURES "${BENCHMARK_CAPTURES}") # Adding semicolon
configure_file(ONNXRuntimeInference_Template.cxx.in ONNXRuntimeInference.cxx @ONLY)

RB_ADD_GBENCHMARK(ONNXRuntimeInference
ONNXRuntimeInference.cxx
LABEL short
LIBRARIES TMVA onnxruntime
)
# ONNXRuntime_LIBRARIES is the directory holding libonnxruntime.so
# (see FindONNXRuntime.cmake), hence link_directories rather than a target.
target_link_directories(ONNXRuntimeInference PRIVATE ${ONNXRuntime_LIBRARIES})
target_include_directories(ONNXRuntimeInference PRIVATE ${ONNXRuntime_INCLUDE_DIR})

else()
message(WARNING "ONNXRuntime not found")
endif()

# Configuring SOFIEInference_Template.cxx.in.
# Same placeholder scheme as above; additionally @FUNCS_STR@ rows map a model
# name to its generated TMVA_SOFIE_<name>::infer function and an input size
# (currently hard-coded to 0 via the "@3" -> "0" substitution below).
set(FUNC_NAME "BM_SOFIE_Inference")
set(CAPTURE_STR "BENCHMARK_CAPTURE(${FUNC_NAME}, @1,\t@2)@3")
set(INCLUDES_STR "#include @1")
set(FUNCS_STR "\t\t{ @1,\t{@2,\t@3} }")
set(HEAD_COMMENT "Automatically configured by CMake")
set(ALL_CAPTURES "")
set(ALL_INCLUDES "")
set(ALL_FUNCS "")
set(COMPILED_MODELS_DIR ${ONNX_MODELS_DIR}/compiled)
file(GLOB COMPILED_MODELS "${COMPILED_MODELS_DIR}/*.hxx")
set(inc "")
set(cap "")
set(funcs "")
foreach(model ${COMPILED_MODELS})
get_filename_component(fname ${model} NAME)
get_filename_component(fname_we ${model} NAME_WE)
# Fixing the string for the include headers
string(REPLACE "@1" "\"${COMPILED_MODELS_DIR}/${fname}\"" inc ${INCLUDES_STR})
list(APPEND ALL_INCLUDES ${inc})
# Fixing the string for the GBenchmark captures
string(REPLACE "@1" ${fname_we} cap ${CAPTURE_STR})
string(REPLACE "@2" "\"${fname_we}\"" cap ${cap})
list(APPEND ALL_CAPTURES ${cap})
# Fixing the string for the actual infer function that each capture will call
string(REPLACE "@1" "\"${fname_we}\"" funcs ${FUNCS_STR})
string(REPLACE "@2" "TMVA_SOFIE_${fname_we}::infer" funcs ${funcs})
string(REPLACE "@3" "0" funcs ${funcs})
list(APPEND ALL_FUNCS ${funcs})
endforeach()

# Transforming list of strings into a single multi-line string
string(REPLACE ";" "\n" BENCHMARK_CAPTURES "${ALL_CAPTURES}") # String[] -> String
string(REPLACE "@3" ";" BENCHMARK_CAPTURES "${BENCHMARK_CAPTURES}") # Adding semicolon
string(REPLACE ";" "\n" INCLUDE_HEADERS "${ALL_INCLUDES}") # String[] -> String
string(REPLACE ";" ",\n" FUNC_TUPLES "${ALL_FUNCS}") # String[] -> String
configure_file(SOFIEInference_Template.cxx.in SOFIEInference.cxx @ONLY)

# Benchmark for models emitted by SOFIE
RB_ADD_GBENCHMARK(SOFIEInference
SOFIEInference.cxx
LABEL short
LIBRARIES TMVA openblas
)
# The generated SOFIEInference.cxx includes the compiled .hxx models via
# paths relative to this source directory.
target_include_directories(SOFIEInference PRIVATE ${CMAKE_CURRENT_SOURCE_DIR})

endif()

60 changes: 60 additions & 0 deletions root/tmva/sofie/ONNXRuntimeInference_Template.cxx.in
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
// @HEAD_COMMENT@
// Author: Federico Sossai (fsossai), 2021
//
// Google-Benchmark driver measuring single-threaded ONNXRuntime inference on
// a given .onnx model. CMake appends one BENCHMARK_CAPTURE per model through
// the @BENCHMARK_CAPTURES@ placeholder below. Only single-input /
// single-output models are supported.

#include <benchmark/benchmark.h>
#include <onnxruntime/core/session/onnxruntime_cxx_api.h>

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>
#include <numeric>

using namespace std;

static void @FUNC_NAME@(benchmark::State& state, string model_path)
{
Ort::Env env(ORT_LOGGING_LEVEL_WARNING, "benchmark");

// Single thread, extended graph optimizations: measure raw inference cost.
Ort::SessionOptions session_options;
session_options.SetIntraOpNumThreads(1);
session_options.SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_EXTENDED);

Ort::Session session(env, model_path.c_str(), session_options);

vector<const char*> input_node_names(1);
vector<const char*> output_node_names(1);

Ort::AllocatorWithDefaultOptions allocator;
input_node_names[0] = session.GetInputName(0, allocator);
output_node_names[0] = session.GetOutputName(0, allocator);

// Getting the input shape. Dynamic dimensions (e.g. a free batch size) are
// reported as -1; pin them to 1 so the element count below is well-defined
// and CreateTensor receives a fully-specified shape.
vector<int64_t> input_node_dims = session
.GetInputTypeInfo(0).GetTensorTypeAndShapeInfo().GetShape();
for (auto& dim : input_node_dims) {
if (dim < 1)
dim = 1;
}

// Total number of input elements, accumulated in 64 bits: the original
// multiplies<int>() truncated the int64_t dimensions and could overflow.
size_t input_tensor_size = static_cast<size_t>(accumulate(
input_node_dims.begin(), input_node_dims.end(),
int64_t{1}, multiplies<int64_t>()));
vector<float> input_tensor_values(input_tensor_size);

// Constant input data: only the inference call itself is benchmarked.
fill_n(input_tensor_values.begin(), input_tensor_size, 1.0f);

auto memory_info = Ort::MemoryInfo::CreateCpu(OrtArenaAllocator, OrtMemTypeDefault);
Ort::Value input_tensor = Ort::Value::CreateTensor<float>(memory_info,
input_tensor_values.data(), input_tensor_size,
input_node_dims.data(), input_node_dims.size());

// Timed section: one full forward pass per benchmark iteration.
for (auto _ : state) {
session.Run(Ort::RunOptions{nullptr}, input_node_names.data(),
&input_tensor, 1, output_node_names.data(), 1);
}
}
@BENCHMARK_CAPTURES@

BENCHMARK_MAIN();
36 changes: 36 additions & 0 deletions root/tmva/sofie/SOFIEInference_Template.cxx.in
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
// @HEAD_COMMENT@
// Author: Federico Sossai (fsossai), 2021
//
// Google-Benchmark driver for SOFIE-generated inference functions. CMake
// fills @INCLUDE_HEADERS@ with one #include per compiled model header,
// @FUNC_TUPLES@ with a {name -> {infer function, input size}} row per model,
// and @BENCHMARK_CAPTURES@ with one BENCHMARK_CAPTURE per model.

#include <benchmark/benchmark.h>

#include <iostream>
#include <vector>
#include <functional>
#include <unordered_map>

@INCLUDE_HEADERS@

using namespace std;

static void @FUNC_NAME@(benchmark::State& state, std::string model_name)
{
vector<float> input;

// Maps a model name to its generated infer() function and the size of the
// input buffer it expects (the CMake configuration currently emits 0).
typedef function<vector<float>(float*)> infer_t;
unordered_map<string, pair<infer_t, size_t>> all_infer_funcs{
@FUNC_TUPLES@
};

// at() instead of operator[]: an unknown model name now fails immediately
// with out_of_range instead of silently inserting an empty std::function
// whose invocation would later throw an opaque bad_function_call.
infer_t infer_func;
size_t input_size;
tie(infer_func, input_size) = all_infer_funcs.at(model_name);

// Honor the declared input size when available; fall back to a generous
// fixed buffer while the generated tuples still report 0 (temporary remedy
// kept from the original code).
input.resize(input_size > 0 ? input_size : 10'000);

// Timed section: one inference call per benchmark iteration.
for (auto _ : state) {
infer_func(input.data());
}
}
@BENCHMARK_CAPTURES@

BENCHMARK_MAIN();
Binary file added root/tmva/sofie/input_models/ConvWithPadding.onnx
Binary file not shown.
Binary file added root/tmva/sofie/input_models/Linear_16.onnx
Binary file not shown.
Binary file added root/tmva/sofie/input_models/Linear_32.onnx
Binary file not shown.
Binary file added root/tmva/sofie/input_models/Linear_64.onnx
Binary file not shown.
195 changes: 195 additions & 0 deletions root/tmva/sofie/input_models/compiled/Linear_16.hxx

Large diffs are not rendered by default.

195 changes: 195 additions & 0 deletions root/tmva/sofie/input_models/compiled/Linear_32.hxx

Large diffs are not rendered by default.

195 changes: 195 additions & 0 deletions root/tmva/sofie/input_models/compiled/Linear_64.hxx

Large diffs are not rendered by default.