Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions tmva/sofie/inc/TMVA/ROperator_BasicUnary.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,9 @@ struct UnaryOpTraits<T, EBasicUnaryOperator::kAbs> {
/// Traits for the Softplus unary operator: softplus(x) = log(1 + exp(x)).
template <typename T>
struct UnaryOpTraits<T, EBasicUnaryOperator::kSoftplus> {
   static std::string Name() { return "Softplus"; }
   // Emits a numerically stable softplus: at or above the threshold (20.0f,
   // written as a hexfloat) the identity is returned to avoid overflowing
   // std::exp; below it, std::log1p(std::exp(x)) keeps precision for
   // negative inputs.
   // NOTE(review): the float threshold is also emitted for double tensors,
   // where the ~2e-9 truncation at x >= 20 exceeds double precision — confirm
   // this accuracy is acceptable for T = double.
   static std::string Op(const std::string &X) {
      const std::string threshold = "0x1.4000000000000p+4f";
      std::string expr = "((" + X + " >= " + threshold + ") ? " + X;
      expr += " : std::log1p(std::exp(" + X + ")))";
      return expr;
   }
};

template <typename T, EBasicUnaryOperator Op>
Expand Down Expand Up @@ -115,7 +117,7 @@ public:
}

std::vector<std::string> GetStdLibs() override {
if (Op == EBasicUnaryOperator::kSqrt || Op == EBasicUnaryOperator::kExp || Op == EBasicUnaryOperator::kLog) {
if (Op == EBasicUnaryOperator::kSqrt || Op == EBasicUnaryOperator::kExp || Op == EBasicUnaryOperator::kLog || Op == EBasicUnaryOperator::kSoftplus) {
return { std::string("cmath") };
} else {
return {};
Expand Down
8 changes: 8 additions & 0 deletions tmva/sofie/test/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,13 @@ if (BLAS_FOUND)
# Creating a Google Test for the automatic differentiation of Gemm_Call
ROOT_ADD_GTEST(TestGemmDerivative TestGemmDerivative.cxx LIBRARIES Core BLAS::BLAS)
endif()

# Softplus Operator Unit Test
# Tests threshold hexfloat, numerical stability, overflow protection
ROOT_ADD_GTEST(TestSofieSoftplus TestSofieSoftplus.cxx
LIBRARIES
ROOTTMVASofie
)
endif()

# Look for needed Python modules
Expand Down Expand Up @@ -124,6 +131,7 @@ if (ROOT_TORCH_FOUND AND ROOT_ONNX_FOUND AND NOT broken_onnx)
configure_file(ConvTrans2dModelGenerator.py ConvTrans2dModelGenerator.py COPYONLY)
configure_file(LinearModelGenerator.py LinearModelGenerator.py COPYONLY)
configure_file(RecurrentModelGenerator.py RecurrentModelGenerator.py COPYONLY)
configure_file(SoftplusModelGenerator.py SoftplusModelGenerator.py COPYONLY)

if (BLAS_FOUND)
ROOT_ADD_GTEST(TestSofieModels TestSofieModels.cxx
Expand Down
100 changes: 100 additions & 0 deletions tmva/sofie/test/SoftplusModelGenerator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
#!/usr/bin/python3

### Generate Linear -> Softplus model using PyTorch

import numpy as np
import argparse
import torch
import torch.nn as nn


class Net(nn.Module):
    """Small test network: one Linear layer followed by a Softplus activation."""

    def __init__(self, nd=10, nout=4):
        # Attribute names 'linear'/'softplus' are kept as-is: they determine
        # the state-dict keys and the node names in the exported ONNX graph.
        super(Net, self).__init__()
        self.linear = nn.Linear(in_features=nd, out_features=nout)
        self.softplus = nn.Softplus()

    def forward(self, x):
        # Identical computation to applying linear then softplus in sequence.
        return self.softplus(self.linear(x))


def main():
    """Generate a Linear->Softplus ONNX model and a reference-output file.

    Command line: batchSize inputSize [--v]. Writes <name>.onnx and
    <name>.out (space-separated reference outputs) in the current directory,
    matching the conventions of the other SOFIE model generators.
    """

    parser = argparse.ArgumentParser(description='PyTorch Softplus model generator')
    parser.add_argument('params', type=int, nargs='+',
                        help='parameters: batchSize, inputSize')
    parser.add_argument('--v', action='store_true', default=False,
                        help='For verbose mode')

    args = parser.parse_args()

    noutput = 4

    # Require both batchSize and inputSize. The original bare exit() returned
    # status 0, so the C++ driver could not detect the failure.
    if len(args.params) < 2:
        parser.error("need two parameters: batchSize inputSize")
    bsize = args.params[0]
    d = args.params[1]

    verbose = args.v

    print("using batch-size =", bsize, "input dim =", d)

    name = "SoftplusModel"
    name += "_B" + str(bsize)

    model = Net(d, noutput)

    # Input data matching the TestSofieModels convention: row ib holds the
    # constant value (ib + 1).
    xinput = torch.cat([torch.ones([1, d]) * (ib + 1) for ib in range(bsize)], 0)

    print("input data", xinput.shape)
    if verbose:
        print(xinput)

    # Trace and export
    model.eval()

    # Export to ONNX; torch >= 2.5 uses the dynamo-based exporter.
    v = torch.__version__
    print("using torch version: ", v)
    from packaging.version import Version
    if Version(v) >= Version("2.5.0"):
        torch.onnx.export(
            model,
            xinput,
            name + ".onnx",
            export_params=True,
            dynamo=True,
            external_data=False
        )
    else:
        torch.onnx.export(model, xinput, name + ".onnx", export_params=True)

    # Run inference and save reference output for the C++ test.
    y = model.forward(xinput)

    print("output data : shape,", y.shape)
    print(y)

    outSize = y.nelement()
    yvec = y.reshape([outSize])

    # Context manager guarantees the file is flushed and closed before the
    # C++ test reads it back (the original leaked the handle).
    with open(name + ".out", "w") as f:
        for i in range(0, outSize):
            f.write(str(float(yvec[i].detach())) + " ")


if __name__ == '__main__':
    main()
47 changes: 47 additions & 0 deletions tmva/sofie/test/TestSofieModels.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -414,3 +414,50 @@ TEST(SOFIE, CONVTRANS2D_B1)
{
TestConvTranspose("2d", 1);
}

void TestSoftplus(int nbatches, int inputSize = 10)
{
std::string modelName = "SoftplusModel";
modelName += "_B" + std::to_string(nbatches);

// network parameters : nbatches, inputDim
std::vector<int> params = {nbatches, inputSize};

std::string command = "python3 SoftplusModelGenerator.py ";
for (size_t i = 0; i < params.size(); i++)
command += " " + std::to_string(params[i]);

printf("executing %s\n", command.c_str());
gSystem->Exec(command.c_str());

ExecuteSofieParser(modelName);

int id = DeclareCode(modelName);

ASSERT_NE(id, 0) << "Declareing model code to interpreter failed!";

// input data
std::vector<float> xinput(nbatches * inputSize);
for (int ib = 0; ib < nbatches; ib++) {
std::vector<float> x1(inputSize, float(ib + 1));
std::copy(x1.begin(), x1.end(), xinput.begin() + ib * inputSize);
}

auto result = RunInference(xinput.data(), id);

// read reference value from test file
std::vector<float> refValue(result.size());

std::ifstream f(std::string(modelName + ".out").c_str());
for (size_t i = 0; i < refValue.size(); ++i) {
f >> refValue[i];
if (verbose)
std::cout << " result " << result.at(i) << " reference " << refValue[i] << std::endl;
EXPECT_NEAR(result.at(i), refValue[i], 10 * std::numeric_limits<float>::epsilon());
}
}

// End-to-end Softplus model test with batch size 1 (default input size 10).
TEST(SOFIE, Softplus_B1)
{
TestSoftplus(1);
}
191 changes: 191 additions & 0 deletions tmva/sofie/test/TestSofieSoftplus.cxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,191 @@
#include "TMVA/ROperator_BasicUnary.hxx"
#include "TMVA/RModel.hxx"

#include "gtest/gtest.h"

#include <cmath>
#include <string>
#include <vector>
#include <utility>
#include <algorithm>

using namespace TMVA::Experimental::SOFIE;

// The generated Softplus code must contain the hexfloat overflow threshold
// and emit the threshold branch as a conditional expression.
TEST(SOFIE_Softplus, GenerateHexfloatConstants)
{
   RModel model;
   model.AddInputTensorInfo("input", ETensorType::FLOAT, std::vector<size_t>{1, 10});
   model.AddOutputTensorNameList({"output"});

   ROperator_BasicUnary<float, EBasicUnaryOperator::kSoftplus> op("input", "output");
   op.Initialize(model);

   const std::string code = op.Generate("softplus_test");

   // The threshold 20.0f must be emitted as a hexfloat literal.
   const bool hasThreshold = code.find("0x1.4000000000000p+4f") != std::string::npos;
   EXPECT_TRUE(hasThreshold)
      << "Generated code missing hexfloat threshold constant 0x1.4000000000000p+4f (20.0f)";

   // The threshold branch must be a ternary conditional.
   const bool hasQuestion = code.find("?") != std::string::npos;
   const bool hasColon = code.find(":") != std::string::npos;
   EXPECT_TRUE(hasQuestion && hasColon)
      << "Generated code should use ternary operator for threshold branch";
}

// The generated Softplus code must use the numerically stable primitives:
// std::log1p and std::exp, and never a plain std::log call.
TEST(SOFIE_Softplus, GenerateStabilityFunctions)
{
   RModel model;
   model.AddInputTensorInfo("X", ETensorType::FLOAT, std::vector<size_t>{2, 5});
   model.AddOutputTensorNameList({"Y"});

   ROperator_BasicUnary<float, EBasicUnaryOperator::kSoftplus> op("X", "Y");
   op.Initialize(model);

   std::string code = op.Generate("softplus_stability_test");

   // Verify std::log1p is used (not std::log)
   EXPECT_TRUE(code.find("std::log1p") != std::string::npos)
      << "Generated code must use std::log1p for numerical stability";

   // Verify std::exp is used
   EXPECT_TRUE(code.find("std::exp") != std::string::npos)
      << "Generated code must use std::exp";

   // Verify a plain std::log( call is never emitted. Searching for
   // "std::log(" (with the parenthesis) cannot match "std::log1p". The
   // original condition also accepted std::log( whenever std::log1p appeared
   // earlier in the code, contradicting the stated intent.
   EXPECT_EQ(code.find("std::log("), std::string::npos)
      << "Generated code should use std::log1p, not std::log";
}

// In the non-saturated region the proxy for the generated expression must
// agree with log1p(exp(x)).
TEST(SOFIE_Softplus, NumericCorrectnessStableRegion)
{
   // Proxy for generated logic with threshold
   const auto softplus_eval = [](float x) -> float {
      return (x >= 0x1.4000000000000p+4f) ? x : std::log1p(std::exp(x));
   };

   const float inputs[] = {-10.0f, -5.0f, -1.0f, 0.0f, 1.0f, 5.0f, 10.0f, 15.0f};
   const float tol = 1e-6f;

   for (float x : inputs) {
      // x = 0 gives ln(2) ≈ 0.693
      const float expected = std::log1p(std::exp(x));
      EXPECT_NEAR(softplus_eval(x), expected, tol)
         << "Stable region mismatch at x = " << x;
   }
}

// At and above the threshold the proxy must pass x through unchanged and
// never produce NaN/Inf, even for inputs that would overflow exp().
TEST(SOFIE_Softplus, NumericCorrectnessThreshold)
{
   const auto softplus_eval = [](float x) -> float {
      return (x >= 0x1.4000000000000p+4f) ? x : std::log1p(std::exp(x));
   };

   const float tol = 1e-6f;

   // 20: at threshold; 25/50: above; 100/1000: would overflow exp() without
   // the passthrough branch.
   for (float x : {20.0f, 25.0f, 50.0f, 100.0f, 1000.0f}) {
      const float computed = softplus_eval(x);

      EXPECT_NEAR(computed, x, tol)
         << "Threshold behavior mismatch at x = " << x;

      // Ensure no NaN or Inf
      EXPECT_FALSE(std::isnan(computed)) << "NaN at x = " << x;
      EXPECT_FALSE(std::isinf(computed)) << "Inf at x = " << x;
   }
}

// Spot-check the softplus proxy at analytically known points.
TEST(SOFIE_Softplus, KnownValues)
{
   const auto softplus_eval = [](float x) -> float {
      return (x >= 0x1.4000000000000p+4f) ? x : std::log1p(std::exp(x));
   };

   const float tol = 1e-6f;

   // softplus(0) = ln(1 + e^0) = ln(2)
   EXPECT_NEAR(softplus_eval(0.0f), 0.6931471805599453f, tol);

   // Large negative x: ln(1 + e^x) ≈ e^x ≈ 0
   EXPECT_NEAR(softplus_eval(-20.0f), std::exp(-20.0f), tol);

   // Exactly at the threshold: identity passthrough.
   EXPECT_NEAR(softplus_eval(20.0f), 20.0f, tol);

   // Just below the threshold the stable formula is still used.
   const float below = 19.9f;
   EXPECT_NEAR(softplus_eval(below), std::log1p(std::exp(below)), tol);
}

// The Softplus operator must declare exactly one stdlib dependency: <cmath>.
TEST(SOFIE_Softplus, StdLibDependencies)
{
   ROperator_BasicUnary<float, EBasicUnaryOperator::kSoftplus> op("in", "out");
   const auto libs = op.GetStdLibs();
   ASSERT_EQ(libs.size(), 1u);
   EXPECT_EQ(libs[0], "cmath");
}

// Softplus is elementwise: type and shape pass through unchanged.
TEST(SOFIE_Softplus, Inference)
{
   ROperator_BasicUnary<float, EBasicUnaryOperator::kSoftplus> op("in", "out");

   // Type inference: FLOAT in -> FLOAT out.
   const auto outTypes = op.TypeInference({ETensorType::FLOAT});
   EXPECT_EQ(outTypes[0], ETensorType::FLOAT);

   // Shape inference: output shape equals input shape.
   const std::vector<size_t> inShape = {4, 16, 32};
   const auto outShapes = op.ShapeInference({inShape});
   EXPECT_EQ(outShapes[0], inShape);
}

// The generated kernel must reference both tensors, loop over all elements,
// and carry the operator-name comment.
TEST(SOFIE_Softplus, GenerateStructure)
{
   RModel model;
   model.AddInputTensorInfo("X", ETensorType::FLOAT, std::vector<size_t>{2, 5});
   model.AddOutputTensorNameList({"Y"});

   ROperator_BasicUnary<float, EBasicUnaryOperator::kSoftplus> op("X", "Y");
   op.Initialize(model);

   const std::string code = op.Generate("softplus_struct_test");

   EXPECT_TRUE(code.find("tensor_Y") != std::string::npos) << "Missing output tensor access";
   EXPECT_TRUE(code.find("tensor_X") != std::string::npos) << "Missing input tensor access";
   // The flattened length of shape {2, 5} is 10 and must appear as loop limit.
   EXPECT_TRUE(code.find("10") != std::string::npos) << "Incorrect loop limit generated";
   // Operator comment
   EXPECT_TRUE(code.find("Softplus") != std::string::npos) << "Missing operator comment";
}

// The hexfloat literal used by the code generator must equal 20.0f exactly.
TEST(SOFIE_Softplus, ThresholdConstantValue)
{
   constexpr float threshold = 0x1.4000000000000p+4f;
   EXPECT_FLOAT_EQ(threshold, 20.0f);
}