FastDeploy/tests/function/test_softmax.cc

// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "fastdeploy/core/fd_tensor.h"
#include "fastdeploy/function/softmax.h"
#include "glog/logging.h"
#include "gtest_utils.h"
#include "gtest/gtest.h"
#include <vector>

namespace fastdeploy {
namespace function {

TEST(fastdeploy, softmax) {
  FDTensor input, input1, output;
  CheckShape check_shape;
  CheckData check_data;
  std::vector<float> inputs = {1, 2, 3, 4, 5, 6};
  auto inputs1 = inputs;
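  // The expected values below follow directly from the softmax definition on
  // the 2x3 input [[1, 2, 3], [4, 5, 6]]:
  //   axis = 0: the two entries in each column differ by 3, so every column
  //             reduces to {1 / (1 + e^3), e^3 / (1 + e^3)}
  //             ~= {0.04742587, 0.95257413}.
  //   axis = 1: every row has the form {x, x + 1, x + 2}, so each row reduces
  //             to softmax({1, 2, 3}) ~= {0.09003057, 0.24472846, 0.66524088}.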
  std::vector<float> expected_result_axis0 = {
      0.04742587, 0.04742587, 0.04742587, 0.95257413, 0.95257413, 0.95257413};
  std::vector<float> expected_result_axis1 = {
      0.09003057, 0.24472846, 0.66524088, 0.09003057, 0.24472846, 0.66524088};
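  // Note (assumption about FDTensor semantics): SetExternalData wraps the
  // existing host buffers rather than copying them, so `inputs` and `inputs1`
  // must stay alive for the whole test.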
  input.SetExternalData({2, 3}, FDDataType::FP32, inputs.data());
  input1.SetExternalData({2, 3}, FDDataType::FP32, inputs1.data());
  // axis = 0
  Softmax(input, &output, 0);
  check_shape(output.shape, {2, 3});
  check_data(reinterpret_cast<const float*>(output.Data()),
             expected_result_axis0.data(), expected_result_axis0.size());
  // Test the in-place case where the output tensor equals the input tensor
  Softmax(input, &input, 0);
  check_shape(input.shape, {2, 3});
  check_data(reinterpret_cast<const float*>(input.Data()),
             expected_result_axis0.data(), expected_result_axis0.size());
  // axis = 1
  Softmax(input1, &output, 1);
  check_shape(output.shape, {2, 3});
  check_data(reinterpret_cast<const float*>(output.Data()),
             expected_result_axis1.data(), expected_result_axis1.size());
  // Test the in-place case where the output tensor equals the input tensor
  Softmax(input1, &input1, 1);
  check_shape(input1.shape, {2, 3});
  check_data(reinterpret_cast<const float*>(input1.Data()),
             expected_result_axis1.data(), expected_result_axis1.size());
}

} // namespace function
} // namespace fastdeploy