[GPU] Refactor (openvinotoolkit#21846)
* generate_proposals

* activation

* dft

* lstm_sequence

* rdft

* scatter_elements_update

* fix prelu

* fix activation

* temp comment scatter_elements_update v12

* strided_slice

* fix ranges RDFT

* add headers

* add generate_inputs to DFT and RDFT

* refactor activation generate_inputs

* ov_tensor_utils

* fix code style

* fix code style

* fix code style

* fix code style

* edit skip list for new tests names

* edit skip list for new tests names

* rm unreferenced local variable in dft.cpp

* fix generate_inputs fp16 for cpu

* code style rdft

* revert lstm_sequence scatter_elements_update

* add comments

* add lstm

* add custom compare for generate_proposals

* edit compare

* add custom compare

* rm compare_results

---------

Co-authored-by: Wang, Yang <[email protected]>
andrei-cv and yangwang201911 authored Jan 9, 2024
1 parent 9638b89 commit 4c31bd6
Showing 20 changed files with 548 additions and 297 deletions.
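
Several bullets above ("add generate_inputs to DFT and RDFT", "refactor activation generate_inputs", "add custom compare for generate_proposals") refer to hooks of the new ov::test base class that are not visible in the two hunks reproduced below. As orientation only, here is a minimal sketch of the kind of generate_inputs override that base class supports; the class name, fill ranges, and the use of ov::test::utils::create_and_fill_tensor are illustrative assumptions, not the exact code added by this commit.

// Illustrative sketch, not the code from this commit: a layer test derived from
// ov::test::SubgraphBaseTest supplying its own input tensors.
#include "shared_test_classes/base/ov_subgraph.hpp"
#include "common_test_utils/ov_tensor_utils.hpp"

class ExampleLayerTest : public ov::test::SubgraphBaseTest {  // hypothetical test class
protected:
    void generate_inputs(const std::vector<ov::Shape>& targetInputStaticShapes) override {
        inputs.clear();
        const auto& model_inputs = function->inputs();
        for (size_t i = 0; i < model_inputs.size(); ++i) {
            // Bounded value range so fp16 and fp32 runs stay comparable
            // (range/start/resolution values here are assumptions).
            ov::Tensor tensor = ov::test::utils::create_and_fill_tensor(
                model_inputs[i].get_element_type(), targetInputStaticShapes[i],
                8, 0, 32);
            inputs.insert({model_inputs[i].get_node_shared_ptr(), tensor});
        }
    }
};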
@@ -2,131 +2,121 @@
// SPDX-License-Identifier: Apache-2.0
//

#include <vector>
#include "single_layer_tests/activation.hpp"
#include "single_op_tests/activation.hpp"
#include "common_test_utils/test_constants.hpp"

-using namespace LayerTestsDefinitions;
-using namespace ngraph::helpers;
namespace {
-// Common params
-const std::vector<InferenceEngine::Precision> inputPrecisions = {
-    InferenceEngine::Precision::FP32,
-    InferenceEngine::Precision::FP16,
-    InferenceEngine::Precision::I16,
-    InferenceEngine::Precision::U8
-};
+using ov::test::ActivationLayerTest;
+using ov::test::ActivationParamLayerTest;
+using ov::test::utils::ActivationTypes;

-const std::vector<InferenceEngine::Precision> netPrecisions = {
-    InferenceEngine::Precision::FP32,
-    InferenceEngine::Precision::FP16
+// Common params
+const std::vector<ov::element::Type> netPrecisions = {
+    ov::element::f32,
+    ov::element::f16
};

const std::map<ActivationTypes, std::vector<std::vector<float>>> activationTypes = {
-    {Sigmoid, {}},
-    {Tanh, {}},
-    {Relu, {}},
-    {Exp, {}},
-    {Log, {}},
-    {Sign, {}},
-    {Abs, {}},
-    {Gelu, {}},
-    {Clamp, {{-2.0f, 2.0f}}},
-    {Negative, {}},
-    {Acos, {}},
-    {Acosh, {}},
-    {Asin, {}},
-    {Asinh, {}},
-    {Atan, {}},
-    {Atanh, {}},
-    {Cos, {}},
-    {Cosh, {}},
-    {Floor, {}},
-    {Sin, {}},
-    {Sinh, {}},
-    {Sqrt, {}},
-    {Tan, {}},
-    {Elu, {{0.1f}}},
-    {Erf, {}},
-    {HardSigmoid, {{0.2f, 0.5f}}},
-    {Selu, {{1.6732f, 1.0507f}}},
-    {Ceiling, {}},
-    {Mish, {}},
-    {HSwish, {}},
-    {SoftPlus, {}},
-    {HSigmoid, {}},
-    {Swish, {{0.5f}}},
-    {RoundHalfToEven, {}},
-    {RoundHalfAwayFromZero, {}},
-    {GeluErf, {}},
-    {GeluTanh, {}},
-    {SoftSign, {}},
+    {ActivationTypes::Sigmoid, {}},
+    {ActivationTypes::Tanh, {}},
+    {ActivationTypes::Relu, {}},
+    {ActivationTypes::Exp, {}},
+    {ActivationTypes::Log, {}},
+    {ActivationTypes::Sign, {}},
+    {ActivationTypes::Abs, {}},
+    {ActivationTypes::Gelu, {}},
+    {ActivationTypes::Clamp, {{-2.0f, 2.0f}}},
+    {ActivationTypes::Negative, {}},
+    {ActivationTypes::Acos, {}},
+    {ActivationTypes::Acosh, {}},
+    {ActivationTypes::Asin, {}},
+    {ActivationTypes::Asinh, {}},
+    {ActivationTypes::Atan, {}},
+    {ActivationTypes::Atanh, {}},
+    {ActivationTypes::Cos, {}},
+    {ActivationTypes::Cosh, {}},
+    {ActivationTypes::Floor, {}},
+    {ActivationTypes::Sin, {}},
+    {ActivationTypes::Sinh, {}},
+    {ActivationTypes::Sqrt, {}},
+    {ActivationTypes::Tan, {}},
+    {ActivationTypes::Elu, {{0.1f}}},
+    {ActivationTypes::Erf, {}},
+    {ActivationTypes::HardSigmoid, {{0.2f, 0.5f}}},
+    {ActivationTypes::Selu, {{1.6732f, 1.0507f}}},
+    {ActivationTypes::Ceiling, {}},
+    {ActivationTypes::Mish, {}},
+    {ActivationTypes::HSwish, {}},
+    {ActivationTypes::SoftPlus, {}},
+    {ActivationTypes::HSigmoid, {}},
+    {ActivationTypes::Swish, {{0.5f}}},
+    {ActivationTypes::RoundHalfToEven, {}},
+    {ActivationTypes::RoundHalfAwayFromZero, {}},
+    {ActivationTypes::GeluErf, {}},
+    {ActivationTypes::GeluTanh, {}},
+    {ActivationTypes::SoftSign, {}},
};

const std::map<ActivationTypes, std::vector<std::vector<float>>> big_rank_activation_types = {
-    {Relu, {}},
-    {Exp, {}},
-    {Log, {}},
-    {Abs, {}},
-    {Clamp, {{-2.0f, 2.0f}}},
-    {Ceiling, {}},
-    {Swish, {{0.5f}}},
+    {ActivationTypes::Relu, {}},
+    {ActivationTypes::Exp, {}},
+    {ActivationTypes::Log, {}},
+    {ActivationTypes::Abs, {}},
+    {ActivationTypes::Clamp, {{-2.0f, 2.0f}}},
+    {ActivationTypes::Ceiling, {}},
+    {ActivationTypes::Swish, {{0.5f}}},
};

const std::map<ActivationTypes, std::vector<std::vector<float>>> activationParamTypes = {
-    {PReLu, {{-0.01f}}},
-    {LeakyRelu, {{0.01f}}}
+    {ActivationTypes::PReLu, {{-0.01f}}},
+    {ActivationTypes::LeakyRelu, {{0.01f}}}
};

-std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> basic = {
-    {{1, 50}, {{}}},
-    {{1, 128}, {{}}},
-};
+std::map<std::vector<ov::Shape>, std::vector<ov::Shape>> basic = {
+    {{{1, 50}}, {{}}},
+    {{{1, 128}}, {{}}},
+};

-std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> big_ranks = {
-    {{1, 2, 3, 4, 5, 3}, {{}}},
-    {{1, 2, 3, 4, 1, 3, 2}, {{}}},
-    {{1, 2, 3, 4, 3, 2, 1, 2}, {{}}},
-};
+std::map<std::vector<ov::Shape>, std::vector<ov::Shape>> big_ranks = {
+    {{{1, 2, 3, 4, 5, 3}}, {{}}},
+    {{{1, 2, 3, 4, 1, 3, 2}}, {{}}},
+    {{{1, 2, 3, 4, 3, 2, 1, 2}}, {{}}},
+};

-std::map<std::vector<size_t>, std::vector<std::vector<size_t>>> preluBasic = {
-    {{1, 10, 20}, {{10}, {20}, {10, 20}}},
-    {{1, 128}, {{1}, {128}}},
-};
+std::map<std::vector<ov::Shape>, std::vector<ov::Shape>> preluBasic = {
+    {{{1, 10, 20}}, {{10}, {20}, {10, 20}}},
+    {{{1, 128}}, {{1}, {128}}},
+};

+auto static_shapes_param_transform = [](const std::vector<std::pair<std::vector<ov::Shape>, ov::Shape>>& original_shapes) {
+    std::vector<std::pair<std::vector<ov::test::InputShape>, ov::Shape>> new_shapes;
+    for (const auto& shape_element : original_shapes) {
+        new_shapes.emplace_back(ov::test::static_shapes_to_test_representation(shape_element.first), shape_element.second);
+    }
+    return new_shapes;
+};

const auto basicCases = []() {
return ::testing::Combine(
::testing::ValuesIn(ov::test::utils::combineParams(activationTypes)),
::testing::ValuesIn(netPrecisions),
-::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-::testing::Values(InferenceEngine::Layout::ANY),
-::testing::Values(InferenceEngine::Layout::ANY),
-::testing::ValuesIn(ov::test::utils::combineParams(basic)),
+::testing::ValuesIn(static_shapes_param_transform(ov::test::utils::combineParams(basic))),
::testing::Values(ov::test::utils::DEVICE_GPU));
};

const auto basicPreluCases = []() {
return ::testing::Combine(
::testing::ValuesIn(ov::test::utils::combineParams(activationParamTypes)),
::testing::ValuesIn(netPrecisions),
-::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-::testing::Values(InferenceEngine::Layout::ANY),
-::testing::Values(InferenceEngine::Layout::ANY),
-::testing::ValuesIn(ov::test::utils::combineParams(preluBasic)),
+::testing::ValuesIn(static_shapes_param_transform(ov::test::utils::combineParams(preluBasic))),
::testing::Values(ov::test::utils::DEVICE_GPU));
};

const auto big_rank_cases = []() {
return ::testing::Combine(
::testing::ValuesIn(ov::test::utils::combineParams(big_rank_activation_types)),
::testing::ValuesIn(netPrecisions),
-::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-::testing::Values(InferenceEngine::Precision::UNSPECIFIED),
-::testing::Values(InferenceEngine::Layout::ANY),
-::testing::Values(InferenceEngine::Layout::ANY),
-::testing::ValuesIn(ov::test::utils::combineParams(big_ranks)),
+::testing::ValuesIn(static_shapes_param_transform(ov::test::utils::combineParams(big_ranks))),
::testing::Values(ov::test::utils::DEVICE_GPU));
};

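The INSTANTIATE_TEST_SUITE_P calls that consume these three generator lambdas sit below the truncated portion of this hunk. For orientation, a suite built from one of them would be wired up roughly as follows; the suite name here is a placeholder, not necessarily the one used in the file.

// Illustrative wiring only; the real instantiation names are in the truncated part of the diff.
INSTANTIATE_TEST_SUITE_P(smoke_Activation_Basic,   // placeholder suite name
                         ActivationLayerTest,      // ov::test::ActivationLayerTest from the using above
                         basicCases(),             // the lambda above builds the Combine() generator
                         ActivationLayerTest::getTestCaseName);
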
@@ -3,86 +3,84 @@
//

#include <common_test_utils/test_constants.hpp>
-#include <single_layer_tests/dft.hpp>
-#include <vector>
+#include <single_op_tests/dft.hpp>

namespace {
+using ov::test::DFTLayerTest;

-const std::vector<ngraph::helpers::DFTOpType> opTypes = {
-    ngraph::helpers::DFTOpType::FORWARD,
-    ngraph::helpers::DFTOpType::INVERSE,
+const std::vector<ov::test::utils::DFTOpType> opTypes = {
+    ov::test::utils::DFTOpType::FORWARD,
+    ov::test::utils::DFTOpType::INVERSE,
};

-const std::vector<InferenceEngine::Precision> inputPrecisions = {
-    InferenceEngine::Precision::FP32,
-    InferenceEngine::Precision::FP16,
+const std::vector<ov::element::Type> inputPrecisions = {
+    ov::element::f32,
+    ov::element::f16,
};

-const auto combine = [](const std::vector<InferenceEngine::SizeVector>& inputShapes,
+const auto combine = [](const std::vector<std::vector<ov::Shape>>& inputShapes,
const std::vector<std::vector<int64_t>>& axes,
const std::vector<std::vector<int64_t>>& signalSizes) {
-return testing::Combine(testing::ValuesIn(inputShapes),
+return testing::Combine(testing::ValuesIn(ov::test::static_shapes_to_test_representation(inputShapes)),
testing::ValuesIn(inputPrecisions),
testing::ValuesIn(axes),
testing::ValuesIn(signalSizes),
testing::ValuesIn(opTypes),
testing::Values(ov::test::utils::DEVICE_GPU));
};

-using namespace LayerTestsDefinitions;
INSTANTIATE_TEST_SUITE_P(smoke_DFT_2d,
DFTLayerTest,
-combine({{10, 2}}, // input shapes
+combine({{{10, 2}}}, // input shapes
{{0}}, // axes
{{}, {3}}), // signal sizes
DFTLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_DFT_3d,
DFTLayerTest,
-combine({{10, 4, 2}}, // input shapes
+combine({{{10, 4, 2}}}, // input shapes
{{0, 1}}, // axes
{{}, {3, 10}}), // signal sizes
DFTLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_DFT_4d,
DFTLayerTest,
-combine({{10, 4, 8, 2}}, // input shapes
+combine({{{10, 4, 8, 2}}}, // input shapes
{{0, 1, 2}}, // axes
{{}, {3, 10, 8}}), // signal sizes
DFTLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_DFT_4d_negative_reversed_axes,
DFTLayerTest,
-combine({{10, 4, 8, 2}}, // input shapes
+combine({{{10, 4, 8, 2}}}, // input shapes
{{-1, -2, -3}}, // axes
{{}, {8, 10, 3}}), // signal sizes
DFTLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_DFT_4d_single_axis,
DFTLayerTest,
-combine({{10, 4, 8, 2}}, // input shapes
+combine({{{10, 4, 8, 2}}}, // input shapes
{{0}, {1}, {2}}, // axes
{{}, {1}, {5}, {20}}), // signal sizes
DFTLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_DFT_5d,
DFTLayerTest,
-combine({{10, 4, 8, 2, 2}}, // input shapes
+combine({{{10, 4, 8, 2, 2}}}, // input shapes
{{0, 1, 2, 3}}, // axes
{{}, {3, 10, 8, 6}}), // signal sizes
DFTLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_DFT_6d,
DFTLayerTest,
-combine({{10, 4, 8, 2, 5, 2}}, // input shapes
+combine({{{10, 4, 8, 2, 5, 2}}}, // input shapes
{{0, 1, 2, 3, 4}}, // axes
{{}, {3, 10, 8, 6, 2}}), // signal sizes
DFTLayerTest::getTestCaseName);

INSTANTIATE_TEST_SUITE_P(smoke_DFT_6d_zero,
DFTLayerTest,
-combine({{10, 4, 8, 2, 5, 2}}, // input shapes
+combine({{{10, 4, 8, 2, 5, 2}}}, // input shapes
{{}}, // axes
{{}}), // signal sizes
DFTLayerTest::getTestCaseName);
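
Both refactored files lean on ov::test::static_shapes_to_test_representation to lift plain ov::Shape lists into the InputShape form the new test base class consumes. A minimal sketch of that conversion follows, assuming InputShape is the usual pair of a PartialShape and its static target shapes; the header path and member layout are assumptions, not taken from this diff.

// Sketch of the shape conversion used above (assumed: ov::test::InputShape is
// std::pair<ov::PartialShape, std::vector<ov::Shape>>; header path assumed).
#include "shared_test_classes/base/ov_subgraph.hpp"

void shape_conversion_example() {
    std::vector<ov::Shape> static_shapes = {{10, 2}, {10, 4, 2}};
    std::vector<ov::test::InputShape> converted =
        ov::test::static_shapes_to_test_representation(static_shapes);
    // converted[0].first  holds the fully static PartialShape {10, 2}
    // converted[0].second holds the single target Shape {10, 2} used at inference time
}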