test/nextgen/operators/matmul/matmul_operator.cpp
| Line | Branch | Exec | Source |
|---|---|---|---|
| 1 | | | // |
| 2 | | | // SPDX-FileCopyrightText: Copyright 2025 Arm Limited and/or its affiliates <open-source-office@arm.com> |
| 3 | | | // |
| 4 | | | // SPDX-License-Identifier: Apache-2.0 |
| 5 | | | // |
| 6 | | | |
| 7 | | | #include "test/nextgen/operators/matmul/matmul_operator.hpp" |
| 8 | | | |
| 9 | | | #include <array> |
| 10 | | | #include <memory> |
| 11 | | | #include <optional> |
| 12 | | | |
| 13 | | | #include "test/common/cpu_info.hpp" |
| 14 | | | #include "test/common/data_type.hpp" |
| 15 | | | #include "test/common/span.hpp" |
| 16 | | | #include "test/nextgen/functions/round.hpp" |
| 17 | | | #include "test/nextgen/operators/matmul/matmul/matmul_wrapper.hpp" |
| 18 | | | #include "test/nextgen/operators/matmul/matmul_bias_mode.hpp" |
| 19 | | | #include "test/nextgen/operators/matmul/pack_lhs/matmul_pack_lhs_wrapper.hpp" |
| 20 | | | #include "test/nextgen/operators/matmul/pack_rhs/matmul_pack_rhs_wrapper.hpp" |
| 21 | | | #include "test/nextgen/quantization/asymm_linear_quantizer.hpp" |
| 22 | | | #include "test/nextgen/quantization/symm_linear_quantizer.hpp" |
| 23 | | | |
| 24 | | | namespace kai::test { |
| 25 | | | |
| 26 | | 3 | Span<const MatMulOperator> get_available_matmul_operators() { |
| 27 | 2/4: ✓ Branch 0 taken 3 times; ✗ Branch 1 not taken; ✗ Branch 2 not taken; ✓ Branch 3 taken 3 times | 3 | static std::array<MatMulOperator, 2> operators; |
| 28 | | | |
| 29 | | | // matmul_clamp_f32_qai8dxp1vlx8_qsi4cxp4vlx8_1vlx4vl_sme2_mopa |
| 30 | | 3 | operators[0].name = "matmul_clamp_f32_qai8dxp1vlx8_qsi4cxp4vlx8_1vlx4vl_sme2_mopa"; |
| 31 | | | |
| 32 | | 6 | operators[0].is_cpu_supported = []() { return cpu_has_sme2(); }; |
| 33 | | 103 | operators[0].is_shape_suitable = [](size_t, size_t, size_t) { return true; }; |
| 34 | | | |
| 35 | | 3 | operators[0].supported_bias_modes = {MatMulBiasMode::NO_BIAS, MatMulBiasMode::PER_N}; |
| 36 | | | |
| 37 | 2/4: ✓ Branch 0 taken 1 time; ✗ Branch 1 not taken; ✓ Branch 2 taken 1 time; ✗ Branch 3 not taken | 6 | operators[0].lhs_quant = std::make_unique<AsymmLinearQuantizer>( |
| 38 | | 3 | DataType::I8, DataType::FP32, DataType::I32, RoundMode::TIE_AWAY, RoundMode::CURRENT, 1, 0); |
| 39 | 1/2: ✓ Branch 0 taken 1 time; ✗ Branch 1 not taken | 3 | operators[0].rhs_quant = |
| 40 | | 3 | std::make_unique<SymmLinearQuantizer>(DataType::U4, DataType::FP32, RoundMode::CURRENT, 1, 0); |
| 41 | | 3 | operators[0].bias_quant = std::nullopt; |
| 42 | | | |
| 43 | | 3 | operators[0].acc_dtype = DataType::FP32; |
| 44 | | 3 | operators[0].dst_dtype = DataType::FP32; |
| 45 | | | |
| 46 | 1/2: ✓ Branch 0 taken 1 time; ✗ Branch 1 not taken | 3 | operators[0].pack_lhs = create_matmul_lhs_quant_pack_qai8dxp1vlx4_f32(); |
| 47 | 1/2: ✓ Branch 0 taken 1 time; ✗ Branch 1 not taken | 3 | operators[0].pack_rhs = create_matmul_rhs_pack_nxk_qsi4cxp4vlx4s1s0_qsu4cxs1s0_neon(); |
| 48 | | 3 | operators[0].matmul = create_matmul_clamp_f32_qai8dxp1vlx8_qsi4cxp4vlx8_1vlx4vl_sme2_mopa(); |
| 49 | | | |
| 50 | | | // matmul_clamp_f32_qai8dxp1x4_qsi4cxp4vlx4_1x4vl_sme2_sdot |
| 51 | | 3 | operators[1].name = "matmul_clamp_f32_qai8dxp1x4_qsi4cxp4vlx4_1x4vl_sme2_sdot"; |
| 52 | | | |
| 53 | | 6 | operators[1].is_cpu_supported = []() { return cpu_has_sme2(); }; |
| 54 | | 103 | operators[1].is_shape_suitable = [](size_t, size_t, size_t) { return true; }; |
| 55 | | | |
| 56 | | 3 | operators[1].supported_bias_modes = {MatMulBiasMode::NO_BIAS, MatMulBiasMode::PER_N}; |
| 57 | | | |
| 58 | 2/4: ✓ Branch 0 taken 1 time; ✗ Branch 1 not taken; ✓ Branch 2 taken 1 time; ✗ Branch 3 not taken | 6 | operators[1].lhs_quant = std::make_unique<AsymmLinearQuantizer>( |
| 59 | | 3 | DataType::I8, DataType::FP32, DataType::I32, RoundMode::TIE_AWAY, RoundMode::CURRENT, 1, 0); |
| 60 | 1/2: ✓ Branch 0 taken 1 time; ✗ Branch 1 not taken | 3 | operators[1].rhs_quant = |
| 61 | | 3 | std::make_unique<SymmLinearQuantizer>(DataType::U4, DataType::FP32, RoundMode::CURRENT, 1, 0); |
| 62 | | 3 | operators[1].bias_quant = std::nullopt; |
| 63 | | | |
| 64 | | 3 | operators[1].acc_dtype = DataType::FP32; |
| 65 | | 3 | operators[1].dst_dtype = DataType::FP32; |
| 66 | | | |
| 67 | 1/2: ✓ Branch 0 taken 1 time; ✗ Branch 1 not taken | 3 | operators[1].pack_lhs = create_matmul_lhs_quant_pack_qai8dxp1x4_f32(); |
| 68 | 1/2: ✓ Branch 0 taken 1 time; ✗ Branch 1 not taken | 3 | operators[1].pack_rhs = create_matmul_rhs_pack_nxk_qsi4cxp4vlx4s1s0_qsu4cxs1s0_neon(); |
| 69 | | 3 | operators[1].matmul = create_matmul_clamp_f32_qai8dxp1x4_qsi4cxp4vlx4_1x4vl_sme2_sdot(); |
| 70 | | | |
| 71 | | 3 | return operators; |
| 72 | | ✗ | } |
| 73 | | | |
| 74 | | | } // namespace kai::test |
| 75 | | | |
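
The function covered above builds a static registry of `MatMulOperator` descriptors that the test harness can iterate over. As a minimal sketch (not the project's actual harness), the snippet below shows how a consumer might filter the returned span using the fields populated in the listing; it assumes the callable members (`is_cpu_supported`, `is_shape_suitable`) are invocable like `std::function`, and the helper `run_matmul_operator_tests` with its `(m, n, k)` parameters is hypothetical.

```cpp
// Hypothetical consumer of get_available_matmul_operators(); only the
// MatMulOperator fields are taken from the listing above, the loop itself
// is illustrative.
#include <cstddef>

#include "test/nextgen/operators/matmul/matmul_operator.hpp"

namespace kai::test {

inline void run_matmul_operator_tests(size_t m, size_t n, size_t k) {
    for (const MatMulOperator& op : get_available_matmul_operators()) {
        // Both registered kernels require SME2, so this guard is what keeps
        // the run valid on CPUs without that extension.
        if (!op.is_cpu_supported()) {
            continue;
        }

        // Both kernels currently accept any (m, n, k), but the hook lets a
        // shape-restricted kernel opt out.
        if (!op.is_shape_suitable(m, n, k)) {
            continue;
        }

        // From here a harness would quantize the LHS/RHS via op.lhs_quant and
        // op.rhs_quant, pack with op.pack_lhs / op.pack_rhs, run op.matmul,
        // and compare the FP32 output against a reference implementation.
    }
}

}  // namespace kai::test
```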