/*
 * Copyright (c) 2017 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef __ARM_COMPUTE_NESOFTMAXLAYER_H__
#define __ARM_COMPUTE_NESOFTMAXLAYER_H__

#include "arm_compute/core/NEON/kernels/NEFillBorderKernel.h"
#include "arm_compute/core/NEON/kernels/NESoftmaxLayerKernel.h"
#include "arm_compute/runtime/IFunction.h"
#include "arm_compute/runtime/IMemoryManager.h"
#include "arm_compute/runtime/MemoryGroup.h"
#include "arm_compute/runtime/Tensor.h"

#include <memory>

namespace arm_compute
{
class ITensor;

/** Basic function to compute a SoftmaxLayer.
 *
 * Softmax is calculated by:
 * @f[ out = \frac{e^{x - max(x)}}{\sum{e^{x - max(x)}}} @f]
 *
 * This function runs the following kernels:
 * -# @ref NELogits1DMaxKernel
 * -# @ref NELogits1DShiftExpSumKernel
 * -# @ref NELogits1DNormKernel
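 *
 * A minimal usage sketch (illustrative only; the 2D shape and F32 data type below are assumptions,
 * not requirements of the function):
 *
 * @code
 * Tensor src;
 * Tensor dst;
 * src.allocator()->init(TensorInfo(TensorShape(128U, 32U), 1, DataType::F32));
 * dst.allocator()->init(TensorInfo(TensorShape(128U, 32U), 1, DataType::F32));
 *
 * NESoftmaxLayer softmax;
 * softmax.configure(&src, &dst); // beta defaults to 1.0f
 *
 * src.allocator()->allocate();
 * dst.allocator()->allocate();
 * // ... fill src with the input logits ...
 *
 * softmax.run(); // dst now holds the softmax of src, computed along each 1D row of logits
 * @endcode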
 */
class NESoftmaxLayer : public IFunction
{
public:
    /** Constructor */
    NESoftmaxLayer(std::shared_ptr<IMemoryManager> memory_manager = nullptr);
    /** Set the input and output tensors.
     *
     * @param[in]  input  Source tensor. Data types supported: QS8/QS16/F16/F32.
     * @param[out] output Destination tensor. Data types supported: same as @p input.
     * @param[in]  beta   (Optional) A scaling factor for the exponent. QS8/QS16 only support a beta value of 1.
     */
    void configure(ITensor *input, ITensor *output, float beta = 1.0f);
    /** Static function to check if given info will lead to a valid configuration of @ref NESoftmaxLayer
     *
     * @param[in] input  Source tensor. Data types supported: QS8/QS16/F16/F32.
     * @param[in] output Destination tensor. Data types supported: same as @p input.
     * @param[in] beta   (Optional) A scaling factor for the exponent. QS8/QS16 only support a beta value of 1.
     *
     * @return a status
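     *
     * A possible pre-flight check (sketch; assumes src and dst were initialised as in the class-level example above):
     *
     * @code
     * const Status status = NESoftmaxLayer::validate(src.info(), dst.info());
     * // A non-OK status carries a description of why the configuration would be rejected.
     * @endcode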
     */
    static Status validate(const ITensorInfo *input, const ITensorInfo *output, float beta = 1.0f);

    // Inherited methods overridden:
    void run() override;

private:
    MemoryGroup                 _memory_group;
    NELogits1DMaxKernel         _max_kernel;
    NELogits1DShiftExpSumKernel _shift_exp_sum_kernel;
    NELogits1DNormKernel        _norm_kernel;
    NEFillBorderKernel          _fill_border_kernel;
    Tensor                      _max;
    Tensor                      _sum;
    Tensor                      _tmp;
};
}
#endif /* __ARM_COMPUTE_NESOFTMAXLAYER_H__ */