//
// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "../Serializer.hpp"
#include "SerializerTestUtils.hpp"
#include <armnn/Descriptors.hpp>
#include <armnn/INetwork.hpp>
#include <armnn/IRuntime.hpp>
#include <armnnDeserializer/IDeserializer.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>
#include <doctest/doctest.h>
#include <fmt/format.h>
TEST_SUITE("SerializerTests")
{
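// Rebuilds an LstmInputParams struct from the flat vector of constant tensors that
// ExecuteStrategy receives, following the order in which the serializer stores them:
// the basic parameters first, then the CIFG, peephole, projection and layer-norm
// parameters when the corresponding descriptor flags are set.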
template<typename Descriptor>
armnn::LstmInputParams ConstantVector2LstmInputParams(const std::vector<armnn::ConstTensor>& constants,
Descriptor& descriptor)
{
armnn::LstmInputParams lstmInputParams;
size_t i = 0;
// Inserting basic parameters
lstmInputParams.m_InputToForgetWeights = &constants[i++];
lstmInputParams.m_InputToCellWeights = &constants[i++];
lstmInputParams.m_InputToOutputWeights = &constants[i++];
lstmInputParams.m_RecurrentToForgetWeights = &constants[i++];
lstmInputParams.m_RecurrentToCellWeights = &constants[i++];
lstmInputParams.m_RecurrentToOutputWeights = &constants[i++];
lstmInputParams.m_ForgetGateBias = &constants[i++];
lstmInputParams.m_CellBias = &constants[i++];
lstmInputParams.m_OutputGateBias = &constants[i++];
if (!descriptor.m_CifgEnabled)
{
lstmInputParams.m_InputToInputWeights = &constants[i++];
lstmInputParams.m_RecurrentToInputWeights = &constants[i++];
lstmInputParams.m_InputGateBias = &constants[i++];
}
if (descriptor.m_PeepholeEnabled)
{
if (!descriptor.m_CifgEnabled)
{
lstmInputParams.m_CellToInputWeights = &constants[i++];
}
lstmInputParams.m_CellToForgetWeights = &constants[i++];
lstmInputParams.m_CellToOutputWeights = &constants[i++];
}
if (descriptor.m_ProjectionEnabled)
{
lstmInputParams.m_ProjectionWeights = &constants[i++];
lstmInputParams.m_ProjectionBias = &constants[i++];
}
if (descriptor.m_LayerNormEnabled)
{
if (!descriptor.m_CifgEnabled)
{
lstmInputParams.m_InputLayerNormWeights = &constants[i++];
}
lstmInputParams.m_ForgetLayerNormWeights = &constants[i++];
lstmInputParams.m_CellLayerNormWeights = &constants[i++];
lstmInputParams.m_OutputLayerNormWeights = &constants[i++];
}
return lstmInputParams;
}
// Works for Lstm, UnidirectionalSequenceLstm and QLstm (QuantizedLstm uses different parameters)
template<typename Descriptor>
class VerifyLstmLayer : public LayerVerifierBaseWithDescriptor<Descriptor>
{
public:
VerifyLstmLayer(const std::string& layerName,
const std::vector<armnn::TensorInfo>& inputInfos,
const std::vector<armnn::TensorInfo>& outputInfos,
const Descriptor& descriptor,
const armnn::LstmInputParams& inputParams)
: LayerVerifierBaseWithDescriptor<Descriptor>(layerName, inputInfos, outputInfos, descriptor)
, m_InputParams(inputParams) {}
void ExecuteStrategy(const armnn::IConnectableLayer* layer,
const armnn::BaseDescriptor& descriptor,
const std::vector<armnn::ConstTensor>& constants,
const char* name,
const armnn::LayerBindingId id = 0) override
{
armnn::IgnoreUnused(constants, id);
switch (layer->GetType())
{
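// Input and Output layers carry no LSTM data; only the LSTM layer itself is checked.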
case armnn::LayerType::Input: break;
case armnn::LayerType::Output: break;
case armnn::LayerType::Lstm:
case armnn::LayerType::UnidirectionalSequenceLstm:
{
this->VerifyNameAndConnections(layer, name);
const Descriptor& internalDescriptor = static_cast<const Descriptor&>(descriptor);
this->VerifyDescriptor(internalDescriptor);
armnn::LstmInputParams lstmParams = ConstantVector2LstmInputParams(constants, internalDescriptor);
VerifyInputParameters(lstmParams);
break;
}
case armnn::LayerType::QLstm:
{
this->VerifyNameAndConnections(layer, name);
const Descriptor& internalDescriptor = static_cast<const Descriptor&>(descriptor);
this->VerifyDescriptor(internalDescriptor);
armnn::LstmInputParams lstmParams = ConstantVector2LstmInputParams(constants, internalDescriptor);
VerifyInputParameters(lstmParams);
break;
}
default:
{
throw armnn::Exception("Unexpected layer type in Lstm test model");
}
}
}
protected:
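// Compares every LSTM parameter of the deserialized layer against the expected set;
// parameters that are unused for the current descriptor configuration remain nullptr
// on both sides.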
void VerifyInputParameters(const armnn::LstmInputParams& params)
{
this->VerifyConstTensors(
"m_InputToInputWeights", m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
this->VerifyConstTensors(
"m_InputToForgetWeights", m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
this->VerifyConstTensors(
"m_InputToCellWeights", m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
this->VerifyConstTensors(
"m_InputToOutputWeights", m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
this->VerifyConstTensors(
"m_RecurrentToInputWeights", m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
this->VerifyConstTensors(
"m_RecurrentToForgetWeights", m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
this->VerifyConstTensors(
"m_RecurrentToCellWeights", m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
this->VerifyConstTensors(
"m_RecurrentToOutputWeights", m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
this->VerifyConstTensors(
"m_CellToInputWeights", m_InputParams.m_CellToInputWeights, params.m_CellToInputWeights);
this->VerifyConstTensors(
"m_CellToForgetWeights", m_InputParams.m_CellToForgetWeights, params.m_CellToForgetWeights);
this->VerifyConstTensors(
"m_CellToOutputWeights", m_InputParams.m_CellToOutputWeights, params.m_CellToOutputWeights);
this->VerifyConstTensors(
"m_InputGateBias", m_InputParams.m_InputGateBias, params.m_InputGateBias);
this->VerifyConstTensors(
"m_ForgetGateBias", m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
this->VerifyConstTensors(
"m_CellBias", m_InputParams.m_CellBias, params.m_CellBias);
this->VerifyConstTensors(
"m_OutputGateBias", m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
this->VerifyConstTensors(
"m_ProjectionWeights", m_InputParams.m_ProjectionWeights, params.m_ProjectionWeights);
this->VerifyConstTensors(
"m_ProjectionBias", m_InputParams.m_ProjectionBias, params.m_ProjectionBias);
this->VerifyConstTensors(
"m_InputLayerNormWeights", m_InputParams.m_InputLayerNormWeights, params.m_InputLayerNormWeights);
this->VerifyConstTensors(
"m_ForgetLayerNormWeights", m_InputParams.m_ForgetLayerNormWeights, params.m_ForgetLayerNormWeights);
this->VerifyConstTensors(
"m_CellLayerNormWeights", m_InputParams.m_CellLayerNormWeights, params.m_CellLayerNormWeights);
this->VerifyConstTensors(
"m_OutputLayerNormWeights", m_InputParams.m_OutputLayerNormWeights, params.m_OutputLayerNormWeights);
}
private:
armnn::LstmInputParams m_InputParams;
};
TEST_CASE("SerializeDeserializeLstmCifgPeepholeNoProjection")
{
armnn::LstmDescriptor descriptor;
descriptor.m_ActivationFunc = 4;
descriptor.m_ClippingThresProj = 0.0f;
descriptor.m_ClippingThresCell = 0.0f;
descriptor.m_CifgEnabled = true; // CIFG is enabled, so the optional CIFG parameters do not need to be set
descriptor.m_ProjectionEnabled = false;
descriptor.m_PeepholeEnabled = true;
const uint32_t batchSize = 1;
const uint32_t inputSize = 2;
const uint32_t numUnits = 4;
const uint32_t outputSize = numUnits;
armnn::TensorInfo inputWeightsInfo1({numUnits, inputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
armnn::ConstTensor inputToForgetWeights(inputWeightsInfo1, inputToForgetWeightsData);
std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
armnn::ConstTensor inputToCellWeights(inputWeightsInfo1, inputToCellWeightsData);
std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
armnn::ConstTensor inputToOutputWeights(inputWeightsInfo1, inputToOutputWeightsData);
armnn::TensorInfo inputWeightsInfo2({numUnits, outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
armnn::ConstTensor recurrentToForgetWeights(inputWeightsInfo2, recurrentToForgetWeightsData);
std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
armnn::ConstTensor recurrentToCellWeights(inputWeightsInfo2, recurrentToCellWeightsData);
std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
armnn::ConstTensor recurrentToOutputWeights(inputWeightsInfo2, recurrentToOutputWeightsData);
armnn::TensorInfo inputWeightsInfo3({numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
armnn::ConstTensor cellToForgetWeights(inputWeightsInfo3, cellToForgetWeightsData);
std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
armnn::ConstTensor cellToOutputWeights(inputWeightsInfo3, cellToOutputWeightsData);
std::vector<float> forgetGateBiasData(numUnits, 1.0f);
armnn::ConstTensor forgetGateBias(inputWeightsInfo3, forgetGateBiasData);
std::vector<float> cellBiasData(numUnits, 0.0f);
armnn::ConstTensor cellBias(inputWeightsInfo3, cellBiasData);
std::vector<float> outputGateBiasData(numUnits, 0.0f);
armnn::ConstTensor outputGateBias(inputWeightsInfo3, outputGateBiasData);
armnn::LstmInputParams params;
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
params.m_CellToForgetWeights = &cellToForgetWeights;
params.m_CellToOutputWeights = &cellToOutputWeights;
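// Network topology: three input layers (input, cell state in, output state in) feed the LSTM layer;
// its four outputs (scratch buffer, output state, cell state, output) each connect to an output layer.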
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
const std::string layerName("lstm");
armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
// connect up
armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
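// With CIFG enabled the scratch buffer only holds the cell, forget and output gate buffers, hence numUnits * 3.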
armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 3 }, armnn::DataType::Float32);
inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyLstmLayer<armnn::LstmDescriptor> checker(
layerName,
{inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
{lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
TEST_CASE("SerializeDeserializeLstmNoCifgWithPeepholeAndProjection")
{
armnn::LstmDescriptor descriptor;
descriptor.m_ActivationFunc = 4;
descriptor.m_ClippingThresProj = 0.0f;
descriptor.m_ClippingThresCell = 0.0f;
descriptor.m_CifgEnabled = false; // CIFG is disabled, so the optional CIFG parameters must be set
descriptor.m_ProjectionEnabled = true;
descriptor.m_PeepholeEnabled = true;
const uint32_t batchSize = 2;
const uint32_t inputSize = 5;
const uint32_t numUnits = 20;
const uint32_t outputSize = 16;
armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> projectionBiasData(outputSize, 0.f);
armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
armnn::LstmInputParams params;
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
// additional params because: descriptor.m_CifgEnabled = false
params.m_InputToInputWeights = &inputToInputWeights;
params.m_RecurrentToInputWeights = &recurrentToInputWeights;
params.m_CellToInputWeights = &cellToInputWeights;
params.m_InputGateBias = &inputGateBias;
// additional params because: descriptor.m_ProjectionEnabled = true
params.m_ProjectionWeights = &projectionWeights;
params.m_ProjectionBias = &projectionBias;
// additional params because: descriptor.m_PeepholeEnabled = true
params.m_CellToForgetWeights = &cellToForgetWeights;
params.m_CellToOutputWeights = &cellToOutputWeights;
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
const std::string layerName("lstm");
armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
// connect up
armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
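// Without CIFG the scratch buffer also holds the input gate buffer, hence numUnits * 4.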
armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyLstmLayer<armnn::LstmDescriptor> checker(
layerName,
{inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
{lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
TEST_CASE("SerializeDeserializeLstmNoCifgWithPeepholeWithProjectionWithLayerNorm")
{
armnn::LstmDescriptor descriptor;
descriptor.m_ActivationFunc = 4;
descriptor.m_ClippingThresProj = 0.0f;
descriptor.m_ClippingThresCell = 0.0f;
descriptor.m_CifgEnabled = false; // CIFG is disabled, so the optional CIFG parameters must be set
descriptor.m_ProjectionEnabled = true;
descriptor.m_PeepholeEnabled = true;
descriptor.m_LayerNormEnabled = true;
const uint32_t batchSize = 2;
const uint32_t inputSize = 5;
const uint32_t numUnits = 20;
const uint32_t outputSize = 16;
armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> projectionBiasData(outputSize, 0.f);
armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
std::vector<float> inputLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor inputLayerNormWeights(tensorInfo20, inputLayerNormWeightsData);
std::vector<float> forgetLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor forgetLayerNormWeights(tensorInfo20, forgetLayerNormWeightsData);
std::vector<float> cellLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellLayerNormWeights(tensorInfo20, cellLayerNormWeightsData);
std::vector<float> outLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor outLayerNormWeights(tensorInfo20, outLayerNormWeightsData);
armnn::LstmInputParams params;
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
// additional params because: descriptor.m_CifgEnabled = false
params.m_InputToInputWeights = &inputToInputWeights;
params.m_RecurrentToInputWeights = &recurrentToInputWeights;
params.m_CellToInputWeights = &cellToInputWeights;
params.m_InputGateBias = &inputGateBias;
// additional params because: descriptor.m_ProjectionEnabled = true
params.m_ProjectionWeights = &projectionWeights;
params.m_ProjectionBias = &projectionBias;
// additional params because: descriptor.m_PeepholeEnabled = true
params.m_CellToForgetWeights = &cellToForgetWeights;
params.m_CellToOutputWeights = &cellToOutputWeights;
// additional params because: descriptor.m_LayerNormEnabled = true
params.m_InputLayerNormWeights = &inputLayerNormWeights;
params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
params.m_CellLayerNormWeights = &cellLayerNormWeights;
params.m_OutputLayerNormWeights = &outLayerNormWeights;
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
const std::string layerName("lstm");
armnn::IConnectableLayer* const lstmLayer = network->AddLstmLayer(descriptor, params, layerName.c_str());
armnn::IConnectableLayer* const scratchBuffer = network->AddOutputLayer(0);
armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(1);
armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(2);
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(3);
// connect up
armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
inputLayer->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
outputStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(1));
outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
cellStateIn->GetOutputSlot(0).Connect(lstmLayer->GetInputSlot(2));
cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
lstmLayer->GetOutputSlot(0).Connect(scratchBuffer->GetInputSlot(0));
lstmLayer->GetOutputSlot(0).SetTensorInfo(lstmTensorInfoScratchBuff);
lstmLayer->GetOutputSlot(1).Connect(outputStateOut->GetInputSlot(0));
lstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
lstmLayer->GetOutputSlot(2).Connect(cellStateOut->GetInputSlot(0));
lstmLayer->GetOutputSlot(2).SetTensorInfo(cellStateTensorInfo);
lstmLayer->GetOutputSlot(3).Connect(outputLayer->GetInputSlot(0));
lstmLayer->GetOutputSlot(3).SetTensorInfo(outputStateTensorInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyLstmLayer<armnn::LstmDescriptor> checker(
layerName,
{inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
{lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
TEST_CASE("EnsureLstmLayersBackwardCompatibility")
{
// The hex data below is a flat buffer containing an LSTM layer with no CIFG, with peephole and projection
// enabled. The data was obtained before the additional layer normalization parameters were added to the
// LSTM serializer, so it verifies that an LSTM model with the old parameter configuration can
// still be loaded.
const std::vector<uint8_t> lstmNoCifgWithPeepholeAndProjectionModel =
{
0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00,
0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,
0xDC, 0x29, 0x00, 0x00, 0x38, 0x29, 0x00, 0x00, 0xB4, 0x28, 0x00, 0x00, 0x94, 0x01, 0x00, 0x00, 0x3C, 0x01,
0x00, 0x00, 0xE0, 0x00, 0x00, 0x00, 0x84, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x00,
0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x70, 0xD6, 0xFF, 0xFF,
0x00, 0x00, 0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x06, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x88, 0xD7,
0xFF, 0xFF, 0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xF6, 0xD6, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00,
0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xE8, 0xD7, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xC8, 0xD6, 0xFF, 0xFF, 0x00, 0x00,
0x00, 0x0B, 0x04, 0x00, 0x00, 0x00, 0x5E, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xE0, 0xD7, 0xFF, 0xFF,
0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x4E, 0xD7, 0xFF, 0xFF, 0x06, 0x00, 0x00, 0x00, 0x10, 0x00,
0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0xD8,
0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x20, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B,
0x04, 0x00, 0x00, 0x00, 0xB6, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x38, 0xD8, 0xFF, 0xFF, 0x08, 0x00,
0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xA6, 0xD7, 0xFF, 0xFF, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x98, 0xD8, 0xFF, 0xFF,
0x03, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x78, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x0B, 0x04, 0x00,
0x00, 0x00, 0x0E, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x16, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00,
0xFA, 0xD7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00,
0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xEC, 0xD8, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x6C, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x23, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00,
0x12, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x0A, 0x00, 0x00, 0x00, 0xE0, 0x25, 0x00, 0x00, 0xD0, 0x25,
0x00, 0x00, 0x2C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x26, 0x00, 0x48, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00,
0x10, 0x00, 0x14, 0x00, 0x18, 0x00, 0x1C, 0x00, 0x20, 0x00, 0x24, 0x00, 0x28, 0x00, 0x2C, 0x00, 0x30, 0x00,
0x34, 0x00, 0x38, 0x00, 0x3C, 0x00, 0x40, 0x00, 0x44, 0x00, 0x26, 0x00, 0x00, 0x00, 0xC4, 0x23, 0x00, 0x00,
0xF8, 0x21, 0x00, 0x00, 0x2C, 0x20, 0x00, 0x00, 0xF0, 0x1A, 0x00, 0x00, 0xB4, 0x15, 0x00, 0x00, 0x78, 0x10,
0x00, 0x00, 0xF0, 0x0F, 0x00, 0x00, 0x68, 0x0F, 0x00, 0x00, 0xE0, 0x0E, 0x00, 0x00, 0x14, 0x0D, 0x00, 0x00,
0xD8, 0x07, 0x00, 0x00, 0x50, 0x07, 0x00, 0x00, 0xC8, 0x06, 0x00, 0x00, 0x8C, 0x01, 0x00, 0x00, 0x14, 0x01,
0x00, 0x00, 0x8C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xD7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xFE, 0xD8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x5A, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x72, 0xD8,
0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x82, 0xD9, 0xFF, 0xFF,
0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xDE, 0xD8,
0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
0x14, 0x00, 0x00, 0x00, 0xF6, 0xD8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x54, 0x00, 0x00, 0x00, 0x04, 0x00,
0x00, 0x00, 0x06, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xD9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6A, 0xD9, 0xFF, 0xFF, 0x00, 0x00,
0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7A, 0xDA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00,
0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x86, 0xDE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xA2, 0xDE,
0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB2, 0xDF, 0xFF, 0xFF,
0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0xDF,
0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
0x14, 0x00, 0x00, 0x00, 0x26, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00,
0x00, 0x00, 0x36, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x92, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xAA, 0xDF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xBA, 0xE0, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0xC6, 0xE4, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xE2, 0xE4, 0xFF, 0xFF,
0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF2, 0xE5, 0xFF, 0xFF, 0x04, 0x00,
0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8E, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00,
0x00, 0x00, 0xAA, 0xE6, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
0xBA, 0xE7, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x16, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x2E, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x64, 0x00,
0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x3E, 0xE8, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9A, 0xE7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xB2, 0xE7, 0xFF, 0xFF,
0x00, 0x00, 0x00, 0x03, 0x64, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xC2, 0xE8, 0xFF, 0xFF, 0x04, 0x00,
0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1E, 0xE8, 0xFF, 0xFF,
0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00,
0x00, 0x00, 0x36, 0xE8, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
0x46, 0xE9, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xED, 0xFF, 0xFF,
0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00,
0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x6E, 0xED, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0x14, 0x05, 0x00, 0x00,
0x04, 0x00, 0x00, 0x00, 0x7E, 0xEE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x8A, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xA6, 0xF2, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03,
0x14, 0x05, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xB6, 0xF3, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x40, 0x01,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0xC2, 0xF7, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xDE, 0xF7, 0xFF, 0xFF,
0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xEE, 0xF8, 0xFF, 0xFF, 0x04, 0x00,
0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x8A, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01,
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00,
0x00, 0x00, 0xA6, 0xF9, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
0xB6, 0xFA, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x52, 0xFB,
0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x6E, 0xFB, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x03, 0xA4, 0x01,
0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x7E, 0xFC, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x1A, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x10, 0x00, 0x0C, 0x00,
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x06, 0x00, 0x07, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x01, 0x04, 0x00, 0x00, 0x00, 0x2E, 0xFE, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
0x22, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6C, 0x73,
0x74, 0x6D, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xEC, 0x00, 0x00, 0x00, 0xD0, 0x00, 0x00, 0x00,
0xB4, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x88, 0x00, 0x00, 0x00, 0x5C, 0x00, 0x00, 0x00, 0x30, 0x00,
0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x14, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
0xA6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x3C, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
0x04, 0x00, 0x00, 0x00, 0xCE, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x64, 0xFF, 0xFF, 0xFF,
0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xF6, 0xFD, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
0xB4, 0xFE, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0x1A, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00,
0xF0, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00,
0x10, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xE8, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00,
0x7E, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00,
0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x76, 0xFF, 0xFF, 0xFF, 0x02, 0x00, 0x00, 0x00,
0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00,
0x68, 0xFF, 0xFF, 0xFF, 0x04, 0x00, 0x00, 0x00, 0xCE, 0xFE, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
0x08, 0x00, 0x0E, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x0C, 0x00,
0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, 0x06, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00,
0x08, 0x00, 0x0E, 0x00, 0x04, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x01, 0x00,
0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0C, 0x00, 0x10, 0x00, 0x14, 0x00,
0x0E, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00,
0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x6E, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x08, 0x00,
0x0C, 0x00, 0x07, 0x00, 0x08, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x09, 0x04, 0x00, 0x00, 0x00,
0xF6, 0xFF, 0xFF, 0xFF, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x0A, 0x00, 0x04, 0x00, 0x06, 0x00,
0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
0x0C, 0x00, 0x10, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x10, 0x00,
0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x08, 0x00, 0x0A, 0x00, 0x00, 0x00, 0x04, 0x00, 0x08, 0x00,
0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x10, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0C, 0x00,
0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00,
0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x00
};
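// Deserialize the stored model bytes above and check that a network was produced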
armnn::INetworkPtr deserializedNetwork =
DeserializeNetwork(std::string(lstmNoCifgWithPeepholeAndProjectionModel.begin(),
lstmNoCifgWithPeepholeAndProjectionModel.end()));
CHECK(deserializedNetwork);
// Generate the same model parameters that were used to serialize the model (layer normalization is not enabled)
armnn::LstmDescriptor descriptor;
descriptor.m_ActivationFunc = 4;
descriptor.m_ClippingThresProj = 0.0f;
descriptor.m_ClippingThresCell = 0.0f;
descriptor.m_CifgEnabled = false;
descriptor.m_ProjectionEnabled = true;
descriptor.m_PeepholeEnabled = true;
const uint32_t batchSize = 2u;
const uint32_t inputSize = 5u;
const uint32_t numUnits = 20u;
const uint32_t outputSize = 16u;
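// Zero-filled weight and bias tensors matching the shapes used when the model was serialized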
armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputToInputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
std::vector<float> inputToForgetWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
std::vector<float> inputToCellWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
std::vector<float> inputToOutputWeightsData(tensorInfo20x5.GetNumElements(), 0.0f);
armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
std::vector<float> forgetGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
std::vector<float> cellBiasData(tensorInfo20.GetNumElements(), 0.0f);
armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
std::vector<float> outputGateBiasData(tensorInfo20.GetNumElements(), 0.0f);
armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> recurrentToInputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
std::vector<float> recurrentToForgetWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
std::vector<float> recurrentToCellWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
std::vector<float> recurrentToOutputWeightsData(tensorInfo20x16.GetNumElements(), 0.0f);
armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
std::vector<float> cellToInputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
std::vector<float> cellToForgetWeightsData(tensorInfo20.GetNumElements(), 0.0f);
armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
std::vector<float> cellToOutputWeightsData(tensorInfo20.GetNumElements(), 0.0f);
armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> projectionWeightsData(tensorInfo16x20.GetNumElements(), 0.0f);
armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> projectionBiasData(tensorInfo16.GetNumElements(), 0.0f);
armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
armnn::LstmInputParams params;
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
// Additional params required because descriptor.m_CifgEnabled == false
params.m_InputToInputWeights = &inputToInputWeights;
params.m_RecurrentToInputWeights = &recurrentToInputWeights;
params.m_CellToInputWeights = &cellToInputWeights;
params.m_InputGateBias = &inputGateBias;
// Additional params required because descriptor.m_ProjectionEnabled == true
params.m_ProjectionWeights = &projectionWeights;
params.m_ProjectionBias = &projectionBias;
// Additional params required because descriptor.m_PeepholeEnabled == true
params.m_CellToForgetWeights = &cellToForgetWeights;
params.m_CellToOutputWeights = &cellToOutputWeights;
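// Expected layer name and input/output tensor infos, then run the verifier over the deserialized network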
const std::string layerName("lstm");
armnn::TensorInfo inputTensorInfo({ batchSize, inputSize }, armnn::DataType::Float32);
armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
armnn::TensorInfo lstmTensorInfoScratchBuff({ batchSize, numUnits * 4 }, armnn::DataType::Float32);
VerifyLstmLayer<armnn::LstmDescriptor> checker(
layerName,
{inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
{lstmTensorInfoScratchBuff, outputStateTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
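// Builds QuantizedLstmInputParams from the constants vector passed to ExecuteStrategy (order must match the layer's constant order)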
armnn::QuantizedLstmInputParams ConstantsVector2QuantizedLstmInputParams(
const std::vector<armnn::ConstTensor>& constants)
{
armnn::QuantizedLstmInputParams params;
// index for constants vector
size_t i = 0;
// Get input parameters
params.m_InputToInputWeights = &constants[i++];
params.m_InputToForgetWeights = &constants[i++];
params.m_InputToCellWeights = &constants[i++];
params.m_InputToOutputWeights = &constants[i++];
params.m_RecurrentToInputWeights = &constants[i++];
params.m_RecurrentToForgetWeights = &constants[i++];
params.m_RecurrentToCellWeights = &constants[i++];
params.m_RecurrentToOutputWeights = &constants[i++];
params.m_InputGateBias = &constants[i++];
params.m_ForgetGateBias = &constants[i++];
params.m_CellBias = &constants[i++];
params.m_OutputGateBias = &constants[i++];
return params;
}
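// Verifies a deserialized QuantizedLstm layer: name, connections and all twelve constant tensors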
class VerifyQuantizedLstmLayer : public LayerVerifierBase
{
public:
VerifyQuantizedLstmLayer(const std::string& layerName,
const std::vector<armnn::TensorInfo>& inputInfos,
const std::vector<armnn::TensorInfo>& outputInfos,
const armnn::QuantizedLstmInputParams& inputParams)
: LayerVerifierBase(layerName, inputInfos, outputInfos), m_InputParams(inputParams) {}
void ExecuteStrategy(const armnn::IConnectableLayer* layer,
const armnn::BaseDescriptor& descriptor,
const std::vector<armnn::ConstTensor>& constants,
const char* name,
const armnn::LayerBindingId id = 0) override
{
armnn::IgnoreUnused(descriptor, constants, id);
switch (layer->GetType())
{
case armnn::LayerType::Input: break;
case armnn::LayerType::Output: break;
case armnn::LayerType::QuantizedLstm:
{
VerifyNameAndConnections(layer, name);
armnn::QuantizedLstmInputParams params = ConstantsVector2QuantizedLstmInputParams(constants);
VerifyInputParameters(params);
break;
}
default:
{
throw armnn::Exception(fmt::format("Unexpected layer type in QuantizedLstm test model: {}",
layer->GetName()));
}
}
}
protected:
void VerifyInputParameters(const armnn::QuantizedLstmInputParams& params)
{
VerifyConstTensors("m_InputToInputWeights",
m_InputParams.m_InputToInputWeights, params.m_InputToInputWeights);
VerifyConstTensors("m_InputToForgetWeights",
m_InputParams.m_InputToForgetWeights, params.m_InputToForgetWeights);
VerifyConstTensors("m_InputToCellWeights",
m_InputParams.m_InputToCellWeights, params.m_InputToCellWeights);
VerifyConstTensors("m_InputToOutputWeights",
m_InputParams.m_InputToOutputWeights, params.m_InputToOutputWeights);
VerifyConstTensors("m_RecurrentToInputWeights",
m_InputParams.m_RecurrentToInputWeights, params.m_RecurrentToInputWeights);
VerifyConstTensors("m_RecurrentToForgetWeights",
m_InputParams.m_RecurrentToForgetWeights, params.m_RecurrentToForgetWeights);
VerifyConstTensors("m_RecurrentToCellWeights",
m_InputParams.m_RecurrentToCellWeights, params.m_RecurrentToCellWeights);
VerifyConstTensors("m_RecurrentToOutputWeights",
m_InputParams.m_RecurrentToOutputWeights, params.m_RecurrentToOutputWeights);
VerifyConstTensors("m_InputGateBias",
m_InputParams.m_InputGateBias, params.m_InputGateBias);
VerifyConstTensors("m_ForgetGateBias",
m_InputParams.m_ForgetGateBias, params.m_ForgetGateBias);
VerifyConstTensors("m_CellBias",
m_InputParams.m_CellBias, params.m_CellBias);
VerifyConstTensors("m_OutputGateBias",
m_InputParams.m_OutputGateBias, params.m_OutputGateBias);
}
private:
armnn::QuantizedLstmInputParams m_InputParams;
};
TEST_CASE("SerializeDeserializeQuantizedLstm")
{
const uint32_t batchSize = 1;
const uint32_t inputSize = 2;
const uint32_t numUnits = 4;
const uint32_t outputSize = numUnits;
// Scale/Offset for input/output, cellState In/Out, weights, bias
float inputOutputScale = 0.0078125f;
int32_t inputOutputOffset = 128;
float cellStateScale = 0.00048828125f;
int32_t cellStateOffset = 0;
float weightsScale = 0.00408021f;
int32_t weightsOffset = 100;
float biasScale = 3.1876640625e-05f;
int32_t biasOffset = 0;
// The shape of weight data is {outputSize, inputSize} = {4, 2}
armnn::TensorShape inputToInputWeightsShape = {4, 2};
std::vector<uint8_t> inputToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
armnn::TensorInfo inputToInputWeightsInfo(inputToInputWeightsShape,
armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset,
true);
armnn::ConstTensor inputToInputWeights(inputToInputWeightsInfo, inputToInputWeightsData);
armnn::TensorShape inputToForgetWeightsShape = {4, 2};
std::vector<uint8_t> inputToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
armnn::TensorInfo inputToForgetWeightsInfo(inputToForgetWeightsShape,
armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset,
true);
armnn::ConstTensor inputToForgetWeights(inputToForgetWeightsInfo, inputToForgetWeightsData);
armnn::TensorShape inputToCellWeightsShape = {4, 2};
std::vector<uint8_t> inputToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
armnn::TensorInfo inputToCellWeightsInfo(inputToCellWeightsShape,
armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset,
true);
armnn::ConstTensor inputToCellWeights(inputToCellWeightsInfo, inputToCellWeightsData);
armnn::TensorShape inputToOutputWeightsShape = {4, 2};
std::vector<uint8_t> inputToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8};
armnn::TensorInfo inputToOutputWeightsInfo(inputToOutputWeightsShape,
armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset,
true);
armnn::ConstTensor inputToOutputWeights(inputToOutputWeightsInfo, inputToOutputWeightsData);
// The shape of recurrent weight data is {outputSize, outputSize} = {4, 4}
armnn::TensorShape recurrentToInputWeightsShape = {4, 4};
std::vector<uint8_t> recurrentToInputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
armnn::TensorInfo recurrentToInputWeightsInfo(recurrentToInputWeightsShape,
armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset,
true);
armnn::ConstTensor recurrentToInputWeights(recurrentToInputWeightsInfo, recurrentToInputWeightsData);
armnn::TensorShape recurrentToForgetWeightsShape = {4, 4};
std::vector<uint8_t> recurrentToForgetWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
armnn::TensorInfo recurrentToForgetWeightsInfo(recurrentToForgetWeightsShape,
armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset,
true);
armnn::ConstTensor recurrentToForgetWeights(recurrentToForgetWeightsInfo, recurrentToForgetWeightsData);
armnn::TensorShape recurrentToCellWeightsShape = {4, 4};
std::vector<uint8_t> recurrentToCellWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
armnn::TensorInfo recurrentToCellWeightsInfo(recurrentToCellWeightsShape,
armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset,
true);
armnn::ConstTensor recurrentToCellWeights(recurrentToCellWeightsInfo, recurrentToCellWeightsData);
armnn::TensorShape recurrentToOutputWeightsShape = {4, 4};
std::vector<uint8_t> recurrentToOutputWeightsData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
armnn::TensorInfo recurrentToOutputWeightsInfo(recurrentToOutputWeightsShape,
armnn::DataType::QAsymmU8,
weightsScale,
weightsOffset,
true);
armnn::ConstTensor recurrentToOutputWeights(recurrentToOutputWeightsInfo, recurrentToOutputWeightsData);
// The shape of bias data is {outputSize} = {4}
armnn::TensorShape inputGateBiasShape = {4};
std::vector<int32_t> inputGateBiasData = {1, 2, 3, 4};
armnn::TensorInfo inputGateBiasInfo(inputGateBiasShape,
armnn::DataType::Signed32,
biasScale,
biasOffset,
true);
armnn::ConstTensor inputGateBias(inputGateBiasInfo, inputGateBiasData);
armnn::TensorShape forgetGateBiasShape = {4};
std::vector<int32_t> forgetGateBiasData = {1, 2, 3, 4};
armnn::TensorInfo forgetGateBiasInfo(forgetGateBiasShape,
armnn::DataType::Signed32,
biasScale,
biasOffset,
true);
armnn::ConstTensor forgetGateBias(forgetGateBiasInfo, forgetGateBiasData);
armnn::TensorShape cellBiasShape = {4};
std::vector<int32_t> cellBiasData = {1, 2, 3, 4};
armnn::TensorInfo cellBiasInfo(cellBiasShape,
armnn::DataType::Signed32,
biasScale,
biasOffset,
true);
armnn::ConstTensor cellBias(cellBiasInfo, cellBiasData);
armnn::TensorShape outputGateBiasShape = {4};
std::vector<int32_t> outputGateBiasData = {1, 2, 3, 4};
armnn::TensorInfo outputGateBiasInfo(outputGateBiasShape,
armnn::DataType::Signed32,
biasScale,
biasOffset,
true);
armnn::ConstTensor outputGateBias(outputGateBiasInfo, outputGateBiasData);
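// Populate the QuantizedLstm input parameters with the constant tensors created above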
armnn::QuantizedLstmInputParams params;
params.m_InputToInputWeights = &inputToInputWeights;
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToInputWeights = &recurrentToInputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_InputGateBias = &inputGateBias;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
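// Build the test network: input, cell state and output state inputs feed the QuantizedLstm layer, which drives the two outputs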
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
const std::string layerName("QuantizedLstm");
armnn::IConnectableLayer* const quantizedLstmLayer = network->AddQuantizedLstmLayer(params, layerName.c_str());
armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(0);
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(1);
// Connect up
armnn::TensorInfo inputTensorInfo({ batchSize, inputSize },
armnn::DataType::QAsymmU8,
inputOutputScale,
inputOutputOffset);
armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits },
armnn::DataType::QSymmS16,
cellStateScale,
cellStateOffset);
armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize },
armnn::DataType::QAsymmU8,
inputOutputScale,
inputOutputOffset);
inputLayer->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
cellStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(1));
cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
outputStateIn->GetOutputSlot(0).Connect(quantizedLstmLayer->GetInputSlot(2));
outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
quantizedLstmLayer->GetOutputSlot(0).Connect(cellStateOut->GetInputSlot(0));
quantizedLstmLayer->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
quantizedLstmLayer->GetOutputSlot(1).Connect(outputLayer->GetInputSlot(0));
quantizedLstmLayer->GetOutputSlot(1).SetTensorInfo(outputStateTensorInfo);
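// Round-trip the network through the serializer and verify the deserialized QuantizedLstm layer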
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyQuantizedLstmLayer checker(layerName,
{inputTensorInfo, cellStateTensorInfo, outputStateTensorInfo},
{cellStateTensorInfo, outputStateTensorInfo},
params);
deserializedNetwork->ExecuteStrategy(checker);
}
TEST_CASE("SerializeDeserializeQLstmBasic")
{
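// Basic QLstm configuration: CIFG enabled; projection, peephole and layer normalization disabled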
armnn::QLstmDescriptor descriptor;
descriptor.m_CifgEnabled = true;
descriptor.m_ProjectionEnabled = false;
descriptor.m_PeepholeEnabled = false;
descriptor.m_LayerNormEnabled = false;
descriptor.m_CellClip = 0.0f;
descriptor.m_ProjectionClip = 0.0f;
descriptor.m_InputIntermediateScale = 0.00001f;
descriptor.m_ForgetIntermediateScale = 0.00001f;
descriptor.m_CellIntermediateScale = 0.00001f;
descriptor.m_OutputIntermediateScale = 0.00001f;
descriptor.m_HiddenStateScale = 0.07f;
descriptor.m_HiddenStateZeroPoint = 0;
const unsigned int numBatches = 2;
const unsigned int inputSize = 5;
const unsigned int outputSize = 4;
const unsigned int numUnits = 4;
// Scale/Offset quantization info
float inputScale = 0.0078f;
int32_t inputOffset = 0;
float outputScale = 0.0078f;
int32_t outputOffset = 0;
float cellStateScale = 3.5002e-05f;
int32_t cellStateOffset = 0;
float weightsScale = 0.007f;
int32_t weightsOffset = 0;
float biasScale = 3.5002e-05f / 1024;
int32_t biasOffset = 0;
// Weight and bias tensors and their quantization info
armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
armnn::DataType::QSymmS8,
weightsScale,
weightsOffset,
true);
armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
armnn::DataType::QSymmS8,
weightsScale,
weightsOffset,
true);
armnn::TensorInfo biasInfo({numUnits}, armnn::DataType::Signed32, biasScale, biasOffset, true);
std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
std::vector<int8_t> recurrentToForgetWeightsData =
GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
std::vector<int8_t> recurrentToCellWeightsData =
GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
std::vector<int8_t> recurrentToOutputWeightsData =
GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
std::vector<int32_t> forgetGateBiasData(numUnits, 1);
std::vector<int32_t> cellBiasData(numUnits, 0);
std::vector<int32_t> outputGateBiasData(numUnits, 0);
armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
armnn::ConstTensor cellBias(biasInfo, cellBiasData);
armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
// Set up params
armnn::LstmInputParams params;
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
// Create network
armnn::INetworkPtr network = armnn::INetwork::Create();
const std::string layerName("qLstm");
armnn::IConnectableLayer* const input = network->AddInputLayer(0);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
// Input/Output tensor info
armnn::TensorInfo inputInfo({numBatches , inputSize},
armnn::DataType::QAsymmS8,
inputScale,
inputOffset);
armnn::TensorInfo cellStateInfo({numBatches , numUnits},
armnn::DataType::QSymmS16,
cellStateScale,
cellStateOffset);
armnn::TensorInfo outputStateInfo({numBatches , outputSize},
armnn::DataType::QAsymmS8,
outputScale,
outputOffset);
// Connect input/output slots
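// QLstm slot layout: input slots are {input, outputStateIn, cellStateIn};
// output slots are {outputStateOut, cellStateOut, output}.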
input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
input->GetOutputSlot(0).SetTensorInfo(inputInfo);
outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyLstmLayer<armnn::QLstmDescriptor> checker(
layerName,
{inputInfo, cellStateInfo, outputStateInfo},
{outputStateInfo, cellStateInfo, outputStateInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
TEST_CASE("SerializeDeserializeQLstmCifgLayerNorm")
{
armnn::QLstmDescriptor descriptor;
// The optional input gate (CIFG) params are only required when CIFG is disabled;
// CIFG is enabled here, so they are omitted.
descriptor.m_CifgEnabled = true;
descriptor.m_ProjectionEnabled = false;
descriptor.m_PeepholeEnabled = false;
descriptor.m_LayerNormEnabled = true;
descriptor.m_CellClip = 0.0f;
descriptor.m_ProjectionClip = 0.0f;
descriptor.m_InputIntermediateScale = 0.00001f;
descriptor.m_ForgetIntermediateScale = 0.00001f;
descriptor.m_CellIntermediateScale = 0.00001f;
descriptor.m_OutputIntermediateScale = 0.00001f;
descriptor.m_HiddenStateScale = 0.07f;
descriptor.m_HiddenStateZeroPoint = 0;
const unsigned int numBatches = 2;
const unsigned int inputSize = 5;
const unsigned int outputSize = 4;
const unsigned int numUnits = 4;
// Scale/Offset quantization info
float inputScale = 0.0078f;
int32_t inputOffset = 0;
float outputScale = 0.0078f;
int32_t outputOffset = 0;
float cellStateScale = 3.5002e-05f;
int32_t cellStateOffset = 0;
float weightsScale = 0.007f;
int32_t weightsOffset = 0;
float layerNormScale = 3.5002e-05f;
int32_t layerNormOffset = 0;
float biasScale = layerNormScale / 1024;
int32_t biasOffset = 0;
// Weights and bias tensor and quantization info
armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
armnn::DataType::QSymmS8,
weightsScale,
weightsOffset,
true);
armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
armnn::DataType::QSymmS8,
weightsScale,
weightsOffset,
true);
armnn::TensorInfo biasInfo({numUnits},
armnn::DataType::Signed32,
biasScale,
biasOffset,
true);
armnn::TensorInfo layerNormWeightsInfo({numUnits},
armnn::DataType::QSymmS16,
layerNormScale,
layerNormOffset,
true);
// Mandatory params
std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
std::vector<int8_t> recurrentToForgetWeightsData =
GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
std::vector<int8_t> recurrentToCellWeightsData =
GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
std::vector<int8_t> recurrentToOutputWeightsData =
GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
std::vector<int32_t> forgetGateBiasData(numUnits, 1);
std::vector<int32_t> cellBiasData(numUnits, 0);
std::vector<int32_t> outputGateBiasData(numUnits, 0);
armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
armnn::ConstTensor cellBias(biasInfo, cellBiasData);
armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
// Layer Norm weights (no input layer norm weights are needed here because CIFG is enabled)
std::vector<int16_t> forgetLayerNormWeightsData =
GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
std::vector<int16_t> cellLayerNormWeightsData =
GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
std::vector<int16_t> outputLayerNormWeightsData =
GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData);
armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData);
armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData);
// Set up params
armnn::LstmInputParams params;
// Mandatory params
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
// Layer Norm
params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
params.m_CellLayerNormWeights = &cellLayerNormWeights;
params.m_OutputLayerNormWeights = &outputLayerNormWeights;
// Create network
armnn::INetworkPtr network = armnn::INetwork::Create();
const std::string layerName("qLstm");
armnn::IConnectableLayer* const input = network->AddInputLayer(0);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
// Input/Output tensor info
armnn::TensorInfo inputInfo({numBatches , inputSize},
armnn::DataType::QAsymmS8,
inputScale,
inputOffset);
armnn::TensorInfo cellStateInfo({numBatches , numUnits},
armnn::DataType::QSymmS16,
cellStateScale,
cellStateOffset);
armnn::TensorInfo outputStateInfo({numBatches , outputSize},
armnn::DataType::QAsymmS8,
outputScale,
outputOffset);
// Connect input/output slots
input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
input->GetOutputSlot(0).SetTensorInfo(inputInfo);
outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyLstmLayer<armnn::QLstmDescriptor> checker(layerName,
{inputInfo, cellStateInfo, outputStateInfo},
{outputStateInfo, cellStateInfo, outputStateInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
TEST_CASE("SerializeDeserializeQLstmAdvanced")
{
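// Advanced QLstm configuration: CIFG disabled (input gate params supplied), with peephole,
// projection and layer normalisation all enabled, plus cell/projection clipping.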
armnn::QLstmDescriptor descriptor;
descriptor.m_CifgEnabled = false;
descriptor.m_ProjectionEnabled = true;
descriptor.m_PeepholeEnabled = true;
descriptor.m_LayerNormEnabled = true;
descriptor.m_CellClip = 0.1f;
descriptor.m_ProjectionClip = 0.1f;
descriptor.m_InputIntermediateScale = 0.00001f;
descriptor.m_ForgetIntermediateScale = 0.00001f;
descriptor.m_CellIntermediateScale = 0.00001f;
descriptor.m_OutputIntermediateScale = 0.00001f;
descriptor.m_HiddenStateScale = 0.07f;
descriptor.m_HiddenStateZeroPoint = 0;
const unsigned int numBatches = 2;
const unsigned int inputSize = 5;
const unsigned int outputSize = 4;
const unsigned int numUnits = 4;
// Scale/Offset quantization info
float inputScale = 0.0078f;
int32_t inputOffset = 0;
float outputScale = 0.0078f;
int32_t outputOffset = 0;
float cellStateScale = 3.5002e-05f;
int32_t cellStateOffset = 0;
float weightsScale = 0.007f;
int32_t weightsOffset = 0;
float layerNormScale = 3.5002e-05f;
int32_t layerNormOffset = 0;
float biasScale = layerNormScale / 1024;
int32_t biasOffset = 0;
// Weights and bias tensor and quantization info
armnn::TensorInfo inputWeightsInfo({numUnits, inputSize},
armnn::DataType::QSymmS8,
weightsScale,
weightsOffset,
true);
armnn::TensorInfo recurrentWeightsInfo({numUnits, outputSize},
armnn::DataType::QSymmS8,
weightsScale,
weightsOffset,
true);
armnn::TensorInfo biasInfo({numUnits},
armnn::DataType::Signed32,
biasScale,
biasOffset,
true);
armnn::TensorInfo peepholeWeightsInfo({numUnits},
armnn::DataType::QSymmS16,
weightsScale,
weightsOffset,
true);
armnn::TensorInfo layerNormWeightsInfo({numUnits},
armnn::DataType::QSymmS16,
layerNormScale,
layerNormOffset,
true);
armnn::TensorInfo projectionWeightsInfo({outputSize, numUnits},
armnn::DataType::QSymmS8,
weightsScale,
weightsOffset,
true);
// Mandatory params
std::vector<int8_t> inputToForgetWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
std::vector<int8_t> inputToCellWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
std::vector<int8_t> inputToOutputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
armnn::ConstTensor inputToForgetWeights(inputWeightsInfo, inputToForgetWeightsData);
armnn::ConstTensor inputToCellWeights(inputWeightsInfo, inputToCellWeightsData);
armnn::ConstTensor inputToOutputWeights(inputWeightsInfo, inputToOutputWeightsData);
std::vector<int8_t> recurrentToForgetWeightsData =
GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
std::vector<int8_t> recurrentToCellWeightsData =
GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
std::vector<int8_t> recurrentToOutputWeightsData =
GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
armnn::ConstTensor recurrentToForgetWeights(recurrentWeightsInfo, recurrentToForgetWeightsData);
armnn::ConstTensor recurrentToCellWeights(recurrentWeightsInfo, recurrentToCellWeightsData);
armnn::ConstTensor recurrentToOutputWeights(recurrentWeightsInfo, recurrentToOutputWeightsData);
std::vector<int32_t> forgetGateBiasData(numUnits, 1);
std::vector<int32_t> cellBiasData(numUnits, 0);
std::vector<int32_t> outputGateBiasData(numUnits, 0);
armnn::ConstTensor forgetGateBias(biasInfo, forgetGateBiasData);
armnn::ConstTensor cellBias(biasInfo, cellBiasData);
armnn::ConstTensor outputGateBias(biasInfo, outputGateBiasData);
// Input gate params (required because CIFG is disabled)
std::vector<int8_t> inputToInputWeightsData = GenerateRandomData<int8_t>(inputWeightsInfo.GetNumElements());
std::vector<int8_t> recurrentToInputWeightsData =
GenerateRandomData<int8_t>(recurrentWeightsInfo.GetNumElements());
std::vector<int32_t> inputGateBiasData(numUnits, 1);
armnn::ConstTensor inputToInputWeights(inputWeightsInfo, inputToInputWeightsData);
armnn::ConstTensor recurrentToInputWeights(recurrentWeightsInfo, recurrentToInputWeightsData);
armnn::ConstTensor inputGateBias(biasInfo, inputGateBiasData);
// Peephole
std::vector<int16_t> cellToInputWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
std::vector<int16_t> cellToForgetWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
std::vector<int16_t> cellToOutputWeightsData = GenerateRandomData<int16_t>(peepholeWeightsInfo.GetNumElements());
armnn::ConstTensor cellToInputWeights(peepholeWeightsInfo, cellToInputWeightsData);
armnn::ConstTensor cellToForgetWeights(peepholeWeightsInfo, cellToForgetWeightsData);
armnn::ConstTensor cellToOutputWeights(peepholeWeightsInfo, cellToOutputWeightsData);
// Projection (projectionBias reuses biasInfo, which works here because outputSize == numUnits)
std::vector<int8_t> projectionWeightsData = GenerateRandomData<int8_t>(projectionWeightsInfo.GetNumElements());
std::vector<int32_t> projectionBiasData(outputSize, 1);
armnn::ConstTensor projectionWeights(projectionWeightsInfo, projectionWeightsData);
armnn::ConstTensor projectionBias(biasInfo, projectionBiasData);
// Layer Norm
std::vector<int16_t> inputLayerNormWeightsData =
GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
std::vector<int16_t> forgetLayerNormWeightsData =
GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
std::vector<int16_t> cellLayerNormWeightsData =
GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
std::vector<int16_t> outputLayerNormWeightsData =
GenerateRandomData<int16_t>(layerNormWeightsInfo.GetNumElements());
armnn::ConstTensor inputLayerNormWeights(layerNormWeightsInfo, inputLayerNormWeightsData);
armnn::ConstTensor forgetLayerNormWeights(layerNormWeightsInfo, forgetLayerNormWeightsData);
armnn::ConstTensor cellLayerNormWeights(layerNormWeightsInfo, cellLayerNormWeightsData);
armnn::ConstTensor outputLayerNormWeights(layerNormWeightsInfo, outputLayerNormWeightsData);
// Set up params
armnn::LstmInputParams params;
// Mandatory params
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
// Input gate params (CIFG disabled)
params.m_InputToInputWeights = &inputToInputWeights;
params.m_RecurrentToInputWeights = &recurrentToInputWeights;
params.m_InputGateBias = &inputGateBias;
// Peephole
params.m_CellToInputWeights = &cellToInputWeights;
params.m_CellToForgetWeights = &cellToForgetWeights;
params.m_CellToOutputWeights = &cellToOutputWeights;
// Projection
params.m_ProjectionWeights = &projectionWeights;
params.m_ProjectionBias = &projectionBias;
// Layer Norm
params.m_InputLayerNormWeights = &inputLayerNormWeights;
params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
params.m_CellLayerNormWeights = &cellLayerNormWeights;
params.m_OutputLayerNormWeights = &outputLayerNormWeights;
// Create network
armnn::INetworkPtr network = armnn::INetwork::Create();
const std::string layerName("qLstm");
armnn::IConnectableLayer* const input = network->AddInputLayer(0);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(2);
armnn::IConnectableLayer* const qLstmLayer = network->AddQLstmLayer(descriptor, params, layerName.c_str());
armnn::IConnectableLayer* const outputStateOut = network->AddOutputLayer(0);
armnn::IConnectableLayer* const cellStateOut = network->AddOutputLayer(1);
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(2);
// Input/Output tensor info
armnn::TensorInfo inputInfo({numBatches , inputSize},
armnn::DataType::QAsymmS8,
inputScale,
inputOffset);
armnn::TensorInfo cellStateInfo({numBatches , numUnits},
armnn::DataType::QSymmS16,
cellStateScale,
cellStateOffset);
armnn::TensorInfo outputStateInfo({numBatches , outputSize},
armnn::DataType::QAsymmS8,
outputScale,
outputOffset);
// Connect input/output slots
input->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(0));
input->GetOutputSlot(0).SetTensorInfo(inputInfo);
outputStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(1));
outputStateIn->GetOutputSlot(0).SetTensorInfo(cellStateInfo);
cellStateIn->GetOutputSlot(0).Connect(qLstmLayer->GetInputSlot(2));
cellStateIn->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
qLstmLayer->GetOutputSlot(0).Connect(outputStateOut->GetInputSlot(0));
qLstmLayer->GetOutputSlot(0).SetTensorInfo(outputStateInfo);
qLstmLayer->GetOutputSlot(1).Connect(cellStateOut->GetInputSlot(0));
qLstmLayer->GetOutputSlot(1).SetTensorInfo(cellStateInfo);
qLstmLayer->GetOutputSlot(2).Connect(outputLayer->GetInputSlot(0));
qLstmLayer->GetOutputSlot(2).SetTensorInfo(outputStateInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyLstmLayer<armnn::QLstmDescriptor> checker(layerName,
{inputInfo, cellStateInfo, outputStateInfo},
{outputStateInfo, cellStateInfo, outputStateInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
TEST_CASE("SerializeDeserializeUnidirectionalSequenceLstmCifgPeepholeNoProjection")
{
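// UnidirectionalSequenceLstm with CIFG and peephole enabled, no projection;
// m_TimeMajor is false, so sequence tensors use a [batch, time, feature] layout.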
armnn::UnidirectionalSequenceLstmDescriptor descriptor;
descriptor.m_ActivationFunc = 4;
descriptor.m_ClippingThresProj = 0.0f;
descriptor.m_ClippingThresCell = 0.0f;
descriptor.m_CifgEnabled = true; // if this is true then we DON'T need to set the OptCifgParams
descriptor.m_ProjectionEnabled = false;
descriptor.m_PeepholeEnabled = true;
descriptor.m_TimeMajor = false;
const uint32_t batchSize = 1;
const uint32_t timeSize = 2;
const uint32_t inputSize = 2;
const uint32_t numUnits = 4;
const uint32_t outputSize = numUnits;
armnn::TensorInfo inputWeightsInfo1({numUnits, inputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
armnn::ConstTensor inputToForgetWeights(inputWeightsInfo1, inputToForgetWeightsData);
std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
armnn::ConstTensor inputToCellWeights(inputWeightsInfo1, inputToCellWeightsData);
std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
armnn::ConstTensor inputToOutputWeights(inputWeightsInfo1, inputToOutputWeightsData);
armnn::TensorInfo inputWeightsInfo2({numUnits, outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
armnn::ConstTensor recurrentToForgetWeights(inputWeightsInfo2, recurrentToForgetWeightsData);
std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
armnn::ConstTensor recurrentToCellWeights(inputWeightsInfo2, recurrentToCellWeightsData);
std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
armnn::ConstTensor recurrentToOutputWeights(inputWeightsInfo2, recurrentToOutputWeightsData);
armnn::TensorInfo inputWeightsInfo3({numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
armnn::ConstTensor cellToForgetWeights(inputWeightsInfo3, cellToForgetWeightsData);
std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
armnn::ConstTensor cellToOutputWeights(inputWeightsInfo3, cellToOutputWeightsData);
std::vector<float> forgetGateBiasData(numUnits, 1.0f);
armnn::ConstTensor forgetGateBias(inputWeightsInfo3, forgetGateBiasData);
std::vector<float> cellBiasData(numUnits, 0.0f);
armnn::ConstTensor cellBias(inputWeightsInfo3, cellBiasData);
std::vector<float> outputGateBiasData(numUnits, 0.0f);
armnn::ConstTensor outputGateBias(inputWeightsInfo3, outputGateBiasData);
armnn::LstmInputParams params;
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
params.m_CellToForgetWeights = &cellToForgetWeights;
params.m_CellToOutputWeights = &cellToOutputWeights;
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
const std::string layerName("UnidirectionalSequenceLstm");
armnn::IConnectableLayer* const unidirectionalSequenceLstmLayer =
network->AddUnidirectionalSequenceLstmLayer(descriptor, params, layerName.c_str());
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
// connect up
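// UnidirectionalSequenceLstm slot layout: input slots are {input, outputStateIn, cellStateIn};
// the single output slot carries the full output sequence.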
armnn::TensorInfo inputTensorInfo({ batchSize, timeSize, inputSize }, armnn::DataType::Float32);
armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
armnn::TensorInfo outputTensorInfo({ batchSize, timeSize, outputSize }, armnn::DataType::Float32);
inputLayer->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
outputStateIn->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(1));
outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
cellStateIn->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(2));
cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
unidirectionalSequenceLstmLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
unidirectionalSequenceLstmLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyLstmLayer<armnn::UnidirectionalSequenceLstmDescriptor> checker(
layerName,
{inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
{outputTensorInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
TEST_CASE("SerializeDeserializeUnidirectionalSequenceLstmNoCifgWithPeepholeAndProjection")
{
armnn::UnidirectionalSequenceLstmDescriptor descriptor;
descriptor.m_ActivationFunc = 4;
descriptor.m_ClippingThresProj = 0.0f;
descriptor.m_ClippingThresCell = 0.0f;
descriptor.m_CifgEnabled = false; // CIFG is disabled, so the optional input gate params are set below
descriptor.m_ProjectionEnabled = true;
descriptor.m_PeepholeEnabled = true;
descriptor.m_TimeMajor = false;
const uint32_t batchSize = 2;
const uint32_t timeSize = 2;
const uint32_t inputSize = 5;
const uint32_t numUnits = 20;
const uint32_t outputSize = 16;
armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> projectionBiasData(outputSize, 0.f);
armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
armnn::LstmInputParams params;
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
// additional params because: descriptor.m_CifgEnabled = false
params.m_InputToInputWeights = &inputToInputWeights;
params.m_RecurrentToInputWeights = &recurrentToInputWeights;
params.m_CellToInputWeights = &cellToInputWeights;
params.m_InputGateBias = &inputGateBias;
// additional params because: descriptor.m_ProjectionEnabled = true
params.m_ProjectionWeights = &projectionWeights;
params.m_ProjectionBias = &projectionBias;
// additional params because: descriptor.m_PeepholeEnabled = true
params.m_CellToForgetWeights = &cellToForgetWeights;
params.m_CellToOutputWeights = &cellToOutputWeights;
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
const std::string layerName("unidirectionalSequenceLstm");
armnn::IConnectableLayer* const unidirectionalSequenceLstmLayer =
network->AddUnidirectionalSequenceLstmLayer(descriptor, params, layerName.c_str());
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
// connect up
armnn::TensorInfo inputTensorInfo({ batchSize, timeSize, inputSize }, armnn::DataType::Float32);
armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
armnn::TensorInfo outputTensorInfo({ batchSize, timeSize, outputSize }, armnn::DataType::Float32);
inputLayer->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
outputStateIn->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(1));
outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
cellStateIn->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(2));
cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
unidirectionalSequenceLstmLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
unidirectionalSequenceLstmLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyLstmLayer<armnn::UnidirectionalSequenceLstmDescriptor> checker(
layerName,
{inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
{outputTensorInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
TEST_CASE("SerializeDeserializeUnidirectionalSequenceLstmNoCifgWithPeepholeWithProjectionWithLayerNorm")
{
armnn::UnidirectionalSequenceLstmDescriptor descriptor;
descriptor.m_ActivationFunc = 4;
descriptor.m_ClippingThresProj = 0.0f;
descriptor.m_ClippingThresCell = 0.0f;
descriptor.m_CifgEnabled = false; // CIFG is disabled, so the optional input gate params are set below
descriptor.m_ProjectionEnabled = true;
descriptor.m_PeepholeEnabled = true;
descriptor.m_LayerNormEnabled = true;
descriptor.m_TimeMajor = false;
const uint32_t batchSize = 2;
const uint32_t timeSize = 2;
const uint32_t inputSize = 5;
const uint32_t numUnits = 20;
const uint32_t outputSize = 16;
armnn::TensorInfo tensorInfo20x5({numUnits, inputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputToInputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToInputWeights(tensorInfo20x5, inputToInputWeightsData);
std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToForgetWeights(tensorInfo20x5, inputToForgetWeightsData);
std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToCellWeights(tensorInfo20x5, inputToCellWeightsData);
std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x5.GetNumElements());
armnn::ConstTensor inputToOutputWeights(tensorInfo20x5, inputToOutputWeightsData);
armnn::TensorInfo tensorInfo20({numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor inputGateBias(tensorInfo20, inputGateBiasData);
std::vector<float> forgetGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor forgetGateBias(tensorInfo20, forgetGateBiasData);
std::vector<float> cellBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellBias(tensorInfo20, cellBiasData);
std::vector<float> outputGateBiasData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor outputGateBias(tensorInfo20, outputGateBiasData);
armnn::TensorInfo tensorInfo20x16({numUnits, outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> recurrentToInputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToInputWeights(tensorInfo20x16, recurrentToInputWeightsData);
std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToForgetWeights(tensorInfo20x16, recurrentToForgetWeightsData);
std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToCellWeights(tensorInfo20x16, recurrentToCellWeightsData);
std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(tensorInfo20x16.GetNumElements());
armnn::ConstTensor recurrentToOutputWeights(tensorInfo20x16, recurrentToOutputWeightsData);
std::vector<float> cellToInputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToInputWeights(tensorInfo20, cellToInputWeightsData);
std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToForgetWeights(tensorInfo20, cellToForgetWeightsData);
std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellToOutputWeights(tensorInfo20, cellToOutputWeightsData);
armnn::TensorInfo tensorInfo16x20({outputSize, numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> projectionWeightsData = GenerateRandomData<float>(tensorInfo16x20.GetNumElements());
armnn::ConstTensor projectionWeights(tensorInfo16x20, projectionWeightsData);
armnn::TensorInfo tensorInfo16({outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> projectionBiasData(outputSize, 0.f);
armnn::ConstTensor projectionBias(tensorInfo16, projectionBiasData);
std::vector<float> inputLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor inputLayerNormWeights(tensorInfo20, inputLayerNormWeightsData);
std::vector<float> forgetLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor forgetLayerNormWeights(tensorInfo20, forgetLayerNormWeightsData);
std::vector<float> cellLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor cellLayerNormWeights(tensorInfo20, cellLayerNormWeightsData);
std::vector<float> outLayerNormWeightsData = GenerateRandomData<float>(tensorInfo20.GetNumElements());
armnn::ConstTensor outLayerNormWeights(tensorInfo20, outLayerNormWeightsData);
armnn::LstmInputParams params;
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
// additional params because: descriptor.m_CifgEnabled = false
params.m_InputToInputWeights = &inputToInputWeights;
params.m_RecurrentToInputWeights = &recurrentToInputWeights;
params.m_CellToInputWeights = &cellToInputWeights;
params.m_InputGateBias = &inputGateBias;
// additional params because: descriptor.m_ProjectionEnabled = true
params.m_ProjectionWeights = &projectionWeights;
params.m_ProjectionBias = &projectionBias;
// additional params because: descriptor.m_PeepholeEnabled = true
params.m_CellToForgetWeights = &cellToForgetWeights;
params.m_CellToOutputWeights = &cellToOutputWeights;
// additional params because: descriptor.m_LayerNormEnabled = true
params.m_InputLayerNormWeights = &inputLayerNormWeights;
params.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
params.m_CellLayerNormWeights = &cellLayerNormWeights;
params.m_OutputLayerNormWeights = &outLayerNormWeights;
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
const std::string layerName("unidirectionalSequenceLstm");
armnn::IConnectableLayer* const unidirectionalSequenceLstmLayer =
network->AddUnidirectionalSequenceLstmLayer(descriptor, params, layerName.c_str());
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
// connect up
armnn::TensorInfo inputTensorInfo({ batchSize, timeSize, inputSize }, armnn::DataType::Float32);
armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
armnn::TensorInfo outputTensorInfo({ batchSize, timeSize, outputSize }, armnn::DataType::Float32);
inputLayer->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
outputStateIn->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(1));
outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
cellStateIn->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(2));
cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
unidirectionalSequenceLstmLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
unidirectionalSequenceLstmLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyLstmLayer<armnn::UnidirectionalSequenceLstmDescriptor> checker(
layerName,
{inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
{outputTensorInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
TEST_CASE("SerializeDeserializeUnidirectionalSequenceLstmCifgPeepholeNoProjectionTimeMajor")
{
armnn::UnidirectionalSequenceLstmDescriptor descriptor;
descriptor.m_ActivationFunc = 4;
descriptor.m_ClippingThresProj = 0.0f;
descriptor.m_ClippingThresCell = 0.0f;
descriptor.m_CifgEnabled = true; // if this is true then we DON'T need to set the OptCifgParams
descriptor.m_ProjectionEnabled = false;
descriptor.m_PeepholeEnabled = true;
descriptor.m_TimeMajor = true;
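// With m_TimeMajor enabled the sequence tensors use a [time, batch, feature] layout,
// so the input/output TensorInfo shapes below start with timeSize.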
const uint32_t batchSize = 1;
const uint32_t timeSize = 2;
const uint32_t inputSize = 2;
const uint32_t numUnits = 4;
const uint32_t outputSize = numUnits;
armnn::TensorInfo inputWeightsInfo1({numUnits, inputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> inputToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
armnn::ConstTensor inputToForgetWeights(inputWeightsInfo1, inputToForgetWeightsData);
std::vector<float> inputToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
armnn::ConstTensor inputToCellWeights(inputWeightsInfo1, inputToCellWeightsData);
std::vector<float> inputToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo1.GetNumElements());
armnn::ConstTensor inputToOutputWeights(inputWeightsInfo1, inputToOutputWeightsData);
armnn::TensorInfo inputWeightsInfo2({numUnits, outputSize}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> recurrentToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
armnn::ConstTensor recurrentToForgetWeights(inputWeightsInfo2, recurrentToForgetWeightsData);
std::vector<float> recurrentToCellWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
armnn::ConstTensor recurrentToCellWeights(inputWeightsInfo2, recurrentToCellWeightsData);
std::vector<float> recurrentToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo2.GetNumElements());
armnn::ConstTensor recurrentToOutputWeights(inputWeightsInfo2, recurrentToOutputWeightsData);
armnn::TensorInfo inputWeightsInfo3({numUnits}, armnn::DataType::Float32, 0.0f, 0, true);
std::vector<float> cellToForgetWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
armnn::ConstTensor cellToForgetWeights(inputWeightsInfo3, cellToForgetWeightsData);
std::vector<float> cellToOutputWeightsData = GenerateRandomData<float>(inputWeightsInfo3.GetNumElements());
armnn::ConstTensor cellToOutputWeights(inputWeightsInfo3, cellToOutputWeightsData);
std::vector<float> forgetGateBiasData(numUnits, 1.0f);
armnn::ConstTensor forgetGateBias(inputWeightsInfo3, forgetGateBiasData);
std::vector<float> cellBiasData(numUnits, 0.0f);
armnn::ConstTensor cellBias(inputWeightsInfo3, cellBiasData);
std::vector<float> outputGateBiasData(numUnits, 0.0f);
armnn::ConstTensor outputGateBias(inputWeightsInfo3, outputGateBiasData);
armnn::LstmInputParams params;
params.m_InputToForgetWeights = &inputToForgetWeights;
params.m_InputToCellWeights = &inputToCellWeights;
params.m_InputToOutputWeights = &inputToOutputWeights;
params.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
params.m_RecurrentToCellWeights = &recurrentToCellWeights;
params.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
params.m_ForgetGateBias = &forgetGateBias;
params.m_CellBias = &cellBias;
params.m_OutputGateBias = &outputGateBias;
params.m_CellToForgetWeights = &cellToForgetWeights;
params.m_CellToOutputWeights = &cellToOutputWeights;
armnn::INetworkPtr network = armnn::INetwork::Create();
armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
armnn::IConnectableLayer* const cellStateIn = network->AddInputLayer(1);
armnn::IConnectableLayer* const outputStateIn = network->AddInputLayer(2);
const std::string layerName("UnidirectionalSequenceLstm");
armnn::IConnectableLayer* const unidirectionalSequenceLstmLayer =
network->AddUnidirectionalSequenceLstmLayer(descriptor, params, layerName.c_str());
armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
// connect up
armnn::TensorInfo inputTensorInfo({ timeSize, batchSize, inputSize }, armnn::DataType::Float32);
armnn::TensorInfo cellStateTensorInfo({ batchSize, numUnits}, armnn::DataType::Float32);
armnn::TensorInfo outputStateTensorInfo({ batchSize, outputSize }, armnn::DataType::Float32);
armnn::TensorInfo outputTensorInfo({ timeSize, batchSize, outputSize }, armnn::DataType::Float32);
inputLayer->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(0));
inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
outputStateIn->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(1));
outputStateIn->GetOutputSlot(0).SetTensorInfo(outputStateTensorInfo);
cellStateIn->GetOutputSlot(0).Connect(unidirectionalSequenceLstmLayer->GetInputSlot(2));
cellStateIn->GetOutputSlot(0).SetTensorInfo(cellStateTensorInfo);
unidirectionalSequenceLstmLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
unidirectionalSequenceLstmLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
CHECK(deserializedNetwork);
VerifyLstmLayer<armnn::UnidirectionalSequenceLstmDescriptor> checker(
layerName,
{inputTensorInfo, outputStateTensorInfo, cellStateTensorInfo},
{outputTensorInfo},
descriptor,
params);
deserializedNetwork->ExecuteStrategy(checker);
}
}