@@ -12,6 +12,12 @@
 #include <AzCore/RTTI/BehaviorContext.h>
 #include <AzCore/Serialization/EditContext.h>
 #include <AzCore/Serialization/SerializeContext.h>
+#include <AzCore/IO/FileIO.h>
+#include <AzCore/IO/FileReader.h>
+#include <AzCore/IO/Path/Path.h>
+#include <AzCore/Console/ILogger.h>
+#include <AzNetworking/Serialization/NetworkInputSerializer.h>
+#include <AzNetworking/Serialization/NetworkOutputSerializer.h>
 
 namespace MachineLearning
 {
@@ -21,6 +27,12 @@ namespace MachineLearning
         {
             serializeContext->Class<MultilayerPerceptron>()
                 ->Version(1)
+                ->Field("Name", &MultilayerPerceptron::m_name)
+                ->Field("ModelFile", &MultilayerPerceptron::m_modelFile)
+                ->Field("TestDataFile", &MultilayerPerceptron::m_testDataFile)
+                ->Field("TestLabelFile", &MultilayerPerceptron::m_testLabelFile)
+                ->Field("TrainDataFile", &MultilayerPerceptron::m_trainDataFile)
+                ->Field("TrainLabelFile", &MultilayerPerceptron::m_trainLabelFile)
                 ->Field("ActivationCount", &MultilayerPerceptron::m_activationCount)
                 ->Field("Layers", &MultilayerPerceptron::m_layers)
                 ;
@@ -29,6 +41,12 @@ namespace MachineLearning
             {
                 editContext->Class<MultilayerPerceptron>("A basic multilayer perceptron class", "")
                     ->ClassElement(AZ::Edit::ClassElements::EditorData, "")
+                    ->DataElement(AZ::Edit::UIHandlers::Default, &MultilayerPerceptron::m_name, "Name", "The name for this model")
+                    ->DataElement(AZ::Edit::UIHandlers::Default, &MultilayerPerceptron::m_modelFile, "ModelFile", "The file this model is saved to and loaded from")
+                    ->DataElement(AZ::Edit::UIHandlers::Default, &MultilayerPerceptron::m_testDataFile, "TestDataFile", "The file test data should be loaded from")
+                    ->DataElement(AZ::Edit::UIHandlers::Default, &MultilayerPerceptron::m_testLabelFile, "TestLabelFile", "The file test labels should be loaded from")
+                    ->DataElement(AZ::Edit::UIHandlers::Default, &MultilayerPerceptron::m_trainDataFile, "TrainDataFile", "The file training data should be loaded from")
+                    ->DataElement(AZ::Edit::UIHandlers::Default, &MultilayerPerceptron::m_trainLabelFile, "TrainLabelFile", "The file training labels should be loaded from")
                     ->DataElement(AZ::Edit::UIHandlers::Default, &MultilayerPerceptron::m_activationCount, "Activation Count", "The number of neurons in the activation layer")
                     ->Attribute(AZ::Edit::Attributes::ChangeNotify, &MultilayerPerceptron::OnActivationCountChanged)
                     ->DataElement(AZ::Edit::UIHandlers::Default, &MultilayerPerceptron::m_layers, "Layers", "The layers of the neural network")
@@ -46,44 +64,99 @@ namespace MachineLearning
                 Attribute(AZ::Script::Attributes::ExcludeFrom, AZ::Script::Attributes::ExcludeFlags::ListOnly)->
                 Constructor<AZStd::size_t>()->
                 Attribute(AZ::Script::Attributes::Storage, AZ::Script::Attributes::StorageType::Value)->
-                Method("AddLayer", &MultilayerPerceptron::AddLayer)->
+                Method("GetName", &MultilayerPerceptron::GetName)->
                 Method("GetLayerCount", &MultilayerPerceptron::GetLayerCount)->
-                Method("GetLayer", &MultilayerPerceptron::GetLayer)->
-                Method("Forward", &MultilayerPerceptron::Forward)->
-                Method("Reverse", &MultilayerPerceptron::Reverse)->
                 Property("ActivationCount", BehaviorValueProperty(&MultilayerPerceptron::m_activationCount))->
                 Property("Layers", BehaviorValueProperty(&MultilayerPerceptron::m_layers))
                 ;
         }
     }
 
+    MultilayerPerceptron::MultilayerPerceptron()
+    {
+    }
+
+    MultilayerPerceptron::MultilayerPerceptron(const MultilayerPerceptron& rhs)
+        : m_name(rhs.m_name)
+        , m_modelFile(rhs.m_modelFile)
+        , m_testDataFile(rhs.m_testDataFile)
+        , m_testLabelFile(rhs.m_testLabelFile)
+        , m_trainDataFile(rhs.m_trainDataFile)
+        , m_trainLabelFile(rhs.m_trainLabelFile)
+        , m_activationCount(rhs.m_activationCount)
+        , m_layers(rhs.m_layers)
+    {
+    }
+
     MultilayerPerceptron::MultilayerPerceptron(AZStd::size_t activationCount)
         : m_activationCount(activationCount)
     {
     }
 
-    void MultilayerPerceptron::AddLayer(AZStd::size_t layerDimensionality, ActivationFunctions activationFunction)
+    MultilayerPerceptron::~MultilayerPerceptron()
     {
-        AZStd::size_t lastLayerDimensionality = m_activationCount;
-        if (!m_layers.empty())
+    }
+
+    MultilayerPerceptron& MultilayerPerceptron::operator=(const MultilayerPerceptron& rhs)
+    {
+        m_name = rhs.m_name;
+        m_modelFile = rhs.m_modelFile;
+        m_testDataFile = rhs.m_testDataFile;
+        m_testLabelFile = rhs.m_testLabelFile;
+        m_trainDataFile = rhs.m_trainDataFile;
+        m_trainLabelFile = rhs.m_trainLabelFile;
+        m_activationCount = rhs.m_activationCount;
+        m_layers = rhs.m_layers;
+        return *this;
+    }
+
+    AZStd::string MultilayerPerceptron::GetName() const
+    {
+        return m_name;
+    }
+
+    AZStd::string MultilayerPerceptron::GetAssetFile(AssetTypes assetType) const
+    {
+        switch (assetType)
         {
-            lastLayerDimensionality = m_layers.back().m_biases.GetDimensionality();
+        case AssetTypes::Model:
+            return m_modelFile;
+        case AssetTypes::TestData:
+            return m_testDataFile;
+        case AssetTypes::TestLabels:
+            return m_testLabelFile;
+        case AssetTypes::TrainingData:
+            return m_trainDataFile;
+        case AssetTypes::TrainingLabels:
+            return m_trainLabelFile;
         }
-        m_layers.push_back(AZStd::move(Layer(activationFunction, lastLayerDimensionality, layerDimensionality)));
+        return "";
     }
 
-    AZStd::size_t MultilayerPerceptron::GetLayerCount() const
+    AZStd::size_t MultilayerPerceptron::GetInputDimensionality() const
     {
-        return m_layers.size();
+        return m_activationCount;
    }
 
-    Layer* MultilayerPerceptron::GetLayer(AZStd::size_t layerIndex)
+    AZStd::size_t MultilayerPerceptron::GetOutputDimensionality() const
     {
-        return &m_layers[layerIndex];
+        //AZStd::lock_guard lock(m_mutex);
+        if (!m_layers.empty())
+        {
+            return m_layers.back().m_biases.GetDimensionality();
+        }
+        return m_activationCount;
+    }
+
+    AZStd::size_t MultilayerPerceptron::GetLayerCount() const
+    {
+        //AZStd::lock_guard lock(m_mutex);
+        return m_layers.size();
    }
 
     AZStd::size_t MultilayerPerceptron::GetParameterCount() const
     {
+        //AZStd::lock_guard lock(m_mutex);
         AZStd::size_t parameterCount = 0;
         for (const Layer& layer : m_layers)
         {
@@ -92,50 +165,81 @@ namespace MachineLearning
         return parameterCount;
     }
 
-    const AZ::VectorN* MultilayerPerceptron::Forward(const AZ::VectorN& activations)
+    IInferenceContextPtr MultilayerPerceptron::CreateInferenceContext()
     {
+        return new MlpInferenceContext();
+    }
+
+    ITrainingContextPtr MultilayerPerceptron::CreateTrainingContext()
+    {
+        return new MlpTrainingContext();
+    }
+
+    const AZ::VectorN* MultilayerPerceptron::Forward(IInferenceContextPtr context, const AZ::VectorN& activations)
+    {
+        //AZStd::lock_guard lock(m_mutex);
+        MlpInferenceContext* forwardContext = static_cast<MlpInferenceContext*>(context);
+        forwardContext->m_layerData.resize(m_layers.size());
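+        // Per-layer outputs are stored in the caller-provided inference context rather than on the
+        // layers themselves, keeping forward-pass scratch state out of the shared model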
+
         const AZ::VectorN* lastLayerOutput = &activations;
-        for (Layer& layer : m_layers)
+        for (AZStd::size_t iter = 0; iter < m_layers.size(); ++iter)
         {
-            layer.Forward(*lastLayerOutput);
-            lastLayerOutput = &layer.m_output;
+            m_layers[iter].Forward(forwardContext->m_layerData[iter], *lastLayerOutput);
+            lastLayerOutput = &forwardContext->m_layerData[iter].m_output;
         }
         return lastLayerOutput;
     }
 
-    void MultilayerPerceptron::Reverse(LossFunctions lossFunction, const AZ::VectorN& activations, const AZ::VectorN& expected)
+    void MultilayerPerceptron::Reverse(ITrainingContextPtr context, LossFunctions lossFunction, const AZ::VectorN& activations, const AZ::VectorN& expected)
     {
-        ++m_trainingSampleSize;
+        //AZStd::lock_guard lock(m_mutex);
+        MlpTrainingContext* reverseContext = static_cast<MlpTrainingContext*>(context);
+        MlpInferenceContext* forwardContext = &reverseContext->m_forward;
+        reverseContext->m_layerData.resize(m_layers.size());
+        forwardContext->m_layerData.resize(m_layers.size());
+
+        ++reverseContext->m_trainingSampleSize;
 
         // First feed-forward the activations to get our current model predictions
-        const AZ::VectorN* output = Forward(activations);
+        // We do additional book-keeping over a standard forward pass to make gradient calculations easier
+        const AZ::VectorN* lastLayerOutput = &activations;
+        for (AZStd::size_t iter = 0; iter < m_layers.size(); ++iter)
+        {
+            reverseContext->m_layerData[iter].m_lastInput = lastLayerOutput;
+            m_layers[iter].Forward(forwardContext->m_layerData[iter], *lastLayerOutput);
+            lastLayerOutput = &forwardContext->m_layerData[iter].m_output;
+        }
 
         // Compute the partial derivatives of the loss function with respect to the final layer output
         AZ::VectorN costGradients;
-        ComputeLoss_Derivative(lossFunction, *output, expected, costGradients);
+        ComputeLoss_Derivative(lossFunction, *lastLayerOutput, expected, costGradients);
 
-        for (auto iter = m_layers.rbegin(); iter != m_layers.rend(); ++iter)
+        AZ::VectorN* lossGradient = &costGradients;
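+        // Walk the layers from last to first; each layer's backpropagation gradients become the
+        // loss gradient fed to the layer before it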
+        for (int64_t iter = static_cast<int64_t>(m_layers.size()) - 1; iter >= 0; --iter)
         {
-            iter->AccumulateGradients(costGradients);
-            costGradients = iter->m_backpropagationGradients;
+            m_layers[iter].AccumulateGradients(reverseContext->m_layerData[iter], forwardContext->m_layerData[iter], *lossGradient);
+            lossGradient = &reverseContext->m_layerData[iter].m_backpropagationGradients;
         }
     }
 
-    void MultilayerPerceptron::GradientDescent(float learningRate)
+    void MultilayerPerceptron::GradientDescent(ITrainingContextPtr context, float learningRate)
     {
-        if (m_trainingSampleSize > 0)
+        //AZStd::lock_guard lock(m_mutex);
+        MlpTrainingContext* reverseContext = static_cast<MlpTrainingContext*>(context);
+        if (reverseContext->m_trainingSampleSize > 0)
         {
-            const float adjustedLearningRate = learningRate / static_cast<float>(m_trainingSampleSize);
-            for (auto iter = m_layers.rbegin(); iter != m_layers.rend(); ++iter)
+            const float adjustedLearningRate = learningRate / static_cast<float>(reverseContext->m_trainingSampleSize);
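+            // Scaling by the accumulated sample count averages the gradients batched up by Reverse()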
+            for (AZStd::size_t iter = 0; iter < m_layers.size(); ++iter)
             {
-                iter->ApplyGradients(adjustedLearningRate);
+                m_layers[iter].ApplyGradients(reverseContext->m_layerData[iter], adjustedLearningRate);
             }
         }
-        m_trainingSampleSize = 0;
+        reverseContext->m_trainingSampleSize = 0;
     }
 
     void MultilayerPerceptron::OnActivationCountChanged()
     {
+        //AZStd::lock_guard lock(m_mutex);
         AZStd::size_t lastLayerDimensionality = m_activationCount;
         for (Layer& layer : m_layers)
         {
@@ -144,4 +248,99 @@ namespace MachineLearning
             lastLayerDimensionality = layer.m_outputSize;
         }
     }
+
+    bool MultilayerPerceptron::LoadModel()
+    {
+        AZ::IO::SystemFile modelFile;
+        AZ::IO::FixedMaxPath filePathFixed = m_modelFile.c_str();
+        if (AZ::IO::FileIOBase* fileIOBase = AZ::IO::FileIOBase::GetInstance())
+        {
+            fileIOBase->ResolvePath(filePathFixed, m_modelFile.c_str());
+        }
+
+        if (!modelFile.Open(filePathFixed.c_str(), AZ::IO::SystemFile::SF_OPEN_READ_ONLY))
+        {
+            AZLOG_ERROR("Failed to load '%s'. File could not be opened.", filePathFixed.c_str());
+            return false;
+        }
+
+        const AZ::IO::SizeType length = modelFile.Length();
+        if (length == 0)
+        {
+            AZLOG_ERROR("Failed to load '%s'. File is empty.", filePathFixed.c_str());
+            return false;
+        }
+
+        AZStd::vector<uint8_t> serializeBuffer;
+        serializeBuffer.resize(length);
+        modelFile.Seek(0, AZ::IO::SystemFile::SF_SEEK_BEGIN);
+        modelFile.Read(serializeBuffer.size(), serializeBuffer.data());
+        AzNetworking::NetworkOutputSerializer serializer(serializeBuffer.data(), static_cast<uint32_t>(serializeBuffer.size()));
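+        // NetworkOutputSerializer reads values back out of the raw byte buffer; the same Serialize()
+        // method is shared with SaveModel(), which writes through a NetworkInputSerializer instead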
+        return Serialize(serializer);
+    }
+
+    bool MultilayerPerceptron::SaveModel()
+    {
+        AZ::IO::SystemFile modelFile;
+        AZ::IO::FixedMaxPath filePathFixed = m_modelFile.c_str();
+        if (AZ::IO::FileIOBase* fileIOBase = AZ::IO::FileIOBase::GetInstance())
+        {
+            fileIOBase->ResolvePath(filePathFixed, m_modelFile.c_str());
+        }
+
+        if (!modelFile.Open(filePathFixed.c_str(), AZ::IO::SystemFile::SF_OPEN_READ_WRITE | AZ::IO::SystemFile::SF_OPEN_CREATE))
+        {
+            AZLOG_ERROR("Failed to save to '%s'. File could not be opened for writing.", filePathFixed.c_str());
+            return false;
+        }
+        modelFile.Seek(0, AZ::IO::SystemFile::SF_SEEK_BEGIN);
+
+        AZStd::vector<uint8_t> serializeBuffer;
+        serializeBuffer.resize(EstimateSerializeSize());
+        AzNetworking::NetworkInputSerializer serializer(serializeBuffer.data(), static_cast<uint32_t>(serializeBuffer.size()));
+        if (Serialize(serializer))
+        {
+            modelFile.Write(serializeBuffer.data(), serializeBuffer.size());
+            return true;
+        }
+
+        return false;
+    }
+
+    void MultilayerPerceptron::AddLayer(AZStd::size_t layerDimensionality, ActivationFunctions activationFunction)
+    {
+        // This is not thread safe, this should only be used during model configuration
+        const AZStd::size_t lastLayerDimensionality = GetOutputDimensionality();
+        m_layers.push_back(AZStd::move(Layer(activationFunction, lastLayerDimensionality, layerDimensionality)));
+    }
+
+    Layer* MultilayerPerceptron::GetLayer(AZStd::size_t layerIndex)
+    {
+        // This is not thread safe, this method should only be used by unit testing to inspect layer weights and biases for correctness
+        return &m_layers[layerIndex];
+    }
+
+    bool MultilayerPerceptron::Serialize(AzNetworking::ISerializer& serializer)
+    {
+        //AZStd::lock_guard lock(m_mutex);
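+        // Only the name, activation count, and layers (weights and biases) are written to the model
+        // file; the data/label file paths are editor-side configuration and are not serialized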
+        return serializer.Serialize(m_name, "Name")
+            && serializer.Serialize(m_activationCount, "activationCount")
+            && serializer.Serialize(m_layers, "layers");
+    }
+
+    AZStd::size_t MultilayerPerceptron::EstimateSerializeSize() const
+    {
+        const AZStd::size_t padding = 64; // 64 bytes of extra padding just in case
+        AZStd::size_t estimatedSize = padding
+            + sizeof(AZStd::size_t)
+            + m_name.size()
+            + sizeof(m_activationCount)
+            + sizeof(AZStd::size_t);
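+        // Rough fixed-size estimate: a length prefix plus the name characters, the activation count,
+        // and a layer count; the per-layer estimates are added below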
+        //AZStd::lock_guard lock(m_mutex);
+        for (const Layer& layer : m_layers)
+        {
+            estimatedSize += layer.EstimateSerializeSize();
+        }
+        return estimatedSize;
+    }
 }