Skip to content

Commit

Permalink
coding convolutional net (wip)
Browse files Browse the repository at this point in the history
  • Loading branch information
JulioJerez committed Oct 23, 2023
1 parent 48a6184 commit 42d69db
Show file tree
Hide file tree
Showing 15 changed files with 596 additions and 286 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@

namespace ndQuadruped_1
{
//#define ND_TRAIN_MODEL
#define ND_TRAIN_MODEL

#define CONTROLLER_NAME "ndQuadruped_1VPG.dnn"

Expand Down
44 changes: 29 additions & 15 deletions newton-4.00/applications/ndSandbox/toolbox/ndTestDeepBrain.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,15 @@
#include "ndSandboxStdafx.h"
#include "ndTestDeepBrain.h"

#define D_USE_CONVOLUTIONAL_LAYERS

static void ThreeLayersTwoInputsTwoOutputs()
{
ndBrain brain;
ndInt32 hiddenNeurons = 16;

ndFixSizeArray<ndBrainLayer*, 16> layers;

layers.PushBack(new ndBrainLayerLinear(2, hiddenNeurons));
layers.PushBack(new ndBrainLayerTanhActivation(hiddenNeurons));

Expand Down Expand Up @@ -52,14 +55,15 @@ static void ThreeLayersTwoInputsTwoOutputs()
ndArray<ndBrainTrainer*> trainers;

ndBrainThreadPool threads;
threads.SetThreadCount(4);
//threads.SetThreadCount(4);
threads.SetThreadCount(1);
for (ndInt32 i = 0; i < bashSize; ++i)
{
trainers.PushBack(new ndBrainTrainer(&brain));
}

//ndBrainOptimizerSgd optimizer;
ndBrainOptimizerAdam optimizer;
//ndBrainOptimizerSgd optimizer;

ndInt32 randomeSelection[bashSize];
auto UpdateTrainer = ndMakeObject::ndFunction([&trainers, &randomeSelection, &inputBatch, &groundTruth, bashSize](ndInt32 threadIndex, ndInt32 threadCount)
Expand All @@ -84,7 +88,7 @@ static void ThreeLayersTwoInputsTwoOutputs()
randomeSelection[j] = ndInt32 (ndRandInt() % samples);
}
threads.ParallelExecute(UpdateTrainer);
optimizer.Update(&threads, trainers, ndReal(1.0e-3f));
optimizer.Update(&threads, trainers, ndBrainFloat(1.0e-3f));
}

ndBrainVector truth;
Expand Down Expand Up @@ -276,7 +280,7 @@ static void MnistTrainingSet()
,m_bashBufferSize(64)
{
ndInt32 threadCount = ndMin(ndBrainThreadPool::GetMaxThreads(), ndMin(m_bashBufferSize, 16));
//threadCount = 1;
threadCount = 1;
SetThreadCount(threadCount);
for (ndInt32 i = 0; i < m_bashBufferSize; ++i)
{
Expand Down Expand Up @@ -471,7 +475,6 @@ static void MnistTrainingSet()

bool traningTest = fails < minTrainingFail;

//minTrainingFail = fails;
minTrainingFail = ndMax(fails, 5);
ndInt32 actualTraining = fails;
ndBrainThreadPool::ParallelExecute(CrossValidateTest);
Expand Down Expand Up @@ -517,18 +520,29 @@ static void MnistTrainingSet()
ndInt32 neuronsPerLayers = 64;
ndFixSizeArray<ndBrainLayer*, 16> layers;

layers.PushBack(new ndBrainLayerLinear(trainingDigits->GetColumns(), neuronsPerLayers));
layers.PushBack(new ndBrainLayerApproximateTanhActivation(layers[layers.GetCount() - 1]->GetOutputSize()));

layers.PushBack(new ndBrainLayerLinear(layers[layers.GetCount() - 1]->GetOutputSize(), neuronsPerLayers));
layers.PushBack(new ndBrainLayerApproximateTanhActivation(layers[layers.GetCount() - 1]->GetOutputSize()));
#ifdef D_USE_CONVOLUTIONAL_LAYERS
//layers.PushBack(new ndBrainLayerLinear(trainingDigits->GetColumns(), neuronsPerLayers));
ndInt32 width = trainingDigits->GetColumns() / 28;
ndInt32 height = 28;
ndAssert((height * width) == trainingDigits->GetColumns());
layers.PushBack(new ndBrainLayerConvolutional(28, width, height, 1, 5, 16));
//layers.PushBack(new ndBrainLayerConvolutional(28, 3, 3, 3, 2, 2));

layers.PushBack(new ndBrainLayerLinear(layers[layers.GetCount() - 1]->GetOutputSize(), neuronsPerLayers));
//layers.PushBack(new ndBrainLayerApproximateTanhActivation(layers[layers.GetCount() - 1]->GetOutputSize()));
#else
layers.PushBack(new ndBrainLayerLinear(trainingDigits->GetColumns(), neuronsPerLayers));
layers.PushBack(new ndBrainLayerApproximateTanhActivation(layers[layers.GetCount() - 1]->GetOutputSize()));

layers.PushBack(new ndBrainLayerLinear(layers[layers.GetCount() - 1]->GetOutputSize(), trainingLabels->GetColumns()));
//layers.PushBack(new ndBrainLayerSoftmaxActivation(layers[layers.GetCount() - 1]->GetOutputSize()));
layers.PushBack(new ndBrainLayerCategoricalSoftmaxActivation(layers[layers.GetCount() - 1]->GetOutputSize()));
#endif

//layers.PushBack(new ndBrainLayerLinear(layers[layers.GetCount() - 1]->GetOutputSize(), neuronsPerLayers));
//layers.PushBack(new ndBrainLayerApproximateTanhActivation(layers[layers.GetCount() - 1]->GetOutputSize()));
//
//layers.PushBack(new ndBrainLayerLinear(layers[layers.GetCount() - 1]->GetOutputSize(), neuronsPerLayers));
//layers.PushBack(new ndBrainLayerApproximateTanhActivation(layers[layers.GetCount() - 1]->GetOutputSize()));
//
//layers.PushBack(new ndBrainLayerLinear(layers[layers.GetCount() - 1]->GetOutputSize(), trainingLabels->GetColumns()));
////layers.PushBack(new ndBrainLayerSoftmaxActivation(layers[layers.GetCount() - 1]->GetOutputSize()));
//layers.PushBack(new ndBrainLayerCategoricalSoftmaxActivation(layers[layers.GetCount() - 1]->GetOutputSize()));

for (ndInt32 i = 0; i < layers.GetCount(); ++i)
{
Expand Down
2 changes: 1 addition & 1 deletion newton-4.00/sdk/dBrain/ndBrain.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ void ndBrain::CopyFrom(const ndBrain& src)
const ndArray<ndBrainLayer*>& srcLayers = src;
for (ndInt32 i = 0; i < layers.GetCount(); ++i)
{
layers[i]->CopyFrom(*srcLayers[i]);
layers[i]->Set(*srcLayers[i]);
}
}

Expand Down
59 changes: 50 additions & 9 deletions newton-4.00/sdk/dBrain/ndBrainLayer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -61,11 +61,6 @@ ndInt32 ndBrainLayer::GetOutputSize() const
return 0;
}

// Legacy parameter-copy entry point. The abstract base owns no parameter
// storage, so reaching this implementation is a programming error; concrete
// layers with weights/biases are expected to override it.
void ndBrainLayer::CopyFrom(const ndBrainLayer&)
{
ndAssert(0);
}

bool ndBrainLayer::HasParameters() const
{
ndAssert(0);
Expand All @@ -82,23 +77,63 @@ void ndBrainLayer::InitWeightsXavierMethod()
ndAssert(0);
}

ndBrainVector* ndBrainLayer::GetBias()
//ndBrainVector* ndBrainLayer::GetBias()
//{
// ndAssert(0);
// return nullptr;
//}
//
//ndBrainMatrix* ndBrainLayer::GetWeights()
//{
// ndAssert(0);
// return nullptr;
//}

// Resets the layer's trainable parameters to their cleared state.
// Base-class stub: ndBrainLayer itself owns no parameters, so calling this
// on the base is a programming error; parameterized layers must override it.
void ndBrainLayer::Clear()
{
ndAssert(0);
}

ndBrainMatrix* ndBrainLayer::GetWeights()
void ndBrainLayer::FlushToZero()
{
ndAssert(0);
}

// Multiplies all trainable parameters by a scalar. Base-class stub: traps;
// layers that carry parameters must override.
void ndBrainLayer::Scale(ndBrainFloat)
{
ndAssert(0);
}

// Copies the parameters of another layer of the same concrete type into
// this one (replacement for the removed CopyFrom). Base-class stub: traps.
void ndBrainLayer::Set(const ndBrainLayer&)
{
ndAssert(0);
}

// Element-wise addition of another layer's parameters into this layer's.
// Base-class stub: traps; parameterized layers must override.
void ndBrainLayer::Add(const ndBrainLayer&)
{
ndAssert(0);
}

// Element-wise (Hadamard) multiply of another layer's parameters into this
// layer's. Base-class stub: traps; parameterized layers must override.
void ndBrainLayer::Mul(const ndBrainLayer&)
{
ndAssert(0);
}

// Fused multiply-add: this += src * scale, applied to all trainable
// parameters. Base-class stub: traps; parameterized layers must override.
void ndBrainLayer::ScaleAdd(const ndBrainLayer&, ndBrainFloat)
{
ndAssert(0);
}

// Linear interpolation of this layer's parameters toward another layer's,
// weighted by the blend factor. Base-class stub: traps.
void ndBrainLayer::Blend(const ndBrainLayer&, ndBrainFloat)
{
ndAssert(0);
}

// Adam optimizer parameter step using the first-moment (u) and
// second-moment (v) accumulators and the numerical-stability epsilon.
// Base-class stub: traps; parameterized layers must override.
void ndBrainLayer::AdamUpdate(const ndBrainLayer&, const ndBrainLayer&, ndBrainFloat)
{
ndAssert(0);
}

void ndBrainLayer::Save(const ndBrainSave* const) const
{
ndAssert(0);
Expand All @@ -118,3 +153,9 @@ void ndBrainLayer::CalculateParamGradients(const ndBrainVector&, const ndBrainVe
{
ndAssert(0);
}

// Newer backpropagation overload: writes the input gradient and accumulates
// the parameter gradients into a layer-shaped receiver (gradientOut in the
// header) instead of separate bias/weight buffers. Base-class stub: traps.
void ndBrainLayer::CalculateParamGradients(const ndBrainVector&, const ndBrainVector&, const ndBrainVector&, ndBrainVector&, ndBrainLayer* const) const
{
ndAssert(0);
}

21 changes: 18 additions & 3 deletions newton-4.00/sdk/dBrain/ndBrainLayer.h
Original file line number Diff line number Diff line change
Expand Up @@ -43,21 +43,36 @@ class ndBrainLayer : public ndClassAlloc

virtual ndInt32 GetInputSize() const;
virtual ndInt32 GetOutputSize() const;
virtual void CopyFrom(const ndBrainLayer& src);

virtual void Blend(const ndBrainLayer& src, ndBrainFloat blend);

virtual ndBrainVector* GetBias();
virtual ndBrainMatrix* GetWeights();
//virtual ndBrainVector* GetBias();
//virtual ndBrainMatrix* GetWeights();

virtual void Clear();
virtual void FlushToZero();
virtual void Scale(ndBrainFloat scale);
virtual void Set(const ndBrainLayer& src);
virtual void Add(const ndBrainLayer& src);
virtual void Mul(const ndBrainLayer& src);
virtual void ScaleAdd(const ndBrainLayer& src, ndBrainFloat scale);

virtual void InitWeightsXavierMethod();
virtual void InitWeights(ndBrainFloat weighVariance, ndBrainFloat biasVariance);

virtual void MakePrediction(const ndBrainVector& input, ndBrainVector& output) const;
virtual void InputDerivative(const ndBrainVector& output, const ndBrainVector& outputDerivative, ndBrainVector& inputDerivative) const;

virtual void CalculateParamGradients (
const ndBrainVector& input, const ndBrainVector& output, const ndBrainVector& outputDerivative,
ndBrainVector& inputGradient, ndBrainVector& biasGradient, ndBrainMatrix& weightGradient);

virtual void CalculateParamGradients(
const ndBrainVector& input, const ndBrainVector& output,
const ndBrainVector& outputDerivative, ndBrainVector& inputGradient, ndBrainLayer* const gradientOut) const;

virtual void AdamUpdate(const ndBrainLayer& u, const ndBrainLayer& v, ndBrainFloat epsilon);

virtual void Save(const ndBrainSave* const loadSave) const;
};

Expand Down
44 changes: 41 additions & 3 deletions newton-4.00/sdk/dBrain/ndBrainLayerActivation.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -72,15 +72,48 @@ ndInt32 ndBrainLayerActivation::GetInputSize() const
return m_neurons;
}

void ndBrainLayerActivation::InitWeightsXavierMethod()
void ndBrainLayerActivation::Clear()
{
ndAssert(0);
}

// Intentional no-op: an activation layer has no parameters to flush.
void ndBrainLayerActivation::FlushToZero()
{
}

// Intentional no-op: nothing to copy, activation layers are parameter-less.
void ndBrainLayerActivation::Set(const ndBrainLayer&)
{
}

// Intentional no-op: nothing to scale, activation layers are parameter-less.
void ndBrainLayerActivation::Scale(ndBrainFloat)
{
}

// Parameter accumulation is undefined for a parameter-less layer; trap.
// NOTE(review): asymmetric with the no-op Set/Scale above — confirm whether
// the optimizer is ever expected to call Add on activation layers.
void ndBrainLayerActivation::Add(const ndBrainLayer&)
{
ndAssert(0);
}

// Parameter multiply is undefined for a parameter-less layer; trap.
void ndBrainLayerActivation::Mul(const ndBrainLayer&)
{
ndAssert(0);
}

// Fused multiply-add is undefined for a parameter-less layer; trap.
void ndBrainLayerActivation::ScaleAdd(const ndBrainLayer&, ndBrainFloat)
{
ndAssert(0);
}

void ndBrainLayerActivation::CopyFrom(const ndBrainLayer&)
void ndBrainLayerActivation::AdamUpdate(const ndBrainLayer&, const ndBrainLayer&, ndBrainFloat)
{
ndAssert(0);
}

// Intentional no-op: no weights to initialize in an activation layer.
void ndBrainLayerActivation::InitWeightsXavierMethod()
{
}

// Intentional no-op: no weights or biases to initialize.
void ndBrainLayerActivation::InitWeights(ndBrainFloat, ndBrainFloat)
{
}

Expand All @@ -104,3 +137,8 @@ void ndBrainLayerActivation::CalculateParamGradients(const ndBrainVector&, const
{
InputDerivative(output, outputDerivative, inputGradient);
}

// Backpropagation through an activation layer: there are no parameter
// gradients to write into the layer-shaped receiver, so only the input
// gradient is produced, via the activation's derivative at the given output.
void ndBrainLayerActivation::CalculateParamGradients(const ndBrainVector&, const ndBrainVector& output, const ndBrainVector& outputDerivative, ndBrainVector& inputGradient, ndBrainLayer* const) const
{
InputDerivative(output, outputDerivative, inputGradient);
}
15 changes: 14 additions & 1 deletion newton-4.00/sdk/dBrain/ndBrainLayerActivation.h
Original file line number Diff line number Diff line change
Expand Up @@ -39,20 +39,33 @@ class ndBrainLayerActivation : public ndBrainLayer
virtual ndInt32 GetOutputSize() const;
virtual ndInt32 GetInputSize() const;
virtual const char* GetLabelId() const;
virtual void CopyFrom(const ndBrainLayer& src);
virtual void Blend(const ndBrainLayer& src, ndBrainFloat blend);

virtual void InitWeightsXavierMethod();
virtual void InitWeights(ndBrainFloat weighVariance, ndBrainFloat biasVariance);

virtual void MakePrediction(const ndBrainVector& input, ndBrainVector& output) const;
virtual void InputDerivative(const ndBrainVector& output, const ndBrainVector& outputDerivative, ndBrainVector& inputDerivative) const;

virtual void CalculateParamGradients(
const ndBrainVector& input, const ndBrainVector& output, const ndBrainVector& outputDerivative,
ndBrainVector& inputGradient, ndBrainVector& biasGradient, ndBrainMatrix& weightGradient);

virtual void CalculateParamGradients(
const ndBrainVector& input, const ndBrainVector& output,
const ndBrainVector& outputDerivative, ndBrainVector& inputGradient, ndBrainLayer* const gradientOut) const;

virtual void Save(const ndBrainSave* const loadSave) const;

void Clear();
void FlushToZero();
void Scale(ndBrainFloat scale);
void Set(const ndBrainLayer& src);
void Add(const ndBrainLayer& src);
void Mul(const ndBrainLayer& src);
void ScaleAdd(const ndBrainLayer& src, ndBrainFloat scale);

void AdamUpdate(const ndBrainLayer& u, const ndBrainLayer& v, ndBrainFloat epsilon);
protected:
ndInt32 m_neurons;
};
Expand Down
Loading

0 comments on commit 42d69db

Please sign in to comment.