Merge pull request #119 from cbovar/Simplify_shape
Simplify shape
cbovar authored May 13, 2018
2 parents 7dcdab8 + 2634de7 commit a8dae8d
Showing 46 changed files with 731 additions and 656 deletions.
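
The recurring change throughout the diff is mechanical: every call to Shape.GetDimension(i) is replaced by indexing the shape's Dimensions property. A minimal before/after sketch, assuming only the Shape constructor and members that appear in the hunks below (the namespace, class name, and surrounding harness are illustrative):

using ConvNetSharp.Volume; // namespace assumed

class ShapeAccessExample
{
    static void Main()
    {
        // Shapes are four-dimensional: width, height, depth/class, batch size.
        var shape = new Shape(1, 1, 8, 2);

        // Before this change: dimension lookups went through a method call.
        // var depth = shape.GetDimension(2);

        // After this change: dimensions are read directly from the Dimensions array.
        var depth = shape.Dimensions[2];     // 8
        var batchSize = shape.Dimensions[3]; // 2

        System.Console.WriteLine($"depth={depth}, batchSize={batchSize}");
    }
}
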
8 changes: 4 additions & 4 deletions src/ConvNetSharp.Core.Tests/FullyConnLayerTests.cs
@@ -49,10 +49,10 @@ public void Forward()
layer.Init(inputWidth, inputHeight, inputDepth);

// Make sure the filter shape reflects the flattened input shape
Assert.AreEqual(1, layer.Filters.Shape.GetDimension(0));
Assert.AreEqual(1, layer.Filters.Shape.GetDimension(1));
Assert.AreEqual(8, layer.Filters.Shape.GetDimension(2));
Assert.AreEqual(2, layer.Filters.Shape.GetDimension(3));
Assert.AreEqual(1, layer.Filters.Shape.Dimensions[0]);
Assert.AreEqual(1, layer.Filters.Shape.Dimensions[1]);
Assert.AreEqual(8, layer.Filters.Shape.Dimensions[2]);
Assert.AreEqual(2, layer.Filters.Shape.Dimensions[3]);

for (var i = 0; i < 8; i++)
{
8 changes: 4 additions & 4 deletions src/ConvNetSharp.Core.Tests/PoolLayerTests.cs
@@ -29,10 +29,10 @@ public void Forward()
var input = BuilderInstance.Volume.From(data, new Shape(inputWidth, inputHeight, inputDepth, inputBatchSize));
layer.DoForward(input);

Assert.AreEqual(2, layer.OutputActivation.Shape.GetDimension(0));
Assert.AreEqual(2, layer.OutputActivation.Shape.GetDimension(1));
Assert.AreEqual(4, layer.OutputActivation.Shape.GetDimension(2));
Assert.AreEqual(4, layer.OutputActivation.Shape.GetDimension(3));
Assert.AreEqual(2, layer.OutputActivation.Shape.Dimensions[0]);
Assert.AreEqual(2, layer.OutputActivation.Shape.Dimensions[1]);
Assert.AreEqual(4, layer.OutputActivation.Shape.Dimensions[2]);
Assert.AreEqual(4, layer.OutputActivation.Shape.Dimensions[3]);

Assert.AreEqual(5.0, layer.OutputActivation.Get(0,0,0,0));
Assert.AreEqual(21.0, layer.OutputActivation.Get(0, 0, 1, 0));
8 changes: 4 additions & 4 deletions src/ConvNetSharp.Core.Tests/SoftMaxLayerTests.cs
@@ -30,10 +30,10 @@ public SoftmaxLayerTests()
public void OutputIsNormalized()
{
var output = this.layer.DoForward(input, true);
Assert.AreEqual(1, output.Shape.GetDimension(0));
Assert.AreEqual(1, output.Shape.GetDimension(1));
Assert.AreEqual(4, output.Shape.GetDimension(2));
Assert.AreEqual(3, output.Shape.GetDimension(3));
Assert.AreEqual(1, output.Shape.Dimensions[0]);
Assert.AreEqual(1, output.Shape.Dimensions[1]);
Assert.AreEqual(4, output.Shape.Dimensions[2]);
Assert.AreEqual(3, output.Shape.Dimensions[3]);

var values = output.ToArray();
Assert.AreEqual(0.25, values[0]);
4 changes: 2 additions & 2 deletions src/ConvNetSharp.Core/Fluent/FluentNet.cs
@@ -92,8 +92,8 @@ public int[] GetPrediction()
}

var activation = softmaxLayer.OutputActivation;
var N = activation.Shape.GetDimension(3);
var C = activation.Shape.GetDimension(2);
var N = activation.Shape.Dimensions[3];
var C = activation.Shape.Dimensions[2];
var result = new int[N];

for (var n = 0; n < N; n++)
6 changes: 3 additions & 3 deletions src/ConvNetSharp.Core/Layers/FullyConnLayer.cs
@@ -63,8 +63,8 @@ public override void Backward(Volume<T> outputGradient)
this.OutputActivationGradients = outputGradient;

// compute gradient wrt weights and data
using (var reshapedInput = this.InputActivation.ReShape(1, 1, -1, this.InputActivation.Shape.GetDimension(3)))
using (var reshapedInputGradients = this.InputActivationGradients.ReShape(1, 1, -1, this.InputActivationGradients.Shape.GetDimension(3)))
using (var reshapedInput = this.InputActivation.ReShape(1, 1, -1, this.InputActivation.Shape.Dimensions[3]))
using (var reshapedInputGradients = this.InputActivationGradients.ReShape(1, 1, -1, this.InputActivationGradients.Shape.Dimensions[3]))
{
reshapedInput.ConvolveGradient(
this.Filters, this.OutputActivationGradients,
@@ -77,7 +77,7 @@ public override void Backward(Volume<T> outputGradient)

protected override Volume<T> Forward(Volume<T> input, bool isTraining = false)
{
using (var reshapedInput = input.ReShape(1, 1, -1, input.Shape.GetDimension(3)))
using (var reshapedInput = input.ReShape(1, 1, -1, input.Shape.Dimensions[3]))
{
reshapedInput.DoConvolution(this.Filters, 0, 1, this.OutputActivation);
this.OutputActivation.DoAdd(this.Bias, this.OutputActivation);
2 changes: 1 addition & 1 deletion src/ConvNetSharp.Core/Layers/LayerBase.cs
@@ -67,7 +67,7 @@ public virtual Volume<T> DoForward(Volume<T> input, bool isTraining = false)

this.InputActivation = input;

var outputShape = new Shape(this.OutputWidth, this.OutputHeight, this.OutputDepth, input.Shape.DimensionCount == 4 ? input.Shape.GetDimension(3) : 1);
var outputShape = new Shape(this.OutputWidth, this.OutputHeight, this.OutputDepth, input.Shape.Dimensions[3]);

if (this.OutputActivation == null ||
!this.OutputActivation.Shape.Equals(outputShape))
4 changes: 2 additions & 2 deletions src/ConvNetSharp.Core/Layers/RegressionLayer.cs
@@ -40,7 +40,7 @@ public override void Backward(Volume<T> outputGradient)
public override void Backward(Volume<T> y, out T loss)
{
var reshape = y.ReShape(new Shape(1, 1, -1, Shape.Keep));
var dy = this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions.ToArray());
var dy = this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions);
reshape.DoSubtractFrom(this.OutputActivation, dy);

if (this._result == null)
@@ -54,7 +54,7 @@ public override void Backward(Volume<T> y, out T loss)
var half = (T)Convert.ChangeType(0.5, typeof(T));
this._result.DoMultiply(this._result, half); // dy * dy * 0.5
this._result.DoSum(this._sum); // sum over all batch
var batchSize = y.Shape.GetDimension(3);
var batchSize = y.Shape.Dimensions[3];
loss = Ops<T>.Divide(this._sum.Get(0), Ops<T>.Cast(batchSize)); // average
}

10 changes: 5 additions & 5 deletions src/ConvNetSharp.Core/Layers/SoftMaxLayer.cs
@@ -26,17 +26,17 @@ public SoftmaxLayer(int classCount)
public override void Backward(Volume<T> y, out T loss)
{
// input gradient = pi - yi
y.DoSubtractFrom(this.OutputActivation, this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions.ToArray()));
y.DoSubtractFrom(this.OutputActivation, this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions));

//loss is the class negative log likelihood
loss = Ops<T>.Zero;
for (var n = 0; n < y.Shape.GetDimension(3); n++)
for (var n = 0; n < y.Shape.Dimensions[3]; n++)
{
for (var d = 0; d < y.Shape.GetDimension(2); d++)
for (var d = 0; d < y.Shape.Dimensions[2]; d++)
{
for (var h = 0; h < y.Shape.GetDimension(1); h++)
for (var h = 0; h < y.Shape.Dimensions[1]; h++)
{
for (var w = 0; w < y.Shape.GetDimension(0); w++)
for (var w = 0; w < y.Shape.Dimensions[0]; w++)
{
var expected = y.Get(w, h, d, n);
var actual = this.OutputActivation.Get(w, h, d, n);
4 changes: 2 additions & 2 deletions src/ConvNetSharp.Core/Net.cs
@@ -68,8 +68,8 @@ public int[] GetPrediction()
}

var activation = softmaxLayer.OutputActivation;
var N = activation.Shape.GetDimension(3);
var C = activation.Shape.GetDimension(2);
var N = activation.Shape.Dimensions[3];
var C = activation.Shape.Dimensions[2];
var result = new int[N];

for (var n = 0; n < N; n++)
6 changes: 3 additions & 3 deletions src/ConvNetSharp.Core/Training/TrainerBase.cs
@@ -27,15 +27,15 @@ protected virtual void Backward(Volume<T> y)
{
var chrono = Stopwatch.StartNew();

var batchSize = y.Shape.GetDimension(3);
var batchSize = y.Shape.Dimensions[3];
this.Loss = Ops<T>.Divide(this.Net.Backward(y), Ops<T>.Cast(batchSize));
this.BackwardTimeMs = chrono.Elapsed.TotalMilliseconds/batchSize;
}

private void Forward(Volume<T> x)
{
var chrono = Stopwatch.StartNew();
var batchSize = x.Shape.GetDimension(3);
var batchSize = x.Shape.Dimensions[3];
this.Net.Forward(x, true); // also set the flag that lets the net know we're just training
this.ForwardTimeMs = chrono.Elapsed.TotalMilliseconds/batchSize;
}
@@ -46,7 +46,7 @@ public virtual void Train(Volume<T> x, Volume<T> y)

Backward(y);

var batchSize = x.Shape.GetDimension(3);
var batchSize = x.Shape.Dimensions[3];
var chrono = Stopwatch.StartNew();
TrainImplem();
this.UpdateWeightsTimeMs = chrono.Elapsed.TotalMilliseconds/batchSize;
2 changes: 1 addition & 1 deletion src/ConvNetSharp.Flow.Tests/OpTests.cs
@@ -334,7 +334,7 @@ public void ReshapeDerivate()
result = session.Run(diff,
new Dictionary<string, Volume<T>>
{
{"x", NewVolume(new[] {1.0, 2.0, 3.0, 4.0}, Volume.Shape.From(4))},
{"x", NewVolume(new[] {1.0, 2.0, 3.0, 4.0}, Volume.Shape.From(4, 1, 1, 1))},
{"grad", NewVolume(new[] {1.0, 1.0, 1.0, 1.0}, Volume.Shape.From(1, 1, 4, 1))}
});
Assert.AreEqual(new Shape(4, 1, 1, 1), result.Shape);
4 changes: 2 additions & 2 deletions src/ConvNetSharp.Flow/Net.cs
@@ -62,8 +62,8 @@ public List<ParametersAndGradients<T>> GetParametersAndGradients()
public int[] GetPrediction()
{
var activation = this.Op.Evaluate(this.Session);
var N = activation.Shape.GetDimension(3);
var C = activation.Shape.GetDimension(2);
var N = activation.Shape.Dimensions[3];
var C = activation.Shape.Dimensions[2];
var result = new int[N];

for (var n = 0; n < N; n++)
4 changes: 2 additions & 2 deletions src/ConvNetSharp.Flow/Ops/Concat.cs
@@ -44,9 +44,9 @@ public override Volume<T> Evaluate(Session<T> session)
var left = this.Parents[0].Evaluate(session);
var right = this.Parents[1].Evaluate(session);

var batchSize = Math.Max(left.Shape.GetDimension(3), right.Shape.GetDimension(3));
var batchSize = Math.Max(left.Shape.Dimensions[3], right.Shape.Dimensions[3]);

int totalLength = (int)(left.Shape.TotalLength / left.Shape.GetDimension(3) + right.Shape.TotalLength / right.Shape.GetDimension(3));
int totalLength = (int)(left.Shape.TotalLength / left.Shape.Dimensions[3] + right.Shape.TotalLength / right.Shape.Dimensions[3]);
if (this.Result == null || this.lastTotalLength != totalLength)
{
this.Result?.Dispose();
8 changes: 4 additions & 4 deletions src/ConvNetSharp.Flow/Ops/Const.cs
@@ -88,10 +88,10 @@ public override Dictionary<string, object> GetData()

if (this.OutputShape != null)
{
data["dim0"] = this.OutputShape.GetDimension(0);
data["dim1"] = this.OutputShape.GetDimension(1);
data["dim2"] = this.OutputShape.GetDimension(2);
data["dim3"] = this.OutputShape.GetDimension(3);
data["dim0"] = this.OutputShape.Dimensions[0];
data["dim1"] = this.OutputShape.Dimensions[1];
data["dim2"] = this.OutputShape.Dimensions[2];
data["dim3"] = this.OutputShape.Dimensions[3];
}

return data;
10 changes: 5 additions & 5 deletions src/ConvNetSharp.Flow/Ops/Convolution.cs
@@ -91,19 +91,19 @@ public override Volume<T> Evaluate(Session<T> session)

if (this.Parents[1].Result == null)
{
var count = this.Width * this.Height * x.Shape.GetDimension(2);
var count = this.Width * this.Height * x.Shape.Dimensions[2];
var scale = Math.Sqrt(2.0 / count);

var filterShape = new Shape(this.Width, this.Height, x.Shape.GetDimension(2), this.FilterCount);
var filterShape = new Shape(this.Width, this.Height, x.Shape.Dimensions[2], this.FilterCount);
this.Parents[1].Result = BuilderInstance<T>.Volume.Random(filterShape, 0.0, scale);
}

var outputDepth = this.FilterCount;
var outputWidth = (int)Math.Floor((x.Shape.GetDimension(0) + this.Pad * 2 - this.Width) / (double)this.Stride + 1);
var outputHeight = (int)Math.Floor((x.Shape.GetDimension(1) + this.Pad * 2 - this.Height) / (double)this.Stride + 1);
var outputWidth = (int)Math.Floor((x.Shape.Dimensions[0] + this.Pad * 2 - this.Width) / (double)this.Stride + 1);
var outputHeight = (int)Math.Floor((x.Shape.Dimensions[1] + this.Pad * 2 - this.Height) / (double)this.Stride + 1);

this.Result?.Dispose();
this.Result = BuilderInstance<T>.Volume.SameAs(new Shape(outputWidth, outputHeight, outputDepth, x.Shape.GetDimension(3)));
this.Result = BuilderInstance<T>.Volume.SameAs(new Shape(outputWidth, outputHeight, outputDepth, x.Shape.Dimensions[3]));
}

x.DoConvolution(this.Parents[1].Evaluate(session), this.Pad, this.Stride, this.Result);
2 changes: 1 addition & 1 deletion src/ConvNetSharp.Flow/Ops/Extract.cs
@@ -38,7 +38,7 @@ public override Volume<T> Evaluate(Session<T> session)
var length = (int)Convert.ChangeType(this.Parents[1].Evaluate(session).Get(0), typeof(int)); // TODO: Find a way to keep this on host
var offset = (int)Convert.ChangeType(this.Parents[2].Evaluate(session).Get(0), typeof(int)); // TODO: Find a way to keep this on host

var batchSize = x.Shape.GetDimension(3);
var batchSize = x.Shape.Dimensions[3];

int totalLength = length * batchSize;
if (this.Result == null || this.lastTotalLength != totalLength)
2 changes: 1 addition & 1 deletion src/ConvNetSharp.Flow/Ops/Max.cs
@@ -41,7 +41,7 @@ public override Volume<T> Evaluate(Session<T> session)
this.IsDirty = false;

var x = this.Parents[0].Evaluate(session);
var reshape = x.ReShape(-1, x.Shape.GetDimension(-1));
var reshape = x.ReShape(-1, x.Shape.Dimensions[3]);
var targetShape = new Shape(reshape.Shape);
targetShape.SetDimension(0, 1);

8 changes: 4 additions & 4 deletions src/ConvNetSharp.Flow/Ops/Pool.cs
@@ -67,10 +67,10 @@ public override Volume<T> Evaluate(Session<T> session)
this._lastInputShape = new Shape(x.Shape);

var outputShape = new Shape(
(int) Math.Floor((x.Shape.GetDimension(0) + this.HorizontalPad * 2 - this.Width) / (double) this.HorizontalStride + 1),
(int) Math.Floor((x.Shape.GetDimension(1) + this.VerticalPad * 2 - this.Height) / (double) this.VerticalStride + 1),
x.Shape.GetDimension(2),
x.Shape.GetDimension(3)
(int) Math.Floor((x.Shape.Dimensions[0] + this.HorizontalPad * 2 - this.Width) / (double) this.HorizontalStride + 1),
(int) Math.Floor((x.Shape.Dimensions[1] + this.VerticalPad * 2 - this.Height) / (double) this.VerticalStride + 1),
x.Shape.Dimensions[2],
x.Shape.Dimensions[3]
);

this.Result?.Dispose();
10 changes: 5 additions & 5 deletions src/ConvNetSharp.Flow/Ops/Reshape.cs
@@ -66,7 +66,7 @@ public override Volume<T> Evaluate(Session<T> session)
var shape = this.Parents[1].Evaluate(session);
var s = new[] { shape.Get(0), shape.Get(1), shape.Get(2), shape.Get(3) };
var t = s.Select(o => Convert.ToInt32(o)).ToArray();
this._tempShape = new Shape(t);
this._tempShape = new Shape(t[0], t[1], t[2], t[3]);
this._lastBatchSize = session.BatchSize;
}

@@ -82,10 +82,10 @@ public override Dictionary<string, object> GetData()

if (this.OutputShape != null)
{
data["dim0"] = this.OutputShape.GetDimension(0);
data["dim1"] = this.OutputShape.GetDimension(1);
data["dim2"] = this.OutputShape.GetDimension(2);
data["dim3"] = this.OutputShape.GetDimension(3);
data["dim0"] = this.OutputShape.Dimensions[0];
data["dim1"] = this.OutputShape.Dimensions[1];
data["dim2"] = this.OutputShape.Dimensions[2];
data["dim3"] = this.OutputShape.Dimensions[3];
}

return data;
20 changes: 15 additions & 5 deletions src/ConvNetSharp.Flow/Ops/Shape.cs
@@ -5,6 +5,16 @@

namespace ConvNetSharp.Flow.Ops
{
/// <summary>
/// Describes the shape of the data.
/// Shape always has 4 dimensions: [width, height, class, batch size]
///
/// e.g. A 1D array fits in a volume that has a shape of [1,1,n,1]
/// A 2D array fits in a volume that has a shape of [w,h,1,1]
/// A 2D array with 3 channels (a color image for example) fits in a volume that has a shape of [w,h,3,1]
/// 10 2D arrays (e.g. 10 grayscale images) fit in a volume that has a shape of [w,h,1,10]
/// </summary>
/// <typeparam name="T">type of data (double or float)</typeparam>
public class Shape<T> : Op<T> where T : struct, IEquatable<T>, IFormattable
{
private readonly VolumeBuilder<T> _builder;
@@ -53,14 +63,14 @@ public override Volume<T> Evaluate(Session<T> session)

if (this.Index == -1)
{
this.Result.Set(0, Ops<T>.Cast(y.Shape.GetDimension(0)));
this.Result.Set(1, Ops<T>.Cast(y.Shape.GetDimension(1)));
this.Result.Set(2, Ops<T>.Cast(y.Shape.GetDimension(2)));
this.Result.Set(3, Ops<T>.Cast(y.Shape.GetDimension(3)));
this.Result.Set(0, Ops<T>.Cast(y.Shape.Dimensions[0]));
this.Result.Set(1, Ops<T>.Cast(y.Shape.Dimensions[1]));
this.Result.Set(2, Ops<T>.Cast(y.Shape.Dimensions[2]));
this.Result.Set(3, Ops<T>.Cast(y.Shape.Dimensions[3]));
}
else
{
this.Result.Set(0, Ops<T>.Cast(y.Shape.GetDimension(this.Index)));
this.Result.Set(0, Ops<T>.Cast(y.Shape.Dimensions[this.Index]));
}

return base.Evaluate(session);
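
The XML documentation added to Shape<T> above fixes the convention that every shape has exactly four dimensions: [width, height, class, batch size]. A short sketch of what that convention means for common inputs, using the four-argument Shape constructor seen in the test hunks above (the namespace, class name, and concrete sizes are assumptions for illustration):

using ConvNetSharp.Volume; // namespace assumed

static class ShapeConventionExamples
{
    public static readonly Shape Vector     = new Shape(1, 1, 10, 1);   // a 1-D array of 10 values: [1, 1, n, 1]
    public static readonly Shape GrayImage  = new Shape(28, 28, 1, 1);  // a single 2-D array: [w, h, 1, 1]
    public static readonly Shape ColorImage = new Shape(32, 32, 3, 1);  // a 2-D array with 3 channels: [w, h, 3, 1]
    public static readonly Shape GrayBatch  = new Shape(28, 28, 1, 10); // ten 2-D arrays in one batch: [w, h, 1, 10]
}
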
10 changes: 5 additions & 5 deletions src/ConvNetSharp.Flow/Ops/SoftMaxCrossEntropy.cs
@@ -44,13 +44,13 @@ public override Volume<T> Evaluate(Session<T> session)
var outputActivation = this.Parents[0].Evaluate(session);

var loss = Ops<T>.Zero;
for (var n = 0; n < y.Shape.GetDimension(3); n++)
for (var n = 0; n < y.Shape.Dimensions[3]; n++)
{
for (var d = 0; d < y.Shape.GetDimension(2); d++)
for (var d = 0; d < y.Shape.Dimensions[2]; d++)
{
for (var h = 0; h < y.Shape.GetDimension(1); h++)
for (var h = 0; h < y.Shape.Dimensions[1]; h++)
{
for (var w = 0; w < y.Shape.GetDimension(0); w++)
for (var w = 0; w < y.Shape.Dimensions[0]; w++)
{
var expected = y.Get(w, h, d, n);
var actual = outputActivation.Get(w, h, d, n);
@@ -66,7 +66,7 @@ public override Volume<T> Evaluate(Session<T> session)
}
}

var batchSize = outputActivation.Shape.GetDimension(3);
var batchSize = outputActivation.Shape.Dimensions[3];
loss = Ops<T>.Divide(Ops<T>.Negate(loss), Ops<T>.Cast(batchSize));
this.Result.Set(0, loss);
