using System;
using System.IO;
using System.Linq;

System.Action<string> print = System.Console.WriteLine;
// https://github.com/grensen/neural_network_2022
print("github.com/grensen/convolutional_neural_network");
print("\nBegin convolution neural network demo\n");
// 0. load MNIST data
AutoData d = new(@"C:\mnist\");
// 1. init cnn + nn
int startDimension = 28; // or = (int)Math.Sqrt(784)
var isCnn = true; // true = cnn + nn, false = nn only
int[] cnn = { 1, 8, 16 }; // input channels first: 1 = grayscale (MNIST), 3 = RGB (CIFAR-10), then feature maps per conv layer
int[] filter = { 5, 3 }; // square kernel size per conv layer (5x5, 3x3)
int[] stride = { 2, 1 }; // strides higher than 1 replace pooling
int[] net = { 784, 300, 300, 300, 10 }; // nn layers; net[0] is overwritten by the cnn output size when isCnn is true
var lr = 0.005f;
var momentum = 0.5f;
var lr_mlt = 0.98f;
var mom_mlt = 0.95f;
// 2.0 out conv dimensions: outDim = (inDim - (filter - 1)) / stride; (28 - (5 - 1)) / 2 = 12, (12 - (3 - 1)) / 1 = 10
int[] dim = GetCnnDimensions(cnn.Length - 1, startDimension, filter, stride);
// 2.1 convolution steps for layer wise preparation
int[] cStep = GetCnnSteps(cnn, dim);
// 2.2 kernel steps for layer wise preparation
int[] kStep = GetKernelSteps(cnn, filter);
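// worked example for the demo settings (hand-derived):
// dim = { 28, 12, 10 }, so the conv buffer stacks 784 input pixels,
// then 8 * 144 = 1152 and 16 * 100 = 1600 map neurons: cStep = { 0, 784, 1936, 3536 };
// kStep = { 0, 200 }, since the first layer holds 1 * 8 * 5 * 5 = 200 kernel weights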
// 3.0 init visual based kernel weights
float[] kernel = InitKernel(cnn, filter);
// 3.1 init neural network weights
float[] weight = NeuralNetWeightInit(net, cStep, isCnn);
// print stuff
NetInfo();
// get time
DateTime elapsed = DateTime.Now;
// MNIST training (60k) for n epochs
for (int epoch = 0; epoch < 20; epoch++, lr *= lr_mlt, momentum *= mom_mlt)
CnnTraining(d, isCnn, cnn, filter, stride, kernel, dim, cStep, kStep, net, weight, 60000, epoch, lr, momentum);
Console.WriteLine("Done after " + ((DateTime.Now - elapsed).TotalMilliseconds / 1000.0).ToString("F2") + "s\n");
// MNIST test 10k
CnnTraining(d, isCnn, cnn, filter, stride, kernel, dim, cStep, kStep, net, weight, 10000);
print("\nEnd MNIST CNN demo");
void NetInfo()
{
if (isCnn) print("Convolution = " + string.Join(",", cnn).Replace(",", "-"));
if (isCnn) print("Kernel size = " + string.Join(",", filter).Replace(",", "-"));
if (isCnn) print("Stride step = " + string.Join(",", stride).Replace(",", "-"));
if (isCnn) print("DimensionX = " + string.Join(",", dim).Replace(",", "-"));
if (isCnn) print("Map (Dim²) = " + string.Join(",", dim.Select((x, index) => x * x).ToArray()).Replace(",", "-"));
if (isCnn) print("CNN = " + string.Join(",", cnn.Select((x, index) => x * dim[index] * dim[index]).ToArray()).Replace(",", "-"));
print("NN = " + string.Join(",", net).Replace(",", "-"));
if (isCnn) print("Kernel weights = " + kernel.Length.ToString());
print("Network weights = " + weight.Length.ToString());
print("Learning = " + lr.ToString("F3") + " | MLT = " + lr_mlt.ToString("F2"));
print("Momentum = " + momentum.ToString("F2") + " | MLT = " + mom_mlt.ToString("F2"));
print("\nStarting training");
}
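// expected NetInfo output for the demo settings (hand-derived, not captured from a run):
// Convolution = 1-8-16, Kernel size = 5-3, Stride step = 2-1, DimensionX = 28-12-10,
// Map (Dim²) = 784-144-100, CNN = 784-1152-1600, NN = 1600-300-300-300-10,
// Kernel weights = 1352, Network weights = 663000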
// 5.0 core run
static void CnnTraining(AutoData d, // data stuff
bool isCnn, int[] cnn, int[] filter, int[] stride, float[] kernel, int[] dim, int[] cSteps, int[] kStep, // cnn stuff
int[] net, float[] weight, // nn stuff
int len, int epoch = -1, float lr = 0, float mom = 0) // optional hyperparameters for training only
{
DateTime elapsed = DateTime.Now;
int correct = 0, all = 0;
// no learning rate means test mode, otherwise training
bool training = lr != 0;
// cnn stuff
int cnn_layerLen = cnn.Length - 1;
int cnn_neuronLen = GetCnnNeuronsLen(28, cnn, dim);
int cOutput = cnn_neuronLen - cSteps[cnn_layerLen];
// nn stuff
int weightLen = weight.Length, neuronLen = GetNeuronsLen(net);
int layer = net.Length - 1, input = net[0], output = net[layer]; // output neurons
int inputHidden = neuronLen - output; // size of input and hidden neurons
// correction value for each neural network weight
float[] delta = training ? new float[weight.Length] : null;
// start training each epoch
for (int x = 1, batch = 1; x < len + 1; x++)
{
// get target label
int target = d.GetLabel(x - 1, training);
// create neurons arrays for nn and cnn
float[] neuron = null, conv = null;
// feed input sample and set arrays
if (isCnn) // cnn active
{
// feed image and set arrays
conv = FeedSample(d.GetSample(x - 1, training), cnn_neuronLen);
neuron = new float[neuronLen];
// convolution feed forward
ConvolutionForward(cnn, dim, cSteps, filter, kStep, stride, conv, kernel);
// send last cnn layer to first nn layer
for (int i = 0, ii = cSteps[cnn_layerLen]; i < cOutput; i++) neuron[i] = conv[ii + i];
}
else // just neural net, no cnn
{
neuron = FeedSample(d.GetSample(x - 1, training), neuronLen);
}
// neural net feed forward
int prediction = FeedForward(net, neuron, weight, layer, output, inputHidden);
// general network prediction
correct += prediction == target ? 1 : 0; all++; // count correct predictions and total samples
// test zone ends here
if (!training) continue; // test mode: no backprop or update
// skip backprop if the target output probability is already near certain
if (neuron[inputHidden + target] >= 0.99) continue;
// nn gradient array
float[] gradient = new float[neuronLen];
// nn backprop
Backprop(net, neuron, weight, gradient, delta, target, layer, inputHidden, neuronLen, weightLen);
// count batch size
batch++;
// cnn backprop
if (isCnn)
{
// create cnn gradient array
float[] cnnGradient = new float[cSteps[cnn_layerLen] + cOutput];
// send gradient from the first nn input layer back to the last cnn layer
for (int i = 0, ii = cSteps[cnn_layerLen]; i < cOutput; i++, ii++)
cnnGradient[ii] = gradient[i];
// convolution backprop with kernel update - TODO: add delta and batch support
ConvolutionBackprop(cnn, dim, cSteps, filter, kStep, stride, conv, kernel, cnnGradient);
}
// update
if (prediction == target) continue;
Update(net, weight, delta, layer, (neuronLen / layer * 1.0f) / (batch + 1), lr, mom);
batch = 0;
} // runs end
if (lr == 0)
Console.WriteLine("Accuracy on test data = " + (correct * 100.0 / all).ToString("F2")
+ "% after " + ((DateTime.Now - elapsed).TotalMilliseconds / 1000.0).ToString("F2") + "s");
else
Console.WriteLine("epoch = " + epoch.ToString().PadLeft(2) + " | acc = " + (correct * 100.0 / all).ToString("F2").PadLeft(6)
+ "% | time = " + ((DateTime.Now - elapsed).TotalMilliseconds / 1000.0).ToString("F2") + "s");
//
static int GetCnnNeuronsLen(int startDimension, int[] cnn, int[] dim)
{
int cnn_layerLen = cnn.Length - 1;
int cnn_neuronLen = startDimension * startDimension; // add input first
for (int i = 0; i < cnn_layerLen; i++)
cnn_neuronLen += cnn[i + 1] * dim[i + 1] * dim[i + 1];
return cnn_neuronLen;
}
//
static int GetNeuronsLen(int[] net)
{
int sum = 0;
for (int n = 0; n < net.Length; n++) sum += net[n];
return sum;
}
}
// 2.0 create conv dimensions
static int[] GetCnnDimensions(int cnn_layerLen, int startDimension, int[] filter, int[] stride)
{
int[] dim = new int[cnn_layerLen + 1];
for (int i = 0, c_dim = (dim[0] = startDimension); i < cnn_layerLen; i++)
dim[i + 1] = c_dim = (c_dim - (filter[i] - 1)) / stride[i];
return dim;
}
// 2.1 convolution steps
static int[] GetCnnSteps(int[] cnn, int[] dim)
{
int cnn_layerLen = cnn.Length - 1;
int[] cs = new int[cnn_layerLen + 2];
cs[1] = dim[0] * dim[0]; // startDimension^2
for (int i = 0, sum = cs[1]; i < cnn_layerLen; i++)
cs[i + 2] = sum += cnn[i + 1] * dim[i + 1] * dim[i + 1];
return cs;
}
// 2.2 kernel steps
static int[] GetKernelSteps(int[] cnn, int[] filter)
{
// offsets into the structure of kernel weights, accumulated layer by layer
int cnn_layerLen = cnn.Length - 1;
int[] ks = new int[cnn_layerLen];
for (int i = 0; i < cnn_layerLen - 1; i++)
    ks[i + 1] = ks[i] + cnn[i + 0] * cnn[i + 1] * filter[i] * filter[i];
return ks;
}
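// example: cnn = { 1, 8, 16 }, filter = { 5, 3 } gives ks = { 0, 200 } (1 * 8 * 5 * 5 = 200);
// a hypothetical third conv layer, cnn = { 1, 8, 16, 32 } with filter = { 5, 3, 3 },
// needs the cumulative form: ks = { 0, 200, 200 + 8 * 16 * 3 * 3 } = { 0, 200, 1352 }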
// 3.0 init visual based kernel weights
static float[] InitKernel(int[] cnn, int[] filter)
{
int cnn_layerLen = cnn.Length - 1, cnn_weightLen = 0;
for (int i = 0; i < cnn_layerLen; i++) cnn_weightLen += cnn[i + 0] * cnn[i + 1] * filter[i] * filter[i];
float[] kernel = new float[cnn_weightLen];
Erratic rnd = new(1234567);
for (int i = 0, c = 0; i < cnn_layerLen; i++)
{
float sd = MathF.Sqrt(6.0f / ((cnn[i] + cnn[i + 1]) * filter[i] * filter[i]));
for (int j = 0, f = filter[i]; j < cnn[i + 1]; j++)
for (int k = 0; k < cnn[i + 0]; k++)
for (int u = 0; u < f; u++)
for (int v = 0; v < f; v++, c++)
// kernel[c] = rnd.NextFloat(-1.0f / (f * f / 1.0f), 1.0f / (f * f / 1.0f)); //
// kernel[c] = rnd.NextFloat(-sd * 1.0f, sd * 1.0f) * 1.0f;
kernel[c] = rnd.NextFloat(-1.0f, 1.0f) / (filter[i] * filter[i] * 0.5f);
}
return kernel;
}
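// hand-computed init ranges from NextFloat(-1, 1) / (f * f * 0.5f):
// 5x5 kernels: ±1 / 12.5 = ±0.08, 3x3 kernels: ±1 / 4.5 ≈ ±0.22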
// 3.1 init neural network weights
static float[] NeuralNetWeightInit(int[] net, int[] cStep, bool isCnn)
{
// 3.0.1 fit nn input to cnn output
if (isCnn) SetNeuralNetInputDimension(cStep, net);
// 3.1 init neural network weights
return Glorot(net);
// 3.0.1 fit nn input to cnn output
static void SetNeuralNetInputDimension(int[] convStep, int[] net)
{
net[0] = convStep[^1] - convStep[^2]; // cnn output length
}
// 3.1 glorot nn weights init
static float[] Glorot(int[] net)
{
int len = 0;
for (int n = 0; n < net.Length - 1; n++) len += net[n] * net[n + 1];
float[] weight = new float[len];
Erratic rnd = new(12345);
for (int i = 0, w = 0; i < net.Length - 1; w += net[i] * net[i + 1], i++) // layer
{
float sd = (float)Math.Sqrt(6.0f / (net[i] + net[i + 1]));
for (int m = w; m < w + net[i] * net[i + 1]; m++) // weights
weight[m] = rnd.NextFloat(-sd * 1.0f, sd * 1.0f);
}
return weight;
}
}
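// hand-computed Glorot bounds sd = sqrt(6 / (fanIn + fanOut)) for the demo net { 1600, 300, 300, 300, 10 }:
// 1600-300: sqrt(6 / 1900) ≈ 0.056, 300-300: sqrt(6 / 600) = 0.100, 300-10: sqrt(6 / 310) ≈ 0.139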
// 4.0 input sample
static float[] FeedSample(Sample s, int neuronLen)
{
float[] neuron = new float[neuronLen];
for (int i = 0; i < 784; i++) neuron[i] = s.sample[i];
return neuron;
}
// 4.1 cnn ff
static void ConvolutionForward(int[] cnn, int[] dim, int[] cs, int[] filter, int[] kstep, int[] stride, float[] conv, float[] kernel)
{
for (int i = 0; i < cnn.Length - 1; i++)
{
int left = cnn[i], right = cnn[i + 1], lDim = dim[i], rDim = dim[i + 1], lStep = cs[i + 0], rStep = cs[i + 1],
kd = filter[i], ks = kstep[i], st = stride[i], lMap = lDim * lDim, rMap = rDim * rDim, kMap = kd * kd, sDim = st * lDim;
// convolution
for (int l = 0, ls = lStep; l < left; l++, ls += lMap) // input channel feature map
for (int r = 0, rs = rStep; r < right; r++, rs += rMap) // output channel feature map
{
int k = rs; // output map position
for (int y = 0, w = ks + (l * right + r) * kMap; y < rDim; y++) // conv dim y
for (int x = 0; x < rDim; x++, k++) // conv dim x
{
float sum = 0;
int j = ls + y * sDim + x * st; // input map position for kernel operation
for (int col = 0, fid = 0; col < kd; col++) // filter dim y
for (int row = col * lDim, len = row + kd; row < len; row++, fid++) // filter dim x
sum += conv[j + row] * kernel[w + fid];
conv[k] += sum;
}
}
// relu activation
for (int r = 0, kN = rStep; r < right; r++, kN += rMap) // output maps
for (int k = kN, K = k + rMap; k < K; k++) // conv map
{
float sum = conv[k];
conv[k] = sum > 0 ? sum * left : 0; // relu activation, scaled up by the input map count
}
}
}
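// index walkthrough for the first conv layer of the demo (hand-derived):
// left = 1, right = 8, lDim = 28, rDim = 12, kd = 5, st = 2, sDim = 56;
// output pixel (y, x) of map r starts its 5x5 input window at j = y * 56 + x * 2
// and multiplies it with the 25 kernel weights starting at w = r * 25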
// 4.2 nn ff
static int FeedForward(int[] net, float[] neuron, float[] weight, int layer, int output, int inputHidden)
{
// feed forward
for (int i = 0, k = net[0], w = 0, j = 0; i < layer; i++)
{
int left = net[i], right = net[i + 1];
for (int l = 0; l < left; l++)
{
float n = neuron[j + l];
if (n > 0) for (int r = 0; r < right; r++) neuron[k + r] += n * weight[w + r];
w += right;
}
j += left; k += right;
}
float max = neuron[inputHidden]; // init max with class 0 output
int prediction = 0; // init class 0 prediction
for (int i = 1; i < output; i++)
{
float n = neuron[i + inputHidden];
if (n > max) { max = n; prediction = i; } // grab maxout prediction here
}
float scale = 0; // softmax with max trick
for (int n = inputHidden, N = inputHidden + output; n != N; n++) scale += neuron[n] = MathF.Exp(neuron[n] - max);
for (int n = inputHidden, N = inputHidden + output; n != N; n++) neuron[n] = neuron[n] / scale;
return prediction;
}
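// note: softmax is shift invariant, so exp(z - max) avoids float overflow, e.g. (hand-computed)
// z = { 2, 1, -1 } -> exp{ 0, -1, -3 } ≈ { 1.000, 0.368, 0.050 } -> p ≈ { 0.705, 0.259, 0.035 },
// the same probabilities as exp(z) / sum(exp(z)) but safe for large activations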
// 4.3 nn bp
static void Backprop(int[] net, float[] neuron, float[] weight, float[] gradient, float[] delta,
int target, int layer, int inputHidden, int neuronLen, int weightLen)
{
// output error (target - output)
for (int n = inputHidden, nc = 0; n < neuronLen; n++, nc++)
gradient[n] = target == nc ? 1 - neuron[n] : -neuron[n];
// all gradients mlp
for (int i = layer - 1, j = inputHidden, k = neuronLen, ww = weightLen; i >= 0; i--)
{
int left = net[i], right = net[i + 1]; j -= left; k -= right; ww -= right * left;
for (int l = 0, w = ww; l < left; l++)
{
float gra = 0, n = neuron[j + l];
if (n > 0) for (int r = 0; r < right; r++)
gra += weight[w + r] * gradient[k + r];
w += right;
gradient[j + l] = gra;
}
}
// all deltas mlp
for (int i = layer - 1, j = inputHidden, k = neuronLen, ww = weightLen; i >= 0; i--)
{
int left = net[i], right = net[i + 1]; j -= left; k -= right; ww -= right * left;
for (int l = 0, w = ww; l < left; l++)
{
float n = neuron[j + l];
if (n > 0) for (int r = 0; r < right; r++)
delta[w + r] += n * gradient[k + r];
w += right;
}
}
}
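// note: for a softmax output with cross entropy loss the error term is dL/dz_i = p_i - t_i;
// the code stores the negated form t_i - p_i, which is why Update() adds delta * lr
// instead of subtracting it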
// 4.4 cnn bp
static void ConvolutionBackprop(int[] cnn, int[] dim, int[] cs, int[] filter, int[] kstep, int[] stride, float[] conv, float[] kernel, float[] cGradient)
{
// convolution gradient
for (int i = cnn.Length - 2; i >= 1; i--)
{
    int left = cnn[i], right = cnn[i + 1], lDim = dim[i], rDim = dim[i + 1], lStep = cs[i + 0], rStep = cs[i + 1],
    kd = filter[i], ks = kstep[i], st = stride[i], lMap = lDim * lDim, rMap = rDim * rDim, kMap = kd * kd, sDim = st * lDim;
    for (int l = 0, ls = lStep; l < left; l++, ls += lMap) // input channel feature map
        for (int r = 0, rs = rStep; r < right; r++, rs += rMap) // output channel feature map
            for (int y = 0, k = rs, w = ks + (l * right + r) * kMap; y < rDim; y++) // conv dim y
                for (int x = 0; x < rDim; x++, k++) // conv dim x
                    if (conv[k] > 0) // relu derivative
                    {
                        float gra = cGradient[k];
                        int j = ls + y * sDim + x * st; // input map position
                        for (int col = 0, fid = 0; col < kd; col++) // filter dim y cols
                            for (int row = col * lDim, len = row + kd; row < len; row++, fid++) // filter dim x rows
                                cGradient[j + row] += kernel[w + fid] * gra;
                    }
}
// kernel delta with kernel weights update
for (int i = cnn.Length - 2; i >= 0; i--)
{
    int left = cnn[i], right = cnn[i + 1], lDim = dim[i], rDim = dim[i + 1], lStep = cs[i + 0], rStep = cs[i + 1],
    kd = filter[i], ks = kstep[i], st = stride[i], lMap = lDim * lDim, rMap = rDim * rDim, kMap = kd * kd, sDim = st * lDim;
    for (int l = 0, ls = lStep; l < left; l++, ls += lMap) // input channel feature map
        for (int r = 0, rs = rStep; r < right; r++, rs += rMap) // output channel feature map
            for (int y = 0, k = rs, w = ks + (l * right + r) * kMap; y < rDim; y++) // conv dim y
                for (int x = 0; x < rDim; x++, k++) // conv dim x
                    if (conv[k] > 0) // relu derivative
                    {
                        float gra = cGradient[k];
                        int j = ls + y * sDim + x * st; // input map position
                        for (int col = 0, fid = 0; col < kd; col++) // filter dim y cols
                            for (int row = col * lDim, len = row + kd; row < len; row++, fid++) // filter dim x rows
                                kernel[w + fid] += conv[j + row] * gra * 0.005f; // hardcoded kernel learning rate
                    }
}
}
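// note: the first pass routes each output gradient back through the same kernel taps that
// produced it (the input gradient), the second pass reuses those taps to update the kernel
// in place with the hardcoded 0.005f rate - see the TODO above about delta and batch support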
// 4.5 update
static void Update(int[] net, float[] weight, float[] delta, int layer, float mlt, float lr, float mom)
{
for (int i = 0, mStep = 0; i < layer; mStep += net[i] * net[i + 1], i++) // layers
{
float oneUp = (float)Math.Sqrt(2.0f / (net[i + 1] + net[i])) * mlt;
for (int m = mStep, mEnd = mStep + net[i] * net[i + 1]; m < mEnd; m++) // weights
{
float del = delta[m], s2 = del * del;
if (s2 > oneUp) continue; // skip overwhelming deltas (crude update clipping)
weight[m] += del * lr;
delta[m] = del * mom;
}
}
}
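// per weight: if delta² stays under the layer-scaled bound sqrt(2 / (fanIn + fanOut)) * mlt
// (a crude guard against exploding updates), apply w += lr * delta and let the remaining
// delta decay by the momentum factor for the next batch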
//
struct Sample
{
public float[] sample;
public int label;
}
struct AutoData // https://github.com/grensen/easy_regression#autodata
{
public string source;
public byte[] samplesTest, labelsTest;
public byte[] samplesTraining, labelsTraining;
public AutoData(string yourPath)
{
this.source = yourPath;
// hardcoded urls from my github
string trainDataUrl = "https://github.com/grensen/gif_test/raw/master/MNIST_Data/train-images.idx3-ubyte";
string trainLabelUrl = "https://github.com/grensen/gif_test/raw/master/MNIST_Data/train-labels.idx1-ubyte";
string testDataUrl = "https://github.com/grensen/gif_test/raw/master/MNIST_Data/t10k-images.idx3-ubyte";
string testLabelUrl = "https://github.com/grensen/gif_test/raw/master/MNIST_Data/t10k-labels.idx1-ubyte";
// change easy names
string d1 = @"trainData", d2 = @"trainLabel", d3 = @"testData", d4 = @"testLabel";
if (!File.Exists(yourPath + d1)
|| !File.Exists(yourPath + d2)
|| !File.Exists(yourPath + d3)
|| !File.Exists(yourPath + d4))
{
System.Console.WriteLine("\nData does not exist");
if (!Directory.Exists(yourPath)) Directory.CreateDirectory(yourPath);
// idx header bytes to skip: data = 16, labels = 8
System.Console.WriteLine("Download MNIST dataset from GitHub");
this.samplesTraining = (new System.Net.WebClient().DownloadData(trainDataUrl)).Skip(16).Take(60000 * 784).ToArray();
this.labelsTraining = (new System.Net.WebClient().DownloadData(trainLabelUrl)).Skip(8).Take(60000).ToArray();
this.samplesTest = (new System.Net.WebClient().DownloadData(testDataUrl)).Skip(16).Take(10000 * 784).ToArray();
this.labelsTest = (new System.Net.WebClient().DownloadData(testLabelUrl)).Skip(8).Take(10000).ToArray();
System.Console.WriteLine("Save cleaned MNIST data into folder " + yourPath + "\n");
File.WriteAllBytes(yourPath + d1, this.samplesTraining);
File.WriteAllBytes(yourPath + d2, this.labelsTraining);
File.WriteAllBytes(yourPath + d3, this.samplesTest);
File.WriteAllBytes(yourPath + d4, this.labelsTest); return;
}
// data on the system, just load from yourPath
System.Console.WriteLine("Load MNIST data from " + yourPath + "\n");
this.samplesTraining = File.ReadAllBytes(yourPath + d1).Take(60000 * 784).ToArray();
this.labelsTraining = File.ReadAllBytes(yourPath + d2).Take(60000).ToArray();
this.samplesTest = File.ReadAllBytes(yourPath + d3).Take(10000 * 784).ToArray();
this.labelsTest = File.ReadAllBytes(yourPath + d4).Take(10000).ToArray();
}
public Sample GetSample(int id, bool isTrain)
{
Sample s = new();
s.sample = new float[784];
if (isTrain) for (int i = 0; i < 784; i++)
s.sample[i] = samplesTraining[id * 784 + i] / 255f;
else for (int i = 0; i < 784; i++)
s.sample[i] = samplesTest[id * 784 + i] / 255f;
s.label = isTrain ? labelsTraining[id] : labelsTest[id];
return s;
}
public int GetLabel(int id, bool isTrain)
{
return isTrain ? labelsTraining[id] : labelsTest[id];
}
}
class Erratic // https://jamesmccaffrey.wordpress.com/2019/05/20/a-pseudo-pseudo-random-number-generator/
{
private float seed;
public Erratic(float seed2)
{
this.seed = 0.5f + seed2; // avoid 0
}
public float Next()
{
double x = Math.Sin(this.seed) * 1000;
double result = x - Math.Floor(x); // [0.0,1.0)
this.seed = (float)result; // for next call
return (float)result;
}
public float NextFloat(float lo, float hi)
{
float x = this.Next();
return (hi - lo) * x + lo;
}
};
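// usage sketch (hypothetical, not part of the demo flow): the sin-based generator is fully
// deterministic for a fixed seed, which makes the weight inits above reproducible, e.g.:
// var rnd = new Erratic(12345);
// float w = rnd.NextFloat(-0.1f, 0.1f); // yields the same value on every program run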