smnn.out
pqR version 2.15.1 (2020-07-23), based on R 2.15.0 (2012-03-30)
R 2.15.0 is Copyright (C) 2012 The R Foundation for Statistical Computing
ISBN 3-900051-07-0
Modifications to R in pqR are Copyright (C) 2013-2020 Radford M. Neal
Some modules are from R-2.15.1 or later versions distributed by the R Core Team
Platform: x86_64-apple-darwin17.7.0 (64-bit)
R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.
Natural language support but running in an English locale
R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.
Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.
No helper threads, task merging enabled, uncompressed pointers.
> # Compute the output of a network with two hidden layers
> # (with tanh activation function), when given 'x' as input.
> # Parameters are in the list 'L'.
>
> nnet <- function (x, L)
+ {
+ h1 <- tanh (L$b1 + x %*% L$W1)
+ h2 <- tanh (L$b2 + h1 %*% L$W2)
+ as.vector (L$b3 + h2 %*% L$W3)
+ }
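# Added sketch (not part of the recorded session): evaluating 'nnet'
# on a hypothetical hand-built parameter list for a network with 2
# inputs and hidden layers of 3 and 2 units.  Runs in plain R as
# well as pqR.

Lt <- list (b1 = rep(0,3), W1 = matrix(0.1,2,3),
            b2 = rep(0,2), W2 = matrix(0.1,3,2),
            b3 = 0,        W3 = rep(0.1,2))
nnet (c(0.5,0.5), Lt)     # one scalar prediction for one input point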
>
>
> # Train network with hidden layers of n1 and n2 units on
> # data in 'X' and 'y'. Initialize with std. dev. 'sd'. Do
> # 'iters' gradient descent iterations with stepsize 'step'.
>
> train <- function (X, y, n1, n2, iters, step, sd=0.1)
+ {
+ # Initialize parameters randomly.
+
+ L <- list()
+ n0 <- ncol(X)
+ L$b1 <- rnorm (n1,sd=sd)
+ L$W1 <- matrix (rnorm(n0*n1,sd=sd), n0, n1)
+ L$b2 <- rnorm (n2,sd=sd)
+ L$W2 <- matrix (rnorm(n1*n2,sd=sd), n1, n2)
+ L$b3 <- rnorm (1,sd=sd)
+ L$W3 <- rnorm (n2,sd=sd)
+
+ # Train for 'iters' iterations to minimize squared
+ # error predicting 'y'.
+
+ for (i in 1:iters) {
+
+ # Find gradient of squared error (summed over all
+ # training examples) with respect to the parameters.
+
+ r <- with gradient (L) {
+ e <- 0
+ for (i in 1:nrow(X)) {
+ o <- nnet (X[i,], L)
+ e <- e + (y[i]-o)^2
+ }
+ e
+ }
+
+ g <- attr(r,"gradient")
+
+ if (i %% 100 == 0)
+ cat ("Iteration", i, ": Error", round(r,4), "\n")
+
+ # Update parameters to reduce squared error.
+
+ L$b1 <- L$b1 - step * as.vector (g$b1)
+ L$W1 <- L$W1 - step * as.vector (g$W1)
+ L$b2 <- L$b2 - step * as.vector (g$b2)
+ L$W2 <- L$W2 - step * as.vector (g$W2)
+ L$b3 <- L$b3 - step * as.vector (g$b3)
+ L$W3 <- L$W3 - step * as.vector (g$W3)
+ }
+
+ L
+ }
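# Added note: 'with gradient' is pqR-specific automatic
# differentiation; as the output further below shows, it returns the
# value of its body with a "gradient" attribute holding derivatives
# with respect to the listed variables (here the elements of L).  As
# a purely illustrative cross-check that runs in any R, the
# derivative with respect to a single parameter, say b3, can be
# approximated by finite differences:

sqerr <- function (X, y, L)   # summed squared error, as in 'train'
    sum (sapply (1:nrow(X), function (i) (y[i] - nnet(X[i,],L))^2))

fd_b3 <- function (X, y, L, eps=1e-6) {   # hypothetical helper
    Lp <- L; Lp$b3 <- L$b3 + eps
    (sqerr(X,y,Lp) - sqerr(X,y,L)) / eps  # should closely match g$b3
}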
>
>
> # Example of learning a 2D function.
>
> set.seed(1)
>
> pdf("smnn.pdf",width=8,height=4.6)
> par(mfrow=c(1,2))
>
> truef <- function (X) cos (2*sqrt(X[,1]*X[,2])) - 2*(0.4-X[,1])^2
>
> grid <- seq(0,1,length=101)
> Xgrid <- cbind (rep(grid,times=101), rep(grid,each=101))
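# Added note: the two rep() calls enumerate all 101*101 (x1,x2)
# pairs, with the first coordinate varying fastest.  An equivalent
# base-R construction is
#
#   Xgrid <- as.matrix (expand.grid (grid, grid))
#
# since expand.grid also varies its first argument fastest.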
>
> contour (matrix(truef(Xgrid),101,101), levels=seq(-1,1,by=0.1))
> title("True function")
>
> N <- 100
> X <- cbind (runif(N), runif(N))
> y <- truef (X) + rnorm(N,sd=0.01)
>
> print (system.time (L <- train (X, y, 10, 10, 30000, 0.001)))
Iteration 100 : Error 14.3896
Iteration 200 : Error 2.5427
Iteration 300 : Error 2.352
Iteration 400 : Error 2.2085
Iteration 500 : Error 2.084
Iteration 600 : Error 1.9662
Iteration 700 : Error 1.848
Iteration 800 : Error 1.7255
Iteration 900 : Error 1.5968
Iteration 1000 : Error 1.4622
Iteration 1100 : Error 1.3236
Iteration 1200 : Error 1.1845
Iteration 1300 : Error 1.0492
Iteration 1400 : Error 0.9226
Iteration 1500 : Error 0.8087
Iteration 1600 : Error 0.7101
Iteration 1700 : Error 0.6279
Iteration 1800 : Error 0.5612
Iteration 1900 : Error 0.5081
Iteration 2000 : Error 0.466
Iteration 2100 : Error 0.4323
Iteration 2200 : Error 0.4046
Iteration 2300 : Error 0.3812
Iteration 2400 : Error 0.3606
Iteration 2500 : Error 0.3419
Iteration 2600 : Error 0.3244
Iteration 2700 : Error 0.3079
Iteration 2800 : Error 0.2921
Iteration 2900 : Error 0.2769
Iteration 3000 : Error 0.2623
Iteration 3100 : Error 0.2483
Iteration 3200 : Error 0.235
Iteration 3300 : Error 0.2225
Iteration 3400 : Error 0.2109
Iteration 3500 : Error 0.2002
Iteration 3600 : Error 0.1904
Iteration 3700 : Error 0.1816
Iteration 3800 : Error 0.1738
Iteration 3900 : Error 0.1669
Iteration 4000 : Error 0.1609
Iteration 4100 : Error 0.1558
Iteration 4200 : Error 0.1514
Iteration 4300 : Error 0.1477
Iteration 4400 : Error 0.1446
Iteration 4500 : Error 0.142
Iteration 4600 : Error 0.1397
Iteration 4700 : Error 0.1378
Iteration 4800 : Error 0.1362
Iteration 4900 : Error 0.1348
Iteration 5000 : Error 0.1335
Iteration 5100 : Error 0.1323
Iteration 5200 : Error 0.1313
Iteration 5300 : Error 0.1303
Iteration 5400 : Error 0.1294
Iteration 5500 : Error 0.1285
Iteration 5600 : Error 0.1277
Iteration 5700 : Error 0.1268
Iteration 5800 : Error 0.126
Iteration 5900 : Error 0.1252
Iteration 6000 : Error 0.1244
Iteration 6100 : Error 0.1236
Iteration 6200 : Error 0.1227
Iteration 6300 : Error 0.1219
Iteration 6400 : Error 0.1211
Iteration 6500 : Error 0.1203
Iteration 6600 : Error 0.1194
Iteration 6700 : Error 0.1186
Iteration 6800 : Error 0.1178
Iteration 6900 : Error 0.1169
Iteration 7000 : Error 0.116
Iteration 7100 : Error 0.1152
Iteration 7200 : Error 0.1143
Iteration 7300 : Error 0.1134
Iteration 7400 : Error 0.1125
Iteration 7500 : Error 0.1116
Iteration 7600 : Error 0.1107
Iteration 7700 : Error 0.1097
Iteration 7800 : Error 0.1088
Iteration 7900 : Error 0.1078
Iteration 8000 : Error 0.1068
Iteration 8100 : Error 0.1059
Iteration 8200 : Error 0.1049
Iteration 8300 : Error 0.1039
Iteration 8400 : Error 0.1028
Iteration 8500 : Error 0.1018
Iteration 8600 : Error 0.1008
Iteration 8700 : Error 0.0997
Iteration 8800 : Error 0.0987
Iteration 8900 : Error 0.0976
Iteration 9000 : Error 0.0965
Iteration 9100 : Error 0.0954
Iteration 9200 : Error 0.0943
Iteration 9300 : Error 0.0932
Iteration 9400 : Error 0.0921
Iteration 9500 : Error 0.0909
Iteration 9600 : Error 0.0898
Iteration 9700 : Error 0.0887
Iteration 9800 : Error 0.0875
Iteration 9900 : Error 0.0864
Iteration 10000 : Error 0.0852
Iteration 10100 : Error 0.0841
Iteration 10200 : Error 0.0829
Iteration 10300 : Error 0.0818
Iteration 10400 : Error 0.0806
Iteration 10500 : Error 0.0795
Iteration 10600 : Error 0.0784
Iteration 10700 : Error 0.0772
Iteration 10800 : Error 0.0761
Iteration 10900 : Error 0.075
Iteration 11000 : Error 0.0739
Iteration 11100 : Error 0.0729
Iteration 11200 : Error 0.0718
Iteration 11300 : Error 0.0707
Iteration 11400 : Error 0.0697
Iteration 11500 : Error 0.0687
Iteration 11600 : Error 0.0677
Iteration 11700 : Error 0.0668
Iteration 11800 : Error 0.0658
Iteration 11900 : Error 0.0649
Iteration 12000 : Error 0.064
Iteration 12100 : Error 0.0631
Iteration 12200 : Error 0.0623
Iteration 12300 : Error 0.0615
Iteration 12400 : Error 0.0607
Iteration 12500 : Error 0.06
Iteration 12600 : Error 0.0592
Iteration 12700 : Error 0.0585
Iteration 12800 : Error 0.0579
Iteration 12900 : Error 0.0572
Iteration 13000 : Error 0.0566
Iteration 13100 : Error 0.056
Iteration 13200 : Error 0.0554
Iteration 13300 : Error 0.0549
Iteration 13400 : Error 0.0544
Iteration 13500 : Error 0.0539
Iteration 13600 : Error 0.0534
Iteration 13700 : Error 0.053
Iteration 13800 : Error 0.0525
Iteration 13900 : Error 0.0521
Iteration 14000 : Error 0.0517
Iteration 14100 : Error 0.0514
Iteration 14200 : Error 0.051
Iteration 14300 : Error 0.0507
Iteration 14400 : Error 0.0504
Iteration 14500 : Error 0.0501
Iteration 14600 : Error 0.0498
Iteration 14700 : Error 0.0495
Iteration 14800 : Error 0.0493
Iteration 14900 : Error 0.049
Iteration 15000 : Error 0.0488
Iteration 15100 : Error 0.0485
Iteration 15200 : Error 0.0483
Iteration 15300 : Error 0.0481
Iteration 15400 : Error 0.0479
Iteration 15500 : Error 0.0477
Iteration 15600 : Error 0.0475
Iteration 15700 : Error 0.0473
Iteration 15800 : Error 0.0472
Iteration 15900 : Error 0.047
Iteration 16000 : Error 0.0468
Iteration 16100 : Error 0.0466
Iteration 16200 : Error 0.0465
Iteration 16300 : Error 0.0463
Iteration 16400 : Error 0.0462
Iteration 16500 : Error 0.046
Iteration 16600 : Error 0.0459
Iteration 16700 : Error 0.0457
Iteration 16800 : Error 0.0456
Iteration 16900 : Error 0.0455
Iteration 17000 : Error 0.0453
Iteration 17100 : Error 0.0452
Iteration 17200 : Error 0.045
Iteration 17300 : Error 0.0449
Iteration 17400 : Error 0.0448
Iteration 17500 : Error 0.0447
Iteration 17600 : Error 0.0445
Iteration 17700 : Error 0.0444
Iteration 17800 : Error 0.0443
Iteration 17900 : Error 0.0441
Iteration 18000 : Error 0.044
Iteration 18100 : Error 0.0439
Iteration 18200 : Error 0.0438
Iteration 18300 : Error 0.0436
Iteration 18400 : Error 0.0435
Iteration 18500 : Error 0.0434
Iteration 18600 : Error 0.0433
Iteration 18700 : Error 0.0432
Iteration 18800 : Error 0.043
Iteration 18900 : Error 0.0429
Iteration 19000 : Error 0.0428
Iteration 19100 : Error 0.0427
Iteration 19200 : Error 0.0426
Iteration 19300 : Error 0.0424
Iteration 19400 : Error 0.0423
Iteration 19500 : Error 0.0422
Iteration 19600 : Error 0.0421
Iteration 19700 : Error 0.042
Iteration 19800 : Error 0.0419
Iteration 19900 : Error 0.0417
Iteration 20000 : Error 0.0416
Iteration 20100 : Error 0.0415
Iteration 20200 : Error 0.0414
Iteration 20300 : Error 0.0413
Iteration 20400 : Error 0.0412
Iteration 20500 : Error 0.0411
Iteration 20600 : Error 0.0409
Iteration 20700 : Error 0.0408
Iteration 20800 : Error 0.0407
Iteration 20900 : Error 0.0406
Iteration 21000 : Error 0.0405
Iteration 21100 : Error 0.0404
Iteration 21200 : Error 0.0402
Iteration 21300 : Error 0.0401
Iteration 21400 : Error 0.04
Iteration 21500 : Error 0.0399
Iteration 21600 : Error 0.0398
Iteration 21700 : Error 0.0397
Iteration 21800 : Error 0.0396
Iteration 21900 : Error 0.0394
Iteration 22000 : Error 0.0393
Iteration 22100 : Error 0.0392
Iteration 22200 : Error 0.0391
Iteration 22300 : Error 0.039
Iteration 22400 : Error 0.0389
Iteration 22500 : Error 0.0388
Iteration 22600 : Error 0.0386
Iteration 22700 : Error 0.0385
Iteration 22800 : Error 0.0384
Iteration 22900 : Error 0.0383
Iteration 23000 : Error 0.0382
Iteration 23100 : Error 0.0381
Iteration 23200 : Error 0.038
Iteration 23300 : Error 0.0379
Iteration 23400 : Error 0.0377
Iteration 23500 : Error 0.0376
Iteration 23600 : Error 0.0375
Iteration 23700 : Error 0.0374
Iteration 23800 : Error 0.0373
Iteration 23900 : Error 0.0372
Iteration 24000 : Error 0.0371
Iteration 24100 : Error 0.0369
Iteration 24200 : Error 0.0368
Iteration 24300 : Error 0.0367
Iteration 24400 : Error 0.0366
Iteration 24500 : Error 0.0365
Iteration 24600 : Error 0.0364
Iteration 24700 : Error 0.0363
Iteration 24800 : Error 0.0361
Iteration 24900 : Error 0.036
Iteration 25000 : Error 0.0359
Iteration 25100 : Error 0.0358
Iteration 25200 : Error 0.0357
Iteration 25300 : Error 0.0356
Iteration 25400 : Error 0.0355
Iteration 25500 : Error 0.0353
Iteration 25600 : Error 0.0352
Iteration 25700 : Error 0.0351
Iteration 25800 : Error 0.035
Iteration 25900 : Error 0.0349
Iteration 26000 : Error 0.0348
Iteration 26100 : Error 0.0347
Iteration 26200 : Error 0.0346
Iteration 26300 : Error 0.0344
Iteration 26400 : Error 0.0343
Iteration 26500 : Error 0.0342
Iteration 26600 : Error 0.0341
Iteration 26700 : Error 0.034
Iteration 26800 : Error 0.0339
Iteration 26900 : Error 0.0338
Iteration 27000 : Error 0.0336
Iteration 27100 : Error 0.0335
Iteration 27200 : Error 0.0334
Iteration 27300 : Error 0.0333
Iteration 27400 : Error 0.0332
Iteration 27500 : Error 0.0331
Iteration 27600 : Error 0.033
Iteration 27700 : Error 0.0328
Iteration 27800 : Error 0.0327
Iteration 27900 : Error 0.0326
Iteration 28000 : Error 0.0325
Iteration 28100 : Error 0.0324
Iteration 28200 : Error 0.0323
Iteration 28300 : Error 0.0322
Iteration 28400 : Error 0.032
Iteration 28500 : Error 0.0319
Iteration 28600 : Error 0.0318
Iteration 28700 : Error 0.0317
Iteration 28800 : Error 0.0316
Iteration 28900 : Error 0.0315
Iteration 29000 : Error 0.0314
Iteration 29100 : Error 0.0313
Iteration 29200 : Error 0.0311
Iteration 29300 : Error 0.031
Iteration 29400 : Error 0.0309
Iteration 29500 : Error 0.0308
Iteration 29600 : Error 0.0307
Iteration 29700 : Error 0.0306
Iteration 29800 : Error 0.0305
Iteration 29900 : Error 0.0304
Iteration 30000 : Error 0.0302
   user  system elapsed 
116.838   2.077 120.064 
>
> print(L)
$b1
[1] 0.03312670 0.20018262 0.24676223 -0.01408799 -0.13190646 0.95526175
[7] 0.83734081 0.14606761 -0.02462760 -0.12726358
$W1
[,1] [,2] [,3] [,4] [,5] [,6]
[1,] 0.03525711 0.02912505 -0.3205619 -0.1552310 0.7020654 -1.278785502
[2,] 0.16116601 -0.30141713 0.7138095 -0.1129879 0.4641709 0.006622717
[,7] [,8] [,9] [,10]
[1,] -0.57090636 -0.4428152 0.1285794 0.08687519
[2,] -0.01354628 0.4079685 -0.4365000 0.15498818
$b2
[1] -0.34293889 -0.20020641 0.02076739 -0.08903067 -0.43495754 -0.05843044
[7] 0.19194693 -0.15688172 0.11293244 -0.01667223
$W2
[,1] [,2] [,3] [,4] [,5]
[1,] 0.08779067 0.040523120 0.12190410 -0.061563880 0.02924750
[2,] -0.14039797 -0.020745646 -0.10311296 -0.003229727 -0.18405424
[3,] 0.21705558 0.052428595 0.04972113 -0.005890708 0.20060872
[4,] -0.06129124 -0.044846296 0.06087357 0.284234651 -0.08694759
[5,] 0.50452272 0.199311250 0.26535208 -0.167779046 0.10651224
[6,] -1.13926102 0.009698309 -0.05581614 -0.004507875 -0.83212193
[7,] -0.68677196 -0.328046234 -0.09858379 -0.267336226 -0.46624918
[8,] -0.04901414 0.055596435 -0.08361635 0.193690492 0.11070522
[9,] -0.20787836 -0.030178901 -0.11118541 -0.171206670 -0.04524939
[10,] 0.30731707 0.024214413 -0.04041101 0.056122865 0.16223186
[,6] [,7] [,8] [,9] [,10]
[1,] -0.013375114 0.12756703 0.125794909 0.13362453 -0.01536123
[2,] 0.028718694 -0.23224075 -0.003505478 0.25125999 -0.11206751
[3,] -0.042199409 0.63576142 -0.109929119 -0.06981236 -0.16812393
[4,] -0.174498230 -0.01580498 -0.091564538 -0.19233638 0.11904437
[5,] 0.404113973 -0.09143451 0.210282645 0.09979008 0.16618030
[6,] -0.083858353 0.37314551 -0.011172276 0.23772385 0.05606663
[7,] 0.016377294 0.16520086 -0.131513875 0.09516465 0.07050044
[8,] -0.012960203 0.47652817 -0.123339841 -0.10312035 -0.16417911
[9,] 0.003282022 -0.35260490 -0.121651772 0.02247268 0.03444989
[10,] 0.004169573 0.04861305 0.090870529 0.02009523 -0.03499806
$b3
[1] -0.2336827
$W3
[1] -1.3747102 0.2610998 0.2388467 -0.2731995 -0.5760811 0.3898746
[7] -0.7763057 0.3582894 0.1255908 0.2315758
>
> contour (
+ matrix (apply (Xgrid, 1, function (x) nnet (x, L)), 101, 101),
+ levels=seq(-1,1,by=0.1))
> title("Learned function")
>
>
> # Confirm that there's no allocation of large (e.g., 10 x 100)
> # intermediate Jacobian matrices when computing gradients of
> # network parameters.
>
> Rprofmemt(nelem=8)
> with gradient (L) nnet (c(0.3,0.6), L)
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):"nnet"
RPROFMEM: 112 (double 10):
RPROFMEM: 112 (double 10):
RPROFMEM: 112 (double 10):
RPROFMEM: 112 (double 10):
RPROFMEM: 112 (double 10):
RPROFMEM: 192 (double 20):
RPROFMEM: 112 (double 10):
RPROFMEM: 112 (double 10):
RPROFMEM: 832 (double 100):
RPROFMEM: 112 (double 10):
[1] 0.6356475
attr(,"gradient")
attr(,"gradient")$b1
[1] 0.004003755 0.189674164 -0.335686523 -0.097764937 0.125175207
[6] 0.551210340 0.281341609 -0.289031292 0.214343363 -0.228470444
attr(,"gradient")$W1
[1] 0.001201127 0.002402253 0.056902249 0.113804499 -0.100705957
[6] -0.201411914 -0.029329481 -0.058658962 0.037552562 0.075105124
[11] 0.165363102 0.330726204 0.084402483 0.168804965 -0.086709388
[16] -0.173418775 0.064303009 0.128606018 -0.068541133 -0.137082267
attr(,"gradient")$b2
[1] -0.5834784 0.2446103 0.2378599 -0.2553818 -0.2618058 0.3892174
[7] -0.3258300 0.3447323 0.1144117 0.2310504
attr(,"gradient")$W2
[1] -0.0813882699 -0.0163738557 -0.3044884652 0.0745383124 -0.1999925715
[6] -0.3030936106 -0.3366621383 -0.1472861685 0.1417818792 0.0047891527
[11] 0.0341202153 0.0068643735 0.1276499917 -0.0312485235 0.0838424210
[16] 0.1270652301 0.1411380859 0.0617464382 -0.0594388877 -0.0020077453
[21] 0.0331786136 0.0066749402 0.1241272868 -0.0303861707 0.0815286558
[26] 0.1235586626 0.1372431556 0.0600424468 -0.0577985769 -0.0019523384
[31] -0.0356227095 -0.0071666483 -0.1332710985 0.0326245619 -0.0875344479
[36] -0.1326605867 -0.1473531450 -0.0644654616 0.0620562975 0.0020961570
[41] -0.0365187904 -0.0073469236 -0.1366235018 0.0334452251 -0.0897363565
[46] -0.1359976327 -0.1510597790 -0.0660870752 0.0636173091 0.0021488854
[51] 0.0542911868 0.0109224101 0.2031133008 -0.0497218266 0.1334078495
[56] 0.2021828435 0.2245752006 0.0982493042 -0.0945775908 -0.0031946714
[61] -0.0454493996 -0.0091436015 -0.1700345510 0.0416241990 -0.1116812326
[66] -0.1692556266 -0.1880011956 -0.0822485591 0.0791748158 0.0026743917
[71] 0.0480860448 0.0096740471 0.1798987250 -0.0440389338 0.1181601694
[76] 0.1790746131 0.1989076643 0.0870200252 -0.0837679656 -0.0028295405
[81] 0.0159590668 0.0032106771 0.0597057997 -0.0146158888 0.0392156610
[86] 0.0594322888 0.0660145933 0.0288806949 -0.0278013831 -0.0009390838
[91] 0.0322287741 0.0064838495 0.1205737629 -0.0295162735 0.0791946482
[96] 0.1200214173 0.1333141497 0.0583235478 -0.0561439155 -0.0018964467
attr(,"gradient")$b3
[1] 1
attr(,"gradient")$W3
[1] -0.75865843 -0.25130456 0.06427731 -0.25537931 -0.73860675 0.04105581
[7] 0.76176197 -0.19452108 0.29834920 -0.04763190
>
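# Added note: a quick size check of what a naive implementation
# would allocate.  With hidden layers of n1 = n2 = 10 units, the
# Jacobian of the 10-element vector h2 with respect to the 100
# elements of W2 alone would be a 10 x 100 matrix:
#
#   10 * 100 * 8   # = 8000 bytes of doubles
#
# The trace above never allocates more than 100 doubles (832 bytes
# including header), so pqR is propagating gradients without forming
# such intermediate Jacobian matrices.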