J Solutions ch. 10 - Artificial neural networks
Solutions to the exercises of chapter 10.
J.1 Exercise 1
library("neuralnet")
#Create a neural network that learns the square root function
#Generate 50 random numbers uniformly distributed between 0 and 100
#and store them in a data frame
traininginput <- as.data.frame(runif(50, min=0, max=100))
trainingoutput <- sqrt(traininginput)
#Column-bind input and output into a single training data frame
trainingdata <- cbind(traininginput,trainingoutput)
colnames(trainingdata) <- c("Input","Output")
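#A quick sanity check (not part of the original solution) on the assembled
#training data before fitting, using base R only:
head(trainingdata)     #first few Input/Output pairs
summary(trainingdata)  #Input should span roughly 0 to 100, Output roughly 0 to 10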
#Train the neural network
#hidden=10 gives a single hidden layer with 10 neurons (not 10 hidden layers)
#threshold is a numeric value specifying the threshold for the partial
#derivatives of the error function as the stopping criterion.
net.sqrt <- neuralnet(Output~Input,trainingdata, hidden=10, threshold=0.01)
print(net.sqrt)
## $call
## neuralnet(formula = Output ~ Input, data = trainingdata, hidden = 10,
## threshold = 0.01)
##
## $response
## Output
## 1 9.887107
## 2 9.021587
## 3 2.641486
## 4 6.296746
## 5 7.585025
## 6 8.569663
## 7 9.937186
## 8 7.739652
## 9 9.460415
## 10 4.034213
## 11 9.361685
## 12 3.738484
## 13 5.695265
## 14 9.093981
## 15 6.994324
## 16 8.547495
## 17 7.484857
## 18 3.965048
## 19 4.251643
## 20 5.446298
## 21 1.595018
## 22 6.486409
## 23 3.945531
## 24 3.939136
## 25 8.471605
## 26 2.754018
## 27 9.821327
## 28 7.656749
## 29 1.162355
## 30 6.261186
## 31 9.886908
## 32 8.392716
## 33 5.862018
## 34 6.042819
## 35 9.321704
## 36 4.833661
## 37 7.153967
## 38 5.783277
## 39 6.990797
## 40 6.296827
## 41 2.774473
## 42 4.929751
## 43 5.711002
## 44 6.216423
## 45 9.903544
## 46 2.209842
## 47 9.616530
## 48 7.014968
## 49 6.500895
## 50 7.814224
##
## $covariate
##
## [1,] 97.754886
## [2,] 81.389036
## [3,] 6.977448
## [4,] 39.649014
## [5,] 57.532603
## [6,] 73.439130
## [7,] 98.747664
## [8,] 59.902210
## [9,] 89.499449
## [10,] 16.274874
## [11,] 87.641142
## [12,] 13.976265
## [13,] 32.436039
## [14,] 82.700495
## [15,] 48.920562
## [16,] 73.059670
## [17,] 56.023087
## [18,] 15.721607
## [19,] 18.076470
## [20,] 29.662162
## [21,] 2.544083
## [22,] 42.073497
## [23,] 15.567214
## [24,] 15.516794
## [25,] 71.768086
## [26,] 7.584615
## [27,] 96.458458
## [28,] 58.625804
## [29,] 1.351070
## [30,] 39.202451
## [31,] 97.750947
## [32,] 70.437687
## [33,] 34.363257
## [34,] 36.515656
## [35,] 86.894162
## [36,] 23.364283
## [37,] 51.179246
## [38,] 33.446296
## [39,] 48.871249
## [40,] 39.650026
## [41,] 7.697700
## [42,] 24.302441
## [43,] 32.615548
## [44,] 38.643911
## [45,] 98.080187
## [46,] 4.883404
## [47,] 92.477657
## [48,] 49.209769
## [49,] 42.261632
## [50,] 61.062103
##
## $model.list
## $model.list$response
## [1] "Output"
##
## $model.list$variables
## [1] "Input"
##
##
## $err.fct
## function (x, y)
## {
## 1/2 * (y - x)^2
## }
## <bytecode: 0x55698afae8f0>
## <environment: 0x55698afad268>
## attr(,"type")
## [1] "sse"
##
## $act.fct
## function (x)
## {
## 1/(1 + exp(-x))
## }
## <bytecode: 0x55698afa9fc0>
## <environment: 0x55698afa96c8>
## attr(,"type")
## [1] "logistic"
##
## $linear.output
## [1] TRUE
##
## $data
## Input Output
## 1 97.754886 9.887107
## 2 81.389036 9.021587
## 3 6.977448 2.641486
## 4 39.649014 6.296746
## 5 57.532603 7.585025
## 6 73.439130 8.569663
## 7 98.747664 9.937186
## 8 59.902210 7.739652
## 9 89.499449 9.460415
## 10 16.274874 4.034213
## 11 87.641142 9.361685
## 12 13.976265 3.738484
## 13 32.436039 5.695265
## 14 82.700495 9.093981
## 15 48.920562 6.994324
## 16 73.059670 8.547495
## 17 56.023087 7.484857
## 18 15.721607 3.965048
## 19 18.076470 4.251643
## 20 29.662162 5.446298
## 21 2.544083 1.595018
## 22 42.073497 6.486409
## 23 15.567214 3.945531
## 24 15.516794 3.939136
## 25 71.768086 8.471605
## 26 7.584615 2.754018
## 27 96.458458 9.821327
## 28 58.625804 7.656749
## 29 1.351070 1.162355
## 30 39.202451 6.261186
## 31 97.750947 9.886908
## 32 70.437687 8.392716
## 33 34.363257 5.862018
## 34 36.515656 6.042819
## 35 86.894162 9.321704
## 36 23.364283 4.833661
## 37 51.179246 7.153967
## 38 33.446296 5.783277
## 39 48.871249 6.990797
## 40 39.650026 6.296827
## 41 7.697700 2.774473
## 42 24.302441 4.929751
## 43 32.615548 5.711002
## 44 38.643911 6.216423
## 45 98.080187 9.903544
## 46 4.883404 2.209842
## 47 92.477657 9.616530
## 48 49.209769 7.014968
## 49 42.261632 6.500895
## 50 61.062103 7.814224
##
## $exclude
## NULL
##
## $net.result
## $net.result[[1]]
## [,1]
## [1,] 9.880267
## [2,] 9.029979
## [3,] 2.641350
## [4,] 6.296918
## [5,] 7.580838
## [6,] 8.574525
## [7,] 9.927892
## [8,] 7.736252
## [9,] 9.466453
## [10,] 4.032825
## [11,] 9.369015
## [12,] 3.738437
## [13,] 5.698348
## [14,] 9.102511
## [15,] 6.990221
## [16,] 8.552113
## [17,] 7.480343
## [18,] 3.963876
## [19,] 4.250066
## [20,] 5.449501
## [21,] 1.594027
## [22,] 6.485265
## [23,] 3.944432
## [24,] 3.938063
## [25,] 8.475370
## [26,] 2.754533
## [27,] 9.817375
## [28,] 7.652885
## [29,] 1.162854
## [30,] 6.261598
## [31,] 9.880077
## [32,] 8.395578
## [33,] 5.864615
## [34,] 6.044570
## [35,] 9.329413
## [36,] 4.834447
## [37,] 7.149388
## [38,] 5.786142
## [39,] 6.986709
## [40,] 6.296998
## [41,] 2.775118
## [42,] 4.931075
## [43,] 5.714054
## [44,] 6.217129
## [45,] 9.895924
## [46,] 2.210187
## [47,] 9.619359
## [48,] 7.010787
## [49,] 6.499650
## [50,] 7.811323
##
##
## $weights
## $weights[[1]]
## $weights[[1]][[1]]
## [,1] [,2] [,3] [,4] [,5] [,6]
## [1,] -1.75164853 -0.87715872 0.23907004 -0.006900232 1.2696073 -0.23088058
## [2,] 0.02346635 0.02040994 -0.04337229 0.781756870 -0.3429156 -0.08430701
## [,7] [,8] [,9] [,10]
## [1,] -0.60997142 -0.24531814 -0.53071015 -1.08181933
## [2,] -0.04106483 -0.06353129 -0.02723749 0.02105761
##
## $weights[[1]][[2]]
## [,1]
## [1,] 2.2621651
## [2,] 5.0799455
## [3,] 3.1819110
## [4,] -1.6512742
## [5,] 1.3635892
## [6,] -0.6629605
## [7,] -3.5449594
## [8,] -1.0354286
## [9,] -0.3529155
## [10,] -2.1580939
## [11,] 1.0604650
##
##
##
## $generalized.weights
## $generalized.weights[[1]]
## [,1]
## [1,] -0.0005494264
## [2,] -0.0007666484
## [3,] -0.0438779528
## [4,] -0.0023645172
## [5,] -0.0013266148
## [6,] -0.0009081378
## [7,] -0.0005385748
## [8,] -0.0012473047
## [9,] -0.0006490144
## [10,] -0.0101070635
## [11,] -0.0006740232
## [12,] -0.0129861792
## [13,] -0.0032726687
## [14,] -0.0007460355
## [15,] -0.0017005957
## [16,] -0.0009157161
## [17,] -0.0013815525
## [18,] -0.0106946059
## [19,] -0.0085210681
## [20,] -0.0037900746
## [21,] -0.3369274704
## [22,] -0.0021518242
## [23,] -0.0108688442
## [24,] -0.0109267819
## [25,] -0.0009421689
## [26,] -0.0378038535
## [27,] -0.0005639355
## [28,] -0.0012890358
## [29,] -2.1407509108
## [30,] -0.0024076889
## [31,] -0.0005494699
## [32,] -0.0009705379
## [33,] -0.0029783214
## [34,] -0.0026987893
## [35,] -0.0006843790
## [36,] -0.0056117695
## [37,] -0.0015864916
## [38,] -0.0031126020
## [39,] -0.0017032444
## [40,] -0.0023644209
## [41,] -0.0368177736
## [42,] -0.0052610149
## [43,] -0.0032432386
## [44,] -0.0024636332
## [45,] -0.0005458463
## [46,] -0.0840052183
## [47,] -0.0006110352
## [48,] -0.0016852046
## [49,] -0.0021367122
## [50,] -0.0012112347
##
##
## $startweights
## $startweights[[1]]
## $startweights[[1]][[1]]
## [,1] [,2] [,3] [,4] [,5] [,6] [,7]
## [1,] -0.9176373 0.1638798 -0.7583329 3.026520 -1.08603435 -1.9889664 -1.568138
## [2,] 0.6329243 0.3898536 -0.5766335 1.783128 -0.01759759 -0.3597596 -1.075851
## [,8] [,9] [,10]
## [1,] 0.82989700 -2.133656 0.5607576
## [2,] 0.07302238 -1.635460 1.3887142
##
## $startweights[[1]][[2]]
## [,1]
## [1,] 1.38842987
## [2,] 0.39602370
## [3,] 1.35478998
## [4,] -0.61472040
## [5,] 0.48981678
## [6,] -0.32807907
## [7,] 1.07461490
## [8,] -0.04615121
## [9,] -0.67971980
## [10,] -0.52449754
## [11,] 0.14289237
##
##
##
## $result.matrix
## [,1]
## error 4.148357e-04
## reached.threshold 9.889696e-03
## steps 4.976000e+03
## Intercept.to.1layhid1 -1.751649e+00
## Input.to.1layhid1 2.346635e-02
## Intercept.to.1layhid2 -8.771587e-01
## Input.to.1layhid2 2.040994e-02
## Intercept.to.1layhid3 2.390700e-01
## Input.to.1layhid3 -4.337229e-02
## Intercept.to.1layhid4 -6.900232e-03
## Input.to.1layhid4 7.817569e-01
## Intercept.to.1layhid5 1.269607e+00
## Input.to.1layhid5 -3.429156e-01
## Intercept.to.1layhid6 -2.308806e-01
## Input.to.1layhid6 -8.430701e-02
## Intercept.to.1layhid7 -6.099714e-01
## Input.to.1layhid7 -4.106483e-02
## Intercept.to.1layhid8 -2.453181e-01
## Input.to.1layhid8 -6.353129e-02
## Intercept.to.1layhid9 -5.307102e-01
## Input.to.1layhid9 -2.723749e-02
## Intercept.to.1layhid10 -1.081819e+00
## Input.to.1layhid10 2.105761e-02
## Intercept.to.Output 2.262165e+00
## 1layhid1.to.Output 5.079946e+00
## 1layhid2.to.Output 3.181911e+00
## 1layhid3.to.Output -1.651274e+00
## 1layhid4.to.Output 1.363589e+00
## 1layhid5.to.Output -6.629605e-01
## 1layhid6.to.Output -3.544959e+00
## 1layhid7.to.Output -1.035429e+00
## 1layhid8.to.Output -3.529155e-01
## 1layhid9.to.Output -2.158094e+00
## 1layhid10.to.Output 1.060465e+00
##
## attr(,"class")
## [1] "nn"
#Plot the neural network
plot(net.sqrt)
#Test the neural network on some new test data
testdata <- as.data.frame((1:10)^2) #Generate some squared numbers
net.results <- compute(net.sqrt, testdata) #Run them through the neural network
#See what properties net.results has
ls(net.results)
## [1] "net.result" "neurons"
#Display the predicted square roots
print(net.results$net.result)
## [,1]
## [1,] 1.016310
## [2,] 2.001754
## [3,] 3.001946
## [4,] 3.998710
## [5,] 5.001700
## [6,] 6.001978
## [7,] 6.995876
## [8,] 7.998638
## [9,] 9.008320
## [10,] 9.987302
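#The other element listed by ls() above, net.results$neurons, holds the
#layer-by-layer activations behind these predictions. A minimal sketch to
#inspect it (the exact layout, e.g. an added intercept column, may vary with
#the neuralnet version):
str(net.results$neurons)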
#Display a cleaner side-by-side comparison of the results
cleanoutput <- cbind(testdata,sqrt(testdata),
as.data.frame(net.results$net.result))
colnames(cleanoutput) <- c("Input","Expected Output","Neural Net Output")
print(cleanoutput)
## Input Expected Output Neural Net Output
## 1 1 1 1.016310
## 2 4 2 2.001754
## 3 9 3 3.001946
## 4 16 4 3.998710
## 5 25 5 5.001700
## 6 36 6 6.001978
## 7 49 7 6.995876
## 8 64 8 7.998638
## 9 81 9 9.008320
## 10 100 10 9.987302
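#A short sketch (not part of the original solution) to summarise the accuracy
#of the network on the test inputs, using the column names assigned above:
errors <- cleanoutput$`Expected Output` - cleanoutput$`Neural Net Output`
sqrt(mean(errors^2))  #root mean squared error over the 10 test points
max(abs(errors))      #largest absolute error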
Acknowledgement: this example exercise was adapted from http://gekkoquant.com/2012/05/26/neural-networks-with-r-simple-example/