Commit fa7711ed authored by Margret A. Riegert

Remove nn.configs epochs runs


Former-commit-id: 0521b0f268eefdd03b0404426f9da807aba82e0d [formerly 69443caf4aea6e3acd317f7a1152c0b9a7cfb441]
Former-commit-id: cc877cae20214720c0442b5a02737528c2f1da14
parent 57e17f18
../../train.data/output/fann.data/aggregated.fann
\ No newline at end of file
150 6 1
0.833333313465 0.75 0.027499999851 0.25 0.25 0.0
0.0983890891075
0.833333313465 0.833333313465 0.0500000007451 0.5 1.0 1.0
0.140962868929
0.416666656733 0.506250023842 0.0825000032783 0.5 0.10000000149 1.0
0.0896460413933
0.416666656733 0.34375 0.027499999851 0.25 0.15000000596 1.0
0.000286594964564
0.833333313465 0.833333313465 0.0500000007451 0.5 0.5 0.0
0.126059830189
0.833333313465 0.916666686535 0.027499999851 0.25 0.25 0.0
0.0153429806232
0.833333313465 0.833333313465 0.027499999851 0.25 0.10000000149 0.0
0.02739995718
0.833333313465 0.916666686535 0.0500000007451 0.10000000149 0.10000000149 1.0
0.0787788033485
0.833333313465 0.833333313465 0.027499999851 0.5 0.5 1.0
0.110528737307
0.833333313465 0.75 0.027499999851 0.5 0.0500000007451 1.0
0.00800873339176
0.833333313465 0.75 0.0500000007451 0.10000000149 0.10000000149 1.0
1.51961285155e-06
0.833333313465 0.916666686535 0.027499999851 0.34999999404 0.34999999404 1.0
0.115100085735
0.416666656733 0.416666656733 0.027499999851 0.449999988079 0.40000000596 1.0
0.0446473360062
0.416666656733 0.416666656733 0.027499999851 0.449999988079 0.40000000596 1.0
0.0446473360062
0.833333313465 0.833333313465 0.0724999979138 0.25 0.10000000149 1.0
0.0232810080051
0.833333313465 0.916666686535 0.027499999851 0.10000000149 1.0 1.0
0.071240067482
0.416666656733 0.506250023842 0.027499999851 0.10000000149 0.25 0.0
4.89293597639e-07
0.416666656733 0.506250023842 0.0825000032783 0.25 1.0 1.0
0.0729819238186
0.416666656733 0.416666656733 0.10000000149 0.25 1.0 1.0
0.0227480828762
0.416666656733 0.34375 0.027499999851 0.25 0.25 0.0
0.0763816535473
0.416666656733 0.416666656733 0.027499999851 0.25 0.5 0.0
0.0320791751146
0.416666656733 0.416666656733 0.027499999851 0.5 0.75 0.0
0.0750223249197
0.833333313465 0.75 0.0500000007451 0.10000000149 1.0 1.0
0.00199573859572
0.833333313465 0.75 0.027499999851 0.5 0.40000000596 1.0
0.0597345679998
0.833333313465 0.75 0.027499999851 0.25 0.0500000007451 1.0
0.000476341694593
0.416666656733 0.506250023842 0.027499999851 0.449999988079 0.40000000596 1.0
0.104277402163
0.833333313465 0.75 0.027499999851 0.5 0.0500000007451 0.0
0.0923725962639
0.833333313465 0.833333313465 0.027499999851 0.25 0.15000000596 1.0
0.0304247140884
0.833333313465 0.916666686535 0.0724999979138 0.25 0.10000000149 0.0
0.00459352135658
0.416666656733 0.506250023842 0.027499999851 0.34999999404 0.34999999404 1.0
0.0943122804165
0.416666656733 0.506250023842 0.027499999851 0.25 0.25 0.0
0.00166312232614
0.833333313465 0.916666686535 0.0500000007451 0.25 1.0 0.0
0.066886395216
0.833333313465 0.75 0.027499999851 0.449999988079 0.40000000596 1.0
0.0506376922131
0.833333313465 0.916666686535 0.0500000007451 0.10000000149 1.0 0.0
0.01811632514
0.416666656733 0.416666656733 0.0500000007451 0.10000000149 1.0 1.0
0.0080329477787
0.416666656733 0.506250023842 0.027499999851 0.10000000149 0.34999999404 0.0
6.50933361612e-06
0.833333313465 0.916666686535 0.027499999851 0.5 0.34999999404 0.0
0.0693202316761
0.833333313465 0.833333313465 0.0825000032783 0.25 1.0 1.0
0.0508903563023
0.416666656733 0.506250023842 0.027499999851 0.5 0.75 0.0
0.0447101294994
0.833333313465 0.916666686535 0.027499999851 0.10000000149 0.0500000007451 1.0
0.082073867321
0.833333313465 0.916666686535 0.0500000007451 0.25 0.10000000149 1.0
0.0831059813499
0.833333313465 0.75 0.10000000149 0.25 0.5 1.0
0.0141941010952
0.833333313465 0.833333313465 0.10000000149 0.5 0.10000000149 0.0
0.0564995706081
0.416666656733 0.416666656733 0.027499999851 0.649999976158 0.75 1.0
0.0872146636248
0.416666656733 0.506250023842 0.027499999851 0.10000000149 0.25 0.0
4.89293597639e-07
0.416666656733 0.34375 0.027499999851 0.25 0.25 1.0
0.00110981054604
0.833333313465 0.833333313465 0.027499999851 0.5 0.5 0.0
0.121908634901
0.833333313465 0.916666686535 0.0500000007451 0.5 0.5 0.0
0.0932010114193
0.833333313465 0.916666686535 0.027499999851 0.649999976158 0.75 1.0
0.225813746452
0.833333313465 0.916666686535 0.027499999851 0.649999976158 0.75 0.0
0.161192983389
0.833333313465 0.916666686535 0.027499999851 0.25 0.25 0.0
0.0153429806232
0.833333313465 0.916666686535 0.027499999851 0.10000000149 1.0 0.0
0.0127716064453
0.833333313465 0.833333313465 0.0825000032783 0.10000000149 1.0 0.0
0.0751796960831
0.416666656733 0.506250023842 0.027499999851 0.25 0.10000000149 0.0
9.03990585357e-05
0.416666656733 0.506250023842 0.027499999851 0.10000000149 1.0 0.0
0.000884927809238
0.833333313465 0.916666686535 0.027499999851 0.10000000149 0.5 0.0
0.00378392636776
0.833333313465 0.916666686535 0.027499999851 0.449999988079 0.40000000596 0.0
0.0660502612591
0.833333313465 0.916666686535 0.027499999851 0.5 0.40000000596 1.0
0.14977824688
0.416666656733 0.416666656733 0.0724999979138 0.5 1.0 0.0
0.0947640240192
0.833333313465 0.916666686535 0.10000000149 0.10000000149 0.5 1.0
0.0481331348419
0.833333313465 0.916666686535 0.027499999851 0.34999999404 0.34999999404 1.0
0.115100085735
0.833333313465 0.75 0.027499999851 0.5 0.75 0.0
0.189258128405
0.416666656733 0.34375 0.027499999851 0.10000000149 1.0 1.0
0.000182891264558
0.416666656733 0.34375 0.027499999851 0.25 0.25 0.0
0.0763816535473
0.416666656733 0.34375 0.027499999851 0.25 0.0500000007451 1.0
1.43048237078e-06
0.833333313465 0.833333313465 0.027499999851 0.649999976158 0.75 1.0
0.17442932725
0.416666656733 0.506250023842 0.027499999851 0.649999976158 0.75 0.0
0.0661680549383
0.833333313465 0.75 0.027499999851 0.5 1.0 0.0
0.211408793926
0.833333313465 0.916666686535 0.0500000007451 0.5 1.0 0.0
0.149686008692
0.416666656733 0.506250023842 0.027499999851 0.10000000149 0.5 1.0
0.0827236771584
0.833333313465 0.916666686535 0.027499999851 0.5 0.25 0.0
0.0535411834717
0.833333313465 0.833333313465 0.027499999851 0.25 0.15000000596 1.0
0.0304247140884
0.833333313465 0.75 0.10000000149 0.10000000149 0.5 1.0
0.000268508680165
0.833333313465 0.833333313465 0.027499999851 0.5 0.15000000596 1.0
0.0624456703663
0.833333313465 0.833333313465 0.0724999979138 0.5 0.5 1.0
0.100612014532
0.416666656733 0.416666656733 0.0825000032783 0.5 0.10000000149 1.0
0.0244713425636
0.416666656733 0.506250023842 0.027499999851 0.649999976158 0.75 1.0
0.14541696012
0.833333313465 0.833333313465 0.027499999851 0.10000000149 0.34999999404 1.0
0.0158381164074
0.416666656733 0.34375 0.027499999851 0.449999988079 0.40000000596 1.0
0.0148452222347
0.416666656733 0.34375 0.10000000149 0.10000000149 1.0 0.0
0.105646729469
0.833333313465 0.75 0.10000000149 0.10000000149 0.5 0.0
0.120179772377
0.833333313465 0.916666686535 0.027499999851 0.25 0.25 1.0
0.09239590168
0.416666656733 0.506250023842 0.027499999851 0.449999988079 0.40000000596 0.0
0.0202322602272
0.833333313465 0.833333313465 0.0825000032783 0.25 0.10000000149 1.0
0.0228868126869
0.416666656733 0.506250023842 0.0825000032783 0.25 0.10000000149 1.0
0.0855351388454
0.833333313465 0.75 0.0724999979138 0.25 0.5 1.0
0.0164286792278
0.416666656733 0.506250023842 0.0724999979138 0.25 0.10000000149 1.0
0.0860334038734
0.833333313465 0.75 0.10000000149 0.25 0.10000000149 1.0
0.00199778750539
0.833333313465 0.75 0.027499999851 0.10000000149 1.0 0.0
0.10701406002
0.833333313465 0.75 0.027499999851 0.5 0.10000000149 1.0
0.0182993113995
0.416666656733 0.416666656733 0.027499999851 0.25 0.25 1.0
0.0192974805832
0.833333313465 0.75 0.0825000032783 0.5 0.5 0.0
0.174897909164
0.416666656733 0.34375 0.027499999851 0.25 1.0 0.0
0.0919248461723
0.833333313465 0.916666686535 0.10000000149 0.25 1.0 1.0
0.0807681679726
0.833333313465 0.916666686535 0.10000000149 0.25 0.10000000149 1.0
0.0791258215904
0.833333313465 0.75 0.10000000149 0.10000000149 1.0 1.0
0.000551892444491
0.416666656733 0.416666656733 0.0724999979138 0.10000000149 0.10000000149 0.0
0.00687929987907
0.416666656733 0.34375 0.0825000032783 0.10000000149 0.5 1.0
3.29251633957e-06
0.833333313465 0.75 0.027499999851 0.0500000007451 0.0500000007451 1.0
0.0
0.416666656733 0.34375 0.027499999851 0.10000000149 0.40000000596 0.0
0.0766813457012
0.416666656733 0.416666656733 0.0724999979138 0.10000000149 0.10000000149 0.0
0.00687929987907
0.833333313465 0.75 0.027499999851 0.10000000149 0.0500000007451 0.0
0.0843638777733
0.833333313465 0.916666686535 0.027499999851 0.25 0.25 0.0
0.0153429806232
0.833333313465 0.833333313465 0.027499999851 0.449999988079 0.40000000596 1.0
0.0892946720123
0.833333313465 0.916666686535 0.027499999851 0.649999976158 0.75 0.0
0.161192983389
0.416666656733 0.416666656733 0.027499999851 0.649999976158 0.75 0.0
0.0957203805447
0.833333313465 0.833333313465 0.0500000007451 0.25 1.0 0.0
0.102800011635
0.416666656733 0.416666656733 0.027499999851 0.25 0.75 1.0
0.0314598828554
0.416666656733 0.34375 0.027499999851 0.5 0.34999999404 1.0
0.016285084188
0.833333313465 0.75 0.0724999979138 0.10000000149 0.10000000149 0.0
0.0887522101402
0.416666656733 0.34375 0.0825000032783 0.10000000149 1.0 1.0
3.27293528244e-05
0.416666656733 0.416666656733 0.027499999851 0.25 0.25 0.0
0.0221522450447
0.416666656733 0.416666656733 0.027499999851 0.25 0.25 1.0
0.0192974805832
0.416666656733 0.416666656733 0.027499999851 0.5 0.25 0.0
0.0427494347095
0.833333313465 0.75 0.0724999979138 0.5 1.0 0.0
0.228234738111
0.416666656733 0.416666656733 0.0724999979138 0.10000000149 0.5 0.0
0.0204427242279
0.833333313465 0.75 0.10000000149 0.5 0.5 1.0
0.0587410926819
0.833333313465 0.833333313465 0.10000000149 0.5 1.0 0.0
0.199389606714
0.833333313465 0.75 0.0500000007451 0.25 1.0 0.0
0.151173084974
0.833333313465 0.916666686535 0.027499999851 0.34999999404 0.34999999404 0.0
0.040547311306
0.416666656733 0.416666656733 0.027499999851 0.10000000149 0.25 1.0
0.00693267583847
0.833333313465 0.75 0.027499999851 0.15000000596 0.15000000596 1.0
0.000527108088136
0.833333313465 0.916666686535 0.027499999851 0.10000000149 0.0500000007451 0.0
5.56519808015e-08
0.416666656733 0.506250023842 0.0825000032783 0.25 1.0 1.0
0.0729819238186
0.416666656733 0.34375 0.0500000007451 0.5 1.0 0.0
0.12647792697
0.833333313465 0.916666686535 0.10000000149 0.5 1.0 0.0
0.166082054377
0.833333313465 0.75 0.0724999979138 0.25 1.0 1.0
0.0265915840864
0.833333313465 0.916666686535 0.027499999851 0.10000000149 0.0500000007451 1.0
0.082073867321
0.416666656733 0.34375 0.027499999851 0.10000000149 0.75 0.0
0.0800074934959
0.416666656733 0.506250023842 0.0500000007451 0.5 1.0 1.0
0.124712452292
0.833333313465 0.916666686535 0.027499999851 0.15000000596 0.15000000596 0.0
0.00126434117556
0.416666656733 0.416666656733 0.10000000149 0.10000000149 0.5 1.0
0.00405505299568
0.833333313465 0.75 0.10000000149 0.10000000149 1.0 1.0
0.000551892444491
0.833333313465 0.75 0.0724999979138 0.5 1.0 1.0
0.0924507081509
0.833333313465 0.916666686535 0.10000000149 0.5 0.5 1.0
0.139924436808
0.833333313465 0.75 0.027499999851 0.34999999404 0.34999999404 1.0
0.029446721077
0.833333313465 0.833333313465 0.027499999851 0.5 1.0 1.0
0.151217371225
0.833333313465 0.833333313465 0.0500000007451 0.25 1.0 1.0
0.062157869339
0.416666656733 0.506250023842 0.027499999851 0.10000000149 0.15000000596 0.0
-1.16723271049e-08
0.416666656733 0.506250023842 0.027499999851 0.5 0.40000000596 1.0
0.109000921249
0.416666656733 0.506250023842 0.027499999851 0.10000000149 1.0 1.0
0.0767360925674
0.416666656733 0.506250023842 0.10000000149 0.25 0.10000000149 1.0
0.0846647024155
0.416666656733 0.416666656733 0.027499999851 0.25 0.34999999404 0.0
0.026492357254
0.416666656733 0.416666656733 0.0724999979138 0.5 0.5 0.0
0.065139696002
0.416666656733 0.34375 0.027499999851 0.10000000149 0.0500000007451 1.0
0.0
0.416666656733 0.416666656733 0.0724999979138 0.5 1.0 1.0
0.0656247437
0.833333313465 0.75 0.027499999851 0.15000000596 0.15000000596 0.0
0.0869477987289
0.416666656733 0.506250023842 0.0825000032783 0.25 0.5 0.0
0.00844240933657
0.833333313465 0.833333313465 0.027499999851 0.34999999404 0.34999999404 1.0
0.0644689798355
0.833333313465 0.833333313465 0.10000000149 0.25 0.10000000149 1.0
0.0222077965736
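The block above is a FANN training-data file: the header line `150 6 1` declares 150 training pairs with 6 inputs and 1 output each, followed by alternating input and output lines. As a minimal sketch of how such a file is consumed (libfann C API; the filenames and network shape here are assumptions for illustration, not taken from this diff):

```c
/* Sketch: train a 6-input, 1-output FANN network on a training-data
 * file in the format shown above. "aggregated.fann.data" is an
 * assumed filename, not one named in this commit. */
#include "floatfann.h"

int main(void)
{
    /* 3 layers: 6 inputs, 2 hidden neurons, 1 output -- matching
     * layer_sizes=7 3 2 in the network files below, which count
     * one extra bias neuron per layer. */
    struct fann *ann = fann_create_standard(3, 6, 2, 1);

    /* fann_read_train_from_file parses exactly this layout:
     * "num_pairs num_inputs num_outputs", then alternating
     * input and output lines. */
    struct fann_train_data *data =
        fann_read_train_from_file("aggregated.fann.data");
    if (ann == NULL || data == NULL)
        return 1;

    /* max_epochs=150, report every 10 epochs, stop at MSE 0.001 */
    fann_train_on_data(ann, data, 150, 10, 0.001f);

    fann_save(ann, "aggregated.net");
    fann_destroy_train(data);
    fann_destroy(ann);
    return 0;
}
```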
FANN_FLO_2.1
num_layers=3
learning_rate=0.400000
connection_rate=1.000000
network_type=0
learning_momentum=0.000000
training_algorithm=2
train_error_function=1
train_stop_function=0
cascade_output_change_fraction=0.010000
quickprop_decay=-0.000100
quickprop_mu=1.750000
rprop_increase_factor=1.200000
rprop_decrease_factor=0.500000
rprop_delta_min=0.000000
rprop_delta_max=50.000000
rprop_delta_zero=0.100000
cascade_output_stagnation_epochs=12
cascade_candidate_change_fraction=0.010000
cascade_candidate_stagnation_epochs=12
cascade_max_out_epochs=150
cascade_min_out_epochs=50
cascade_max_cand_epochs=150
cascade_min_cand_epochs=50
cascade_num_candidate_groups=2
bit_fail_limit=3.49999994039535522461e-01
cascade_candidate_limit=1.00000000000000000000e+03
cascade_weight_multiplier=4.00000005960464477539e-01
cascade_activation_functions_count=10
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
cascade_activation_steepnesses_count=4
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
layer_sizes=7 3 2
scale_included=0
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00) (3, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00)
connections (connected_to_neuron, weight)=(0, -9.28587973117828369141e-01) (1, -3.94000977277755737305e-01) (2, 4.62552070617675781250e+00) (3, -4.20165491104125976562e+00) (4, -1.69443283230066299438e-02) (5, 7.23784983158111572266e-01) (6, 2.29117512702941894531e+00) (0, -1.99888372421264648438e+00) (1, -5.55471539497375488281e-01) (2, -4.97387351989746093750e+01) (3, 8.50402832031250000000e+00) (4, -8.35454368591308593750e+00) (5, -3.31979322433471679688e+00) (6, 4.44229030609130859375e+00) (7, -3.60311102867126464844e+00) (8, -1.50806224346160888672e+00) (9, -1.39886662364006042480e-01)
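Each `FANN_FLO_2.1` block in this diff is a saved single-precision FANN network: `layer_sizes=7 3 2` describes 6 input neurons, 2 hidden neurons, and 1 output neuron, each layer padded with one bias neuron, and the `connections` line lists every (source neuron, weight) pair. A minimal sketch of loading and evaluating such a file (the filename and input values are assumptions for illustration):

```c
/* Sketch: load one of the FANN_FLO_2.1 network files above and run it
 * on a single 6-value input. "aggregated.net" is an assumed filename. */
#include <stdio.h>
#include "floatfann.h"

int main(void)
{
    struct fann *ann = fann_create_from_file("aggregated.net");
    if (ann == NULL)
        return 1;

    /* One 6-feature input row, in the same [0,1] range as the
     * training data above (values chosen arbitrarily). */
    fann_type input[6] = { 0.8333f, 0.75f, 0.0275f, 0.25f, 0.25f, 0.0f };
    fann_type *out = fann_run(ann, input);

    printf("predicted output: %f\n", (double)out[0]);

    fann_destroy(ann);
    return 0;
}
```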
FANN_FLO_2.1
num_layers=3
learning_rate=0.400000
connection_rate=1.000000
network_type=0
learning_momentum=0.000000
training_algorithm=2
train_error_function=1
train_stop_function=0
cascade_output_change_fraction=0.010000
quickprop_decay=-0.000100
quickprop_mu=1.750000
rprop_increase_factor=1.200000
rprop_decrease_factor=0.500000
rprop_delta_min=0.000000
rprop_delta_max=50.000000
rprop_delta_zero=0.100000
cascade_output_stagnation_epochs=12
cascade_candidate_change_fraction=0.010000
cascade_candidate_stagnation_epochs=12
cascade_max_out_epochs=150
cascade_min_out_epochs=50
cascade_max_cand_epochs=150
cascade_min_cand_epochs=50
cascade_num_candidate_groups=2
bit_fail_limit=3.49999994039535522461e-01
cascade_candidate_limit=1.00000000000000000000e+03
cascade_weight_multiplier=4.00000005960464477539e-01
cascade_activation_functions_count=10
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
cascade_activation_steepnesses_count=4
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
layer_sizes=7 3 2
scale_included=0
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00) (3, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00)
connections (connected_to_neuron, weight)=(0, -3.33401393890380859375e+00) (1, 3.71157735586166381836e-01) (2, -1.42915515899658203125e+01) (3, 2.78310728073120117188e+00) (4, -1.35423910617828369141e+00) (5, -1.64980435371398925781e+00) (6, 1.60252702236175537109e+00) (0, -5.26930868625640869141e-01) (1, 1.02949905395507812500e+00) (2, 1.10462741851806640625e+01) (3, -6.07332324981689453125e+00) (4, -7.91303098201751708984e-01) (5, 1.44485890865325927734e+00) (6, 1.51739811897277832031e+00) (7, -3.16750001907348632812e+00) (8, -3.09214091300964355469e+00) (9, -1.71420738101005554199e-01)
FANN_FLO_2.1
num_layers=3
learning_rate=0.400000
connection_rate=1.000000
network_type=0
learning_momentum=0.000000
training_algorithm=2
train_error_function=1
train_stop_function=0
cascade_output_change_fraction=0.010000
quickprop_decay=-0.000100
quickprop_mu=1.750000
rprop_increase_factor=1.200000
rprop_decrease_factor=0.500000
rprop_delta_min=0.000000
rprop_delta_max=50.000000
rprop_delta_zero=0.100000
cascade_output_stagnation_epochs=12
cascade_candidate_change_fraction=0.010000
cascade_candidate_stagnation_epochs=12
cascade_max_out_epochs=150
cascade_min_out_epochs=50
cascade_max_cand_epochs=150
cascade_min_cand_epochs=50
cascade_num_candidate_groups=2
bit_fail_limit=3.49999994039535522461e-01
cascade_candidate_limit=1.00000000000000000000e+03
cascade_weight_multiplier=4.00000005960464477539e-01
cascade_activation_functions_count=10
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
cascade_activation_steepnesses_count=4
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
layer_sizes=7 3 2
scale_included=0
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00) (3, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00)
connections (connected_to_neuron, weight)=(0, -4.22883749008178710938e+00) (1, 1.72855186462402343750e+00) (2, 1.01697564125061035156e+00) (3, -3.69713211059570312500e+00) (4, -1.86824035644531250000e+00) (5, 1.85429751873016357422e-01) (6, 5.43634033203125000000e+00) (0, 2.49554467201232910156e+00) (1, -3.34417128562927246094e+00) (2, -2.04726719856262207031e+00) (3, -5.09763383865356445312e+00) (4, -2.86187201738357543945e-01) (5, 9.01604533195495605469e-01) (6, 5.57442784309387207031e-01) (7, -2.60164022445678710938e+00) (8, -1.76475024223327636719e+00) (9, -3.11826109886169433594e-01)
FANN_FLO_2.1
num_layers=3
learning_rate=0.400000
connection_rate=1.000000
network_type=0
learning_momentum=0.000000
training_algorithm=2
train_error_function=1
train_stop_function=0
cascade_output_change_fraction=0.010000
quickprop_decay=-0.000100
quickprop_mu=1.750000
rprop_increase_factor=1.200000
rprop_decrease_factor=0.500000
rprop_delta_min=0.000000
rprop_delta_max=50.000000
rprop_delta_zero=0.100000
cascade_output_stagnation_epochs=12
cascade_candidate_change_fraction=0.010000
cascade_candidate_stagnation_epochs=12
cascade_max_out_epochs=150
cascade_min_out_epochs=50
cascade_max_cand_epochs=150
cascade_min_cand_epochs=50
cascade_num_candidate_groups=2
bit_fail_limit=3.49999994039535522461e-01
cascade_candidate_limit=1.00000000000000000000e+03
cascade_weight_multiplier=4.00000005960464477539e-01
cascade_activation_functions_count=10
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
cascade_activation_steepnesses_count=4
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
layer_sizes=7 3 2
scale_included=0
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00) (3, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00)
connections (connected_to_neuron, weight)=(0, -2.46801567077636718750e+00) (1, -9.76779401302337646484e-01) (2, -5.57779846191406250000e+01) (3, 8.38674926757812500000e+00) (4, -9.35412502288818359375e+00) (5, -3.70702219009399414062e+00) (6, 6.02705144882202148438e+00) (0, -3.67756456136703491211e-01) (1, -6.58882975578308105469e-01) (2, 4.09101247787475585938e+00) (3, -3.68209385871887207031e+00) (4, 2.94938385486602783203e-02) (5, 6.75125956535339355469e-01) (6, 1.56218576431274414062e+00) (7, -1.47833991050720214844e+00) (8, -3.84751558303833007812e+00) (9, -2.73475170135498046875e-01)
FANN_FLO_2.1
num_layers=3
learning_rate=0.400000
connection_rate=1.000000
network_type=0
learning_momentum=0.000000
training_algorithm=2
train_error_function=1
train_stop_function=0
cascade_output_change_fraction=0.010000
quickprop_decay=-0.000100
quickprop_mu=1.750000
rprop_increase_factor=1.200000
rprop_decrease_factor=0.500000
rprop_delta_min=0.000000
rprop_delta_max=50.000000
rprop_delta_zero=0.100000
cascade_output_stagnation_epochs=12
cascade_candidate_change_fraction=0.010000
cascade_candidate_stagnation_epochs=12
cascade_max_out_epochs=150
cascade_min_out_epochs=50
cascade_max_cand_epochs=150
cascade_min_cand_epochs=50
cascade_num_candidate_groups=2
bit_fail_limit=3.49999994039535522461e-01
cascade_candidate_limit=1.00000000000000000000e+03
cascade_weight_multiplier=4.00000005960464477539e-01
cascade_activation_functions_count=10
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
cascade_activation_steepnesses_count=4
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
layer_sizes=7 3 2
scale_included=0
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00) (3, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00)
connections (connected_to_neuron, weight)=(0, -2.85262846946716308594e+00) (1, -1.40705800056457519531e+00) (2, -8.29535675048828125000e+01) (3, 9.85726261138916015625e+00) (4, -1.31131801605224609375e+01) (5, -5.58768987655639648438e+00) (6, 9.37193012237548828125e+00) (0, -1.55619633197784423828e+00) (1, -6.31267249584197998047e-01) (2, 5.68922090530395507812e+00) (3, -5.81053733825683593750e+00) (4, -4.05646294355392456055e-01) (5, 9.95057284832000732422e-01) (6, 3.53458714485168457031e+00) (7, -1.14002370834350585938e+00) (8, -2.58302330970764160156e+00) (9, -8.36060464382171630859e-01)
FANN_FLO_2.1
num_layers=3
learning_rate=0.400000
connection_rate=1.000000
network_type=0
learning_momentum=0.000000
training_algorithm=2
train_error_function=1
train_stop_function=0
cascade_output_change_fraction=0.010000
quickprop_decay=-0.000100
quickprop_mu=1.750000
rprop_increase_factor=1.200000
rprop_decrease_factor=0.500000
rprop_delta_min=0.000000
rprop_delta_max=50.000000
rprop_delta_zero=0.100000
cascade_output_stagnation_epochs=12
cascade_candidate_change_fraction=0.010000
cascade_candidate_stagnation_epochs=12
cascade_max_out_epochs=150
cascade_min_out_epochs=50
cascade_max_cand_epochs=150
cascade_min_cand_epochs=50
cascade_num_candidate_groups=2
bit_fail_limit=3.49999994039535522461e-01
cascade_candidate_limit=1.00000000000000000000e+03
cascade_weight_multiplier=4.00000005960464477539e-01
cascade_activation_functions_count=10
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
cascade_activation_steepnesses_count=4
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
layer_sizes=7 3 2
scale_included=0
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00) (3, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00)
connections (connected_to_neuron, weight)=(0, 4.23122358322143554688e+00) (1, 3.65344095230102539062e+00) (2, -2.57743072509765625000e+00) (3, -1.12124118804931640625e+01) (4, -3.26462340354919433594e+00) (5, 1.09435915946960449219e+00) (6, -2.60525289922952651978e-02) (0, -4.17110872268676757812e+00) (1, -2.94111180305480957031e+00) (2, 2.29989957809448242188e+00) (3, -2.35044598579406738281e+00) (4, -1.29760217666625976562e+00) (5, -2.45284866541624069214e-02) (6, 5.66631841659545898438e+00) (7, -1.46692597866058349609e+00) (8, -1.77592480182647705078e+00) (9, -1.27620172500610351562e+00)
FANN_FLO_2.1
num_layers=3
learning_rate=0.400000
connection_rate=1.000000
network_type=0
learning_momentum=0.000000
training_algorithm=2
train_error_function=1
train_stop_function=0
cascade_output_change_fraction=0.010000
quickprop_decay=-0.000100
quickprop_mu=1.750000
rprop_increase_factor=1.200000
rprop_decrease_factor=0.500000
rprop_delta_min=0.000000
rprop_delta_max=50.000000
rprop_delta_zero=0.100000
cascade_output_stagnation_epochs=12
cascade_candidate_change_fraction=0.010000
cascade_candidate_stagnation_epochs=12
cascade_max_out_epochs=150
cascade_min_out_epochs=50
cascade_max_cand_epochs=150
cascade_min_cand_epochs=50
cascade_num_candidate_groups=2
bit_fail_limit=3.49999994039535522461e-01
cascade_candidate_limit=1.00000000000000000000e+03
cascade_weight_multiplier=4.00000005960464477539e-01
cascade_activation_functions_count=10
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
cascade_activation_steepnesses_count=4
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
layer_sizes=7 3 2
scale_included=0
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00) (3, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00)
connections (connected_to_neuron, weight)=(0, -9.74569892883300781250e+00) (1, 8.37255382537841796875e+00) (2, -2.13007640838623046875e+00) (3, -1.61579740047454833984e+00) (4, -8.57025861740112304688e-01) (5, -1.50000000000000000000e+03) (6, 5.53674876689910888672e-01) (0, 3.41783761978149414062e+00) (1, -4.23219680786132812500e+00) (2, 1.48371922969818115234e+00) (3, -1.67634081840515136719e+00) (4, -3.31248790025711059570e-01) (5, 1.68993616104125976562e+00) (6, -1.46280777454376220703e+00) (7, -7.88658952713012695312e+00) (8, -8.73473834991455078125e+00) (9, -4.52123284339904785156e-01)
FANN_FLO_2.1
num_layers=3
learning_rate=0.400000
connection_rate=1.000000
network_type=0
learning_momentum=0.000000
training_algorithm=2
train_error_function=1
train_stop_function=0
cascade_output_change_fraction=0.010000
quickprop_decay=-0.000100
quickprop_mu=1.750000
rprop_increase_factor=1.200000
rprop_decrease_factor=0.500000
rprop_delta_min=0.000000
rprop_delta_max=50.000000