Commit eb56e87c authored by Amir Yazdanbakhsh

Add the application directory to the repository


Former-commit-id: 6d994ff33711f4ceade7d8cef278279166c2dcb2 [formerly 6c012b09ef904cb5e9742ee56798442a248a5ddc]
Former-commit-id: 3bfbfbcde1415ab3d0b93836618bc396ac111425
parent 9128bf87
CC := g++
LD := g++
include ../config.mk
# library flags belong at link time (see LFLAGS), not in compile-only steps
CFLAGS := -Wall -Wnarrowing -std=c++11 -O3 -MD
LFLAGS := -lfann -lbackprop -lboost_regex -lParrot
HEADERS := src
INCLUDE := -I${ANALOG_INC} -I${FANN_INC} -I${HEADERS}
LIB := -L${ANALOG_LIB} -L${FANN_LIB} -L$(PARROT_LIB)
MODULE := blackscholes.out
CPP_FILES := blackscholes.c
OBJ_FILES := $(addprefix obj/,$(notdir $(CPP_FILES:.c=.o)))
.PHONY: all clean DIR
all: DIR $(MODULE)
DIR:
@echo ${CPP_FILES}
@echo ${OBJ_FILES}
@mkdir -p bin obj
$(MODULE): $(OBJ_FILES)
$(LD) $^ $(LIB) $(LFLAGS) -o bin/$@
obj/%.o: src/%.c
python $(PLANG) -c $(PARROT_JSON) -a observe -e "g++ $(CFLAGS) $(INCLUDE) -I$(PARROT_LIB) -c" -s "$<" -o "$@" -t
#$(CC) $(CFLAGS) $(INCLUDE) -c $< -o $@
clean:
@rm -rf *.o
@rm -rf *.d
@rm -rf *.out
@rm -rf bin
@rm -rf obj
CC := g++
LD := g++
include ../config.mk
# library flags belong at link time (see LFLAGS), not in compile-only steps
CFLAGS := -Wall -Wnarrowing -std=c++11 -O3 -MD
LFLAGS := -lfann -lbackprop -lboost_regex -lParrot
HEADERS := src
INCLUDE := -I${ANALOG_INC} -I${FANN_INC} -I${HEADERS} -I$(PARROT_LIB)
LIB := -L${ANALOG_LIB} -L${FANN_LIB} -L$(PARROT_LIB)
MODULE := blackscholes.nn.out
CPP_FILES := $(wildcard src.nn/*.c)
OBJ_FILES := $(addprefix obj/,$(notdir $(CPP_FILES:.c=.o)))
.PHONY: all clean DIR
all: DIR $(MODULE)
DIR:
@echo ${CPP_FILES}
@echo ${OBJ_FILES}
@mkdir -p bin obj
$(MODULE): $(OBJ_FILES)
$(LD) $^ $(LIB) $(LFLAGS) -o bin/$@
obj/%.o: src.nn/%.c
$(CC) $(CFLAGS) $(INCLUDE) -c $< -o $@
clean:
@rm -rf *.o
@rm -rf *.d
@rm -rf *.out
@rm -rf bin
@rm -rf obj
{
"learning_rate": "0.4",
"epoch_number": "1000",
"sampling_rate": "0.005",
"test_data_fraction": "0.7",
"max_layer_num": "4",
"max_neuron_num_per_layer": "8"
}
\ No newline at end of file
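The JSON above configures the Parrot trainer, which is not itself part of this commit: learning_rate and epoch_number are standard backpropagation knobs, sampling_rate presumably throttles how many observed invocations get logged, test_data_fraction splits the collected data, and the two max_* entries bound the topology search. A minimal sketch of how the numeric knobs would map onto the FANN C API (the file names and the stopping error are assumptions, not taken from this commit):

// Sketch only: the real Parrot trainer is not in this commit.
// Build (assumed): g++ map_config.cpp -lfann
#include "fann.h"

int main() {
    // A topology inside the configured bounds (max_layer_num=4,
    // max_neuron_num_per_layer=8); 6-8-8-1 is the net committed below.
    struct fann *ann = fann_create_standard(4, 6, 8, 8, 1);

    fann_set_learning_rate(ann, 0.4f);      // "learning_rate"
    fann_train_on_file(ann, "train.data",   // hypothetical training file
                       1000,                // "epoch_number"
                       100,                 // report interval
                       0.001f);             // stopping MSE (assumed)

    fann_save(ann, "blackscholes.nn");      // hypothetical output name
    fann_destroy(ann);
    return 0;
}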
FANN_FLO_2.1
num_layers=4
learning_rate=0.400000
connection_rate=1.000000
network_type=0
learning_momentum=0.000000
training_algorithm=2
train_error_function=1
train_stop_function=0
cascade_output_change_fraction=0.010000
quickprop_decay=-0.000100
quickprop_mu=1.750000
rprop_increase_factor=1.200000
rprop_decrease_factor=0.500000
rprop_delta_min=0.000000
rprop_delta_max=50.000000
rprop_delta_zero=0.100000
cascade_output_stagnation_epochs=12
cascade_candidate_change_fraction=0.010000
cascade_candidate_stagnation_epochs=12
cascade_max_out_epochs=150
cascade_min_out_epochs=50
cascade_max_cand_epochs=150
cascade_min_cand_epochs=50
cascade_num_candidate_groups=2
bit_fail_limit=3.49999994039535522461e-01
cascade_candidate_limit=1.00000000000000000000e+03
cascade_weight_multiplier=4.00000005960464477539e-01
cascade_activation_functions_count=10
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
cascade_activation_steepnesses_count=4
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
layer_sizes=7 9 9 2
scale_included=0
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00) (9, 3, 5.00000000000000000000e-01) (9, 3, 5.00000000000000000000e-01) (9, 3, 5.00000000000000000000e-01) (9, 3, 5.00000000000000000000e-01) (9, 3, 5.00000000000000000000e-01) (9, 3, 5.00000000000000000000e-01) (9, 3, 5.00000000000000000000e-01) (9, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00) (9, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00)
connections (connected_to_neuron, weight)=(0, 4.15708780288696289062e+00) (1, -8.50745868682861328125e+00) (2, 2.96497154235839843750e+00) (3, -3.58017897605895996094e+00) (4, -8.68925929069519042969e-01) (5, 4.65158653259277343750e+00) (6, -1.30439639091491699219e+00) (0, -2.64584213495254516602e-01) (1, -4.43225717544555664062e+00) (2, 2.02129483222961425781e-01) (3, -4.91862518310546875000e+02) (4, -9.80735321044921875000e+01) (5, -1.37689843750000000000e+03) (6, 1.52099683880805969238e-01) (0, -4.73761749267578125000e+00) (1, 3.98123931884765625000e+00) (2, 3.15824413299560546875e+00) (3, -1.67477238178253173828e+00) (4, -1.42398071289062500000e+00) (5, -1.40535620117187500000e+03) (6, 3.42407941818237304688e-01) (0, -3.42078834772109985352e-01) (1, -3.19215655326843261719e-02) (2, 1.49393320083618164062e+00) (3, -1.76143884658813476562e+00) (4, -2.18968987464904785156e+00) (5, -5.13431057333946228027e-02) (6, 2.96551257371902465820e-01) (0, 8.46284449100494384766e-01) (1, -1.99832692742347717285e-01) (2, 2.59308147430419921875e+01) (3, -2.54921698570251464844e+00) (4, -3.13844490051269531250e+01) (5, 6.52958488464355468750e+00) (6, 1.35299175977706909180e-01) (0, -6.95242226123809814453e-01) (1, -1.29383516311645507812e+00) (2, 1.01512489318847656250e+01) (3, -3.74508309364318847656e+00) (4, -1.69566011428833007812e+00) (5, -2.45073605328798294067e-02) (6, 5.22289037704467773438e-01) (0, -4.72632265090942382812e+00) (1, -8.13149094581604003906e-01) (2, -1.12681694030761718750e+01) (3, 1.05300799012184143066e-01) (4, 5.82407355308532714844e-01) (5, -1.86560496687889099121e-01) (6, 3.67573237419128417969e+00) (0, -1.82268977165222167969e+00) (1, 1.47566223144531250000e+00) (2, 5.93074500560760498047e-01) (3, -1.14390678405761718750e+01) (4, 1.79593753814697265625e+00) (5, -6.94831669330596923828e-01) (6, 1.53303360939025878906e+00) (7, -7.41578483581542968750e+00) (8, 1.50000000000000000000e+03) (9, -1.77783412933349609375e+01) (10, 3.12705669403076171875e+01) (11, 1.11259889602661132812e+01) (12, 1.42258024215698242188e+00) (13, 7.35442543029785156250e+00) (14, 1.09430539608001708984e+00) (15, -3.68982744216918945312e+00) (7, 7.40424423217773437500e+01) (8, 1.50000000000000000000e+03) (9, 6.80895566940307617188e+00) (10, 4.47792100906372070312e+00) (11, -5.92545270919799804688e+00) (12, -4.01668977737426757812e+00) (13, 8.09563064575195312500e+00) (14, 4.52087908983230590820e-01) (15, -4.75312852859497070312e+00) (7, 1.99098529815673828125e+01) (8, 1.50000000000000000000e+03) (9, 6.85241365432739257812e+00) (10, 5.27917623519897460938e-01) (11, 2.80270695686340332031e+00) (12, -1.45111608505249023438e+01) (13, -2.32518744468688964844e+00) (14, 1.80600750446319580078e+00) (15, -1.49224543571472167969e+00) (7, -4.74953460693359375000e+00) (8, 1.50000000000000000000e+03) (9, 1.39768314361572265625e+01) (10, -3.81708955764770507812e+00) (11, 1.83735060691833496094e+00) (12, -5.45990705490112304688e+00) (13, 7.65983879566192626953e-01) (14, 1.39110612869262695312e+00) (15, -4.21951675415039062500e+00) (7, 5.44630908966064453125e+00) (8, 1.50000000000000000000e+03) (9, 1.02574310302734375000e+01) (10, -2.01016992330551147461e-01) (11, -1.94441771507263183594e+00) (12, -7.71795177459716796875e+00) (13, -2.60075688362121582031e-01) (14, 1.05361707508563995361e-01) (15, -8.23294878005981445312e-01) (7, 8.72722434997558593750e+00) (8, 1.50000000000000000000e+03) (9, 5.44353187084197998047e-01) (10, 8.35870876908302307129e-02) (11, 2.82883495092391967773e-01) (12, -1.43774585723876953125e+01) 
(13, -1.91181635856628417969e+00) (14, 9.45206165313720703125e-01) (15, -3.47729176282882690430e-01) (7, 1.20273056030273437500e+01) (8, 1.50000000000000000000e+03) (9, -4.34757471084594726562e+00) (10, -1.96111738681793212891e+00) (11, 7.36245751380920410156e-01) (12, -8.24599170684814453125e+00) (13, -6.90470647811889648438e+00) (14, 1.64568901062011718750e+00) (15, -2.51780629158020019531e+00) (7, 4.46098756790161132812e+00) (8, 1.50000000000000000000e+03) (9, 1.84086074829101562500e+01) (10, -2.34488010406494140625e+00) (11, 1.92729365825653076172e+00) (12, -1.29226942062377929688e+01) (13, -8.60768556594848632812e-01) (14, -1.49563539028167724609e+00) (15, -1.34761705994606018066e-01) (16, -6.22880518436431884766e-01) (17, -5.58566153049468994141e-01) (18, -5.19399821758270263672e-01) (19, -5.55761957168579101562e+00) (20, -4.02937948703765869141e-01) (21, -1.12400937080383300781e+00) (22, -7.45624780654907226562e+00) (23, -5.56009531021118164062e-01) (24, 8.37634056806564331055e-02)
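The file above is a FANN network serialized in the float format (FANN_FLO_2.1). layer_sizes=7 9 9 2 counts one bias unit per layer, so the usable topology is 6 inputs, two hidden layers of 8 sigmoid neurons (activation function 3, steepness 0.5), and 1 output. A minimal sketch of loading and evaluating it with the FANN C API; the file name is hypothetical:

// Load the serialized net and run one normalized input through it.
#include <cstdio>
#include "fann.h"

int main() {
    struct fann *ann = fann_create_from_file("blackscholes.nn");
    if (!ann) return 1;

    // Six normalized inputs, copied from the first training pair below.
    fann_type input[6] = {0.833333f, 0.75f, 0.0275f, 0.25f, 0.25f, 0.0f};
    fann_type *out = fann_run(ann, input);  // this net has a single output

    std::printf("approximated (normalized) price: %f\n", out[0]);
    fann_destroy(ann);
    return 0;
}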
blackscholes.c
obj/blackscholes.o
if [ ! -d "./bin" ];then \
mkdir bin; \
fi
if [ ! -d "./obj" ];then \
mkdir obj; \
fi
python /home/yazdan/phd/projects/parrot.cpu/parrot.c/src/ParrotObserver/plang.py -c /home/yazdan/phd/projects/parrot.cpu/parrot.c/src/ParrotObserver/ParrotC.json -a observe -e "g++ -Wall -Wnarrowing -lfann -lbackprop -lboost_regex -std=c++11 -O3 -MD -I/home/yazdan/phd/benchmarks/npu.bench/anpu.compiler/src -I/filespace/people/y/yazdanbakhsh/FANN/bin/include -Isrc -I/home/yazdan/phd/projects/parrot.cpu/parrot.c/src/ParrotLib -c" -s "src/blackscholes.c" -o "obj/blackscholes.o" -t
obj/blackscholes.o.parroto.c:259:0: warning: ignoring #pragma parrot [-Wunknown-pragmas]
#pragma parrot(input, "blackscholes", [6]dataIn)
^
obj/blackscholes.o.parroto.c:268:0: warning: ignoring #pragma parrot [-Wunknown-pragmas]
#pragma parrot(output, "blackscholes", [1]<0.1; 0.9>dataOut)
^
obj/blackscholes.o.parroto.c: In function ‘int bs_thread(void*)’:
obj/blackscholes.o.parroto.c:246:20: warning: unused variable ‘price’ [-Wunused-variable]
fptype price;
^
--------------------------------
[6]dataIn
['dataIn', '6', None, None]
('dataIn', '6', ('0', '0'))
[('dataIn', '6', ('0', '0'))]
--------------------------------
[1]<0.1; 0.9>dataOut
['dataOut', '1', '0.1', ' 0.9']
('dataOut', '1', ('0.1', '0.9'))
[('dataOut', '1', ('0.1', '0.9'))]
Parrot Observer
g++ -Wall -Wnarrowing -lfann -lbackprop -lboost_regex -std=c++11 -O3 -MD -I/home/yazdan/phd/benchmarks/npu.bench/anpu.compiler/src -I/filespace/people/y/yazdanbakhsh/FANN/bin/include -Isrc -I/home/yazdan/phd/projects/parrot.cpu/parrot.c/src/ParrotLib -c obj/blackscholes.o.parroto.c -o obj/blackscholes.o
mv obj/blackscholes.o.parroto.c obj/blackscholes.o.parroto.c.tmp
#g++ -Wall -Wnarrowing -lfann -lbackprop -lboost_regex -std=c++11 -O3 -MD -I/home/yazdan/phd/benchmarks/npu.bench/anpu.compiler/src -I/filespace/people/y/yazdanbakhsh/FANN/bin/include -Isrc -c src/blackscholes.c -o obj/blackscholes.o
g++ obj/blackscholes.o -L/home/yazdan/phd/benchmarks/npu.bench/anpu.compiler/lib -L/filespace/people/y/yazdanbakhsh/FANN/bin/lib -L/home/yazdan/phd/projects/parrot.cpu/parrot.c/src/ParrotLib -lfann -lbackprop -lboost_regex -lParrot -o bin/blackscholes.out
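The -Wunknown-pragmas warnings and the bracketed echoes above come from the #pragma parrot annotations in src/blackscholes.c: plang.py parses each annotation into a (name, width, output-range) tuple, then hands the rewritten obj/blackscholes.o.parroto.c to g++. A sketch of the assumed shape of the annotated region; only the two pragma lines appear verbatim in the log, everything else is illustrative:

// Under a plain g++ build the pragmas are ignored (hence the warnings);
// the Parrot toolchain observes, and can later replace, the code between
// them with a neural-network invocation.
static float annotated_region(const float in[6]) {
    float dataIn[6], dataOut[1];
    for (int i = 0; i < 6; ++i) dataIn[i] = in[i];
#pragma parrot(input, "blackscholes", [6]dataIn)
    // the exact Black-Scholes computation would run here (placeholder below)
    dataOut[0] = 0.5f * (dataIn[0] - dataIn[1]);
#pragma parrot(output, "blackscholes", [1]<0.1; 0.9>dataOut)
    // the <0.1; 0.9> annotation presumably bounds the normalized output
    return dataOut[0];
}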
src.nn/blackscholes_nn.c
obj/blackscholes_nn.o
if [ ! -d "./bin" ];then \
mkdir bin; \
fi
if [ ! -d "./obj" ];then \
mkdir obj; \
fi
g++ -Wall -Wnarrowing -lfann -lbackprop -lboost_regex -std=c++11 -O3 -MD -I/home/yazdan/phd/benchmarks/npu.bench/anpu.compiler/src -I/filespace/people/y/yazdanbakhsh/FANN/bin/include -Isrc -I/home/yazdan/phd/projects/parrot.cpu/parrot.c/src/ParrotLib -c src.nn/blackscholes_nn.c -o obj/blackscholes_nn.o
src.nn/blackscholes_nn.c: In function ‘int bs_thread(void*)’:
src.nn/blackscholes_nn.c:247:20: warning: unused variable ‘price’ [-Wunused-variable]
fptype price;
^
src.nn/blackscholes_nn.c:248:20: warning: unused variable ‘N1’ [-Wunused-variable]
fptype N1, N2;
^
src.nn/blackscholes_nn.c:248:24: warning: unused variable ‘N2’ [-Wunused-variable]
fptype N1, N2;
^
g++ obj/blackscholes_nn.o -L/home/yazdan/phd/benchmarks/npu.bench/anpu.compiler/lib -L/filespace/people/y/yazdanbakhsh/FANN/bin/lib -L/home/yazdan/phd/projects/parrot.cpu/parrot.c/src/ParrotLib -lfann -lbackprop -lboost_regex -lParrot -o bin/blackscholes.nn.out
../../train.data/output/fann.data/aggregated.fann
\ No newline at end of file
150 6 1
0.833333313465 0.75 0.027499999851 0.25 0.25 0.0
0.0983890891075
0.833333313465 0.833333313465 0.0500000007451 0.5 1.0 1.0
0.140962868929
0.416666656733 0.506250023842 0.0825000032783 0.5 0.10000000149 1.0
0.0896460413933
0.416666656733 0.34375 0.027499999851 0.25 0.15000000596 1.0
0.000286594964564
0.833333313465 0.833333313465 0.0500000007451 0.5 0.5 0.0
0.126059830189
0.833333313465 0.916666686535 0.027499999851 0.25 0.25 0.0
0.0153429806232
0.833333313465 0.833333313465 0.027499999851 0.25 0.10000000149 0.0
0.02739995718
0.833333313465 0.916666686535 0.0500000007451 0.10000000149 0.10000000149 1.0
0.0787788033485
0.833333313465 0.833333313465 0.027499999851 0.5 0.5 1.0
0.110528737307
0.833333313465 0.75 0.027499999851 0.5 0.0500000007451 1.0
0.00800873339176
0.833333313465 0.75 0.0500000007451 0.10000000149 0.10000000149 1.0
1.51961285155e-06
0.833333313465 0.916666686535 0.027499999851 0.34999999404 0.34999999404 1.0
0.115100085735
0.416666656733 0.416666656733 0.027499999851 0.449999988079 0.40000000596 1.0
0.0446473360062
0.416666656733 0.416666656733 0.027499999851 0.449999988079 0.40000000596 1.0
0.0446473360062
0.833333313465 0.833333313465 0.0724999979138 0.25 0.10000000149 1.0
0.0232810080051
0.833333313465 0.916666686535 0.027499999851 0.10000000149 1.0 1.0
0.071240067482
0.416666656733 0.506250023842 0.027499999851 0.10000000149 0.25 0.0
4.89293597639e-07
0.416666656733 0.506250023842 0.0825000032783 0.25 1.0 1.0
0.0729819238186
0.416666656733 0.416666656733 0.10000000149 0.25 1.0 1.0
0.0227480828762
0.416666656733 0.34375 0.027499999851 0.25 0.25 0.0
0.0763816535473
0.416666656733 0.416666656733 0.027499999851 0.25 0.5 0.0
0.0320791751146
0.416666656733 0.416666656733 0.027499999851 0.5 0.75 0.0
0.0750223249197
0.833333313465 0.75 0.0500000007451 0.10000000149 1.0 1.0
0.00199573859572
0.833333313465 0.75 0.027499999851 0.5 0.40000000596 1.0
0.0597345679998
0.833333313465 0.75 0.027499999851 0.25 0.0500000007451 1.0
0.000476341694593
0.416666656733 0.506250023842 0.027499999851 0.449999988079 0.40000000596 1.0
0.104277402163
0.833333313465 0.75 0.027499999851 0.5 0.0500000007451 0.0
0.0923725962639
0.833333313465 0.833333313465 0.027499999851 0.25 0.15000000596 1.0
0.0304247140884
0.833333313465 0.916666686535 0.0724999979138 0.25 0.10000000149 0.0
0.00459352135658
0.416666656733 0.506250023842 0.027499999851 0.34999999404 0.34999999404 1.0
0.0943122804165
0.416666656733 0.506250023842 0.027499999851 0.25 0.25 0.0
0.00166312232614
0.833333313465 0.916666686535 0.0500000007451 0.25 1.0 0.0
0.066886395216
0.833333313465 0.75 0.027499999851 0.449999988079 0.40000000596 1.0
0.0506376922131
0.833333313465 0.916666686535 0.0500000007451 0.10000000149 1.0 0.0
0.01811632514
0.416666656733 0.416666656733 0.0500000007451 0.10000000149 1.0 1.0
0.0080329477787
0.416666656733 0.506250023842 0.027499999851 0.10000000149 0.34999999404 0.0
6.50933361612e-06
0.833333313465 0.916666686535 0.027499999851 0.5 0.34999999404 0.0
0.0693202316761
0.833333313465 0.833333313465 0.0825000032783 0.25 1.0 1.0
0.0508903563023
0.416666656733 0.506250023842 0.027499999851 0.5 0.75 0.0
0.0447101294994
0.833333313465 0.916666686535 0.027499999851 0.10000000149 0.0500000007451 1.0
0.082073867321
0.833333313465 0.916666686535 0.0500000007451 0.25 0.10000000149 1.0
0.0831059813499
0.833333313465 0.75 0.10000000149 0.25 0.5 1.0
0.0141941010952
0.833333313465 0.833333313465 0.10000000149 0.5 0.10000000149 0.0
0.0564995706081
0.416666656733 0.416666656733 0.027499999851 0.649999976158 0.75 1.0
0.0872146636248
0.416666656733 0.506250023842 0.027499999851 0.10000000149 0.25 0.0
4.89293597639e-07
0.416666656733 0.34375 0.027499999851 0.25 0.25 1.0
0.00110981054604
0.833333313465 0.833333313465 0.027499999851 0.5 0.5 0.0
0.121908634901
0.833333313465 0.916666686535 0.0500000007451 0.5 0.5 0.0
0.0932010114193
0.833333313465 0.916666686535 0.027499999851 0.649999976158 0.75 1.0
0.225813746452
0.833333313465 0.916666686535 0.027499999851 0.649999976158 0.75 0.0
0.161192983389
0.833333313465 0.916666686535 0.027499999851 0.25 0.25 0.0
0.0153429806232
0.833333313465 0.916666686535 0.027499999851 0.10000000149 1.0 0.0
0.0127716064453
0.833333313465 0.833333313465 0.0825000032783 0.10000000149 1.0 0.0
0.0751796960831
0.416666656733 0.506250023842 0.027499999851 0.25 0.10000000149 0.0
9.03990585357e-05
0.416666656733 0.506250023842 0.027499999851 0.10000000149 1.0 0.0
0.000884927809238
0.833333313465 0.916666686535 0.027499999851 0.10000000149 0.5 0.0
0.00378392636776
0.833333313465 0.916666686535 0.027499999851 0.449999988079 0.40000000596 0.0
0.0660502612591
0.833333313465 0.916666686535 0.027499999851 0.5 0.40000000596 1.0
0.14977824688
0.416666656733 0.416666656733 0.0724999979138 0.5 1.0 0.0
0.0947640240192
0.833333313465 0.916666686535 0.10000000149 0.10000000149 0.5 1.0
0.0481331348419
0.833333313465 0.916666686535 0.027499999851 0.34999999404 0.34999999404 1.0
0.115100085735
0.833333313465 0.75 0.027499999851 0.5 0.75 0.0
0.189258128405
0.416666656733 0.34375 0.027499999851 0.10000000149 1.0 1.0
0.000182891264558
0.416666656733 0.34375 0.027499999851 0.25 0.25 0.0
0.0763816535473
0.416666656733 0.34375 0.027499999851 0.25 0.0500000007451 1.0
1.43048237078e-06
0.833333313465 0.833333313465 0.027499999851 0.649999976158 0.75 1.0
0.17442932725
0.416666656733 0.506250023842 0.027499999851 0.649999976158 0.75 0.0
0.0661680549383
0.833333313465 0.75 0.027499999851 0.5 1.0 0.0
0.211408793926
0.833333313465 0.916666686535 0.0500000007451 0.5 1.0 0.0
0.149686008692
0.416666656733 0.506250023842 0.027499999851 0.10000000149 0.5 1.0
0.0827236771584
0.833333313465 0.916666686535 0.027499999851 0.5 0.25 0.0
0.0535411834717
0.833333313465 0.833333313465 0.027499999851 0.25 0.15000000596 1.0
0.0304247140884
0.833333313465 0.75 0.10000000149 0.10000000149 0.5 1.0
0.000268508680165
0.833333313465 0.833333313465 0.027499999851 0.5 0.15000000596 1.0
0.0624456703663
0.833333313465 0.833333313465 0.0724999979138 0.5 0.5 1.0
0.100612014532
0.416666656733 0.416666656733 0.0825000032783 0.5 0.10000000149 1.0
0.0244713425636
0.416666656733 0.506250023842 0.027499999851 0.649999976158 0.75 1.0
0.14541696012
0.833333313465 0.833333313465 0.027499999851 0.10000000149 0.34999999404 1.0
0.0158381164074
0.416666656733 0.34375 0.027499999851 0.449999988079 0.40000000596 1.0
0.0148452222347
0.416666656733 0.34375 0.10000000149 0.10000000149 1.0 0.0
0.105646729469
0.833333313465 0.75 0.10000000149 0.10000000149 0.5 0.0
0.120179772377
0.833333313465 0.916666686535 0.027499999851 0.25 0.25 1.0
0.09239590168
0.416666656733 0.506250023842 0.027499999851 0.449999988079 0.40000000596 0.0
0.0202322602272
0.833333313465 0.833333313465 0.0825000032783 0.25 0.10000000149 1.0
0.0228868126869
0.416666656733 0.506250023842 0.0825000032783 0.25 0.10000000149 1.0
0.0855351388454
0.833333313465 0.75 0.0724999979138 0.25 0.5 1.0
0.0164286792278
0.416666656733 0.506250023842 0.0724999979138 0.25 0.10000000149 1.0
0.0860334038734
0.833333313465 0.75 0.10000000149 0.25 0.10000000149 1.0
0.00199778750539
0.833333313465 0.75 0.027499999851 0.10000000149 1.0 0.0
0.10701406002
0.833333313465 0.75 0.027499999851 0.5 0.10000000149 1.0
0.0182993113995
0.416666656733 0.416666656733 0.027499999851 0.25 0.25 1.0
0.0192974805832
0.833333313465 0.75 0.0825000032783 0.5 0.5 0.0
0.174897909164
0.416666656733 0.34375 0.027499999851 0.25 1.0 0.0
0.0919248461723
0.833333313465 0.916666686535 0.10000000149 0.25 1.0 1.0
0.0807681679726
0.833333313465 0.916666686535 0.10000000149 0.25 0.10000000149 1.0
0.0791258215904
0.833333313465 0.75 0.10000000149 0.10000000149 1.0 1.0
0.000551892444491
0.416666656733 0.416666656733 0.0724999979138 0.10000000149 0.10000000149 0.0
0.00687929987907
0.416666656733 0.34375 0.0825000032783 0.10000000149 0.5 1.0
3.29251633957e-06
0.833333313465 0.75 0.027499999851 0.0500000007451 0.0500000007451 1.0
0.0
0.416666656733 0.34375 0.027499999851 0.10000000149 0.40000000596 0.0
0.0766813457012
0.416666656733 0.416666656733 0.0724999979138 0.10000000149 0.10000000149 0.0
0.00687929987907
0.833333313465 0.75 0.027499999851 0.10000000149 0.0500000007451 0.0
0.0843638777733
0.833333313465 0.916666686535 0.027499999851 0.25 0.25 0.0
0.0153429806232
0.833333313465 0.833333313465 0.027499999851 0.449999988079 0.40000000596 1.0
0.0892946720123
0.833333313465 0.916666686535 0.027499999851 0.649999976158 0.75 0.0
0.161192983389
0.416666656733 0.416666656733 0.027499999851 0.649999976158 0.75 0.0
0.0957203805447
0.833333313465 0.833333313465 0.0500000007451 0.25 1.0 0.0
0.102800011635
0.416666656733 0.416666656733 0.027499999851 0.25 0.75 1.0
0.0314598828554
0.416666656733 0.34375 0.027499999851 0.5 0.34999999404 1.0
0.016285084188
0.833333313465 0.75 0.0724999979138 0.10000000149 0.10000000149 0.0
0.0887522101402
0.416666656733 0.34375 0.0825000032783 0.10000000149 1.0 1.0
3.27293528244e-05
0.416666656733 0.416666656733 0.027499999851 0.25 0.25 0.0
0.0221522450447
0.416666656733 0.416666656733 0.027499999851 0.25 0.25 1.0
0.0192974805832
0.416666656733 0.416666656733 0.027499999851 0.5 0.25 0.0
0.0427494347095
0.833333313465 0.75 0.0724999979138 0.5 1.0 0.0
0.228234738111
0.416666656733 0.416666656733 0.0724999979138 0.10000000149 0.5 0.0
0.0204427242279
0.833333313465 0.75 0.10000000149 0.5 0.5 1.0
0.0587410926819
0.833333313465 0.833333313465 0.10000000149 0.5 1.0 0.0
0.199389606714
0.833333313465 0.75 0.0500000007451 0.25 1.0 0.0
0.151173084974
0.833333313465 0.916666686535 0.027499999851 0.34999999404 0.34999999404 0.0
0.040547311306
0.416666656733 0.416666656733 0.027499999851 0.10000000149 0.25 1.0
0.00693267583847
0.833333313465 0.75 0.027499999851 0.15000000596 0.15000000596 1.0
0.000527108088136
0.833333313465 0.916666686535 0.027499999851 0.10000000149 0.0500000007451 0.0
5.56519808015e-08
0.416666656733 0.506250023842 0.0825000032783 0.25 1.0 1.0
0.0729819238186
0.416666656733 0.34375 0.0500000007451 0.5 1.0 0.0
0.12647792697
0.833333313465 0.916666686535 0.10000000149 0.5 1.0 0.0
0.166082054377
0.833333313465 0.75 0.0724999979138 0.25 1.0 1.0
0.0265915840864
0.833333313465 0.916666686535 0.027499999851 0.10000000149 0.0500000007451 1.0
0.082073867321
0.416666656733 0.34375 0.027499999851 0.10000000149 0.75 0.0
0.0800074934959
0.416666656733 0.506250023842 0.0500000007451 0.5 1.0 1.0
0.124712452292
0.833333313465 0.916666686535 0.027499999851 0.15000000596 0.15000000596 0.0
0.00126434117556
0.416666656733 0.416666656733 0.10000000149 0.10000000149 0.5 1.0
0.00405505299568
0.833333313465 0.75 0.10000000149 0.10000000149 1.0 1.0
0.000551892444491
0.833333313465 0.75 0.0724999979138 0.5 1.0 1.0
0.0924507081509
0.833333313465 0.916666686535 0.10000000149 0.5 0.5 1.0
0.139924436808
0.833333313465 0.75 0.027499999851 0.34999999404 0.34999999404 1.0
0.029446721077
0.833333313465 0.833333313465 0.027499999851 0.5 1.0 1.0
0.151217371225
0.833333313465 0.833333313465 0.0500000007451 0.25 1.0 1.0
0.062157869339
0.416666656733 0.506250023842 0.027499999851 0.10000000149 0.15000000596 0.0
-1.16723271049e-08
0.416666656733 0.506250023842 0.027499999851 0.5 0.40000000596 1.0
0.109000921249
0.416666656733 0.506250023842 0.027499999851 0.10000000149 1.0 1.0
0.0767360925674
0.416666656733 0.506250023842 0.10000000149 0.25 0.10000000149 1.0
0.0846647024155
0.416666656733 0.416666656733 0.027499999851 0.25 0.34999999404 0.0
0.026492357254
0.416666656733 0.416666656733 0.0724999979138 0.5 0.5 0.0
0.065139696002
0.416666656733 0.34375 0.027499999851 0.10000000149 0.0500000007451 1.0
0.0
0.416666656733 0.416666656733 0.0724999979138 0.5 1.0 1.0
0.0656247437
0.833333313465 0.75 0.027499999851 0.15000000596 0.15000000596 0.0
0.0869477987289
0.416666656733 0.506250023842 0.0825000032783 0.25 0.5 0.0
0.00844240933657
0.833333313465 0.833333313465 0.027499999851 0.34999999404 0.34999999404 1.0
0.0644689798355
0.833333313465 0.833333313465 0.10000000149 0.25 0.10000000149 1.0
0.0222077965736
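The block above is FANN's plain-text training format: the header 150 6 1 declares 150 input/output pairs with 6 inputs and 1 output each, followed by alternating input-vector and target lines. A minimal sketch of loading it and training a fresh net (the file name is assumed from the aggregated.fann symlink above; the stopping error is a guess):

#include <cstdio>
#include "fann.h"

int main() {
    struct fann_train_data *data =
        fann_read_train_from_file("aggregated.fann");
    if (!data) return 1;
    std::printf("%u pairs, %u inputs -> %u outputs\n",
                data->num_data, data->num_input, data->num_output);

    // 6-2-1 matches the second committed net below (layer_sizes=7 3 2,
    // which again counts one bias unit per layer).
    struct fann *ann = fann_create_standard(3, 6, 2, 1);
    fann_train_on_data(ann, data, 1000, 100, 0.001f);

    fann_destroy_train(data);
    fann_destroy(ann);
    return 0;
}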
FANN_FLO_2.1
num_layers=3
learning_rate=0.400000
connection_rate=1.000000
network_type=0
learning_momentum=0.000000
training_algorithm=2
train_error_function=1
train_stop_function=0
cascade_output_change_fraction=0.010000
quickprop_decay=-0.000100
quickprop_mu=1.750000
rprop_increase_factor=1.200000
rprop_decrease_factor=0.500000
rprop_delta_min=0.000000
rprop_delta_max=50.000000
rprop_delta_zero=0.100000
cascade_output_stagnation_epochs=12
cascade_candidate_change_fraction=0.010000
cascade_candidate_stagnation_epochs=12
cascade_max_out_epochs=150
cascade_min_out_epochs=50
cascade_max_cand_epochs=150
cascade_min_cand_epochs=50
cascade_num_candidate_groups=2
bit_fail_limit=3.49999994039535522461e-01
cascade_candidate_limit=1.00000000000000000000e+03
cascade_weight_multiplier=4.00000005960464477539e-01
cascade_activation_functions_count=10
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
cascade_activation_steepnesses_count=4
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
layer_sizes=7 3 2
scale_included=0
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (7, 3, 5.00000000000000000000e-01) (7, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00) (3, 3, 5.00000000000000000000e-01) (0, 3, 0.00000000000000000000e+00)
connections (connected_to_neuron, weight)=(0, -9.28587973117828369141e-01) (1, -3.94000977277755737305e-01) (2, 4.62552070617675781250e+00) (3, -4.20165491104125976562e+00) (4, -1.69443283230066299438e-02) (5, 7.23784983158111572266e-01) (6, 2.29117512702941894531e+00) (0, -1.99888372421264648438e+00) (1, -5.55471539497375488281e-01) (2, -4.97387351989746093750e+01) (3, 8.50402832031250000000e+00) (4, -8.35454368591308593750e+00) (5, -3.31979322433471679688e+00) (6, 4.44229030609130859375e+00) (7, -3.60311102867126464844e+00) (8, -1.50806224346160888672e+00) (9, -1.39886662364006042480e-01)