[libfann] 208/242: Documentation

Christian Kastner chrisk-guest at moszumanska.debian.org
Sat Oct 4 21:10:45 UTC 2014


This is an automated email from the git hooks/post-receive script.

chrisk-guest pushed a commit to tag Version2_0_0
in repository libfann.

commit 7348079b01304591a3eb63aa201bdab9ceea6b00
Author: Steffen Nissen <lukesky at diku.dk>
Date:   Sun Nov 20 20:33:30 2005 +0000

    Documentation
---
 examples/cascade_train.c    | 17 +++++++----------
 src/fann.c                  | 40 ++++++++++++++++++++--------------------
 src/fann_cascade.c          | 36 +++++++++++++-----------------------
 src/include/fann.h          | 12 +++++++++++-
 src/include/fann_data.h     |  7 ++++++-
 src/include/fann_error.h    | 10 +++++++++-
 src/include/fann_internal.h | 15 +++++++++++++++
 src/include/fann_io.h       |  5 ++++-
 src/include/fann_train.h    |  2 +-
 9 files changed, 86 insertions(+), 58 deletions(-)

diff --git a/examples/cascade_train.c b/examples/cascade_train.c
index 324a18c..b575d4b 100644
--- a/examples/cascade_train.c
+++ b/examples/cascade_train.c
@@ -51,9 +51,6 @@ int main()
 
 	printf("Reading data.\n");
 
-	train_data = fann_read_train_from_file("../benchmarks/datasets/parity8.train");
-	test_data = fann_read_train_from_file("../benchmarks/datasets/parity8.test");
-
 	train_data = fann_read_train_from_file("../benchmarks/datasets/pumadyn-32fm.train");
 	test_data = fann_read_train_from_file("../benchmarks/datasets/pumadyn-32fm.test");
 
@@ -69,9 +66,6 @@ int main()
 	train_data = fann_read_train_from_file("../benchmarks/datasets/diabetes.train");
 	test_data = fann_read_train_from_file("../benchmarks/datasets/diabetes.test");
 
-	train_data = fann_read_train_from_file("../benchmarks/datasets/two-spiral.train");
-	test_data = fann_read_train_from_file("../benchmarks/datasets/two-spiral.test");
-
 	train_data = fann_read_train_from_file("../benchmarks/datasets/gene.train");
 	test_data = fann_read_train_from_file("../benchmarks/datasets/gene.test");
 
@@ -90,18 +84,21 @@ int main()
 	train_data = fann_read_train_from_file("../benchmarks/datasets/building.train");
 	test_data = fann_read_train_from_file("../benchmarks/datasets/building.test");
 
+	train_data = fann_read_train_from_file("../benchmarks/datasets/robot.train");
+	test_data = fann_read_train_from_file("../benchmarks/datasets/robot.test");
+
 	train_data = fann_read_train_from_file("../benchmarks/datasets/two-spiral.train");
 	test_data = fann_read_train_from_file("../benchmarks/datasets/two-spiral.test");
 
-	train_data = fann_read_train_from_file("../benchmarks/datasets/robot.train");
-	test_data = fann_read_train_from_file("../benchmarks/datasets/robot.test");
+	train_data = fann_read_train_from_file("../benchmarks/datasets/parity8.train");
+	test_data = fann_read_train_from_file("../benchmarks/datasets/parity8.test");
 
 	fann_scale_train_data(train_data, 0, 1);
 	fann_scale_train_data(test_data, 0, 1);
 
 	printf("Creating network.\n");
 
-	ann = fann_create_shortcut(2, fann_num_input_train_data(train_data), fann_num_input_train_data(train_data));
+	ann = fann_create_shortcut(2, fann_num_input_train_data(train_data), fann_num_output_train_data(train_data));
 
 	fann_set_training_algorithm(ann, FANN_TRAIN_BATCH);
 	fann_set_training_algorithm(ann, FANN_TRAIN_QUICKPROP);
@@ -130,7 +127,7 @@ int main()
 	fann_set_cascade_max_cand_epochs(ann, 150);
 	fann_set_cascade_num_candidate_groups(ann, 1);
 
-	fann_set_callback(ann, print_callback);
+	/*fann_set_callback(ann, print_callback);*/
 
 	fann_print_parameters(ann);
 	/*fann_print_connections(ann); */
diff --git a/src/fann.c b/src/fann.c
index eb1fc18..da1a5d5 100644
--- a/src/fann.c
+++ b/src/fann.c
@@ -944,24 +944,24 @@ FANN_EXTERNAL void FANN_API fann_print_parameters(struct fann *ann)
 	unsigned int i;
 #endif
 
-	printf("Input layer                :%4d neurons, 1 bias\n", ann->num_input);
+	printf("Input layer                          :%4d neurons, 1 bias\n", ann->num_input);
 	for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer - 1; layer_it++)
 	{
 		if(ann->shortcut_connections)
 		{
-			printf("  Hidden layer             :%4d neurons, 0 bias\n",
+			printf("  Hidden layer                       :%4d neurons, 0 bias\n",
 				   layer_it->last_neuron - layer_it->first_neuron);
 		}
 		else
 		{
-			printf("  Hidden layer             :%4d neurons, 1 bias\n",
+			printf("  Hidden layer                       :%4d neurons, 1 bias\n",
 				   layer_it->last_neuron - layer_it->first_neuron - 1);
 		}
 	}
 	printf("Output layer                         :%4d neurons\n", ann->num_output);
 	printf("Total neurons and biases             :%4d\n", fann_get_total_neurons(ann));
 	printf("Total connections                    :%4d\n", ann->total_connections);
-	printf("Connection rate                      :  %5.2f\n", ann->connection_rate);
+	printf("Connection rate                      :%8.3f\n", ann->connection_rate);
 	printf("Shortcut connections                 :%4d\n", ann->shortcut_connections);
 #ifdef FIXEDFANN
 	printf("Decimal point                        :%4d\n", ann->decimal_point);
@@ -974,31 +974,31 @@ FANN_EXTERNAL void FANN_API fann_print_parameters(struct fann *ann)
 #ifdef FIXEDFANN
 	printf("Bit fail limit                       :%4d\n", ann->bit_fail_limit);
 #else
-	printf("Learning rate                        :  %5.2f\n", ann->learning_rate);
-	printf("Learning momentum                    :  %5.2f\n", ann->learning_momentum);
-	printf("Quickprop decay                      :  %9.6f\n", ann->quickprop_decay);
-	printf("Quickprop mu                         :  %5.2f\n", ann->quickprop_mu);
-	printf("RPROP increase factor                :  %5.2f\n", ann->rprop_increase_factor);
-	printf("RPROP decrease factor                :  %5.2f\n", ann->rprop_decrease_factor);
-	printf("RPROP delta min                      :  %5.2f\n", ann->rprop_delta_min);
-	printf("RPROP delta max                      :  %5.2f\n", ann->rprop_delta_max);
-	printf("Cascade output change fraction       :  %9.6f\n", ann->cascade_output_change_fraction);
-	printf("Cascade candidate change fraction    :  %9.6f\n", ann->cascade_candidate_change_fraction);
+	printf("Learning rate                        :%8.3f\n", ann->learning_rate);
+	printf("Learning momentum                    :%8.3f\n", ann->learning_momentum);
+	printf("Quickprop decay                      :%11.6f\n", ann->quickprop_decay);
+	printf("Quickprop mu                         :%8.3f\n", ann->quickprop_mu);
+	printf("RPROP increase factor                :%8.3f\n", ann->rprop_increase_factor);
+	printf("RPROP decrease factor                :%8.3f\n", ann->rprop_decrease_factor);
+	printf("RPROP delta min                      :%8.3f\n", ann->rprop_delta_min);
+	printf("RPROP delta max                      :%8.3f\n", ann->rprop_delta_max);
+	printf("Cascade output change fraction       :%11.6f\n", ann->cascade_output_change_fraction);
+	printf("Cascade candidate change fraction    :%11.6f\n", ann->cascade_candidate_change_fraction);
 	printf("Cascade output stagnation epochs     :%4d\n", ann->cascade_output_stagnation_epochs);
 	printf("Cascade candidate stagnation epochs  :%4d\n", ann->cascade_candidate_stagnation_epochs);
 	printf("Cascade max output epochs            :%4d\n", ann->cascade_max_out_epochs);
 	printf("Cascade max candidate epochs         :%4d\n", ann->cascade_max_cand_epochs);
-	printf("Cascade weight multiplier            :  %9.6f\n", ann->cascade_weight_multiplier);
-	printf("Cascade candidate limit              :  %9.6f\n", ann->cascade_candidate_limit);
+	printf("Cascade weight multiplier            :%8.3f\n", ann->cascade_weight_multiplier);
+	printf("Cascade candidate limit              :%8.3f\n", ann->cascade_candidate_limit);
 	for(i = 0; i < ann->cascade_activation_functions_count; i++)
-		printf("Cascade activation func[%d] :   %s\n", i,
+		printf("Cascade activation functions[%d]      :   %s\n", i,
 			FANN_ACTIVATIONFUNC_NAMES[ann->cascade_activation_functions[i]]);
 	for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
-		printf("Cascade activation steep[%d]:  %5.2f\n", i,
+		printf("Cascade activation steepnesses[%d]    :%8.3f\n", i,
 			ann->cascade_activation_steepnesses[i]);
 		
-	printf("Cascade candidate groups   :%4d\n", ann->cascade_num_candidate_groups);
-	printf("Cascade no. of candidates  :%4d\n", fann_get_cascade_num_candidates(ann));
+	printf("Cascade candidate groups             :%4d\n", ann->cascade_num_candidate_groups);
+	printf("Cascade no. of candidates            :%4d\n", fann_get_cascade_num_candidates(ann));
 #endif
 }
 
diff --git a/src/fann_cascade.c b/src/fann_cascade.c
index bb94c9a..a6f95ca 100644
--- a/src/fann_cascade.c
+++ b/src/fann_cascade.c
@@ -26,20 +26,6 @@
 /* #define CASCADE_DEBUG */
 /* #define CASCADE_DEBUG_FULL */
 
-int fann_train_outputs(struct fann *ann, struct fann_train_data *data, float desired_error);
-
-float fann_train_outputs_epoch(struct fann *ann, struct fann_train_data *data);
-
-int fann_train_candidates(struct fann *ann, struct fann_train_data *data);
-
-float fann_train_candidates_epoch(struct fann *ann, struct fann_train_data *data);
-
-void fann_install_candidate(struct fann *ann);
-
-int fann_initialize_candidates(struct fann *ann);
-
-void fann_set_shortcut_connections(struct fann *ann);
-
 void fann_print_connections_raw(struct fann *ann)
 {
 	unsigned int i;
@@ -71,7 +57,7 @@ FANN_EXTERNAL void FANN_API fann_cascadetrain_on_data(struct fann *ann, struct f
 
 	if(neurons_between_reports && ann->callback == NULL)
 	{
-		printf("Max neurons %6d. Desired error: %.6f\n", max_neurons, desired_error);
+		printf("Max neurons %3d. Desired error: %.6f\n", max_neurons, desired_error);
 	}
 
 	for(i = 1; i <= max_neurons; i++)
@@ -102,15 +88,22 @@ FANN_EXTERNAL void FANN_API fann_cascadetrain_on_data(struct fann *ann, struct f
 			if(ann->callback == NULL)
 			{
 				printf
-					("Neurons     %6d. Current error: %.6f. Total error: %.6f. Epochs %6d. Bit fail %d.\n",
+					("Neurons     %3d. Current error: %.6f. Total error:%8.4f. Epochs %5d. Bit fail %3d",
 					 i, error, ann->MSE_value, total_epochs, ann->num_bit_fail);
+				if((ann->last_layer-2) != ann->first_layer)
+				{
+					printf(". candidate steepness %.2f. function %s", 
+					   (ann->last_layer-2)->first_neuron->activation_steepness,
+					   FANN_ACTIVATIONFUNC_NAMES[(ann->last_layer-2)->first_neuron->activation_function]);
+				}
+				printf("\n");
 			}
 			else if((*ann->callback) (ann, data, max_neurons, 
 				neurons_between_reports, desired_error, total_epochs) == -1) 
 			{
 				/* you can break the training by returning -1 */
 				break;
-			}
+			}					 
 		}
 
 #ifdef CASCADE_DEBUG_FULL
@@ -153,7 +146,7 @@ FANN_EXTERNAL void FANN_API fann_cascadetrain_on_data(struct fann *ann, struct f
 
 	if(neurons_between_reports && ann->callback == NULL)
 	{
-		printf("Train outputs       Current error: %.6f. Epochs %6d\n", fann_get_MSE(ann),
+		printf("Train outputs    Current error: %.6f. Epochs %6d\n", fann_get_MSE(ann),
 			   total_epochs);
 	}
 
@@ -446,7 +439,6 @@ int fann_initialize_candidates(struct fann *ann)
 				neurons[candidate_index].value = 0;
 				neurons[candidate_index].sum = 0;
 				
-				/* TODO should be some kind of parameter (random?) */
 				neurons[candidate_index].activation_function =
 					ann->cascade_activation_functions[i];
 				neurons[candidate_index].activation_steepness =
@@ -920,13 +912,11 @@ void fann_add_candidate_neuron(struct fann *ann, struct fann_layer *layer)
 	neuron_place->activation_steepness = candidate->activation_steepness;
 	neuron_place->last_con = (neuron_place + 1)->first_con;
 	neuron_place->first_con = neuron_place->last_con - num_connections_in;
+#ifdef CASCADE_DEBUG_FULL
 	printf("neuron[%d] = weights[%d ... %d] activation: %s, steepness: %f\n",
 		   neuron_place - ann->first_layer->first_neuron, neuron_place->first_con,
 		   neuron_place->last_con - 1, FANN_ACTIVATIONFUNC_NAMES[neuron_place->activation_function],
-		   neuron_place->activation_steepness);
-#ifdef CASCADE_DEBUG_FULL
-	printf("neuron[%d] = weights[%d ... %d]\n", neuron_place - ann->first_layer->first_neuron,
-		   neuron_place->first_con, neuron_place->last_con - 1);
+		   neuron_place->activation_steepness);/* TODO remove */
 #endif
 
 	candidate_con = candidate->first_con;
diff --git a/src/include/fann.h b/src/include/fann.h
index f12c82d..77f632f 100644
--- a/src/include/fann.h
+++ b/src/include/fann.h
@@ -23,7 +23,17 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
    floatfann.h was included.
 */ 
 
-/* Section: FANN Creation/Execution */
+/* Section: FANN Creation/Execution
+   
+   The FANN library is designed to be very easy to use. 
+   A feedforward ann can be created by a simple <fann_create_standard> function, while
+   other ANNs can be created just as easily. The ANNs can be trained by <fann_train_on_file>
+   and executed by <fann_run>.
+   
+   All of this can be done without much knowledge of the internals of ANNs, although the ANNs created will
+   still be powerful and effective. If you have more knowledge about ANNs, and desire more control, almost
+   every part of the ANNs can be parameterized to create specialized and highly optimal ANNs.
+ */
 /* Group: Creation, Destruction & Execution */
 	
 #ifndef FANN_INCLUDE
diff --git a/src/include/fann_data.h b/src/include/fann_data.h
index a72ce33..4d7f240 100644
--- a/src/include/fann_data.h
+++ b/src/include/fann_data.h
@@ -22,7 +22,12 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
 #include <stdio.h>
 
-/* Section: FANN Datatypes */
+/* Section: FANN Datatypes
+
+   The two main datatypes used in the fann library are <struct fann>, 
+   which represents an artificial neural network, and <struct fann_train_data>,
+   which represents training data.
+ */
 
 
 /* Type: fann_type
diff --git a/src/include/fann_error.h b/src/include/fann_error.h
index 086a0cc..bd007a1 100644
--- a/src/include/fann_error.h
+++ b/src/include/fann_error.h
@@ -25,7 +25,15 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 #define FANN_ERRSTR_MAX 128
 struct fann_error;
 
-/* Section: FANN Error Handling */
+/* Section: FANN Error Handling
+
+   Errors from the fann library are usually reported on stderr. 
+   It is however possible to redirect these error messages to a file, 
+   or completely ignore them with the <fann_set_error_log> function.
+   
+   It is also possible to inspect the last error message by using the
+   <fann_get_errno> and <fann_get_errstr> functions.
+ */
 
 /* Enum: fann_errno_enum
 	Used to define error events on <struct fann> and <struct fann_train_data>. 
diff --git a/src/include/fann_internal.h b/src/include/fann_internal.h
index ad0835b..9325be0 100644
--- a/src/include/fann_internal.h
+++ b/src/include/fann_internal.h
@@ -102,6 +102,21 @@ fann_type fann_activation_derived(unsigned int activation_function,
 
 int fann_desired_error_reached(struct fann *ann, float desired_error);
 
+/* Some functions for cascade */
+int fann_train_outputs(struct fann *ann, struct fann_train_data *data, float desired_error);
+
+float fann_train_outputs_epoch(struct fann *ann, struct fann_train_data *data);
+
+int fann_train_candidates(struct fann *ann, struct fann_train_data *data);
+
+float fann_train_candidates_epoch(struct fann *ann, struct fann_train_data *data);
+
+void fann_install_candidate(struct fann *ann);
+
+int fann_initialize_candidates(struct fann *ann);
+
+void fann_set_shortcut_connections(struct fann *ann);
+
 /* called fann_max, in order to not interferre with predefined versions of max */
 #define fann_max(x, y) (((x) > (y)) ? (x) : (y))
 #define fann_min(x, y) (((x) < (y)) ? (x) : (y))
diff --git a/src/include/fann_io.h b/src/include/fann_io.h
index 66c95a1..4d96847 100644
--- a/src/include/fann_io.h
+++ b/src/include/fann_io.h
@@ -20,7 +20,10 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 #ifndef __fann_io_h__
 #define __fann_io_h__
 	
-/* Section: FANN File Input/Output */	
+/* Section: FANN File Input/Output 
+   
+   It is possible to save an entire ann to a file with <fann_save> for future loading with <fann_create_from_file>.
+ */	
 
 /* Group: File Input and Output */	
 
diff --git a/src/include/fann_train.h b/src/include/fann_train.h
index 3a78183..4ac963c 100644
--- a/src/include/fann_train.h
+++ b/src/include/fann_train.h
@@ -42,7 +42,7 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 	Structure used to store data, for use with training.
 	
 	The data inside this structure should never be manipulated directly, but should use some 
-	of the supplied functions in <Training Data>.
+	of the supplied functions in <Training Data Manipulation>.
 	
 	The training data structure is very useful for storing data during training and testing of a
 	neural network.

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/libfann.git



More information about the debian-science-commits mailing list