[libfann] 172/242: python and compat_time patches

Christian Kastner chrisk-guest at moszumanska.debian.org
Sat Oct 4 21:10:39 UTC 2014


This is an automated email from the git hooks/post-receive script.

chrisk-guest pushed a commit to tag Version2_0_0
in repository libfann.

commit c527051f3cb007383255d880023487d07a232f3c
Author: Steffen Nissen <lukesky at diku.dk>
Date:   Tue Jan 4 07:36:56 2005 +0000

    python and compat_time patches
---
 python/README                   | 17 +++++++++
 python/examples/mushroom.py     | 12 ++-----
 python/examples/simple_train.py |  2 +-
 python/fann.py                  | 76 ++++++++++++++++-----------------------
 python/fann_helper.c            |  5 +++
 python/libfann.i                | 48 ++++++++++++++++---------
 python/libfann.py               | 79 ++++++++++++++++++++++++++++++++++-------
 python/makefile.gnu             |  2 ++
 python/setup.py                 |  2 +-
 src/fann.c                      |  5 +++
 src/include/fann.h              | 14 +++++++-
 11 files changed, 175 insertions(+), 87 deletions(-)
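
The user-visible effect of this commit on the Python binding, sketched from the
updated simple_train.py and fann.py below (the numeric parameters are only
illustrative): activation and training constants lose their FANN_ prefix in the
fann module, and explicit destroy() calls are replaced by automatic cleanup in
__del__.

    import fann

    ann = fann.create(1.0, 0.7, (2, 4, 1))
    ann.set_activation_function_output(fann.SIGMOID_SYMMETRIC_STEPWISE)
    ann.set_training_algorithm(fann.TRAIN_INCREMENTAL)
    ann.train_on_file("datasets/xor.data", 100000, 1000, 0.0001)
    ann.save("xor_float.net")
    # no ann.destroy() any more -- the underlying C struct is freed
    # when the Python object is garbage collected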

diff --git a/python/README b/python/README
index 31073fd..c0b6af8 100644
--- a/python/README
+++ b/python/README
@@ -1,6 +1,9 @@
 This python binding is provided by Vincenzo Di Massa <hawk.it at tiscalinet.it>
 and Gil Megidish <gil at megidish.net>
 
+Instructions for Windows:
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
 MAKE
 Make sure to make the fann library first. You are required to have
 swig and python development files installed. After you compiled the
@@ -24,3 +27,17 @@ version.
 
 USAGE
 Just import fann.
+
+Instructions for Unix/Linux:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+First build and install the fann library. Then run:
+
+./setup_unix.py build
+./setup_unix.py install
+
+Install alone will work too, if you run it twice (a small bug).
+The examples/ (not installed) should work now after you copy the datasets:
+
+mkdir examples/datasets
+cp ../examples/xor.data ../benchmarks/datasets/mushroom.* examples/datasets/
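
A quick smoke test after the Unix install described above (a sketch, not part of
the patch; it assumes the wrapper ended up on the Python path):

    import fann
    print "pyfann loaded; SIGMOID_STEPWISE =", fann.SIGMOID_STEPWISE
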
diff --git a/python/examples/mushroom.py b/python/examples/mushroom.py
index 17f2055..b4160cc 100755
--- a/python/examples/mushroom.py
+++ b/python/examples/mushroom.py
@@ -20,9 +20,9 @@ ann = fann.create(connection_rate, learning_rate, (train_data.get_num_input(), n
 
 # start training the network
 print "Training network"
-ann.set_activation_function_hidden(fann.FANN_SIGMOID_SYMMETRIC_STEPWISE)
-ann.set_activation_function_output(fann.FANN_SIGMOID_STEPWISE)
-ann.set_training_algorithm(fann.FANN_TRAIN_INCREMENTAL)
+ann.set_activation_function_hidden(fann.SIGMOID_SYMMETRIC_STEPWISE)
+ann.set_activation_function_output(fann.SIGMOID_STEPWISE)
+ann.set_training_algorithm(fann.TRAIN_INCREMENTAL)
 	
 ann.train_on_data(train_data, max_iterations, iterations_between_reports, desired_error)
 	
@@ -40,9 +40,3 @@ print "MSE error on test data: %f" % ann.get_MSE()
 print "Saving network"
 ann.save("mushroom_float.net")
 
-# blow it all up
-print "Cleaning up."
-ann.destroy()
-test_data.destroy()
-train_data.destroy()
-
diff --git a/python/examples/simple_train.py b/python/examples/simple_train.py
index 946873a..30c21e2 100755
--- a/python/examples/simple_train.py
+++ b/python/examples/simple_train.py
@@ -12,9 +12,9 @@ max_iterations = 100000
 iterations_between_reports = 1000
 
 ann = fann.create(connection_rate, learning_rate, (num_input, num_neurons_hidden, num_output))
+ann.set_activation_function_output(fann.SIGMOID_SYMMETRIC_STEPWISE)
 
 ann.train_on_file("datasets/xor.data", max_iterations, iterations_between_reports, desired_error)
 
 ann.save("xor_float.net")
 
-ann.destroy()
diff --git a/python/fann.py b/python/fann.py
index 7711e30..a472b48 100755
--- a/python/fann.py
+++ b/python/fann.py
@@ -21,30 +21,23 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
 import libfann
 
-# Activation function
-FANN_LINEAR = 0
-FANN_THRESHOLD = 1
-FANN_THRESHOLD_SYMMETRIC = 2
-FANN_SIGMOID = 3
-FANN_SIGMOID_STEPWISE = 4                # default
-FANN_SIGMOID_SYMMETRIC = 5
-FANN_SIGMOID_SYMMETRIC_STEPWISE = 6
-FANN_GAUSSIAN = 7
-FANN_GAUSSIAN_STEPWISE = 8
-FANN_ELLIOT = 9                          # not implemented yet
-FANN_ELLIOT_SYMMETRIC = 10               # not implemented yet
-
-# Training algorithm
-FANN_TRAIN_INCREMENTAL = 0
-FANN_TRAIN_BATCH = 1
-FANN_TRAIN_RPROP = 2
-FANN_TRAIN_QUICKPROP = 3
+# import all FANN_ constants without FANN_ prefix
+for name, value in libfann.__dict__.iteritems():
+    if name.startswith('FANN_') and not name.endswith('_NAMES'):
+        globals()[name[5:]] = value
+del name, value
 
 class fann_class:
 
     def __init__(self, ann):
+        """
+        Never call this directly.
+        """
         self.__ann = ann
-    
+
+    def __del__(self):
+        libfann.fann_destroy(self.__ann)
+
     def get_native_object(self):
         return self.__train_data
 
@@ -54,13 +47,6 @@ class fann_class:
         """
         return libfann.fann_run(self.__ann, input)
 
-    def destroy(self):
-        """ 
-        Destructs the entire network.
-        Be sure to call this function after finished using the network.
-        """
-        libfann.fann_destroy(self.__ann)
-
     def randomize_weights(self, min_weight, max_weight):
         """
         Randomize weights (from the beginning the weights are random between -0.1 and 0.1)
@@ -198,31 +184,31 @@ class fann_class:
         """
         libfann.fann_set_activation_function_output(self.__ann, activation_function)
 
-    def get_activation_hidden_steepness(self):
+    def get_activation_steepness_hidden(self):
         """
         Get the steepness parameter for the sigmoid function used in the hidden layers.
         """
-        return libfann.get_activation_hidden_steepness(self.__ann)
+        return libfann.get_activation_steepness_hidden(self.__ann)
 
-    def set_activation_hidden_steepness(self, steepness):
+    def set_activation_steepness_hidden(self, steepness):
         """
         Set the steepness of the sigmoid function used in the hidden layers.
         Only usefull if sigmoid function is used in the hidden layers (default 0.5).
         """
-        libfann.fann_set_activation_hidden_steepness(self.__ann, steepness)
+        libfann.fann_set_activation_steepness_hidden(self.__ann, steepness)
 
-    def get_activation_output_steepness(self):
+    def get_activation_steepness_output(self):
         """
         Get the steepness parameter for the sigmoid function used in the output layer.
         """
-        return libfann.fann_get_activation_output_steepness(self.__ann)
+        return libfann.fann_get_activation_steepness_output(self.__ann)
 
-    def set_activation_output_steepness(self, steepness):
+    def set_activation_steepness_output(self, steepness):
         """
         Set the steepness of the sigmoid function used in the output layer.
         Only usefull if sigmoid function is used in the output layer (default 0.5).
         """
-        libfann.fann_set_activation_output_steepness(self.__ann, steepness)
+        libfann.fann_set_activation_steepness_output(self.__ann, steepness)
 
     def train_on_data(self, data, max_epochs, epochs_between_reports, desired_error):
         """
@@ -269,7 +255,12 @@ class fann_class:
 class train_class:
 
     def __init__(self, train_data):
+        """
+        Never call this directly.
+        """
         self.__train_data = train_data
+    def __del__(self):
+        libfann.fann_destroy_train(self.__train_data)
 
     def get_native_object(self):
         return self.__train_data
@@ -289,13 +280,6 @@ class train_class:
     def get_output(self, index):
 	return libfann.get_train_data_output(self.__train_data, index);
 
-    def destroy(self):
-        """
-        Destructs the training data
-        Be sure to call this function after finished using the training data.
-        """
-        libfann.fann_destroy_train(self.__train_data)
-
     def shuffle(self):
         """
         Shuffles training data, randomizing the order
@@ -317,12 +301,10 @@ class train_class:
 
     def merge(self, other):
         """
-        Merges training data into a single struct
+        Merges training data into a new struct
         """
         outcome = libfann.fann_merge_train_data(self.__train_data, other.get_native_object())
-        self.destroy()
-        self.__train_data = outcome
-        return self
+        return train_class(outcome)
 
     def duplicate(self):
         """
@@ -345,6 +327,8 @@ def create(connection_rate, learning_rate, layers):
     When running the network, the bias nodes always emits 1
     """
     ann = libfann.fann_create_array(connection_rate, learning_rate, len(layers), layers)
+    if libfann.fann_is_NULL(ann):
+        return None # probably won't happen
     return fann_class(ann)
 
 def create_from_file(filename):
@@ -352,6 +336,8 @@ def create_from_file(filename):
     Constructs a backpropagation neural network from a configuration file.
     """
     ann = libfann.fann_create_from_file(filename)
+    if libfann.fann_is_NULL(ann):
+    raise IOError, "Could not load ann from file '%s'" % filename
     return fann_class(ann)
 
 def read_train_from_file(filename):
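
Taken together, the fann.py changes above alter object lifetime and error
handling. A sketch of the new behaviour (file names are placeholders; the xor
dataset path follows the README instructions):

    import fann

    # loading now fails loudly instead of wrapping a NULL pointer
    try:
        ann = fann.create_from_file("missing.net")
    except IOError, e:
        print e

    # merge() no longer destroys and reuses its receiver; it returns a new object
    a = fann.read_train_from_file("datasets/xor.data")
    b = fann.read_train_from_file("datasets/xor.data")
    merged = a.merge(b)   # a and b stay usable; all three are freed by __del__
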
diff --git a/python/fann_helper.c b/python/fann_helper.c
index 3b9fd6d..e240557 100644
--- a/python/fann_helper.c
+++ b/python/fann_helper.c
@@ -68,3 +68,8 @@ PyObject *get_train_data_output(struct fann_train_data *t, int row)
 	return get_row_from_double_array(t->output, row, t->num_output);
 }
 
+
+int fann_is_NULL(struct fann *ann)
+{
+	return ann == NULL ? 1 : 0;
+}
diff --git a/python/libfann.i b/python/libfann.i
index 4493c77..639c6a9 100755
--- a/python/libfann.i
+++ b/python/libfann.i
@@ -7,15 +7,16 @@
 #include "../src/include/fann.h"
 %}
 
-%typemap(in) fann_type[ANY] {
+%define CHECKED_FLOAT_ARRAY(typemap_name, expected_length)
+%typemap(in) typemap_name {
   int i;
   if (!PySequence_Check($input)) {
     PyErr_SetString(PyExc_ValueError,"Expected a sequence");
-    return NULL;
+    SWIG_fail;
   }
-  if (PySequence_Length($input) == 0) {
-    PyErr_SetString(PyExc_ValueError,"Size mismatch. Expected some elements");
-    return NULL;
+  if (PySequence_Length($input) != expected_length) {
+    PyErr_SetString(PyExc_ValueError,"Sequence has wrong length");
+    SWIG_fail;
   }
   $1 = (float *) malloc(PySequence_Length($input)*sizeof(float));
   for (i = 0; i < PySequence_Length($input); i++) {
@@ -24,20 +25,29 @@
       $1[i] = (float) PyFloat_AsDouble(o);
     } else {
       PyErr_SetString(PyExc_ValueError,"Sequence elements must be numbers");      
-      return NULL;
+      Py_DECREF(o);
+      SWIG_fail;
     }
+    Py_DECREF(o);
   }
 }
+%typemap(freearg) typemap_name {
+   if ($1) free($1);
+}
+%enddef
+
+CHECKED_FLOAT_ARRAY(fann_type *input, arg1->num_input)
+CHECKED_FLOAT_ARRAY(fann_type *desired_output, arg1->num_output)
 
 %typemap(in) int[ANY] {
   int i;
   if (!PySequence_Check($input)) {
     PyErr_SetString(PyExc_ValueError,"Expected a sequence");
-    return NULL;
+    SWIG_fail;
   }
   if (PySequence_Length($input) == 0) {
     PyErr_SetString(PyExc_ValueError,"Size mismatch. Expected some elements");
-    return NULL;
+    SWIG_fail;
   }
   $1 = (unsigned int *) malloc(PySequence_Length($input)*sizeof(unsigned int));
   for (i = 0; i < PySequence_Length($input); i++) {
@@ -46,37 +56,41 @@
       $1[i] = (int) PyInt_AsLong(o);
     } else {
       PyErr_SetString(PyExc_ValueError,"Sequence elements must be numbers");      
-      return NULL;
+      Py_DECREF(o);
+      SWIG_fail;
     }
+    Py_DECREF(o);
   }
 }
-
-%typemap(freearg) fann_type* {
+%typemap(freearg) int[ANY] {
    if ($1) free($1);
 }
+%apply int[ANY] {int *, unsigned int*};
+
+typedef double fann_type; 
 
 %typemap(out) PyObject* {
   $result = $1;
 }
 
-%apply fann_type[ANY] {fann_type *};
-%apply int[ANY] {int *, unsigned int*};
+// create_array is used instead
+%ignore fann_create;
+%ignore fann_create_shortcut;
 
-#define FANN_INCLUDE
-%varargs(10,int n = 0) fann_create;
 %rename(fann_run_old) fann_run;
 %rename(fann_run) fann_run2;
 
 %rename(fann_test_old) fann_test;
 %rename(fann_test) fann_test2;
 
+#define FANN_INCLUDE
 %include "../src/include/fann.h"
 %include "../src/include/fann_data.h"
+%include "../src/include/fann_activation.h"
 
 // Helper functions
 PyObject* fann_run2(struct fann *ann, fann_type *input);
 PyObject* fann_test2(struct fann *ann, fann_type *input, fann_type *desired_output);
 PyObject* get_train_data_input(struct fann_train_data *ann, int row);
 PyObject* get_train_data_output(struct fann_train_data *ann, int row);
-
-
+int fann_is_NULL(struct fann *ann);
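
Seen from Python, the new CHECKED_FLOAT_ARRAY typemap rejects an input sequence
whose length does not match the network's num_input and raises ValueError
(previously only an empty sequence was caught). A sketch; the two-input network
and its parameters are only an illustration:

    import fann

    ann = fann.create(1.0, 0.7, (2, 3, 1))
    print ann.run([0.0, 1.0])   # length matches num_input
    try:
        ann.run([0.0])          # wrong length
    except ValueError, e:
        print e                 # "Sequence has wrong length"
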
diff --git a/python/libfann.py b/python/libfann.py
index 2d85db7..9dbe996 100644
--- a/python/libfann.py
+++ b/python/libfann.py
@@ -32,12 +32,8 @@ del types
 
 NULL = _libfann.NULL
 
-fann_create = _libfann.fann_create
-
 fann_create_array = _libfann.fann_create_array
 
-fann_create_shortcut = _libfann.fann_create_shortcut
-
 fann_create_shortcut_array = _libfann.fann_create_shortcut_array
 
 fann_run_old = _libfann.fann_run_old
@@ -94,6 +90,8 @@ fann_save_train = _libfann.fann_save_train
 
 fann_save_train_to_fixed = _libfann.fann_save_train_to_fixed
 
+fann_cascadetrain_on_data_callback = _libfann.fann_cascadetrain_on_data_callback
+
 fann_print_parameters = _libfann.fann_print_parameters
 
 fann_get_training_algorithm = _libfann.fann_get_training_algorithm
@@ -182,15 +180,15 @@ class fann_neuron(_object):
     __getattr__ = lambda self, name: _swig_getattr(self, fann_neuron, name)
     def __repr__(self):
         return "<C fann_neuron instance at %s>" % (self.this,)
-    __swig_setmethods__["weights"] = _libfann.fann_neuron_weights_set
-    __swig_getmethods__["weights"] = _libfann.fann_neuron_weights_get
-    if _newclass:weights = property(_libfann.fann_neuron_weights_get, _libfann.fann_neuron_weights_set)
-    __swig_setmethods__["connected_neurons"] = _libfann.fann_neuron_connected_neurons_set
-    __swig_getmethods__["connected_neurons"] = _libfann.fann_neuron_connected_neurons_get
-    if _newclass:connected_neurons = property(_libfann.fann_neuron_connected_neurons_get, _libfann.fann_neuron_connected_neurons_set)
-    __swig_setmethods__["num_connections"] = _libfann.fann_neuron_num_connections_set
-    __swig_getmethods__["num_connections"] = _libfann.fann_neuron_num_connections_get
-    if _newclass:num_connections = property(_libfann.fann_neuron_num_connections_get, _libfann.fann_neuron_num_connections_set)
+    __swig_setmethods__["first_con"] = _libfann.fann_neuron_first_con_set
+    __swig_getmethods__["first_con"] = _libfann.fann_neuron_first_con_get
+    if _newclass:first_con = property(_libfann.fann_neuron_first_con_get, _libfann.fann_neuron_first_con_set)
+    __swig_setmethods__["last_con"] = _libfann.fann_neuron_last_con_set
+    __swig_getmethods__["last_con"] = _libfann.fann_neuron_last_con_get
+    if _newclass:last_con = property(_libfann.fann_neuron_last_con_get, _libfann.fann_neuron_last_con_set)
+    __swig_setmethods__["sum"] = _libfann.fann_neuron_sum_set
+    __swig_getmethods__["sum"] = _libfann.fann_neuron_sum_get
+    if _newclass:sum = property(_libfann.fann_neuron_sum_get, _libfann.fann_neuron_sum_set)
     __swig_setmethods__["value"] = _libfann.fann_neuron_value_set
     __swig_getmethods__["value"] = _libfann.fann_neuron_value_get
     if _newclass:value = property(_libfann.fann_neuron_value_get, _libfann.fann_neuron_value_set)
@@ -277,6 +275,12 @@ class fann(_object):
     __swig_setmethods__["num_output"] = _libfann.fann_num_output_set
     __swig_getmethods__["num_output"] = _libfann.fann_num_output_get
     if _newclass:num_output = property(_libfann.fann_num_output_get, _libfann.fann_num_output_set)
+    __swig_setmethods__["weights"] = _libfann.fann_weights_set
+    __swig_getmethods__["weights"] = _libfann.fann_weights_get
+    if _newclass:weights = property(_libfann.fann_weights_get, _libfann.fann_weights_set)
+    __swig_setmethods__["connections"] = _libfann.fann_connections_set
+    __swig_getmethods__["connections"] = _libfann.fann_connections_get
+    if _newclass:connections = property(_libfann.fann_connections_get, _libfann.fann_connections_set)
     __swig_setmethods__["train_errors"] = _libfann.fann_train_errors_set
     __swig_getmethods__["train_errors"] = _libfann.fann_train_errors_get
     if _newclass:train_errors = property(_libfann.fann_train_errors_get, _libfann.fann_train_errors_set)
@@ -319,9 +323,39 @@ class fann(_object):
     __swig_setmethods__["MSE_value"] = _libfann.fann_MSE_value_set
     __swig_getmethods__["MSE_value"] = _libfann.fann_MSE_value_get
     if _newclass:MSE_value = property(_libfann.fann_MSE_value_get, _libfann.fann_MSE_value_set)
+    __swig_setmethods__["num_bit_fail"] = _libfann.fann_num_bit_fail_set
+    __swig_getmethods__["num_bit_fail"] = _libfann.fann_num_bit_fail_get
+    if _newclass:num_bit_fail = property(_libfann.fann_num_bit_fail_get, _libfann.fann_num_bit_fail_set)
     __swig_setmethods__["train_error_function"] = _libfann.fann_train_error_function_set
     __swig_getmethods__["train_error_function"] = _libfann.fann_train_error_function_get
     if _newclass:train_error_function = property(_libfann.fann_train_error_function_get, _libfann.fann_train_error_function_set)
+    __swig_setmethods__["cascade_change_fraction"] = _libfann.fann_cascade_change_fraction_set
+    __swig_getmethods__["cascade_change_fraction"] = _libfann.fann_cascade_change_fraction_get
+    if _newclass:cascade_change_fraction = property(_libfann.fann_cascade_change_fraction_get, _libfann.fann_cascade_change_fraction_set)
+    __swig_setmethods__["cascade_stagnation_epochs"] = _libfann.fann_cascade_stagnation_epochs_set
+    __swig_getmethods__["cascade_stagnation_epochs"] = _libfann.fann_cascade_stagnation_epochs_get
+    if _newclass:cascade_stagnation_epochs = property(_libfann.fann_cascade_stagnation_epochs_get, _libfann.fann_cascade_stagnation_epochs_set)
+    __swig_setmethods__["cascade_num_candidates"] = _libfann.fann_cascade_num_candidates_set
+    __swig_getmethods__["cascade_num_candidates"] = _libfann.fann_cascade_num_candidates_get
+    if _newclass:cascade_num_candidates = property(_libfann.fann_cascade_num_candidates_get, _libfann.fann_cascade_num_candidates_set)
+    __swig_setmethods__["cascade_best_candidate"] = _libfann.fann_cascade_best_candidate_set
+    __swig_getmethods__["cascade_best_candidate"] = _libfann.fann_cascade_best_candidate_get
+    if _newclass:cascade_best_candidate = property(_libfann.fann_cascade_best_candidate_get, _libfann.fann_cascade_best_candidate_set)
+    __swig_setmethods__["cascade_candidate_limit"] = _libfann.fann_cascade_candidate_limit_set
+    __swig_getmethods__["cascade_candidate_limit"] = _libfann.fann_cascade_candidate_limit_get
+    if _newclass:cascade_candidate_limit = property(_libfann.fann_cascade_candidate_limit_get, _libfann.fann_cascade_candidate_limit_set)
+    __swig_setmethods__["cascade_weight_multiplier"] = _libfann.fann_cascade_weight_multiplier_set
+    __swig_getmethods__["cascade_weight_multiplier"] = _libfann.fann_cascade_weight_multiplier_get
+    if _newclass:cascade_weight_multiplier = property(_libfann.fann_cascade_weight_multiplier_get, _libfann.fann_cascade_weight_multiplier_set)
+    __swig_setmethods__["cascade_candidate_scores"] = _libfann.fann_cascade_candidate_scores_set
+    __swig_getmethods__["cascade_candidate_scores"] = _libfann.fann_cascade_candidate_scores_get
+    if _newclass:cascade_candidate_scores = property(_libfann.fann_cascade_candidate_scores_get, _libfann.fann_cascade_candidate_scores_set)
+    __swig_setmethods__["total_neurons_allocated"] = _libfann.fann_total_neurons_allocated_set
+    __swig_getmethods__["total_neurons_allocated"] = _libfann.fann_total_neurons_allocated_get
+    if _newclass:total_neurons_allocated = property(_libfann.fann_total_neurons_allocated_get, _libfann.fann_total_neurons_allocated_set)
+    __swig_setmethods__["total_connections_allocated"] = _libfann.fann_total_connections_allocated_set
+    __swig_getmethods__["total_connections_allocated"] = _libfann.fann_total_connections_allocated_get
+    if _newclass:total_connections_allocated = property(_libfann.fann_total_connections_allocated_get, _libfann.fann_total_connections_allocated_set)
     __swig_setmethods__["quickprop_decay"] = _libfann.fann_quickprop_decay_set
     __swig_getmethods__["quickprop_decay"] = _libfann.fann_quickprop_decay_get
     if _newclass:quickprop_decay = property(_libfann.fann_quickprop_decay_get, _libfann.fann_quickprop_decay_set)
@@ -340,6 +374,9 @@ class fann(_object):
     __swig_setmethods__["rprop_delta_max"] = _libfann.fann_rprop_delta_max_set
     __swig_getmethods__["rprop_delta_max"] = _libfann.fann_rprop_delta_max_get
     if _newclass:rprop_delta_max = property(_libfann.fann_rprop_delta_max_get, _libfann.fann_rprop_delta_max_set)
+    __swig_setmethods__["rprop_delta_zero"] = _libfann.fann_rprop_delta_zero_set
+    __swig_getmethods__["rprop_delta_zero"] = _libfann.fann_rprop_delta_zero_get
+    if _newclass:rprop_delta_zero = property(_libfann.fann_rprop_delta_zero_get, _libfann.fann_rprop_delta_zero_set)
     __swig_setmethods__["train_slopes"] = _libfann.fann_train_slopes_set
     __swig_getmethods__["train_slopes"] = _libfann.fann_train_slopes_get
     if _newclass:train_slopes = property(_libfann.fann_train_slopes_get, _libfann.fann_train_slopes_set)
@@ -447,6 +484,17 @@ FANN_TRAIN_RPROP = _libfann.FANN_TRAIN_RPROP
 FANN_TRAIN_QUICKPROP = _libfann.FANN_TRAIN_QUICKPROP
 FANN_ERRORFUNC_LINEAR = _libfann.FANN_ERRORFUNC_LINEAR
 FANN_ERRORFUNC_TANH = _libfann.FANN_ERRORFUNC_TANH
+FANN_LINEAR = _libfann.FANN_LINEAR
+FANN_THRESHOLD = _libfann.FANN_THRESHOLD
+FANN_THRESHOLD_SYMMETRIC = _libfann.FANN_THRESHOLD_SYMMETRIC
+FANN_SIGMOID = _libfann.FANN_SIGMOID
+FANN_SIGMOID_STEPWISE = _libfann.FANN_SIGMOID_STEPWISE
+FANN_SIGMOID_SYMMETRIC = _libfann.FANN_SIGMOID_SYMMETRIC
+FANN_SIGMOID_SYMMETRIC_STEPWISE = _libfann.FANN_SIGMOID_SYMMETRIC_STEPWISE
+FANN_GAUSSIAN = _libfann.FANN_GAUSSIAN
+FANN_GAUSSIAN_STEPWISE = _libfann.FANN_GAUSSIAN_STEPWISE
+FANN_ELLIOT = _libfann.FANN_ELLIOT
+FANN_ELLIOT_SYMMETRIC = _libfann.FANN_ELLIOT_SYMMETRIC
 
 fann_run = _libfann.fann_run
 
@@ -455,5 +503,10 @@ fann_test = _libfann.fann_test
 get_train_data_input = _libfann.get_train_data_input
 
 get_train_data_output = _libfann.get_train_data_output
+
+fann_is_NULL = _libfann.fann_is_NULL
 cvar = _libfann.cvar
+FANN_TRAIN_NAMES = cvar.FANN_TRAIN_NAMES
+FANN_ERRORFUNC_NAMES = cvar.FANN_ERRORFUNC_NAMES
+FANN_ACTIVATION_NAMES = cvar.FANN_ACTIVATION_NAMES
 
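How the regenerated low-level module relates to the high-level wrapper (a
sketch, using only names visible in this patch):

    import libfann, fann

    # the SWIG module now exports the activation constants ...
    print libfann.FANN_SIGMOID_SYMMETRIC_STEPWISE
    # ... and fann.py re-exports them without the FANN_ prefix
    assert fann.SIGMOID_SYMMETRIC_STEPWISE == libfann.FANN_SIGMOID_SYMMETRIC_STEPWISE
    # the *_NAMES string arrays are deliberately left out of the re-export
    print libfann.FANN_ACTIVATION_NAMES
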
diff --git a/python/makefile.gnu b/python/makefile.gnu
index b673eab..ecce84d 100755
--- a/python/makefile.gnu
+++ b/python/makefile.gnu
@@ -1,5 +1,7 @@
 # This makefile was written to compile a distribution of pyfann for
 # GNU platforms (cygwin included.)
+#
+# This is NOT needed for Linux/Unix, use setup_unix.py instead.
 
 TARGETS = _libfann.dll
 
diff --git a/python/setup.py b/python/setup.py
index 5dffc7c..0ca08f1 100755
--- a/python/setup.py
+++ b/python/setup.py
@@ -22,7 +22,7 @@ class smart_install_data(install_data):
     """
     override default distutils install_data, so we can copy
     files directly, without splitting into modules, scripts,
-    packages, and extensions."
+    packages, and extensions.
     """
     def run(self):
         # need to change self.install_dir to the actual library dir
diff --git a/src/fann.c b/src/fann.c
index 305c642..fb57cab 100644
--- a/src/fann.c
+++ b/src/fann.c
@@ -972,6 +972,7 @@ void fann_allocate_connections(struct fann *ann)
  */
 void fann_seed_rand()
 {
+#ifndef _WIN32
 	FILE *fp = fopen("/dev/urandom", "r");
 	unsigned int foo;
 	struct timeval t;
@@ -986,4 +987,8 @@ void fann_seed_rand()
 		fclose(fp);
 	}
 	srand(foo);
+#else
+    /* COMPAT_TIME REPLACEMENT */
+    srand(GetTickCount());
+#endif
 }
diff --git a/src/include/fann.h b/src/include/fann.h
index bed141e..0ce0382 100644
--- a/src/include/fann.h
+++ b/src/include/fann.h
@@ -33,7 +33,19 @@ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
 #else
 
-#include "compat_time.h"
+/* COMPAT_TIME REPLACEMENT */
+#ifndef _WIN32
+    #include <sys/time.h>
+#else  /* _WIN32 */
+#if !defined(_MSC_EXTENSIONS) && !defined(_INC_WINDOWS)
+    extern unsigned long __stdcall GetTickCount(void);
+#else /* _MSC_EXTENSIONS */
+    #define WIN32_LEAN_AND_MEAN
+    #include <windows.h>
+#endif /* _MSC_EXTENSIONS */
+#endif /* _WIN32 */
+
 #include "fann_data.h"
 #include "fann_internal.h"
 #include "fann_activation.h"

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/libfann.git


