diff -N -u -r fann-1.2.0.orig/python/examples/mushroom.py fann-1.2.0.my/python/examples/mushroom.py
--- fann-1.2.0.orig/python/examples/mushroom.py 2004-07-24 01:36:04.000000000 +0200
+++ fann-1.2.0.my/python/examples/mushroom.py 2005-01-03 20:48:21.000000000 +0100
@@ -20,9 +20,9 @@
# start training the network
print "Training network"
-ann.set_activation_function_hidden(fann.FANN_SIGMOID_SYMMETRIC_STEPWISE)
-ann.set_activation_function_output(fann.FANN_SIGMOID_STEPWISE)
-ann.set_training_algorithm(fann.FANN_TRAIN_INCREMENTAL)
+ann.set_activation_function_hidden(fann.SIGMOID_SYMMETRIC_STEPWISE)
+ann.set_activation_function_output(fann.SIGMOID_STEPWISE)
+ann.set_training_algorithm(fann.TRAIN_INCREMENTAL)
ann.train_on_data(train_data, max_iterations, iterations_between_reports, desired_error)
@@ -40,9 +40,3 @@
print "Saving network"
ann.save("mushroom_float.net")
-# blow it all up
-print "Cleaning up."
-ann.destroy()
-test_data.destroy()
-train_data.destroy()
-
diff -N -u -r fann-1.2.0.orig/python/examples/simple_train.py fann-1.2.0.my/python/examples/simple_train.py
--- fann-1.2.0.orig/python/examples/simple_train.py 2004-07-24 01:35:58.000000000 +0200
+++ fann-1.2.0.my/python/examples/simple_train.py 2005-01-03 20:48:01.000000000 +0100
@@ -12,9 +12,9 @@
iterations_between_reports = 1000
ann = fann.create(connection_rate, learning_rate, (num_input, num_neurons_hidden, num_output))
+ann.set_activation_function_output(fann.SIGMOID_SYMMETRIC_STEPWISE)
ann.train_on_file("datasets/xor.data", max_iterations, iterations_between_reports, desired_error)
ann.save("xor_float.net")
-ann.destroy()
diff -N -u -r fann-1.2.0.orig/python/fann_helper.c fann-1.2.0.my/python/fann_helper.c
--- fann-1.2.0.orig/python/fann_helper.c 2004-07-26 09:52:30.000000000 +0200
+++ fann-1.2.0.my/python/fann_helper.c 2005-01-03 22:13:09.000000000 +0100
@@ -68,3 +68,8 @@
return get_row_from_double_array(t->output, row, t->num_output);
}
+
+int fann_is_NULL(struct fann *ann)
+{
+ return ann == NULL ? 1 : 0;
+}
diff -N -u -r fann-1.2.0.orig/python/fann.py fann-1.2.0.my/python/fann.py
--- fann-1.2.0.orig/python/fann.py 2004-07-26 09:46:04.000000000 +0200
+++ fann-1.2.0.my/python/fann.py 2005-01-03 22:32:49.000000000 +0100
@@ -21,30 +21,23 @@
import libfann
-# Activation function
-FANN_LINEAR = 0
-FANN_THRESHOLD = 1
-FANN_THRESHOLD_SYMMETRIC = 2
-FANN_SIGMOID = 3
-FANN_SIGMOID_STEPWISE = 4 # default
-FANN_SIGMOID_SYMMETRIC = 5
-FANN_SIGMOID_SYMMETRIC_STEPWISE = 6
-FANN_GAUSSIAN = 7
-FANN_GAUSSIAN_STEPWISE = 8
-FANN_ELLIOT = 9 # not implemented yet
-FANN_ELLIOT_SYMMETRIC = 10 # not implemented yet
-
-# Training algorithm
-FANN_TRAIN_INCREMENTAL = 0
-FANN_TRAIN_BATCH = 1
-FANN_TRAIN_RPROP = 2
-FANN_TRAIN_QUICKPROP = 3
+# import all FANN_ constants without FANN_ prefix
+for name, value in libfann.__dict__.iteritems():
+ if name.startswith('FANN_') and not name.endswith('_NAMES'):
+ globals()[name[5:]] = value
+del name, value
class fann_class:
def __init__(self, ann):
+ """
+ Never call this directly.
+ """
self.__ann = ann
-
+
+ def __del__(self):
+ libfann.fann_destroy(self.__ann)
+
def get_native_object(self):
return self.__train_data
@@ -54,13 +47,6 @@
"""
return libfann.fann_run(self.__ann, input)
- def destroy(self):
- """
- Destructs the entire network.
- Be sure to call this function after finished using the network.
- """
- libfann.fann_destroy(self.__ann)
-
def randomize_weights(self, min_weight, max_weight):
"""
Randomize weights (from the beginning the weights are random between -0.1 and 0.1)
@@ -198,31 +184,31 @@
"""
libfann.fann_set_activation_function_output(self.__ann, activation_function)
- def get_activation_hidden_steepness(self):
+ def get_activation_steepness_hidden(self):
"""
Get the steepness parameter for the sigmoid function used in the hidden layers.
"""
- return libfann.get_activation_hidden_steepness(self.__ann)
+ return libfann.get_activation_steepness_hidden(self.__ann)
- def set_activation_hidden_steepness(self, steepness):
+ def set_activation_steepness_hidden(self, steepness):
"""
Set the steepness of the sigmoid function used in the hidden layers.
Only usefull if sigmoid function is used in the hidden layers (default 0.5).
"""
- libfann.fann_set_activation_hidden_steepness(self.__ann, steepness)
+ libfann.fann_set_activation_steepness_hidden(self.__ann, steepness)
- def get_activation_output_steepness(self):
+ def get_activation_steepness_output(self):
"""
Get the steepness parameter for the sigmoid function used in the output layer.
"""
- return libfann.fann_get_activation_output_steepness(self.__ann)
+ return libfann.fann_get_activation_steepness_output(self.__ann)
- def set_activation_output_steepness(self, steepness):
+ def set_activation_steepness_output(self, steepness):
"""
Set the steepness of the sigmoid function used in the output layer.
Only usefull if sigmoid function is used in the output layer (default 0.5).
"""
- libfann.fann_set_activation_output_steepness(self.__ann, steepness)
+ libfann.fann_set_activation_steepness_output(self.__ann, steepness)
def train_on_data(self, data, max_epochs, epochs_between_reports, desired_error):
"""
@@ -269,7 +255,12 @@
class train_class:
def __init__(self, train_data):
+ """
+ Never call this directly.
+ """
self.__train_data = train_data
+ def __del__(self):
+ libfann.fann_destroy_train(self.__train_data)
def get_native_object(self):
return self.__train_data
@@ -289,13 +280,6 @@
def get_output(self, index):
return libfann.get_train_data_output(self.__train_data, index);
- def destroy(self):
- """
- Destructs the training data
- Be sure to call this function after finished using the training data.
- """
- libfann.fann_destroy_train(self.__train_data)
-
def shuffle(self):
"""
Shuffles training data, randomizing the order
@@ -317,12 +301,10 @@
def merge(self, other):
"""
- Merges training data into a single struct
+ Merges training data into a new struct
"""
outcome = libfann.fann_merge_train_data(self.__train_data, other.get_native_object())
- self.destroy()
- self.__train_data = outcome
- return self
+ return train_class(outcome)
def duplicate(self):
"""
@@ -345,6 +327,8 @@
When running the network, the bias nodes always emits 1
"""
ann = libfann.fann_create_array(connection_rate, learning_rate, len(layers), layers)
+ if libfann.fann_is_NULL(ann):
+ return None # probably won't happen
return fann_class(ann)
def create_from_file(filename):
@@ -352,6 +336,8 @@
Constructs a backpropagation neural network from a configuration file.
"""
ann = libfann.fann_create_from_file(filename)
+ if libfann.fann_is_NULL(ann):
+ raise IOError, "Could not load ann from file '%s'" % filename
return fann_class(ann)
def read_train_from_file(filename):
diff -N -u -r fann-1.2.0.orig/python/libfann.i fann-1.2.0.my/python/libfann.i
--- fann-1.2.0.orig/python/libfann.i 2004-07-20 00:21:20.000000000 +0200
+++ fann-1.2.0.my/python/libfann.i 2005-01-03 22:58:56.000000000 +0100
@@ -7,15 +7,16 @@
#include "../src/include/fann.h"
%}
-%typemap(in) fann_type[ANY] {
+%define CHECKED_FLOAT_ARRAY(typemap_name, expected_length)
+%typemap(in) typemap_name {
int i;
if (!PySequence_Check($input)) {
PyErr_SetString(PyExc_ValueError,"Expected a sequence");
- return NULL;
+ SWIG_fail;
}
- if (PySequence_Length($input) == 0) {
- PyErr_SetString(PyExc_ValueError,"Size mismatch. Expected some elements");
- return NULL;
+ if (PySequence_Length($input) != expected_length) {
+ PyErr_SetString(PyExc_ValueError,"Sequence has wrong length");
+ SWIG_fail;
}
$1 = (float *) malloc(PySequence_Length($input)*sizeof(float));
for (i = 0; i < PySequence_Length($input); i++) {
@@ -24,20 +25,29 @@
$1[i] = (float) PyFloat_AsDouble(o);
} else {
PyErr_SetString(PyExc_ValueError,"Sequence elements must be numbers");
- return NULL;
+ Py_DECREF(o);
+ SWIG_fail;
}
+ Py_DECREF(o);
}
}
+%typemap(freearg) typemap_name {
+ if ($1) free($1);
+}
+%enddef
+
+CHECKED_FLOAT_ARRAY(fann_type *input, arg1->num_input)
+CHECKED_FLOAT_ARRAY(fann_type *desired_output, arg1->num_output)
%typemap(in) int[ANY] {
int i;
if (!PySequence_Check($input)) {
PyErr_SetString(PyExc_ValueError,"Expected a sequence");
- return NULL;
+ SWIG_fail;
}
if (PySequence_Length($input) == 0) {
PyErr_SetString(PyExc_ValueError,"Size mismatch. Expected some elements");
- return NULL;
+ SWIG_fail;
}
$1 = (unsigned int *) malloc(PySequence_Length($input)*sizeof(unsigned int));
for (i = 0; i < PySequence_Length($input); i++) {
@@ -46,37 +56,41 @@
$1[i] = (int) PyInt_AsLong(o);
} else {
PyErr_SetString(PyExc_ValueError,"Sequence elements must be numbers");
- return NULL;
+ Py_DECREF(o);
+ SWIG_fail;
}
+ Py_DECREF(o);
}
}
-
-%typemap(freearg) fann_type* {
+%typemap(freearg) int[ANY] {
if ($1) free($1);
}
+%apply int[ANY] {int *, unsigned int*};
+
+typedef double fann_type;
%typemap(out) PyObject* {
$result = $1;
}
-%apply fann_type[ANY] {fann_type *};
-%apply int[ANY] {int *, unsigned int*};
+// create_array is used instead
+%ignore fann_create;
+%ignore fann_create_shortcut;
-#define FANN_INCLUDE
-%varargs(10,int n = 0) fann_create;
%rename(fann_run_old) fann_run;
%rename(fann_run) fann_run2;
%rename(fann_test_old) fann_test;
%rename(fann_test) fann_test2;
+#define FANN_INCLUDE
%include "../src/include/fann.h"
%include "../src/include/fann_data.h"
+%include "../src/include/fann_activation.h"
// Helper functions
PyObject* fann_run2(struct fann *ann, fann_type *input);
PyObject* fann_test2(struct fann *ann, fann_type *input, fann_type *desired_output);
PyObject* get_train_data_input(struct fann_train_data *ann, int row);
PyObject* get_train_data_output(struct fann_train_data *ann, int row);
-
-
+int fann_is_NULL(struct fann *ann);
diff -N -u -r fann-1.2.0.orig/python/makefile.gnu fann-1.2.0.my/python/makefile.gnu
--- fann-1.2.0.orig/python/makefile.gnu 2004-10-09 13:56:57.000000000 +0200
+++ fann-1.2.0.my/python/makefile.gnu 2005-01-02 16:52:23.000000000 +0100
@@ -1,5 +1,7 @@
# This makefile was written to compile a distribution of pyfann for
# GNU platforms (cygwin included.)
+#
+# This is NOT needed for Linux/Unix, use setup_unix.py instead.
TARGETS = _libfann.dll
diff -N -u -r fann-1.2.0.orig/python/README fann-1.2.0.my/python/README
--- fann-1.2.0.orig/python/README 2004-08-06 10:54:13.000000000 +0200
+++ fann-1.2.0.my/python/README 2005-01-02 17:00:28.000000000 +0100
@@ -1,6 +1,9 @@
This python binding is provided by Vincenzo Di Massa <hawk.it@tiscalinet.it>
and Gil Megidish <gil@megidish.net>
+Instructions for Windows:
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
MAKE
Make sure to make the fann library first. You are required to have
swig and python development files installed. After you compiled the
@@ -24,3 +27,17 @@
USAGE
Just import fann.
+
+Instructions for Unix/Linux:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+First build and install the fann library. Then run:
+
+./setup_unix.py build
+./setup_unix.py install
+
+Running install alone will also work, if you run it twice (a small bug).
+The examples in examples/ (not installed) will then work once you copy the datasets:
+
+mkdir examples/datasets
+cp ../examples/xor.data ../benchmarks/datasets/mushroom.* examples/datasets/
diff -N -u -r fann-1.2.0.orig/python/setup.py fann-1.2.0.my/python/setup.py
--- fann-1.2.0.orig/python/setup.py 2004-07-26 09:56:59.000000000 +0200
+++ fann-1.2.0.my/python/setup.py 2005-01-02 16:49:45.000000000 +0100
@@ -22,7 +22,7 @@
"""
override default distutils install_data, so we can copy
files directly, without splitting into modules, scripts,
- packages, and extensions."
+ packages, and extensions.
"""
def run(self):
# need to change self.install_dir to the actual library dir
diff -N -u -r fann-1.2.0.orig/python/setup_unix.py fann-1.2.0.my/python/setup_unix.py
--- fann-1.2.0.orig/python/setup_unix.py 1970-01-01 01:00:00.000000000 +0100
+++ fann-1.2.0.my/python/setup_unix.py 2005-01-02 16:27:17.000000000 +0100
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+from distutils.core import setup, Extension
+#from glob import glob
+
+VERSION='1.2.0'
+
+LONG_DESCRIPTION="""\
+Fast Artificial Neural Network Library implements multilayer
+artificial neural networks with support for both fully connected
+and sparsely connected networks. It includes a framework for easy
+handling of training data sets. It is easy to use, versatile, well
+documented, and fast.
+"""
+
+module1 = Extension(
+ '_libfann',
+ sources = ['libfann.i', 'fann_helper.c'],
+ libraries = ['fann'],
+ #extra_objects = glob('../src/fann*.o'),
+ )
+
+setup(
+ name='pyfann',
+ version=VERSION,
+ description='Fast Artificial Neural Network Library (fann)',
+ long_description=LONG_DESCRIPTION,
+ author='Steffen Nissen',
+ author_email='lukesky@diku.dk',
+ maintainer='Gil Megidish',
+ maintainer_email='gil@megidish.net',
+ url='http://sourceforge.net/projects/fann/',
+ license='GNU LESSER GENERAL PUBLIC LICENSE (LGPL)',
+ platforms='UNIX',
+
+ ext_modules = [module1],
+ py_modules = ['libfann', 'fann']
+ )
+
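
For reference, a minimal usage sketch of the binding as patched above, modeled on the simple_train.py example in this diff. The parameter values, file paths, and the run() method name are illustrative assumptions, not part of the patch itself:

    import fann

    # illustrative topology and training parameters only
    connection_rate = 1.0
    learning_rate = 0.7
    desired_error = 0.0001
    max_iterations = 100000
    iterations_between_reports = 1000

    # create() returns a fann_class wrapper; after this patch the
    # constants are exposed without the FANN_ prefix
    ann = fann.create(connection_rate, learning_rate, (2, 4, 1))
    ann.set_activation_function_output(fann.SIGMOID_SYMMETRIC_STEPWISE)
    ann.train_on_file("datasets/xor.data", max_iterations,
                      iterations_between_reports, desired_error)
    ann.save("xor_float.net")

    # create_from_file() now raises IOError on a bad path, and the
    # underlying C structs are freed by __del__, so no destroy() calls
    try:
        ann2 = fann.create_from_file("xor_float.net")
        # run() is assumed to be the forward-pass wrapper around
        # libfann.fann_run; the input length must match num_input
        print ann2.run([1.0, -1.0])
    except IOError, e:
        print "could not load network:", e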