failing.py

When 'output', the 2nd arg of ANN_MLP.predict(), is Fortran contiguous, the function fails to copy results into 'output'. - Kyle Horn, 2012-06-27 02:52 am

Download (2.6 kB)

 
1
from pylab import *
2
import cv2 as cv
3
4
#==============================================================================
5
6
# Some parameters for learning.  Step size is the gradient step size
7
# for backpropogation.
8
step_size = 0.01
9
10
# Momentum can be ignored for this example.
11
momentum = 0.0
12
13
# Max steps of training
14
nsteps = 10000
15
16
# Error threshold for halting training
17
max_err = 0.0001
18
19
# When to stop: whichever comes first, count or error
20
condition = cv.TERM_CRITERIA_COUNT | cv.TERM_CRITERIA_EPS
21
22
# Tuple of termination criteria: first condition, then # steps, then
23
# error tolerance second and third things are ignored if not implied
24
# by condition
25
criteria = (condition, nsteps, max_err)
26
27
# params is a dictionary with relevant things for NNet training.
28
params = dict( term_crit = criteria, 
29
               train_method = cv.ANN_MLP_TRAIN_PARAMS_BACKPROP, 
30
               bp_dw_scale = step_size, 
31
               bp_moment_scale = momentum )
32
33
#==============================================================================
34
35
layer_sizes = array([1,52,2])
36
mlp = cv.ANN_MLP(layer_sizes)
37
38
# create an input vector x, and an output vector y
39
x = linspace(0,1)       # x is one-dimensional
40
y1 = (sin(x*2*pi)+1)/2. # y is two-dimensional
41
y2 = (cos(x*2*pi)+1)/2.
42
y = hstack((y1.reshape(-1,1),y2.reshape(-1,1)))
43
#z = array(zeros_like(y),order='c')+.5   # z catches output from ANN_MLP(train)
44
z = zeros_like(y)+.5    # when z isn't forced to be C-contiguous, something fails between numpy and C
45
46
# show that z is Fortran contiguous, rather than C contiguous
47
print "Notice that z is Fortran contiguous, rather than C contiguous:"
48
print z.flags
49
print "When performing a lot of vstack and hstack operations, for"
50
print "efficiency reasons, sometimes Numpy will switch continuity."
51
52
# make the input and output larger
53
inpt = x
54
inpt = hstack((inpt,inpt))
55
inpt = hstack((inpt,inpt))
56
inpt = hstack((inpt,inpt))
57
outpt = y
58
outpt = vstack((outpt,outpt))
59
outpt = vstack((outpt,outpt))
60
outpt = vstack((outpt,outpt))
61
62
# train and predict
63
mlp.train(inpt,outpt,None,params=params)
64
mlp.predict(x,z)
65
66
# plot an idealization of the training set as a solid line
67
# and the output of the neural network as dots
68
figure()
69
title("first dimension of training data\n (dots should line up on the sine curve)")
70
plot(x,y[:,0],'b')
71
plot(x,z[:,0],'go')
72
73
# plot an idealization of the training set as a solid line
74
# and the output of the neural network as dots
75
figure()
76
title("second dimension of training data\n (dots should line up on the cosine curve)")
77
plot(x,y[:,1],'r')
78
plot(x,z[:,1],'yo')
79
show()