working.py

When 'output', the 2nd arg of ANN_MLP.predict(), is C contiguous, the function works as intended. - Kyle Horn, 2012-06-27 02:52 am

Download (2.3 kB)

 
# Demonstration script (OpenCV bug-report attachment): shows that
# cv.ANN_MLP.predict() works as intended when its output array is
# C-contiguous.  Written against the legacy OpenCV 2.x Python API
# (cv2.ANN_MLP / ANN_MLP_TRAIN_PARAMS_BACKPROP) and pylab's
# star-import namespace (array, linspace, plot, ...).
from pylab import *
import cv2 as cv

#==============================================================================

# Some parameters for learning.  Step size is the gradient step size
# for backpropagation.
step_size = 0.01

# Momentum can be ignored for this example.
momentum = 0.0

# Max steps of training
nsteps = 10000

# Error threshold for halting training
max_err = 0.0001

# When to stop: whichever comes first, count or error
condition = cv.TERM_CRITERIA_COUNT | cv.TERM_CRITERIA_EPS

# Tuple of termination criteria: first condition, then # steps, then
# error tolerance; the second and third entries are ignored if not
# implied by condition.
criteria = (condition, nsteps, max_err)

# params is a dictionary with relevant things for NNet training.
params = dict(term_crit=criteria,
              train_method=cv.ANN_MLP_TRAIN_PARAMS_BACKPROP,
              bp_dw_scale=step_size,
              bp_moment_scale=momentum)

#==============================================================================

# Network topology: 1 input, 52 hidden units, 2 outputs.
layer_sizes = array([1, 52, 2])
mlp = cv.ANN_MLP(layer_sizes)

# create an input vector x, and an output vector y
x = linspace(0, 1)         # x is one-dimensional
y1 = (sin(x*2*pi)+1)/2.    # y is two-dimensional
y2 = (cos(x*2*pi)+1)/2.
y = hstack((y1.reshape(-1, 1), y2.reshape(-1, 1)))

# z catches output from ANN_MLP predict(); forcing C order here is the
# whole point of the demonstration (compare the commented-out variant).
z = array(zeros_like(y), order='c') + .5
#z = zeros_like(y)+.5

print("Now notice that when we force C contiguousness, ANN_MLP")
print("works as intended:")
print(z.flags)

# make the input and output larger (replicate the training set 8x)
inpt = x
inpt = hstack((inpt, inpt))
inpt = hstack((inpt, inpt))
inpt = hstack((inpt, inpt))
outpt = y
outpt = vstack((outpt, outpt))
outpt = vstack((outpt, outpt))
outpt = vstack((outpt, outpt))

# train and predict
mlp.train(inpt, outpt, None, params=params)
mlp.predict(x, z)

# plot an idealization of the training set as a solid line
# and the output of the neural network as dots
figure()
title("first dimension of training data\n (dots should line up on the sine curve)")
plot(x, y[:, 0], 'b')
plot(x, z[:, 0], 'go')

# plot an idealization of the training set as a solid line
# and the output of the neural network as dots
figure()
title("second dimension of training data\n (dots should line up on the cosine curve)")
plot(x, y[:, 1], 'r')
plot(x, z[:, 1], 'yo')
show()