# Requires the legacy OpenCV 2.4 Python bindings (and Python 2, for the
# print statements below).
from pylab import *
import cv2 as cv

#==============================================================================
# Some parameters for learning.  Step size is the gradient step size
# for backpropagation.
step_size = 0.01
# Momentum can be ignored for this example.
momentum = 0.0
# Max steps of training
nsteps = 10000
# Error threshold for halting training
max_err = 0.0001
# When to stop: whichever comes first, count or error
condition = cv.TERM_CRITERIA_COUNT | cv.TERM_CRITERIA_EPS
# Tuple of termination criteria: the condition first, then the number of
# steps, then the error tolerance; the second and third entries are ignored
# if not implied by the condition.
criteria = (condition, nsteps, max_err)
# params is a dictionary with the relevant settings for NNet training.
params = dict(term_crit=criteria,
              train_method=cv.ANN_MLP_TRAIN_PARAMS_BACKPROP,
              bp_dw_scale=step_size,
              bp_moment_scale=momentum)

#==============================================================================
layer_sizes = array([1, 52, 2])
mlp = cv.ANN_MLP(layer_sizes)

# create an input vector x, and an output vector y
x = linspace(0, 1)           # x is one-dimensional
y1 = (sin(x*2*pi) + 1)/2.    # y is two-dimensional
y2 = (cos(x*2*pi) + 1)/2.
y = hstack((y1.reshape(-1, 1), y2.reshape(-1, 1)))

# z catches the output of ANN_MLP.predict; note the forced C ordering
z = array(zeros_like(y), order='c') + .5
#z = zeros_like(y) + .5
print "Now notice that when we force C contiguity, ANN_MLP"
print "works as intended:"
print z.flags

# make the input and output larger by tiling the training set eightfold
inpt = x
inpt = hstack((inpt, inpt))
inpt = hstack((inpt, inpt))
inpt = hstack((inpt, inpt))
outpt = y
outpt = vstack((outpt, outpt))
outpt = vstack((outpt, outpt))
outpt = vstack((outpt, outpt))

# train and predict
mlp.train(inpt, outpt, None, params=params)
mlp.predict(x, z)

# plot an idealization of the training set as a solid line
# and the output of the neural network as dots
figure()
title("first dimension of training data\n (dots should line up on the sine curve)")
plot(x, y[:, 0], 'b')
plot(x, z[:, 0], 'go')

# plot an idealization of the training set as a solid line
# and the output of the neural network as dots
figure()
title("second dimension of training data\n (dots should line up on the cosine curve)")
plot(x, y[:, 1], 'r')
plot(x, z[:, 1], 'yo')

show()
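
#==============================================================================
# A hedged sketch, not part of the original demo: cv.ANN_MLP and the
# dict-style params above exist only in the legacy OpenCV 2.4 bindings.
# Under OpenCV 3.x/4.x the equivalent model lives in the cv2.ml module;
# the guarded block below mirrors the same parameters and is one assumption
# about how this demo could be ported, not the author's code.  (The rest of
# this file is Python 2 syntax, so treat this block as standalone reference.)
if hasattr(cv, 'ml'):
    import numpy as np
    mlp2 = cv.ml.ANN_MLP_create()
    mlp2.setLayerSizes(np.array([1, 52, 2], dtype=np.int32))
    mlp2.setActivationFunction(cv.ml.ANN_MLP_SIGMOID_SYM)
    mlp2.setTrainMethod(cv.ml.ANN_MLP_BACKPROP)
    mlp2.setBackpropWeightScale(step_size)
    mlp2.setBackpropMomentumScale(momentum)
    mlp2.setTermCriteria((condition, nsteps, max_err))
    # ml.StatModel.train expects float32 samples laid out one per row
    samples = np.asarray(inpt, dtype=np.float32).reshape(-1, 1)
    responses = np.asarray(outpt, dtype=np.float32)
    mlp2.train(samples, cv.ml.ROW_SAMPLE, responses)
    # predict returns (retval, outputs); no preallocated z is needed here
    _, z2 = mlp2.predict(np.asarray(x, dtype=np.float32).reshape(-1, 1))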