bu*****@engr.orst.edu (Yaroslav Bulatov) wrote in

news:4d***********************@posting.google.com:

Just for fun :)

http://yaroslav.hopto.org/russianwik...hp/neural-impl

Is there a more compact way?

Yaroslav

Here's the function from the link:

def train_smallest(data, weights, nu=1):
    """Train a feed-forward neural network by online backpropagation.

    data    -- list of [inputs, targets] pairs, e.g. [[[1, 1], [1]], ...]
    weights -- list of layers; each layer is a list of per-unit weight
               vectors whose LAST component is the bias weight
    nu      -- learning rate (default 1)

    Returns the trained weights after a fixed 1000 passes over the data.
    The caller's weight list is not mutated (updates go into a deepcopy).
    """
    for it in range(1000):
        for dp in data:
            # Forward pass: every activation row gets a trailing 1 so the
            # last weight of each unit acts as its bias.
            activations = [dp[0] + [1]]
            for layer in weights:
                activations.append(
                    [sigmoid(innerproduct(u, activations[-1])) for u in layer] + [1])
            # Backward pass, output layer down to the first hidden layer.
            sigmas, new_weights = [], deepcopy(weights)
            for i in reversed(range(len(weights))):
                sigma_row = []
                for j, o in enumerate(activations[i + 1][:-1]):
                    if not sigmas:
                        # Output layer: plain target-minus-output error.
                        err = dp[1][j] - o
                    else:
                        # Hidden layer: propagate the previously computed
                        # sigmas back through layer i+1's weights.
                        # BUG FIX: the posted code indexed weights[i][k][j],
                        # i.e. the current layer, while iterating k over
                        # layer i+1's units; the connecting weights live in
                        # weights[i+1].
                        err = innerproduct(
                            sigmas[-1],
                            [weights[i + 1][k][j] for k in range(len(weights[i + 1]))])
                    sigma_row.append(o * (1 - o) * err)
                    for k in range(len(weights[i][j])):
                        new_weights[i][j][k] += nu * sigma_row[-1] * activations[i][k]
                sigmas.append(sigma_row)
            weights = new_weights
    return weights

--------------------------------------------------------------------

And here's the whole program:

# Python implementation of feed-forward neural network

import Gnuplot

from math import *

from operator import mul

from copy import deepcopy

from Numeric import innerproduct

def sigmoid(x):
    """Logistic function 1/(1 + e^-x).

    Numerically stable: the naive 1/(1+exp(-x)) raises OverflowError for
    large negative x (exp overflows near |x| > 709), so we branch on the
    sign and only ever exponentiate a non-positive value.
    """
    if x >= 0:
        return 1 / (1 + exp(-x))
    z = exp(x)  # x < 0, so z is in (0, 1) and cannot overflow
    return z / (1 + z)

def train_smallest(data, weights, nu=1):
    """Train a feed-forward neural network by online backpropagation.

    data    -- list of [inputs, targets] pairs, e.g. [[[1, 1], [1]], ...]
    weights -- list of layers; each layer is a list of per-unit weight
               vectors whose LAST component is the bias weight
    nu      -- learning rate (default 1)

    Returns the trained weights after a fixed 1000 passes over the data.
    The caller's weight list is not mutated (updates go into a deepcopy).
    """
    for it in range(1000):
        for dp in data:
            # Forward pass: every activation row gets a trailing 1 so the
            # last weight of each unit acts as its bias.
            activations = [dp[0] + [1]]
            for layer in weights:
                activations.append(
                    [sigmoid(innerproduct(u, activations[-1])) for u in layer] + [1])
            # Backward pass, output layer down to the first hidden layer.
            sigmas, new_weights = [], deepcopy(weights)
            for i in reversed(range(len(weights))):
                sigma_row = []
                for j, o in enumerate(activations[i + 1][:-1]):
                    if not sigmas:
                        # Output layer: plain target-minus-output error.
                        err = dp[1][j] - o
                    else:
                        # Hidden layer: propagate the previously computed
                        # sigmas back through layer i+1's weights.
                        # BUG FIX: the posted code indexed weights[i][k][j],
                        # i.e. the current layer, while iterating k over
                        # layer i+1's units; the connecting weights live in
                        # weights[i+1].
                        err = innerproduct(
                            sigmas[-1],
                            [weights[i + 1][k][j] for k in range(len(weights[i + 1]))])
                    sigma_row.append(o * (1 - o) * err)
                    for k in range(len(weights[i][j])):
                        new_weights[i][j][k] += nu * sigma_row[-1] * activations[i][k]
                sigmas.append(sigma_row)
            weights = new_weights
    return weights

# graphs given neural network through gnuplot

def graphNN(weights):
    """Plot the network's output surface over [-2, 2]^2 with gnuplot.

    Each unit becomes a gnuplot function r<row>c<col>(x,y) built from the
    previous layer's functions; only the LAST unit defined (the final
    output unit) is plotted. Blocks on raw_input() until Enter is pressed
    so the plot window stays open.
    """
    g = Gnuplot.Gnuplot(debug=0)
    g('set yrange [0:1]')
    # The first "layer" of inputs is just the plot variables themselves.
    prev_row = ['x', 'y']
    g('s(x) = 1/(1+exp(-x))')
    # Convert the neural network to analytic form, one gnuplot function
    # definition per unit.
    for i, row in enumerate(weights):
        new_row = []
        for j, unit in enumerate(row):
            name = "r%dc%d(x,y)" % (i, j)
            new_row.append(name)
            formula = ''
            for k, weight in enumerate(unit):
                if k > 0:
                    formula += '+'
                if k < len(unit) - 1:
                    formula += '%f*%s' % (weight, prev_row[k])
                else:  # bias unit: no input term, just the constant
                    formula += '%f' % (weight,)
            g(name + '=s(' + formula + ')')
        prev_row = new_row
    # `name` still holds the last unit defined, i.e. the network output.
    g('splot[-2:2][-2:2][] %s' % (name,))
    raw_input()

# XOR data: [inputs, targets] pairs (inputs in {-1, 1}, target in {0, 1})
data = [[[1, 1], [1]], [[-1, -1], [1]], [[1, -1], [0]], [[-1, 1], [0]]]

# Starting weights (leads to global optimum): one hidden layer of two
# units, one output unit; last component of each vector is the bias.
start_weights = [[[-1, -1, -1], [1, -1, -1]], [[1, 1, 0]]]

# Train the network
weights = train_smallest(data, start_weights)

# Visualize it
graphNN(weights)