Neural Network in 16 lines

pete pete at pete.com
Sat Jul 17 02:17:42 EDT 2004


bulatov at engr.orst.edu (Yaroslav Bulatov) wrote in 
news:4d642979.0407162207.450e45f1 at posting.google.com:

> Just for fun :)
> http://yaroslav.hopto.org/russianwiki/index.php/neural-impl
> 
> Is there a more compact way?
> 
> Yaroslav
> 

Here's the function from the link:

def train_smallest(data, weights, nu=1):
  """Train a feed-forward net by online backpropagation.

  data    -- list of [inputs, targets] pairs
  weights -- list of layers; each layer is a list of per-unit weight
             vectors, the last weight of each unit being its bias
  nu      -- learning rate
  Returns the trained weights; the caller's list is not mutated
  (each pass works on a deepcopy).
  (Re-wrapped: the posting's email line breaks split statements, and the
  hidden-layer error used weights[i] where weights[i+1] is required.)
  """
  for it in range(1000):
    for dp in data:
      # Forward pass; append constant 1 so the last weight acts as bias.
      activations = [dp[0] + [1]]
      for layer in weights:
        activations.append(
          [sigmoid(innerproduct(u, activations[-1])) for u in layer] + [1])
      sigmas, new_weights = [], deepcopy(weights)
      # Backward pass, output layer towards input.
      for i in range(len(weights))[::-1]:
        sigma_row = []
        for j, o in enumerate(activations[i + 1][:-1]):
          if not sigmas:
            err = dp[1][j] - o  # output layer: target minus output
          else:
            # Hidden layer: fold the next layer's deltas through the
            # weights LEAVING unit j, i.e. weights[i+1][k][j].
            err = innerproduct(sigmas[-1],
                               [weights[i + 1][k][j]
                                for k in range(len(weights[i + 1]))])
          sigma_row.append(o * (1 - o) * err)
          for k in range(len(weights[i][j])):
            new_weights[i][j][k] += nu * sigma_row[-1] * activations[i][k]
        sigmas.append(sigma_row)
      weights = new_weights
  return weights

--------------------------------------------------------------------
And here's the whole program:

# Python implementation of feed-forward neural network

import Gnuplot
from math import *
from operator import mul
from copy import deepcopy
from Numeric import innerproduct

def sigmoid(x):
  # Logistic activation: maps any real x into the open interval (0, 1).
  return 1.0 / (1.0 + exp(-x))

# Trains the neural network by online backpropagation; returns the
# trained weights.
def train_smallest(data, weights, nu=1):
  """Run 1000 epochs of per-pattern (online) backprop over `data`.

  data    -- list of [inputs, targets] pairs
  weights -- list of layers; each layer is a list of per-unit weight
             vectors, the last weight of each unit being its bias
  nu      -- learning rate
  Returns the trained weights; the caller's list is not mutated
  (each pattern's update works on a deepcopy).
  """
  for it in range(1000):
    for dp in data:
      # Forward pass; append constant 1 so the last weight acts as bias.
      activations = [dp[0] + [1]]
      for layer in weights:
        activations.append(
          [sigmoid(innerproduct(u, activations[-1])) for u in layer] + [1])
      sigmas, new_weights = [], deepcopy(weights)
      # Backward pass, output layer towards input.
      for i in range(len(weights))[::-1]:
        sigma_row = []
        for j, o in enumerate(activations[i + 1][:-1]):
          if not sigmas:
            err = dp[1][j] - o  # output layer: target minus output
          else:
            # Hidden layer: fold the next layer's deltas through the
            # weights LEAVING unit j.  The connecting weights live in
            # weights[i+1] (matching the loop bound), not weights[i] as
            # the original posting had it.
            err = innerproduct(sigmas[-1],
                               [weights[i + 1][k][j]
                                for k in range(len(weights[i + 1]))])
          sigma_row.append(o * (1 - o) * err)
          # Gradient step for every weight feeding unit j of this layer.
          for k in range(len(weights[i][j])):
            new_weights[i][j][k] += nu * sigma_row[-1] * activations[i][k]
        sigmas.append(sigma_row)
      weights = new_weights
  return weights

# Renders the given neural network as a gnuplot surface plot.
def graphNN(weights):
  """Translate the network into gnuplot function definitions and splot it.

  Each unit becomes a gnuplot function named r<layer>c<unit>(x,y),
  expressed in terms of the previous layer's functions (the input layer
  is just the plot variables x and y).  The surface plotted is the unit
  defined last.  Blocks on raw_input() so the window stays open.
  """
  g = Gnuplot.Gnuplot(debug=0)
  g('set yrange [0:1]')
  g('s(x) = 1/(1+exp(-x))')

  prev_row = ['x', 'y']
  for i, row in enumerate(weights):
    new_row = []
    for j, unit in enumerate(row):
      name = "r%dc%d(x,y)" %(i, j)
      new_row.append(name)
      terms = []
      for k, weight in enumerate(unit):
        if k < len(unit) - 1:
          terms.append('%f*%s' % (weight, prev_row[k]))
        else:
          # The final weight multiplies the constant bias input.
          terms.append('%f' % (weight,))
      g(name + '=s(' + '+'.join(terms) + ')')
    prev_row = new_row

  # 'name' still holds the identifier of the last unit defined above.
  g('splot[-2:2][-2:2][] %s' % (name,))
  raw_input()

# XOR data: [[inputs], [target]] pairs with inputs encoded as +-1.
# NOTE(review): equal inputs map to target 1 here, so under this
# encoding the table is actually XNOR (NOT XOR) — confirm intent.
data = [[[1,1],[1]], [[-1,-1], [1]], [[1,-1],[0]], [[-1,1],[0]] ]

# Starting weights (leads to global optimum): two hidden units and one
# output unit, each vector's last entry being the bias weight.
start_weights = [[[-1,-1, -1],[1,-1, -1]],[[1,1,0]]]

# Train the network
weights = train_smallest(data, start_weights)

# Visualize it
graphNN(weights)



More information about the Python-list mailing list