[SciPy-user] optimization using fmin_bfgs with gradient information

Ernest Adrogué eadrogue at gmx.net
Sun Jul 19 14:39:23 EDT 2009


19/07/09 @ 17:41 (+0200), thus spake Sebastian Walter:
> Hello,
> I would like to add your objective function as part of the unit test
> for on of the automatic differentiation tools I've been developing:
> PYADOLC (download at http://github.com/b45ch1/pyadolc).
> 
> Unfortunately, the description of the objective function is
> incomplete: there are some references to self.n and so on in the code.
> Would it be possible for you to post the full description? That would
> be great :)

Yes, of course.
You can use this version, which works independently: the original
one feeds on data from a dataset, whereas this one uses randomly
generated data. Example usage:

In [6]: m = Model(random_obs())

In [7]: x0 = m.guess()

In [8]: x0
Out[8]: 
[1.3414634146341464,
 1.4634146341463414,
 1.6341463414634145,
 1.5609756097560976,
 1.4878048780487805,
 1.8048780487804879,
 ........
 1.3902439024390243,
 1.6097560975609757,
 1.5,
 0]

In [9]: m.fobj(x0)
Out[9]: 1281.027571347247



------------------------------
import random
import math

class Observation(object):
    def __init__(self, data):
        # data = (ht, at, hg, ag): home team, away team,
        # home goals, away goals
        self.ht = data[0]
        self.at = data[1]
        self.hg = data[2]
        self.ag = data[3]

def random_obs():
    # one observation for every ordered pair of distinct names,
    # with goal counts drawn uniformly from 0..5
    names = 'abcdefghijklmnopqrstuvwxyz'
    random_data = [(i, j, random.randint(0, 5), random.randint(0, 5))
                   for i in names for j in names if i != j]
    return [Observation(i) for i in random_data]

class Model(object):
    def __init__(self, observations):
        self.observations = tuple(observations)
        self.names = [i.ht for i in observations]
        self.names.extend([i.at for i in observations])
        self.names = tuple(set(self.names))
        self.n = len(self.names)
    def guess(self):
        # crude starting values: per-name attack (a) and defence (b)
        # rates derived from the observed home-goal totals
        a, b = [], []
        total_hg = sum(i.hg for i in self.observations)
        for j in self.names:
            sh_j = sum(i.hg for i in self.observations if i.ht == j)
            ca_j = sum(i.hg for i in self.observations if i.at == j)
            a.append(sh_j / math.sqrt(total_hg))
            b.append(ca_j / math.sqrt(total_hg))
        # a[0] is dropped here: fobj recovers it from the constraint
        # sum(a) == n, so the vector is a[1:] + b + [g, r]
        return a[1:] + b + [1.5, 0]
    def tau(self, mu1, mu2, rho, x, y):
        # dependence correction: only the low-scoring results
        # 0-0, 0-1, 1-0 and 1-1 are adjusted
        if x == 0 and y == 0:
            t = 1 - mu1 * mu2 * rho
        elif x == 0 and y == 1:
            t = 1 + mu1 * rho
        elif x == 1 and y == 0:
            t = 1 + mu2 * rho
        elif x == 1 and y == 1:
            t = 1 - rho
        else:
            t = 1
        return t
    def fobj(self, x):
        n = self.n
        # rates enter through abs() so they stay non-negative; the
        # first attack rate is recovered from the constraint sum(a) == n
        y = [abs(i) for i in x[:-2]]
        y.insert(0, abs(n - sum(y[:n-1])))
        a = dict(zip(self.names, y[:n]))
        b = dict(zip(self.names, y[n:]))
        g = abs(x[-2])  # home-advantage factor
        r = x[-1]       # dependence parameter rho
        pseudo_loglikelihood = 0
        for m in self.observations:
            mu1 = a[m.ht] * b[m.at] * g
            mu2 = a[m.at] * b[m.ht]
            tau = self.tau(mu1, mu2, r, m.hg, m.ag)
            # clamp to keep the logs finite
            mu1 = mu1 if mu1 > 0 else 1e-10
            mu2 = mu2 if mu2 > 0 else 1e-10
            tau = tau if tau > 0 else 1e-10
            pseudo_loglikelihood += math.log(tau)
            # Poisson terms (constant log-factorials omitted)
            pseudo_loglikelihood += m.hg * math.log(mu1) - mu1
            pseudo_loglikelihood += m.ag * math.log(mu2) - mu2
        return -pseudo_loglikelihood
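
Since the subject of this thread is fmin_bfgs, here is a minimal
sketch of how the model above plugs into it. When fprime is omitted,
fmin_bfgs falls back on a finite-difference approximation of the
gradient; an analytical or AD gradient (e.g. one produced by
PYADOLC) could be passed through fprime instead. The names xhat and
grad below are just for illustration. Note that the abs() calls in
fobj make the objective non-differentiable wherever a component
crosses zero, which can slow down or derail a quasi-Newton method.

from scipy import optimize

m = Model(random_obs())
x0 = m.guess()

# gradient-free call: fmin_bfgs approximates the gradient internally
xhat = optimize.fmin_bfgs(m.fobj, x0)

# the same finite-difference gradient, computed explicitly
grad = optimize.approx_fprime(x0, m.fobj, 1e-8)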




