
Gradient descent implementation in Python returns NaN

I'm trying to implement gradient descent in Python; the implementation works when I try it with training_set1, but it returns not-a-number (nan) when I try it with training_set. Any idea why my code is broken?

from collections import namedtuple 

TrainingInstance = namedtuple("TrainingInstance", ['X', 'Y']) 

training_set1 = [TrainingInstance(0, 4), TrainingInstance(1, 7), 
    TrainingInstance(2, 7), TrainingInstance(3, 8), 
    TrainingInstance(8, 12)] 


training_set = [TrainingInstance(60, 3.1), TrainingInstance(61, 3.6), 
    TrainingInstance(62, 3.8), TrainingInstance(63, 4), 
    TrainingInstance(65, 4.1)] 

def grad_desc(x, x1):
    # Minimize the squared-error cost over training_set with gradient descent,
    # updating the intercept x and the slope x1 simultaneously.
    training_rate = 0.1
    iterations = 5000
    #while sqrd_error(x, x1) > 0.0000001:
    while iterations > 0:
        #print(sqrd_error(x, x1))
        x, x1 = x - (training_rate * deriv(x, x1)), x1 - (training_rate * deriv1(x, x1))
        iterations -= 1
    return x, x1

def sqrd_error(x, x1):
    # Half mean squared error of the line x + x1 * X over training_set.
    sum = 0.0
    for inst in training_set:
        sum += ((x + x1 * inst.X) - inst.Y)**2
    return sum/(2.0 * len(training_set))

def deriv(x, x1):
    # Partial derivative of sqrd_error with respect to the intercept x.
    sum = 0.0
    for inst in training_set:
        sum += ((x + x1 * inst.X) - inst.Y)
    return sum/len(training_set)

def deriv1(x, x1):
    # Partial derivative of sqrd_error with respect to the slope x1.
    sum = 0.0
    for inst in training_set:
        sum += ((x + x1 * inst.X) - inst.Y) * inst.X
    return sum/len(training_set)


if __name__ == "__main__":
    print(grad_desc(2, 2))
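
Presumably (my reading of the symptom, not something stated in the question) the nan comes from the updates diverging: with X between 60 and 65 the gradients scale with mean(X**2), roughly 3900, so a training_rate of 0.1 overshoots on every step and x, x1 grow in magnitude until they overflow to inf, after which the arithmetic yields nan; training_set1, whose X values are small, stays stable at the same rate. A minimal sketch to make this visible, reusing deriv and deriv1 from above (trace_updates is a hypothetical helper, not part of the question):

# Diagnostic sketch -- assumes the nan is caused by divergence of the updates.
# trace_updates is a hypothetical helper; it applies the same update rule as
# grad_desc and prints the parameters every few steps so the blow-up to inf
# and then nan is visible when training_set is used with training_rate = 0.1.
def trace_updates(x, x1, training_rate=0.1, iterations=200):
    for i in range(iterations):
        if i % 20 == 0:
            print("iteration %3d: x = %g, x1 = %g" % (i, x, x1))
        x, x1 = (x - training_rate * deriv(x, x1),
                 x1 - training_rate * deriv1(x, x1))
    return x, x1

trace_updates(2, 2)                        # magnitudes explode, then become nan
trace_updates(2, 2, training_rate=0.0001)  # stays finite with a much smaller step

With the much smaller step the run no longer overflows, although it converges slowly because X is far from zero; centring or rescaling X (for example, subtracting its mean) is the usual way to make a step size like 0.1 workable.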
