2017-07-20 4 views
1

J'essaye de nourrir mes propres données 3D à un LSTM. Les données ont: hauteur = 365, largeur = 310, temps = inconnu/incohérent, constitué de 0 et 1, chaque bloc de données qui produisent une sortie sont séparés en un seul fichier.tensorflow static_rnn erreur: entrée doit être une séquence

import tensorflow as tf 
import os 
from tensorflow.contrib import rnn 

filename = "C:/Kuliah/EmotionRecognition/Train1/D2N2Sur.txt" 

hm_epochs = 10 
n_classes = 12 
n_chunk = 443 
n_hidden = 500 

data = tf.placeholder(tf.bool, name='data') 
cat = tf.placeholder("float", [None, n_classes]) 

weights = { 
    'out': tf.Variable(tf.random_normal([n_hidden, n_classes])) 
} 
biases = { 
    'out': tf.Variable(tf.random_normal([n_classes])) 
} 

def RNN(x, weights, biases): 
    lstm_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0) 
    outputs, states = rnn.static_rnn(lstm_cell, x, dtype=tf.float32) 
    return tf.matmul(outputs[-1], weights['out']) + biases['out'] 

pred = RNN(data, weights, biases) 

cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=cat)) 
optimizer = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cost) 

correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(cat, 1)) 
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32)) 

saver = tf.train.Saver() 

temp = [[]] 
d3 = [[]] 
counter = 0 
with tf.Session() as sess: 
    #load 
    #saver.restore(sess, "C:/Kuliah/EmotionRecognition/model.ckpt") 
    sess.run(tf.global_variables_initializer()) 
    with open(filename) as inf: 
     for line in inf: 
      bla = list(line) 
      bla.pop(len(bla) - 1) 
      for index, item in enumerate(bla): 
       if (item == '0'): 
        bla[index] = False 
       else: 
        bla[index] = True 
      temp.append(bla) 
      counter += 1 
      if counter%365==0: #height 365 
       temp.pop(0) 
       d3.append(temp) 
       temp = [[]] 
     temp.pop(0) 
     d3.append(temp) 

     batch_data = d3.reshape() 
     sess.run(optimizer, feed_dict={data: d3, cat: 11}) 

     acc = sess.run(accuracy, feed_dict={data: d3, cat: 11}) 
     loss = sess.run(loss, feed_dict={data: d3, cat: 11}) 
     print(acc) 
     print(loss) 
     #save 
     saver.save(sess, "C:/Kuliah/EmotionRecognition/model.ckpt") 

ce code me jeter une erreur:

Traceback (most recent call last): 
    File "C:/Kuliah/EmotionRecognition/Main", line 31, in <module> 
    pred = RNN(data, weights, biases) 
    File "C:/Kuliah/EmotionRecognition/Main", line 28, in RNN 
    outputs, states = rnn.static_rnn(lstm_cell, x, dtype=tf.float32) 
    File "C:\Users\Anonymous\AppData\Roaming\Python\Python35\site-packages\tensorflow\python\ops\rnn.py", line 1119, in static_rnn 
    raise TypeError("inputs must be a sequence") 
TypeError: inputs must be a sequence 

Répondre

3

Lorsque vous appelez pred = RNN(data, weights, biases), l'argument data devrait être une séquence de longueur la longueur de votre RNN. Mais dans votre cas, c'est un data = tf.placeholder(tf.bool, name='data').

Vous pouvez essayer pred = RNN([data], weights, biases).

Voir la chaîne doc de la méthode:

inputs: A length T list of inputs, each a Tensor of shape [batch_size, input_size] , or a nested tuple of such elements.

Si la longueur de votre RNN est Unknow, vous devriez envisager d'utiliser tf.nn.dynamic_rnn.