
I am using Keras 2.0.6 to build models. I keep several models in a list called keras_models. Instead of saving just one model, I want to save the whole list of models to a pickle file, like below:

joblib.dump(keras_models, 'keras_models.pkl') 

This approach works fine when I have a list of sklearn random forest models or a list of ridge regression models. However, I get the following errors with a list of Keras models. Any idea what went wrong? Is there a way to save the whole list of Keras models to a file? Thanks!

--------------------------------------------------------------------------- 
AttributeError       Traceback (most recent call last) 
/usr/lib/python3.4/pickle.py in _getattribute(obj, name, allow_qualname) 
    271   try: 
--> 272    obj = getattr(obj, subpath) 
    273   except AttributeError: 

AttributeError: 'module' object has no attribute 'lock' 

During handling of the above exception, another exception occurred: 

AttributeError       Traceback (most recent call last) 
/usr/lib/python3.4/pickle.py in save_global(self, obj, name) 
    910    module = sys.modules[module_name] 
--> 911    obj2 = _getattribute(module, name, allow_qualname=self.proto >= 4) 
    912   except (ImportError, KeyError, AttributeError): 

/usr/lib/python3.4/pickle.py in _getattribute(obj, name, allow_qualname) 
    274    raise AttributeError("Can't get attribute {!r} on {!r}" 
--> 275         .format(name, obj)) 
    276  return obj 

AttributeError: Can't get attribute 'lock' on <module '_thread' (built-in)> 

During handling of the above exception, another exception occurred: 

PicklingError        Traceback (most recent call last) 
<ipython-input-5-e038764ff1dd> in <module>() 
    145 #--------------------------------------------------------------------------- 
    146 
--> 147 joblib.dump(keras_models, 'keras_models_20170928.pkl') 
    148 
    149 keras_metric_df.to_pickle('keras_metric_20170928.pkl') 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in dump(value, filename, compress, protocol, cache_size) 
    481  elif is_filename: 
    482   with open(filename, 'wb') as f: 
--> 483    NumpyPickler(f, protocol=protocol).dump(value) 
    484  else: 
    485   NumpyPickler(filename, protocol=protocol).dump(value) 

/usr/lib/python3.4/pickle.py in dump(self, obj) 
    410   if self.proto >= 4: 
    411    self.framer.start_framing() 
--> 412   self.save(obj) 
    413   self.write(STOP) 
    414   self.framer.end_framing() 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    477   f = self.dispatch.get(t) 
    478   if f is not None: 
--> 479    f(self, obj) # Call unbound method with explicit self 
    480    return 
    481 

/usr/lib/python3.4/pickle.py in save_list(self, obj) 
    772 
    773   self.memoize(obj) 
--> 774   self._batch_appends(obj) 
    775 
    776  dispatch[list] = save_list 

/usr/lib/python3.4/pickle.py in _batch_appends(self, items) 
    796     write(MARK) 
    797     for x in tmp: 
--> 798      save(x) 
    799     write(APPENDS) 
    800    elif n: 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    522 
    523   # Save the reduce() output and finally memoize the object 
--> 524   self.save_reduce(obj=obj, *rv) 
    525 
    526  def persistent_id(self, obj): 

/usr/lib/python3.4/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj) 
    625 
    626   if state is not None: 
--> 627    save(state) 
    628    write(BUILD) 
    629 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    477   f = self.dispatch.get(t) 
    478   if f is not None: 
--> 479    f(self, obj) # Call unbound method with explicit self 
    480    return 
    481 

/usr/lib/python3.4/pickle.py in save_dict(self, obj) 
    812 
    813   self.memoize(obj) 
--> 814   self._batch_setitems(obj.items()) 
    815 
    816  dispatch[dict] = save_dict 

/usr/lib/python3.4/pickle.py in _batch_setitems(self, items) 
    838     for k, v in tmp: 
    839      save(k) 
--> 840      save(v) 
    841     write(SETITEMS) 
    842    elif n: 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    477   f = self.dispatch.get(t) 
    478   if f is not None: 
--> 479    f(self, obj) # Call unbound method with explicit self 
    480    return 
    481 

/usr/lib/python3.4/pickle.py in save_list(self, obj) 
    772 
    773   self.memoize(obj) 
--> 774   self._batch_appends(obj) 
    775 
    776  dispatch[list] = save_list 

/usr/lib/python3.4/pickle.py in _batch_appends(self, items) 
    799     write(APPENDS) 
    800    elif n: 
--> 801     save(tmp[0]) 
    802     write(APPEND) 
    803    # else tmp is empty, and we're done 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    522 
    523   # Save the reduce() output and finally memoize the object 
--> 524   self.save_reduce(obj=obj, *rv) 
    525 
    526  def persistent_id(self, obj): 

/usr/lib/python3.4/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj) 
    625 
    626   if state is not None: 
--> 627    save(state) 
    628    write(BUILD) 
    629 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    477   f = self.dispatch.get(t) 
    478   if f is not None: 
--> 479    f(self, obj) # Call unbound method with explicit self 
    480    return 
    481 

/usr/lib/python3.4/pickle.py in save_dict(self, obj) 
    812 
    813   self.memoize(obj) 
--> 814   self._batch_setitems(obj.items()) 
    815 
    816  dispatch[dict] = save_dict 

/usr/lib/python3.4/pickle.py in _batch_setitems(self, items) 
    838     for k, v in tmp: 
    839      save(k) 
--> 840      save(v) 
    841     write(SETITEMS) 
    842    elif n: 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    522 
    523   # Save the reduce() output and finally memoize the object 
--> 524   self.save_reduce(obj=obj, *rv) 
    525 
    526  def persistent_id(self, obj): 

/usr/lib/python3.4/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj) 
    625 
    626   if state is not None: 
--> 627    save(state) 
    628    write(BUILD) 
    629 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    477   f = self.dispatch.get(t) 
    478   if f is not None: 
--> 479    f(self, obj) # Call unbound method with explicit self 
    480    return 
    481 

/usr/lib/python3.4/pickle.py in save_dict(self, obj) 
    812 
    813   self.memoize(obj) 
--> 814   self._batch_setitems(obj.items()) 
    815 
    816  dispatch[dict] = save_dict 

/usr/lib/python3.4/pickle.py in _batch_setitems(self, items) 
    838     for k, v in tmp: 
    839      save(k) 
--> 840      save(v) 
    841     write(SETITEMS) 
    842    elif n: 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    522 
    523   # Save the reduce() output and finally memoize the object 
--> 524   self.save_reduce(obj=obj, *rv) 
    525 
    526  def persistent_id(self, obj): 

/usr/lib/python3.4/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj) 
    625 
    626   if state is not None: 
--> 627    save(state) 
    628    write(BUILD) 
    629 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    477   f = self.dispatch.get(t) 
    478   if f is not None: 
--> 479    f(self, obj) # Call unbound method with explicit self 
    480    return 
    481 

/usr/lib/python3.4/pickle.py in save_dict(self, obj) 
    812 
    813   self.memoize(obj) 
--> 814   self._batch_setitems(obj.items()) 
    815 
    816  dispatch[dict] = save_dict 

/usr/lib/python3.4/pickle.py in _batch_setitems(self, items) 
    838     for k, v in tmp: 
    839      save(k) 
--> 840      save(v) 
    841     write(SETITEMS) 
    842    elif n: 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    522 
    523   # Save the reduce() output and finally memoize the object 
--> 524   self.save_reduce(obj=obj, *rv) 
    525 
    526  def persistent_id(self, obj): 

/usr/lib/python3.4/pickle.py in save_reduce(self, func, args, state, listitems, dictitems, obj) 
    596      "args[0] from __newobj__ args has the wrong class") 
    597    args = args[1:] 
--> 598    save(cls) 
    599    save(args) 
    600    write(NEWOBJ) 

/usr/local/lib/python3.4/dist-packages/sklearn/externals/joblib/numpy_pickle.py in save(self, obj) 
    278    return 
    279 
--> 280   return Pickler.save(self, obj) 
    281 
    282 

/usr/lib/python3.4/pickle.py in save(self, obj, save_persistent_id) 
    477   f = self.dispatch.get(t) 
    478   if f is not None: 
--> 479    f(self, obj) # Call unbound method with explicit self 
    480    return 
    481 

/usr/lib/python3.4/pickle.py in save_type(self, obj) 
    964   elif obj is type(...): 
    965    return self.save_reduce(type, (...,), obj=obj) 
--> 966   return self.save_global(obj) 
    967 
    968  dispatch[FunctionType] = save_global 

/usr/lib/python3.4/pickle.py in save_global(self, obj, name) 
    913    raise PicklingError(
    914     "Can't pickle %r: it's not found as %s.%s" % 
--> 915     (obj, module_name, name)) 
    916   else: 
    917    if obj2 is not obj: 

PicklingError: Can't pickle <class '_thread.lock'>: it's not found as _thread.lock 

Answer

Per the Keras documentation,

It is not recommended to use pickle or cPickle to save a Keras model.
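
The documented alternative is to save each model with model.save() and reload it with load_model(). A minimal sketch for a list of models, assuming the list holds ordinary Keras Model/Sequential instances (the file names are placeholders, not from the question):

from keras.models import load_model

# Save: one HDF5 file per model (architecture + weights + optimizer state).
for i, model in enumerate(keras_models):
    model.save('keras_model_{}.h5'.format(i))

# Load: rebuild the list from the individual files.
n_models = len(keras_models)
keras_models = [load_model('keras_model_{}.h5'.format(i)) for i in range(n_models)]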

If it is really important that all of your models are stored in a single file, I recommend implementing your own save/load methods. You can use hickle to save the weight matrices in HDF5 format instead of the JSON/pickle format. (HDF5 is more efficient for storing large matrices.)
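
If you do roll your own save/load, one possible sketch (the helper names save_keras_models/load_keras_models and the file layout are illustrative, not from the answer) is to write each model's architecture as JSON via to_json() and its weights as HDF5 via save_weights(); the resulting directory can then be archived into a single file if that is strictly required:

import os
from keras.models import model_from_json

def save_keras_models(models, directory):
    # One JSON file (architecture) + one HDF5 file (weights) per model.
    os.makedirs(directory, exist_ok=True)
    for i, model in enumerate(models):
        with open(os.path.join(directory, 'model_{}.json'.format(i)), 'w') as f:
            f.write(model.to_json())
        model.save_weights(os.path.join(directory, 'model_{}_weights.h5'.format(i)))

def load_keras_models(directory):
    # Rebuild each model from its JSON config, then restore its weights.
    models = []
    i = 0
    while os.path.exists(os.path.join(directory, 'model_{}.json'.format(i))):
        with open(os.path.join(directory, 'model_{}.json'.format(i))) as f:
            model = model_from_json(f.read())
        model.load_weights(os.path.join(directory, 'model_{}_weights.h5'.format(i)))
        models.append(model)
        i += 1
    return models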