2

Je veux développer un reconnaissance de la parole dans android. J'ai utilisé this thread et this video pour utiliser la reconnaissance vocale dans un appareil Android.
Voici mon code:
MainActivity.java:Exception de pointeur NULL commutation pour rechercher dans Pocketsphinx Demo

package com.example.pocket_sphinx; 

import edu.cmu.pocketsphinx.Hypothesis; 
import edu.cmu.pocketsphinx.RecognitionListener; 
import android.os.Bundle; 
import android.app.Activity; 
import java.io.File; 
import java.io.IOException; 
import java.util.HashMap; 
import static android.widget.Toast.makeText; 
import static edu.cmu.pocketsphinx.SpeechRecognizerSetup.defaultSetup; 
import android.os.AsyncTask; 
import android.util.Log; 
import android.widget.TextView; 
import android.widget.Toast; 
import edu.cmu.pocketsphinx.Assets; 
import edu.cmu.pocketsphinx.SpeechRecognizer; 

public class MainActivity extends Activity implements RecognitionListener { 

     private static final String KWS_SEARCH = "wakeup"; 
     private static final String DICTATION_SEARCH = "digits"; 

     private static final String KEYPHRASE = "oh mighty computer"; 

     private SpeechRecognizer recognizer; 
     private HashMap<String, Integer> captions; 

     @Override 
     public void onCreate(Bundle state) { 
      super.onCreate(state); 

      // Prepare the data for UI 
      captions = new HashMap<String, Integer>(); 

      setContentView(R.layout.activity_main); 
      ((TextView) findViewById(R.id.caption_text)) 
        .setText("Preparing the recognizer"); 

      // Recognizer initialization is a time-consuming and it involves IO, 
      // so we execute it in async task 

      new AsyncTask<Void, Void, Exception>() { 
       @Override 
       protected Exception doInBackground(Void... params) { 
        try { 
         Assets assets = new Assets(MainActivity.this); 

         File assetDir = assets.syncAssets(); 

         setupRecognizer(assetDir); 

         recognizer.startListening(KWS_SEARCH); 

        } catch (IOException e) { 
         return e; 
        } 
        return null; 
       } 

       @Override 
       protected void onPostExecute(Exception result) { 
        if (result != null) { 
         ((TextView) findViewById(R.id.caption_text)) 
           .setText("Failed to init recognizer " + result); 
        } else { 
         switchSearch(KWS_SEARCH); 
        } 
       } 
      }.execute(); 
     } 

     @Override 
     public void onPartialResult(Hypothesis hypothesis) { 
      String text = hypothesis.getHypstr(); 
      Log.d("Spoken text",text); 

      if (text.equals(KEYPHRASE)) 
       switchSearch(DICTATION_SEARCH); 

      else 
       ((TextView) findViewById(R.id.result_text)).setText(text); 
     } 

     @Override 
     public void onResult(Hypothesis hypothesis) { 
      ((TextView) findViewById(R.id.result_text)).setText(""); 
      if (hypothesis != null) { 
       String text = hypothesis.getHypstr(); 
       makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show(); 
      } 
     } 

     @Override 
     public void onBeginningOfSpeech() { 
     } 

     @Override 
     public void onEndOfSpeech() { 
      Log.d("end","In end of speech"); 
      if (DICTATION_SEARCH.equals(recognizer.getSearchName())) 
       switchSearch(KWS_SEARCH); 
     } 

     private void switchSearch(String searchName) { 
      recognizer.stop(); 
      recognizer.startListening(searchName); 
      String caption = getResources().getString(captions.get(searchName)); 
      ((TextView) findViewById(R.id.caption_text)).setText(caption); 
     } 

     private void setupRecognizer(File assetsDir) { 
      File modelsDir = new File(assetsDir, "models"); 
      recognizer = defaultSetup() 
        .setAcousticModel(new File(modelsDir, "hmm/en-us")) 
        .setDictionary(new File(modelsDir, "dict/cmu07a.dic")) 
        .setRawLogDir(assetsDir).setKeywordThreshold(1e-20f) 
        //.setFloat("-beam", 1e-30f) 
        .getRecognizer(); 
      recognizer.addListener(this); 

      // Create keyword-activation search. 
      recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE); 

      // Create language model search. 
      File languageModel = new File(modelsDir, "lm/weather.dmp"); 
      recognizer.addNgramSearch(DICTATION_SEARCH, languageModel); 
     } 
    } 

En fait, je ne sais pas comment l'utiliser.
après l'exécution, il lance une exception d'exécution. J'ai posté le logcat de mon code:

03-08 14:02:53.040: I/cmusphinx(17309): INFO: ngram_search_fwdtree.c(99): 788 unique initial diphones 
03-08 14:02:53.230: I/cmusphinx(17309): INFO: ngram_search_fwdtree.c(148): 0 root, 0 non-root channels, 58 single-phone words 
03-08 14:02:53.250: I/cmusphinx(17309): INFO: ngram_search_fwdtree.c(186): Creating search tree 
03-08 14:02:53.470: I/cmusphinx(17309): INFO: ngram_search_fwdtree.c(192): before: 0 root, 0 non-root channels, 58 single-phone words 
03-08 14:02:55.630: I/cmusphinx(17309): INFO: ngram_search_fwdtree.c(326): after: max nonroot chan increased to 2230 
03-08 14:02:55.630: I/cmusphinx(17309): INFO: ngram_search_fwdtree.c(339): after: 253 root, 2102 non-root channels, 13 single-phone words 
03-08 14:02:55.630: I/cmusphinx(17309): INFO: ngram_search_fwdflat.c(157): fwdflat: min_ef_width = 4, max_sf_win = 25 
03-08 14:02:57.130: W/dalvikvm(17309): threadid=2: spin on suspend #1 threadid=11 (pcf=0) 
03-08 14:02:57.140: W/dalvikvm(17309): threadid=2: spin on suspend resolved in 1010 msec 
03-08 14:02:57.150: D/-heap(17309): GC_CONCURRENT freed 417K, 8% free 7863K/8455K, paused 14ms+1015ms, total 1597ms 
03-08 14:02:57.210: I/SpeechRecognizer(17309): Start recognition "wakeup" 
03-08 14:02:57.390: I/cmusphinx(17309): INFO: pocketsphinx.c(863): Writing raw audio log file: /mnt/sdcard/Android/data/com.example.pocket_sphinx/files/sync/000000000.raw 
03-08 14:02:57.890: I/SpeechRecognizer(17309): Stop recognition 
03-08 14:02:57.890: I/SpeechRecognizer(17309): Start recognition "wakeup" 
03-08 14:02:57.890: W/dalvikvm(17309): threadid=1: thread exiting with uncaught exception (group=0x41ab9450) 
03-08 14:02:57.930: E/AndroidRuntime(17309): FATAL EXCEPTION: main 
03-08 14:02:57.930: E/AndroidRuntime(17309): java.lang.NullPointerException 
03-08 14:02:57.930: E/AndroidRuntime(17309): at com.example.pocket_sphinx.MainActivity.switchSearch(MainActivity.java:108) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at com.example.pocket_sphinx.MainActivity.access$2(MainActivity.java:105) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at com.example.pocket_sphinx.MainActivity$1.onPostExecute(MainActivity.java:67) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at com.example.pocket_sphinx.MainActivity$1.onPostExecute(MainActivity.java:1) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at android.os.AsyncTask.finish(AsyncTask.java:631) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at android.os.AsyncTask.access$600(AsyncTask.java:177) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at android.os.AsyncTask$InternalHandler.handleMessage(AsyncTask.java:644) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at android.os.Handler.dispatchMessage(Handler.java:99) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at android.os.Looper.loop(Looper.java:137) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at android.app.ActivityThread.main(ActivityThread.java:4802) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at java.lang.reflect.Method.invokeNative(Native Method) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at java.lang.reflect.Method.invoke(Method.java:511) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:813) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:580) 
03-08 14:02:57.930: E/AndroidRuntime(17309): at dalvik.system.NativeStart.main(Native Method) 
03-08 14:02:57.940: I/cmusphinx(17309): INFO: pocketsphinx.c(863): Writing raw audio log file: /mnt/sdcard/Android/data/com.example.pocket_sphinx/files/sync/000000001.raw 
+1

L'erreur réelle est quelques lignes ci-dessus dans le logcat. Vous devez publier un logcat complet, et non une partie de celui-ci. –

+0

@NikolayShmyrev Merci pour votre attention! Je l'ai modifié – AFN

+0

@NikolayShmyrev cette application doit reconnaître la voix à travers le microphone ou le fichier? – AFN

Répondre

1

Vous avez deux problèmes avec votre code:

1) Vous commencez la recherche à l'intérieur doInBackground et commencer à nouveau la même recherche dans onPostExecute, vous pouvez simplement appeler switchSearch dans onPostExecute, cela suffirait

2) Vous ne remplissez pas le tableau de légendes mais vous l'utilisez dans la méthode switchSearch. Donc, vous obtenez une exception de pointeur nul. Vous pouvez commenter

 String caption = getResources().getString(captions.get(searchName)); 
     ((TextView) findViewById(R.id.caption_text)).setText(caption); 

dans switchSearch si vous ne comptez pas utiliser les sous-titres