2017-01-24 1 views
1

Je voulais courir ExtractDecodeEditEncodeMuxTest de https://android.googlesource.com/platform/cts/+/jb-mr2-release/tests/tests/media/src/android/media/cts/ExtractDecodeEditEncodeMuxTest.java.Android MediaCodec: sortie ExtractDecodeEditEncodeMuxTest après la ligne: surface de sortie: await nouvelle image

J'ai édité le code de sorte qu'il puisse prendre une entrée de la carte SD et la sortie vers la carte SD pour simplifier le code. Mais principal alors que la rupture de la boucle après la ligne outputSurface.awaitNewImage() ;. Et l'encodage de décodage s'est arrêté.

private String mInputFile = Environment.getExternalStorageDirectory().getAbsolutePath()+"/dingdong.mp4"; 
private String mOutputFile = Environment.getExternalStorageDirectory().getAbsolutePath()+"/compressed_output.mp4"; 
private String mOutputVideoMimeType; 


@Override 
protected void onCreate(Bundle savedInstanceState) { 
    super.onCreate(savedInstanceState); 
    try { 
     extractDecodeEditEncodeMux(); 
    } catch (Exception e) { 
     Log.e(TAG,e.getMessage(),e); 
    } 
} 


private void extractDecodeEditEncodeMux() throws Exception { 
    // Exception that may be thrown during release. 
    Exception exception = null; 
    MediaCodecInfo videoCodecInfo = selectCodec(OUTPUT_VIDEO_MIME_TYPE); 
    if (videoCodecInfo == null) { 
     // Don't fail CTS if they don't have an AVC codec (not here, anyway). 
     Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_VIDEO_MIME_TYPE); 
     return; 
    } 
    if (VERBOSE) Log.d(TAG, "video found codec: " + videoCodecInfo.getName()); 
    MediaCodecInfo audioCodecInfo = selectCodec(OUTPUT_AUDIO_MIME_TYPE); 
    if (audioCodecInfo == null) { 
     // Don't fail CTS if they don't have an AAC codec (not here, anyway). 
     Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_AUDIO_MIME_TYPE); 
     return; 
    } 
    if (VERBOSE) Log.d(TAG, "audio found codec: " + audioCodecInfo.getName()); 
    MediaExtractor videoExtractor = null; 
    MediaExtractor audioExtractor = null; 
    OutputSurface outputSurface = null; 
    MediaCodec videoDecoder = null; 
    MediaCodec audioDecoder = null; 
    MediaCodec videoEncoder = null; 
    MediaCodec audioEncoder = null; 
    MediaMuxer muxer = null; 
    InputSurface inputSurface = null; 
    try { 
     if (mCopyVideo) { 
      videoExtractor = createExtractor(); 
      int videoInputTrack = getAndSelectVideoTrackIndex(videoExtractor); 
      assertTrue("missing video track in test video", videoInputTrack != -1); 
      MediaFormat inputFormat = videoExtractor.getTrackFormat(videoInputTrack); 
      // We avoid the device-specific limitations on width and height by using values 
      // that are multiples of 16, which all tested devices seem to be able to handle. 
      MediaFormat outputVideoFormat = 
        MediaFormat.createVideoFormat(OUTPUT_VIDEO_MIME_TYPE, mOutputWidth, mOutputHeight); 
      // Set some properties. Failing to specify some of these can cause the MediaCodec 
      // configure() call to throw an unhelpful exception. 
      outputVideoFormat.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, OUTPUT_VIDEO_COLOR_FORMAT); 
      outputVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_VIDEO_BIT_RATE); 
      outputVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE); 
      outputVideoFormat.setInteger(
        MediaFormat.KEY_I_FRAME_INTERVAL, OUTPUT_VIDEO_IFRAME_INTERVAL); 
      if (VERBOSE) Log.d(TAG, "video format: " + outputVideoFormat); 
      // Create a MediaCodec for the desired codec, then configure it as an encoder with 
      // our desired properties. Request a Surface to use for input. 
      AtomicReference<Surface> inputSurfaceReference = new AtomicReference<Surface>(); 
      videoEncoder = createVideoEncoder(
        videoCodecInfo, outputVideoFormat, inputSurfaceReference); 
      inputSurface = new InputSurface(inputSurfaceReference.get()); 
      inputSurface.makeCurrent(); 
      // Create a MediaCodec for the decoder, based on the extractor's format. 
      outputSurface = new OutputSurface(); 
      outputSurface.changeFragmentShader(FRAGMENT_SHADER); 
      videoDecoder = createVideoDecoder(inputFormat, outputSurface.getSurface()); 
     } 
     if (mCopyAudio) { 
      audioExtractor = createExtractor(); 
      int audioInputTrack = getAndSelectAudioTrackIndex(audioExtractor); 
      assertTrue("missing audio track in test video", audioInputTrack != -1); 
      MediaFormat inputFormat = audioExtractor.getTrackFormat(audioInputTrack); 
      MediaFormat outputAudioFormat = MediaFormat.createAudioFormat(OUTPUT_AUDIO_MIME_TYPE, OUTPUT_AUDIO_SAMPLE_RATE_HZ, OUTPUT_AUDIO_CHANNEL_COUNT); 
      outputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_AUDIO_BIT_RATE); 
      outputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE); 
      // Create a MediaCodec for the desired codec, then configure it as an encoder with 
      // our desired properties. Request a Surface to use for input. 
      audioEncoder = createAudioEncoder(audioCodecInfo, outputAudioFormat); 
      // Create a MediaCodec for the decoder, based on the extractor's format. 
      audioDecoder = createAudioDecoder(inputFormat); 
     } 
     // Creates a muxer but do not start or add tracks just yet. 
     muxer = createMuxer(); 
     doExtractDecodeEditEncodeMux(videoExtractor, audioExtractor, videoDecoder, videoEncoder, audioDecoder, audioEncoder, muxer, inputSurface, outputSurface); 
    } finally { 
     if (VERBOSE) Log.d(TAG, "releasing extractor, decoder, encoder, and muxer"); 
     // Try to release everything we acquired, even if one of the releases fails, in which 
     // case we save the first exception we got and re-throw at the end (unless something 
     // other exception has already been thrown). This guarantees the first exception thrown 
     // is reported as the cause of the error, everything is (attempted) to be released, and 
     // all other exceptions appear in the logs. 
     try { 
      if (videoExtractor != null) { 
       videoExtractor.release(); 
      } 
     } catch(Exception e) { 
      Log.e(TAG, "error while releasing videoExtractor", e); 
      if (exception == null) { 
       exception = e; 
      } 
     } 
     try { 
      if (audioExtractor != null) { 
       audioExtractor.release(); 
      } 
     } catch(Exception e) { 
      Log.e(TAG, "error while releasing audioExtractor", e); 
      if (exception == null) { 
       exception = e; 
      } 
     } 
     try { 
      if (videoDecoder != null) { 
       videoDecoder.stop(); 
       videoDecoder.release(); 
      } 
     } catch(Exception e) { 
      Log.e(TAG, "error while releasing videoDecoder", e); 
      if (exception == null) { 
       exception = e; 
      } 
     } 
     try { 
      if (outputSurface != null) { 
       outputSurface.release(); 
      } 
     } catch(Exception e) { 
      Log.e(TAG, "error while releasing outputSurface", e); 
      if (exception == null) { 
       exception = e; 
      } 
     } 
     try { 
      if (videoEncoder != null) { 
       videoEncoder.stop(); 
       videoEncoder.release(); 
      } 
     } catch(Exception e) { 
      Log.e(TAG, "error while releasing videoEncoder", e); 
      if (exception == null) { 
       exception = e; 
      } 
     } 
     try { 
      if (audioDecoder != null) { 
       audioDecoder.stop(); 
       audioDecoder.release(); 
      } 
     } catch(Exception e) { 
      Log.e(TAG, "error while releasing audioDecoder", e); 
      if (exception == null) { 
       exception = e; 
      } 
     } 
     try { 
      if (audioEncoder != null) { 
       audioEncoder.stop(); 
       audioEncoder.release(); 
      } 
     } catch(Exception e) { 
      Log.e(TAG, "error while releasing audioEncoder", e); 
      if (exception == null) { 
       exception = e; 
      } 
     } 
     try { 
      if (muxer != null) { 
       muxer.stop(); 
       muxer.release(); 
      } 
     } catch(Exception e) { 
      Log.e(TAG, "error while releasing muxer", e); 
      if (exception == null) { 
       exception = e; 
      } 
     } 
     try { 
      if (inputSurface != null) { 
       inputSurface.release(); 
      } 
     } catch(Exception e) { 
      Log.e(TAG, "error while releasing inputSurface", e); 
      if (exception == null) { 
       exception = e; 
      } 
     } 
    } 
    if (exception != null) { 
     throw exception; 
    } 
} 
/** 
* Creates an extractor that reads its frames from {@link #mSourceResId}. 
*/ 
private MediaExtractor createExtractor() throws IOException { 
    MediaExtractor extractor = new MediaExtractor(); 
    extractor.setDataSource(mInputFile); 
    return extractor; 
} 
/** 
* Creates a decoder for the given format, which outputs to the given surface. 
* 
* @param inputFormat the format of the stream to decode 
* @param surface into which to decode the frames 
*/ 
private MediaCodec createVideoDecoder(MediaFormat inputFormat, Surface surface) throws IOException { 
    MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat)); 
    decoder.configure(inputFormat, surface, null, 0); 
    decoder.start(); 
    return decoder; 
} 

private MediaCodec createVideoEncoder(
     MediaCodecInfo codecInfo, 
     MediaFormat format, 
     AtomicReference<Surface> surfaceReference) throws IOException { 
    MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName()); 
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 
    // Must be called before start() is. 
    surfaceReference.set(encoder.createInputSurface()); 
    encoder.start(); 
    return encoder; 
} 

private MediaCodec createAudioDecoder(MediaFormat inputFormat) throws IOException { 
    MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat)); 
    decoder.configure(inputFormat, null, null, 0); 
    decoder.start(); 
    return decoder; 
} 

private MediaCodec createAudioEncoder(MediaCodecInfo codecInfo, MediaFormat format) throws IOException { 
    MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName()); 
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 
    encoder.start(); 
    return encoder; 
} 

private MediaMuxer createMuxer() throws IOException { 
    return new MediaMuxer(mOutputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); 
} 
private int getAndSelectVideoTrackIndex(MediaExtractor extractor) { 
    for (int index = 0; index < extractor.getTrackCount(); ++index) { 
     if (VERBOSE) { 
      Log.d(TAG, "format for track " + index + " is " 
        + getMimeTypeFor(extractor.getTrackFormat(index))); 
     } 
     if (isVideoFormat(extractor.getTrackFormat(index))) { 
      extractor.selectTrack(index); 
      MediaFormat inputFormat = extractor.getTrackFormat(index); 
      mOutputWidth = inputFormat.getInteger(MediaFormat.KEY_WIDTH); 
      mOutputHeight = inputFormat.getInteger(MediaFormat.KEY_HEIGHT); 
      return index; 
     } 
    } 
    return -1; 
} 
private int getAndSelectAudioTrackIndex(MediaExtractor extractor) { 
    for (int index = 0; index < extractor.getTrackCount(); ++index) { 
     if (VERBOSE) { 
      Log.d(TAG, "format for track " + index + " is " 
        + getMimeTypeFor(extractor.getTrackFormat(index))); 
     } 
     if (isAudioFormat(extractor.getTrackFormat(index))) { 
      extractor.selectTrack(index); 
      return index; 
     } 
    } 
    return -1; 
} 

private void doExtractDecodeEditEncodeMux(MediaExtractor videoExtractor, MediaExtractor audioExtractor, MediaCodec videoDecoder, MediaCodec videoEncoder, MediaCodec audioDecoder, MediaCodec audioEncoder, MediaMuxer muxer, InputSurface inputSurface, OutputSurface outputSurface) { 
    ByteBuffer[] videoDecoderInputBuffers = null; 
    ByteBuffer[] videoDecoderOutputBuffers = null; 
    ByteBuffer[] videoEncoderOutputBuffers = null; 
    MediaCodec.BufferInfo videoDecoderOutputBufferInfo = null; 
    MediaCodec.BufferInfo videoEncoderOutputBufferInfo = null; 
    if (mCopyVideo) { 
     videoDecoderInputBuffers = videoDecoder.getInputBuffers(); 
     videoDecoderOutputBuffers = videoDecoder.getOutputBuffers(); 
     videoEncoderOutputBuffers = videoEncoder.getOutputBuffers(); 
     videoDecoderOutputBufferInfo = new MediaCodec.BufferInfo(); 
     videoEncoderOutputBufferInfo = new MediaCodec.BufferInfo(); 
    } 
    ByteBuffer[] audioDecoderInputBuffers = null; 
    ByteBuffer[] audioDecoderOutputBuffers = null; 
    ByteBuffer[] audioEncoderInputBuffers = null; 
    ByteBuffer[] audioEncoderOutputBuffers = null; 
    MediaCodec.BufferInfo audioDecoderOutputBufferInfo = null; 
    MediaCodec.BufferInfo audioEncoderOutputBufferInfo = null; 
    if (mCopyAudio) { 
     audioDecoderInputBuffers = audioDecoder.getInputBuffers(); 
     audioDecoderOutputBuffers = audioDecoder.getOutputBuffers(); 
     audioEncoderInputBuffers = audioEncoder.getInputBuffers(); 
     audioEncoderOutputBuffers = audioEncoder.getOutputBuffers(); 
     audioDecoderOutputBufferInfo = new MediaCodec.BufferInfo(); 
     audioEncoderOutputBufferInfo = new MediaCodec.BufferInfo(); 
    } 
    // We will get these from the decoders when notified of a format change. 
    MediaFormat decoderOutputVideoFormat = null; 
    MediaFormat decoderOutputAudioFormat = null; 
    // We will get these from the encoders when notified of a format change. 
    MediaFormat encoderOutputVideoFormat = null; 
    MediaFormat encoderOutputAudioFormat = null; 
    // We will determine these once we have the output format. 
    int outputVideoTrack = -1; 
    int outputAudioTrack = -1; 
    // Whether things are done on the video side. 
    boolean videoExtractorDone = false; 
    boolean videoDecoderDone = false; 
    boolean videoEncoderDone = false; 
    // Whether things are done on the audio side. 
    boolean audioExtractorDone = false; 
    boolean audioDecoderDone = false; 
    boolean audioEncoderDone = false; 
    // The audio decoder output buffer to process, -1 if none. 
    int pendingAudioDecoderOutputBufferIndex = -1; 
    boolean muxing = false; 
    int videoExtractedFrameCount = 0; 
    int videoDecodedFrameCount = 0; 
    int videoEncodedFrameCount = 0; 
    int audioExtractedFrameCount = 0; 
    int audioDecodedFrameCount = 0; 
    int audioEncodedFrameCount = 0; 
    while ((mCopyVideo && !videoEncoderDone) || (mCopyAudio && !audioEncoderDone)) { 



     //1: Extract video from file and feed to decoder. 
     // Do not extract video if we have determined the output format but we are not yet 
     // ready to mux the frames. 
     while (mCopyVideo && !videoExtractorDone && (encoderOutputVideoFormat == null || muxing)) { 

      int decoderInputBufferIndex = videoDecoder.dequeueInputBuffer(TIMEOUT_USEC); 

      if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { 
       if (VERBOSE) Log.d(TAG, "no video decoder input buffer"); 
       break; 
      } 
      if (VERBOSE)Log.d(TAG, "video decoder: returned input buffer: " + decoderInputBufferIndex); 

      ByteBuffer decoderInputBuffer = videoDecoderInputBuffers[decoderInputBufferIndex]; 

      int size = videoExtractor.readSampleData(decoderInputBuffer, 0); 
      long presentationTime = videoExtractor.getSampleTime(); 
      if (VERBOSE)Log.d(TAG, "video extractor: returned buffer of size " + size +" for time "+presentationTime); 


      if (size >= 0) videoDecoder.queueInputBuffer(decoderInputBufferIndex, 0, size, presentationTime,videoExtractor.getSampleFlags()); 

      videoExtractorDone = !videoExtractor.advance(); 
      if (videoExtractorDone) { 
       if (VERBOSE) Log.d(TAG, "video extractor: EOS"); 
       videoDecoder.queueInputBuffer(decoderInputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); 
      } 
      videoExtractedFrameCount++; 
      // We extracted a frame, let's try something else next. 
      break; 
     } 

     //3: Poll output frames from the video decoder and feed the encoder. 
     while (mCopyVideo && !videoDecoderDone && (encoderOutputVideoFormat == null || muxing)) { 

      int decoderOutputBufferIndex = videoDecoder.dequeueOutputBuffer(videoDecoderOutputBufferInfo, TIMEOUT_USEC); 

      if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { 
       if (VERBOSE) Log.d(TAG, "no video decoder output buffer"); 
       break; 
      } 
      if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 
       if (VERBOSE) Log.d(TAG, "video decoder: output buffers changed"); 
       videoDecoderOutputBuffers = videoDecoder.getOutputBuffers(); 
       break; 
      } 
      if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 
       decoderOutputVideoFormat = videoDecoder.getOutputFormat(); 
       if (VERBOSE) Log.d(TAG, "video decoder: output format changed: " + decoderOutputVideoFormat); 
       break; 
      } 
      if (VERBOSE) { 
       Log.d(TAG, "video decoder: returned output buffer: " + decoderOutputBufferIndex); 
       Log.d(TAG, "video decoder: returned buffer of size " + videoDecoderOutputBufferInfo.size); 
      } 
      ByteBuffer decoderOutputBuffer = videoDecoderOutputBuffers[decoderOutputBufferIndex]; 
      if ((videoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)!= 0) { 
       if (VERBOSE) Log.d(TAG, "video decoder: codec config buffer"); 
       videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false); 
       break; 
      } 
      if (VERBOSE)Log.d(TAG, "video decoder: returned buffer for time " + videoDecoderOutputBufferInfo.presentationTimeUs); 

      boolean render = videoDecoderOutputBufferInfo.size != 0; 
      videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, render); 
      if (render) { 
       if (VERBOSE) Log.d(TAG, "output surface: await new image"); 
       outputSurface.awaitNewImage(); 
       // Edit the frame and send it to the encoder. 
       if (VERBOSE) Log.d(TAG, "output surface: draw image"); 
       outputSurface.drawImage(); 
       inputSurface.setPresentationTime(videoDecoderOutputBufferInfo.presentationTimeUs * 1000); 
       if (VERBOSE) Log.d(TAG, "input surface: swap buffers"); 
       inputSurface.swapBuffers(); 
       if (VERBOSE) Log.d(TAG, "video encoder: notified of new frame"); 
      } 
      if ((videoDecoderOutputBufferInfo.flags 
        & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 
       if (VERBOSE) Log.d(TAG, "video decoder: EOS"); 
       videoDecoderDone = true; 
       videoEncoder.signalEndOfInputStream(); 
      } 
      videoDecodedFrameCount++; 
      // We extracted a pending frame, let's try something else next. 
      break; 
     } 



     //6: Poll frames from the video encoder and send them to the muxer. 
     while (mCopyVideo && !videoEncoderDone && (encoderOutputVideoFormat == null || muxing)) { 
      int encoderOutputBufferIndex = videoEncoder.dequeueOutputBuffer(videoEncoderOutputBufferInfo, TIMEOUT_USEC); 
      if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { 
       if (VERBOSE) Log.d(TAG, "no video encoder output buffer"); 
       break; 
      } 
      if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 
       if (VERBOSE) Log.d(TAG, "video encoder: output buffers changed"); 
       videoEncoderOutputBuffers = videoEncoder.getOutputBuffers(); 
       break; 
      } 
      if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 
       if (VERBOSE) Log.d(TAG, "video encoder: output format changed"); 
       if (outputVideoTrack >= 0) { 
        fail("video encoder changed its output format again?"); 
       } 
       encoderOutputVideoFormat = videoEncoder.getOutputFormat(); 
       break; 
      } 
      assertTrue("should have added track before processing output", muxing); 
      if (VERBOSE) { 
       Log.d(TAG, "video encoder: returned output buffer: " + encoderOutputBufferIndex); 
       Log.d(TAG, "video encoder: returned buffer of size " + videoEncoderOutputBufferInfo.size); 
      } 
      ByteBuffer encoderOutputBuffer = videoEncoderOutputBuffers[encoderOutputBufferIndex]; 
      if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 
       if (VERBOSE) Log.d(TAG, "video encoder: codec config buffer"); 
       // Simply ignore codec config buffers. 
       videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false); 
       break; 
      } 
      if (VERBOSE) Log.d(TAG, "video encoder: returned buffer for time " + videoEncoderOutputBufferInfo.presentationTimeUs); 

      if (videoEncoderOutputBufferInfo.size != 0) {muxer.writeSampleData(outputVideoTrack, encoderOutputBuffer, videoEncoderOutputBufferInfo); 
      } 
      if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) 
        != 0) { 
       if (VERBOSE) Log.d(TAG, "video encoder: EOS"); 
       videoEncoderDone = true; 
      } 
      videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false); 
      videoEncodedFrameCount++; 
      // We enqueued an encoded frame, let's try something else next. 
      break; 
     } 

     if (!muxing 
       && (!mCopyAudio || encoderOutputAudioFormat != null) 
       && (!mCopyVideo || encoderOutputVideoFormat != null)) { 
      if (mCopyVideo) { 
       Log.d(TAG, "muxer: adding video track."); 
       outputVideoTrack = muxer.addTrack(encoderOutputVideoFormat); 
      } 
      if (mCopyAudio) { 
       Log.d(TAG, "muxer: adding audio track."); 
       outputAudioTrack = muxer.addTrack(encoderOutputAudioFormat); 
      } 
      Log.d(TAG, "muxer: starting"); 
      muxer.start(); 
      muxing = true; 
     } 
    } 
    // Basic sanity checks. 
    if (mCopyVideo) { 
     assertEquals("encoded and decoded video frame counts should match", 
       videoDecodedFrameCount, videoEncodedFrameCount); 
     assertTrue("decoded frame count should be less than extracted frame count", 
       videoDecodedFrameCount <= videoExtractedFrameCount); 
    } 
    if (mCopyAudio) { 
     assertEquals("no frame should be pending", -1, pendingAudioDecoderOutputBufferIndex); 
    } 
    // TODO: Check the generated output file. 
} 
private static boolean isVideoFormat(MediaFormat format) { 
    return getMimeTypeFor(format).startsWith("video/"); 
} 
private static boolean isAudioFormat(MediaFormat format) { 
    return getMimeTypeFor(format).startsWith("audio/"); 
} 
private static String getMimeTypeFor(MediaFormat format) { 
    return format.getString(MediaFormat.KEY_MIME); 
} 
/** 
* Returns the first codec capable of encoding the specified MIME type, or null if no match was 
* found. 
*/ 
private static MediaCodecInfo selectCodec(String mimeType) { 
    int numCodecs = MediaCodecList.getCodecCount(); 
    for (int i = 0; i < numCodecs; i++) { 
     MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); 
     if (!codecInfo.isEncoder()) { 
      continue; 
     } 
     String[] types = codecInfo.getSupportedTypes(); 
     for (int j = 0; j < types.length; j++) { 
      if (types[j].equalsIgnoreCase(mimeType)) { 
       return codecInfo; 
      } 
     } 
    } 
    return null; 
} 

Répondre

0

J'ai trouvé une solution. Je ne devrais pas appeler extractDecodeEditEncodeMux() directement dans la méthode onCreate(). J'ai besoin de créer un thread séparé et d'appeler extractDecodeEditEncodeMux() à partir de ce thread.

@Override 
    protected void onCreate(Bundle savedInstanceState) { 
    super.onCreate(savedInstanceState); 

    Thread myThread = new Thread(new Runnable() { 
     @Override 
     public void run() { 
      try { 
       extractDecodeEditEncodeMux(); 
      } catch (Exception e) { 
       e.printStackTrace(); 
      } 
     } 
    }); 
    myThread.start(); 
}