2017-06-19 119 views
0

我一直在尝试修改 bigflake 的 DecodeEditEncode 示例来编辑 mp4 文件,然后将其重新编码回 mp4。我在网上查了很久,找不到解决方案。我已经设置了 I 帧间隔和帧率,但 Muxer 仍然报错,提示视频轨道没有同步帧,并且在 stop 时失败:"MediaMuxer: no sync frame for video track"。

有没有更有经验的人能指出我哪里做错了?提前致谢 :)

/**
 * Drains {@code decoder} output frames through the shared surfaces into
 * {@code encoder}, and muxes the encoded stream into an MP4 file at
 * {@code outputFile}.
 *
 * <p>Fixes relative to the original:
 * <ul>
 *   <li>The {@link MediaMuxer} is created ONCE, before the drain loop. The
 *       original re-created it (and called {@code addTrack()}/{@code start()})
 *       on every pass of the outer loop.</li>
 *   <li>The video track is added from the ENCODER's output format delivered
 *       via {@code INFO_OUTPUT_FORMAT_CHANGED} — that format carries the
 *       csd-0/csd-1 codec config. Adding the extractor's input format is what
 *       caused "MediaMuxer: no sync frame for video track" on stop().</li>
 *   <li>{@code writeSampleData()} reads from the buffer's position to its
 *       limit; the original drained the buffer with {@code get()} first and
 *       therefore wrote zero bytes.</li>
 *   <li>{@code BUFFER_FLAG_CODEC_CONFIG} buffers are skipped — the muxer gets
 *       the codec config from the track format, not from samples.</li>
 * </ul>
 *
 * @param decoder       decoder already configured with the extractor's track,
 *                      rendering to {@code outputSurface}
 * @param outputSurface surface/texture pair receiving decoded frames
 * @param inputSurface  encoder input surface the edited frame is drawn into
 * @param encoder       video encoder already configured and started
 */
private static void encodeToMp4(MediaCodec decoder,
           OutputSurface outputSurface, InputSurface inputSurface, MediaCodec encoder) {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    // Create the muxer once, up front. We can't addTrack()/start() yet: the
    // real track format (with csd-0/csd-1) only becomes available when the
    // encoder reports INFO_OUTPUT_FORMAT_CHANGED below.
    MediaMuxer mediaMuxer;
    try {
        mediaMuxer = new MediaMuxer(outputFile.getAbsolutePath(),
                MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException e) {
        // Without a muxer there is nothing useful this method can do.
        throw new RuntimeException(
                "failed to create MediaMuxer for " + outputFile.getAbsolutePath(), e);
    }
    int trackIndex = -1;
    boolean muxerStarted = false;

    int inputChunk = 0;
    int outputCount = 0;
    boolean outputDone = false;
    boolean inputDone = false;
    boolean decoderDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "edit loop");
        // Feed more data to the decoder.
        if (!inputDone) {
            int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0) {
                ByteBuffer buffer = decoderInputBuffers[inputBufIndex];
                buffer.clear();
                int sampleSize = extractor.readSampleData(buffer, 0);
                if (sampleSize < 0) {
                    // End of stream -- send empty frame with EOS flag set.
                    // (Do NOT count this as an input chunk; it carries no frame.)
                    decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                } else {
                    // Copy a chunk of input to the decoder, stamped with the
                    // sample's original presentation time.
                    decoder.queueInputBuffer(inputBufIndex, 0, sampleSize,
                            extractor.getSampleTime(), 0);
                    extractor.advance();
                    inputChunk++;
                }
            }
        }

        // Assume output is available. Loop until both assumptions are false.
        boolean decoderOutputAvailable = !decoderDone;
        boolean encoderOutputAvailable = true;
        while (decoderOutputAvailable || encoderOutputAvailable) {
            // Start by draining any pending output from the encoder. It's
            // important to do this before we try to stuff any more data in.
            int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                encoderOutputAvailable = false;
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                encoderOutputBuffers = encoder.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // Arrives exactly once, before the first encoded buffer. This
                // format contains csd-0/csd-1, so THIS is the format the muxer
                // track must be created from.
                if (muxerStarted) {
                    throw new IllegalStateException("encoder output format changed twice");
                }
                MediaFormat newFormat = encoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                trackIndex = mediaMuxer.addTrack(newFormat);
                mediaMuxer.start();
                muxerStarted = true;
            } else if (encoderStatus < 0) {
                Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: "
                        + encoderStatus);
            } else { // encoderStatus >= 0
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException(
                            "encoderOutputBuffer " + encoderStatus + " was null");
                }
                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // Codec config (SPS/PPS) already travels in the track
                    // format; muxing it as a sample confuses the muxer.
                    if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    info.size = 0;
                }
                if (info.size != 0) {
                    if (!muxerStarted) {
                        throw new IllegalStateException("muxer hasn't started");
                    }
                    // writeSampleData() consumes [position, limit); do not
                    // drain the buffer with get() first.
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);
                    mediaMuxer.writeSampleData(trackIndex, encodedData, info);
                    outputCount++;
                    if (VERBOSE) Log.d(TAG, "encoder output " + info.size + " bytes");
                }
                outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                encoder.releaseOutputBuffer(encoderStatus, false);
            }
            if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                // Continue attempts to drain output.
                continue;
            }

            // Encoder is drained, check to see if we've got a new frame of output
            // from the decoder. (The output is going to a Surface, rather than a
            // ByteBuffer, but we still get information through BufferInfo.)
            if (!decoderDone) {
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                    decoderOutputAvailable = false;
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // Output goes to a Surface; the ByteBuffer array is unused.
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed (we don't care)");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // expected before first buffer of data
                    MediaFormat newFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                } else if (decoderStatus < 0) {
                    Log.e(TAG, "unexpected result from decoder.dequeueOutputBuffer: "
                            + decoderStatus);
                } else { // decoderStatus >= 0
                    if (VERBOSE) Log.d(TAG, "surface decoder given buffer "
                            + decoderStatus + " (size=" + info.size + ")");
                    // The ByteBuffers are null references, but we still get a
                    // nonzero size for the decoded data.
                    boolean doRender = (info.size != 0);
                    // As soon as we call releaseOutputBuffer, the buffer will be
                    // forwarded to SurfaceTexture to convert to a texture. The API
                    // doesn't guarantee that the texture will be available before
                    // the call returns, so we need to wait for the
                    // onFrameAvailable callback to fire. If we don't wait, we
                    // risk rendering from the previous frame.
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        // This waits for the image and renders it after it arrives.
                        if (VERBOSE) Log.d(TAG, "awaiting frame");
                        outputSurface.awaitNewImage();
                        outputSurface.drawImage();
                        // Send it to the encoder, preserving the source timestamp
                        // (surface timestamps are in nanoseconds).
                        inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
                        if (VERBOSE) Log.d(TAG, "swapBuffers");
                        inputSurface.swapBuffers();
                    }
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        // forward decoder EOS to encoder
                        if (VERBOSE) Log.d(TAG, "signaling input EOS");
                        if (WORK_AROUND_BUGS) {
                            // Bail early, possibly dropping a frame — but still
                            // release the muxer so the file isn't left open.
                            if (muxerStarted) mediaMuxer.stop();
                            mediaMuxer.release();
                            return;
                        } else {
                            encoder.signalEndOfInputStream();
                        }
                    }
                }
            }
        }
    }
    if (inputChunk != outputCount) {
        throw new RuntimeException("frame lost: " + inputChunk + " in, "
                + outputCount + " out");
    }
    if (muxerStarted) {
        mediaMuxer.stop();
    }
    mediaMuxer.release();
    listener.onFinished();
}

回答

0

参考 bigflake 的另一个测试 ExtractDecodeEditEncodeMuxTest.java 之后,所有问题都解决了。对于想实现类似功能的人来说,这个示例是救星。 :)