
Re-encoding h.264 content with a different bit rate using Android MediaCodec

I'm attempting to re-encode a h.264 mp4 file with a different bit rate using the Android MediaCodec APIs introduced in 4.2.

I am able to decode and play the content (prior to changing the bit rate) using the MediaCodec API, but if I attempt to re-encode the content with a different bit rate prior to decoding it, I get garbled output (a green screen with grey pixelation).

The code I am using is based on the Android CTS test case [android.media.cts.DecoderTest](https://android-review.googlesource.com/#/c/43410/1/tests/tests/media/src/android/media/cts/DecoderTest.java):

```java
public void encodeDecodeVideoFile(AssetFileDescriptor assetFileDescriptor) {
    int bitRate = 500000;
    int frameRate = 30;
    int width = 480;
    int height = 368;
    String mimeType = "video/avc";

    MediaCodec encoder, decoder = null;

    ByteBuffer[] encoderInputBuffers;
    ByteBuffer[] encoderOutputBuffers;
    ByteBuffer[] decoderInputBuffers = null;
    ByteBuffer[] decoderOutputBuffers = null;

    // Find a codec that supports the mime type
    int numCodecs = MediaCodecList.getCodecCount();
    MediaCodecInfo codecInfo = null;
    for (int i = 0; i < numCodecs && codecInfo == null; i++) {
        MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
        if (!info.isEncoder()) {
            continue;
        }
        String[] types = info.getSupportedTypes();
        boolean found = false;
        for (int j = 0; j < types.length && !found; j++) {
            if (types[j].equals(mimeType))
                found = true;
        }
        if (!found)
            continue;
        codecInfo = info;
    }
    Log.d(TAG, "Found " + codecInfo.getName() + " supporting " + mimeType);

    // Find a color profile that the codec supports
    int colorFormat = 0;
    MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
    for (int i = 0; i < capabilities.colorFormats.length && colorFormat == 0; i++) {
        int format = capabilities.colorFormats[i];
        switch (format) {
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                colorFormat = format;
                break;
            default:
                Log.d(TAG, "Skipping unsupported color format " + format);
                break;
        }
    }
    Log.d(TAG, "Using color format " + colorFormat);

    // Determine width, height and slice sizes
    if (codecInfo.getName().equals("OMX.TI.DUCATI1.VIDEO.H264E")) {
        // This codec doesn't support a width not a multiple of 16,
        // so round down.
        width &= ~15;
    }
    int stride = width;
    int sliceHeight = height;
    if (codecInfo.getName().startsWith("OMX.Nvidia.")) {
        stride = (stride + 15) / 16 * 16;
        sliceHeight = (sliceHeight + 15) / 16 * 16;
    }

    // Use MediaExtractor to select the first track from the h.264 content
    MediaExtractor extractor = new MediaExtractor();
    extractor.setDataSource(assetFileDescriptor.getFileDescriptor(),
            assetFileDescriptor.getStartOffset(), assetFileDescriptor.getLength());
    MediaFormat extractedFormat = extractor.getTrackFormat(0);
    String mime = extractedFormat.getString(MediaFormat.KEY_MIME);
    Log.d(TAG, "Extracted Mime " + mime);
    extractor.selectTrack(0);

    // Create an encoder
    encoder = MediaCodec.createByCodecName(codecInfo.getName());
    MediaFormat inputFormat = MediaFormat.createVideoFormat(mimeType, width, height);
    inputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
    inputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
    inputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
    inputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
    inputFormat.setInteger("stride", stride);
    inputFormat.setInteger("slice-height", sliceHeight);

    Log.d(TAG, "Configuring encoder with input format " + inputFormat);
    encoder.configure(inputFormat, null /* surface */, null /* crypto */,
            MediaCodec.CONFIGURE_FLAG_ENCODE);
    encoder.start();
    encoderInputBuffers = encoder.getInputBuffers();
    encoderOutputBuffers = encoder.getOutputBuffers();

    // start encoding + decoding
    final long kTimeOutUs = 5000;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;
    MediaFormat oformat = null;
    long startMs = System.currentTimeMillis();

    while (!sawOutputEOS) {
        if (!sawInputEOS) {
            int inputBufIndex = encoder.dequeueInputBuffer(kTimeOutUs);
            if (inputBufIndex >= 0) {
                ByteBuffer dstBuf = encoderInputBuffers[inputBufIndex];
                int sampleSize = extractor.readSampleData(dstBuf, 0 /* offset */);
                long presentationTimeUs = 0;
                if (sampleSize < 0) {
                    Log.d(TAG, "saw input EOS.");
                    sawInputEOS = true;
                    sampleSize = 0;
                } else {
                    presentationTimeUs = extractor.getSampleTime();
                }
                encoder.queueInputBuffer(inputBufIndex, 0 /* offset */, sampleSize,
                        presentationTimeUs,
                        sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                if (!sawInputEOS) {
                    extractor.advance();
                }
            }
        }

        int res = encoder.dequeueOutputBuffer(info, kTimeOutUs);
        if (res >= 0) {
            int outputBufIndex = res;
            ByteBuffer buf = encoderOutputBuffers[outputBufIndex];
            buf.position(info.offset);
            buf.limit(info.offset + info.size);

            if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // create a decoder
                decoder = MediaCodec.createDecoderByType(mimeType);
                MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
                format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
                format.setByteBuffer("csd-0", buf);
                decoder.configure(format, surface /* surface */, null /* crypto */, 0 /* flags */);
                decoder.start();
                decoderInputBuffers = decoder.getInputBuffers();
                decoderOutputBuffers = decoder.getOutputBuffers();
            } else {
                int decIndex = decoder.dequeueInputBuffer(-1);
                decoderInputBuffers[decIndex].clear();
                decoderInputBuffers[decIndex].put(buf);
                decoder.queueInputBuffer(decIndex, 0, info.size,
                        info.presentationTimeUs, info.flags);
            }

            encoder.releaseOutputBuffer(outputBufIndex, false /* render */);
        } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            encoderOutputBuffers = encoder.getOutputBuffers();
            Log.d(TAG, "encoder output buffers have changed.");
        } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat encformat = encoder.getOutputFormat();
            Log.d(TAG, "encoder output format has changed to " + encformat);
        }

        if (decoder == null)
            res = MediaCodec.INFO_TRY_AGAIN_LATER;
        else
            res = decoder.dequeueOutputBuffer(info, kTimeOutUs);

        if (res >= 0) {
            int outputBufIndex = res;
            ByteBuffer buf = decoderOutputBuffers[outputBufIndex];
            buf.position(info.offset);
            buf.limit(info.offset + info.size);

            // The world's simplest FPS implementation
            while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    break;
                }
            }

            decoder.releaseOutputBuffer(outputBufIndex, true /* render */);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "saw output EOS.");
                sawOutputEOS = true;
            }
        } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            decoderOutputBuffers = decoder.getOutputBuffers();
            Log.d(TAG, "decoder output buffers have changed.");
        } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            oformat = decoder.getOutputFormat();
            Log.d(TAG, "decoder output format has changed to " + oformat);
        }
    }

    encoder.stop();
    encoder.release();
    decoder.stop();
    decoder.release();
}
```

The file I am trying to encode is from the Android CTS test project:

`R.raw.video_480x360_mp4_h264_1000kbps_30fps_aac_stereo_128kbps_44100hz`

I'm guessing the problem is related to the format parameters I am specifying in the encoder MediaCodec, but I can't figure out what is incorrect or missing.
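For reference, here is a minimal diagnostic sketch (not part of the CTS test case; `dumpTrackFormat` is a hypothetical helper and `TAG` is assumed to exist in the enclosing class) that logs what MediaExtractor actually reports for track 0, so those values can be compared against the width, height and frame rate hard-coded into the encoder's MediaFormat above:

```java
import java.io.IOException;

import android.content.res.AssetFileDescriptor;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;

// Hypothetical helper: dump the extracted track's format keys for comparison
// with the values passed to the encoder's MediaFormat.
private void dumpTrackFormat(AssetFileDescriptor afd) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    extractor.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());

    MediaFormat format = extractor.getTrackFormat(0);
    Log.d(TAG, "track mime   = " + format.getString(MediaFormat.KEY_MIME));
    Log.d(TAG, "track width  = " + format.getInteger(MediaFormat.KEY_WIDTH));
    Log.d(TAG, "track height = " + format.getInteger(MediaFormat.KEY_HEIGHT));
    if (format.containsKey(MediaFormat.KEY_FRAME_RATE)) {
        Log.d(TAG, "track frame-rate = " + format.getInteger(MediaFormat.KEY_FRAME_RATE));
    }

    extractor.release();
}
```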
 
