Android uses MediaCodec to encode video captured by the camera into H.264

  • 2021-11-13 18:04:34
  • OfStack

This article shares a worked example of using Android's MediaCodec to encode camera preview video into an H.264 stream, for your reference. The details are as follows.

1. MainActivity.java


import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;

public class MainActivity extends Activity implements SurfaceHolder.Callback,PreviewCallback{

  private SurfaceView surfaceview;

  private SurfaceHolder surfaceHolder;

  private Camera camera;

  private Parameters parameters;

  int width = 1280;

  int height = 720;

  int framerate = 30;

  int bitrate = 8500 * 1000;

  private static int yuvqueuesize = 10;

  // Queue of camera frames waiting to be encoded; static so the encoder thread can read it
  public static ArrayBlockingQueue<byte[]> YUVQueue = new ArrayBlockingQueue<byte[]>(yuvqueuesize);

  private AvcEncoder avcCodec;


  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    surfaceview = (SurfaceView)findViewById(R.id.surfaceview);
    surfaceHolder = surfaceview.getHolder();
    surfaceHolder.addCallback(this);
  }


  @Override
  public void surfaceCreated(SurfaceHolder holder) {
    camera = getBackCamera();
    startcamera(camera);
    // Create the AvcEncoder
    avcCodec = new AvcEncoder(width, height, framerate, bitrate);
    // Start the encoding thread
    avcCodec.StartEncoderThread();

  }

  @Override
  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

  }

  @Override
  public void surfaceDestroyed(SurfaceHolder holder) {
    if (null != camera) {
      camera.setPreviewCallback(null);
      camera.stopPreview();
      camera.release();
      camera = null;
      avcCodec.StopThread();
    }
  }


  @Override
  public void onPreviewFrame(byte[] data, android.hardware.Camera camera) {
    // Queue the current preview frame for encoding
    putYUVData(data,data.length);
  }

  public void putYUVData(byte[] buffer, int length) {
    // Drop the oldest frame when the queue is full
    if (YUVQueue.size() >= yuvqueuesize) {
      YUVQueue.poll();
    }
    YUVQueue.add(buffer);
  }


  private void startcamera(Camera mCamera){
    if(mCamera != null){
      try {
        mCamera.setPreviewCallback(this);
        mCamera.setDisplayOrientation(90);
        // Get the camera's default configuration
        parameters = mCamera.getParameters();
        // Preview format: NV21, the default format for preview callbacks
        parameters.setPreviewFormat(ImageFormat.NV21);
        // Set the preview resolution
        parameters.setPreviewSize(width, height);
        // Apply the camera parameters
        mCamera.setParameters(parameters);
        // Hand the fully initialized SurfaceHolder to setPreviewDisplay();
        // without a surface the camera will not start the preview
        mCamera.setPreviewDisplay(surfaceHolder);
        // startPreview() begins delivering frames to the surface and the preview callback
        mCamera.startPreview();

      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  }

  private Camera getBackCamera() {
    Camera c = null;
    try {
      // Open the first camera (index 0, normally the back camera)
      c = Camera.open(0);
    } catch (Exception e) {
      e.printStackTrace();
    }
    // Returns null if opening the camera failed
    return c;
  }


}
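A note on the preview callback: setPreviewCallback() makes the camera allocate a fresh byte[] for every frame, which creates noticeable GC pressure at 1280x720/30fps. Below is a minimal sketch of an optional alternative using the buffer-recycling API, assuming the same width/height fields as above; it is not part of the original example, and because this example queues the frame array itself, the frame would have to be copied before the buffer is handed back:


  // Optional variant for startcamera(): recycle one preview buffer instead of
  // letting the camera allocate a new byte[] per frame.
  // An NV21 frame occupies width * height * 3 / 2 bytes.
  byte[] previewBuffer = new byte[width * height * 3 / 2];
  mCamera.addCallbackBuffer(previewBuffer);
  mCamera.setPreviewCallbackWithBuffer(this);

  // In onPreviewFrame(), return the buffer to the camera once the frame
  // has been copied elsewhere:
  // camera.addCallbackBuffer(data);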

2. AvcEncoder.java


import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import static android.media.MediaCodec.BUFFER_FLAG_CODEC_CONFIG;
import static android.media.MediaCodec.BUFFER_FLAG_KEY_FRAME;


public class AvcEncoder
{
  private final static String TAG = "MediaCodec";

  private int TIMEOUT_USEC = 12000;

  private MediaCodec mediaCodec;
  int m_width;
  int m_height;
  int m_framerate;

  public byte[] configbyte;


  public AvcEncoder(int width, int height, int framerate, int bitrate) {

    m_width = width;
    m_height = height;
    m_framerate = framerate;
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    try {
      mediaCodec = MediaCodec.createEncoderByType("video/avc");
    } catch (IOException e) {
      e.printStackTrace();
    }
    // Configure encoder parameters 
    mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // Start encoder 
    mediaCodec.start();
    // Create a file to save the encoded data 
    createfile();
  }

  private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264";
  private BufferedOutputStream outputStream;

  private void createfile(){
    File file = new File(path);
    if(file.exists()){
      file.delete();
    }
    try {
      outputStream = new BufferedOutputStream(new FileOutputStream(file));
    } catch (Exception e){
      e.printStackTrace();
    }
  }

  private void StopEncoder() {
    try {
      mediaCodec.stop();
      mediaCodec.release();
    } catch (Exception e){
      e.printStackTrace();
    }
  }

  // Written from the UI thread, read from the encoder thread, hence volatile
  public volatile boolean isRunning = false;

  public void StopThread(){
    isRunning = false;
    try {
      StopEncoder();
      outputStream.flush();
      outputStream.close();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

  int count = 0;

  public void StartEncoderThread(){
    Thread EncoderThread = new Thread(new Runnable() {

      @Override
      public void run() {
        isRunning = true;
        long pts = 0;
        long generateIndex = 0;

        while (isRunning) {
          // Take one frame (if any) from MainActivity's queue of frames
          // waiting to be encoded; poll() returns null when the queue is
          // empty, so a stale frame is never re-encoded
          byte[] input = MainActivity.YUVQueue.poll();
          if (input != null) {
            byte[] yuv420sp = new byte[m_width*m_height*3/2];
            // Convert the NV21 preview frame to NV12, the semi-planar
            // YUV420 layout matching COLOR_FormatYUV420SemiPlanar
            NV21ToNV12(input, yuv420sp, m_width, m_height);
            input = yuv420sp;
          }
          if (input != null) {
            try {
              long startMs = System.currentTimeMillis();
              // Encoder input buffers (deprecated array API; see the
              // API 21+ note after this class)
              ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
              // Encoder output buffers
              ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
              // A timeout of -1 blocks until an input buffer is free
              int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
              if (inputBufferIndex >= 0) {
                pts = computePresentationTime(generateIndex);
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                // Copy the converted YUV420 frame into the encoder's input buffer
                inputBuffer.put(input);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                generateIndex += 1;
              }

              MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
              int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
              while (outputBufferIndex >= 0) {
                //Log.i("AvcEncoder", "Get H264 Buffer Success! flag = "+bufferInfo.flags+",pts = "+bufferInfo.presentationTimeUs+"");
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);
                // Flags can be combined, so test them bitwise rather than with ==
                if ((bufferInfo.flags & BUFFER_FLAG_CODEC_CONFIG) != 0) {
                  // Cache the SPS/PPS configuration data the encoder emits first
                  configbyte = outData;
                } else if ((bufferInfo.flags & BUFFER_FLAG_KEY_FRAME) != 0) {
                  // Prepend SPS/PPS to every keyframe so the raw stream
                  // remains decodable from any keyframe
                  byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
                  System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                  // Copy the encoded frame out of the encoder's output buffer
                  System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
                  outputStream.write(keyframe, 0, keyframe.length);
                } else {
                  // Ordinary frame: write it to the file as-is
                  outputStream.write(outData, 0, outData.length);
                }

                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
              }

            } catch (Throwable t) {
              t.printStackTrace();
            }
          } else {
            try {
              Thread.sleep(500);
            } catch (InterruptedException e) {
              e.printStackTrace();
            }
          }
        }
      }
    });
    EncoderThread.start();

  }

  private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height){
    if (nv21 == null || nv12 == null) return;
    int framesize = width * height;
    // The Y plane is identical in both layouts
    System.arraycopy(nv21, 0, nv12, 0, framesize);
    // NV21 interleaves chroma as VUVU...; NV12 expects UVUV...,
    // so swap every V/U byte pair
    for (int j = 0; j < framesize / 2; j += 2) {
      nv12[framesize + j] = nv21[framesize + j + 1];     // U
      nv12[framesize + j + 1] = nv21[framesize + j];     // V
    }
  }

  /**
   * Generates the presentation time for frame N, in microseconds.
   */
  private long computePresentationTime(long frameIndex) {
    return 132 + frameIndex * 1000000 / m_framerate;
  }
}
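Two notes on the encoder class above. First, the file written to test1.h264 is a raw Annex-B elementary stream, not an MP4; because SPS/PPS are prepended to every keyframe, a player that accepts raw H.264 can join the stream at any keyframe, and MediaMuxer can wrap the stream in an MP4 container if one is needed. Second, getInputBuffers()/getOutputBuffers() were deprecated in API 21 in favor of per-index accessors. A minimal sketch of the newer style, reusing the TIMEOUT_USEC, pts, and input names from the class above (assumes API 21+):


  // API 21+ style: fetch each buffer by index instead of caching the arrays
  int inIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
  if (inIndex >= 0) {
    ByteBuffer inBuf = mediaCodec.getInputBuffer(inIndex);
    inBuf.clear();
    inBuf.put(input);
    mediaCodec.queueInputBuffer(inIndex, 0, input.length, pts, 0);
  }

  MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  int outIndex = mediaCodec.dequeueOutputBuffer(info, TIMEOUT_USEC);
  while (outIndex >= 0) {
    ByteBuffer outBuf = mediaCodec.getOutputBuffer(outIndex);
    byte[] outData = new byte[info.size];
    outBuf.get(outData);
    // ... handle outData exactly as in StartEncoderThread() ...
    mediaCodec.releaseOutputBuffer(outIndex, false);
    outIndex = mediaCodec.dequeueOutputBuffer(info, TIMEOUT_USEC);
  }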

3. activity_main.xml


<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
  android:layout_width="match_parent"
  android:layout_height="match_parent" >

  <SurfaceView
    android:id="@+id/surfaceview"
    android:layout_width="match_parent"
    android:layout_height="match_parent"/>


</RelativeLayout>

4. Add permissions in AndroidManifest.xml


<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.INTERNET" />
