您好,登錄后才能下訂單哦!
這篇文章主要介紹Android如何使用MediaCodec將攝像頭采集的視頻編碼為h264,文中介紹的非常詳細,具有一定的參考價值,感興趣的小伙伴們一定要看完!
具體內容如下
1.MainActivity.java
import android.app.Activity;import android.graphics.ImageFormat;import android.hardware.Camera;import android.hardware.Camera.Parameters;import android.hardware.Camera.PreviewCallback;import android.os.Bundle;import android.view.SurfaceHolder;import android.view.SurfaceView;import java.io.IOException;import java.util.concurrent.ArrayBlockingQueue;public class MainActivity extends Activity implements SurfaceHolder.Callback,PreviewCallback{ private SurfaceView surfaceview; private SurfaceHolder surfaceHolder; private Camera camera; private Parameters parameters; int width = 1280; int height = 720; int framerate = 30; int biterate = 8500*1000; private static int yuvqueuesize = 10; //待解碼視頻緩沖隊列,靜態成員! public static ArrayBlockingQueue<byte[]> YUVQueue = new ArrayBlockingQueue<byte[]>(yuvqueuesize); private AvcEncoder avcCodec; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); surfaceview = (SurfaceView)findViewById(R.id.surfaceview); surfaceHolder = surfaceview.getHolder(); surfaceHolder.addCallback(this); } @Override public void surfaceCreated(SurfaceHolder holder) { camera = getBackCamera(); startcamera(camera); //創建AvEncoder對象 avcCodec = new AvcEncoder(width,height,framerate,biterate); //啟動編碼線程 avcCodec.StartEncoderThread(); } @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { } @Override public void surfaceDestroyed(SurfaceHolder holder) { if (null != camera) { camera.setPreviewCallback(null); camera.stopPreview(); camera.release(); camera = null; avcCodec.StopThread(); } } @Override public void onPreviewFrame(byte[] data, android.hardware.Camera camera) { //將當前幀圖像保存在隊列中 putYUVData(data,data.length); } public void putYUVData(byte[] buffer, int length) { if (YUVQueue.size() >= 10) { YUVQueue.poll(); } YUVQueue.add(buffer); } private void startcamera(Camera mCamera){ if(mCamera != null){ try { mCamera.setPreviewCallback(this); 
mCamera.setDisplayOrientation(90); if(parameters == null){ parameters = mCamera.getParameters(); } //獲取默認的camera配置 parameters = mCamera.getParameters(); //設置預覽格式 parameters.setPreviewFormat(ImageFormat.NV21); //設置預覽圖像分辨率 parameters.setPreviewSize(width, height); //配置camera參數 mCamera.setParameters(parameters); //將完全初始化的SurfaceHolder傳入到setPreviewDisplay(SurfaceHolder)中 //沒有surface的話,相機不會開啟preview預覽 mCamera.setPreviewDisplay(surfaceHolder); //調用startPreview()用以更新preview的surface,必須要在拍照之前start Preview mCamera.startPreview(); } catch (IOException e) { e.printStackTrace(); } } } private Camera getBackCamera() { Camera c = null; try { //獲取Camera的實例 c = Camera.open(0); } catch (Exception e) { e.printStackTrace(); } //獲取Camera的實例失敗時返回null return c; }}
2.AvcEncoder.java
import android.media.MediaCodec;import android.media.MediaCodecInfo;import android.media.MediaFormat;import android.os.Environment;import java.io.BufferedOutputStream;import java.io.File;import java.io.FileOutputStream;import java.io.IOException;import java.nio.ByteBuffer;import static android.media.MediaCodec.BUFFER_FLAG_CODEC_CONFIG;import static android.media.MediaCodec.BUFFER_FLAG_KEY_FRAME;public class AvcEncoder{ private final static String TAG = "MeidaCodec"; private int TIMEOUT_USEC = 12000; private MediaCodec mediaCodec; int m_width; int m_height; int m_framerate; public byte[] configbyte; public AvcEncoder(int width, int height, int framerate, int bitrate) { m_width = width; m_height = height; m_framerate = framerate; MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height); mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar); mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width*height*5); mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30); mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); try { mediaCodec = MediaCodec.createEncoderByType("video/avc"); } catch (IOException e) { e.printStackTrace(); } //配置編碼器參數 mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); //啟動編碼器 mediaCodec.start(); //創建保存編碼后數據的文件 createfile(); } private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h364"; private BufferedOutputStream outputStream; private void createfile(){ File file = new File(path); if(file.exists()){ file.delete(); } try { outputStream = new BufferedOutputStream(new FileOutputStream(file)); } catch (Exception e){ e.printStackTrace(); } } private void StopEncoder() { try { mediaCodec.stop(); mediaCodec.release(); } catch (Exception e){ e.printStackTrace(); } } public boolean isRuning = false; public void StopThread(){ isRuning = false; try { StopEncoder(); outputStream.flush(); 
outputStream.close(); } catch (IOException e) { e.printStackTrace(); } } int count = 0; public void StartEncoderThread(){ Thread EncoderThread = new Thread(new Runnable() { @Override public void run() { isRuning = true; byte[] input = null; long pts = 0; long generateIndex = 0; while (isRuning) { //訪問MainActivity用來緩沖待解碼數據的隊列 if (MainActivity.YUVQueue.size() >0){ //從緩沖隊列中取出一幀 input = MainActivity.YUVQueue.poll(); byte[] yuv420sp = new byte[m_width*m_height*3/2]; //把待編碼的視頻幀轉換為YUV420格式 NV21ToNV12(input,yuv420sp,m_width,m_height); input = yuv420sp; } if (input != null) { try { long startMs = System.currentTimeMillis(); //編碼器輸入緩沖區 ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers(); //編碼器輸出緩沖區 ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers(); int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1); if (inputBufferIndex >= 0) { pts = computePresentationTime(generateIndex); ByteBuffer inputBuffer = inputBuffers[inputBufferIndex]; inputBuffer.clear(); //把轉換后的YUV420格式的視頻幀放到編碼器輸入緩沖區中 inputBuffer.put(input); mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0); generateIndex += 1; } MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC); while (outputBufferIndex >= 0) { //Log.i("AvcEncoder", "Get H264 Buffer Success! 
flag = "+bufferInfo.flags+",pts = "+bufferInfo.presentationTimeUs+""); ByteBuffer outputBuffer = outputBuffers[outputBufferIndex]; byte[] outData = new byte[bufferInfo.size]; outputBuffer.get(outData); if(bufferInfo.flags == BUFFER_FLAG_CODEC_CONFIG){ configbyte = new byte[bufferInfo.size]; configbyte = outData; }else if(bufferInfo.flags == BUFFER_FLAG_KEY_FRAME){ byte[] keyframe = new byte[bufferInfo.size + configbyte.length]; System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length); //把編碼后的視頻幀從編碼器輸出緩沖區中拷貝出來 System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length); outputStream.write(keyframe, 0, keyframe.length); }else{ //寫到文件中 outputStream.write(outData, 0, outData.length); } mediaCodec.releaseOutputBuffer(outputBufferIndex, false); outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC); } } catch (Throwable t) { t.printStackTrace(); } } else { try { Thread.sleep(500); } catch (InterruptedException e) { e.printStackTrace(); } } } } }); EncoderThread.start(); } private void NV21ToNV12(byte[] nv21,byte[] nv12,int width,int height){ if(nv21 == null || nv12 == null)return; int framesize = width*height; int i = 0,j = 0; System.arraycopy(nv21, 0, nv12, 0, framesize); for(i = 0; i < framesize; i++){ nv12[i] = nv21[i]; } for (j = 0; j < framesize/2; j+=2) { nv12[framesize + j-1] = nv21[j+framesize]; } for (j = 0; j < framesize/2; j+=2) { nv12[framesize + j] = nv21[j+framesize-1]; } } /** * Generates the presentation time for frame N, in microseconds. */ private long computePresentationTime(long frameIndex) { return 132 + frameIndex * 1000000 / m_framerate; }}
3.activity_main.xml
<!-- Full-screen SurfaceView that serves as the camera preview target
     (looked up by MainActivity as R.id.surfaceview). -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" android:layout_width="match_parent" android:layout_height="match_parent" > <SurfaceView android:id="@+id/surfaceview" android:layout_width="match_parent" android:layout_height="match_parent"/></RelativeLayout>
4.添加權限
<!-- WRITE_EXTERNAL_STORAGE: AvcEncoder writes the .h264 file to external
     storage; CAMERA: required to open the camera.
     NOTE(review): on Android 6.0+ CAMERA and WRITE_EXTERNAL_STORAGE are
     dangerous permissions and presumably also need a runtime request —
     confirm against the app's target SDK. -->
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/><uses-permission android:name="android.permission.CAMERA" /><uses-permission android:name="android.permission.INTERNET" />
以上是“Android如何使用MediaCodec將攝像頭采集的視頻編碼為h264”這篇文章的所有內容,感謝各位的閱讀!希望分享的內容對大家有幫助,更多相關知識,歡迎關注億速云行業資訊頻道!
免責聲明:本站發布的內容(圖片、視頻和文字)以原創、轉載和分享為主,文章觀點不代表本網站立場,如果涉及侵權請聯系站長郵箱:is@yisu.com進行舉報,并提供相關證據,一經查實,將立刻刪除涉嫌侵權內容。