Android 视频采集

书接上回,上一篇文章说了怎么播放对端 flv 文件流,这一回讲一讲怎么显示本地的视频画面,以及本地音视频内容的采集。视频采集和本地渲染都是在 explorer-device-video 这个模块中实现的。

示例源码下载

本地视频采集

视频采集依赖 RecordThread 这个类,它就是一个采集线程,主要通过 MediaCodec 对采集到的音视频数据进行编码,编码方式是 H264(用 H265 编码格式采集出来的数据无法正常播放)

/**
 * Capture thread: pulls PCM audio from {@link AudioRecord} and video frames from an
 * encoder input {@link Surface} (fed by a {@link GLThread}), encodes both with
 * {@link MediaCodec} (AAC audio, H.264 video), and optionally muxes them into an MP4
 * file via {@link MediaMuxer}.
 *
 * <p>Lifecycle: construct, {@link #start()}, then call {@link #stopRecord()} or
 * {@link #cancelRecord()} from another thread. Progress and completion are reported
 * through {@link OnRecordListener}.
 */
public class RecordThread extends Thread {

    private String TAG = RecordThread.class.getSimpleName();

    // Encoders and the MP4 container muxer.
    private MediaMuxer mediaMuxer;
    private MediaCodec audioCodec;
    private MediaCodec videoCodec;
    private AudioRecord audioRecord;
    private int bufferSizeInBytes;

    // Capture / encode configuration supplied by the caller.
    private RecordParam recordParam;
    private MicParam micParam;
    private AudioEncodeParam audioEncodeParam;
    private CameraParam cameraParam;
    private VideoEncodeParam videoEncodeParam;

    private GLThread glThread;
    // Encoder input surface created by videoCodec; GLThread renders camera frames into it.
    private volatile Surface surface;
    // FIX: these flags are written by the caller's thread (stopRecord()/cancelRecord())
    // and polled by the record() loop on this thread; without volatile the loop may
    // never observe the change and keep recording forever.
    private volatile boolean isStopRecord = false;   // caller requested a normal stop
    private volatile boolean isCancelRecord = false; // caller requested cancellation
    private boolean storeMP4 = false;                // whether to mux audio+video into an MP4 file

    // Temporary file holding the raw video stream; for debugging only.
    private String path = "/mnt/sdcard/videoTest.flv";
    private File videoTmpFile = new File(path);
    private volatile FileOutputStream storeVideoStream;
    private volatile long seq = 0L;
    private volatile long audioSeq = 0L;

    // AAC sampling-frequency index table (sample rate in Hz -> 4-bit index).
    private static Map<Integer, Integer> samplingFrequencyIndexMap = new HashMap<>();

    static {
        samplingFrequencyIndexMap.put(96000, 0);
        samplingFrequencyIndexMap.put(88200, 1);
        samplingFrequencyIndexMap.put(64000, 2);
        samplingFrequencyIndexMap.put(48000, 3);
        samplingFrequencyIndexMap.put(44100, 4);
        samplingFrequencyIndexMap.put(32000, 5);
        samplingFrequencyIndexMap.put(24000, 6);
        samplingFrequencyIndexMap.put(22050, 7);
        samplingFrequencyIndexMap.put(16000, 8);
        samplingFrequencyIndexMap.put(12000, 9);
        samplingFrequencyIndexMap.put(11025, 10);
        samplingFrequencyIndexMap.put(8000, 11);
    }

    // Callback for recording progress events.
    private OnRecordListener onRecordListener;
    public void setOnRecordListener(OnRecordListener onRecordListener) {
        this.onRecordListener = onRecordListener;
    }

    /** Notifies the listener that recording has started. */
    private void onRecordStart() {
        if (onRecordListener != null) onRecordListener.onRecordStart();
    }

    /** Reports the current recording timestamp (microseconds) to the listener. */
    private void onRecordTime(long time) {
        if (onRecordListener != null) onRecordListener.onRecordTime(time);
    }

    /** Tears down the GL thread, then reports completion with the output path. */
    private void onRecordComplete(String path) {
        stopGLThread();
        if (onRecordListener != null) {
            onRecordListener.onRecordComplete(path);
        }
    }

    /** Notifies the listener that recording was cancelled. */
    private void onRecordCancel() {
        if (onRecordListener != null) onRecordListener.onRecordCancel();
    }

    /** Reports a recording failure to the listener. */
    private void onRecordError(Exception e) {
        if (onRecordListener != null) onRecordListener.onRecordError(e);
    }

    /** Destroys and drops the GL rendering thread, if any. */
    private void stopGLThread() {
        if (glThread != null) {
            glThread.onDestroy();
            glThread = null;
        }
    }

    public RecordThread(RecordThreadParam recordThreadParam) {
        this(recordThreadParam.getRecordParam(), recordThreadParam.getMicParam(),
                recordThreadParam.getAudioEncodeParam(), recordThreadParam.getCameraParam(),
                recordThreadParam.getVideoEncodeParam());
    }

    private RecordThread(RecordParam recordParam, MicParam micParam, AudioEncodeParam audioEncodeParam,
                         CameraParam cameraParam, VideoEncodeParam videoEncodeParam) {
        this.recordParam = recordParam;
        this.micParam = micParam;
        this.audioEncodeParam = audioEncodeParam;
        this.cameraParam = cameraParam;
        this.videoEncodeParam = videoEncodeParam;
        this.storeMP4 = recordParam.isStoreMP4File();
        Log.d(TAG, "init RecordThread with storeMP4 " + storeMP4);
        glThread = new GLThread(this.cameraParam, this.videoEncodeParam);
        initMuxer();
        initAudio();
        initVideo();
    }

    /** Creates the MP4 muxer; on failure mediaMuxer stays null and record() bails out. */
    private void initMuxer() {
        try {
            mediaMuxer = new MediaMuxer(recordParam.getPath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException e) {
            e.printStackTrace();
            mediaMuxer = null;
        }
    }

    /**
     * Sets up the microphone source and the AAC-LC encoder.
     * On failure both audioRecord and audioCodec are nulled so record() can detect it.
     */
    private void initAudio() {
        bufferSizeInBytes = AudioRecord.getMinBufferSize(micParam.getSampleRateInHz(), micParam.getChannelConfig(), micParam.getAudioFormat());
        audioRecord = new AudioRecord(micParam.getAudioSource(), micParam.getSampleRateInHz(), micParam.getChannelConfig(), micParam.getAudioFormat(), bufferSizeInBytes);
        try {
            audioCodec = MediaCodec.createEncoderByType(audioEncodeParam.getMime());
            // NOTE(review): channel count is hard-coded to 2 here while the AudioRecord uses
            // micParam.getChannelConfig() — confirm these always agree (a mono mic config
            // would mismatch the encoder).
            MediaFormat format = MediaFormat.createAudioFormat(audioEncodeParam.getMime(), micParam.getSampleRateInHz(), 2);
            format.setInteger(MediaFormat.KEY_BIT_RATE, audioEncodeParam.getBitRate());
            format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
            format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, audioEncodeParam.getMaxInputSize());
            audioCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        } catch (IOException e) {
            e.printStackTrace();
            audioRecord = null;
            audioCodec = null;
        }
    }

    /**
     * Sets up the H.264 encoder in surface-input mode and hands the input surface to
     * the GL thread. On failure videoCodec and surface are nulled.
     */
    private void initVideo() {
        try {
            Log.d(TAG, "initVideo videoEncodeParam " + JSONObject.toJSONString(videoEncodeParam));

            videoCodec = MediaCodec.createEncoderByType(videoEncodeParam.getMime());
            MediaFormat format = MediaFormat.createVideoFormat(videoEncodeParam.getMime(), videoEncodeParam.getWidth(), videoEncodeParam.getHeight());
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, videoEncodeParam.getFrameRate());
            // NOTE(review): 22500 bps is an extremely low video bitrate — this looks like it
            // should come from videoEncodeParam (or be e.g. 2_250_000). TODO confirm intent.
            format.setInteger(MediaFormat.KEY_BIT_RATE, 22500);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, videoEncodeParam.getiFrameInterval());
            format.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);

            // Profile/level hints; encoder defaults to baseline otherwise.
            // NOTE(review): on M+ only KEY_LEVEL is set (without KEY_PROFILE), and KEY_PROFILE
            // is only set on exactly Lollipop — the mutually exclusive branches look
            // unintentional; verify against the devices this was tuned on before changing.
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                format.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel3);
            } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                format.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileMain);
            }

            videoCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            surface = videoCodec.createInputSurface();
        } catch (IOException e) {
            e.printStackTrace();
            videoCodec = null;
            surface = null;
        }
        glThread.setSurface(surface);
    }

    /** Cancels the recording: raises the cancel flag, drops the temp file, waits for the loop to exit. */
    void cancelRecord() {
        isCancelRecord = true;
        stopSaveTmpFile(true);
        try {
            join(2000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /** Stops the recording normally and tears down the GL thread. */
    void stopRecord() {
        isStopRecord = true;
        stopSaveTmpFile(false);
        try {
            join(2000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }

        if (glThread != null) {
            glThread.onDestroy();
            glThread = null;
        }
    }

    /**
     * Closes the debug raw-stream file; when {@code clean} is true the file is removed.
     */
    void stopSaveTmpFile(boolean clean) {
        try {
            if (storeVideoStream != null) {
                storeVideoStream.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        if (clean) {
            // FIX: deleteOnExit() only schedules deletion for VM shutdown; the intent here
            // is to discard the temp file immediately.
            videoTmpFile.delete();
        }
    }

    /**
     * Releases all capture/encode resources, then fires the terminal callback
     * (cancel takes precedence over complete).
     */
    private void release() {
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
        }

        if (audioCodec != null) {
            audioCodec.stop();
            audioCodec.release();
            audioCodec = null;
        }

        if (videoCodec != null) {
            videoCodec.stop();
            videoCodec.release();
            videoCodec = null;
        }

        // NOTE(review): MediaMuxer.stop() throws IllegalStateException if start() was never
        // reached (e.g. cancelled before both tracks were added) — confirm callers tolerate this.
        if (mediaMuxer != null && storeMP4) {
            mediaMuxer.stop();
            mediaMuxer.release();
            mediaMuxer = null;
        }

        if (isCancelRecord) {
            onRecordCancel();
            return;
        }

        if (isStopRecord) {
            onRecordComplete(recordParam.getPath());
        }
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void run() {
        super.run();
        record();
    }

    /** Recreates the debug raw-stream file and opens it for writing. */
    private void restartKeepOriData() {
        try {
            if (videoTmpFile.exists()) {
                // FIX: was deleteOnExit(), which does NOT delete the file now — the old
                // contents survived, createNewFile() returned false, and the append-mode
                // stream below wrote the new recording after stale data.
                videoTmpFile.delete();
            }
            videoTmpFile.createNewFile();
            storeVideoStream = new FileOutputStream(videoTmpFile, true);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Main capture loop: feeds mic PCM into the audio encoder, drains both encoders,
     * and writes encoded samples into the muxer once both tracks have been added.
     * Runs until {@link #stopRecord()} or {@link #cancelRecord()} is called.
     */
    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private void record() {
        // MP4 output requested but the muxer failed to initialize — nothing to do.
        if (storeMP4 && mediaMuxer == null) return;

        if (audioCodec == null || videoCodec == null) {
            onRecordError(new IllegalArgumentException("widget is null"));
            return;
        }
        restartKeepOriData();

        boolean isStartMuxer = false; // true once mediaMuxer.start() has been called
        seq = 0L;
        isStopRecord = false;
        long audioPts = 0;
        long videoPts = 0;
        int audioTrackIndex = -1;
        int videoTrackIndex = -1;

        onRecordStart();
        audioRecord.startRecording();
        audioCodec.start();
        videoCodec.start();
        glThread.start();
        MediaCodec.BufferInfo audioInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo videoInfo = new MediaCodec.BufferInfo();
        while (true) {
            if (isStopRecord || isCancelRecord) {
                release();
                break;
            }

            // Push raw PCM captured by AudioRecord into the audio encoder.
            int audioInputBufferId = audioCodec.dequeueInputBuffer(0);
            if (audioInputBufferId >= 0) {
                ByteBuffer inputBuffer = audioCodec.getInputBuffer(audioInputBufferId);
                int readSize = -1;
                if (inputBuffer != null) readSize = audioRecord.read(inputBuffer, bufferSizeInBytes);
                if (readSize >= 0) audioCodec.queueInputBuffer(audioInputBufferId, 0, readSize, System.nanoTime() / 1000, 0);
            }

            // Drain encoded video (produced via the input surface) and hand it to the muxer.
            int videoOutputBufferId = videoCodec.dequeueOutputBuffer(videoInfo, 0);
            if (videoOutputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (storeMP4) {
                    videoTrackIndex = mediaMuxer.addTrack(videoCodec.getOutputFormat());
                    // The muxer may only start after BOTH tracks exist.
                    if (audioTrackIndex != -1 && !isStartMuxer) {
                        isStartMuxer = true;
                        mediaMuxer.start();
                    }
                }

            } else if (videoOutputBufferId >= 0) {
                ByteBuffer outputBuffer = videoCodec.getOutputBuffer(videoOutputBufferId);
                if (outputBuffer != null && videoInfo.size != 0) {
                    outputBuffer.position(videoInfo.offset);
                    outputBuffer.limit(videoInfo.offset + videoInfo.size);
                    if (storeMP4 && isStartMuxer) mediaMuxer.writeSampleData(videoTrackIndex, outputBuffer, videoInfo);
                }
                videoCodec.releaseOutputBuffer(videoOutputBufferId, false);
            }

            // Drain encoded audio and hand it to the muxer.
            int audioOutputBufferId = audioCodec.dequeueOutputBuffer(audioInfo, 0);
            if (audioOutputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (storeMP4) {
                    audioTrackIndex = mediaMuxer.addTrack(audioCodec.getOutputFormat());
                    if (videoTrackIndex != -1 && !isStartMuxer) {
                        isStartMuxer = true;
                        mediaMuxer.start();
                    }
                }

            } else if (audioOutputBufferId >= 0) {
                ByteBuffer outputBuffer = audioCodec.getOutputBuffer(audioOutputBufferId);
                if (outputBuffer != null && audioInfo.size != 0) {
                    outputBuffer.position(audioInfo.offset);
                    outputBuffer.limit(audioInfo.offset + audioInfo.size);
                    if (storeMP4 && isStartMuxer) mediaMuxer.writeSampleData(audioTrackIndex, outputBuffer, audioInfo);
                }
                audioCodec.releaseOutputBuffer(audioOutputBufferId, false);
            }

            onRecordTime(videoInfo.presentationTimeUs);
        }
    }
}

本地视频渲染

本地视频渲染是通过 CameraView 实现的,同时支持摄像头前置/后置的切换

/**
 * Preview view for the local camera feed.
 *
 * <p>Renders camera frames onto an OpenGL texture via {@link OesRender} and forwards
 * camera lifecycle events from {@link CameraManager}. Also exposes front/back camera
 * switching.
 */
public class CameraView extends EGLTextureView implements OnCameraListener {

    private final CameraManager cameraManager = new CameraManager();
    private Activity activity;
    public OesRender render;
    private final int process = RenderConstants.Process.CAMERA;

    public CameraView(Context context) {
        this(context, null);
    }

    public CameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    /** Configures the GL context/renderer and registers for camera callbacks. */
    private void init(Context context) {
        setEGLContextClientVersion(2);
        render = new OesRender(context, process);
        setRenderer(render);
        // Render on demand only — frames arrive via the SurfaceTexture callback.
        setRenderMode(EGLTextureView.RENDERMODE_WHEN_DIRTY);

        // NOTE: assumes the hosting context is an Activity (sample-app convention).
        activity = (Activity) context;
        cameraManager.addOnCameraListener(this);
    }

    /**
     * Opens the camera once the render's SurfaceTexture is ready, and schedules a
     * redraw for every arriving camera frame.
     */
    public void openCamera() {
        render.setOnSurfaceTextureListener(surfaceTexture -> {
            surfaceTexture.setOnFrameAvailableListener(texture -> requestRender());
            cameraManager.openCamera(activity, surfaceTexture);
        });
        requestRender();
    }

    /** Closes the camera device. */
    public void closeCamera() {
        cameraManager.closeCamera();
    }

    /** Toggles between the front and back camera, then reopens the preview. */
    public void switchCamera() {
        cameraManager.switchCamera();
        openCamera();
    }

    /** Switches to the given facing (front/back), then reopens the preview. */
    public void switchCamera(int facing) {
        cameraManager.switchCamera(facing);
        openCamera();
    }

    /** Registers an additional observer for camera lifecycle events. */
    public void addOnCameraListener(OnCameraListener onCameraListener) {
        cameraManager.addOnCameraListener(onCameraListener);
    }

    @Override
    public void onCameraOpened(Size cameraSize, int facing) {
        // Camera sizes are landscape; swap width/height for the portrait render target.
        render.setOesSize(cameraSize.getHeight(), cameraSize.getWidth());
        requestRender();
    }

    @Override
    public void onCameraClosed() {

    }

    @Override
    public void onCameraError(Exception e) {

    }

    public CameraManager getCameraManager() {
        return cameraManager;
    }

    /** Texture id of the offscreen FBO the camera frame is rendered into. */
    public int getFboTextureId() {
        return render.getFboTextureId();
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        RenderManager.getInstance(getContext()).init(process);
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        RenderManager.getInstance(getContext()).release(process);
    }
}

窗口渲染

上述关键代码实现以后,只需要在窗口类(Activity)中调用这些组件即可

class MainActivity : AppCompatActivity() {

    private val videoRecorder = VideoRecorder()

    // Camera permission is mandatory — without it no frames can be captured.
    private val PERMISSIONS_STORAGE = arrayOf(
        Manifest.permission.CAMERA,
        Manifest.permission.WRITE_EXTERNAL_STORAGE
    )

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        ActivityCompat.requestPermissions(this, PERMISSIONS_STORAGE, 2)

        // Attach the recorder to the on-screen preview component.
        videoRecorder.attachCameraView(cameraView)
        // Toggle between front and back camera on tap.
        btnSwitch.setOnClickListener { cameraView.switchCamera() }
    }
}

本图文内容来源于网友网络收集整理提供,作为学习参考使用,版权属于原作者。
THE END
分享
二维码
< <上一篇
下一篇>>