
Implementing a custom VideoCapturer (WebRTC) to capture real-time DJI drone video


Live-streaming DJI drone video with WebRTC

DJI's smart controller (the one with the built-in screen) already has a live-streaming feature, but it uses RTMP and the latency is fairly high. So instead we install our own app on the controller and use WebRTC to transmit the drone video in real time. This requires a custom VideoCapturer that captures the drone's video and wraps it into a stream WebRTC can consume.

1. Define a CapturerAircraft class that implements the VideoCapturer interface

public class CapturerAircraft implements VideoCapturer {
    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context context, CapturerObserver capturerObserver) {

    }

    @Override
    public void startCapture(int i, int i1, int i2) {

    }

    @Override
    public void stopCapture() throws InterruptedException {

    }

    @Override
    public void changeCaptureFormat(int i, int i1, int i2) {

    }

    @Override
    public void dispose() {

    }

    @Override
    public boolean isScreencast() {
        return false;
    }
}

2. Pass a DJICodecManager into the constructor

    public CapturerAircraft(DJICodecManager manager) {
        dji_code_manager_ = manager;
        dji_code_manager_.resetKeyFrame();
        if (isM300Product()) {
            OcuSyncLink ocuSyncLink = DJILogIn.getProductInstance().getAirLink().getOcuSyncLink();
            // If your multiple-lens camera is mounted at the right or top, change the PhysicalSource to RIGHT_CAM or TOP_CAM.
            ocuSyncLink.assignSourceToPrimaryChannel(PhysicalSource.LEFT_CAM, PhysicalSource.FPV_CAM, new CommonCallbacks.CompletionCallback() {
                @Override
                public void onResult(DJIError error) {
                    if (error == null) {
//                        showToast("assignSourceToPrimaryChannel success.");
                    } else {
//                        showToast("assignSourceToPrimaryChannel fail, reason: "+ error.getDescription());
                    }
                }
            });
        }
    }

    public static boolean isM300Product() {
        if (DJISDKManager.getInstance().getProduct() == null) {
            return false;
        }
        Model model = DJISDKManager.getInstance().getProduct().getModel();
        return model == Model.MATRICE_300_RTK;
    }
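
For reference, here is a minimal sketch (not part of the original post) of where the DJICodecManager passed to the constructor could come from. It assumes DJI Mobile SDK v4, where the manager is typically created on the main thread from a SurfaceTexture and then fed the raw H.264 stream through VideoFeeder; the 1280x720 surface size is only an illustrative value, and context stands for your Activity or application Context.

// Sketch, assuming DJI Mobile SDK v4: create the decoder on the main thread
// and feed it the aircraft's primary video stream.
SurfaceTexture surfaceTexture = new SurfaceTexture(/* texName= */ 0);
DJICodecManager codecManager = new DJICodecManager(context, surfaceTexture, 1280, 720);

// Forward the raw H.264 feed into the decoder.
VideoFeeder.getInstance().getPrimaryVideoFeed().addVideoDataListener(
        (videoBuffer, size) -> codecManager.sendDataToDecoder(videoBuffer, size));

// Hand the manager to the capturer defined above.
CapturerAircraft capturer = new CapturerAircraft(codecManager);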

3. Create a DJIYuvDataCallback instance in initialize

DJIYuvDataCallback's onYuvDataReceived method receives the YUV data of each frame. We also keep the CapturerObserver passed into initialize; it is what feeds the drone's captured frames into WebRTC as an I420 video stream.

    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context context, CapturerObserver capturerObserver) {
        //For M300RTK, you need to actively request an I frame.
        dji_yuv_data_callback_ = new DJIYuvDataCallback();
        capturer_observer_ = capturerObserver;
    }
    
    private class DJIYuvDataCallback implements DJICodecManager.YuvDataCallback {
        @Override
        public void onYuvDataReceived(MediaFormat mediaFormat, ByteBuffer byteBuffer, int dataSize, int width, int height) {
            if (is_rate_time) {
                //is_rate_time = false;

                JavaI420Buffer buffer = JavaI420Buffer.allocate(width, height);
                ByteBuffer dataY = buffer.getDataY();
                ByteBuffer dataU = buffer.getDataU();
                ByteBuffer dataV = buffer.getDataV();

                final byte[] bytes = new byte[dataSize];
                byteBuffer.get(bytes);

                int yLen = width * height;
                int uLen = (width + 1) / 2 * ((height + 1) / 2);
                int vLen = uLen;

                int color_format = mediaFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                switch (color_format) {
                    case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
                        //NV12
                        byte[] ubytes = new byte[uLen];
                        byte[] vbytes = new byte[vLen];
                        for (int i = 0; i < uLen; i++) {
                            ubytes[i] = bytes[yLen + 2 * i];
                            vbytes[i] = bytes[yLen + 2 * i + 1];
                        }

                        dataY.put(bytes, 0, yLen);
                        dataU.put(ubytes, 0, uLen);
                        dataV.put(vbytes, 0, vLen);

                        break;
                    case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                        //YUV420P=I420

                        int yPos = 0;
                        int uPos = yPos + yLen;
                        int vPos = uPos + uLen;

                        dataY.put(bytes, yPos, yLen);
                        dataU.put(bytes, uPos, uLen);
                        dataV.put(bytes, vPos, vLen);

                        break;
                    default:
                        break;
                }
                long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
                VideoFrame videoFrame = new VideoFrame(buffer, 0, captureTimeNs);
                capturer_observer_.onFrameCaptured(videoFrame);
                videoFrame.release();
            }
        }
    }
    

4. Register the YUV frame callback with setYuvDataCallback in startCapture

    @Override
    public void startCapture(int width, int height, int framerate) {
        this.frame_width_ = width;
        this.frame_height_ = height;
        this.frame_rate_ = framerate;
        this.timer.schedule(this.tickTask, 0L, (long)(1000 / frame_rate_));
        dji_code_manager_.enabledYuvData(true);
        dji_code_manager_.setYuvDataCallback(dji_yuv_data_callback_);
    }
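
A note on the timer (an observation, not from the original post): because the reset "is_rate_time = false;" inside onYuvDataReceived is commented out, the flag stays true after the first timer tick, so every decoded frame is forwarded and the timer does not actually limit the output to frame_rate_. If real throttling is wanted, a simple timestamp check could replace the Timer/TimerTask, for example (hypothetical field and method names):

    // Hypothetical throttle: forward a frame only if at least one frame interval has passed.
    private long last_forward_ms_ = 0;

    private boolean shouldForwardFrame() {
        long now = SystemClock.elapsedRealtime();
        if (now - last_forward_ms_ < 1000 / frame_rate_) {
            return false;   // too soon since the last forwarded frame
        }
        last_forward_ms_ = now;
        return true;
    }

onYuvDataReceived would then call shouldForwardFrame() instead of checking is_rate_time.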

That completes CapturerAircraft. The full code is listed below:

package com.example.livebroadcastfordrone;

import android.content.Context;
import android.media.MediaFormat;
import android.media.MediaCodecInfo;
import android.os.SystemClock;

import org.webrtc.CapturerObserver;
import org.webrtc.JavaI420Buffer;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;

import java.nio.ByteBuffer;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;

import dji.common.airlink.PhysicalSource;
import dji.common.error.DJIError;
import dji.common.product.Model;
import dji.common.util.CommonCallbacks;
import dji.sdk.airlink.OcuSyncLink;
import dji.sdk.codec.DJICodecManager;
import dji.sdk.sdkmanager.DJISDKManager;

public class CapturerAircraft implements VideoCapturer{
    private DJICodecManager dji_code_manager_;
    private DJIYuvDataCallback dji_yuv_data_callback_;

    private CapturerObserver capturer_observer_;

    private int frame_width_;
    private int frame_height_;
    private int frame_rate_;

    private boolean is_rate_time = false;
    private final Timer timer = new Timer();
    private final TimerTask tickTask = new TimerTask() {
        public void run() {
            is_rate_time = true;
        }
    };

    public CapturerAircraft(DJICodecManager manager) {
        dji_code_manager_ = manager;
        dji_code_manager_.resetKeyFrame();
        if (isM300Product()) {
            OcuSyncLink ocuSyncLink = DJILogIn.getProductInstance().getAirLink().getOcuSyncLink();
            // If your multiple-lens camera is mounted at the right or top, change the PhysicalSource to RIGHT_CAM or TOP_CAM.
            ocuSyncLink.assignSourceToPrimaryChannel(PhysicalSource.LEFT_CAM, PhysicalSource.FPV_CAM, new CommonCallbacks.CompletionCallback() {
                @Override
                public void onResult(DJIError error) {
                    if (error == null) {
//                        showToast("assignSourceToPrimaryChannel success.");
                    } else {
//                        showToast("assignSourceToPrimaryChannel fail, reason: "+ error.getDescription());
                    }
                }
            });
        }
    }

    public static boolean isM300Product() {
        if (DJISDKManager.getInstance().getProduct() == null) {
            return false;
        }
        Model model = DJISDKManager.getInstance().getProduct().getModel();
        return model == Model.MATRICE_300_RTK;
    }

    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context context, CapturerObserver capturerObserver) {
        //For M300RTK, you need to actively request an I frame.
        dji_yuv_data_callback_ = new DJIYuvDataCallback();
        capturer_observer_ = capturerObserver;
    }

    private void checkNotDisposed() {
    }

    @Override
    public void startCapture(int width, int height, int framerate) {
        this.frame_width_ = width;
        this.frame_height_ = height;
        this.frame_rate_ = framerate;
        this.timer.schedule(this.tickTask, 0L, (long)(1000 / frame_rate_));
        dji_code_manager_.enabledYuvData(true);
        dji_code_manager_.setYuvDataCallback(dji_yuv_data_callback_);
    }

    @Override
    public void stopCapture() throws InterruptedException {
        this.timer.cancel();
        dji_code_manager_.enabledYuvData(false);
        dji_code_manager_.setYuvDataCallback(null);
    }

    @Override
    public void changeCaptureFormat(int width, int height, int framerate) {

    }

    @Override
    public void dispose() {

    }

    @Override
    public boolean isScreencast() {
        return false;
    }


    private class DJIYuvDataCallback implements DJICodecManager.YuvDataCallback {
        @Override
        public void onYuvDataReceived(MediaFormat mediaFormat, ByteBuffer byteBuffer, int dataSize, int width, int height) {
            if (is_rate_time) {
                //is_rate_time = false;

                JavaI420Buffer buffer = JavaI420Buffer.allocate(width, height);
                ByteBuffer dataY = buffer.getDataY();
                ByteBuffer dataU = buffer.getDataU();
                ByteBuffer dataV = buffer.getDataV();

                final byte[] bytes = new byte[dataSize];
                byteBuffer.get(bytes);

                int yLen = width * height;
                int uLen = (width + 1) / 2 * ((height + 1) / 2);
                int vLen = uLen;

                int color_format = mediaFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                switch (color_format) {
                    case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
                        //NV12
                        byte[] ubytes = new byte[uLen];
                        byte[] vbytes = new byte[vLen];
                        for (int i = 0; i < uLen; i++) {
                            ubytes[i] = bytes[yLen + 2 * i];
                            vbytes[i] = bytes[yLen + 2 * i + 1];
                        }

                        dataY.put(bytes, 0, yLen);
                        dataU.put(ubytes, 0, uLen);
                        dataV.put(vbytes, 0, vLen);

                        break;
                    case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                        //YUV420P=I420

                        int yPos = 0;
                        int uPos = yPos + yLen;
                        int vPos = uPos + uLen;

                        dataY.put(bytes, yPos, yLen);
                        dataU.put(bytes, uPos, uLen);
                        dataV.put(bytes, vPos, vLen);

                        break;
                    default:
                        break;
                }
                long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
                VideoFrame videoFrame = new VideoFrame(buffer, 0, captureTimeNs);
                capturer_observer_.onFrameCaptured(videoFrame);
                videoFrame.release();
            }
        }
    }
}

Note: the DJICodecManager instance must be created on the main thread.
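
To actually use the capturer it still has to be plugged into a WebRTC pipeline; the post does not show that part. The following is a minimal sketch assuming a standard libwebrtc Android setup, where factory (a PeerConnectionFactory), context and codecManager are assumed to already exist and the 1280x720@30 capture format is just an example:

EglBase eglBase = EglBase.create();

// CapturerAircraft ignores the SurfaceTextureHelper, but the interface requires one.
SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("DroneCaptureThread", eglBase.getEglBaseContext());

VideoSource videoSource = factory.createVideoSource(/* isScreencast= */ false);
CapturerAircraft capturer = new CapturerAircraft(codecManager);
capturer.initialize(helper, context, videoSource.getCapturerObserver());
capturer.startCapture(1280, 720, 30);

// The resulting track can be added to a PeerConnection like any camera track.
VideoTrack droneTrack = factory.createVideoTrack("drone_video", videoSource);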

Source: https://blog.csdn.net/qq_34214088/article/details/121938383