其他分享
首页 > 其他分享> > Android org.webrtc.VideoRenderer.I420Frame数组转换为PreviewCallback.onPreviewFrame字节[]

Android org.webrtc.VideoRenderer.I420Frame数组转换为PreviewCallback.onPreviewFrame字节[]

作者:互联网

我一直希望一些代码会出现在互联网上,但是一无所获;)

WebRTC传入的I420Frame对象似乎具有3个yuvPlanes数组

典型的Android相机应用程序将PreviewCallback.onPreviewFrame byte []作为单个字节数组获取.

有人可以帮助我如何将I420Frames yuvPlanes转换为单个字节[]数组,例如PreviewCallback.onPreviewFrame byte [] YCbCr_420_SP(NV21)吗?

作为参考,VideoStreamsView.java具有将此代码渲染到OpenGL的功能-但我只希望它像摄像机预览一样;)从:https://code.google.com/p/libjingle/source/browse/trunk/talk/examples/android/src/org/appspot/apprtc/VideoStreamsView.java?r=286

// Upload the YUV planes from |frame| to |textures|.
// Plane 0 is the full-resolution luma (Y) plane; planes 1 and 2 are the
// 2x2-subsampled chroma planes, each uploaded as its own single-channel
// GL_LUMINANCE texture (one texture unit per plane).
private void texImage2D(I420Frame frame, int[] textures) {
for (int i = 0; i < 3; ++i) {
  ByteBuffer plane = frame.yuvPlanes[i];
  GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
  GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
  // Chroma planes are half-size in both dimensions (4:2:0 layout).
  int w = i == 0 ? frame.width : frame.width / 2;
  int h = i == 0 ? frame.height : frame.height / 2;
  // This upload path only handles tightly packed planes: the row stride
  // must equal the row width, otherwise bail out.
  abortUnless(w == frame.yuvStrides[i], frame.yuvStrides[i] + "!=" + w);
  GLES20.glTexImage2D(
      GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0,
      GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, plane);
}
checkNoGLES2Error();
}

谢谢.

解决方法:

好,到这里:

// Bulk-copy every byte of |src| into |dst|.
// Both buffers' position/limit are treated as scratch state: |src| is rewound
// to cover its full capacity before the copy, and |dst| is rewound afterwards
// so callers can reuse it immediately.
//** copied from org/webrtc/VideoRenderer.java **//
private static void copyPlane(ByteBuffer src, ByteBuffer dst) {
  src.position(0);
  src.limit(src.capacity());
  dst.put(src);
  dst.position(0);
  dst.limit(dst.capacity());
}

/**
 * Converts a WebRTC I420Frame into an android.graphics.YuvImage in the
 * requested format, mimicking what a camera PreviewCallback would deliver
 * (so it can e.g. be fed to YuvImage.compressToJpeg()).
 *
 * Assumes yuvPlanes are ordered Y, U, V — plane 2 (V) is written before
 * plane 1 (U), matching the YV12/NV21 byte layouts. TODO confirm against
 * org.webrtc.VideoRenderer.I420Frame.
 *
 * @param src         source frame; per-plane row strides in yuvStrides.
 * @param imageFormat android.graphics.ImageFormat.YV12 or NV21.
 * @return the converted image, or null for any other imageFormat.
 */
public static android.graphics.YuvImage ConvertTo(org.webrtc.VideoRenderer.I420Frame src, int imageFormat) {
    switch (imageFormat) {
    default:
        return null;

    // YV12: fully planar — full-size Y plane, then the half-resolution V
    // plane, then the half-resolution U plane.
    case android.graphics.ImageFormat.YV12: {
        // One full-height Y plane plus two half-height chroma planes,
        // all at their native strides.
        byte[] bytes = new byte[src.yuvStrides[0]*src.height +
                            src.yuvStrides[1]*src.height/2 + 
                            src.yuvStrides[2]*src.height/2];
        // Y plane at the start of the output array.
        ByteBuffer tmp = ByteBuffer.wrap(bytes, 0, src.yuvStrides[0]*src.height);
        copyPlane(src.yuvPlanes[0], tmp);
        // V plane (index 2) directly after Y — YV12 stores V before U.
        tmp = ByteBuffer.wrap(bytes, src.yuvStrides[0]*src.height, src.yuvStrides[2]*src.height/2);
        copyPlane(src.yuvPlanes[2], tmp);
        // U plane (index 1) last.
        tmp = ByteBuffer.wrap(bytes, src.yuvStrides[0]*src.height+src.yuvStrides[2]*src.height/2, src.yuvStrides[1]*src.height/2);
        copyPlane(src.yuvPlanes[1], tmp);
        int[] strides = src.yuvStrides.clone();
        return new YuvImage(bytes, imageFormat, src.width, src.height, strides);
    }

    // NV21: full-size Y plane followed by a single interleaved V/U plane.
    case android.graphics.ImageFormat.NV21: {
        // The fast path below assumes tightly packed planes (stride == row
        // width); fall back to the slower row-by-row copy otherwise.
        if (src.yuvStrides[0] != src.width)
            return convertLineByLine(src);
        if (src.yuvStrides[1] != src.width/2)
            return convertLineByLine(src);
        if (src.yuvStrides[2] != src.width/2)
            return convertLineByLine(src);

        byte[] bytes = new byte[src.yuvStrides[0]*src.height +
                            src.yuvStrides[1]*src.height/2 + 
                            src.yuvStrides[2]*src.height/2];
        // Y plane copies over unchanged.
        ByteBuffer tmp = ByteBuffer.wrap(bytes, 0, src.width*src.height);
        copyPlane(src.yuvPlanes[0], tmp);

        // Staging buffer holding one half-resolution chroma plane at a time.
        byte[] tmparray = new byte[src.width/2*src.height/2];
        tmp = ByteBuffer.wrap(tmparray, 0, src.width/2*src.height/2);

        // V samples (plane 2) go to the even offsets of the chroma section.
        copyPlane(src.yuvPlanes[2], tmp);
        for (int row=0; row<src.height/2; row++) {
            for (int col=0; col<src.width/2; col++) {
                bytes[src.width*src.height + row*src.width + col*2] = tmparray[row*src.width/2 + col];
            }
        }
        // U samples (plane 1) fill the odd offsets, completing the V/U pairs.
        // (copyPlane rewinds |tmp| on exit, so the staging buffer is reusable.)
        copyPlane(src.yuvPlanes[1], tmp);
        for (int row=0; row<src.height/2; row++) {
            for (int col=0; col<src.width/2; col++) {
                bytes[src.width*src.height + row*src.width + col*2+1] = tmparray[row*src.width/2 + col];
            }
        }
        // Null strides: planes are tightly packed here (checked above).
        return new YuvImage(bytes, imageFormat, src.width, src.height, null);
    }
    }
}

/**
 * Slow-path conversion of an I420Frame to an NV21 YuvImage, copying row by
 * row so frames whose plane strides exceed the row width (padded rows) are
 * handled correctly.
 *
 * Fix: yuvPlanes elements are ByteBuffers, so bytes must be read with the
 * absolute ByteBuffer.get(int) accessor — the original code used array
 * indexing (yuvPlanes[0][i]) on a ByteBuffer, which does not compile.
 *
 * @param src source frame; assumes planes ordered Y, U, V — TODO confirm
 *            against org.webrtc.VideoRenderer.I420Frame.
 * @return an ImageFormat.NV21 YuvImage of src.width x src.height.
 */
public static android.graphics.YuvImage convertLineByLine(org.webrtc.VideoRenderer.I420Frame src) {
    // NV21 layout: width*height luma bytes, then width*height/2 interleaved V/U.
    byte[] bytes = new byte[src.width*src.height*3/2];
    int i=0;
    // Copy the Y plane, skipping any per-row stride padding.
    for (int row=0; row<src.height; row++) {
        for (int col=0; col<src.width; col++) {
            bytes[i++] = src.yuvPlanes[0].get(col+row*src.yuvStrides[0]);
        }
    }
    // Chroma is 2x2 subsampled; NV21 interleaves V (plane 2) before U (plane 1).
    for (int row=0; row<src.height/2; row++) {
        for (int col=0; col<src.width/2; col++) {
            bytes[i++] = src.yuvPlanes[2].get(col+row*src.yuvStrides[2]);
            bytes[i++] = src.yuvPlanes[1].get(col+row*src.yuvStrides[1]);
        }
    }
    return new YuvImage(bytes, android.graphics.ImageFormat.NV21, src.width, src.height, null);
}
}

这会将I420Frame转换为android.graphics.ImageFormat.NV21格式的Android YuvImage，之后可以调用其compressToJpeg()方法压缩为JPEG。SDK中对ImageFormat.YV12的支持似乎比较有限。请注意，V和U两个色度平面必须交错（混洗）存放。

为了简洁起见,大多数错误检查都被跳过.

标签:yuv,webrtc,android-camera,android-image,android
来源: https://codeday.me/bug/20191122/2058792.html