What I'm trying to do is simply display the I420Frame ;) or save it to a JPEG. On the native Java side, the OpenGL shader code is already there in AppRTCDemo... and that's the only code sample that exists.
What if you just want to render to a normal ImageView? At this point I'm less concerned about performance than with getting the decoding right. The I420Frame object is in a byte format that isn't native to Android.
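For context, the (old) org.webrtc.VideoRenderer.I420Frame used below carries three separate planes with per-plane strides: a full-resolution Y plane and quarter-resolution U and V planes. Here is a minimal sketch of the byte count that layout implies (the same arithmetic the conversion code below uses; the helper name is just for illustration):

// Illustration only: total bytes occupied by an I420Frame, given its strides.
// yuvPlanes[0] = Y  (height rows,   stride yuvStrides[0])
// yuvPlanes[1] = U  (height/2 rows, stride yuvStrides[1])
// yuvPlanes[2] = V  (height/2 rows, stride yuvStrides[2])
private static int i420ByteSize(org.webrtc.VideoRenderer.I420Frame frame) {
    return frame.yuvStrides[0] * frame.height
            + frame.yuvStrides[1] * frame.height / 2
            + frame.yuvStrides[2] * frame.height / 2;
}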
Alex Cohn authored this code:
import java.nio.ByteBuffer;
import android.graphics.ImageFormat;
import android.graphics.YuvImage;

// Copies the full contents of src into dst and rewinds both buffers.
private static void copyPlane(ByteBuffer src, ByteBuffer dst) {
    src.position(0).limit(src.capacity());
    dst.put(src);
    dst.position(0).limit(dst.capacity());
}

// Packs the I420 planes into a single YV12 buffer (Y, then V, then U).
public static YuvImage ConvertTo(org.webrtc.VideoRenderer.I420Frame src) {
    byte[] bytes = new byte[src.yuvStrides[0] * src.height +
                            src.yuvStrides[1] * src.height / 2 +
                            src.yuvStrides[2] * src.height / 2];
    int[] strides = new int[3];
    ByteBuffer tmp = ByteBuffer.wrap(bytes, 0, src.yuvStrides[0] * src.height);
    copyPlane(src.yuvPlanes[0], tmp);
    tmp = ByteBuffer.wrap(bytes, src.yuvStrides[0] * src.height,
                          src.yuvStrides[2] * src.height / 2);
    copyPlane(src.yuvPlanes[2], tmp);
    tmp = ByteBuffer.wrap(bytes,
                          src.yuvStrides[0] * src.height + src.yuvStrides[2] * src.height / 2,
                          src.yuvStrides[1] * src.height / 2);
    copyPlane(src.yuvPlanes[1], tmp);
    strides[0] = src.yuvStrides[0];
    strides[1] = src.yuvStrides[2];
    strides[2] = src.yuvStrides[1];
    return new YuvImage(bytes, ImageFormat.YV12, src.width, src.height, strides);
}

// Same conversion, but the caller picks the target format (YV12 or NV21).
public static YuvImage ConvertTo(org.webrtc.VideoRenderer.I420Frame src, int imageFormat) {
    byte[] bytes = new byte[src.yuvStrides[0] * src.height +
                            src.yuvStrides[1] * src.height / 2 +
                            src.yuvStrides[2] * src.height / 2];
    int[] strides = new int[3];
    switch (imageFormat) {
        default:
            return null;
        case ImageFormat.YV12: {
            // Planar output: Y, then V, then U, keeping the source strides.
            ByteBuffer tmp = ByteBuffer.wrap(bytes, 0, src.yuvStrides[0] * src.height);
            copyPlane(src.yuvPlanes[0], tmp);
            tmp = ByteBuffer.wrap(bytes, src.yuvStrides[0] * src.height,
                                  src.yuvStrides[2] * src.height / 2);
            copyPlane(src.yuvPlanes[2], tmp);
            tmp = ByteBuffer.wrap(bytes,
                                  src.yuvStrides[0] * src.height + src.yuvStrides[2] * src.height / 2,
                                  src.yuvStrides[1] * src.height / 2);
            copyPlane(src.yuvPlanes[1], tmp);
            strides[0] = src.yuvStrides[0];
            strides[1] = src.yuvStrides[2];
            strides[2] = src.yuvStrides[1];
            return new YuvImage(bytes, imageFormat, src.width, src.height, strides);
        }
        case ImageFormat.NV21: {
            // NV21 needs tightly packed planes, so bail out if the strides don't match.
            if (src.yuvStrides[0] != src.width)
                return null;
            if (src.yuvStrides[1] != src.width / 2)
                return null;
            if (src.yuvStrides[2] != src.width / 2)
                return null;
            ByteBuffer tmp = ByteBuffer.wrap(bytes, 0, src.width * src.height);
            copyPlane(src.yuvPlanes[0], tmp);
            // Interleave V (even offsets) and U (odd offsets) after the Y plane.
            byte[] tmparray = new byte[src.width / 2 * src.height / 2];
            tmp = ByteBuffer.wrap(tmparray, 0, src.width / 2 * src.height / 2);
            copyPlane(src.yuvPlanes[2], tmp);
            for (int row = 0; row < src.height / 2; row++) {
                for (int col = 0; col < src.width / 2; col++) {
                    bytes[src.width * src.height + row * src.width + col * 2] =
                            tmparray[row * src.width / 2 + col];
                }
            }
            copyPlane(src.yuvPlanes[1], tmp);
            for (int row = 0; row < src.height / 2; row++) {
                for (int col = 0; col < src.width / 2; col++) {
                    bytes[src.width * src.height + row * src.width + col * 2 + 1] =
                            tmparray[row * src.width / 2 + col];
                }
            }
            return new YuvImage(bytes, imageFormat, src.width, src.height, null);
        }
    }
}
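To actually save a JPEG or feed an ImageView, here is a minimal usage sketch, assuming the conversion above behaves. YuvImage.compressToJpeg() only supports NV21 and YUY2 input, so this goes through the NV21 overload; the frame and imageView variables and the quality of 90 are just placeholders.

// Sketch: convert the frame, compress to JPEG, and show the result in an ImageView.
// "frame" is an org.webrtc.VideoRenderer.I420Frame, "imageView" an android.widget.ImageView.
android.graphics.YuvImage yuv = ConvertTo(frame, android.graphics.ImageFormat.NV21);
if (yuv != null) {
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    yuv.compressToJpeg(new android.graphics.Rect(0, 0, yuv.getWidth(), yuv.getHeight()), 90, out);
    byte[] jpegBytes = out.toByteArray();
    android.graphics.Bitmap bitmap =
            android.graphics.BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
    imageView.setImageBitmap(bitmap);   // or write jpegBytes to a file to save the JPEG
}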
So far, the colors aren't right in my testing on several versions of Android.