Implement Rotation in Android Renderer.
Make use of rotation information from the frame and rotate it accordingly when we render the frame.

BUG=4145
R=glaznev@webrtc.org

Committed: https://code.google.com/p/webrtc/source/detail?r=8770

Review URL: https://webrtc-codereview.appspot.com/50369004

Cr-Commit-Position: refs/heads/master@{#8781}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8781 4adac7df-926f-26a2-2b94-8c16560cd09d
parent 143451d259
commit 840da7b755
@@ -52,6 +52,12 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
       }
     }

+    // TODO(guoweis): Remove this once chrome code base is updated.
+    @Override
+    public boolean canApplyRotation() {
+      return false;
+    }
+
     public int WaitForNextFrameToRender() throws InterruptedException {
       synchronized (frameLock) {
         frameLock.wait();
@@ -37,6 +37,7 @@ import java.util.concurrent.LinkedBlockingQueue;
 import javax.microedition.khronos.egl.EGLConfig;
 import javax.microedition.khronos.opengles.GL10;

+import android.annotation.SuppressLint;
 import android.graphics.SurfaceTexture;
 import android.opengl.EGL14;
 import android.opengl.EGLContext;
@@ -264,6 +265,22 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     private int videoWidth;
     private int videoHeight;

+    // This is the degree that the frame should be rotated clockwise to have
+    // it rendered upright.
+    private int rotationDegree;
+
+    // Mapping array from the original UV mapping to the rotated mapping. The
+    // number is the position the original UV coordinate should be mapped
+    // to. (0,1) is the left top coord. (2,3) is the left bottom. (4,5) is the
+    // right top. (6,7) is the right bottom. Note that this is the coordinate
+    // that got rotated. For example, using the original left bottom (2,3) as
+    // the top left (0,1) means 90 degree clockwise rotation.
+    private static int rotation_matrix[][] =
+        { {0, 1, 2, 3, 4, 5, 6, 7}, // 0 degree
+          {2, 3, 6, 7, 0, 1, 4, 5}, // 90 degree (clockwise)
+          {6, 7, 4, 5, 2, 3, 0, 1}, // 180 degree (clockwise)
+          {4, 5, 0, 1, 6, 7, 2, 3} };// 270 degree (clockwise)
+
     private YuvImageRenderer(
         GLSurfaceView surface, int id,
         int x, int y, int width, int height,
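The mapping table is easiest to read with a concrete case. Below is a standalone sketch (not part of the change; the class name and corner labels are made up) that applies the 90 degree row the same way the textureCoordinatesRotatedFloat loop later in this file does:

    // Standalone sketch: apply the 90-degree row of rotation_matrix to
    // labelled corner values. As in the diff, the value at original index i
    // lands at index rotation_matrix[row][i] of the rotated array.
    public class UvRotationSketch {
      public static void main(String[] args) {
        // Original order: left top, left bottom, right top, right bottom (u,v pairs).
        String[] src = { "LT.u", "LT.v", "LB.u", "LB.v",
                         "RT.u", "RT.v", "RB.u", "RB.v" };
        int[] map90 = { 2, 3, 6, 7, 0, 1, 4, 5 };  // 90 degree (clockwise) row

        String[] dst = new String[src.length];
        for (int i = 0; i < src.length; i++) {
          dst[map90[i]] = src[i];
        }
        // Prints [RT.u, RT.v, LT.u, LT.v, RB.u, RB.v, LB.u, LB.v]: every
        // screen corner now samples the UV of an adjacent source corner.
        System.out.println(java.util.Arrays.toString(dst));
      }
    }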
@@ -292,6 +309,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       };
       textureCoords = directNativeFloatBuffer(textureCoordinatesFloat);
       updateTextureProperties = false;
+      rotationDegree = 0;
     }

     private void createTextures(int yuvProgram, int oesProgram) {
@@ -340,7 +358,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       if (displayWidth > 1 && displayHeight > 1 &&
           videoWidth > 1 && videoHeight > 1) {
         float displayAspectRatio = displayWidth / displayHeight;
-        float videoAspectRatio = (float)videoWidth / videoHeight;
+        // videoAspectRatio should be the one after rotation applied.
+        float videoAspectRatio = 0;
+        if (rotationDegree == 90 || rotationDegree == 270) {
+          videoAspectRatio = (float)videoHeight / videoWidth;
+        } else {
+          videoAspectRatio = (float)videoWidth / videoHeight;
+        }
         if (scalingType == ScalingType.SCALE_ASPECT_FIT) {
           // Need to re-adjust vertices width or height to match video AR.
           if (displayAspectRatio > videoAspectRatio) {
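Since a 90 or 270 degree rotation swaps the rendered width and height, the aspect ratio used for SCALE_ASPECT_FIT letterboxing has to come from the rotated dimensions. A minimal standalone sketch of that computation (class and method names are made up; degrees are assumed to be multiples of 90):

    final class AspectRatioSketch {
      // Mirrors the branch in the hunk above: for 90/270 the displayed
      // width and height swap, so the aspect ratio swaps as well.
      static float rotatedAspectRatio(int videoWidth, int videoHeight, int rotationDegree) {
        if (rotationDegree % 180 != 0) {             // 90 or 270
          return (float) videoHeight / videoWidth;   // e.g. 640x480 at 90 -> 0.75
        }
        return (float) videoWidth / videoHeight;     // e.g. 640x480 at 0 -> ~1.33
      }
    }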
@@ -389,7 +413,21 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
           uRight, texOffsetV,          // right top
           uRight, 1.0f - texOffsetV    // right bottom
         };
-        textureCoords = directNativeFloatBuffer(textureCoordinatesFloat);
+
+        float textureCoordinatesRotatedFloat[];
+        if (rotationDegree == 0) {
+          textureCoordinatesRotatedFloat = textureCoordinatesFloat;
+        } else {
+          textureCoordinatesRotatedFloat =
+              new float[textureCoordinatesFloat.length];
+          int index = rotationDegree / 90;
+          for(int i = 0; i < textureCoordinatesFloat.length; i++) {
+            textureCoordinatesRotatedFloat[rotation_matrix[index][i]] =
+                textureCoordinatesFloat[i];
+          }
+        }
+        textureCoords =
+            directNativeFloatBuffer(textureCoordinatesRotatedFloat);
       }
       updateTextureProperties = false;
     }
@@ -528,16 +566,19 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       }
     }

-    private void setSize(final int width, final int height) {
-      if (width == videoWidth && height == videoHeight) {
-        return;
+    private void setSize(final int width, final int height,
+        final int rotation) {
+      if (width == videoWidth && height == videoHeight &&
+          rotation == rotationDegree) {
+        return;
       }

       Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
-          width + " x " + height);
+          width + " x " + height + " rotation " + rotation);

       videoWidth = width;
       videoHeight = height;
+      rotationDegree = rotation;
       int[] strides = { width, width / 2, width / 2 };
       // Frame re-allocation need to be synchronized with copying
       // frame to textures in draw() function to avoid re-allocating
@@ -546,15 +587,17 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
         // Clear rendering queue.
         frameToRenderQueue.poll();
         // Re-allocate / allocate the frame.
-        yuvFrameToRender = new I420Frame(width, height, strides, null);
-        textureFrameToRender = new I420Frame(width, height, null, -1);
+        yuvFrameToRender = new I420Frame(width, height, rotationDegree,
+                                         strides, null);
+        textureFrameToRender = new I420Frame(width, height, rotationDegree,
+                                             null, -1);
         updateTextureProperties = true;
       }
     }

     @Override
     public synchronized void renderFrame(I420Frame frame) {
-      setSize(frame.width, frame.height);
+      setSize(frame.width, frame.height, frame.rotationDegree);
       long now = System.nanoTime();
       framesReceived++;
       // Skip rendering of this frame if setSize() was not called.
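setSize() now re-allocates the staging frames whenever the size or the rotation changes, using the three-int I420Frame constructors added further down in VideoRenderer.java. A hypothetical standalone construction, with illustrative values:

    import java.nio.ByteBuffer;
    import org.webrtc.VideoRenderer;

    final class I420FrameSketch {
      static VideoRenderer.I420Frame makeFrame() {
        // A 640x480 YUV frame that still needs a 90 degree clockwise rotation
        // at render time (all values are illustrative).
        int width = 640, height = 480, rotationDegree = 90;
        int[] strides = { width, width / 2, width / 2 };  // I420 strides: Y, U, V
        ByteBuffer[] planes = null;  // null: the constructor allocates the planes
        return new VideoRenderer.I420Frame(width, height, rotationDegree, strides, planes);
      }
    }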
@@ -602,6 +645,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       surface.requestRender();
     }

+    // TODO(guoweis): Remove this once chrome code base is updated.
+    @Override
+    public boolean canApplyRotation() {
+      return true;
+    }
   }

   /** Passes GLSurfaceView to video renderer. */
@@ -712,6 +760,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     }
   }

+  @SuppressLint("NewApi")
   @Override
   public void onSurfaceCreated(GL10 unused, EGLConfig config) {
     Log.d(TAG, "VideoRendererGui.onSurfaceCreated");
@@ -709,6 +709,9 @@ class VideoRendererWrapper : public VideoRendererInterface {
     renderer_->RenderFrame(frame);
   }

+  // TODO(guoweis): Remove this once chrome code base is updated.
+  bool CanApplyRotation() override { return true; }
+
 private:
  explicit VideoRendererWrapper(cricket::VideoRenderer* renderer)
      : renderer_(renderer), width_(0), height_(0) {}
@@ -725,14 +728,19 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
         j_render_frame_id_(GetMethodID(
             jni, GetObjectClass(jni, j_callbacks), "renderFrame",
             "(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
+        j_can_apply_rotation_id_(GetMethodID(
+            jni, GetObjectClass(jni, j_callbacks),
+            "canApplyRotation", "()Z")),
         j_frame_class_(jni,
             FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
         j_i420_frame_ctor_id_(GetMethodID(
-            jni, *j_frame_class_, "<init>", "(II[I[Ljava/nio/ByteBuffer;)V")),
+            jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;)V")),
         j_texture_frame_ctor_id_(GetMethodID(
             jni, *j_frame_class_, "<init>",
-            "(IILjava/lang/Object;I)V")),
-        j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
+            "(IIILjava/lang/Object;I)V")),
+        j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")),
+        can_apply_rotation_set_(false),
+        can_apply_rotation_(false) {
     CHECK_EXCEPTION(jni);
   }

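Both I420Frame constructors gain an int rotationDegree parameter, which is why the JNI signature strings above grow an extra "I". The correspondence between the Java constructors (shown later in VideoRenderer.java) and the signature strings, collected in a small illustrative class:

    // Illustrative only: the strings are copied from the GetMethodID calls above.
    // "I" = int, "[I" = int[], "[Ljava/nio/ByteBuffer;" = ByteBuffer[],
    // "Ljava/lang/Object;" = Object, and the trailing "V" is the void return of <init>.
    final class JniSignatureSketch {
      // I420Frame(int width, int height, int rotationDegree,
      //           int[] yuvStrides, ByteBuffer[] yuvPlanes)
      static final String I420_CTOR_SIG = "(III[I[Ljava/nio/ByteBuffer;)V";

      // I420Frame(int width, int height, int rotationDegree,
      //           Object textureObject, int textureId)
      static final String TEXTURE_CTOR_SIG = "(IIILjava/lang/Object;I)V";
    }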
@@ -741,10 +749,12 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
   void RenderFrame(const cricket::VideoFrame* video_frame) override {
     ScopedLocalRefFrame local_ref_frame(jni());

-    // TODO(guoweis): Remove once the java implementation supports rotation.
-    const cricket::VideoFrame* frame =
-        video_frame->GetCopyWithRotationApplied();
-
+    // Calling CanApplyRotation here to ensure can_apply_rotation_ is set.
+    CanApplyRotation();
+
+    const cricket::VideoFrame* frame =
+        can_apply_rotation_ ? video_frame
+                            : video_frame->GetCopyWithRotationApplied();
     if (frame->GetNativeHandle() != NULL) {
       jobject j_frame = CricketToJavaTextureFrame(frame);
       jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
@@ -756,6 +766,21 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
     }
   }

+  // TODO(guoweis): Report that rotation is supported as RenderFrame calls
+  // GetCopyWithRotationApplied.
+  virtual bool CanApplyRotation() override {
+    if (can_apply_rotation_set_) {
+      return can_apply_rotation_;
+    }
+    ScopedLocalRefFrame local_ref_frame(jni());
+    jboolean ret =
+        jni()->CallBooleanMethod(*j_callbacks_, j_can_apply_rotation_id_);
+    CHECK_EXCEPTION(jni());
+    can_apply_rotation_ = ret;
+    can_apply_rotation_set_ = true;
+    return ret;
+  }
+
 private:
  // Return a VideoRenderer.I420Frame referring to the data in |frame|.
  jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
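CanApplyRotation() asks the Java renderer once whether it handles rotation itself and remembers the answer, so later RenderFrame calls can branch without another JNI round trip. The same memoization idea, sketched in Java terms (the class is hypothetical; only Callbacks.canApplyRotation() comes from this change):

    // Hypothetical Java rendering of the cached capability probe used above:
    // query the renderer once, reuse the answer for every subsequent frame.
    final class RotationCapabilityCache {
      private final org.webrtc.VideoRenderer.Callbacks callbacks;
      private boolean canApplyRotationSet = false;
      private boolean canApplyRotation = false;

      RotationCapabilityCache(org.webrtc.VideoRenderer.Callbacks callbacks) {
        this.callbacks = callbacks;
      }

      boolean canApplyRotation() {
        if (!canApplyRotationSet) {
          canApplyRotation = callbacks.canApplyRotation();  // single probe
          canApplyRotationSet = true;
        }
        return canApplyRotation;
      }
    }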
@@ -778,7 +803,9 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
     jni()->SetObjectArrayElement(planes, 2, v_buffer);
     return jni()->NewObject(
         *j_frame_class_, j_i420_frame_ctor_id_,
-        frame->GetWidth(), frame->GetHeight(), strides, planes);
+        frame->GetWidth(), frame->GetHeight(),
+        static_cast<int>(frame->GetVideoRotation()),
+        strides, planes);
   }

   // Return a VideoRenderer.I420Frame referring texture object in |frame|.
@@ -789,7 +816,9 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
     int texture_id = handle->GetTextureId();
     return jni()->NewObject(
         *j_frame_class_, j_texture_frame_ctor_id_,
-        frame->GetWidth(), frame->GetHeight(), texture_object, texture_id);
+        frame->GetWidth(), frame->GetHeight(),
+        static_cast<int>(frame->GetVideoRotation()),
+        texture_object, texture_id);
   }

   JNIEnv* jni() {
@@ -798,10 +827,13 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {

   ScopedGlobalRef<jobject> j_callbacks_;
   jmethodID j_render_frame_id_;
+  jmethodID j_can_apply_rotation_id_;
   ScopedGlobalRef<jclass> j_frame_class_;
   jmethodID j_i420_frame_ctor_id_;
   jmethodID j_texture_frame_ctor_id_;
   ScopedGlobalRef<jclass> j_byte_buffer_class_;
+  bool can_apply_rotation_set_;
+  bool can_apply_rotation_;
 };


@@ -47,13 +47,18 @@ public class VideoRenderer {
     public Object textureObject;
     public int textureId;

+    // rotationDegree is the degree that the frame must be rotated clockwise
+    // to be rendered correctly.
+    public int rotationDegree;
+
     /**
      * Construct a frame of the given dimensions with the specified planar
      * data. If |yuvPlanes| is null, new planes of the appropriate sizes are
      * allocated.
      */
     public I420Frame(
-        int width, int height, int[] yuvStrides, ByteBuffer[] yuvPlanes) {
+        int width, int height, int rotationDegree,
+        int[] yuvStrides, ByteBuffer[] yuvPlanes) {
       this.width = width;
       this.height = height;
       this.yuvStrides = yuvStrides;
@@ -65,13 +70,15 @@ public class VideoRenderer {
       }
       this.yuvPlanes = yuvPlanes;
       this.yuvFrame = true;
+      this.rotationDegree = rotationDegree;
     }

     /**
      * Construct a texture frame of the given dimensions with data in SurfaceTexture
      */
     public I420Frame(
-        int width, int height, Object textureObject, int textureId) {
+        int width, int height, int rotationDegree,
+        Object textureObject, int textureId) {
       this.width = width;
       this.height = height;
       this.yuvStrides = null;
@@ -79,6 +86,7 @@ public class VideoRenderer {
       this.textureObject = textureObject;
       this.textureId = textureId;
       this.yuvFrame = false;
+      this.rotationDegree = rotationDegree;
     }

     /**
@@ -98,10 +106,12 @@ public class VideoRenderer {
             source.yuvStrides[1], yuvPlanes[1], yuvStrides[1]);
         nativeCopyPlane(source.yuvPlanes[2], width / 2, height / 2,
             source.yuvStrides[2], yuvPlanes[2], yuvStrides[2]);
+        rotationDegree = source.rotationDegree;
         return this;
       } else if (!source.yuvFrame && !yuvFrame) {
         textureObject = source.textureObject;
         textureId = source.textureId;
+        rotationDegree = source.rotationDegree;
         return this;
       } else {
         throw new RuntimeException("Mismatched frame types! Source: " +
@@ -109,7 +119,7 @@ public class VideoRenderer {
       }
     }

-    public I420Frame copyFrom(byte[] yuvData) {
+    public I420Frame copyFrom(byte[] yuvData, int rotationDegree) {
       if (yuvData.length < width * height * 3 / 2) {
         throw new RuntimeException("Wrong arrays size: " + yuvData.length);
       }
@@ -128,6 +138,7 @@ public class VideoRenderer {
         yuvPlanes[i].position(0);
         yuvPlanes[i].limit(yuvPlanes[i].capacity());
       }
+      this.rotationDegree = rotationDegree;
       return this;
     }

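copyFrom(byte[]) now also takes the rotation, so capture-side code can attach the camera orientation to the bytes it hands over. A hypothetical call site (the class, method, and parameter names are made up):

    // Hypothetical call site for the updated copyFrom(): the captured YUV bytes
    // and the camera's current rotation travel together in the staging frame.
    final class CaptureCopySketch {
      static void onFrameCaptured(byte[] yuvData, int cameraRotationDegree,
                                  org.webrtc.VideoRenderer.I420Frame stagingFrame) {
        stagingFrame.copyFrom(yuvData, cameraRotationDegree);
      }
    }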
@@ -147,6 +158,8 @@ public class VideoRenderer {
     // |frame| might have pending rotation and implementation of Callbacks
     // should handle that by applying rotation during rendering.
     public void renderFrame(I420Frame frame);
+    // TODO(guoweis): Remove this once chrome code base is updated.
+    public boolean canApplyRotation();
   }

   // |this| either wraps a native (GUI) renderer or a client-supplied Callbacks
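The interface change above is the heart of the transitional contract: a renderer that rotates during drawing returns true and receives frames with rotationDegree set, while a legacy renderer returns false and keeps receiving pre-rotated copies (as the test renderers below do). Two hypothetical sketches; any other Callbacks members are omitted for brevity:

    final class RotationAwareCallbacksSketch {
      public void renderFrame(org.webrtc.VideoRenderer.I420Frame frame) {
        // frame.rotationDegree (0/90/180/270, clockwise) must be applied here,
        // e.g. by remapping texture coordinates as VideoRendererGui does above.
      }
      public boolean canApplyRotation() {
        return true;   // native side passes the frame through unrotated
      }
    }

    final class LegacyCallbacksSketch {
      public void renderFrame(org.webrtc.VideoRenderer.I420Frame frame) {
        // Frames arrive already rotated; rotationDegree needs no handling.
      }
      public boolean canApplyRotation() {
        return false;  // native side applies GetCopyWithRotationApplied() first
      }
    }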
@@ -138,6 +138,12 @@ public class PeerConnectionTest {
       --expectedFramesDelivered;
     }

+    // TODO(guoweis): Remove this once chrome code base is updated.
+    @Override
+    public boolean canApplyRotation() {
+      return false;
+    }
+
     public synchronized void expectSignalingChange(SignalingState newState) {
       expectedSignalingChanges.add(newState);
     }
@@ -438,6 +444,12 @@ public class PeerConnectionTest {
     public void renderFrame(VideoRenderer.I420Frame frame) {
       ++numFramesDelivered;
     }
+
+    // TODO(guoweis): Remove this once chrome code base is updated.
+    @Override
+    public boolean canApplyRotation() {
+      return false;
+    }
   }

   private static VideoRenderer createVideoRenderer(
@@ -101,6 +101,12 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
       doneRendering = new CountDownLatch(expectedFrames);
     }

+    // TODO(guoweis): Remove this once chrome code base is updated.
+    @Override
+    public boolean canApplyRotation() {
+      return false;
+    }
+
     @Override
     public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
       if (!renderFrameCalled) {