Android VideoRendererGui: Refactor GLES rendering

This CL should not change any visible behaviour. It does the following:
 * Extract GLES rendering into separate class GlRectDrawer. This class is also needed for future video encode with OES texture input.
 * Clean up current ScalingType -> display size calculation and introduce new SCALE_ASPECT_BALANCED (b/21735609) and remove unused SCALE_FILL.
 * Replace current mirror/rotation index juggling with android.opengl.Matrix operations instead.

Review URL: https://codereview.webrtc.org/1191243005

Cr-Commit-Position: refs/heads/master@{#9496}
This commit is contained in:
magjed
2015-06-24 03:59:37 -07:00
committed by Commit bot
parent 2c4c914819
commit 59a677ada2
4 changed files with 347 additions and 283 deletions

View File

@@ -0,0 +1,205 @@
/*
* libjingle
* Copyright 2015 Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.webrtc;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import org.webrtc.GlShader;
import org.webrtc.GlUtil;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.Arrays;
/**
* Helper class to draw a quad that covers the entire viewport. Rotation, mirror, and cropping is
* specified using a 4x4 texture coordinate transform matrix. The frame input can either be an OES
* texture or YUV textures in I420 format. The GL state must be preserved between draw calls, this
* is intentional to maximize performance. The function release() must be called manually to free
* the resources held by this object.
*/
public class GlRectDrawer {
  // Simple vertex shader, used for both YUV and OES. The texture coordinate is transformed by
  // |texMatrix|, which encodes rotation/mirror/cropping.
  private static final String VERTEX_SHADER_STRING =
      "varying vec2 interp_tc;\n"
      + "attribute vec4 in_pos;\n"
      + "attribute vec4 in_tc;\n"
      + "\n"
      + "uniform mat4 texMatrix;\n"
      + "\n"
      + "void main() {\n"
      + " gl_Position = in_pos;\n"
      + " interp_tc = (texMatrix * in_tc).xy;\n"
      + "}\n";

  // Fragment shader that samples the three I420 planes and converts YUV to RGB.
  private static final String YUV_FRAGMENT_SHADER_STRING =
      "precision mediump float;\n"
      + "varying vec2 interp_tc;\n"
      + "\n"
      + "uniform sampler2D y_tex;\n"
      + "uniform sampler2D u_tex;\n"
      + "uniform sampler2D v_tex;\n"
      + "\n"
      + "void main() {\n"
      // CSC according to http://www.fourcc.org/fccyvrgb.php
      + " float y = texture2D(y_tex, interp_tc).r;\n"
      + " float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
      + " float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
      + " gl_FragColor = vec4(y + 1.403 * v, "
      + " y - 0.344 * u - 0.714 * v, "
      + " y + 1.77 * u, 1);\n"
      + "}\n";

  // Fragment shader that samples an external OES texture (GL_OES_EGL_image_external extension).
  private static final String OES_FRAGMENT_SHADER_STRING =
      "#extension GL_OES_EGL_image_external : require\n"
      + "precision mediump float;\n"
      + "varying vec2 interp_tc;\n"
      + "\n"
      + "uniform samplerExternalOES oes_tex;\n"
      + "\n"
      + "void main() {\n"
      + " gl_FragColor = texture2D(oes_tex, interp_tc);\n"
      + "}\n";

  // Vertex coordinates in Normalized Device Coordinates: (-1, -1) is bottom-left, (1, 1) is
  // top-right. The quad covers the entire viewport.
  private static final FloatBuffer FULL_RECTANGLE_BUF =
      GlUtil.createFloatBuffer(new float[] {
        -1.0f, -1.0f, // Bottom left.
        1.0f, -1.0f, // Bottom right.
        -1.0f, 1.0f, // Top left.
        1.0f, 1.0f, // Top right.
      });

  // Texture coordinates: (0, 0) is bottom-left, (1, 1) is top-right.
  private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
      GlUtil.createFloatBuffer(new float[] {
        0.0f, 0.0f, // Bottom left.
        1.0f, 0.0f, // Bottom right.
        0.0f, 1.0f, // Top left.
        1.0f, 1.0f // Top right.
      });

  // Shaders are allocated lazily on first use and kept for the lifetime of this object.
  private GlShader oesShader;
  private GlShader yuvShader;
  // The shader the GL state is currently set up for; used to skip redundant program switches.
  private GlShader currentShader;
  // Last texture matrix uploaded to the GPU; used to skip redundant uniform uploads.
  private float[] currentTexMatrix;
  // Uniform location of "texMatrix" in the currently active shader.
  private int texMatrixLocation;

  // Upload the full-viewport quad geometry (positions and texture coordinates) for |shader|.
  private void initGeometry(GlShader shader) {
    shader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
    shader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
  }

  /**
   * Draw an OES texture frame with specified texture transformation matrix. Required resources are
   * allocated at the first call to this function. Must be called on a thread with a current GL
   * context that also owns |oesTextureId|.
   */
  public void drawOes(int oesTextureId, float[] texMatrix) {
    // Lazy allocation.
    if (oesShader == null) {
      oesShader = new GlShader(VERTEX_SHADER_STRING, OES_FRAGMENT_SHADER_STRING);
      oesShader.useProgram();
      initGeometry(oesShader);
    }
    // Set GLES state to OES.
    if (currentShader != oesShader) {
      currentShader = oesShader;
      oesShader.useProgram();
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
      // Invalidate the cached matrix so it is re-uploaded for the new program.
      currentTexMatrix = null;
      texMatrixLocation = oesShader.getUniformLocation("texMatrix");
    }
    // updateTexImage() may be called from another thread in another EGL context, so we need to
    // bind/unbind the texture in each draw call so that GLES understands it's a new texture.
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
    drawRectangle(texMatrix);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
  }

  /**
   * Draw a YUV frame with specified texture transformation matrix. Required resources are
   * allocated at the first call to this function.
   *
   * Note: |width| and |height| are not used by this implementation - the textures in |yuvTextures|
   * are expected to already contain the frame data.
   */
  public void drawYuv(int width, int height, int[] yuvTextures, float[] texMatrix) {
    // Lazy allocation.
    if (yuvShader == null) {
      yuvShader = new GlShader(VERTEX_SHADER_STRING, YUV_FRAGMENT_SHADER_STRING);
      yuvShader.useProgram();
      // Set texture samplers: Y/U/V planes are bound to texture units 0/1/2. These uniforms never
      // change, so they only need to be set once, right after shader creation.
      GLES20.glUniform1i(yuvShader.getUniformLocation("y_tex"), 0);
      GLES20.glUniform1i(yuvShader.getUniformLocation("u_tex"), 1);
      GLES20.glUniform1i(yuvShader.getUniformLocation("v_tex"), 2);
      GlUtil.checkNoGLES2Error("y/u/v_tex glGetUniformLocation");
      initGeometry(yuvShader);
    }
    // Set GLES state to YUV.
    if (currentShader != yuvShader) {
      currentShader = yuvShader;
      yuvShader.useProgram();
      // Invalidate the cached matrix so it is re-uploaded for the new program.
      currentTexMatrix = null;
      texMatrixLocation = yuvShader.getUniformLocation("texMatrix");
    }
    // Bind the textures.
    for (int i = 0; i < 3; ++i) {
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
    }
    drawRectangle(texMatrix);
  }

  // Upload |texMatrix| (if it changed since the last draw) and draw the quad.
  private void drawRectangle(float[] texMatrix) {
    // Try to avoid uploading the texture matrix if possible.
    if (!Arrays.equals(currentTexMatrix, texMatrix)) {
      currentTexMatrix = texMatrix.clone();
      // Copy the texture transformation matrix over.
      GLES20.glUniformMatrix4fv(texMatrixLocation, 1, false, texMatrix, 0);
    }
    // Draw quad.
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
  }

  /**
   * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
   * Must be called on the thread that owns the GL context the shaders were created on.
   */
  public void release() {
    if (oesShader != null) {
      oesShader.release();
      oesShader = null;
    }
    if (yuvShader != null) {
      yuvShader.release();
      yuvShader = null;
    }
  }
}

View File

@@ -30,6 +30,8 @@ package org.webrtc;
import android.opengl.GLES20;
import android.util.Log;
import java.nio.FloatBuffer;
// Helper class for handling OpenGL shaders and shader programs.
public class GlShader {
private static final String TAG = "GlShader";
@@ -88,6 +90,20 @@ public class GlShader {
return location;
}
/**
 * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
 * |buffer| with |dimension| number of components per vertex.
 *
 * @throws RuntimeException if the shader program has already been released.
 */
public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
  if (program == -1) {
    throw new RuntimeException("The program has been released");
  }
  final int attribLocation = getAttribLocation(label);
  // Mark the attribute as an array and point it at the tightly packed float data.
  GLES20.glEnableVertexAttribArray(attribLocation);
  GLES20.glVertexAttribPointer(attribLocation, dimension, GLES20.GL_FLOAT, false, 0, buffer);
  GlUtil.checkNoGLES2Error("setVertexAttribArray");
}
public int getUniformLocation(String label) {
if (program == -1) {
throw new RuntimeException("The program has been released");

View File

@@ -27,9 +27,6 @@
package org.webrtc;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
@@ -38,12 +35,14 @@ import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.annotation.SuppressLint;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
import org.webrtc.VideoRenderer.I420Frame;
@@ -70,65 +69,27 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private int screenHeight;
// List of yuv renderers.
private ArrayList<YuvImageRenderer> yuvImageRenderers;
private GlShader yuvShader;
private GlShader oesShader;
private GlRectDrawer drawer;
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
// This limits excessive cropping when adjusting display size.
private static float BALANCED_VISIBLE_FRACTION = 0.56f;
// Types of video scaling:
// SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
// maintaining the aspect ratio (black borders may be displayed).
// SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
// maintaining the aspect ratio. Some portion of the video frame may be
// clipped.
// SCALE_FILL - video frame is scaled to to fill the size of the view. Video
// aspect ratio is changed if necessary.
// SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
// possible of the view while maintaining aspect ratio, under the constraint that at least
// |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
public static enum ScalingType
{ SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_FILL };
{ SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
private static final int EGL14_SDK_VERSION =
android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
// Current SDK version.
private static final int CURRENT_SDK_VERSION =
android.os.Build.VERSION.SDK_INT;
private final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n" +
"attribute vec4 in_pos;\n" +
"attribute vec2 in_tc;\n" +
"\n" +
"void main() {\n" +
" gl_Position = in_pos;\n" +
" interp_tc = in_tc;\n" +
"}\n";
private final String YUV_FRAGMENT_SHADER_STRING =
"precision mediump float;\n" +
"varying vec2 interp_tc;\n" +
"\n" +
"uniform sampler2D y_tex;\n" +
"uniform sampler2D u_tex;\n" +
"uniform sampler2D v_tex;\n" +
"\n" +
"void main() {\n" +
// CSC according to http://www.fourcc.org/fccyvrgb.php
" float y = texture2D(y_tex, interp_tc).r;\n" +
" float u = texture2D(u_tex, interp_tc).r - 0.5;\n" +
" float v = texture2D(v_tex, interp_tc).r - 0.5;\n" +
" gl_FragColor = vec4(y + 1.403 * v, " +
" y - 0.344 * u - 0.714 * v, " +
" y + 1.77 * u, 1);\n" +
"}\n";
private static final String OES_FRAGMENT_SHADER_STRING =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 interp_tc;\n" +
"\n" +
"uniform samplerExternalOES oes_tex;\n" +
"\n" +
"void main() {\n" +
" gl_FragColor = texture2D(oes_tex, interp_tc);\n" +
"}\n";
private VideoRendererGui(GLSurfaceView surface) {
this.surface = surface;
// Create an OpenGL ES 2.0 context.
@@ -148,8 +109,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private static class YuvImageRenderer implements VideoRenderer.Callbacks {
private GLSurfaceView surface;
private int id;
private GlShader yuvShader;
private GlShader oesShader;
private int[] yuvTextures = { -1, -1, -1 };
private int oesTexture = -1;
@@ -182,14 +141,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// Time in ns spent in renderFrame() function - including copying frame
// data to rendering planes.
private long copyTimeNs;
// Texture vertices.
private float texLeft;
private float texRight;
private float texTop;
private float texBottom;
private FloatBuffer textureVertices;
// Texture UV coordinates.
private FloatBuffer textureCoords;
// The allowed view area in percentage of screen size.
private final Rect layoutInPercentage;
// The actual view area in pixels. It is a centered subrectangle of the rectangle defined by
// |layoutInPercentage|.
private final Rect displayLayout = new Rect();
// Cached texture transformation matrix, calculated from current layout parameters.
private final float[] texMatrix = new float[16];
// Flag if texture vertices or coordinates update is needed.
private boolean updateTextureProperties;
// Texture properties update lock.
@@ -205,23 +163,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// it rendered up right.
private int rotationDegree;
// Mapping array from original UV mapping to the rotated mapping. The number
// is the position where the original UV coordination should be mapped
// to. (0,1) is the top left coord. (2,3) is the bottom left. (4,5) is the
// top right. (6,7) is the bottom right.
private static int rotation_matrix[][] =
// 0 1 2 3 4 5 6 7 // arrays indices
{ {4, 5, 0, 1, 6, 7, 2, 3}, // 90 degree (clockwise)
{6, 7, 4, 5, 2, 3, 0, 1}, // 180 degree (clockwise)
{2, 3, 6, 7, 0, 1, 4, 5} }; // 270 degree (clockwise)
private static int mirror_matrix[][] =
// 0 1 2 3 4 5 6 7 // arrays indices
{ {4, 1, 6, 3, 0, 5, 2, 7}, // 0 degree mirror - u swap
{0, 5, 2, 7, 4, 1, 6, 3}, // 90 degree mirror - v swap
{4, 1, 6, 3, 0, 5, 2, 7}, // 180 degree mirror - u swap
{0, 5, 2, 7, 4, 1, 6, 3} }; // 270 degree mirror - v swap
private YuvImageRenderer(
GLSurfaceView surface, int id,
int x, int y, int width, int height,
@@ -232,40 +173,20 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
this.scalingType = scalingType;
this.mirror = mirror;
frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1);
// Create texture vertices.
texLeft = (x - 50) / 50.0f;
texTop = (50 - y) / 50.0f;
texRight = Math.min(1.0f, (x + width - 50) / 50.0f);
texBottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
float textureVeticesFloat[] = new float[] {
texLeft, texTop,
texLeft, texBottom,
texRight, texTop,
texRight, texBottom
};
textureVertices = GlUtil.createFloatBuffer(textureVeticesFloat);
// Create texture UV coordinates.
float textureCoordinatesFloat[] = new float[] {
0, 0, 0, 1, 1, 0, 1, 1
};
textureCoords = GlUtil.createFloatBuffer(textureCoordinatesFloat);
layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
updateTextureProperties = false;
rotationDegree = 0;
}
private void createTextures(GlShader yuvShader, GlShader oesShader) {
private void createTextures() {
Log.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
Thread.currentThread().getId());
this.yuvShader = yuvShader;
this.oesShader = oesShader;
// Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
GLES20.glGenTextures(3, yuvTextures, 0);
for (int i = 0; i < 3; i++) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
128, 128, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
@@ -278,144 +199,99 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
GlUtil.checkNoGLES2Error("y/u/v glGenTextures");
}
/**
 * Map a scaling type to the minimum fraction of the frame content that must remain visible:
 * 1.0 for FIT (never crop), 0.0 for FILL (crop freely), and |BALANCED_VISIBLE_FRACTION| for the
 * compromise mode.
 */
private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
  switch (scalingType) {
    case SCALE_ASPECT_BALANCED:
      return BALANCED_VISIBLE_FRACTION;
    case SCALE_ASPECT_FILL:
      // Unlimited cropping allowed.
      return 0.0f;
    case SCALE_ASPECT_FIT:
      // The whole frame must stay visible.
      return 1.0f;
    default:
      throw new IllegalArgumentException();
  }
}
/**
 * Calculate the largest display size (bounded by |maxDisplayWidth| x |maxDisplayHeight|) such
 * that at least |minVisibleFraction| of a frame with aspect ratio |videoAspectRatio| stays
 * visible when the frame is scaled to fill the display.
 */
private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
    int maxDisplayWidth, int maxDisplayHeight) {
  // A zero visible fraction means cropping is unrestricted, so simply fill the allowed area.
  if (minVisibleFraction == 0) {
    return new Point(maxDisplayWidth, maxDisplayHeight);
  }
  // Each dimension is capped both by the max display size and by how much cropping is allowed.
  final int cappedWidth = Math.min(maxDisplayWidth,
      (int) (maxDisplayHeight / minVisibleFraction * videoAspectRatio));
  final int cappedHeight = Math.min(maxDisplayHeight,
      (int) (maxDisplayWidth / minVisibleFraction / videoAspectRatio));
  return new Point(cappedWidth, cappedHeight);
}
private void checkAdjustTextureCoords() {
synchronized(updateTextureLock) {
if (!updateTextureProperties || scalingType == ScalingType.SCALE_FILL) {
if (!updateTextureProperties) {
return;
}
// Re - calculate texture vertices to preserve video aspect ratio.
float texRight = this.texRight;
float texLeft = this.texLeft;
float texTop = this.texTop;
float texBottom = this.texBottom;
float texOffsetU = 0;
float texOffsetV = 0;
float displayWidth = (texRight - texLeft) * screenWidth / 2;
float displayHeight = (texTop - texBottom) * screenHeight / 2;
Log.d(TAG, "ID: " + id + ". AdjustTextureCoords. Display: " + displayWidth +
" x " + displayHeight + ". Video: " + videoWidth +
" x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
if (displayWidth > 1 && displayHeight > 1 &&
videoWidth > 1 && videoHeight > 1) {
float displayAspectRatio = displayWidth / displayHeight;
// videoAspectRatio should be the one after rotation applied.
float videoAspectRatio = 0;
if (rotationDegree == 90 || rotationDegree == 270) {
videoAspectRatio = (float)videoHeight / videoWidth;
} else {
videoAspectRatio = (float)videoWidth / videoHeight;
}
if (scalingType == ScalingType.SCALE_ASPECT_FIT) {
// Need to re-adjust vertices width or height to match video AR.
if (displayAspectRatio > videoAspectRatio) {
float deltaX = (displayWidth - videoAspectRatio * displayHeight) /
instance.screenWidth;
texRight -= deltaX;
texLeft += deltaX;
} else {
float deltaY = (displayHeight - displayWidth / videoAspectRatio) /
instance.screenHeight;
texTop -= deltaY;
texBottom += deltaY;
}
}
if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
// Need to re-adjust UV coordinates to match display AR.
boolean adjustU = true;
float ratio = 0;
if (displayAspectRatio > videoAspectRatio) {
ratio = (1.0f - videoAspectRatio / displayAspectRatio) /
2.0f;
adjustU = (rotationDegree == 90 || rotationDegree == 270);
} else {
ratio = (1.0f - displayAspectRatio / videoAspectRatio) /
2.0f;
adjustU = (rotationDegree == 0 || rotationDegree == 180);
}
if (adjustU) {
texOffsetU = ratio;
} else {
texOffsetV = ratio;
}
}
Log.d(TAG, " Texture vertices: (" + texLeft + "," + texBottom +
") - (" + texRight + "," + texTop + ")");
float textureVeticesFloat[] = new float[] {
texLeft, texTop,
texLeft, texBottom,
texRight, texTop,
texRight, texBottom
};
textureVertices = GlUtil.createFloatBuffer(textureVeticesFloat);
float uLeft = texOffsetU;
float uRight = 1.0f - texOffsetU;
float vTop = texOffsetV;
float vBottom = 1.0f - texOffsetV;
Log.d(TAG, " Texture UV: (" + uLeft + "," + vTop +
") - (" + uRight + "," + vBottom + ")");
float textureCoordinatesFloat[] = new float[] {
uLeft, vTop, // top left
uLeft, vBottom, // bottom left
uRight, vTop, // top right
uRight, vBottom // bottom right
};
// Rotation needs to be done before mirroring.
textureCoordinatesFloat = applyRotation(textureCoordinatesFloat,
rotationDegree);
textureCoordinatesFloat = applyMirror(textureCoordinatesFloat,
mirror);
textureCoords = GlUtil.createFloatBuffer(textureCoordinatesFloat);
// Initialize to maximum allowed area. Round to integer coordinates inwards the layout
// bounding box (ceil left/top and floor right/bottom) to not break constraints.
displayLayout.set(
(screenWidth * layoutInPercentage.left + 99) / 100,
(screenHeight * layoutInPercentage.top + 99) / 100,
(screenWidth * layoutInPercentage.right) / 100,
(screenHeight * layoutInPercentage.bottom) / 100);
Log.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
+ displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
+ " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
final float videoAspectRatio = (rotationDegree % 180 == 0)
? (float) videoWidth / videoHeight
: (float) videoHeight / videoWidth;
// Adjust display size based on |scalingType|.
final float minVisibleFraction = convertScalingTypeToVisibleFraction(scalingType);
final Point displaySize = getDisplaySize(minVisibleFraction, videoAspectRatio,
displayLayout.width(), displayLayout.height());
displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
(displayLayout.height() - displaySize.y) / 2);
Log.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
+ displayLayout.height());
// The matrix stack is using post-multiplication, which means that matrix operations:
// A; B; C; will end up as A * B * C. When you apply this to a vertex, it will result in:
// v' = A * B * C * v, i.e. the last matrix operation is the first thing that affects the
// vertex. This is the opposite of what you might expect.
Matrix.setIdentityM(texMatrix, 0);
// Move coordinates back to [0,1]x[0,1].
Matrix.translateM(texMatrix, 0, 0.5f, 0.5f, 0.0f);
// Rotate frame clockwise in the XY-plane (around the Z-axis).
Matrix.rotateM(texMatrix, 0, -rotationDegree, 0, 0, 1);
// Scale one dimension until video and display size have same aspect ratio.
final float displayAspectRatio = (float) displayLayout.width() / displayLayout.height();
if (displayAspectRatio > videoAspectRatio) {
Matrix.scaleM(texMatrix, 0, 1, videoAspectRatio / displayAspectRatio, 1);
} else {
Matrix.scaleM(texMatrix, 0, displayAspectRatio / videoAspectRatio, 1, 1);
}
// TODO(magjed): We currently ignore the texture transform matrix from the SurfaceTexture.
// It contains a vertical flip that is hardcoded here instead.
Matrix.scaleM(texMatrix, 0, 1, -1, 1);
// Apply optional horizontal flip.
if (mirror) {
Matrix.scaleM(texMatrix, 0, -1, 1, 1);
}
// Center coordinates around origin.
Matrix.translateM(texMatrix, 0, -0.5f, -0.5f, 0.0f);
updateTextureProperties = false;
Log.d(TAG, " AdjustTextureCoords done");
}
}
// Apply a horizontal mirror to the UV coordinates by permuting them according to
// |mirror_matrix| for the current rotation. Returns the input unchanged when |mirror| is false.
private float[] applyMirror(float textureCoordinatesFloat[], boolean mirror) {
  if (!mirror) {
    return textureCoordinatesFloat;
  }
  // One mirror permutation per 90-degree rotation step.
  final int matrixIndex = rotationDegree / 90;
  return applyMatrixOperation(textureCoordinatesFloat, mirror_matrix[matrixIndex]);
}
// Rotate the UV coordinates clockwise by |rotationDegree| (a multiple of 90) by permuting them
// according to |rotation_matrix|. Returns the input unchanged for 0 degrees.
private float[] applyRotation(float textureCoordinatesFloat[], int rotationDegree) {
  if (rotationDegree == 0) {
    return textureCoordinatesFloat;
  }
  // rotation_matrix[0] is 90 degrees, [1] is 180, [2] is 270.
  final int matrixIndex = rotationDegree / 90 - 1;
  return applyMatrixOperation(textureCoordinatesFloat, rotation_matrix[matrixIndex]);
}
// Permute the coordinate array: element i of the input is moved to position
// |matrix_operation[i]| in the returned array. The input array is left untouched.
private float[] applyMatrixOperation(float textureCoordinatesFloat[], int matrix_operation[]) {
  final int count = textureCoordinatesFloat.length;
  final float permutedCoordinates[] = new float[count];
  for (int i = 0; i < count; i++) {
    permutedCoordinates[matrix_operation[i]] = textureCoordinatesFloat[i];
  }
  return permutedCoordinates;
}
private void draw() {
private void draw(GlRectDrawer drawer) {
if (!seenFrame) {
// No frame received yet - nothing to render.
return;
}
long now = System.nanoTime();
GlShader currentShader;
// OpenGL defaults to lower left origin.
GLES20.glViewport(displayLayout.left, screenHeight - displayLayout.bottom,
displayLayout.width(), displayLayout.height());
I420Frame frameFromQueue;
synchronized (frameToRenderQueue) {
@@ -428,33 +304,22 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
startTimeNs = now;
}
if (rendererType == RendererType.RENDERER_YUV) {
// YUV textures rendering.
yuvShader.useProgram();
currentShader = yuvShader;
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
if (frameFromQueue != null) {
int w = (i == 0) ?
frameFromQueue.width : frameFromQueue.width / 2;
int h = (i == 0) ?
frameFromQueue.height : frameFromQueue.height / 2;
if (frameFromQueue != null) {
if (frameFromQueue.yuvFrame) {
// YUV textures rendering. Upload YUV data as textures.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
int w = (i == 0) ? frameFromQueue.width : frameFromQueue.width / 2;
int h = (i == 0) ? frameFromQueue.height : frameFromQueue.height / 2;
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
frameFromQueue.yuvPlanes[i]);
}
}
GLES20.glUniform1i(yuvShader.getUniformLocation("y_tex"), 0);
GLES20.glUniform1i(yuvShader.getUniformLocation("u_tex"), 1);
GLES20.glUniform1i(yuvShader.getUniformLocation("v_tex"), 2);
} else {
// External texture rendering.
oesShader.useProgram();
currentShader = oesShader;
if (frameFromQueue != null) {
} else {
// External texture rendering. Copy texture id and update texture image to latest.
// TODO(magjed): We should not make an unmanaged copy of texture id. Also, this is not
// the best place to call updateTexImage.
oesTexture = frameFromQueue.textureId;
if (frameFromQueue.textureObject instanceof SurfaceTexture) {
SurfaceTexture surfaceTexture =
@@ -462,31 +327,16 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
surfaceTexture.updateTexImage();
}
}
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTexture);
}
if (frameFromQueue != null) {
frameToRenderQueue.poll();
}
}
int posLocation = currentShader.getAttribLocation("in_pos");
GLES20.glEnableVertexAttribArray(posLocation);
GLES20.glVertexAttribPointer(
posLocation, 2, GLES20.GL_FLOAT, false, 0, textureVertices);
int texLocation = currentShader.getAttribLocation("in_tc");
GLES20.glEnableVertexAttribArray(texLocation);
GLES20.glVertexAttribPointer(
texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glDisableVertexAttribArray(posLocation);
GLES20.glDisableVertexAttribArray(texLocation);
GlUtil.checkNoGLES2Error("draw done");
if (rendererType == RendererType.RENDERER_YUV) {
drawer.drawYuv(videoWidth, videoHeight, yuvTextures, texMatrix);
} else {
drawer.drawOes(oesTexture, texMatrix);
}
if (frameFromQueue != null) {
framesRendered++;
@@ -526,23 +376,17 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public void setPosition(int x, int y, int width, int height,
ScalingType scalingType, boolean mirror) {
float texLeft = (x - 50) / 50.0f;
float texTop = (50 - y) / 50.0f;
float texRight = Math.min(1.0f, (x + width - 50) / 50.0f);
float texBottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
final Rect layoutInPercentage =
new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
synchronized(updateTextureLock) {
if (texLeft == this.texLeft && texTop == this.texTop && texRight == this.texRight &&
texBottom == this.texBottom && scalingType == this.scalingType &&
mirror == this.mirror) {
if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
&& mirror == this.mirror) {
return;
}
Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
") " + width + " x " + height + ". Scaling: " + scalingType +
". Mirror: " + mirror);
this.texLeft = texLeft;
this.texTop = texTop;
this.texRight = texRight;
this.texBottom = texBottom;
this.layoutInPercentage.set(layoutInPercentage);
this.scalingType = scalingType;
this.mirror = mirror;
updateTextureProperties = true;
@@ -694,8 +538,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
final CountDownLatch countDownLatch = new CountDownLatch(1);
instance.surface.queueEvent(new Runnable() {
public void run() {
yuvImageRenderer.createTextures(
instance.yuvShader, instance.oesShader);
yuvImageRenderer.createTextures();
yuvImageRenderer.setScreenSize(
instance.screenWidth, instance.screenHeight);
countDownLatch.countDown();
@@ -754,14 +597,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
Log.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
}
// Create YUV and OES shaders.
yuvShader = new GlShader(VERTEX_SHADER_STRING, YUV_FRAGMENT_SHADER_STRING);
oesShader = new GlShader(VERTEX_SHADER_STRING, OES_FRAGMENT_SHADER_STRING);
// Create drawer for YUV/OES frames.
drawer = new GlRectDrawer();
synchronized (yuvImageRenderers) {
// Create textures for all images.
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
yuvImageRenderer.createTextures(yuvShader, oesShader);
yuvImageRenderer.createTextures();
}
onSurfaceCreatedCalled = true;
}
@@ -780,7 +622,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
width + " x " + height + " ");
screenWidth = width;
screenHeight = height;
GLES20.glViewport(0, 0, width, height);
synchronized (yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
@@ -790,10 +631,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
@Override
public void onDrawFrame(GL10 unused) {
GLES20.glViewport(0, 0, screenWidth, screenHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
synchronized (yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
yuvImageRenderer.draw();
yuvImageRenderer.draw(drawer);
}
}
}

View File

@@ -141,6 +141,7 @@
# and include it here.
'android_java_files': [
'app/webrtc/java/android/org/webrtc/EglBase.java',
'app/webrtc/java/android/org/webrtc/GlRectDrawer.java',
'app/webrtc/java/android/org/webrtc/GlShader.java',
'app/webrtc/java/android/org/webrtc/GlUtil.java',
'app/webrtc/java/android/org/webrtc/VideoRendererGui.java',