Add OpenGL Android video renderer which can display multiple yuv420 images in a single GLSurfaceView.
Start using new video renderer in AppRTC demo app.

BUG=
R=fischman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/15589004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6360 4adac7df-926f-26a2-2b94-8c16560cd09d
glaznev@webrtc.org
2014-06-06 21:57:46 +00:00
parent b8f582591f
commit c3288c130d
8 changed files with 532 additions and 487 deletions
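At a glance, the new rendering wiring this commit introduces boils down to the sketch below. It is a hedged reconstruction assuming only the calls visible in this diff (VideoRendererGui.setView()/create() and VideoTrack.addRenderer()); the names activity, displaySize, localTrack and remoteTrack are placeholders, not identifiers from the commit.

// Sketch of the new renderer wiring, assuming only the API visible in
// this diff; `activity`, `displaySize`, `localTrack` and `remoteTrack`
// are placeholders.
AppRTCGLView view = new AppRTCGLView(activity, displaySize);
VideoRendererGui.setView(view);
// Both renderers share the single GLSurfaceView; the arguments appear to
// be percentages of the view (see the onCreate() hunk below).
VideoRenderer.Callbacks remoteRender = VideoRendererGui.create(0, 0, 100, 100);
VideoRenderer.Callbacks localRender = VideoRendererGui.create(70, 5, 25, 25);
remoteTrack.addRenderer(new VideoRenderer(remoteRender));
localTrack.addRenderer(new VideoRenderer(localRender));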

View File: AppRTCClient.java

@@ -45,8 +45,6 @@ import java.net.URLConnection;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Negotiates signaling for chatting with apprtc.appspot.com "rooms".

View File: AppRTCDemoActivity.java

@@ -50,7 +50,6 @@ import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.DataChannel;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnection;
@@ -61,11 +60,10 @@ import org.webrtc.StatsObserver;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoRenderer.I420Frame;
import org.webrtc.VideoRendererGui;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
@@ -88,7 +86,9 @@ public class AppRTCDemoActivity extends Activity
private final SDPObserver sdpObserver = new SDPObserver();
private final GAEChannelClient.MessageHandler gaeHandler = new GAEHandler();
private AppRTCClient appRtcClient = new AppRTCClient(this, gaeHandler, this);
-  private VideoStreamsView vsv;
+  private AppRTCGLView vsv;
+  private VideoRenderer.Callbacks localRender;
+  private VideoRenderer.Callbacks remoteRender;
private Toast logToast;
private final LayoutParams hudLayout =
new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
@@ -111,7 +111,12 @@ public class AppRTCDemoActivity extends Activity
Point displaySize = new Point();
getWindowManager().getDefaultDisplay().getRealSize(displaySize);
-    vsv = new VideoStreamsView(this, displaySize);
+    vsv = new AppRTCGLView(this, displaySize);
+    VideoRendererGui.setView(vsv);
+    remoteRender = VideoRendererGui.create(0, 0, 100, 100);
+    localRender = VideoRendererGui.create(70, 5, 25, 25);
vsv.setOnClickListener(new View.OnClickListener() {
@Override public void onClick(View v) {
toggleHUD();
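The two create() calls above suggest the position/size arguments are percentages of the parent view rather than pixels: (0, 0, 100, 100) makes the remote stream fill the surface, while (70, 5, 25, 25) draws the local preview as a quarter-size thumbnail near the top-right corner. Assuming that reading, a hypothetical side-by-side layout (not in this commit) would be:

// Hypothetical: two half-width panes in the same GLSurfaceView, assuming
// percentage-based coordinates for VideoRendererGui.create().
VideoRenderer.Callbacks leftPane = VideoRendererGui.create(0, 0, 50, 100);
VideoRenderer.Callbacks rightPane = VideoRendererGui.create(50, 0, 50, 100);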
@@ -307,8 +312,7 @@ public class AppRTCDemoActivity extends Activity
capturer, appRtcClient.videoConstraints());
VideoTrack videoTrack =
factory.createVideoTrack("ARDAMSv0", videoSource);
-      videoTrack.addRenderer(new VideoRenderer(new VideoCallbacks(
-          vsv, VideoStreamsView.Endpoint.LOCAL)));
+      videoTrack.addRenderer(new VideoRenderer(localRender));
lMS.addTrack(videoTrack);
}
if (appRtcClient.audioConstraints() != null) {
@@ -471,8 +475,8 @@ public class AppRTCDemoActivity extends Activity
stream.videoTracks.size() <= 1,
"Weird-looking stream: " + stream);
if (stream.videoTracks.size() == 1) {
-          stream.videoTracks.get(0).addRenderer(new VideoRenderer(
-              new VideoCallbacks(vsv, VideoStreamsView.Endpoint.REMOTE)));
+          stream.videoTracks.get(0).addRenderer(
+              new VideoRenderer(remoteRender));
}
}
});
@@ -662,30 +666,4 @@ public class AppRTCDemoActivity extends Activity
}
}
-  // Implementation detail: bridge the VideoRenderer.Callbacks interface to
-  // the VideoStreamsView implementation.
-  private class VideoCallbacks implements VideoRenderer.Callbacks {
-    private final VideoStreamsView view;
-    private final VideoStreamsView.Endpoint stream;
-    public VideoCallbacks(
-        VideoStreamsView view, VideoStreamsView.Endpoint stream) {
-      this.view = view;
-      this.stream = stream;
-    }
-    @Override
-    public void setSize(final int width, final int height) {
-      view.queueEvent(new Runnable() {
-        public void run() {
-          view.setSize(stream, width, height);
-        }
-      });
-    }
-    @Override
-    public void renderFrame(I420Frame frame) {
-      view.queueFrame(stream, frame);
-    }
-  }
}

View File: AppRTCGLView.java

@@ -0,0 +1,58 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;

import android.content.Context;
import android.graphics.Point;
import android.opengl.GLSurfaceView;

public class AppRTCGLView extends GLSurfaceView {
  private Point screenDimensions;

  public AppRTCGLView(Context c, Point screenDimensions) {
    super(c);
    this.screenDimensions = screenDimensions;
  }

  public void updateDisplaySize(Point screenDimensions) {
    this.screenDimensions = screenDimensions;
  }

  @Override
  protected void onMeasure(int unusedX, int unusedY) {
    // Go big or go home!
    setMeasuredDimension(screenDimensions.x, screenDimensions.y);
  }

  @Override
  protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    setSystemUiVisibility(SYSTEM_UI_FLAG_HIDE_NAVIGATION |
        SYSTEM_UI_FLAG_FULLSCREEN | SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
  }
}
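Nothing in this diff calls updateDisplaySize(); presumably the activity is expected to invoke it when the display geometry changes (e.g. on rotation). A hypothetical caller, with vsv being the activity's AppRTCGLView field:

// Hypothetical (not part of this commit): keep the forced dimensions in
// sync with the display and trigger a re-measure.
@Override
public void onConfigurationChanged(Configuration newConfig) {
  super.onConfigurationChanged(newConfig);
  Point displaySize = new Point();
  getWindowManager().getDefaultDisplay().getRealSize(displaySize);
  vsv.updateDisplaySize(displaySize);
  vsv.requestLayout();  // forces onMeasure() with the new dimensions
}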

View File: FramePool.java

@@ -1,104 +0,0 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;

import org.webrtc.VideoRenderer.I420Frame;

import java.util.HashMap;
import java.util.LinkedList;

/**
 * This class acts as an allocation pool meant to minimize GC churn caused by
 * frame allocation & disposal. The public API comprises just two methods:
 * takeFrame(), which returns a pooled (or newly allocated) frame matching the
 * source frame's dimensions, and returnFrame(), which returns frame ownership
 * to the pool for use by a later call to takeFrame().
 *
 * This class is thread-safe; calls to takeFrame() and returnFrame() are
 * allowed to happen on any thread.
 */
class FramePool {
  // Maps each summary code (see summarizeFrameDimensions()) to a list of
  // frames of that description.
  private final HashMap<Long, LinkedList<I420Frame>> availableFrames =
      new HashMap<Long, LinkedList<I420Frame>>();
  // Every dimension (e.g. width, height, stride) of a frame must be less
  // than this value.
  private static final long MAX_DIMENSION = 4096;

  public I420Frame takeFrame(I420Frame source) {
    long desc = summarizeFrameDimensions(source);
    I420Frame dst = null;
    synchronized (availableFrames) {
      LinkedList<I420Frame> frames = availableFrames.get(desc);
      if (frames == null) {
        frames = new LinkedList<I420Frame>();
        availableFrames.put(desc, frames);
      }
      if (!frames.isEmpty()) {
        dst = frames.pop();
      } else {
        dst = new I420Frame(
            source.width, source.height, source.yuvStrides, null);
      }
    }
    return dst;
  }

  public void returnFrame(I420Frame frame) {
    long desc = summarizeFrameDimensions(frame);
    synchronized (availableFrames) {
      LinkedList<I420Frame> frames = availableFrames.get(desc);
      if (frames == null) {
        throw new IllegalArgumentException("Unexpected frame dimensions");
      }
      frames.add(frame);
    }
  }

  /** Validate that |frame| can be managed by the pool. */
  public static boolean validateDimensions(I420Frame frame) {
    return frame.width < MAX_DIMENSION && frame.height < MAX_DIMENSION &&
        frame.yuvStrides[0] < MAX_DIMENSION &&
        frame.yuvStrides[1] < MAX_DIMENSION &&
        frame.yuvStrides[2] < MAX_DIMENSION;
  }

  // Return a code summarizing the dimensions of |frame|. Two frames that
  // return the same summary are guaranteed to be able to store each others'
  // contents. Used like Object.hashCode(), but we need all the bits of a
  // long to do a good job, and hashCode() returns int, so we do this
  // instead.
  private static long summarizeFrameDimensions(I420Frame frame) {
    long ret = frame.width;
    ret = ret * MAX_DIMENSION + frame.height;
    ret = ret * MAX_DIMENSION + frame.yuvStrides[0];
    ret = ret * MAX_DIMENSION + frame.yuvStrides[1];
    ret = ret * MAX_DIMENSION + frame.yuvStrides[2];
    return ret;
  }
}
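For reference, the intended call pattern (exercised by VideoStreamsView.queueFrame() in the next file) and the arithmetic behind the summary code:

// Producer side: borrow a correctly-sized frame, copy into it, queue it.
I420Frame copy = framePool.takeFrame(source).copyFrom(source);
// ... consumer uploads `copy` to GL textures on the render thread ...
framePool.returnFrame(copy);  // back to the pool for the next takeFrame()

// summarizeFrameDimensions() is base-4096 positional encoding:
//   desc = (((w * D + h) * D + s0) * D + s1) * D + s2, with D = 4096 = 2^12.
// Five 12-bit "digits" need 60 bits, which is why the pool keys on a long
// rather than the 32-bit Object.hashCode().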

View File: VideoStreamsView.java

@@ -1,344 +0,0 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;

import android.content.Context;
import android.graphics.Point;
import android.graphics.Rect;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;

import org.webrtc.VideoRenderer.I420Frame;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.EnumMap;
import java.util.EnumSet;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * A GLSurfaceView{,.Renderer} that efficiently renders YUV frames from local
 * & remote VideoTracks using the GPU for CSC (color space conversion).
 * Clients will want to call the constructor, setSize() and updateFrame() as
 * appropriate, but none of the other public methods of this class are of
 * interest to clients (only to system classes).
 */
public class VideoStreamsView
    extends GLSurfaceView
    implements GLSurfaceView.Renderer {

  /** Identify which of the two video streams is being addressed. */
  public static enum Endpoint { LOCAL, REMOTE };

  private final static String TAG = "VideoStreamsView";
  private EnumMap<Endpoint, Rect> rects =
      new EnumMap<Endpoint, Rect>(Endpoint.class);
  private Point screenDimensions;
  // [0] are local Y,U,V, [1] are remote Y,U,V.
  private int[][] yuvTextures = { {-1, -1, -1}, {-1, -1, -1} };
  private EnumSet<Endpoint> seenFrameInDirection =
      EnumSet.noneOf(Endpoint.class);
  private int posLocation = -1;
  private long lastFPSLogTime = System.nanoTime();
  private long numFramesSinceLastLog = 0;
  private FramePool framePool = new FramePool();
  // Accessed on multiple threads! Must be synchronized.
  private EnumMap<Endpoint, I420Frame> framesToRender =
      new EnumMap<Endpoint, I420Frame>(Endpoint.class);

  public VideoStreamsView(Context c, Point screenDimensions) {
    super(c);
    this.screenDimensions = screenDimensions;
    setPreserveEGLContextOnPause(true);
    setEGLContextClientVersion(2);
    setRenderer(this);
    setRenderMode(RENDERMODE_WHEN_DIRTY);
  }

  public void updateDisplaySize(Point screenDimensions) {
    this.screenDimensions = screenDimensions;
  }

  /** Queue |frame| to be uploaded. */
  public void queueFrame(final Endpoint stream, I420Frame frame) {
    // Paying for the copy of the YUV data here allows CSC and painting time
    // to get spent on the render thread instead of the UI thread.
    abortUnless(FramePool.validateDimensions(frame), "Frame too large!");
    final I420Frame frameCopy = framePool.takeFrame(frame).copyFrom(frame);
    boolean needToScheduleRender;
    synchronized (framesToRender) {
      // A new render needs to be scheduled (via updateFrames()) iff there
      // isn't already a render scheduled, which is true iff framesToRender
      // is empty.
      needToScheduleRender = framesToRender.isEmpty();
      I420Frame frameToDrop = framesToRender.put(stream, frameCopy);
      if (frameToDrop != null) {
        framePool.returnFrame(frameToDrop);
      }
    }
    if (needToScheduleRender) {
      queueEvent(new Runnable() {
        public void run() {
          updateFrames();
        }
      });
    }
  }

  // Upload the planes from |framesToRender| to the textures owned by this
  // View.
  private void updateFrames() {
    I420Frame localFrame = null;
    I420Frame remoteFrame = null;
    synchronized (framesToRender) {
      localFrame = framesToRender.remove(Endpoint.LOCAL);
      remoteFrame = framesToRender.remove(Endpoint.REMOTE);
    }
    if (localFrame != null) {
      seenFrameInDirection.add(Endpoint.LOCAL);
      texImage2D(localFrame, yuvTextures[0]);
      framePool.returnFrame(localFrame);
    }
    if (remoteFrame != null) {
      seenFrameInDirection.add(Endpoint.REMOTE);
      texImage2D(remoteFrame, yuvTextures[1]);
      framePool.returnFrame(remoteFrame);
    }
    abortUnless(localFrame != null || remoteFrame != null,
        "Nothing to render!");
    requestRender();
  }

  /** Inform this View of the dimensions of frames coming from |stream|. */
  public void setSize(Endpoint stream, int width, int height) {
    // Generate 3 texture ids for Y/U/V and place them into |textures|,
    // allocating enough storage for |width|x|height| pixels.
    int[] textures = yuvTextures[stream == Endpoint.LOCAL ? 0 : 1];
    GLES20.glGenTextures(3, textures, 0);
    for (int i = 0; i < 3; ++i) {
      int w = i == 0 ? width : width / 2;
      int h = i == 0 ? height : height / 2;
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h,
          0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
      GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
          GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
      GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
          GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
      GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
          GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
      GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
          GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    }
    checkNoGLES2Error();
  }

  @Override
  protected void onMeasure(int unusedX, int unusedY) {
    // Go big or go home!
    setMeasuredDimension(screenDimensions.x, screenDimensions.y);
  }

  @Override
  public void onSurfaceChanged(GL10 unused, int width, int height) {
    GLES20.glViewport(0, 0, width, height);
    checkNoGLES2Error();
  }

  @Override
  public void onDrawFrame(GL10 unused) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    if (seenFrameInDirection.contains(Endpoint.REMOTE)) {
      drawRectangle(yuvTextures[1], remoteVertices);
    }
    if (seenFrameInDirection.contains(Endpoint.LOCAL)) {
      drawRectangle(yuvTextures[0], localVertices);
    }
    ++numFramesSinceLastLog;
    long now = System.nanoTime();
    if (lastFPSLogTime == -1 || now - lastFPSLogTime > 1e9) {
      double fps = numFramesSinceLastLog / ((now - lastFPSLogTime) / 1e9);
      Log.d(TAG, "Rendered FPS: " + fps);
      lastFPSLogTime = now;
      numFramesSinceLastLog = 1;
    }
    checkNoGLES2Error();
  }

  @Override
  public void onSurfaceCreated(GL10 unused, EGLConfig config) {
    int program = GLES20.glCreateProgram();
    addShaderTo(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_STRING, program);
    addShaderTo(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_STRING, program);

    GLES20.glLinkProgram(program);
    int[] result = new int[] { GLES20.GL_FALSE };
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, result, 0);
    abortUnless(result[0] == GLES20.GL_TRUE,
        GLES20.glGetProgramInfoLog(program));
    GLES20.glUseProgram(program);

    GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "y_tex"), 0);
    GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "u_tex"), 1);
    GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "v_tex"), 2);

    // Actually set in drawRectangle(), but queried only once here.
    posLocation = GLES20.glGetAttribLocation(program, "in_pos");

    int tcLocation = GLES20.glGetAttribLocation(program, "in_tc");
    GLES20.glEnableVertexAttribArray(tcLocation);
    GLES20.glVertexAttribPointer(
        tcLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);

    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    checkNoGLES2Error();
  }

  @Override
  protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    setSystemUiVisibility(SYSTEM_UI_FLAG_HIDE_NAVIGATION |
        SYSTEM_UI_FLAG_FULLSCREEN | SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
  }

  // Wrap a float[] in a direct FloatBuffer using native byte order.
  private static FloatBuffer directNativeFloatBuffer(float[] array) {
    FloatBuffer buffer = ByteBuffer.allocateDirect(array.length * 4).order(
        ByteOrder.nativeOrder()).asFloatBuffer();
    buffer.put(array);
    buffer.flip();
    return buffer;
  }

  // Upload the YUV planes from |frame| to |textures|.
  private void texImage2D(I420Frame frame, int[] textures) {
    for (int i = 0; i < 3; ++i) {
      ByteBuffer plane = frame.yuvPlanes[i];
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
      int w = i == 0 ? frame.width : frame.width / 2;
      int h = i == 0 ? frame.height : frame.height / 2;
      abortUnless(w == frame.yuvStrides[i], frame.yuvStrides[i] + "!=" + w);
      GLES20.glTexImage2D(
          GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0,
          GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, plane);
    }
    checkNoGLES2Error();
  }

  // Draw |textures| using |vertices| (X,Y coordinates).
  private void drawRectangle(int[] textures, FloatBuffer vertices) {
    for (int i = 0; i < 3; ++i) {
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
    }

    GLES20.glVertexAttribPointer(
        posLocation, 2, GLES20.GL_FLOAT, false, 0, vertices);
    GLES20.glEnableVertexAttribArray(posLocation);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkNoGLES2Error();
  }

  // Compile & attach a |type| shader specified by |source| to |program|.
  private static void addShaderTo(
      int type, String source, int program) {
    int[] result = new int[] { GLES20.GL_FALSE };
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, source);
    GLES20.glCompileShader(shader);
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
    abortUnless(result[0] == GLES20.GL_TRUE,
        GLES20.glGetShaderInfoLog(shader) + ", source: " + source);
    GLES20.glAttachShader(program, shader);
    GLES20.glDeleteShader(shader);
    checkNoGLES2Error();
  }

  // Poor-man's assert(): die with |msg| unless |condition| is true.
  private static void abortUnless(boolean condition, String msg) {
    if (!condition) {
      throw new RuntimeException(msg);
    }
  }

  // Assert that no OpenGL ES 2.0 error has been raised.
  private static void checkNoGLES2Error() {
    int error = GLES20.glGetError();
    abortUnless(error == GLES20.GL_NO_ERROR, "GLES20 error: " + error);
  }

  // Remote image should span the full screen.
  private static final FloatBuffer remoteVertices = directNativeFloatBuffer(
      new float[] { -1, 1, -1, -1, 1, 1, 1, -1 });

  // Local image should be thumbnailish.
  private static final FloatBuffer localVertices = directNativeFloatBuffer(
      new float[] { 0.6f, 0.9f, 0.6f, 0.6f, 0.9f, 0.9f, 0.9f, 0.6f });

  // Texture coordinates mapping the entire texture.
  private static final FloatBuffer textureCoords = directNativeFloatBuffer(
      new float[] { 0, 0, 0, 1, 1, 0, 1, 1 });

  // Pass-through vertex shader.
  private static final String VERTEX_SHADER_STRING =
      "varying vec2 interp_tc;\n" +
      "\n" +
      "attribute vec4 in_pos;\n" +
      "attribute vec2 in_tc;\n" +
      "\n" +
      "void main() {\n" +
      "  gl_Position = in_pos;\n" +
      "  interp_tc = in_tc;\n" +
      "}\n";

  // YUV-to-RGB pixel shader: loads a sample from each plane and passes it
  // through the conversion matrix.
  private static final String FRAGMENT_SHADER_STRING =
      "precision mediump float;\n" +
      "varying vec2 interp_tc;\n" +
      "\n" +
      "uniform sampler2D y_tex;\n" +
      "uniform sampler2D u_tex;\n" +
      "uniform sampler2D v_tex;\n" +
      "\n" +
      "void main() {\n" +
      "  float y = texture2D(y_tex, interp_tc).r;\n" +
      "  float u = texture2D(u_tex, interp_tc).r - .5;\n" +
      "  float v = texture2D(v_tex, interp_tc).r - .5;\n" +
      // CSC according to http://www.fourcc.org/fccyvrgb.php
      "  gl_FragColor = vec4(y + 1.403 * v, " +
      "                      y - 0.344 * u - 0.714 * v, " +
      "                      y + 1.77 * u, 1);\n" +
      "}\n";
}
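The fragment shader's constants appear to be the classic full-range BT.601 conversion listed on the fourcc.org page cited in the comment; with u and v already re-centered around zero, the shader computes:

\begin{aligned}
R &= Y + 1.403\,V \\
G &= Y - 0.344\,U - 0.714\,V \\
B &= Y + 1.77\,U
\end{aligned}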