Add an OpenGL Android video renderer which can display multiple
yuv420 images in a single GLSurfaceView, and start using the new
video renderer in the AppRTC demo app.

BUG=
R=fischman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/15589004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6360 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
parent b8f582591f
commit c3288c130d

Changed paths:
  talk/app/webrtc/java/android/org/webrtc
  talk/examples/android/src/org/appspot/apprtc
  talk/libjingle.gyp
  talk/libjingle_examples.gyp
talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java (new file, 459 lines)
@@ -0,0 +1,459 @@
/*
 * libjingle
 * Copyright 2014, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.webrtc;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;

import org.webrtc.VideoRenderer.I420Frame;

/**
 * Efficiently renders YUV frames using the GPU for CSC.
 * Clients should first call setView() to pass the GLSurfaceView, and then,
 * for each video stream, create either a VideoRenderer instance using the
 * createGui() call or a VideoRenderer.Callbacks instance using the create()
 * call. Only one instance of the class can be created.
 */
public class VideoRendererGui implements GLSurfaceView.Renderer {
  private static VideoRendererGui instance = null;
  private static final String TAG = "VideoRendererGui";
  private GLSurfaceView surface;
  // Indicates if SurfaceView.Renderer.onSurfaceCreated was called.
  // If true then for every newly created yuv image renderer createTextures()
  // should be called. The variable is accessed on multiple threads and
  // all accesses are synchronized on yuvImageRenderers' object lock.
  private boolean onSurfaceCreatedCalled;
  // List of yuv renderers.
  private ArrayList<YuvImageRenderer> yuvImageRenderers;
  private int program;

  private final String VERTEX_SHADER_STRING =
      "varying vec2 interp_tc;\n" +
      "attribute vec4 in_pos;\n" +
      "attribute vec2 in_tc;\n" +
      "\n" +
      "void main() {\n" +
      "  gl_Position = in_pos;\n" +
      "  interp_tc = in_tc;\n" +
      "}\n";

  private final String FRAGMENT_SHADER_STRING =
      "precision mediump float;\n" +
      "varying vec2 interp_tc;\n" +
      "\n" +
      "uniform sampler2D y_tex;\n" +
      "uniform sampler2D u_tex;\n" +
      "uniform sampler2D v_tex;\n" +
      "\n" +
      "void main() {\n" +
      // CSC according to http://www.fourcc.org/fccyvrgb.php
      "  float y = texture2D(y_tex, interp_tc).r;\n" +
      "  float u = texture2D(u_tex, interp_tc).r - 0.5;\n" +
      "  float v = texture2D(v_tex, interp_tc).r - 0.5;\n" +
      "  gl_FragColor = vec4(y + 1.403 * v, " +
      "                      y - 0.344 * u - 0.714 * v, " +
      "                      y + 1.77 * u, 1);\n" +
      "}\n";

  private VideoRendererGui(GLSurfaceView surface) {
    this.surface = surface;
    // Create an OpenGL ES 2.0 context.
    surface.setPreserveEGLContextOnPause(true);
    surface.setEGLContextClientVersion(2);
    surface.setRenderer(this);
    surface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

    yuvImageRenderers = new ArrayList<YuvImageRenderer>();
  }

  // Poor-man's assert(): die with |msg| unless |condition| is true.
  private static void abortUnless(boolean condition, String msg) {
    if (!condition) {
      throw new RuntimeException(msg);
    }
  }

  // Assert that no OpenGL ES 2.0 error has been raised.
  private static void checkNoGLES2Error() {
    int error = GLES20.glGetError();
    abortUnless(error == GLES20.GL_NO_ERROR, "GLES20 error: " + error);
  }

  // Wrap a float[] in a direct FloatBuffer using native byte order.
  private static FloatBuffer directNativeFloatBuffer(float[] array) {
    FloatBuffer buffer = ByteBuffer.allocateDirect(array.length * 4).order(
        ByteOrder.nativeOrder()).asFloatBuffer();
    buffer.put(array);
    buffer.flip();
    return buffer;
  }

  // Compile & attach a |type| shader specified by |source| to |program|.
  private static void addShaderTo(
      int type, String source, int program) {
    int[] result = new int[] {
      GLES20.GL_FALSE
    };
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, source);
    GLES20.glCompileShader(shader);
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
    abortUnless(result[0] == GLES20.GL_TRUE,
        GLES20.glGetShaderInfoLog(shader) + ", source: " + source);
    GLES20.glAttachShader(program, shader);
    GLES20.glDeleteShader(shader);

    checkNoGLES2Error();
  }

  /**
   * Class used to display a stream of YUV420 frames at a particular location
   * on the screen. New video frames are sent for display using the
   * renderFrame() call.
   */
  private static class YuvImageRenderer implements VideoRenderer.Callbacks {
    private GLSurfaceView surface;
    private int program;
    private FloatBuffer textureVertices;
    private int[] yuvTextures = { -1, -1, -1 };

    // Render frame queue - accessed by two threads. The renderFrame() call
    // does an offer (writing the I420Frame to render) and early-returns
    // (recording a dropped frame) if that queue is full. The draw() call does
    // a peek(), copies the frame to textures and then removes it from the
    // queue using poll().
    LinkedBlockingQueue<I420Frame> frameToRenderQueue;
    // Local copy of incoming video frame.
    private I420Frame frameToRender;
    // Set to true if renderFrame() was ever called.
    boolean seenFrame;
    // Total number of video frames received in renderFrame() call.
    private int framesReceived;
    // Number of video frames dropped by renderFrame() because the previous
    // frame has not been rendered yet.
    private int framesDropped;
    // Number of rendered video frames.
    private int framesRendered;
    // Time in ns when the first video frame was rendered.
    private long startTimeNs = -1;
    // Time in ns spent in the draw() function.
    private long drawTimeNs;
    // Time in ns spent in the renderFrame() function - including copying
    // frame data to the rendering planes.
    private long copyTimeNs;

    // Texture coordinates mapping the entire texture.
    private final FloatBuffer textureCoords = directNativeFloatBuffer(
        new float[] {
          0, 0, 0, 1, 1, 0, 1, 1
        });

    private YuvImageRenderer(
        GLSurfaceView surface,
        int x, int y, int width, int height) {
      Log.v(TAG, "YuvImageRenderer.Create");
      this.surface = surface;
      frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1);
      // Create texture vertices.
      float xLeft = (x - 50) / 50.0f;
      float yTop = (50 - y) / 50.0f;
      float xRight = Math.min(1.0f, (x + width - 50) / 50.0f);
      float yBottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
      float[] textureVerticesFloat = new float[] {
        xLeft, yTop,
        xLeft, yBottom,
        xRight, yTop,
        xRight, yBottom
      };
      textureVertices = directNativeFloatBuffer(textureVerticesFloat);
    }

    private void createTextures(int program) {
      Log.v(TAG, "  YuvImageRenderer.createTextures");
      this.program = program;

      // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
      GLES20.glGenTextures(3, yuvTextures, 0);
      for (int i = 0; i < 3; i++) {
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
            128, 128, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
            GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
      }
      checkNoGLES2Error();
    }

    private void draw() {
      long now = System.nanoTime();
      if (!seenFrame) {
        // No frame received yet - nothing to render.
        return;
      }
      I420Frame frameFromQueue;
      synchronized (frameToRenderQueue) {
        frameFromQueue = frameToRenderQueue.peek();
        if (frameFromQueue != null && startTimeNs == -1) {
          startTimeNs = now;
        }
        for (int i = 0; i < 3; ++i) {
          int w = (i == 0) ? frameToRender.width : frameToRender.width / 2;
          int h = (i == 0) ? frameToRender.height : frameToRender.height / 2;
          GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
          GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
          if (frameFromQueue != null) {
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
                w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
                frameFromQueue.yuvPlanes[i]);
          }
        }
        if (frameFromQueue != null) {
          frameToRenderQueue.poll();
        }
      }
      int posLocation = GLES20.glGetAttribLocation(program, "in_pos");
      GLES20.glEnableVertexAttribArray(posLocation);
      GLES20.glVertexAttribPointer(
          posLocation, 2, GLES20.GL_FLOAT, false, 0, textureVertices);

      int texLocation = GLES20.glGetAttribLocation(program, "in_tc");
      GLES20.glEnableVertexAttribArray(texLocation);
      GLES20.glVertexAttribPointer(
          texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);

      GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

      GLES20.glDisableVertexAttribArray(posLocation);
      GLES20.glDisableVertexAttribArray(texLocation);

      checkNoGLES2Error();

      if (frameFromQueue != null) {
        framesRendered++;
        drawTimeNs += (System.nanoTime() - now);
        if ((framesRendered % 150) == 0) {
          logStatistics();
        }
      }
    }

    private void logStatistics() {
      long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
      Log.v(TAG, "Frames received: " + framesReceived + ". Dropped: " +
          framesDropped + ". Rendered: " + framesRendered);
      if (framesReceived > 0 && framesRendered > 0) {
        Log.v(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
            " ms. FPS: " + (float)framesRendered * 1e9 / timeSinceFirstFrameNs);
        Log.v(TAG, "Draw time: " +
            (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
            (int) (copyTimeNs / (1000 * framesReceived)) + " us");
      }
    }

    @Override
    public void setSize(final int width, final int height) {
      Log.v(TAG, "YuvImageRenderer.setSize: " + width + " x " + height);
      int[] strides = { width, width / 2, width / 2 };
      // Frame re-allocation needs to be synchronized with copying the
      // frame to textures in the draw() function to avoid re-allocating
      // the frame while it is being copied.
      synchronized (frameToRenderQueue) {
        // Clear the rendering queue.
        frameToRenderQueue.poll();
        // Re-allocate / allocate the frame.
        frameToRender = new I420Frame(width, height, strides, null);
      }
    }

    @Override
    public synchronized void renderFrame(I420Frame frame) {
      long now = System.nanoTime();
      framesReceived++;
      // Check input frame parameters.
      if (!(frame.yuvStrides[0] == frame.width &&
          frame.yuvStrides[1] == frame.width / 2 &&
          frame.yuvStrides[2] == frame.width / 2)) {
        Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
            frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
        return;
      }
      // Skip rendering of this frame if setSize() was not called.
      if (frameToRender == null) {
        framesDropped++;
        return;
      }
      // Check incoming frame dimensions.
      if (frame.width != frameToRender.width ||
          frame.height != frameToRender.height) {
        throw new RuntimeException("Wrong frame size " +
            frame.width + " x " + frame.height);
      }

      if (frameToRenderQueue.size() > 0) {
        // Skip rendering of this frame if the previous frame was not
        // rendered yet.
        framesDropped++;
        return;
      }
      frameToRender.copyFrom(frame);
      copyTimeNs += (System.nanoTime() - now);
      frameToRenderQueue.offer(frameToRender);
      seenFrame = true;
      surface.requestRender();
    }
  }

  /** Passes the GLSurfaceView to the video renderer. */
  public static void setView(GLSurfaceView surface) {
    Log.v(TAG, "VideoRendererGui.setView");
    instance = new VideoRendererGui(surface);
  }

  /**
   * Creates a VideoRenderer with top left corner at (x, y) and resolution
   * (width, height). All parameters are in percentages of screen resolution.
   */
  public static VideoRenderer createGui(
      int x, int y, int width, int height) throws Exception {
    YuvImageRenderer javaGuiRenderer = create(x, y, width, height);
    return new VideoRenderer(javaGuiRenderer);
  }

  /**
   * Creates a VideoRenderer.Callbacks with top left corner at (x, y) and
   * resolution (width, height). All parameters are in percentages of
   * screen resolution.
   */
  public static YuvImageRenderer create(
      int x, int y, int width, int height) {
    // Check display region parameters.
    if (x < 0 || x > 100 || y < 0 || y > 100 ||
        width < 0 || width > 100 || height < 0 || height > 100 ||
        x + width > 100 || y + height > 100) {
      throw new RuntimeException("Incorrect window parameters.");
    }

    if (instance == null) {
      throw new RuntimeException(
          "Attempt to create yuv renderer before setting GLSurfaceView");
    }
    final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
        instance.surface, x, y, width, height);
    synchronized (instance.yuvImageRenderers) {
      if (instance.onSurfaceCreatedCalled) {
        // onSurfaceCreated has already been called for VideoRendererGui -
        // need to create textures for the new image and add the image to the
        // rendering list.
        final CountDownLatch countDownLatch = new CountDownLatch(1);
        instance.surface.queueEvent(new Runnable() {
          public void run() {
            yuvImageRenderer.createTextures(instance.program);
            countDownLatch.countDown();
          }
        });
        // Wait for task completion.
        try {
          countDownLatch.await();
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
      }
      // Add yuv renderer to rendering list.
      instance.yuvImageRenderers.add(yuvImageRenderer);
    }
    return yuvImageRenderer;
  }

  @Override
  public void onSurfaceCreated(GL10 unused, EGLConfig config) {
    Log.v(TAG, "VideoRendererGui.onSurfaceCreated");

    // Create program.
    program = GLES20.glCreateProgram();
    addShaderTo(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_STRING, program);
    addShaderTo(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_STRING, program);

    GLES20.glLinkProgram(program);
    int[] result = new int[] {
      GLES20.GL_FALSE
    };
    result[0] = GLES20.GL_FALSE;
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, result, 0);
    abortUnless(result[0] == GLES20.GL_TRUE,
        GLES20.glGetProgramInfoLog(program));
    GLES20.glUseProgram(program);

    GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "y_tex"), 0);
    GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "u_tex"), 1);
    GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "v_tex"), 2);

    synchronized (yuvImageRenderers) {
      // Create textures for all images.
      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
        yuvImageRenderer.createTextures(program);
      }
      onSurfaceCreatedCalled = true;
    }
    checkNoGLES2Error();
    GLES20.glClearColor(0.0f, 0.0f, 0.3f, 1.0f);
  }

  @Override
  public void onSurfaceChanged(GL10 unused, int width, int height) {
    Log.v(TAG, "VideoRendererGui.onSurfaceChanged: " +
        width + " x " + height + " ");
    GLES20.glViewport(0, 0, width, height);
  }

  @Override
  public void onDrawFrame(GL10 unused) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    synchronized (yuvImageRenderers) {
      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
        yuvImageRenderer.draw();
      }
    }
  }

}
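For context, here is a minimal sketch of how an Activity is expected to drive the new class, based on the setView()/create() calls above and mirroring the AppRTCDemoActivity changes below. The activity name and the layout percentages are illustrative only:

    import android.app.Activity;
    import android.opengl.GLSurfaceView;
    import android.os.Bundle;

    import org.webrtc.VideoRenderer;
    import org.webrtc.VideoRendererGui;

    // Hypothetical activity, for illustration only.
    public class RendererDemoActivity extends Activity {
      @Override
      public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        GLSurfaceView view = new GLSurfaceView(this);
        // Pass the GLSurfaceView to the renderer singleton first.
        VideoRendererGui.setView(view);
        // Full-screen remote video plus picture-in-picture local video.
        // Coordinates are percentages of the view: (x, y, width, height).
        VideoRenderer.Callbacks remoteRender =
            VideoRendererGui.create(0, 0, 100, 100);
        VideoRenderer.Callbacks localRender =
            VideoRendererGui.create(70, 5, 25, 25);
        setContentView(view);
        // Each callbacks instance is then wrapped in a VideoRenderer and
        // attached to a VideoTrack, e.g.:
        //   videoTrack.addRenderer(new VideoRenderer(localRender));
      }
    }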
talk/examples/android/src/org/appspot/apprtc/AppRTCClient.java
@@ -45,8 +45,6 @@ import java.net.URLConnection;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Scanner;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 /**
  * Negotiates signaling for chatting with apprtc.appspot.com "rooms".
talk/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java
@@ -50,7 +50,6 @@ import org.json.JSONException;
 import org.json.JSONObject;
 import org.webrtc.DataChannel;
 import org.webrtc.IceCandidate;
-import org.webrtc.Logging;
 import org.webrtc.MediaConstraints;
 import org.webrtc.MediaStream;
 import org.webrtc.PeerConnection;
@@ -61,11 +60,10 @@ import org.webrtc.StatsObserver;
 import org.webrtc.StatsReport;
 import org.webrtc.VideoCapturer;
 import org.webrtc.VideoRenderer;
-import org.webrtc.VideoRenderer.I420Frame;
+import org.webrtc.VideoRendererGui;
 import org.webrtc.VideoSource;
 import org.webrtc.VideoTrack;
 
-import java.util.EnumSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.regex.Matcher;
@@ -88,7 +86,9 @@ public class AppRTCDemoActivity extends Activity
   private final SDPObserver sdpObserver = new SDPObserver();
   private final GAEChannelClient.MessageHandler gaeHandler = new GAEHandler();
   private AppRTCClient appRtcClient = new AppRTCClient(this, gaeHandler, this);
-  private VideoStreamsView vsv;
+  private AppRTCGLView vsv;
+  private VideoRenderer.Callbacks localRender;
+  private VideoRenderer.Callbacks remoteRender;
   private Toast logToast;
   private final LayoutParams hudLayout =
       new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
@@ -111,7 +111,12 @@ public class AppRTCDemoActivity extends Activity
 
     Point displaySize = new Point();
     getWindowManager().getDefaultDisplay().getRealSize(displaySize);
-    vsv = new VideoStreamsView(this, displaySize);
+
+    vsv = new AppRTCGLView(this, displaySize);
+    VideoRendererGui.setView(vsv);
+    remoteRender = VideoRendererGui.create(0, 0, 100, 100);
+    localRender = VideoRendererGui.create(70, 5, 25, 25);
+
     vsv.setOnClickListener(new View.OnClickListener() {
       @Override public void onClick(View v) {
         toggleHUD();
@@ -307,8 +312,7 @@ public class AppRTCDemoActivity extends Activity
           capturer, appRtcClient.videoConstraints());
       VideoTrack videoTrack =
           factory.createVideoTrack("ARDAMSv0", videoSource);
-      videoTrack.addRenderer(new VideoRenderer(new VideoCallbacks(
-          vsv, VideoStreamsView.Endpoint.LOCAL)));
+      videoTrack.addRenderer(new VideoRenderer(localRender));
       lMS.addTrack(videoTrack);
     }
     if (appRtcClient.audioConstraints() != null) {
@@ -471,8 +475,8 @@ public class AppRTCDemoActivity extends Activity
               stream.videoTracks.size() <= 1,
               "Weird-looking stream: " + stream);
           if (stream.videoTracks.size() == 1) {
-            stream.videoTracks.get(0).addRenderer(new VideoRenderer(
-                new VideoCallbacks(vsv, VideoStreamsView.Endpoint.REMOTE)));
+            stream.videoTracks.get(0).addRenderer(
+                new VideoRenderer(remoteRender));
           }
         }
       });
@@ -662,30 +666,4 @@ public class AppRTCDemoActivity extends Activity
     }
   }
 
-  // Implementation detail: bridge the VideoRenderer.Callbacks interface to
-  // the VideoStreamsView implementation.
-  private class VideoCallbacks implements VideoRenderer.Callbacks {
-    private final VideoStreamsView view;
-    private final VideoStreamsView.Endpoint stream;
-
-    public VideoCallbacks(
-        VideoStreamsView view, VideoStreamsView.Endpoint stream) {
-      this.view = view;
-      this.stream = stream;
-    }
-
-    @Override
-    public void setSize(final int width, final int height) {
-      view.queueEvent(new Runnable() {
-        public void run() {
-          view.setSize(stream, width, height);
-        }
-      });
-    }
-
-    @Override
-    public void renderFrame(I420Frame frame) {
-      view.queueFrame(stream, frame);
-    }
-  }
 }
talk/examples/android/src/org/appspot/apprtc/AppRTCGLView.java (new file, 58 lines)
@@ -0,0 +1,58 @@
/*
 * libjingle
 * Copyright 2014, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.appspot.apprtc;

import android.content.Context;
import android.graphics.Point;
import android.opengl.GLSurfaceView;

public class AppRTCGLView extends GLSurfaceView {
  private Point screenDimensions;

  public AppRTCGLView(Context c, Point screenDimensions) {
    super(c);
    this.screenDimensions = screenDimensions;
  }

  public void updateDisplaySize(Point screenDimensions) {
    this.screenDimensions = screenDimensions;
  }

  @Override
  protected void onMeasure(int unusedX, int unusedY) {
    // Go big or go home!
    setMeasuredDimension(screenDimensions.x, screenDimensions.y);
  }

  @Override
  protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    setSystemUiVisibility(SYSTEM_UI_FLAG_HIDE_NAVIGATION |
        SYSTEM_UI_FLAG_FULLSCREEN | SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
  }
}
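A minimal sketch of how this view gets wired up, following the AppRTCDemoActivity changes above; the host activity name is an assumption for illustration:

    import android.app.Activity;
    import android.graphics.Point;
    import android.os.Bundle;

    import org.webrtc.VideoRendererGui;

    // Hypothetical host activity, for illustration only.
    public class GLViewHostActivity extends Activity {
      @Override
      public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Size the view to the full display, as AppRTCDemoActivity does.
        Point displaySize = new Point();
        getWindowManager().getDefaultDisplay().getRealSize(displaySize);
        AppRTCGLView vsv = new AppRTCGLView(this, displaySize);
        VideoRendererGui.setView(vsv);  // The view doubles as the GL target.
        setContentView(vsv);
      }
    }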
talk/examples/android/src/org/appspot/apprtc/FramePool.java (deleted, 104 lines)
@@ -1,104 +0,0 @@
/*
 * libjingle
 * Copyright 2013, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.appspot.apprtc;

import org.webrtc.VideoRenderer.I420Frame;

import java.util.HashMap;
import java.util.LinkedList;

/**
 * This class acts as an allocation pool meant to minimize GC churn caused by
 * frame allocation & disposal. The public API comprises just two methods:
 * takeFrame(), which returns a pooled frame (allocating one as necessary)
 * for the caller to copy into, and returnFrame(), which returns frame
 * ownership to the pool for use by a later call to takeFrame().
 *
 * This class is thread-safe; calls to takeFrame() and returnFrame() are
 * allowed to happen on any thread.
 */
class FramePool {
  // Maps each summary code (see summarizeFrameDimensions()) to a list of
  // frames of that description.
  private final HashMap<Long, LinkedList<I420Frame>> availableFrames =
      new HashMap<Long, LinkedList<I420Frame>>();
  // Every dimension (e.g. width, height, stride) of a frame must be less than
  // this value.
  private static final long MAX_DIMENSION = 4096;

  public I420Frame takeFrame(I420Frame source) {
    long desc = summarizeFrameDimensions(source);
    I420Frame dst = null;
    synchronized (availableFrames) {
      LinkedList<I420Frame> frames = availableFrames.get(desc);
      if (frames == null) {
        frames = new LinkedList<I420Frame>();
        availableFrames.put(desc, frames);
      }
      if (!frames.isEmpty()) {
        dst = frames.pop();
      } else {
        dst = new I420Frame(
            source.width, source.height, source.yuvStrides, null);
      }
    }
    return dst;
  }

  public void returnFrame(I420Frame frame) {
    long desc = summarizeFrameDimensions(frame);
    synchronized (availableFrames) {
      LinkedList<I420Frame> frames = availableFrames.get(desc);
      if (frames == null) {
        throw new IllegalArgumentException("Unexpected frame dimensions");
      }
      frames.add(frame);
    }
  }

  /** Validate that |frame| can be managed by the pool. */
  public static boolean validateDimensions(I420Frame frame) {
    return frame.width < MAX_DIMENSION && frame.height < MAX_DIMENSION &&
        frame.yuvStrides[0] < MAX_DIMENSION &&
        frame.yuvStrides[1] < MAX_DIMENSION &&
        frame.yuvStrides[2] < MAX_DIMENSION;
  }

  // Return a code summarizing the dimensions of |frame|. Two frames that
  // return the same summary are guaranteed to be able to store each others'
  // contents. Used like Object.hashCode(), but we need all the bits of a long
  // to do a good job, and hashCode() returns int, so we do this.
  private static long summarizeFrameDimensions(I420Frame frame) {
    long ret = frame.width;
    ret = ret * MAX_DIMENSION + frame.height;
    ret = ret * MAX_DIMENSION + frame.yuvStrides[0];
    ret = ret * MAX_DIMENSION + frame.yuvStrides[1];
    ret = ret * MAX_DIMENSION + frame.yuvStrides[2];
    return ret;
  }
}
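For reference, the deleted pool was driven from VideoStreamsView.queueFrame()/updateFrames() below; here is a minimal sketch of that pattern, assuming an incoming I420Frame named source:

    // Sketch of the take/copy/return pattern used by VideoStreamsView.
    void copyAndReturn(FramePool framePool, I420Frame source) {
      // Borrow a pooled frame of matching dimensions (allocates on a pool
      // miss) and copy the incoming data into it.
      I420Frame copy = framePool.takeFrame(source).copyFrom(source);
      // ... the copy is consumed on the render thread ...
      framePool.returnFrame(copy);  // Return ownership to the pool for reuse.
    }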
talk/examples/android/src/org/appspot/apprtc/VideoStreamsView.java (deleted, 344 lines)
@@ -1,344 +0,0 @@
/*
 * libjingle
 * Copyright 2013, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.appspot.apprtc;

import android.content.Context;
import android.graphics.Point;
import android.graphics.Rect;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;

import org.webrtc.VideoRenderer.I420Frame;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.EnumMap;
import java.util.EnumSet;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * A GLSurfaceView{,.Renderer} that efficiently renders YUV frames from local &
 * remote VideoTracks using the GPU for CSC. Clients will want to call the
 * constructor, setSize() and queueFrame() as appropriate, but none of the
 * other public methods of this class are of interest to clients (only to
 * system classes).
 */
public class VideoStreamsView
    extends GLSurfaceView
    implements GLSurfaceView.Renderer {

  /** Identify which of the two video streams is being addressed. */
  public static enum Endpoint { LOCAL, REMOTE };

  private final static String TAG = "VideoStreamsView";
  private EnumMap<Endpoint, Rect> rects =
      new EnumMap<Endpoint, Rect>(Endpoint.class);
  private Point screenDimensions;
  // [0] are local Y,U,V, [1] are remote Y,U,V.
  private int[][] yuvTextures = { { -1, -1, -1}, {-1, -1, -1 }};
  private EnumSet<Endpoint> seenFrameInDirection =
      EnumSet.noneOf(Endpoint.class);
  private int posLocation = -1;
  private long lastFPSLogTime = System.nanoTime();
  private long numFramesSinceLastLog = 0;
  private FramePool framePool = new FramePool();
  // Accessed on multiple threads! Must be synchronized.
  private EnumMap<Endpoint, I420Frame> framesToRender =
      new EnumMap<Endpoint, I420Frame>(Endpoint.class);

  public VideoStreamsView(Context c, Point screenDimensions) {
    super(c);
    this.screenDimensions = screenDimensions;
    setPreserveEGLContextOnPause(true);
    setEGLContextClientVersion(2);
    setRenderer(this);
    setRenderMode(RENDERMODE_WHEN_DIRTY);
  }

  public void updateDisplaySize(Point screenDimensions) {
    this.screenDimensions = screenDimensions;
  }

  /** Queue |frame| to be uploaded. */
  public void queueFrame(final Endpoint stream, I420Frame frame) {
    // Paying for the copy of the YUV data here allows CSC and painting time
    // to get spent on the render thread instead of the UI thread.
    abortUnless(FramePool.validateDimensions(frame), "Frame too large!");
    final I420Frame frameCopy = framePool.takeFrame(frame).copyFrom(frame);
    boolean needToScheduleRender;
    synchronized (framesToRender) {
      // A new render needs to be scheduled (via updateFrames()) iff there
      // isn't already a render scheduled, which is true iff framesToRender
      // is empty.
      needToScheduleRender = framesToRender.isEmpty();
      I420Frame frameToDrop = framesToRender.put(stream, frameCopy);
      if (frameToDrop != null) {
        framePool.returnFrame(frameToDrop);
      }
    }
    if (needToScheduleRender) {
      queueEvent(new Runnable() {
        public void run() {
          updateFrames();
        }
      });
    }
  }

  // Upload the planes from |framesToRender| to the textures owned by this
  // View.
  private void updateFrames() {
    I420Frame localFrame = null;
    I420Frame remoteFrame = null;
    synchronized (framesToRender) {
      localFrame = framesToRender.remove(Endpoint.LOCAL);
      remoteFrame = framesToRender.remove(Endpoint.REMOTE);
    }
    if (localFrame != null) {
      seenFrameInDirection.add(Endpoint.LOCAL);
      texImage2D(localFrame, yuvTextures[0]);
      framePool.returnFrame(localFrame);
    }
    if (remoteFrame != null) {
      seenFrameInDirection.add(Endpoint.REMOTE);
      texImage2D(remoteFrame, yuvTextures[1]);
      framePool.returnFrame(remoteFrame);
    }
    abortUnless(localFrame != null || remoteFrame != null,
        "Nothing to render!");
    requestRender();
  }

  /** Inform this View of the dimensions of frames coming from |stream|. */
  public void setSize(Endpoint stream, int width, int height) {
    // Generate 3 texture ids for Y/U/V and place them into |textures|,
    // allocating enough storage for |width|x|height| pixels.
    int[] textures = yuvTextures[stream == Endpoint.LOCAL ? 0 : 1];
    GLES20.glGenTextures(3, textures, 0);
    for (int i = 0; i < 3; ++i) {
      int w = i == 0 ? width : width / 2;
      int h = i == 0 ? height : height / 2;
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0,
          GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
      GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
          GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
      GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
          GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
      GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
          GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
      GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
          GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    }
    checkNoGLES2Error();
  }

  @Override
  protected void onMeasure(int unusedX, int unusedY) {
    // Go big or go home!
    setMeasuredDimension(screenDimensions.x, screenDimensions.y);
  }

  @Override
  public void onSurfaceChanged(GL10 unused, int width, int height) {
    GLES20.glViewport(0, 0, width, height);
    checkNoGLES2Error();
  }

  @Override
  public void onDrawFrame(GL10 unused) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    if (seenFrameInDirection.contains(Endpoint.REMOTE)) {
      drawRectangle(yuvTextures[1], remoteVertices);
    }
    if (seenFrameInDirection.contains(Endpoint.LOCAL)) {
      drawRectangle(yuvTextures[0], localVertices);
    }
    ++numFramesSinceLastLog;
    long now = System.nanoTime();
    if (lastFPSLogTime == -1 || now - lastFPSLogTime > 1e9) {
      double fps = numFramesSinceLastLog / ((now - lastFPSLogTime) / 1e9);
      Log.d(TAG, "Rendered FPS: " + fps);
      lastFPSLogTime = now;
      numFramesSinceLastLog = 1;
    }
    checkNoGLES2Error();
  }

  @Override
  public void onSurfaceCreated(GL10 unused, EGLConfig config) {
    int program = GLES20.glCreateProgram();
    addShaderTo(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_STRING, program);
    addShaderTo(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_STRING, program);

    GLES20.glLinkProgram(program);
    int[] result = new int[] { GLES20.GL_FALSE };
    result[0] = GLES20.GL_FALSE;
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, result, 0);
    abortUnless(result[0] == GLES20.GL_TRUE,
        GLES20.glGetProgramInfoLog(program));
    GLES20.glUseProgram(program);

    GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "y_tex"), 0);
    GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "u_tex"), 1);
    GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "v_tex"), 2);

    // Actually set in drawRectangle(), but queried only once here.
    posLocation = GLES20.glGetAttribLocation(program, "in_pos");

    int tcLocation = GLES20.glGetAttribLocation(program, "in_tc");
    GLES20.glEnableVertexAttribArray(tcLocation);
    GLES20.glVertexAttribPointer(
        tcLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);

    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    checkNoGLES2Error();
  }

  @Override
  protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    setSystemUiVisibility(SYSTEM_UI_FLAG_HIDE_NAVIGATION |
        SYSTEM_UI_FLAG_FULLSCREEN | SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
  }

  // Wrap a float[] in a direct FloatBuffer using native byte order.
  private static FloatBuffer directNativeFloatBuffer(float[] array) {
    FloatBuffer buffer = ByteBuffer.allocateDirect(array.length * 4).order(
        ByteOrder.nativeOrder()).asFloatBuffer();
    buffer.put(array);
    buffer.flip();
    return buffer;
  }

  // Upload the YUV planes from |frame| to |textures|.
  private void texImage2D(I420Frame frame, int[] textures) {
    for (int i = 0; i < 3; ++i) {
      ByteBuffer plane = frame.yuvPlanes[i];
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
      int w = i == 0 ? frame.width : frame.width / 2;
      int h = i == 0 ? frame.height : frame.height / 2;
      abortUnless(w == frame.yuvStrides[i], frame.yuvStrides[i] + "!=" + w);
      GLES20.glTexImage2D(
          GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0,
          GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, plane);
    }
    checkNoGLES2Error();
  }

  // Draw |textures| using |vertices| (X,Y coordinates).
  private void drawRectangle(int[] textures, FloatBuffer vertices) {
    for (int i = 0; i < 3; ++i) {
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
    }

    GLES20.glVertexAttribPointer(
        posLocation, 2, GLES20.GL_FLOAT, false, 0, vertices);
    GLES20.glEnableVertexAttribArray(posLocation);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkNoGLES2Error();
  }

  // Compile & attach a |type| shader specified by |source| to |program|.
  private static void addShaderTo(
      int type, String source, int program) {
    int[] result = new int[] { GLES20.GL_FALSE };
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, source);
    GLES20.glCompileShader(shader);
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
    abortUnless(result[0] == GLES20.GL_TRUE,
        GLES20.glGetShaderInfoLog(shader) + ", source: " + source);
    GLES20.glAttachShader(program, shader);
    GLES20.glDeleteShader(shader);
    checkNoGLES2Error();
  }

  // Poor-man's assert(): die with |msg| unless |condition| is true.
  private static void abortUnless(boolean condition, String msg) {
    if (!condition) {
      throw new RuntimeException(msg);
    }
  }

  // Assert that no OpenGL ES 2.0 error has been raised.
  private static void checkNoGLES2Error() {
    int error = GLES20.glGetError();
    abortUnless(error == GLES20.GL_NO_ERROR, "GLES20 error: " + error);
  }

  // Remote image should span the full screen.
  private static final FloatBuffer remoteVertices = directNativeFloatBuffer(
      new float[] { -1, 1, -1, -1, 1, 1, 1, -1 });

  // Local image should be thumbnailish.
  private static final FloatBuffer localVertices = directNativeFloatBuffer(
      new float[] { 0.6f, 0.9f, 0.6f, 0.6f, 0.9f, 0.9f, 0.9f, 0.6f });

  // Texture coordinates mapping the entire texture.
  private static final FloatBuffer textureCoords = directNativeFloatBuffer(
      new float[] { 0, 0, 0, 1, 1, 0, 1, 1 });

  // Pass-through vertex shader.
  private static final String VERTEX_SHADER_STRING =
      "varying vec2 interp_tc;\n" +
      "\n" +
      "attribute vec4 in_pos;\n" +
      "attribute vec2 in_tc;\n" +
      "\n" +
      "void main() {\n" +
      "  gl_Position = in_pos;\n" +
      "  interp_tc = in_tc;\n" +
      "}\n";

  // YUV to RGB pixel shader. Loads a pixel from each plane and passes it
  // through the color matrix.
  private static final String FRAGMENT_SHADER_STRING =
      "precision mediump float;\n" +
      "varying vec2 interp_tc;\n" +
      "\n" +
      "uniform sampler2D y_tex;\n" +
      "uniform sampler2D u_tex;\n" +
      "uniform sampler2D v_tex;\n" +
      "\n" +
      "void main() {\n" +
      "  float y = texture2D(y_tex, interp_tc).r;\n" +
      "  float u = texture2D(u_tex, interp_tc).r - .5;\n" +
      "  float v = texture2D(v_tex, interp_tc).r - .5;\n" +
      // CSC according to http://www.fourcc.org/fccyvrgb.php
      "  gl_FragColor = vec4(y + 1.403 * v, " +
      "                      y - 0.344 * u - 0.714 * v, " +
      "                      y + 1.77 * u, 1);\n" +
      "}\n";
}
talk/libjingle.gyp
@@ -108,6 +108,7 @@
     # included here, or better yet, build a proper .jar in webrtc
     # and include it here.
     'android_java_files': [
+      'app/webrtc/java/android/org/webrtc/VideoRendererGui.java',
       'app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java',
       '<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/AudioManagerAndroid.java',
       '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java',
talk/libjingle_examples.gyp
@@ -323,10 +323,9 @@
         'examples/android/res/values/strings.xml',
         'examples/android/src/org/appspot/apprtc/AppRTCClient.java',
         'examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java',
+        'examples/android/src/org/appspot/apprtc/AppRTCGLView.java',
         'examples/android/src/org/appspot/apprtc/UnhandledExceptionHandler.java',
-        'examples/android/src/org/appspot/apprtc/FramePool.java',
         'examples/android/src/org/appspot/apprtc/GAEChannelClient.java',
-        'examples/android/src/org/appspot/apprtc/VideoStreamsView.java',
       ],
       'outputs': [
         '<(PRODUCT_DIR)/AppRTCDemo-debug.apk',