HW video decoding optimization to better support HD resolution:

- Change the HW video decoder wrapper to allow feeding multiple input
buffers and querying for an output every 10 ms (see the sketch after this
list).
- Add an option to decode video frames into an Android Surface object. Create
an EGL context shared with the video renderer and an external texture on
the video decoder thread.
- Support external texture rendering in the Android renderer.
- Support TextureVideoFrame in Java and use it to pass textures from the video
decoder to the renderer.
- Fix the HW encoder and decoder detection code to avoid querying codec
capabilities from SW codecs.
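
The first two bullets describe a feed-and-poll decoder wrapper: queue encoded
input whenever the codec has a free input buffer, poll for decoded output on a
roughly 10 ms cadence, and optionally let the codec render straight into an
Android Surface. The sketch below illustrates that pattern with the plain
Android MediaCodec API; the class name, helper method names, and the VP8 MIME
type are illustrative assumptions, not code from this change.

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;
import java.nio.ByteBuffer;

// Illustrative decoder wrapper (not from this CL): feed input when a codec
// buffer is free and poll for output with a short (~10 ms) timeout, rendering
// decoded frames into a Surface.
public class DecoderPollSketch {
  private static final long POLL_TIMEOUT_US = 10 * 1000;  // ~10 ms poll interval.
  private static final String MIME = "video/x-vnd.on2.vp8";  // Assumed codec type.
  private final MediaCodec codec;

  public DecoderPollSketch(int width, int height, Surface surface) throws Exception {
    codec = MediaCodec.createDecoderByType(MIME);
    // Configuring with a Surface makes the codec render decoded frames into it
    // (consumed as an external texture) instead of returning byte-buffer output.
    codec.configure(MediaFormat.createVideoFormat(MIME, width, height), surface, null, 0);
    codec.start();
  }

  // Queue one encoded frame if an input buffer is free; false means "retry later".
  public boolean queueFrame(ByteBuffer encodedFrame, long timestampUs) {
    int inputIndex = codec.dequeueInputBuffer(0 /* do not block */);
    if (inputIndex < 0) {
      return false;
    }
    int size = encodedFrame.remaining();
    ByteBuffer inputBuffer = codec.getInputBuffers()[inputIndex];
    inputBuffer.clear();
    inputBuffer.put(encodedFrame);
    codec.queueInputBuffer(inputIndex, 0, size, timestampUs, 0);
    return true;
  }

  // Poll for decoded output; when a frame is ready, release it with render=true
  // so it is pushed to the Surface.
  public void pollOutput() {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outputIndex = codec.dequeueOutputBuffer(info, POLL_TIMEOUT_US);
    if (outputIndex >= 0) {
      codec.releaseOutputBuffer(outputIndex, true /* render to Surface */);
    }
  }
}

Rendering to a Surface keeps the decoded frame as an external (OES) texture
rather than copying it through byte buffers, which is why the renderer change
in this CL adds a fragment shader that samples GL_OES_EGL_image_external.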

BUG=
R=tkchin@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/18299004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7185 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: glaznev@webrtc.org
Date:   2014-09-15 17:52:42 +00:00
parent cd309e3168
commit 996784548d
7 changed files with 987 additions and 265 deletions

View File

@ -37,6 +37,10 @@ import java.util.concurrent.LinkedBlockingQueue;
import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10; import javax.microedition.khronos.opengles.GL10;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLContext;
import android.opengl.GLES11Ext;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.GLSurfaceView; import android.opengl.GLSurfaceView;
import android.util.Log; import android.util.Log;
@ -54,6 +58,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private static VideoRendererGui instance = null; private static VideoRendererGui instance = null;
private static final String TAG = "VideoRendererGui"; private static final String TAG = "VideoRendererGui";
private GLSurfaceView surface; private GLSurfaceView surface;
private static EGLContext eglContext = null;
// Indicates if SurfaceView.Renderer.onSurfaceCreated was called. // Indicates if SurfaceView.Renderer.onSurfaceCreated was called.
// If true then for every newly created yuv image renderer createTexture() // If true then for every newly created yuv image renderer createTexture()
// should be called. The variable is accessed on multiple threads and // should be called. The variable is accessed on multiple threads and
@ -61,7 +66,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private boolean onSurfaceCreatedCalled; private boolean onSurfaceCreatedCalled;
// List of yuv renderers. // List of yuv renderers.
private ArrayList<YuvImageRenderer> yuvImageRenderers; private ArrayList<YuvImageRenderer> yuvImageRenderers;
private int program; private int yuvProgram;
private int oesProgram;
private final String VERTEX_SHADER_STRING = private final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n" + "varying vec2 interp_tc;\n" +
@ -73,7 +79,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
" interp_tc = in_tc;\n" + " interp_tc = in_tc;\n" +
"}\n"; "}\n";
private final String FRAGMENT_SHADER_STRING = private final String YUV_FRAGMENT_SHADER_STRING =
"precision mediump float;\n" + "precision mediump float;\n" +
"varying vec2 interp_tc;\n" + "varying vec2 interp_tc;\n" +
"\n" + "\n" +
@ -91,6 +97,19 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
" y + 1.77 * u, 1);\n" + " y + 1.77 * u, 1);\n" +
"}\n"; "}\n";
private static final String OES_FRAGMENT_SHADER_STRING =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 interp_tc;\n" +
"\n" +
"uniform samplerExternalOES oes_tex;\n" +
"\n" +
"void main() {\n" +
" gl_FragColor = texture2D(oes_tex, interp_tc);\n" +
"}\n";
private VideoRendererGui(GLSurfaceView surface) { private VideoRendererGui(GLSurfaceView surface) {
this.surface = surface; this.surface = surface;
// Create an OpenGL ES 2.0 context. // Create an OpenGL ES 2.0 context.
@ -124,23 +143,46 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
return buffer; return buffer;
} }
// Compile & attach a |type| shader specified by |source| to |program|. private int loadShader(int shaderType, String source) {
private static void addShaderTo(
int type, String source, int program) {
int[] result = new int[] { int[] result = new int[] {
GLES20.GL_FALSE GLES20.GL_FALSE
}; };
int shader = GLES20.glCreateShader(type); int shader = GLES20.glCreateShader(shaderType);
GLES20.glShaderSource(shader, source); GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader); GLES20.glCompileShader(shader);
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0); GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
abortUnless(result[0] == GLES20.GL_TRUE, if (result[0] != GLES20.GL_TRUE) {
GLES20.glGetShaderInfoLog(shader) + ", source: " + source); Log.e(TAG, "Could not compile shader " + shaderType + ":" +
GLES20.glAttachShader(program, shader); GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader); throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
}
checkNoGLES2Error(); checkNoGLES2Error();
} return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
int program = GLES20.glCreateProgram();
if (program == 0) {
throw new RuntimeException("Could not create program");
}
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
int[] linkStatus = new int[] {
GLES20.GL_FALSE
};
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: " +
GLES20.glGetProgramInfoLog(program));
throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
}
checkNoGLES2Error();
return program;
}
/** /**
* Class used to display stream of YUV420 frames at particular location * Class used to display stream of YUV420 frames at particular location
@ -149,9 +191,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
*/ */
private static class YuvImageRenderer implements VideoRenderer.Callbacks { private static class YuvImageRenderer implements VideoRenderer.Callbacks {
private GLSurfaceView surface; private GLSurfaceView surface;
private int program; private int id;
private int yuvProgram;
private int oesProgram;
private FloatBuffer textureVertices; private FloatBuffer textureVertices;
private int[] yuvTextures = { -1, -1, -1 }; private int[] yuvTextures = { -1, -1, -1 };
private int oesTexture = -1;
private float[] stMatrix = new float[16];
// Render frame queue - accessed by two threads. renderFrame() call does // Render frame queue - accessed by two threads. renderFrame() call does
// an offer (writing I420Frame to render) and early-returns (recording // an offer (writing I420Frame to render) and early-returns (recording
@ -159,8 +205,12 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// copies frame to texture and then removes it from a queue using poll(). // copies frame to texture and then removes it from a queue using poll().
LinkedBlockingQueue<I420Frame> frameToRenderQueue; LinkedBlockingQueue<I420Frame> frameToRenderQueue;
// Local copy of incoming video frame. // Local copy of incoming video frame.
private I420Frame frameToRender; private I420Frame yuvFrameToRender;
// Flag if renderFrame() was ever called private I420Frame textureFrameToRender;
// Type of video frame used for recent frame rendering.
private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
private RendererType rendererType;
// Flag if renderFrame() was ever called.
boolean seenFrame; boolean seenFrame;
// Total number of video frames received in renderFrame() call. // Total number of video frames received in renderFrame() call.
private int framesReceived; private int framesReceived;
@ -174,7 +224,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// Time in ns spent in draw() function. // Time in ns spent in draw() function.
private long drawTimeNs; private long drawTimeNs;
// Time in ns spent in renderFrame() function - including copying frame // Time in ns spent in renderFrame() function - including copying frame
// data to rendering planes // data to rendering planes.
private long copyTimeNs; private long copyTimeNs;
// Texture Coordinates mapping the entire texture. // Texture Coordinates mapping the entire texture.
@ -184,10 +234,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}); });
private YuvImageRenderer( private YuvImageRenderer(
GLSurfaceView surface, GLSurfaceView surface, int id,
int x, int y, int width, int height) { int x, int y, int width, int height) {
Log.v(TAG, "YuvImageRenderer.Create"); Log.d(TAG, "YuvImageRenderer.Create id: " + id);
this.surface = surface; this.surface = surface;
this.id = id;
frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1); frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1);
// Create texture vertices. // Create texture vertices.
float xLeft = (x - 50) / 50.0f; float xLeft = (x - 50) / 50.0f;
@ -203,11 +254,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
textureVertices = directNativeFloatBuffer(textureVeticesFloat); textureVertices = directNativeFloatBuffer(textureVeticesFloat);
} }
private void createTextures(int program) { private void createTextures(int yuvProgram, int oesProgram) {
Log.v(TAG, " YuvImageRenderer.createTextures"); Log.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
this.program = program; Thread.currentThread().getId());
this.yuvProgram = yuvProgram;
this.oesProgram = oesProgram;
// Generate 3 texture ids for Y/U/V and place them into |textures|. // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
GLES20.glGenTextures(3, yuvTextures, 0); GLES20.glGenTextures(3, yuvTextures, 0);
for (int i = 0; i < 3; i++) { for (int i = 0; i < 3; i++) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
@ -227,38 +280,76 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
} }
private void draw() { private void draw() {
long now = System.nanoTime();
if (!seenFrame) { if (!seenFrame) {
// No frame received yet - nothing to render. // No frame received yet - nothing to render.
return; return;
} }
long now = System.nanoTime();
I420Frame frameFromQueue; I420Frame frameFromQueue;
synchronized (frameToRenderQueue) { synchronized (frameToRenderQueue) {
frameFromQueue = frameToRenderQueue.peek(); frameFromQueue = frameToRenderQueue.peek();
if (frameFromQueue != null && startTimeNs == -1) { if (frameFromQueue != null && startTimeNs == -1) {
startTimeNs = now; startTimeNs = now;
} }
for (int i = 0; i < 3; ++i) {
int w = (i == 0) ? frameToRender.width : frameToRender.width / 2; if (rendererType == RendererType.RENDERER_YUV) {
int h = (i == 0) ? frameToRender.height : frameToRender.height / 2; // YUV textures rendering.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); GLES20.glUseProgram(yuvProgram);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
if (frameFromQueue != null) { for (int i = 0; i < 3; ++i) {
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
frameFromQueue.yuvPlanes[i]); if (frameFromQueue != null) {
int w = (i == 0) ?
frameFromQueue.width : frameFromQueue.width / 2;
int h = (i == 0) ?
frameFromQueue.height : frameFromQueue.height / 2;
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
frameFromQueue.yuvPlanes[i]);
}
} }
} else {
// External texture rendering.
GLES20.glUseProgram(oesProgram);
if (frameFromQueue != null) {
oesTexture = frameFromQueue.textureId;
if (frameFromQueue.textureObject instanceof SurfaceTexture) {
SurfaceTexture surfaceTexture =
(SurfaceTexture) frameFromQueue.textureObject;
surfaceTexture.updateTexImage();
surfaceTexture.getTransformMatrix(stMatrix);
}
}
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTexture);
} }
if (frameFromQueue != null) { if (frameFromQueue != null) {
frameToRenderQueue.poll(); frameToRenderQueue.poll();
} }
} }
int posLocation = GLES20.glGetAttribLocation(program, "in_pos");
if (rendererType == RendererType.RENDERER_YUV) {
GLES20.glUniform1i(GLES20.glGetUniformLocation(yuvProgram, "y_tex"), 0);
GLES20.glUniform1i(GLES20.glGetUniformLocation(yuvProgram, "u_tex"), 1);
GLES20.glUniform1i(GLES20.glGetUniformLocation(yuvProgram, "v_tex"), 2);
}
int posLocation = GLES20.glGetAttribLocation(yuvProgram, "in_pos");
if (posLocation == -1) {
throw new RuntimeException("Could not get attrib location for in_pos");
}
GLES20.glEnableVertexAttribArray(posLocation); GLES20.glEnableVertexAttribArray(posLocation);
GLES20.glVertexAttribPointer( GLES20.glVertexAttribPointer(
posLocation, 2, GLES20.GL_FLOAT, false, 0, textureVertices); posLocation, 2, GLES20.GL_FLOAT, false, 0, textureVertices);
int texLocation = GLES20.glGetAttribLocation(program, "in_tc"); int texLocation = GLES20.glGetAttribLocation(yuvProgram, "in_tc");
if (texLocation == -1) {
throw new RuntimeException("Could not get attrib location for in_tc");
}
GLES20.glEnableVertexAttribArray(texLocation); GLES20.glEnableVertexAttribArray(texLocation);
GLES20.glVertexAttribPointer( GLES20.glVertexAttribPointer(
texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords); texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);
@ -273,7 +364,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
if (frameFromQueue != null) { if (frameFromQueue != null) {
framesRendered++; framesRendered++;
drawTimeNs += (System.nanoTime() - now); drawTimeNs += (System.nanoTime() - now);
if ((framesRendered % 150) == 0) { if ((framesRendered % 90) == 0) {
logStatistics(); logStatistics();
} }
} }
@ -281,12 +372,13 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
private void logStatistics() { private void logStatistics() {
long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs; long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
Log.v(TAG, "Frames received: " + framesReceived + ". Dropped: " + Log.d(TAG, "ID: " + id + ". Type: " + rendererType +
framesDropped + ". Rendered: " + framesRendered); ". Frames received: " + framesReceived +
". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
if (framesReceived > 0 && framesRendered > 0) { if (framesReceived > 0 && framesRendered > 0) {
Log.v(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) + Log.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
" ms. FPS: " + (float)framesRendered * 1e9 / timeSinceFirstFrameNs); " ms. FPS: " + (float)framesRendered * 1e9 / timeSinceFirstFrameNs);
Log.v(TAG, "Draw time: " + Log.d(TAG, "Draw time: " +
(int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " + (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
(int) (copyTimeNs / (1000 * framesReceived)) + " us"); (int) (copyTimeNs / (1000 * framesReceived)) + " us");
} }
@ -294,16 +386,18 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
@Override @Override
public void setSize(final int width, final int height) { public void setSize(final int width, final int height) {
Log.v(TAG, "YuvImageRenderer.setSize: " + width + " x " + height); Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
width + " x " + height);
int[] strides = { width, width / 2, width / 2 }; int[] strides = { width, width / 2, width / 2 };
// Frame re-allocation need to be synchronized with copying // Frame re-allocation need to be synchronized with copying
// frame to textures in draw() function to avoid re-allocating // frame to textures in draw() function to avoid re-allocating
// the frame while it is being copied. // the frame while it is being copied.
synchronized (frameToRenderQueue) { synchronized (frameToRenderQueue) {
// Clear rendering queue // Clear rendering queue.
frameToRenderQueue.poll(); frameToRenderQueue.poll();
// Re-allocate / allocate the frame // Re-allocate / allocate the frame.
frameToRender = new I420Frame(width, height, strides, null); yuvFrameToRender = new I420Frame(width, height, strides, null);
textureFrameToRender = new I420Frame(width, height, null, -1);
} }
} }
@ -311,24 +405,26 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
public synchronized void renderFrame(I420Frame frame) { public synchronized void renderFrame(I420Frame frame) {
long now = System.nanoTime(); long now = System.nanoTime();
framesReceived++; framesReceived++;
// Check input frame parameters.
if (!(frame.yuvStrides[0] == frame.width &&
frame.yuvStrides[1] == frame.width / 2 &&
frame.yuvStrides[2] == frame.width / 2)) {
Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
return;
}
// Skip rendering of this frame if setSize() was not called. // Skip rendering of this frame if setSize() was not called.
if (frameToRender == null) { if (yuvFrameToRender == null || textureFrameToRender == null) {
framesDropped++; framesDropped++;
return; return;
} }
// Check incoming frame dimensions // Check input frame parameters.
if (frame.width != frameToRender.width || if (frame.yuvFrame) {
frame.height != frameToRender.height) { if (!(frame.yuvStrides[0] == frame.width &&
throw new RuntimeException("Wrong frame size " + frame.yuvStrides[1] == frame.width / 2 &&
frame.width + " x " + frame.height); frame.yuvStrides[2] == frame.width / 2)) {
Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
return;
}
// Check incoming frame dimensions.
if (frame.width != yuvFrameToRender.width ||
frame.height != yuvFrameToRender.height) {
throw new RuntimeException("Wrong frame size " +
frame.width + " x " + frame.height);
}
} }
if (frameToRenderQueue.size() > 0) { if (frameToRenderQueue.size() > 0) {
@ -336,20 +432,36 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
framesDropped++; framesDropped++;
return; return;
} }
frameToRender.copyFrom(frame);
// Create a local copy of the frame.
if (frame.yuvFrame) {
yuvFrameToRender.copyFrom(frame);
rendererType = RendererType.RENDERER_YUV;
frameToRenderQueue.offer(yuvFrameToRender);
} else {
textureFrameToRender.copyFrom(frame);
rendererType = RendererType.RENDERER_TEXTURE;
frameToRenderQueue.offer(textureFrameToRender);
}
copyTimeNs += (System.nanoTime() - now); copyTimeNs += (System.nanoTime() - now);
frameToRenderQueue.offer(frameToRender);
seenFrame = true; seenFrame = true;
// Request rendering.
surface.requestRender(); surface.requestRender();
} }
} }
/** Passes GLSurfaceView to video renderer. */ /** Passes GLSurfaceView to video renderer. */
public static void setView(GLSurfaceView surface) { public static void setView(GLSurfaceView surface) {
Log.v(TAG, "VideoRendererGui.setView"); Log.d(TAG, "VideoRendererGui.setView");
instance = new VideoRendererGui(surface); instance = new VideoRendererGui(surface);
} }
public static EGLContext getEGLContext() {
return eglContext;
}
/** /**
* Creates VideoRenderer with top left corner at (x, y) and resolution * Creates VideoRenderer with top left corner at (x, y) and resolution
* (width, height). All parameters are in percentage of screen resolution. * (width, height). All parameters are in percentage of screen resolution.
@ -360,6 +472,11 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
return new VideoRenderer(javaGuiRenderer); return new VideoRenderer(javaGuiRenderer);
} }
public static VideoRenderer.Callbacks createGuiRenderer(
int x, int y, int width, int height) {
return create(x, y, width, height);
}
/** /**
* Creates VideoRenderer.Callbacks with top left corner at (x, y) and * Creates VideoRenderer.Callbacks with top left corner at (x, y) and
* resolution (width, height). All parameters are in percentage of * resolution (width, height). All parameters are in percentage of
@ -379,7 +496,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
"Attempt to create yuv renderer before setting GLSurfaceView"); "Attempt to create yuv renderer before setting GLSurfaceView");
} }
final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer( final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
instance.surface, x, y, width, height); instance.surface, instance.yuvImageRenderers.size(),
x, y, width, height);
synchronized (instance.yuvImageRenderers) { synchronized (instance.yuvImageRenderers) {
if (instance.onSurfaceCreatedCalled) { if (instance.onSurfaceCreatedCalled) {
// onSurfaceCreated has already been called for VideoRendererGui - // onSurfaceCreated has already been called for VideoRendererGui -
@ -388,7 +506,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
final CountDownLatch countDownLatch = new CountDownLatch(1); final CountDownLatch countDownLatch = new CountDownLatch(1);
instance.surface.queueEvent(new Runnable() { instance.surface.queueEvent(new Runnable() {
public void run() { public void run() {
yuvImageRenderer.createTextures(instance.program); yuvImageRenderer.createTextures(
instance.yuvProgram, instance.oesProgram);
countDownLatch.countDown(); countDownLatch.countDown();
} }
}); });
@ -407,41 +526,31 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
@Override @Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) { public void onSurfaceCreated(GL10 unused, EGLConfig config) {
Log.v(TAG, "VideoRendererGui.onSurfaceCreated"); Log.d(TAG, "VideoRendererGui.onSurfaceCreated");
// Store render EGL context
eglContext = EGL14.eglGetCurrentContext();
Log.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
// Create program. // Create YUV and OES programs.
program = GLES20.glCreateProgram(); yuvProgram = createProgram(VERTEX_SHADER_STRING,
addShaderTo(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_STRING, program); YUV_FRAGMENT_SHADER_STRING);
addShaderTo(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_STRING, program); oesProgram = createProgram(VERTEX_SHADER_STRING,
OES_FRAGMENT_SHADER_STRING);
GLES20.glLinkProgram(program);
int[] result = new int[] {
GLES20.GL_FALSE
};
result[0] = GLES20.GL_FALSE;
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, result, 0);
abortUnless(result[0] == GLES20.GL_TRUE,
GLES20.glGetProgramInfoLog(program));
GLES20.glUseProgram(program);
GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "y_tex"), 0);
GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "u_tex"), 1);
GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "v_tex"), 2);
synchronized (yuvImageRenderers) { synchronized (yuvImageRenderers) {
// Create textures for all images. // Create textures for all images.
for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) { for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
yuvImageRenderer.createTextures(program); yuvImageRenderer.createTextures(yuvProgram, oesProgram);
} }
onSurfaceCreatedCalled = true; onSurfaceCreatedCalled = true;
} }
checkNoGLES2Error(); checkNoGLES2Error();
GLES20.glClearColor(0.0f, 0.0f, 0.3f, 1.0f); GLES20.glClearColor(0.0f, 0.3f, 0.1f, 1.0f);
} }
@Override @Override
public void onSurfaceChanged(GL10 unused, int width, int height) { public void onSurfaceChanged(GL10 unused, int width, int height) {
Log.v(TAG, "VideoRendererGui.onSurfaceChanged: " + Log.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
width + " x " + height + " "); width + " x " + height + " ");
GLES20.glViewport(0, 0, width, height); GLES20.glViewport(0, 0, width, height);
} }

View File

@ -81,20 +81,25 @@
#include "webrtc/base/logging.h" #include "webrtc/base/logging.h"
#include "webrtc/base/messagequeue.h" #include "webrtc/base/messagequeue.h"
#include "webrtc/base/ssladapter.h" #include "webrtc/base/ssladapter.h"
#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h" #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/system_wrappers/interface/compile_assert.h" #include "webrtc/system_wrappers/interface/compile_assert.h"
#include "webrtc/system_wrappers/interface/trace.h" #include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_base.h" #include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/voice_engine/include/voe_base.h" #include "webrtc/voice_engine/include/voe_base.h"
#ifdef ANDROID #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
#include <android/log.h>
#include "webrtc/system_wrappers/interface/logcat_trace_context.h" #include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
using webrtc::CodecSpecificInfo; using webrtc::CodecSpecificInfo;
using webrtc::DecodedImageCallback; using webrtc::DecodedImageCallback;
using webrtc::EncodedImage; using webrtc::EncodedImage;
using webrtc::I420VideoFrame; using webrtc::I420VideoFrame;
using webrtc::LogcatTraceContext; using webrtc::LogcatTraceContext;
using webrtc::RTPFragmentationHeader; using webrtc::RTPFragmentationHeader;
using webrtc::TextureVideoFrame;
using webrtc::TickTime;
using webrtc::VideoCodec; using webrtc::VideoCodec;
#endif #endif
@ -112,6 +117,7 @@ using webrtc::DataChannelInit;
using webrtc::DataChannelInterface; using webrtc::DataChannelInterface;
using webrtc::DataChannelObserver; using webrtc::DataChannelObserver;
using webrtc::IceCandidateInterface; using webrtc::IceCandidateInterface;
using webrtc::NativeHandle;
using webrtc::MediaConstraintsInterface; using webrtc::MediaConstraintsInterface;
using webrtc::MediaSourceInterface; using webrtc::MediaSourceInterface;
using webrtc::MediaStreamInterface; using webrtc::MediaStreamInterface;
@ -152,6 +158,12 @@ static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
// were attached by the JVM because of a Java->native call. // were attached by the JVM because of a Java->native call.
static pthread_key_t g_jni_ptr; static pthread_key_t g_jni_ptr;
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// Set in PeerConnectionFactory_initializeAndroidGlobals().
static bool factory_static_initialized = false;
#endif
// Return thread ID as a string. // Return thread ID as a string.
static std::string GetThreadId() { static std::string GetThreadId() {
char buf[21]; // Big enough to hold a kuint64max plus terminating NULL. char buf[21]; // Big enough to hold a kuint64max plus terminating NULL.
@ -257,10 +269,13 @@ class ClassReferenceHolder {
LoadClass(jni, "org/webrtc/DataChannel$Init"); LoadClass(jni, "org/webrtc/DataChannel$Init");
LoadClass(jni, "org/webrtc/DataChannel$State"); LoadClass(jni, "org/webrtc/DataChannel$State");
LoadClass(jni, "org/webrtc/IceCandidate"); LoadClass(jni, "org/webrtc/IceCandidate");
#ifdef ANDROID #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
LoadClass(jni, "android/graphics/SurfaceTexture");
LoadClass(jni, "android/opengl/EGLContext");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder"); LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder"); LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
#endif #endif
LoadClass(jni, "org/webrtc/MediaSource$State"); LoadClass(jni, "org/webrtc/MediaSource$State");
LoadClass(jni, "org/webrtc/MediaStream"); LoadClass(jni, "org/webrtc/MediaStream");
@ -349,14 +364,14 @@ jclass FindClass(JNIEnv* jni, const char* name) {
jclass GetObjectClass(JNIEnv* jni, jobject object) { jclass GetObjectClass(JNIEnv* jni, jobject object) {
jclass c = jni->GetObjectClass(object); jclass c = jni->GetObjectClass(object);
CHECK_EXCEPTION(jni) << "error during GetObjectClass"; CHECK_EXCEPTION(jni) << "error during GetObjectClass";
CHECK(c); CHECK(c) << "GetObjectClass returned NULL";
return c; return c;
} }
jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) { jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
jobject o = jni->GetObjectField(object, id); jobject o = jni->GetObjectField(object, id);
CHECK_EXCEPTION(jni) << "error during GetObjectField"; CHECK_EXCEPTION(jni) << "error during GetObjectField";
CHECK(o); CHECK(o) << "GetObjectField returned NULL";
return o; return o;
} }
@ -1054,6 +1069,38 @@ class VideoRendererWrapper : public VideoRendererInterface {
scoped_ptr<cricket::VideoRenderer> renderer_; scoped_ptr<cricket::VideoRenderer> renderer_;
}; };
// Wrapper for texture object in TextureVideoFrame.
class NativeHandleImpl : public NativeHandle {
public:
NativeHandleImpl() :
ref_count_(0), texture_object_(NULL), texture_id_(-1) {}
virtual ~NativeHandleImpl() {}
virtual int32_t AddRef() {
return ++ref_count_;
}
virtual int32_t Release() {
return --ref_count_;
}
virtual void* GetHandle() {
return texture_object_;
}
int GetTextureId() {
return texture_id_;
}
void SetTextureObject(void *texture_object, int texture_id) {
texture_object_ = reinterpret_cast<jobject>(texture_object);
texture_id_ = texture_id;
}
int32_t ref_count() {
return ref_count_;
}
private:
int32_t ref_count_;
jobject texture_object_;
int32_t texture_id_;
};
// Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer // Wrapper dispatching webrtc::VideoRendererInterface to a Java VideoRenderer
// instance. // instance.
class JavaVideoRendererWrapper : public VideoRendererInterface { class JavaVideoRendererWrapper : public VideoRendererInterface {
@ -1067,8 +1114,11 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
"(Lorg/webrtc/VideoRenderer$I420Frame;)V")), "(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
j_frame_class_(jni, j_frame_class_(jni,
FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")), FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
j_frame_ctor_id_(GetMethodID( j_i420_frame_ctor_id_(GetMethodID(
jni, *j_frame_class_, "<init>", "(II[I[Ljava/nio/ByteBuffer;)V")), jni, *j_frame_class_, "<init>", "(II[I[Ljava/nio/ByteBuffer;)V")),
j_texture_frame_ctor_id_(GetMethodID(
jni, *j_frame_class_, "<init>",
"(IILjava/lang/Object;I)V")),
j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) { j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
CHECK_EXCEPTION(jni); CHECK_EXCEPTION(jni);
} }
@ -1083,14 +1133,20 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE { virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE {
ScopedLocalRefFrame local_ref_frame(jni()); ScopedLocalRefFrame local_ref_frame(jni());
jobject j_frame = CricketToJavaFrame(frame); if (frame->GetNativeHandle() != NULL) {
jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame); jobject j_frame = CricketToJavaTextureFrame(frame);
CHECK_EXCEPTION(jni()); jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
CHECK_EXCEPTION(jni());
} else {
jobject j_frame = CricketToJavaI420Frame(frame);
jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
CHECK_EXCEPTION(jni());
}
} }
private: private:
// Return a VideoRenderer.I420Frame referring to the data in |frame|. // Return a VideoRenderer.I420Frame referring to the data in |frame|.
jobject CricketToJavaFrame(const cricket::VideoFrame* frame) { jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
jintArray strides = jni()->NewIntArray(3); jintArray strides = jni()->NewIntArray(3);
jint* strides_array = jni()->GetIntArrayElements(strides, NULL); jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
strides_array[0] = frame->GetYPitch(); strides_array[0] = frame->GetYPitch();
@ -1109,10 +1165,21 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
jni()->SetObjectArrayElement(planes, 1, u_buffer); jni()->SetObjectArrayElement(planes, 1, u_buffer);
jni()->SetObjectArrayElement(planes, 2, v_buffer); jni()->SetObjectArrayElement(planes, 2, v_buffer);
return jni()->NewObject( return jni()->NewObject(
*j_frame_class_, j_frame_ctor_id_, *j_frame_class_, j_i420_frame_ctor_id_,
frame->GetWidth(), frame->GetHeight(), strides, planes); frame->GetWidth(), frame->GetHeight(), strides, planes);
} }
// Return a VideoRenderer.I420Frame referring texture object in |frame|.
jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
NativeHandleImpl* handle =
reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
int texture_id = handle->GetTextureId();
return jni()->NewObject(
*j_frame_class_, j_texture_frame_ctor_id_,
frame->GetWidth(), frame->GetHeight(), texture_object, texture_id);
}
JNIEnv* jni() { JNIEnv* jni() {
return AttachCurrentThreadIfNeeded(); return AttachCurrentThreadIfNeeded();
} }
@ -1121,16 +1188,16 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
jmethodID j_set_size_id_; jmethodID j_set_size_id_;
jmethodID j_render_frame_id_; jmethodID j_render_frame_id_;
ScopedGlobalRef<jclass> j_frame_class_; ScopedGlobalRef<jclass> j_frame_class_;
jmethodID j_frame_ctor_id_; jmethodID j_i420_frame_ctor_id_;
jmethodID j_texture_frame_ctor_id_;
ScopedGlobalRef<jclass> j_byte_buffer_class_; ScopedGlobalRef<jclass> j_byte_buffer_class_;
}; };
#ifdef ANDROID #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// TODO(fischman): consider pulling MediaCodecVideoEncoder out of this file and // TODO(fischman): consider pulling MediaCodecVideoEncoder out of this file and
// into its own .h/.cc pair, if/when the JNI helper stuff above is extracted // into its own .h/.cc pair, if/when the JNI helper stuff above is extracted
// from this file. // from this file.
#include <android/log.h>
//#define TRACK_BUFFER_TIMING //#define TRACK_BUFFER_TIMING
#define TAG "MediaCodecVideo" #define TAG "MediaCodecVideo"
#ifdef TRACK_BUFFER_TIMING #ifdef TRACK_BUFFER_TIMING
@ -1141,6 +1208,9 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__) #define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__) #define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
// Set to false to switch HW video decoder back to byte buffer output.
#define HW_DECODER_USE_SURFACE true
// Color formats supported by encoder - should mirror supportedColorList // Color formats supported by encoder - should mirror supportedColorList
// from MediaCodecVideoEncoder.java // from MediaCodecVideoEncoder.java
enum COLOR_FORMATTYPE { enum COLOR_FORMATTYPE {
@ -1156,6 +1226,14 @@ enum COLOR_FORMATTYPE {
// Arbitrary interval to poll the codec for new outputs. // Arbitrary interval to poll the codec for new outputs.
enum { kMediaCodecPollMs = 10 }; enum { kMediaCodecPollMs = 10 };
// Media codec maximum output buffer ready timeout.
enum { kMediaCodecTimeoutMs = 500 };
// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
enum { kMediaCodecStatisticsIntervalMs = 3000 };
static int64_t GetCurrentTimeMs() {
return TickTime::Now().Ticks() / 1000000LL;
}
// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
// Android's MediaCodec SDK API behind the scenes to implement (hopefully) // Android's MediaCodec SDK API behind the scenes to implement (hopefully)
@ -1256,11 +1334,20 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
enum libyuv::FourCC encoder_fourcc_; // Encoder color space format. enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
int last_set_bitrate_kbps_; // Last-requested bitrate in kbps. int last_set_bitrate_kbps_; // Last-requested bitrate in kbps.
int last_set_fps_; // Last-requested frame rate. int last_set_fps_; // Last-requested frame rate.
int frames_received_; // Number of frames received by encoder. int64_t current_timestamp_us_; // Current frame timestamps in us.
int frames_dropped_; // Number of frames dropped by encoder. int frames_received_; // Number of frames received by encoder.
int frames_in_queue_; // Number of frames in encoder queue. int frames_dropped_; // Number of frames dropped by encoder.
int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame. int frames_in_queue_; // Number of frames in encoder queue.
int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame. int64_t start_time_ms_; // Start time for statistics.
int current_frames_; // Number of frames in the current statistics interval.
int current_bytes_; // Encoded bytes in the current statistics interval.
int current_encoding_time_ms_; // Overall encoding time in the current second
int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame.
int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame.
std::vector<int32_t> timestamps_; // Video frames timestamp queue.
std::vector<int64_t> render_times_ms_; // Video frames render time queue.
std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
// encoder input.
// Frame size in bytes fed to MediaCodec. // Frame size in bytes fed to MediaCodec.
int yuv_size_; int yuv_size_;
// True only when between a callback_->Encoded() call return a positive value // True only when between a callback_->Encoded() call return a positive value
@ -1427,7 +1514,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
CheckOnCodecThread(); CheckOnCodecThread();
JNIEnv* jni = AttachCurrentThreadIfNeeded(); JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni); ScopedLocalRefFrame local_ref_frame(jni);
ALOGD("InitEncodeOnCodecThread %d x %d", width, height); ALOGD("InitEncodeOnCodecThread %d x %d. Fps: %d", width, height, fps);
if (width == 0) { if (width == 0) {
width = width_; width = width_;
@ -1444,8 +1531,16 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
frames_received_ = 0; frames_received_ = 0;
frames_dropped_ = 0; frames_dropped_ = 0;
frames_in_queue_ = 0; frames_in_queue_ = 0;
current_timestamp_us_ = 0;
start_time_ms_ = GetCurrentTimeMs();
current_frames_ = 0;
current_bytes_ = 0;
current_encoding_time_ms_ = 0;
last_input_timestamp_ms_ = -1; last_input_timestamp_ms_ = -1;
last_output_timestamp_ms_ = -1; last_output_timestamp_ms_ = -1;
timestamps_.clear();
render_times_ms_.clear();
frame_rtc_times_ms_.clear();
// We enforce no extra stride/padding in the format creation step. // We enforce no extra stride/padding in the format creation step.
jobjectArray input_buffers = reinterpret_cast<jobjectArray>( jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
jni->CallObjectMethod(*j_media_codec_video_encoder_, jni->CallObjectMethod(*j_media_codec_video_encoder_,
@ -1505,23 +1600,23 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
} }
if (drop_next_input_frame_) { if (drop_next_input_frame_) {
ALOGV("Encoder drop frame - failed callback.");
drop_next_input_frame_ = false; drop_next_input_frame_ = false;
return WEBRTC_VIDEO_CODEC_OK; return WEBRTC_VIDEO_CODEC_OK;
} }
CHECK(frame_types->size() == 1) << "Unexpected stream count"; CHECK(frame_types->size() == 1) << "Unexpected stream count";
bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
CHECK(frame.width() == width_) << "Unexpected resolution change"; CHECK(frame.width() == width_) << "Unexpected resolution change";
CHECK(frame.height() == height_) << "Unexpected resolution change"; CHECK(frame.height() == height_) << "Unexpected resolution change";
bool key_frame = frame_types->front() != webrtc::kDeltaFrame;
// Check if we accumulated too many frames in encoder input buffers // Check if we accumulated too many frames in encoder input buffers
// so the encoder latency exceeds 100ms and drop frame if so. // or the encoder latency exceeds 70 ms and drop frame if so.
if (frames_in_queue_ > 0 && last_input_timestamp_ms_ > 0 && if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
last_output_timestamp_ms_ > 0) {
int encoder_latency_ms = last_input_timestamp_ms_ - int encoder_latency_ms = last_input_timestamp_ms_ -
last_output_timestamp_ms_; last_output_timestamp_ms_;
if (encoder_latency_ms > 100) { if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
ALOGV("Drop frame - encoder is behind by %d ms. Q size: %d", ALOGV("Drop frame - encoder is behind by %d ms. Q size: %d",
encoder_latency_ms, frames_in_queue_); encoder_latency_ms, frames_in_queue_);
frames_dropped_++; frames_dropped_++;
@ -1534,7 +1629,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
CHECK_EXCEPTION(jni); CHECK_EXCEPTION(jni);
if (j_input_buffer_index == -1) { if (j_input_buffer_index == -1) {
// Video codec falls behind - no input buffer available. // Video codec falls behind - no input buffer available.
ALOGV("Drop frame - no input buffers available"); ALOGV("Encoder drop frame - no input buffers available");
frames_dropped_++; frames_dropped_++;
return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887. return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
} }
@ -1544,7 +1639,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
} }
ALOGV("Encode frame # %d. Buffer # %d. TS: %lld.", ALOGV("Encode frame # %d. Buffer # %d. TS: %lld.",
frames_received_, j_input_buffer_index, frame.render_time_ms()); frames_received_, j_input_buffer_index, current_timestamp_us_ / 1000);
jobject j_input_buffer = input_buffers_[j_input_buffer_index]; jobject j_input_buffer = input_buffers_[j_input_buffer_index];
uint8* yuv_buffer = uint8* yuv_buffer =
@ -1552,21 +1647,30 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
CHECK_EXCEPTION(jni); CHECK_EXCEPTION(jni);
CHECK(yuv_buffer) << "Indirect buffer??"; CHECK(yuv_buffer) << "Indirect buffer??";
CHECK(!libyuv::ConvertFromI420( CHECK(!libyuv::ConvertFromI420(
frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane), frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane), frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane), frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
yuv_buffer, width_, width_, height_, encoder_fourcc_)) yuv_buffer, width_,
width_, height_,
encoder_fourcc_))
<< "ConvertFromI420 failed"; << "ConvertFromI420 failed";
jlong timestamp_us = frame.render_time_ms() * 1000; last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
last_input_timestamp_ms_ = frame.render_time_ms();
frames_in_queue_++; frames_in_queue_++;
// Save input image timestamps for later output
timestamps_.push_back(frame.timestamp());
render_times_ms_.push_back(frame.render_time_ms());
frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_encode_method_, j_encode_method_,
key_frame, key_frame,
j_input_buffer_index, j_input_buffer_index,
yuv_size_, yuv_size_,
timestamp_us); current_timestamp_us_);
CHECK_EXCEPTION(jni); CHECK_EXCEPTION(jni);
current_timestamp_us_ += 1000000 / last_set_fps_;
if (!encode_status || !DeliverPendingOutputs(jni)) { if (!encode_status || !DeliverPendingOutputs(jni)) {
ResetCodec(); ResetCodec();
return WEBRTC_VIDEO_CODEC_ERROR; return WEBRTC_VIDEO_CODEC_ERROR;
@ -1610,12 +1714,16 @@ int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
} }
JNIEnv* jni = AttachCurrentThreadIfNeeded(); JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni); ScopedLocalRefFrame local_ref_frame(jni);
last_set_bitrate_kbps_ = new_bit_rate; if (new_bit_rate > 0) {
last_set_fps_ = frame_rate; last_set_bitrate_kbps_ = new_bit_rate;
}
if (frame_rate > 0) {
last_set_fps_ = frame_rate;
}
bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_, bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_set_rates_method_, j_set_rates_method_,
new_bit_rate, last_set_bitrate_kbps_,
frame_rate); last_set_fps_);
CHECK_EXCEPTION(jni); CHECK_EXCEPTION(jni);
if (!ret) { if (!ret) {
ResetCodec(); ResetCodec();
@ -1665,8 +1773,9 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
jobject j_output_buffer_info = jni->CallObjectMethod( jobject j_output_buffer_info = jni->CallObjectMethod(
*j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
CHECK_EXCEPTION(jni); CHECK_EXCEPTION(jni);
if (IsNull(jni, j_output_buffer_info)) if (IsNull(jni, j_output_buffer_info)) {
break; break;
}
int output_buffer_index = int output_buffer_index =
GetOutputBufferInfoIndex(jni, j_output_buffer_info); GetOutputBufferInfoIndex(jni, j_output_buffer_info);
@ -1675,31 +1784,62 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
return false; return false;
} }
jlong capture_time_ms = // Get frame timestamps from a queue.
last_output_timestamp_ms_ =
GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
1000; 1000;
last_output_timestamp_ms_ = capture_time_ms; int32_t timestamp = timestamps_.front();
timestamps_.erase(timestamps_.begin());
int64_t render_time_ms = render_times_ms_.front();
render_times_ms_.erase(render_times_ms_.begin());
int64_t frame_encoding_time_ms = GetCurrentTimeMs() -
frame_rtc_times_ms_.front();
frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
frames_in_queue_--; frames_in_queue_--;
ALOGV("Encoder got output buffer # %d. TS: %lld. Latency: %lld",
output_buffer_index, last_output_timestamp_ms_,
last_input_timestamp_ms_ - last_output_timestamp_ms_);
// Extract payload and key frame flag.
int32_t callback_status = 0; int32_t callback_status = 0;
jobject j_output_buffer =
GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
uint8* payload = reinterpret_cast<uint8_t*>(
jni->GetDirectBufferAddress(j_output_buffer));
CHECK_EXCEPTION(jni);
ALOGV("Encoder got output buffer # %d. Size: %d. TS: %lld. Latency: %lld."
" EncTime: %lld",
output_buffer_index, payload_size, last_output_timestamp_ms_,
last_input_timestamp_ms_ - last_output_timestamp_ms_,
frame_encoding_time_ms);
// Calculate and print encoding statistics - every 3 seconds.
current_frames_++;
current_bytes_ += payload_size;
current_encoding_time_ms_ += frame_encoding_time_ms;
int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
current_frames_ > 0) {
ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
" encTime: %d for last %d ms",
current_bytes_ * 8 / statistic_time_ms,
last_set_bitrate_kbps_,
(current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
current_encoding_time_ms_ / current_frames_, statistic_time_ms);
start_time_ms_ = GetCurrentTimeMs();
current_frames_ = 0;
current_bytes_= 0;
current_encoding_time_ms_ = 0;
}
// Callback - return encoded frame.
if (callback_) { if (callback_) {
jobject j_output_buffer =
GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
uint8* payload = reinterpret_cast<uint8_t*>(
jni->GetDirectBufferAddress(j_output_buffer));
CHECK_EXCEPTION(jni);
scoped_ptr<webrtc::EncodedImage> image( scoped_ptr<webrtc::EncodedImage> image(
new webrtc::EncodedImage(payload, payload_size, payload_size)); new webrtc::EncodedImage(payload, payload_size, payload_size));
image->_encodedWidth = width_; image->_encodedWidth = width_;
image->_encodedHeight = height_; image->_encodedHeight = height_;
// Convert capture time to 90 kHz RTP timestamp. image->_timeStamp = timestamp;
image->_timeStamp = static_cast<uint32_t>(90 * capture_time_ms); image->capture_time_ms_ = render_time_ms;
image->capture_time_ms_ = capture_time_ms;
image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame); image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
image->_completeFrame = true; image->_completeFrame = true;
@ -1722,6 +1862,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
callback_status = callback_->Encoded(*image, &info, &header); callback_status = callback_->Encoded(*image, &info, &header);
} }
// Return output buffer back to the encoder.
bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_release_output_buffer_method_, j_release_output_buffer_method_,
output_buffer_index); output_buffer_index);
@ -1731,10 +1872,11 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
return false; return false;
} }
if (callback_status > 0) if (callback_status > 0) {
drop_next_input_frame_ = true; drop_next_input_frame_ = true;
// Theoretically could handle callback_status<0 here, but unclear what that // Theoretically could handle callback_status<0 here, but unclear what that
// would mean for us. // would mean for us.
}
} }
return true; return true;
@ -1809,6 +1951,8 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
explicit MediaCodecVideoDecoder(JNIEnv* jni); explicit MediaCodecVideoDecoder(JNIEnv* jni);
virtual ~MediaCodecVideoDecoder(); virtual ~MediaCodecVideoDecoder();
static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);
virtual int32_t InitDecode(const VideoCodec* codecSettings, virtual int32_t InitDecode(const VideoCodec* codecSettings,
int32_t numberOfCores) OVERRIDE; int32_t numberOfCores) OVERRIDE;
@ -1834,13 +1978,29 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
int32_t InitDecodeOnCodecThread(); int32_t InitDecodeOnCodecThread();
int32_t ReleaseOnCodecThread(); int32_t ReleaseOnCodecThread();
int32_t DecodeOnCodecThread(const EncodedImage& inputImage); int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
// Deliver any outputs pending in the MediaCodec to our |callback_| and return
// true on success.
bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
bool key_frame_required_; bool key_frame_required_;
bool inited_; bool inited_;
bool use_surface_;
VideoCodec codec_; VideoCodec codec_;
I420VideoFrame decoded_image_; I420VideoFrame decoded_image_;
NativeHandleImpl native_handle_;
DecodedImageCallback* callback_; DecodedImageCallback* callback_;
int frames_received_; // Number of frames received by decoder. int frames_received_; // Number of frames received by decoder.
int frames_decoded_; // Number of frames decoded by decoder
int64_t start_time_ms_; // Start time for statistics.
int current_frames_; // Number of frames in the current statistics interval.
int current_bytes_; // Encoded bytes in the current statistics interval.
int current_decoding_time_ms_; // Overall decoding time in the current second
uint32_t max_pending_frames_; // Maximum number of pending input frames
std::vector<int32_t> timestamps_;
std::vector<int64_t> ntp_times_ms_;
std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
// decoder input.
// State that is constant for the lifetime of this object once the ctor // State that is constant for the lifetime of this object once the ctor
// returns. // returns.
@ -1853,6 +2013,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jmethodID j_queue_input_buffer_method_; jmethodID j_queue_input_buffer_method_;
jmethodID j_dequeue_output_buffer_method_; jmethodID j_dequeue_output_buffer_method_;
jmethodID j_release_output_buffer_method_; jmethodID j_release_output_buffer_method_;
// MediaCodecVideoDecoder fields.
jfieldID j_input_buffers_field_; jfieldID j_input_buffers_field_;
jfieldID j_output_buffers_field_; jfieldID j_output_buffers_field_;
jfieldID j_color_format_field_; jfieldID j_color_format_field_;
@ -1860,14 +2021,38 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jfieldID j_height_field_; jfieldID j_height_field_;
jfieldID j_stride_field_; jfieldID j_stride_field_;
jfieldID j_slice_height_field_; jfieldID j_slice_height_field_;
jfieldID j_surface_texture_field_;
jfieldID j_textureID_field_;
// MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
jfieldID j_info_index_field_;
jfieldID j_info_offset_field_;
jfieldID j_info_size_field_;
jfieldID j_info_presentation_timestamp_us_field_;
// Global references; must be deleted in Release(). // Global references; must be deleted in Release().
std::vector<jobject> input_buffers_; std::vector<jobject> input_buffers_;
jobject surface_texture_;
// Render EGL context.
static jobject render_egl_context_;
}; };
jobject MediaCodecVideoDecoder::render_egl_context_ = NULL;
int MediaCodecVideoDecoder::SetAndroidObjects(JNIEnv* jni,
jobject render_egl_context) {
if (render_egl_context_) {
jni->DeleteGlobalRef(render_egl_context_);
}
render_egl_context_ = jni->NewGlobalRef(render_egl_context);
ALOGD("VideoDecoder EGL context set");
return 0;
}
MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni) : MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni) :
key_frame_required_(true), key_frame_required_(true),
inited_(false), inited_(false),
use_surface_(HW_DECODER_USE_SURFACE),
codec_thread_(new Thread()), codec_thread_(new Thread()),
j_media_codec_video_decoder_class_( j_media_codec_video_decoder_class_(
jni, jni,
@ -1883,9 +2068,9 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni) :
codec_thread_->SetName("MediaCodecVideoDecoder", NULL); codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
j_init_decode_method_ = GetMethodID(jni, j_init_decode_method_ = GetMethodID(
*j_media_codec_video_decoder_class_, jni, *j_media_codec_video_decoder_class_, "initDecode",
"initDecode", "(II)Z"); "(IIZLandroid/opengl/EGLContext;)Z");
j_release_method_ = j_release_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
j_dequeue_input_buffer_method_ = GetMethodID( j_dequeue_input_buffer_method_ = GetMethodID(
@ -1893,9 +2078,10 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni) :
j_queue_input_buffer_method_ = GetMethodID( j_queue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
j_dequeue_output_buffer_method_ = GetMethodID( j_dequeue_output_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", "()I"); jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
"(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
j_release_output_buffer_method_ = GetMethodID( j_release_output_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(I)Z"); jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(IZ)Z");
j_input_buffers_field_ = GetFieldID( j_input_buffers_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, jni, *j_media_codec_video_decoder_class_,
@ -1913,6 +2099,22 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni) :
jni, *j_media_codec_video_decoder_class_, "stride", "I"); jni, *j_media_codec_video_decoder_class_, "stride", "I");
j_slice_height_field_ = GetFieldID( j_slice_height_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
j_textureID_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "textureID", "I");
j_surface_texture_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
"Landroid/graphics/SurfaceTexture;");
jclass j_decoder_output_buffer_info_class = FindClass(jni,
"org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
j_info_index_field_ = GetFieldID(
jni, j_decoder_output_buffer_info_class, "index", "I");
j_info_offset_field_ = GetFieldID(
jni, j_decoder_output_buffer_info_class, "offset", "I");
j_info_size_field_ = GetFieldID(
jni, j_decoder_output_buffer_info_class, "size", "I");
j_info_presentation_timestamp_us_field_ = GetFieldID(
jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
memset(&codec_, 0, sizeof(codec_));
@ -1940,6 +2142,7 @@ int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
// Always start with a complete key frame.
key_frame_required_ = true;
frames_received_ = 0;
frames_decoded_ = 0;
// Call Java init.
return codec_thread_->Invoke<int32_t>(
@ -1950,28 +2153,50 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
CheckOnCodecThread();
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
ALOGD("InitDecodeOnCodecThread: %d x %d. fps: %d",
codec_.width, codec_.height, codec_.maxFramerate);
bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
j_init_decode_method_,
codec_.width,
codec_.height,
use_surface_,
render_egl_context_);
CHECK_EXCEPTION(jni);
if (!success) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
inited_ = true;
max_pending_frames_ = 0;
if (use_surface_) {
max_pending_frames_ = 1;
}
start_time_ms_ = GetCurrentTimeMs();
current_frames_ = 0;
current_bytes_ = 0;
current_decoding_time_ms_ = 0;
timestamps_.clear();
ntp_times_ms_.clear();
frame_rtc_times_ms_.clear();
jobjectArray input_buffers = (jobjectArray)GetObjectField(
jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
size_t num_input_buffers = jni->GetArrayLength(input_buffers);
input_buffers_.resize(num_input_buffers);
for (size_t i = 0; i < num_input_buffers; ++i) {
input_buffers_[i] =
jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
CHECK_EXCEPTION(jni);
}
if (use_surface_) {
jobject surface_texture = GetObjectField(
jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
surface_texture_ = jni->NewGlobalRef(surface_texture);
}
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
return WEBRTC_VIDEO_CODEC_OK;
}
@ -1981,15 +2206,29 @@ int32_t MediaCodecVideoDecoder::Release() {
}
int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
if (!inited_) {
return WEBRTC_VIDEO_CODEC_OK;
}
CheckOnCodecThread();
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ALOGD("DecoderRelease: Frames received: %d.", frames_received_);
ScopedLocalRefFrame local_ref_frame(jni);
for (size_t i = 0; i < input_buffers_.size(); i++) {
jni->DeleteGlobalRef(input_buffers_[i]);
}
input_buffers_.clear();
if (use_surface_) {
// Before deleting texture object make sure it is no longer referenced
// by any TextureVideoFrame.
int32_t waitTimeoutUs = 3000000; // 3 second wait
while (waitTimeoutUs > 0 && native_handle_.ref_count() > 0) {
ALOGD("Current Texture RefCnt: %d", native_handle_.ref_count());
usleep(30000);
waitTimeoutUs -= 30000;
}
ALOGD("TextureRefCnt: %d", native_handle_.ref_count());
jni->DeleteGlobalRef(surface_texture_);
}
jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
CHECK_EXCEPTION(jni);
inited_ = false;
@ -2052,6 +2291,21 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
// Try to drain the decoder and wait until output is not too
// much behind the input.
if (frames_received_ > frames_decoded_ + max_pending_frames_) {
ALOGV("Wait for output...");
if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
Reset();
return WEBRTC_VIDEO_CODEC_ERROR;
}
if (frames_received_ > frames_decoded_ + max_pending_frames_) {
ALOGE("Output buffer dequeue timeout");
Reset();
return WEBRTC_VIDEO_CODEC_ERROR;
}
}
// Get input buffer.
int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
j_dequeue_input_buffer_method_);
@ -2075,10 +2329,17 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
Reset();
return WEBRTC_VIDEO_CODEC_ERROR;
}
ALOGV("Decoder frame in # %d. Buffer # %d. Size: %d",
frames_received_, j_input_buffer_index, inputImage._length);
memcpy(buffer, inputImage._buffer, inputImage._length);
// Save input image timestamps for later output.
frames_received_++;
current_bytes_ += inputImage._length;
timestamps_.push_back(inputImage._timeStamp);
ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
// Feed input to decoder.
jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
@ -2093,26 +2354,57 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
return WEBRTC_VIDEO_CODEC_ERROR;
}
// Try to drain the decoder
if (!DeliverPendingOutputs(jni, 0)) {
ALOGE("DeliverPendingOutputs error");
Reset();
return WEBRTC_VIDEO_CODEC_ERROR;
}
return WEBRTC_VIDEO_CODEC_OK;
}
bool MediaCodecVideoDecoder::DeliverPendingOutputs(
JNIEnv* jni, int dequeue_timeout_us) {
if (frames_received_ <= frames_decoded_) {
// No need to query for output buffers - decoder is drained.
return true;
}
// Get decoder output.
jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
*j_media_codec_video_decoder_,
j_dequeue_output_buffer_method_,
dequeue_timeout_us);
CHECK_EXCEPTION(jni);
if (IsNull(jni, j_decoder_output_buffer_info)) {
return true;
}
// Extract data from Java DecoderOutputBufferInfo.
int output_buffer_index =
GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
if (output_buffer_index < 0) {
ALOGE("dequeueOutputBuffer error : %d", output_buffer_index);
Reset();
return false;
}
int output_buffer_offset =
GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
int output_buffer_size =
GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
CHECK_EXCEPTION(jni);
// Extract data from Java ByteBuffer.
jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
jobject output_buffer =
jni->GetObjectArrayElement(output_buffers, output_buffer_index);
uint8_t* payload =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(output_buffer));
CHECK_EXCEPTION(jni);
payload += output_buffer_offset;
// Get decoded video frame properties.
int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
j_color_format_field_);
int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
@ -2120,52 +2412,100 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
j_slice_height_field_);
int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
j_textureID_field_);
if (!use_surface_ && output_buffer_size < width * height * 3 / 2) {
ALOGE("Insufficient output buffer size: %d", output_buffer_size);
Reset();
return false;
}
// Get frame timestamps from a queue.
int32_t timestamp = timestamps_.front();
timestamps_.erase(timestamps_.begin());
int64_t ntp_time_ms = ntp_times_ms_.front();
ntp_times_ms_.erase(ntp_times_ms_.begin());
int64_t frame_decoding_time_ms = GetCurrentTimeMs() -
frame_rtc_times_ms_.front();
frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
" DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
color_format, output_buffer_size, frame_decoding_time_ms);
// Create yuv420 frame.
if (!use_surface_) {
if (color_format == COLOR_FormatYUV420Planar) {
decoded_image_.CreateFrame(
stride * slice_height, payload,
(stride * slice_height) / 4, payload + (stride * slice_height),
(stride * slice_height) / 4, payload + (5 * stride * slice_height / 4),
width, height,
stride, stride / 2, stride / 2);
} else {
// All other supported formats are nv12.
decoded_image_.CreateEmptyFrame(width, height, width,
width / 2, width / 2);
libyuv::NV12ToI420(
payload, stride,
payload + stride * slice_height, stride,
decoded_image_.buffer(webrtc::kYPlane),
decoded_image_.stride(webrtc::kYPlane),
decoded_image_.buffer(webrtc::kUPlane),
decoded_image_.stride(webrtc::kUPlane),
decoded_image_.buffer(webrtc::kVPlane),
decoded_image_.stride(webrtc::kVPlane),
width, height);
}
}
// Return output buffer back to codec.
bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
j_release_output_buffer_method_,
output_buffer_index,
use_surface_);
CHECK_EXCEPTION(jni);
if (!success) {
ALOGE("releaseOutputBuffer error");
Reset();
return false;
}
// Calculate and print decoding statistics - every 3 seconds.
frames_decoded_++;
current_frames_++;
current_decoding_time_ms_ += frame_decoding_time_ms;
int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
current_frames_ > 0) {
ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
current_bytes_ * 8 / statistic_time_ms,
(current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
current_decoding_time_ms_ / current_frames_, statistic_time_ms);
start_time_ms_ = GetCurrentTimeMs();
current_frames_ = 0;
current_bytes_= 0;
current_decoding_time_ms_ = 0;
}
// Callback - output decoded frame.
int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
if (use_surface_) {
native_handle_.SetTextureObject(surface_texture_, texture_id);
TextureVideoFrame texture_image(
&native_handle_, width, height, timestamp, 0);
texture_image.set_ntp_time_ms(ntp_time_ms);
callback_status = callback_->Decoded(texture_image);
} else {
decoded_image_.set_timestamp(timestamp);
decoded_image_.set_ntp_time_ms(ntp_time_ms);
callback_status = callback_->Decoded(decoded_image_);
}
if (callback_status > 0) {
ALOGE("callback error");
}
return true;
}
int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
@ -2183,6 +2523,19 @@ int32_t MediaCodecVideoDecoder::Reset() {
}
void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
if (!inited_) {
return;
}
// We only ever send one message to |this| directly (not through a Bind()'d
// functor), so expect no ID/data.
CHECK(!msg->message_id) << "Unexpected message!";
CHECK(!msg->pdata) << "Unexpected message!";
CheckOnCodecThread();
DeliverPendingOutputs(jni, 0);
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}
class MediaCodecVideoDecoderFactory
@ -2226,7 +2579,7 @@ void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
delete decoder;
}
#endif  // #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
} // anonymous namespace
@ -2403,13 +2756,20 @@ JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
JNIEnv* jni, jclass, jobject context,
jboolean initialize_audio, jboolean initialize_video,
jobject render_egl_context) {
CHECK(g_jvm) << "JNI_OnLoad failed to run?";
bool failure = false;
if (!factory_static_initialized) {
if (initialize_video)
failure |= webrtc::VideoEngine::SetAndroidObjects(g_jvm, context);
if (initialize_audio)
failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context);
factory_static_initialized = true;
}
if (initialize_video)
failure |= MediaCodecVideoDecoder::SetAndroidObjects(jni,
render_egl_context);
return !failure;
}
#endif // defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
@ -2456,7 +2816,7 @@ JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
<< "Failed to start threads";
scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;
scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
encoder_factory.reset(new MediaCodecVideoEncoderFactory());
decoder_factory.reset(new MediaCodecVideoDecoderFactory());
#endif
View File
@ -27,14 +27,24 @@
package org.webrtc;

import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import java.nio.ByteBuffer;

// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
@ -49,7 +59,7 @@ class MediaCodecVideoDecoder {
private static final String TAG = "MediaCodecVideoDecoder";
private static final int DEQUEUE_INPUT_TIMEOUT = 500000;  // 500 ms timeout.
private Thread mediaCodecThread;
private MediaCodec mediaCodec;
private ByteBuffer[] inputBuffers;
@ -74,12 +84,21 @@ class MediaCodecVideoDecoder {
private int height;
private int stride;
private int sliceHeight;
private boolean useSurface;
private int textureID = -1;
private SurfaceTexture surfaceTexture = null;
private Surface surface = null;
private float[] stMatrix = new float[16];
private EGLDisplay eglDisplay = EGL14.EGL_NO_DISPLAY;
private EGLContext eglContext = EGL14.EGL_NO_CONTEXT;
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
private MediaCodecVideoDecoder() { }

// Helper struct for findVp8HwDecoder() below.
private static class DecoderProperties {
public DecoderProperties(String codecName, int colorFormat) {
this.codecName = codecName;
this.colorFormat = colorFormat;
}
@ -107,26 +126,32 @@ class MediaCodecVideoDecoder {
continue;  // No VP8 support in this codec; try the next one.
}
Log.d(TAG, "Found candidate decoder " + name);
// Check if this is supported HW decoder.
boolean supportedCodec = false;
for (String hwCodecPrefix : supportedHwCodecPrefixes) {
if (name.startsWith(hwCodecPrefix)) {
supportedCodec = true;
break;
}
}
if (!supportedCodec) {
continue;
}
// Check if codec supports either yuv420 or nv12.
CodecCapabilities capabilities =
info.getCapabilitiesForType(VP8_MIME_TYPE);
for (int colorFormat : capabilities.colorFormats) {
Log.d(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
for (int supportedColorFormat : supportedColorList) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
// Found supported HW VP8 decoder.
Log.d(TAG, "Found target decoder " + name +
". Color: 0x" + Integer.toHexString(codecColorFormat));
return new DecoderProperties(name, codecColorFormat);
}
}
}
@ -146,31 +171,166 @@ class MediaCodecVideoDecoder {
}
}
private void checkEglError(String msg) {
int error;
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
Log.e(TAG, msg + ": EGL Error: 0x" + Integer.toHexString(error));
throw new RuntimeException(
msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
private void checkGlError(String msg) {
int error;
if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, msg + ": GL Error: 0x" + Integer.toHexString(error));
throw new RuntimeException(
msg + ": GL Error: 0x " + Integer.toHexString(error));
}
}
private void eglSetup(EGLContext sharedContext, int width, int height) {
Log.d(TAG, "EGL setup");
if (sharedContext == null) {
sharedContext = EGL14.EGL_NO_CONTEXT;
}
eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("Unable to get EGL14 display");
}
int[] version = new int[2];
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
throw new RuntimeException("Unable to initialize EGL14");
}
// Configure EGL for pbuffer and OpenGL ES 2.0.
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
EGL14.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(eglDisplay, attribList, 0, configs, 0,
configs.length, numConfigs, 0)) {
throw new RuntimeException("Unable to find RGB888 EGL config");
}
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
eglContext = EGL14.eglCreateContext(eglDisplay, configs[0], sharedContext,
attrib_list, 0);
checkEglError("eglCreateContext");
if (eglContext == null) {
throw new RuntimeException("Null EGL context");
}
// Create a pbuffer surface.
int[] surfaceAttribs = {
EGL14.EGL_WIDTH, width,
EGL14.EGL_HEIGHT, height,
EGL14.EGL_NONE
};
eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, configs[0],
surfaceAttribs, 0);
checkEglError("eglCreatePbufferSurface");
if (eglSurface == null) {
throw new RuntimeException("EGL surface was null");
}
}
private void eglRelease() {
Log.d(TAG, "EGL release");
if (eglDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglDestroySurface(eglDisplay, eglSurface);
EGL14.eglDestroyContext(eglDisplay, eglContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(eglDisplay);
}
eglDisplay = EGL14.EGL_NO_DISPLAY;
eglContext = EGL14.EGL_NO_CONTEXT;
eglSurface = EGL14.EGL_NO_SURFACE;
}
private void makeCurrent() {
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
private boolean initDecode(int width, int height, boolean useSurface,
EGLContext sharedContext) {
if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?");
}
if (useSurface && sharedContext == null) {
throw new RuntimeException("No shared EGL context.");
}
DecoderProperties properties = findVp8HwDecoder();
if (properties == null) {
throw new RuntimeException("Cannot find HW VP8 decoder");
}
Log.d(TAG, "Java initDecode: " + width + " x " + height +
". Color: 0x" + Integer.toHexString(properties.colorFormat) +
". Use Surface: " + useSurface );
if (sharedContext != null) {
Log.d(TAG, "Decoder shared EGL Context: " + sharedContext);
}
mediaCodecThread = Thread.currentThread();
try {
Surface decodeSurface = null;
this.width = width;
this.height = height;
this.useSurface = useSurface;
stride = width;
sliceHeight = height;
if (useSurface) {
// Create shared EGL context.
eglSetup(sharedContext, width, height);
makeCurrent();
// Create output surface
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
checkGlError("glGenTextures");
textureID = textures[0];
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameter");
Log.d(TAG, "Video decoder TextureID = " + textureID);
surfaceTexture = new SurfaceTexture(textureID);
surface = new Surface(surfaceTexture);
decodeSurface = surface;
}
MediaFormat format =
MediaFormat.createVideoFormat(VP8_MIME_TYPE, width, height);
if (!useSurface) {
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
}
Log.d(TAG, " Format: " + format);
mediaCodec = MediaCodec.createByCodecName(properties.codecName);
if (mediaCodec == null) {
return false;
}
mediaCodec.configure(format, decodeSurface, null, 0);
mediaCodec.start();
colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers();
@ -195,6 +355,19 @@ class MediaCodecVideoDecoder {
}
mediaCodec = null;
mediaCodecThread = null;
if (useSurface) {
surface.release();
surface = null;
surfaceTexture = null;
if (textureID >= 0) {
int[] textures = new int[1];
textures[0] = textureID;
Log.d(TAG, "Delete video decoder TextureID " + textureID);
GLES20.glDeleteTextures(1, textures, 0);
checkGlError("glDeleteTextures");
}
eglRelease();
}
}

// Dequeue an input buffer and return its index, -1 if no input buffer is
@ -202,7 +375,7 @@ class MediaCodecVideoDecoder {
private int dequeueInputBuffer() {
checkOnMediaCodecThread();
try {
return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
} catch (IllegalStateException e) {
Log.e(TAG, "dequeueIntputBuffer failed", e);
return -2;
@ -224,23 +397,40 @@ class MediaCodecVideoDecoder {
}
}
// Helper struct for dequeueOutputBuffer() below.
private static class DecoderOutputBufferInfo {
public DecoderOutputBufferInfo(
int index, int offset, int size, long presentationTimestampUs) {
this.index = index;
this.offset = offset;
this.size = size;
this.presentationTimestampUs = presentationTimestampUs;
}
private final int index;
private final int offset;
private final int size;
private final long presentationTimestampUs;
}
// Dequeue and return an output buffer index, -1 if no output
// buffer available or -2 if error happened.
private DecoderOutputBufferInfo dequeueOutputBuffer(int dequeueTimeoutUs) {
checkOnMediaCodecThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mediaCodec.getOutputBuffers();
Log.d(TAG, "Output buffers changed: " + outputBuffers.length);
} else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat format = mediaCodec.getOutputFormat();
Log.d(TAG, "Format changed: " + format.toString());
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Log.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
// Check if new color space is supported.
@ -253,7 +443,7 @@ class MediaCodecVideoDecoder {
}
if (!validColorFormat) {
Log.e(TAG, "Non supported color format");
return new DecoderOutputBufferInfo(-1, 0, 0, -1);
}
}
if (format.containsKey("stride")) {
@ -267,21 +457,28 @@ class MediaCodecVideoDecoder {
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
}
result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
}
return result; if (result >= 0) {
return new DecoderOutputBufferInfo(result, info.offset, info.size,
info.presentationTimeUs);
}
return null;
} catch (IllegalStateException e) {
Log.e(TAG, "dequeueOutputBuffer failed", e);
return new DecoderOutputBufferInfo(-1, 0, 0, -1);
}
}

// Release a dequeued output buffer back to the codec for re-use. Return
// false if the codec is no longer operable.
private boolean releaseOutputBuffer(int index, boolean render) {
checkOnMediaCodecThread();
try {
if (!useSurface) {
render = false;
}
mediaCodec.releaseOutputBuffer(index, render);
return true;
} catch (IllegalStateException e) {
Log.e(TAG, "releaseOutputBuffer failed", e);
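
For context, a minimal standalone sketch of the feed-then-drain MediaCodec pattern this wrapper exposes to the native side. Class and variable names are hypothetical, setup, format changes and error handling are omitted, and the 500 ms input timeout mirrors DEQUEUE_INPUT_TIMEOUT above:

import java.nio.ByteBuffer;
import android.media.MediaCodec;
import android.view.Surface;

class Vp8DecodeLoopSketch {
  static void decodeOnce(MediaCodec codec, byte[] encodedFrame, long ptsUs,
      Surface outputSurface) {
    // Feed one encoded frame if an input buffer is available within 500 ms.
    int inputIndex = codec.dequeueInputBuffer(500000);
    if (inputIndex >= 0) {
      ByteBuffer input = codec.getInputBuffers()[inputIndex];
      input.clear();
      input.put(encodedFrame);
      codec.queueInputBuffer(inputIndex, 0, encodedFrame.length, ptsUs, 0);
    }
    // Drain whatever output is ready without blocking (timeout 0) and return
    // each buffer to the codec; render == true sends it to the Surface.
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outputIndex = codec.dequeueOutputBuffer(info, 0);
    while (outputIndex >= 0) {
      codec.releaseOutputBuffer(outputIndex, outputSurface != null);
      outputIndex = codec.dequeueOutputBuffer(info, 0);
    }
  }
}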
View File
@ -78,7 +78,7 @@ class MediaCodecVideoEncoder {
// Helper struct for findVp8HwEncoder() below.
private static class EncoderProperties {
public EncoderProperties(String codecName, int colorFormat) {
this.codecName = codecName;
this.colorFormat = colorFormat;
}
@ -106,26 +106,33 @@ class MediaCodecVideoEncoder {
continue;  // No VP8 support in this codec; try the next one.
}
Log.d(TAG, "Found candidate encoder " + name);
// Check if this is supported HW encoder.
boolean supportedCodec = false;
for (String hwCodecPrefix : supportedHwCodecPrefixes) {
if (name.startsWith(hwCodecPrefix)) {
supportedCodec = true;
break;
}
}
if (!supportedCodec) {
continue;
}
CodecCapabilities capabilities =
info.getCapabilitiesForType(VP8_MIME_TYPE);
for (int colorFormat : capabilities.colorFormats) {
Log.d(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
// Check if codec supports either yuv420 or nv12.
for (int supportedColorFormat : supportedColorList) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
// Found supported HW VP8 encoder.
Log.d(TAG, "Found target encoder " + name +
". Color: 0x" + Integer.toHexString(codecColorFormat));
return new EncoderProperties(name, codecColorFormat);
}
}
}
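
The same detection order now applies to both decoder and encoder: match the codec name against the known HW prefixes first, and only query color-format capabilities for codecs that pass that check, so SW implementations are never queried. A hypothetical standalone sketch of the pattern (class name, parameters and prefix/color lists are illustrative, not part of this change):

import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;

class HwVp8CodecFinderSketch {
  static String findHwVp8Decoder(String[] hwPrefixes, int[] wantedColors) {
    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
      MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
      if (info.isEncoder()) {
        continue;
      }
      boolean supportsVp8 = false;
      for (String type : info.getSupportedTypes()) {
        if (type.equals("video/x-vnd.on2.vp8")) {
          supportsVp8 = true;
        }
      }
      if (!supportsVp8) {
        continue;
      }
      boolean hwCodec = false;
      for (String prefix : hwPrefixes) {
        if (info.getName().startsWith(prefix)) {
          hwCodec = true;
        }
      }
      if (!hwCodec) {
        continue;  // Do not touch capabilities of SW implementations.
      }
      CodecCapabilities caps =
          info.getCapabilitiesForType("video/x-vnd.on2.vp8");
      for (int wanted : wantedColors) {
        for (int color : caps.colorFormats) {
          if (color == wanted) {
            return info.getName();  // Supported HW VP8 decoder found.
          }
        }
      }
    }
    return null;
  }
}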
View File
@ -46,8 +46,12 @@ public class PeerConnectionFactory {
// Callers may specify either |initializeAudio| or |initializeVideo| as false
// to skip initializing the respective engine (and avoid the need for the
// respective permissions).
// |renderEGLContext| can be provided to support HW video decoding to
// texture and will be used to create a shared EGL context on video
// decoding thread.
public static native boolean initializeAndroidGlobals(
Object context, boolean initializeAudio, boolean initializeVideo,
Object renderEGLContext);
Object renderEGLContext);
public PeerConnectionFactory() {
nativeFactory = nativeCreatePeerConnectionFactory();
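
A possible call site, mirroring the AppRTCDemo change further down: the shared EGL context comes from the renderer so the decoder can output to a texture drawn there. This is an illustrative sketch only; |appContext| stands in for any Android Context:

import org.webrtc.PeerConnectionFactory;
import org.webrtc.VideoRendererGui;

class FactoryInitSketch {
  static PeerConnectionFactory createFactory(Object appContext) {
    // Pass the renderer's EGL context so decoded frames can be delivered
    // as external textures instead of ByteBuffers.
    PeerConnectionFactory.initializeAndroidGlobals(
        appContext, true /* initializeAudio */, true /* initializeVideo */,
        VideoRendererGui.getEGLContext());
    return new PeerConnectionFactory();
  }
}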
View File
@ -44,6 +44,9 @@ public class VideoRenderer {
public final int height;
public final int[] yuvStrides;
public final ByteBuffer[] yuvPlanes;
public final boolean yuvFrame;
public Object textureObject;
public int textureId;
/**
* Construct a frame of the given dimensions with the specified planar
@ -62,25 +65,72 @@ public class VideoRenderer {
yuvPlanes[2] = ByteBuffer.allocateDirect(yuvStrides[2] * height);
}
this.yuvPlanes = yuvPlanes;
this.yuvFrame = true;
}
/**
* Construct a texture frame of the given dimensions with data in SurfaceTexture
*/
public I420Frame(
int width, int height, Object textureObject, int textureId) {
this.width = width;
this.height = height;
this.yuvStrides = null;
this.yuvPlanes = null;
this.textureObject = textureObject;
this.textureId = textureId;
this.yuvFrame = false;
} }
/**
* Copy the planes out of |source| into |this| and return |this|. Calling
* this with mismatched frame dimensions or frame type is a programming
* error and will likely crash.
*/
public I420Frame copyFrom(I420Frame source) {
if (source.yuvFrame && yuvFrame) {
if (!Arrays.equals(yuvStrides, source.yuvStrides) ||
width != source.width || height != source.height) {
throw new RuntimeException("Mismatched dimensions! Source: " +
source.toString() + ", destination: " + toString());
}
copyPlane(source.yuvPlanes[0], yuvPlanes[0]);
copyPlane(source.yuvPlanes[1], yuvPlanes[1]);
copyPlane(source.yuvPlanes[2], yuvPlanes[2]);
return this;
} else if (!source.yuvFrame && !yuvFrame) {
textureObject = source.textureObject;
textureId = source.textureId;
return this;
} else {
throw new RuntimeException("Mismatched frame types! Source: " +
source.toString() + ", destination: " + toString());
}
}
public I420Frame copyFrom(byte[] yuvData) {
if (yuvData.length < width * height * 3 / 2) {
throw new RuntimeException("Wrong arrays size: " + yuvData.length);
}
if (!yuvFrame) {
throw new RuntimeException("Can not feed yuv data to texture frame");
}
int planeSize = width * height;
ByteBuffer[] planes = new ByteBuffer[3];
planes[0] = ByteBuffer.wrap(yuvData, 0, planeSize);
planes[1] = ByteBuffer.wrap(yuvData, planeSize, planeSize / 4);
planes[2] = ByteBuffer.wrap(yuvData, planeSize + planeSize / 4,
planeSize / 4);
for (int i = 0; i < 3; i++) {
yuvPlanes[i].position(0);
yuvPlanes[i].put(planes[i]);
yuvPlanes[i].position(0);
yuvPlanes[i].limit(yuvPlanes[i].capacity());
}
return this;
}
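
A hypothetical sketch of constructing the two frame flavors: the texture constructor and copyFrom(byte[]) are the ones added above, while the yuv constructor signature (width, height, strides, planes) is assumed from the existing class and may differ:

import org.webrtc.VideoRenderer;

class I420FrameSketch {
  static VideoRenderer.I420Frame makeYuvFrame(
      int width, int height, byte[] yuvData) {
    // Passing null planes lets the constructor allocate direct ByteBuffers.
    int[] strides = { width, width / 2, width / 2 };
    VideoRenderer.I420Frame frame =
        new VideoRenderer.I420Frame(width, height, strides, null);
    return frame.copyFrom(yuvData);  // Fill the three planes from packed I420.
  }

  static VideoRenderer.I420Frame makeTextureFrame(
      int width, int height, Object surfaceTexture, int oesTextureId) {
    // yuvFrame == false; the renderer samples the external OES texture.
    return new VideoRenderer.I420Frame(
        width, height, surfaceTexture, oesTextureId);
  }
}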
@Override
public String toString() {
return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
View File
@ -77,7 +77,6 @@ import java.util.regex.Pattern;
public class AppRTCDemoActivity extends Activity
implements AppRTCClient.IceServersObserver {
private static final String TAG = "AppRTCDemoActivity";
private PeerConnectionFactory factory;
private VideoSource videoSource;
private boolean videoSourceStopped;
@ -133,13 +132,6 @@ public class AppRTCDemoActivity extends Activity
hudView.setVisibility(View.INVISIBLE);
addContentView(hudView, hudLayout);
AudioManager audioManager =
((AudioManager) getSystemService(AUDIO_SERVICE));
// TODO(fischman): figure out how to do this Right(tm) and remove the
@ -282,6 +274,9 @@ public class AppRTCDemoActivity extends Activity
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
this, true, true, VideoRendererGui.getEGLContext()),
"Failed to initializeAndroidGlobals");
factory = new PeerConnectionFactory();
MediaConstraints pcConstraints = appRtcClient.pcConstraints();