Convert DeviceInfoImpl::_captureCapabilities from a map to a vector.
The map merely mapped an index to a pointer to a POD, so the code is easily simplified by using a vector (with the index implicit) and the POD as the value. This also fixes a leak in the Windows code, which lacked a virtual dtor for VideoCaptureCapabilityWindows yet deleted it through a base pointer.

R=perkj@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2298004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4840 4adac7df-926f-26a2-2b94-8c16560cd09d
parent ce014d97cd
commit 69fc315fd9
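For context, a minimal sketch of the two points in the message, using illustrative stand-in types (the names and members below are assumptions, not the real WebRTC definitions): deleting a derived object through a base-class pointer whose type has no virtual destructor is undefined behaviour and is what the message calls out as the Windows leak, while holding the PODs by value in a vector removes ownership entirely and makes the index implicit.

```cpp
#include <map>
#include <vector>

// Illustrative stand-ins for VideoCaptureCapability and its Windows subclass.
struct Capability {
  int width;
  int height;
};
struct WinCapability : Capability {  // derived type; the base has no virtual dtor
  int dshowIndex;
};

// Old pattern: index -> heap pointer, cleaned up through the base pointer.
void old_pattern() {
  std::map<int, Capability*> caps;
  caps[0] = new WinCapability();
  for (std::map<int, Capability*>::iterator it = caps.begin();
       it != caps.end(); ++it) {
    // Undefined behaviour: Capability has no virtual destructor. This is the
    // pattern the commit message blames for the leak in the Windows code.
    delete it->second;
  }
}

// New pattern: values in a vector. The index is implicit, nothing is ever
// deleted, and lookup is a bounds check plus operator[].
void new_pattern() {
  std::vector<Capability> caps;
  Capability cap;
  cap.width = 640;
  cap.height = 480;
  caps.push_back(cap);
  Capability copy = caps[0];
  (void)copy;
}

int main() {
  new_pattern();  // old_pattern() is shown only as the anti-pattern
  return 0;
}
```

This mirrors the shape of the hunks below: GetCapability and GetWindowsCapability reduce to a range check plus indexing, and the per-platform CreateCapabilityMap loops push values instead of newing pointers and assigning by index.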
@@ -161,11 +161,6 @@ int32_t DeviceInfoAndroid::GetDeviceName(
 
 int32_t DeviceInfoAndroid::CreateCapabilityMap(
     const char* deviceUniqueIdUTF8) {
-  for (std::map<int, VideoCaptureCapability*>::iterator it =
-           _captureCapabilities.begin();
-       it != _captureCapabilities.end();
-       ++it)
-    delete it->second;
   _captureCapabilities.clear();
 
   JNIEnv *env;
@@ -235,20 +230,20 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
       env->GetArrayLength((jarray) javaCapabilitiesObj);
 
   for (jsize i = 0; i < numberOfCapabilities; ++i) {
-    VideoCaptureCapability *cap = new VideoCaptureCapability();
+    VideoCaptureCapability cap;
    jobject capabilityElement = env->GetObjectArrayElement(
        (jobjectArray) javaCapabilitiesObj,
        i);
 
-    cap->width = env->GetIntField(capabilityElement, widthField);
-    cap->height = env->GetIntField(capabilityElement, heigtField);
-    cap->expectedCaptureDelay = _expectedCaptureDelay;
-    cap->rawType = kVideoNV21;
-    cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField);
+    cap.width = env->GetIntField(capabilityElement, widthField);
+    cap.height = env->GetIntField(capabilityElement, heigtField);
+    cap.expectedCaptureDelay = _expectedCaptureDelay;
+    cap.rawType = kVideoNV21;
+    cap.maxFPS = env->GetIntField(capabilityElement, maxFpsField);
    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                 "%s: Cap width %d, height %d, fps %d", __FUNCTION__,
-                 cap->width, cap->height, cap->maxFPS);
-    _captureCapabilities[i] = cap;
+                 cap.width, cap.height, cap.maxFPS);
+    _captureCapabilities.push_back(cap);
  }
 
  _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
@@ -31,13 +31,6 @@ DeviceInfoImpl::DeviceInfoImpl(const int32_t id)
 DeviceInfoImpl::~DeviceInfoImpl(void)
 {
     _apiLock.AcquireLockExclusive();
-
-    for (VideoCaptureCapabilityMap::iterator it = _captureCapabilities.begin();
-         it != _captureCapabilities.end();
-         ++it) {
-      delete it->second;
-    }
-
     free(_lastUsedDeviceName);
     _apiLock.ReleaseLockExclusive();
 
@@ -124,23 +117,7 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
         return -1;
     }
 
-    VideoCaptureCapabilityMap::iterator item =
-        _captureCapabilities.find(deviceCapabilityNumber);
-
-    if (item == _captureCapabilities.end())
-    {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                     "Failed to find capability number %d of %d possible",
-                     deviceCapabilityNumber, _captureCapabilities.size());
-        return -1;
-    }
-
-    if (item->second == NULL)
-    {
-        return -1;
-    }
-
-    capability = *item->second;
+    capability = _captureCapabilities[deviceCapabilityNumber];
     return 0;
 }
 
@@ -188,11 +165,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
 
     for (int32_t tmp = 0; tmp < numberOfCapabilies; ++tmp) // Loop through all capabilities
     {
-        VideoCaptureCapabilityMap::iterator item = _captureCapabilities.find(tmp);
-        if (item == _captureCapabilities.end())
-            return -1;
-
-        VideoCaptureCapability& capability = *item->second;
+        VideoCaptureCapability& capability = _captureCapabilities[tmp];
 
         const int32_t diffWidth = capability.width - requested.width;
         const int32_t diffHeight = capability.height - requested.height;
@@ -298,15 +271,9 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
                  bestWidth, bestHeight, bestFrameRate, bestRawType);
 
     // Copy the capability
-    VideoCaptureCapabilityMap::iterator item =
-        _captureCapabilities.find(bestformatIndex);
-    if (item == _captureCapabilities.end())
+    if (bestformatIndex < 0)
         return -1;
-    if (item->second == NULL)
-        return -1;
-
-    resulting = *item->second;
-
+    resulting = _captureCapabilities[bestformatIndex];
     return bestformatIndex;
 }
 
@@ -11,7 +11,7 @@
 #ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
 #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
 
-#include <map>
+#include <vector>
 
 #include "webrtc/modules/video_capture/include/video_capture.h"
 #include "webrtc/modules/video_capture/video_capture_delay.h"
@@ -58,8 +58,8 @@ protected:
 protected:
     // Data members
     int32_t _id;
-    typedef std::map<int, VideoCaptureCapability*> VideoCaptureCapabilityMap;
-    VideoCaptureCapabilityMap _captureCapabilities;
+    typedef std::vector<VideoCaptureCapability> VideoCaptureCapabilities;
+    VideoCaptureCapabilities _captureCapabilities;
     RWLockWrapper& _apiLock;
     char* _lastUsedDeviceName;
     uint32_t _lastUsedDeviceNameLength;
@@ -218,16 +218,10 @@ int32_t DeviceInfoLinux::CreateCapabilityMap(
     }
 
     // now fd will point to the matching device
-    // reset old capability map
-    for (std::map<int, VideoCaptureCapability*>::iterator it =
-             _captureCapabilities.begin();
-         it != _captureCapabilities.end();
-         ++it) {
-      delete it->second;
-    }
+    // reset old capability list.
     _captureCapabilities.clear();
 
-    int size = FillCapabilityMap(fd);
+    int size = FillCapabilities(fd);
     close(fd);
 
     // Store the new used device name
@@ -253,7 +247,7 @@ bool DeviceInfoLinux::IsDeviceNameMatches(const char* name,
     return false;
 }
 
-int32_t DeviceInfoLinux::FillCapabilityMap(int fd)
+int32_t DeviceInfoLinux::FillCapabilities(int fd)
 {
 
     // set image format
@@ -290,39 +284,39 @@ int32_t DeviceInfoLinux::FillCapabilityMap(int fd)
                 if ((video_fmt.fmt.pix.width == size[i][0])
                     && (video_fmt.fmt.pix.height == size[i][1]))
                 {
-                    VideoCaptureCapability *cap = new VideoCaptureCapability();
-                    cap->width = video_fmt.fmt.pix.width;
-                    cap->height = video_fmt.fmt.pix.height;
-                    cap->expectedCaptureDelay = 120;
+                    VideoCaptureCapability cap;
+                    cap.width = video_fmt.fmt.pix.width;
+                    cap.height = video_fmt.fmt.pix.height;
+                    cap.expectedCaptureDelay = 120;
                     if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV)
                     {
-                        cap->rawType = kVideoYUY2;
+                        cap.rawType = kVideoYUY2;
                     }
                     else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420)
                     {
-                        cap->rawType = kVideoI420;
+                        cap.rawType = kVideoI420;
                     }
                     else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG)
                     {
-                        cap->rawType = kVideoMJPEG;
+                        cap.rawType = kVideoMJPEG;
                    }
 
                    // get fps of current camera mode
                    // V4l2 does not have a stable method of knowing so we just guess.
-                    if(cap->width >= 800 && cap->rawType != kVideoMJPEG)
+                    if(cap.width >= 800 && cap.rawType != kVideoMJPEG)
                    {
-                        cap->maxFPS = 15;
+                        cap.maxFPS = 15;
                    }
                    else
                    {
-                        cap->maxFPS = 30;
+                        cap.maxFPS = 30;
                    }
 
-                    _captureCapabilities[index] = cap;
+                    _captureCapabilities.push_back(cap);
                    index++;
                    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                                 "Camera capability, width:%d height:%d type:%d fps:%d",
-                                 cap->width, cap->height, cap->rawType, cap->maxFPS);
+                                 cap.width, cap.height, cap.rawType, cap.maxFPS);
                }
            }
        }
@@ -42,7 +42,7 @@ public:
                                           void* /*parentWindow*/,
                                           uint32_t /*positionX*/,
                                           uint32_t /*positionY*/) { return -1;}
-    int32_t FillCapabilityMap(int fd);
+    int32_t FillCapabilities(int fd);
     int32_t Init();
 private:
 
@@ -371,13 +371,12 @@ int32_t DeviceInfoDS::GetWindowsCapability(
     VideoCaptureCapabilityWindows& windowsCapability) {
   ReadLockScoped cs(_apiLock);
 
-  std::map<int, VideoCaptureCapability*>::iterator item =
-      _captureCapabilities.find(capabilityIndex);
-  if (item == _captureCapabilities.end())
+  if (capabilityIndex < 0 || static_cast<size_t>(capabilityIndex) >=
+                                 _captureCapabilitiesWindows.size()) {
     return -1;
+  }
 
-  windowsCapability =
-      *static_cast<VideoCaptureCapabilityWindows*>(item->second);
+  windowsCapability = _captureCapabilitiesWindows[capabilityIndex];
   return 0;
 }
 
@@ -386,13 +385,6 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
 
 {
     // Reset old capability list
-    for (std::map<int, VideoCaptureCapability*>::iterator it =
-             _captureCapabilities.begin();
-         it != _captureCapabilities.end();
-         ++it) {
-      delete it->second;
-    }
-
     _captureCapabilities.clear();
 
     const int32_t deviceUniqueIdUTF8Length =
@@ -468,7 +460,6 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
         return -1;
     }
 
-    int32_t index = 0; // Index in created _capabilities map
     // Check if the device support formattype == FORMAT_VideoInfo2 and FORMAT_VideoInfo.
     // Prefer FORMAT_VideoInfo since some cameras (ZureCam) has been seen having problem with MJPEG and FORMAT_VideoInfo2
     // Interlace flag is only supported in FORMAT_VideoInfo2
@@ -535,8 +526,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
             && pmt->formattype == preferedVideoFormat)
         {
 
-            VideoCaptureCapabilityWindows* capability =
-                new VideoCaptureCapabilityWindows();
+            VideoCaptureCapabilityWindows capability;
             int64_t avgTimePerFrame = 0;
 
             if (pmt->formattype == FORMAT_VideoInfo)
@@ -544,9 +534,9 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
                 VIDEOINFOHEADER* h =
                     reinterpret_cast<VIDEOINFOHEADER*> (pmt->pbFormat);
                 assert(h);
-                capability->directShowCapabilityIndex = tmp;
-                capability->width = h->bmiHeader.biWidth;
-                capability->height = h->bmiHeader.biHeight;
+                capability.directShowCapabilityIndex = tmp;
+                capability.width = h->bmiHeader.biWidth;
+                capability.height = h->bmiHeader.biHeight;
                 avgTimePerFrame = h->AvgTimePerFrame;
             }
             if (pmt->formattype == FORMAT_VideoInfo2)
@@ -554,10 +544,10 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
                 VIDEOINFOHEADER2* h =
                     reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
                 assert(h);
-                capability->directShowCapabilityIndex = tmp;
-                capability->width = h->bmiHeader.biWidth;
-                capability->height = h->bmiHeader.biHeight;
-                capability->interlaced = h->dwInterlaceFlags
+                capability.directShowCapabilityIndex = tmp;
+                capability.width = h->bmiHeader.biWidth;
+                capability.height = h->bmiHeader.biHeight;
+                capability.interlaced = h->dwInterlaceFlags
                                         & (AMINTERLACE_IsInterlaced
                                            | AMINTERLACE_DisplayModeBobOnly);
                 avgTimePerFrame = h->AvgTimePerFrame;
@@ -566,11 +556,11 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
             if (hrVC == S_OK)
             {
                 LONGLONG *frameDurationList;
-                LONGLONG maxFPS;
+                LONGLONG maxFPS;
                 long listSize;
                 SIZE size;
-                size.cx = capability->width;
-                size.cy = capability->height;
+                size.cx = capability.width;
+                size.cy = capability.height;
 
                 // GetMaxAvailableFrameRate doesn't return max frame rate always
                 // eg: Logitech Notebook. This may be due to a bug in that API
@@ -585,12 +575,12 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
                 // On some odd cameras, you may get a 0 for duration.
                 // GetMaxOfFrameArray returns the lowest duration (highest FPS)
                 if (hrVC == S_OK && listSize > 0 &&
-                    0 != (maxFPS = GetMaxOfFrameArray(frameDurationList,
+                    0 != (maxFPS = GetMaxOfFrameArray(frameDurationList,
                                                       listSize)))
                 {
-                    capability->maxFPS = static_cast<int> (10000000
+                    capability.maxFPS = static_cast<int> (10000000
                                                            / maxFPS);
-                    capability->supportFrameRateControl = true;
+                    capability.supportFrameRateControl = true;
                 }
                 else // use existing method
                 {
@@ -598,61 +588,61 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
                                  _id,
                                  "GetMaxAvailableFrameRate NOT SUPPORTED");
                     if (avgTimePerFrame > 0)
-                        capability->maxFPS = static_cast<int> (10000000
+                        capability.maxFPS = static_cast<int> (10000000
                                                                / avgTimePerFrame);
                     else
-                        capability->maxFPS = 0;
+                        capability.maxFPS = 0;
                 }
             }
             else // use existing method in case IAMVideoControl is not supported
             {
                 if (avgTimePerFrame > 0)
-                    capability->maxFPS = static_cast<int> (10000000
+                    capability.maxFPS = static_cast<int> (10000000
                                                            / avgTimePerFrame);
                 else
-                    capability->maxFPS = 0;
+                    capability.maxFPS = 0;
             }
 
             // can't switch MEDIATYPE :~(
             if (pmt->subtype == MEDIASUBTYPE_I420)
             {
-                capability->rawType = kVideoI420;
+                capability.rawType = kVideoI420;
             }
             else if (pmt->subtype == MEDIASUBTYPE_IYUV)
             {
-                capability->rawType = kVideoIYUV;
+                capability.rawType = kVideoIYUV;
             }
             else if (pmt->subtype == MEDIASUBTYPE_RGB24)
             {
-                capability->rawType = kVideoRGB24;
+                capability.rawType = kVideoRGB24;
             }
             else if (pmt->subtype == MEDIASUBTYPE_YUY2)
             {
-                capability->rawType = kVideoYUY2;
+                capability.rawType = kVideoYUY2;
             }
             else if (pmt->subtype == MEDIASUBTYPE_RGB565)
            {
-                capability->rawType = kVideoRGB565;
+                capability.rawType = kVideoRGB565;
            }
            else if (pmt->subtype == MEDIASUBTYPE_MJPG)
            {
-                capability->rawType = kVideoMJPEG;
+                capability.rawType = kVideoMJPEG;
            }
            else if (pmt->subtype == MEDIASUBTYPE_dvsl
                || pmt->subtype == MEDIASUBTYPE_dvsd
                || pmt->subtype == MEDIASUBTYPE_dvhd) // If this is an external DV camera
            {
-                capability->rawType = kVideoYUY2;// MS DV filter seems to create this type
+                capability.rawType = kVideoYUY2;// MS DV filter seems to create this type
            }
            else if (pmt->subtype == MEDIASUBTYPE_UYVY) // Seen used by Declink capture cards
            {
-                capability->rawType = kVideoUYVY;
+                capability.rawType = kVideoUYVY;
            }
            else if (pmt->subtype == MEDIASUBTYPE_HDYC) // Seen used by Declink capture cards. Uses BT. 709 color. Not entiry correct to use UYVY. http://en.wikipedia.org/wiki/YCbCr
            {
                WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
                             "Device support HDYC.");
-                capability->rawType = kVideoUYVY;
+                capability.rawType = kVideoUYVY;
            }
            else
            {
@@ -662,22 +652,22 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
                              webrtc::kTraceVideoCapture, _id,
                              "Device support unknown media type %ls, width %d, height %d",
                              strGuid);
-                delete capability;
                 continue;
             }
 
             // Get the expected capture delay from the static list
-            capability->expectedCaptureDelay
+            capability.expectedCaptureDelay
                           = GetExpectedCaptureDelay(WindowsCaptureDelays,
                                                     NoWindowsCaptureDelays,
                                                     productId,
-                                                    capability->width,
-                                                    capability->height);
-            _captureCapabilities[index++] = capability;
+                                                    capability.width,
+                                                    capability.height);
+            _captureCapabilities.push_back(capability);
+            _captureCapabilitiesWindows.push_back(capability);
             WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                          "Camera capability, width:%d height:%d type:%d fps:%d",
-                         capability->width, capability->height,
-                         capability->rawType, capability->maxFPS);
+                         capability.width, capability.height,
+                         capability.rawType, capability.maxFPS);
         }
         DeleteMediaType(pmt);
         pmt = NULL;
@@ -29,8 +29,8 @@ struct VideoCaptureCapabilityWindows: public VideoCaptureCapability
         directShowCapabilityIndex = 0;
         supportFrameRateControl = false;
     }
-
+
 };
 
 class DeviceInfoDS: public DeviceInfoImpl
 {
 public:
@@ -55,7 +55,7 @@ public:
                   char* productUniqueIdUTF8,
                   uint32_t productUniqueIdUTF8Length);
 
-    /*
+    /*
      * Display OS /capture device specific settings dialog
      */
     virtual int32_t
@@ -99,7 +99,7 @@ private:
     ICreateDevEnum* _dsDevEnum;
     IEnumMoniker* _dsMonikerDevEnum;
     bool _CoUninitializeIsRequired;
-
+    std::vector<VideoCaptureCapabilityWindows> _captureCapabilitiesWindows;
 };
 } // namespace videocapturemodule
 } // namespace webrtc