Convert DeviceInfoImpl::_captureCapabilities from a map to a vector.

The map was just mapping an index to a pointer to a POD, so the code is easily
simplified by using a vector (with the implicit index as key) and the POD as the value.
(also fixes a leak in the windows code: VideoCaptureCapabilityWindows lacked a
virtual dtor, yet instances were being deleted through a base-class pointer).

R=perkj@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2298004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4840 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
fischman@webrtc.org 2013-09-25 17:01:42 +00:00
parent ce014d97cd
commit 69fc315fd9
7 changed files with 72 additions and 126 deletions

View File

@ -161,11 +161,6 @@ int32_t DeviceInfoAndroid::GetDeviceName(
int32_t DeviceInfoAndroid::CreateCapabilityMap( int32_t DeviceInfoAndroid::CreateCapabilityMap(
const char* deviceUniqueIdUTF8) { const char* deviceUniqueIdUTF8) {
for (std::map<int, VideoCaptureCapability*>::iterator it =
_captureCapabilities.begin();
it != _captureCapabilities.end();
++it)
delete it->second;
_captureCapabilities.clear(); _captureCapabilities.clear();
JNIEnv *env; JNIEnv *env;
@ -235,20 +230,20 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
env->GetArrayLength((jarray) javaCapabilitiesObj); env->GetArrayLength((jarray) javaCapabilitiesObj);
for (jsize i = 0; i < numberOfCapabilities; ++i) { for (jsize i = 0; i < numberOfCapabilities; ++i) {
VideoCaptureCapability *cap = new VideoCaptureCapability(); VideoCaptureCapability cap;
jobject capabilityElement = env->GetObjectArrayElement( jobject capabilityElement = env->GetObjectArrayElement(
(jobjectArray) javaCapabilitiesObj, (jobjectArray) javaCapabilitiesObj,
i); i);
cap->width = env->GetIntField(capabilityElement, widthField); cap.width = env->GetIntField(capabilityElement, widthField);
cap->height = env->GetIntField(capabilityElement, heigtField); cap.height = env->GetIntField(capabilityElement, heigtField);
cap->expectedCaptureDelay = _expectedCaptureDelay; cap.expectedCaptureDelay = _expectedCaptureDelay;
cap->rawType = kVideoNV21; cap.rawType = kVideoNV21;
cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField); cap.maxFPS = env->GetIntField(capabilityElement, maxFpsField);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"%s: Cap width %d, height %d, fps %d", __FUNCTION__, "%s: Cap width %d, height %d, fps %d", __FUNCTION__,
cap->width, cap->height, cap->maxFPS); cap.width, cap.height, cap.maxFPS);
_captureCapabilities[i] = cap; _captureCapabilities.push_back(cap);
} }
_lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8); _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);

View File

@ -31,13 +31,6 @@ DeviceInfoImpl::DeviceInfoImpl(const int32_t id)
DeviceInfoImpl::~DeviceInfoImpl(void) DeviceInfoImpl::~DeviceInfoImpl(void)
{ {
_apiLock.AcquireLockExclusive(); _apiLock.AcquireLockExclusive();
for (VideoCaptureCapabilityMap::iterator it = _captureCapabilities.begin();
it != _captureCapabilities.end();
++it) {
delete it->second;
}
free(_lastUsedDeviceName); free(_lastUsedDeviceName);
_apiLock.ReleaseLockExclusive(); _apiLock.ReleaseLockExclusive();
@ -124,23 +117,7 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
return -1; return -1;
} }
VideoCaptureCapabilityMap::iterator item = capability = _captureCapabilities[deviceCapabilityNumber];
_captureCapabilities.find(deviceCapabilityNumber);
if (item == _captureCapabilities.end())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to find capability number %d of %d possible",
deviceCapabilityNumber, _captureCapabilities.size());
return -1;
}
if (item->second == NULL)
{
return -1;
}
capability = *item->second;
return 0; return 0;
} }
@ -188,11 +165,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
for (int32_t tmp = 0; tmp < numberOfCapabilies; ++tmp) // Loop through all capabilities for (int32_t tmp = 0; tmp < numberOfCapabilies; ++tmp) // Loop through all capabilities
{ {
VideoCaptureCapabilityMap::iterator item = _captureCapabilities.find(tmp); VideoCaptureCapability& capability = _captureCapabilities[tmp];
if (item == _captureCapabilities.end())
return -1;
VideoCaptureCapability& capability = *item->second;
const int32_t diffWidth = capability.width - requested.width; const int32_t diffWidth = capability.width - requested.width;
const int32_t diffHeight = capability.height - requested.height; const int32_t diffHeight = capability.height - requested.height;
@ -298,15 +271,9 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
bestWidth, bestHeight, bestFrameRate, bestRawType); bestWidth, bestHeight, bestFrameRate, bestRawType);
// Copy the capability // Copy the capability
VideoCaptureCapabilityMap::iterator item = if (bestformatIndex < 0)
_captureCapabilities.find(bestformatIndex);
if (item == _captureCapabilities.end())
return -1; return -1;
if (item->second == NULL) resulting = _captureCapabilities[bestformatIndex];
return -1;
resulting = *item->second;
return bestformatIndex; return bestformatIndex;
} }

View File

@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_ #ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_ #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
#include <map> #include <vector>
#include "webrtc/modules/video_capture/include/video_capture.h" #include "webrtc/modules/video_capture/include/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_delay.h" #include "webrtc/modules/video_capture/video_capture_delay.h"
@ -58,8 +58,8 @@ protected:
protected: protected:
// Data members // Data members
int32_t _id; int32_t _id;
typedef std::map<int, VideoCaptureCapability*> VideoCaptureCapabilityMap; typedef std::vector<VideoCaptureCapability> VideoCaptureCapabilities;
VideoCaptureCapabilityMap _captureCapabilities; VideoCaptureCapabilities _captureCapabilities;
RWLockWrapper& _apiLock; RWLockWrapper& _apiLock;
char* _lastUsedDeviceName; char* _lastUsedDeviceName;
uint32_t _lastUsedDeviceNameLength; uint32_t _lastUsedDeviceNameLength;

View File

@ -218,16 +218,10 @@ int32_t DeviceInfoLinux::CreateCapabilityMap(
} }
// now fd will point to the matching device // now fd will point to the matching device
// reset old capability map // reset old capability list.
for (std::map<int, VideoCaptureCapability*>::iterator it =
_captureCapabilities.begin();
it != _captureCapabilities.end();
++it) {
delete it->second;
}
_captureCapabilities.clear(); _captureCapabilities.clear();
int size = FillCapabilityMap(fd); int size = FillCapabilities(fd);
close(fd); close(fd);
// Store the new used device name // Store the new used device name
@ -253,7 +247,7 @@ bool DeviceInfoLinux::IsDeviceNameMatches(const char* name,
return false; return false;
} }
int32_t DeviceInfoLinux::FillCapabilityMap(int fd) int32_t DeviceInfoLinux::FillCapabilities(int fd)
{ {
// set image format // set image format
@ -290,39 +284,39 @@ int32_t DeviceInfoLinux::FillCapabilityMap(int fd)
if ((video_fmt.fmt.pix.width == size[i][0]) if ((video_fmt.fmt.pix.width == size[i][0])
&& (video_fmt.fmt.pix.height == size[i][1])) && (video_fmt.fmt.pix.height == size[i][1]))
{ {
VideoCaptureCapability *cap = new VideoCaptureCapability(); VideoCaptureCapability cap;
cap->width = video_fmt.fmt.pix.width; cap.width = video_fmt.fmt.pix.width;
cap->height = video_fmt.fmt.pix.height; cap.height = video_fmt.fmt.pix.height;
cap->expectedCaptureDelay = 120; cap.expectedCaptureDelay = 120;
if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV) if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV)
{ {
cap->rawType = kVideoYUY2; cap.rawType = kVideoYUY2;
} }
else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420) else if (videoFormats[fmts] == V4L2_PIX_FMT_YUV420)
{ {
cap->rawType = kVideoI420; cap.rawType = kVideoI420;
} }
else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG) else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG)
{ {
cap->rawType = kVideoMJPEG; cap.rawType = kVideoMJPEG;
} }
// get fps of current camera mode // get fps of current camera mode
// V4l2 does not have a stable method of knowing so we just guess. // V4l2 does not have a stable method of knowing so we just guess.
if(cap->width >= 800 && cap->rawType != kVideoMJPEG) if(cap.width >= 800 && cap.rawType != kVideoMJPEG)
{ {
cap->maxFPS = 15; cap.maxFPS = 15;
} }
else else
{ {
cap->maxFPS = 30; cap.maxFPS = 30;
} }
_captureCapabilities[index] = cap; _captureCapabilities.push_back(cap);
index++; index++;
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"Camera capability, width:%d height:%d type:%d fps:%d", "Camera capability, width:%d height:%d type:%d fps:%d",
cap->width, cap->height, cap->rawType, cap->maxFPS); cap.width, cap.height, cap.rawType, cap.maxFPS);
} }
} }
} }

View File

@ -42,7 +42,7 @@ public:
void* /*parentWindow*/, void* /*parentWindow*/,
uint32_t /*positionX*/, uint32_t /*positionX*/,
uint32_t /*positionY*/) { return -1;} uint32_t /*positionY*/) { return -1;}
int32_t FillCapabilityMap(int fd); int32_t FillCapabilities(int fd);
int32_t Init(); int32_t Init();
private: private:

View File

@ -371,13 +371,12 @@ int32_t DeviceInfoDS::GetWindowsCapability(
VideoCaptureCapabilityWindows& windowsCapability) { VideoCaptureCapabilityWindows& windowsCapability) {
ReadLockScoped cs(_apiLock); ReadLockScoped cs(_apiLock);
std::map<int, VideoCaptureCapability*>::iterator item = if (capabilityIndex < 0 || static_cast<size_t>(capabilityIndex) >=
_captureCapabilities.find(capabilityIndex); _captureCapabilitiesWindows.size()) {
if (item == _captureCapabilities.end())
return -1; return -1;
}
windowsCapability = windowsCapability = _captureCapabilitiesWindows[capabilityIndex];
*static_cast<VideoCaptureCapabilityWindows*>(item->second);
return 0; return 0;
} }
@ -386,13 +385,6 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
{ {
// Reset old capability list // Reset old capability list
for (std::map<int, VideoCaptureCapability*>::iterator it =
_captureCapabilities.begin();
it != _captureCapabilities.end();
++it) {
delete it->second;
}
_captureCapabilities.clear(); _captureCapabilities.clear();
const int32_t deviceUniqueIdUTF8Length = const int32_t deviceUniqueIdUTF8Length =
@ -468,7 +460,6 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
return -1; return -1;
} }
int32_t index = 0; // Index in created _capabilities map
// Check if the device support formattype == FORMAT_VideoInfo2 and FORMAT_VideoInfo. // Check if the device support formattype == FORMAT_VideoInfo2 and FORMAT_VideoInfo.
// Prefer FORMAT_VideoInfo since some cameras (ZureCam) has been seen having problem with MJPEG and FORMAT_VideoInfo2 // Prefer FORMAT_VideoInfo since some cameras (ZureCam) has been seen having problem with MJPEG and FORMAT_VideoInfo2
// Interlace flag is only supported in FORMAT_VideoInfo2 // Interlace flag is only supported in FORMAT_VideoInfo2
@ -535,8 +526,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
&& pmt->formattype == preferedVideoFormat) && pmt->formattype == preferedVideoFormat)
{ {
VideoCaptureCapabilityWindows* capability = VideoCaptureCapabilityWindows capability;
new VideoCaptureCapabilityWindows();
int64_t avgTimePerFrame = 0; int64_t avgTimePerFrame = 0;
if (pmt->formattype == FORMAT_VideoInfo) if (pmt->formattype == FORMAT_VideoInfo)
@ -544,9 +534,9 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
VIDEOINFOHEADER* h = VIDEOINFOHEADER* h =
reinterpret_cast<VIDEOINFOHEADER*> (pmt->pbFormat); reinterpret_cast<VIDEOINFOHEADER*> (pmt->pbFormat);
assert(h); assert(h);
capability->directShowCapabilityIndex = tmp; capability.directShowCapabilityIndex = tmp;
capability->width = h->bmiHeader.biWidth; capability.width = h->bmiHeader.biWidth;
capability->height = h->bmiHeader.biHeight; capability.height = h->bmiHeader.biHeight;
avgTimePerFrame = h->AvgTimePerFrame; avgTimePerFrame = h->AvgTimePerFrame;
} }
if (pmt->formattype == FORMAT_VideoInfo2) if (pmt->formattype == FORMAT_VideoInfo2)
@ -554,10 +544,10 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
VIDEOINFOHEADER2* h = VIDEOINFOHEADER2* h =
reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat); reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
assert(h); assert(h);
capability->directShowCapabilityIndex = tmp; capability.directShowCapabilityIndex = tmp;
capability->width = h->bmiHeader.biWidth; capability.width = h->bmiHeader.biWidth;
capability->height = h->bmiHeader.biHeight; capability.height = h->bmiHeader.biHeight;
capability->interlaced = h->dwInterlaceFlags capability.interlaced = h->dwInterlaceFlags
& (AMINTERLACE_IsInterlaced & (AMINTERLACE_IsInterlaced
| AMINTERLACE_DisplayModeBobOnly); | AMINTERLACE_DisplayModeBobOnly);
avgTimePerFrame = h->AvgTimePerFrame; avgTimePerFrame = h->AvgTimePerFrame;
@ -566,11 +556,11 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
if (hrVC == S_OK) if (hrVC == S_OK)
{ {
LONGLONG *frameDurationList; LONGLONG *frameDurationList;
LONGLONG maxFPS; LONGLONG maxFPS;
long listSize; long listSize;
SIZE size; SIZE size;
size.cx = capability->width; size.cx = capability.width;
size.cy = capability->height; size.cy = capability.height;
// GetMaxAvailableFrameRate doesn't return max frame rate always // GetMaxAvailableFrameRate doesn't return max frame rate always
// eg: Logitech Notebook. This may be due to a bug in that API // eg: Logitech Notebook. This may be due to a bug in that API
@ -585,12 +575,12 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
// On some odd cameras, you may get a 0 for duration. // On some odd cameras, you may get a 0 for duration.
// GetMaxOfFrameArray returns the lowest duration (highest FPS) // GetMaxOfFrameArray returns the lowest duration (highest FPS)
if (hrVC == S_OK && listSize > 0 && if (hrVC == S_OK && listSize > 0 &&
0 != (maxFPS = GetMaxOfFrameArray(frameDurationList, 0 != (maxFPS = GetMaxOfFrameArray(frameDurationList,
listSize))) listSize)))
{ {
capability->maxFPS = static_cast<int> (10000000 capability.maxFPS = static_cast<int> (10000000
/ maxFPS); / maxFPS);
capability->supportFrameRateControl = true; capability.supportFrameRateControl = true;
} }
else // use existing method else // use existing method
{ {
@ -598,61 +588,61 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
_id, _id,
"GetMaxAvailableFrameRate NOT SUPPORTED"); "GetMaxAvailableFrameRate NOT SUPPORTED");
if (avgTimePerFrame > 0) if (avgTimePerFrame > 0)
capability->maxFPS = static_cast<int> (10000000 capability.maxFPS = static_cast<int> (10000000
/ avgTimePerFrame); / avgTimePerFrame);
else else
capability->maxFPS = 0; capability.maxFPS = 0;
} }
} }
else // use existing method in case IAMVideoControl is not supported else // use existing method in case IAMVideoControl is not supported
{ {
if (avgTimePerFrame > 0) if (avgTimePerFrame > 0)
capability->maxFPS = static_cast<int> (10000000 capability.maxFPS = static_cast<int> (10000000
/ avgTimePerFrame); / avgTimePerFrame);
else else
capability->maxFPS = 0; capability.maxFPS = 0;
} }
// can't switch MEDIATYPE :~( // can't switch MEDIATYPE :~(
if (pmt->subtype == MEDIASUBTYPE_I420) if (pmt->subtype == MEDIASUBTYPE_I420)
{ {
capability->rawType = kVideoI420; capability.rawType = kVideoI420;
} }
else if (pmt->subtype == MEDIASUBTYPE_IYUV) else if (pmt->subtype == MEDIASUBTYPE_IYUV)
{ {
capability->rawType = kVideoIYUV; capability.rawType = kVideoIYUV;
} }
else if (pmt->subtype == MEDIASUBTYPE_RGB24) else if (pmt->subtype == MEDIASUBTYPE_RGB24)
{ {
capability->rawType = kVideoRGB24; capability.rawType = kVideoRGB24;
} }
else if (pmt->subtype == MEDIASUBTYPE_YUY2) else if (pmt->subtype == MEDIASUBTYPE_YUY2)
{ {
capability->rawType = kVideoYUY2; capability.rawType = kVideoYUY2;
} }
else if (pmt->subtype == MEDIASUBTYPE_RGB565) else if (pmt->subtype == MEDIASUBTYPE_RGB565)
{ {
capability->rawType = kVideoRGB565; capability.rawType = kVideoRGB565;
} }
else if (pmt->subtype == MEDIASUBTYPE_MJPG) else if (pmt->subtype == MEDIASUBTYPE_MJPG)
{ {
capability->rawType = kVideoMJPEG; capability.rawType = kVideoMJPEG;
} }
else if (pmt->subtype == MEDIASUBTYPE_dvsl else if (pmt->subtype == MEDIASUBTYPE_dvsl
|| pmt->subtype == MEDIASUBTYPE_dvsd || pmt->subtype == MEDIASUBTYPE_dvsd
|| pmt->subtype == MEDIASUBTYPE_dvhd) // If this is an external DV camera || pmt->subtype == MEDIASUBTYPE_dvhd) // If this is an external DV camera
{ {
capability->rawType = kVideoYUY2;// MS DV filter seems to create this type capability.rawType = kVideoYUY2;// MS DV filter seems to create this type
} }
else if (pmt->subtype == MEDIASUBTYPE_UYVY) // Seen used by Declink capture cards else if (pmt->subtype == MEDIASUBTYPE_UYVY) // Seen used by Declink capture cards
{ {
capability->rawType = kVideoUYVY; capability.rawType = kVideoUYVY;
} }
else if (pmt->subtype == MEDIASUBTYPE_HDYC) // Seen used by Declink capture cards. Uses BT. 709 color. Not entiry correct to use UYVY. http://en.wikipedia.org/wiki/YCbCr else if (pmt->subtype == MEDIASUBTYPE_HDYC) // Seen used by Declink capture cards. Uses BT. 709 color. Not entiry correct to use UYVY. http://en.wikipedia.org/wiki/YCbCr
{ {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id, WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
"Device support HDYC."); "Device support HDYC.");
capability->rawType = kVideoUYVY; capability.rawType = kVideoUYVY;
} }
else else
{ {
@ -662,22 +652,22 @@ int32_t DeviceInfoDS::CreateCapabilityMap(
webrtc::kTraceVideoCapture, _id, webrtc::kTraceVideoCapture, _id,
"Device support unknown media type %ls, width %d, height %d", "Device support unknown media type %ls, width %d, height %d",
strGuid); strGuid);
delete capability;
continue; continue;
} }
// Get the expected capture delay from the static list // Get the expected capture delay from the static list
capability->expectedCaptureDelay capability.expectedCaptureDelay
= GetExpectedCaptureDelay(WindowsCaptureDelays, = GetExpectedCaptureDelay(WindowsCaptureDelays,
NoWindowsCaptureDelays, NoWindowsCaptureDelays,
productId, productId,
capability->width, capability.width,
capability->height); capability.height);
_captureCapabilities[index++] = capability; _captureCapabilities.push_back(capability);
_captureCapabilitiesWindows.push_back(capability);
WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"Camera capability, width:%d height:%d type:%d fps:%d", "Camera capability, width:%d height:%d type:%d fps:%d",
capability->width, capability->height, capability.width, capability.height,
capability->rawType, capability->maxFPS); capability.rawType, capability.maxFPS);
} }
DeleteMediaType(pmt); DeleteMediaType(pmt);
pmt = NULL; pmt = NULL;

View File

@ -29,8 +29,8 @@ struct VideoCaptureCapabilityWindows: public VideoCaptureCapability
directShowCapabilityIndex = 0; directShowCapabilityIndex = 0;
supportFrameRateControl = false; supportFrameRateControl = false;
} }
}; };
class DeviceInfoDS: public DeviceInfoImpl class DeviceInfoDS: public DeviceInfoImpl
{ {
public: public:
@ -55,7 +55,7 @@ public:
char* productUniqueIdUTF8, char* productUniqueIdUTF8,
uint32_t productUniqueIdUTF8Length); uint32_t productUniqueIdUTF8Length);
/* /*
* Display OS /capture device specific settings dialog * Display OS /capture device specific settings dialog
*/ */
virtual int32_t virtual int32_t
@ -99,7 +99,7 @@ private:
ICreateDevEnum* _dsDevEnum; ICreateDevEnum* _dsDevEnum;
IEnumMoniker* _dsMonikerDevEnum; IEnumMoniker* _dsMonikerDevEnum;
bool _CoUninitializeIsRequired; bool _CoUninitializeIsRequired;
std::vector<VideoCaptureCapabilityWindows> _captureCapabilitiesWindows;
}; };
} // namespace videocapturemodule } // namespace videocapturemodule
} // namespace webrtc } // namespace webrtc