/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_capture_windows.h"
#include "trace.h"
#include "sink_filter_windows.h"
#include "help_functions_windows.h"
#include "../video_capture_config.h"
#include "critical_section_wrapper.h"
#include <Dvdmedia.h> // VIDEOINFOHEADER2
namespace webrtc
{
namespace videocapturemodule
{
VideoCaptureDS::VideoCaptureDS(const WebRtc_Word32 id)
: VideoCaptureImpl(id), _dsInfo(id), _captureFilter(NULL),
_graphBuilder(NULL), _mediaControl(NULL), _sinkFilter(NULL),
_inputSendPin(NULL), _outputCapturePin(NULL), _mjpgJPGFilter(NULL),
_inputMjpgPin(NULL), _outputMjpgPin(NULL), _dvFilter(NULL),
_inputDvPin(NULL), _outputDvPin(NULL)
{
}
VideoCaptureDS::~VideoCaptureDS()
{
if (_mediaControl)
{
_mediaControl->Stop();
}
if (_graphBuilder)
{
if (_sinkFilter)
_graphBuilder->RemoveFilter(_sinkFilter);
if (_captureFilter)
_graphBuilder->RemoveFilter(_captureFilter);
if (_mjpgJPGFilter)
_graphBuilder->RemoveFilter(_mjpgJPGFilter);
if (_dvFilter)
_graphBuilder->RemoveFilter(_dvFilter);
}
RELEASE_AND_CLEAR(_captureFilter); // release the capture device
RELEASE_AND_CLEAR(_sinkFilter);
RELEASE_AND_CLEAR(_mjpgJPGFilter);
RELEASE_AND_CLEAR(_dvFilter);
RELEASE_AND_CLEAR(_mediaControl);
RELEASE_AND_CLEAR(_inputSendPin);
RELEASE_AND_CLEAR(_outputCapturePin);
RELEASE_AND_CLEAR(_inputMjpgPin);
RELEASE_AND_CLEAR(_outputMjpgPin);
RELEASE_AND_CLEAR(_inputDvPin);
RELEASE_AND_CLEAR(_outputDvPin);
RELEASE_AND_CLEAR(_graphBuilder);
}
WebRtc_Word32 VideoCaptureDS::Init(const WebRtc_Word32 id,
const WebRtc_UWord8* deviceUniqueIdUTF8)
{
WebRtc_Word32 result = 0;
const WebRtc_Word32 nameLength =
(WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8);
if (nameLength > kVideoCaptureUniqueNameLength)
return -1;
// Store the device name
_deviceUniqueId = new (std::nothrow) WebRtc_UWord8[nameLength + 1];
if (!_deviceUniqueId)
return -1;
memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
if (_dsInfo.Init() != 0)
return -1;
_captureFilter = _dsInfo.GetDeviceFilter(deviceUniqueIdUTF8);
if (!_captureFilter)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to create capture filter.");
return -1;
}
// Get the interface for DirectShow's GraphBuilder
HRESULT hr = CoCreateInstance(CLSID_FilterGraph, NULL,
CLSCTX_INPROC_SERVER, IID_IGraphBuilder,
(void **) &_graphBuilder);
if (FAILED(hr))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to create graph builder.");
return -1;
}
hr = _graphBuilder->QueryInterface(IID_IMediaControl,
(void **) &_mediaControl);
if (FAILED(hr))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to create media control builder.");
return -1;
}
hr = _graphBuilder->AddFilter(_captureFilter, CAPTURE_FILTER_NAME);
if (FAILED(hr))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to add the capture device to the graph.");
return -1;
}
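// The graph is built by connecting pins manually (ConnectDirect) rather than
// by rendering, so fetch the capture output pin of the device filter here.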
_outputCapturePin = GetOutputPin(_captureFilter, PIN_CATEGORY_CAPTURE);
// Create the sink filter used for receiving captured frames.
_sinkFilter = new CaptureSinkFilter(SINK_FILTER_NAME, NULL, &hr,
*this, _id);
if (hr != S_OK)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to create send filter");
return -1;
}
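// Keep an own reference to the sink filter; it is released in the destructor
// via RELEASE_AND_CLEAR.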
_sinkFilter->AddRef();
hr = _graphBuilder->AddFilter(_sinkFilter, SINK_FILTER_NAME);
if (FAILED(hr))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to add the send filter to the graph.");
return -1;
}
_inputSendPin = GetInputPin(_sinkFilter);
// Create MJPG filter
hr = CoCreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC, IID_IBaseFilter,
(void **) &_mjpgJPGFilter);
if (hr == S_OK)
{
_inputMjpgPin = GetInputPin(_mjpgJPGFilter);
_outputMjpgPin = GetOutputPin(_mjpgJPGFilter);
_graphBuilder->AddFilter(_mjpgJPGFilter, NULL);
}
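// The MJPEG decoder is optional; if it cannot be created, cameras that only
// deliver MJPEG will fail to connect later in SetCameraOutput.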
// Connect the graph temporarily here so that no other application
// can claim the capture device.
if (SetCameraOutput(_requestedCapability) != 0)
{
return -1;
}
hr = _mediaControl->Pause();
if (FAILED(hr))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to Pause the Capture device. Is it already occupied? %d.",
hr);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, _id,
"Capture device '%s' initialized.", deviceUniqueIdUTF8);
return 0;
}
WebRtc_Word32 VideoCaptureDS::StartCapture(
const VideoCaptureCapability& capability)
{
WEBRTC_TRACE(webrtc::kTraceModuleCall, webrtc::kTraceVideoCapture, _id,
"StartCapture widht %d, height %d, frameRate %d",
capability.width, capability.height, capability.maxFPS);
CriticalSectionScoped cs(_apiCs);
if (capability != _requestedCapability)
{
DisconnectGraph();
if (SetCameraOutput(capability) != 0)
{
return -1;
}
}
HRESULT hr = _mediaControl->Run();
if (FAILED(hr))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to start the Capture device.");
return -1;
}
return 0;
}
WebRtc_Word32 VideoCaptureDS::StopCapture()
{
WEBRTC_TRACE(webrtc::kTraceModuleCall, webrtc::kTraceVideoCapture, _id,
"StopCapture");
CriticalSectionScoped cs(_apiCs);
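// The graph is paused rather than fully stopped; StartCapture resumes it
// with Run().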
HRESULT hr = _mediaControl->Pause();
if (FAILED(hr))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to stop the capture graph. %d", hr);
return -1;
}
return 0;
}
bool VideoCaptureDS::CaptureStarted()
{
OAFilterState state = 0;
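// GetState waits up to 1000 ms for the filter graph to reach a stable state.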
HRESULT hr = _mediaControl->GetState(1000, &state);
if (hr != S_OK && hr != VFW_S_CANT_CUE)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to get the CaptureStarted status");
}
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"CaptureStarted %d", state);
return state == State_Running;
}
WebRtc_Word32 VideoCaptureDS::CaptureSettings(
VideoCaptureCapability& settings)
{
settings = _requestedCapability;
return 0;
}
WebRtc_Word32 VideoCaptureDS::SetCameraOutput(
const VideoCaptureCapability& requestedCapability)
{
// Get the best matching capability
VideoCaptureCapability capability;
WebRtc_Word32 capabilityIndex;
// Store the new requested size
_requestedCapability = requestedCapability;
// Match the requested capability with the supported.
if ((capabilityIndex = _dsInfo.GetBestMatchedCapability(_deviceUniqueId,
_requestedCapability,
capability)) < 0)
{
return -1;
}
//Reduce the frame rate if possible.
if (capability.maxFPS > requestedCapability.maxFPS)
{
capability.maxFPS = requestedCapability.maxFPS;
}
// Store the new expected capture delay
_captureDelay = capability.expectedCaptureDelay;
// Convert it to the Windows capability index since they are not necessarily
// the same.
VideoCaptureCapabilityWindows windowsCapability;
if (_dsInfo.GetWindowsCapability(capabilityIndex, windowsCapability) != 0)
{
return -1;
}
IAMStreamConfig* streamConfig = NULL;
AM_MEDIA_TYPE *pmt = NULL;
VIDEO_STREAM_CONFIG_CAPS caps;
HRESULT hr = _outputCapturePin->QueryInterface(IID_IAMStreamConfig,
(void**) &streamConfig);
if (hr)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Can't get the Capture format settings.");
return -1;
}
// Get the Windows capability from the capture device.
bool isDVCamera = false;
hr = streamConfig->GetStreamCaps(
windowsCapability.directShowCapabilityIndex,
&pmt, reinterpret_cast<BYTE*> (&caps));
if (!FAILED(hr))
{
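// AvgTimePerFrame is expressed in 100-nanosecond units, hence
// 10000000 / fps when the device supports frame rate control.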
if (pmt->formattype == FORMAT_VideoInfo2)
{
VIDEOINFOHEADER2* h =
reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
if (capability.maxFPS > 0
&& windowsCapability.supportFrameRateControl)
{
h->AvgTimePerFrame = REFERENCE_TIME(10000000.0
/ capability.maxFPS);
}
}
else
{
VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>
(pmt->pbFormat);
if (capability.maxFPS > 0
&& windowsCapability.supportFrameRateControl)
{
h->AvgTimePerFrame = REFERENCE_TIME(10000000.0
/ capability.maxFPS);
}
}
// Set the sink filter to request this capability
_sinkFilter->SetMatchingMediaType(capability);
// Order the capture device to use this capability.
hr += streamConfig->SetFormat(pmt);
// Check if this is a DV camera; if so, the MS DV filter needs to be added.
if (pmt->subtype == MEDIASUBTYPE_dvsl
|| pmt->subtype == MEDIASUBTYPE_dvsd
|| pmt->subtype == MEDIASUBTYPE_dvhd)
isDVCamera = true; // This is a DV camera. Use MS DV filter
}
RELEASE_AND_CLEAR(streamConfig);
if (FAILED(hr))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to set capture device output format");
return -1;
}
if (capability.rawType == kVideoMJPEG && _mjpgJPGFilter)
{
// Connect the camera to the MJPEG decoder
hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputMjpgPin,
NULL);
// Connect the MJPEG decoder to the sink filter
hr += _graphBuilder->ConnectDirect(_outputMjpgPin, _inputSendPin, NULL);
}
else if (isDVCamera)
{
hr = ConnectDVCamera();
}
else
{
hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputSendPin,
NULL);
}
if (hr != S_OK)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to connect the Capture graph %d", hr);
return -1;
}
return 0;
}
WebRtc_Word32 VideoCaptureDS::DisconnectGraph()
{
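// Stop the graph and break all pin connections so the capture format can be
// renegotiated.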
HRESULT hr = _mediaControl->Stop();
hr += _graphBuilder->Disconnect(_outputCapturePin);
hr += _graphBuilder->Disconnect(_inputSendPin);
// If the MJPEG filter exists
if (_mjpgJPGFilter)
{
_graphBuilder->Disconnect(_inputMjpgPin);
_graphBuilder->Disconnect(_outputMjpgPin);
}
// If the DV filter exists
if (_dvFilter)
{
_graphBuilder->Disconnect(_inputDvPin);
_graphBuilder->Disconnect(_outputDvPin);
}
if (hr != S_OK)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to stop the capture device for reconfiguration %d",
hr);
return -1;
}
return 0;
}
HRESULT VideoCaptureDS::ConnectDVCamera()
{
HRESULT hr = S_OK;
if (!_dvFilter)
{
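// Create the DV video decoder and add it to the graph the first time a DV
// camera is connected; it is reused for later connections.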
hr = CoCreateInstance(CLSID_DVVideoCodec, NULL, CLSCTX_INPROC,
IID_IBaseFilter, (void **) &_dvFilter);
if (hr != S_OK)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to create the dv decoder: %x", hr);
return hr;
}
hr = _graphBuilder->AddFilter(_dvFilter, L"VideoDecoderDV");
if (hr != S_OK)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to add the dv decoder to the graph: %x", hr);
return hr;
}
_inputDvPin = GetInputPin(_dvFilter);
if (_inputDvPin == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to get input pin from DV decoder");
return -1;
}
_outputDvPin = GetOutputPin(_dvFilter);
if (_outputDvPin == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to get output pin from DV decoder");
return -1;
}
}
hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputDvPin, NULL);
if (hr != S_OK)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to connect capture device to the dv devoder: %x",
hr);
return hr;
}
hr = _graphBuilder->ConnectDirect(_outputDvPin, _inputSendPin, NULL);
if (hr != S_OK)
{
if (hr == 0x80070004)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to connect the capture device, busy");
}
else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"Failed to connect capture device to the send graph: 0x%x",
hr);
}
return hr;
}
return hr;
}
} // namespace videocapturemodule
} // namespace webrtc