Check return value of fwrite. [Video Module]

Description:
On ChromeOS/ARM, the compiler enforces checking the return value of certain functions.
Currently we don't check the return value of fwrite, which causes build errors.

The following files need to be patched. The patches are all similar; before I patch all
of them, I will start with 3 files, and once we agree on the solution, I will expand
it to the rest.

The question is whether we should do
1. if (error) { return -1; }
or
2. if (error) { /* ignore the error */ }

I took "return -1" in this patch, but I'm OK with either. Please let me know your
thoughts and I will upload a new patch.
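
For concreteness, here is a minimal sketch of the two options, using hypothetical
buffer, length, and file variables rather than code from any particular file:

    // Option 1: bail out on a short write (what this patch does).
    if (fwrite(buffer, 1, length, file) != length) {
      return -1;
    }

    // Option 2: check the result but deliberately ignore the error.
    if (fwrite(buffer, 1, length, file) != length) {
      // Ignore the error.
    }

Option 1 changes the control flow of test code on a failed write; option 2 only
silences the unused-result check. Either way the return value is inspected, which is
presumably enough to satisfy the ChromeOS/ARM toolchain.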
Review URL: https://webrtc-codereview.appspot.com/583010

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2315 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: leozwang@webrtc.org
Date:   2012-05-29 17:33:13 +00:00
Parent: 8a7a019b55
Commit: 2fc6e388c0
17 changed files with 152 additions and 66 deletions

View File

@@ -114,7 +114,10 @@ TEST_F(JpegTest, Encode) {
   // Save decoded image to file.
   FILE* save_file = fopen(decoded_filename_.c_str(), "wb");
-  fwrite(image_buffer._buffer, 1, image_buffer._length, save_file);
+  if (fwrite(image_buffer._buffer, 1,
+             image_buffer._length, save_file) != image_buffer._length) {
+    return;
+  }
   fclose(save_file);
   delete[] image_buffer._buffer;

View File

@@ -127,7 +127,10 @@ TEST_F(TestLibYuv, ConvertTest) {
                             0, width_, height_, width_, kRotateNone,
                             res_i420_buffer));
-  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) != static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
   psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
   // Optimization Speed- quality trade-off => 45 dB only (platform dependant).
   EXPECT_GT(ceil(psnr), 44);
@@ -142,7 +145,10 @@ TEST_F(TestLibYuv, ConvertTest) {
                             0, width_, height_, width_,kRotateNone, res_i420_buffer));
   psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
   EXPECT_EQ(48.0, psnr);
-  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) != static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
   j++;
   delete [] out_uyvy_buffer;
@@ -154,7 +160,10 @@ TEST_F(TestLibYuv, ConvertTest) {
                             kRotateNone, out_i420_buffer));
   EXPECT_EQ(0, ConvertFromI420(out_i420_buffer, width_, kI420, 0,
                               width_, height_, res_i420_buffer));
-  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) != static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
   psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
   EXPECT_EQ(48.0, psnr);
   j++;
@@ -169,7 +178,10 @@ TEST_F(TestLibYuv, ConvertTest) {
                             kI420, 0,
                             width_, height_,
                             res_i420_buffer));
-  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) != static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
   psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
   EXPECT_EQ(48.0, psnr);
@@ -185,7 +197,10 @@ TEST_F(TestLibYuv, ConvertTest) {
                             0, width_, height_, width_,
                             kRotateNone, res_i420_buffer));
-  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) != static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
   psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
   EXPECT_EQ(48.0, psnr);
@@ -198,7 +213,10 @@ TEST_F(TestLibYuv, ConvertTest) {
                             0, width_, height_, width_,
                             kRotateNone, res_i420_buffer));
-  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) != static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
   psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
   // TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565,
   // Another example is I420ToRGB24, the psnr is 44
@@ -213,7 +231,10 @@ TEST_F(TestLibYuv, ConvertTest) {
                             0, width_, height_, width_,
                             kRotateNone, res_i420_buffer));
-  fwrite(res_i420_buffer, frame_length_, 1, output_file);
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) != static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
   psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
   // TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888,
   EXPECT_GT(ceil(psnr), 42);
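
Note that the hunks above also swap fwrite's size and count arguments. fwrite returns
the number of complete items written, so fwrite(buf, frame_length_, 1, file) returns 1
on success, while fwrite(buf, 1, frame_length_, file) returns the byte count, which is
what the new checks compare against. A minimal sketch of that idea, with hypothetical
buf, num_bytes, and file names:

    // fwrite returns items written; with an item size of 1, items == bytes.
    size_t written = fwrite(buf, 1, num_bytes, file);
    if (written != num_bytes) {
      return;  // Short write.
    }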

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -221,7 +221,10 @@ void TestScaler::ScaleSequence(ScaleMethod method,
     EXPECT_EQ(0, test_scaler_.Scale(input_buffer, output_buffer,
                                     out_required_size));
     total_clock += TickTime::MillisecondTimestamp() - start_clock;
-    fwrite(output_buffer, out_required_size, 1, output_file);
+    if (fwrite(output_buffer, 1, out_required_size,
+               output_file) != static_cast<unsigned int>(out_required_size)) {
+      return;
+    }
     frame_count++;
   }

View File

@@ -246,7 +246,10 @@ VideoEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
     _test.CopyEncodedImage(*newBuffer, encodedImage, codecSpecificInfoCopy);
     if (_encodedFile != NULL)
     {
-        fwrite(newBuffer->GetBuffer(), 1, newBuffer->GetLength(), _encodedFile);
+        if (fwrite(newBuffer->GetBuffer(), 1, newBuffer->GetLength(),
+                   _encodedFile) != newBuffer->GetLength()) {
+            return -1;
+        }
     }
     _frameQueue->PushFrame(newBuffer, codecSpecificInfoCopy);
     return 0;
@@ -264,7 +267,10 @@ VideoDecodeCompleteCallback::Decoded(RawImage& image)
     _decodedBytes += image._length;
     if (_decodedFile != NULL)
     {
-        fwrite(image._buffer, 1, image._length, _decodedFile);
+        if (fwrite(image._buffer, 1, image._length,
+                   _decodedFile) != image._length) {
+            return -1;
+        }
     }
     return 0;
 }

View File

@@ -115,7 +115,7 @@ NormalTest::Perform()
     _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
     _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
     _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
     _encoder->InitEncode(&_inst, 1, 1460);
     CodecSpecific_InitBitrate();
     _decoder->InitDecode(&_inst,1);
@@ -129,14 +129,21 @@ NormalTest::Perform()
         {
             DoPacketLoss();
             _encodedVideoBuffer.UpdateLength(_encodedVideoBuffer.GetLength());
-            fwrite(_encodedVideoBuffer.GetBuffer(), 1, _encodedVideoBuffer.GetLength(), _encodedFile);
+            if (fwrite(_encodedVideoBuffer.GetBuffer(), 1,
+                       _encodedVideoBuffer.GetLength(),
+                       _encodedFile) != _encodedVideoBuffer.GetLength()) {
+                return;
+            }
             decodeLength = Decode();
             if (decodeLength < 0)
             {
                 fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
                 exit(EXIT_FAILURE);
             }
-            fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength, _decodedFile);
+            if (fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength,
+                       _decodedFile) != static_cast<unsigned int>(decodeLength)) {
+                return;
+            }
             CodecSpecific_InitBitrate();
             _framecnt++;
         }
@@ -150,7 +157,10 @@ NormalTest::Perform()
             fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
             exit(EXIT_FAILURE);
         }
-        fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength, _decodedFile);
+        if (fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength,
+                   _decodedFile) != static_cast<unsigned int>(decodeLength)) {
+            return;
+        }
     }
     double actualBitRate = ActualBitRate(_framecnt) / 1000.0;
@@ -250,4 +260,3 @@ NormalTest::Decode(int lossValue)
     _encodedVideoBuffer.UpdateLength(0);
     return lengthDecFrame;
 }

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -74,7 +74,10 @@ PacketLossTest::Decoded(const RawImage& decodedImage)
     // write previous decoded frame again (frame freeze)
     if (_decodedFile && _lastFrame)
    {
-        fwrite(_lastFrame, 1, _lastFrameLength, _decodedFile);
+        if (fwrite(_lastFrame, 1, _lastFrameLength,
+                   _decodedFile) != _lastFrameLength) {
+            return;
+        }
     }
     // remove frame from queue

View File

@@ -240,8 +240,12 @@ VideoSource::Convert(const VideoSource &target, bool force /* = false */) const
         if (!fd.DropFrame())
         {
             ASSERT_TRUE(target.GetWidth() == _width &&
-                        target.GetHeight() == _height); // Add video interpolator here!
-            fwrite(outFrame, 1, lengthOutFrame, outFile);
+                        target.GetHeight() == _height);
+            // Add video interpolator here!
+            if (fwrite(outFrame, 1, lengthOutFrame,
+                       outFile) != lengthOutFrame) {
+                return;
+            }
         }
     }

View File

@@ -685,8 +685,10 @@ VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame,
 #ifdef DEBUG_ENCODER_INPUT
     if (_encoderInputFile != NULL)
     {
-        fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
-               _encoderInputFile);
+        if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+                   _encoderInputFile) != videoFrame.Length()) {
+            return -1;
+        }
     }
 #endif
     if (ret < 0)
@@ -898,9 +900,11 @@ VideoCodingModuleImpl::Decode(WebRtc_UWord16 maxWaitTimeMs)
 #ifdef DEBUG_DECODER_BIT_STREAM
     if (_bitStreamBeforeDecoder != NULL)
     {
         // Write bit stream to file for debugging purposes
-        fwrite(frame->Buffer(), 1, frame->Length(),
-               _bitStreamBeforeDecoder);
+        if (fwrite(frame->Buffer(), 1, frame->Length(),
+                   _bitStreamBeforeDecoder) != frame->Length()) {
+            return -1;
+        }
     }
 #endif
     if (_frameStorageCallback != NULL)

View File

@@ -330,8 +330,11 @@ MediaOptTest::Perform()
         }
         else
         {
             // write frame to file
-            fwrite(sourceFrame.Buffer(), 1, sourceFrame.Length(), _actualSourceFile);
+            if (fwrite(sourceFrame.Buffer(), 1, sourceFrame.Length(),
+                       _actualSourceFile) != sourceFrame.Length()) {
+                return -1;
+            }
         }
         _sumEncBytes += encBytes;

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -85,7 +85,9 @@ VCMNTEncodeCompleteCallback::SendData(
     // will call the VCMReceiver input packet
     _frameType = frameType;
     // writing encodedData into file
-    fwrite(payloadData, 1, payloadSize, _encodedFile);
+    if (fwrite(payloadData, 1, payloadSize, _encodedFile) != payloadSize) {
+        return -1;
+    }
     WebRtcRTPHeader rtpInfo;
     rtpInfo.header.markerBit = true;
     rtpInfo.type.Video.width = 0;
@@ -163,7 +165,10 @@ VCMNTDecodeCompleCallback::FrameToRender(webrtc::VideoFrame& videoFrame)
         }
         _decodedFile = fopen(_outname.c_str(), "wb");
     }
-    fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), _decodedFile);
+    if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+               _decodedFile) != videoFrame.Length()) {
+        return -1;
+    }
     _decodedBytes+= videoFrame.Length();
     return VCM_OK;
 }
@@ -396,7 +401,3 @@ NormalTest::Teardown()
     fclose(_encodedFile);
     return;
 }

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -419,9 +419,12 @@ VCMQMDecodeCompleCallback::FrameToRender(VideoFrame& videoFrame)
     {
         if ((_origWidth == videoFrame.Width()) && (_origHeight == videoFrame.Height()))
         {
-            fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), _decodedFile);
-            _frameCnt++;
-            //printf("frame dec # %d", _frameCnt);
+            if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+                       _decodedFile) != videoFrame.Length()) {
+                return -1;
+            }
+            _frameCnt++;
+            //printf("frame dec # %d", _frameCnt);
             // no need for interpolator and decBuffer
             if (_decBuffer != NULL)
             {
@@ -446,7 +449,10 @@ VCMQMDecodeCompleCallback::FrameToRender(VideoFrame& videoFrame)
             }
             // interpolateFrame(_interpolator, videoFrame.Buffer(),_decBuffer);
-            fwrite(_decBuffer, 1, _origWidth*_origHeight*3/2, _decodedFile);
+            if (fwrite(_decBuffer, 1, _origWidth*_origHeight * 3/2,
+                       _decodedFile) != _origWidth*_origHeight * 3/2) {
+                return -1;
+            }
             _frameCnt++;
         }

View File

@@ -59,7 +59,9 @@ VCMEncodeCompleteCallback::SendData(
     // will call the VCMReceiver input packet
     _frameType = frameType;
     // writing encodedData into file
-    fwrite(payloadData, 1, payloadSize, _encodedFile);
+    if (fwrite(payloadData, 1, payloadSize, _encodedFile) != payloadSize) {
+        return -1;
+    }
     WebRtcRTPHeader rtpInfo;
     rtpInfo.header.markerBit = true; // end of frame
     rtpInfo.type.Video.isFirstPacket = true;
@@ -184,9 +186,12 @@ VCMRTPEncodeCompleteCallback::EncodeComplete()
 WebRtc_Word32
 VCMDecodeCompleteCallback::FrameToRender(VideoFrame& videoFrame)
 {
-    fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), _decodedFile);
-    _decodedBytes+= videoFrame.Length();
-    return VCM_OK;
+    if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+               _decodedFile) != videoFrame.Length()) {
+        return -1;
+    }
+    _decodedBytes+= videoFrame.Length();
+    return VCM_OK;
 }
 WebRtc_Word32

View File

@@ -66,7 +66,10 @@ FrameReceiveCallback::FrameToRender(VideoFrame& videoFrame)
     fprintf(_timingFile, "%u, %u\n",
             videoFrame.TimeStamp(),
             MaskWord64ToUWord32(videoFrame.RenderTimeMs()));
-    fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), _outFile);
+    if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+               _outFile) != videoFrame.Length()) {
+        return -1;
+    }
     return 0;
 }

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -36,7 +36,7 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
       "foremanColorEnhancedVPM_cif_short.yuv";
   FILE* modFile = fopen(output_file.c_str(), "w+b");
   ASSERT_TRUE(modFile != NULL) << "Could not open output file.\n";
   WebRtc_UWord32 frameNum = 0;
   while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
   {
@@ -45,11 +45,14 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
     ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(_videoFrame));
     t1 = TickTime::Now();
     accTicks += t1 - t0;
-    fwrite(_videoFrame.Buffer(), 1, _frameLength, modFile);
+    if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
+               modFile) != _frameLength) {
+      return;
+    }
   }
   ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
   printf("\nTime per frame: %d us \n",
          static_cast<int>(accTicks.Microseconds() / frameNum));
   rewind(modFile);
@@ -71,7 +74,7 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
   ASSERT_NE(-1L, testLen);
   rewind(modFile);
   ASSERT_EQ(refLen, testLen) << "File lengths differ.";
   VideoFrame refVideoFrame;
   refVideoFrame.VerifyAndAllocate(_frameLength);
   refVideoFrame.SetWidth(_width);

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -55,16 +55,19 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
       frameNum++;
       _videoFrame.SetTimeStamp(timeStamp);
       t0 = TickTime::Now();
       VideoProcessingModule::FrameStats stats;
       ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
       ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
       t1 = TickTime::Now();
       accTicks += t1 - t0;
       if (runIdx == 0)
       {
-        fwrite(_videoFrame.Buffer(), 1, _frameLength, deflickerFile);
+        if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
+                   deflickerFile) != _frameLength) {
+          return;
+        }
       }
       timeStamp += (90000 / frameRate);
     }
@@ -82,9 +85,9 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
   ASSERT_EQ(0, fclose(deflickerFile));
   // TODO(kjellander): Add verification of deflicker output file.
   printf("\nAverage run time = %d us / frame\n",
          static_cast<int>(avgRuntime / frameNum / NumRuns));
   printf("Min run time = %d us / frame\n\n",
          static_cast<int>(minRuntime / frameNum));
 }

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -92,17 +92,23 @@ TEST_F(VideoProcessingModuleTest, Denoising)
     if (runIdx == 0)
     {
-      fwrite(_videoFrame.Buffer(), 1, _frameLength, noiseFile);
+      if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
+                 noiseFile) != _frameLength) {
+        return;
+      }
     }
     t0 = TickTime::Now();
     ASSERT_GE(modifiedPixels = _vpm->Denoising(_videoFrame), 0);
     t1 = TickTime::Now();
     accTicks += t1 - t0;
     if (runIdx == 0)
     {
-      fwrite(_videoFrame.Buffer(), 1, _frameLength, denoiseFile);
+      if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
+                 denoiseFile) != _frameLength) {
+        return;
+      }
     }
   }
   ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
@@ -118,9 +124,9 @@ TEST_F(VideoProcessingModuleTest, Denoising)
   }
   ASSERT_EQ(0, fclose(denoiseFile));
   ASSERT_EQ(0, fclose(noiseFile));
   printf("\nAverage run time = %d us / frame\n",
          static_cast<int>(avgRuntime / frameNum / NumRuns));
   printf("Min run time = %d us / frame\n\n",
          static_cast<int>(minRuntime / frameNum));
 }

View File

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -334,7 +334,10 @@ void TestSize(VideoFrame& sourceFrame, WebRtc_UWord32 targetWidth,
       targetHeight != sourceFrame.Height()) {
     ASSERT_EQ((targetWidth * targetHeight * 3 / 2), outFrame->Length());
     // Write to file for visual inspection
-    fwrite(outFrame->Buffer(), 1, outFrame->Length(), standAloneFile);
+    if (fwrite(outFrame->Buffer(), 1,
+               outFrame->Length(), standAloneFile) != outFrame->Length()) {
+      return;
+    }
     outFrame->Free();
   }
   fclose(standAloneFile);