/*
 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <sys/stat.h>
#include <sys/types.h>

#include "common_types.h"
#include "engine_configurations.h"
#include "file_wrapper.h"
#include "media_file_utility.h"
#include "module_common_types.h"
#include "trace.h"

#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "avi_file.h"
#endif

#if (defined(WIN32) || defined(WINCE))
#define STR_CASE_CMP _stricmp
#define STR_NCASE_CMP _strnicmp
#else
#define STR_CASE_CMP strcasecmp
#define STR_NCASE_CMP strncasecmp
#endif

namespace {
|
|
enum WaveFormats
|
|
{
|
|
kWaveFormatPcm = 0x0001,
|
|
kWaveFormatALaw = 0x0006,
|
|
kWaveFormatMuLaw = 0x0007
|
|
};
|
|
|
|
// First 16 bytes of the WAVE header. ckID should be "RIFF", wave_ckID should
// be "WAVE" and ckSize is the chunk size (4 + n).
|
|
struct WAVE_RIFF_header
|
|
{
|
|
WebRtc_Word8 ckID[4];
|
|
WebRtc_Word32 ckSize;
|
|
WebRtc_Word8 wave_ckID[4];
|
|
};
|
|
|
|
// First 8 bytes of the format chunk. fmt_ckID should be "fmt ". fmt_ckSize is
// the chunk size (16, 18 or 40 bytes).
|
|
struct WAVE_CHUNK_header
|
|
{
|
|
WebRtc_Word8 fmt_ckID[4];
|
|
WebRtc_Word32 fmt_ckSize;
|
|
};
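
// Editor's note: the WAV parser below copies each four-byte chunk ID into a
// NUL-terminated scratch buffer before comparing it with strcmp(). A minimal
// alternative, shown purely as an illustration (the helper name is the
// editor's, not part of the original module, and it assumes the same headers
// that already provide memcpy()), compares the raw bytes directly:
inline bool MatchFourCc(const WebRtc_Word8* id, const char* tag)
{
    return memcmp(id, tag, 4) == 0;
}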
|
|
} // unnamed namespace
|
|
|
|
namespace webrtc {
|
|
ModuleFileUtility::ModuleFileUtility(const WebRtc_Word32 id)
|
|
: _wavFormatObj(),
|
|
_dataSize(0),
|
|
_readSizeBytes(0),
|
|
_id(id),
|
|
_stopPointInMs(0),
|
|
_startPointInMs(0),
|
|
_playoutPositionMs(0),
|
|
_bytesWritten(0),
|
|
codec_info_(),
|
|
_codecId(kCodecNoCodec),
|
|
_bytesPerSample(0),
|
|
_readPos(0),
|
|
_reading(false),
|
|
_writing(false),
|
|
_tempData()
|
|
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
|
|
,
|
|
_aviAudioInFile(0),
|
|
_aviVideoInFile(0),
|
|
_aviOutFile(0)
|
|
#endif
|
|
{
|
|
WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
|
|
"ModuleFileUtility::ModuleFileUtility()");
|
|
memset(&codec_info_,0,sizeof(CodecInst));
|
|
codec_info_.pltype = -1;
|
|
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
|
|
memset(&_videoCodec,0,sizeof(_videoCodec));
|
|
#endif
|
|
}
|
|
|
|
ModuleFileUtility::~ModuleFileUtility()
|
|
{
|
|
WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
|
|
"ModuleFileUtility::~ModuleFileUtility()");
|
|
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
|
|
delete _aviAudioInFile;
|
|
delete _aviVideoInFile;
|
|
#endif
|
|
}
|
|
|
|
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
|
|
WebRtc_Word32 ModuleFileUtility::InitAviWriting(
|
|
const WebRtc_Word8* filename,
|
|
const CodecInst& audioCodecInst,
|
|
const VideoCodec& videoCodecInst,
|
|
const bool videoOnly /*= false*/)
|
|
{
|
|
_writing = false;
|
|
|
|
delete _aviOutFile;
|
|
_aviOutFile = new AviFile( );
|
|
|
|
AVISTREAMHEADER videoStreamHeader;
|
|
videoStreamHeader.fccType = AviFile::MakeFourCc('v', 'i', 'd', 's');
|
|
|
|
#ifdef VIDEOCODEC_H263
|
|
if (strncmp(videoCodecInst.plName, "H263", 7) == 0)
|
|
{
|
|
videoStreamHeader.fccHandler = AviFile::MakeFourCc('H','2','6','3');
|
|
}
|
|
#endif
|
|
#ifdef VIDEOCODEC_MPEG4
|
|
if (strncmp(videoCodecInst.plName, "MP4V-ES", 7) == 0)
|
|
{
|
|
videoStreamHeader.fccHandler = AviFile::MakeFourCc('M','4','S','2');
|
|
}
|
|
#endif
|
|
#ifdef VIDEOCODEC_I420
|
|
if (strncmp(videoCodecInst.plName, "I420", 7) == 0)
|
|
{
|
|
videoStreamHeader.fccHandler = AviFile::MakeFourCc('I','4','2','0');
|
|
}
|
|
#endif
|
|
#ifdef VIDEOCODEC_VP8
|
|
if (strncmp(videoCodecInst.plName, "VP8", 7) == 0)
|
|
{
|
|
videoStreamHeader.fccHandler = AviFile::MakeFourCc('V','P','8','0');
|
|
}
|
|
#endif
|
|
if (videoStreamHeader.fccHandler == 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"InitAviWriting() Codec not supported");
|
|
|
|
return -1;
|
|
}
|
|
videoStreamHeader.dwScale = 1;
|
|
videoStreamHeader.dwRate = videoCodecInst.maxFramerate;
|
|
videoStreamHeader.dwSuggestedBufferSize = videoCodecInst.height *
|
|
(videoCodecInst.width >> 1) * 3;
|
|
videoStreamHeader.dwQuality = (WebRtc_UWord32)-1;
|
|
videoStreamHeader.dwSampleSize = 0;
|
|
videoStreamHeader.rcFrame.top = 0;
|
|
videoStreamHeader.rcFrame.bottom = videoCodecInst.height;
|
|
videoStreamHeader.rcFrame.left = 0;
|
|
videoStreamHeader.rcFrame.right = videoCodecInst.width;
|
|
|
|
BITMAPINFOHEADER bitMapInfoHeader;
|
|
bitMapInfoHeader.biSize = sizeof(BITMAPINFOHEADER);
|
|
bitMapInfoHeader.biHeight = videoCodecInst.height;
|
|
bitMapInfoHeader.biWidth = videoCodecInst.width;
|
|
bitMapInfoHeader.biPlanes = 1;
|
|
bitMapInfoHeader.biBitCount = 12;
|
|
bitMapInfoHeader.biClrImportant = 0;
|
|
bitMapInfoHeader.biClrUsed = 0;
|
|
bitMapInfoHeader.biCompression = videoStreamHeader.fccHandler;
|
|
bitMapInfoHeader.biSizeImage = bitMapInfoHeader.biWidth *
|
|
bitMapInfoHeader.biHeight * bitMapInfoHeader.biBitCount / 8;
|
|
|
|
if(videoCodecInst.codecType == kVideoCodecMPEG4)
|
|
{
|
|
if(_aviOutFile->CreateVideoStream(
|
|
videoStreamHeader,
|
|
bitMapInfoHeader,
|
|
videoCodecInst.codecSpecific.MPEG4.configParameters,
|
|
videoCodecInst.codecSpecific.MPEG4.configParametersSize) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
} else
|
|
{
|
|
if(_aviOutFile->CreateVideoStream(
|
|
videoStreamHeader,
|
|
bitMapInfoHeader,
|
|
NULL,
|
|
0) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
|
|
if(!videoOnly)
|
|
{
|
|
AVISTREAMHEADER audioStreamHeader;
|
|
audioStreamHeader.fccType = AviFile::MakeFourCc('a', 'u', 'd', 's');
|
|
// fccHandler is the FOURCC of the codec for decoding the stream.
|
|
// It's an optional parameter that is not used by audio streams.
|
|
audioStreamHeader.fccHandler = 0;
|
|
audioStreamHeader.dwScale = 1;
|
|
|
|
WAVEFORMATEX waveFormatHeader;
|
|
waveFormatHeader.cbSize = 0;
|
|
waveFormatHeader.nChannels = 1;
|
|
|
|
if (strncmp(audioCodecInst.plname, "PCMU", 4) == 0)
|
|
{
|
|
audioStreamHeader.dwSampleSize = 1;
|
|
audioStreamHeader.dwRate = 8000;
|
|
audioStreamHeader.dwQuality = (WebRtc_UWord32)-1;
|
|
audioStreamHeader.dwSuggestedBufferSize = 80;
|
|
|
|
waveFormatHeader.nAvgBytesPerSec = 8000;
|
|
waveFormatHeader.nSamplesPerSec = 8000;
|
|
waveFormatHeader.wBitsPerSample = 8;
|
|
waveFormatHeader.nBlockAlign = 1;
|
|
waveFormatHeader.wFormatTag = kWaveFormatMuLaw;
|
|
|
|
} else if (strncmp(audioCodecInst.plname, "PCMA", 4) == 0)
|
|
{
|
|
audioStreamHeader.dwSampleSize = 1;
|
|
audioStreamHeader.dwRate = 8000;
|
|
audioStreamHeader.dwQuality = (WebRtc_UWord32)-1;
|
|
audioStreamHeader.dwSuggestedBufferSize = 80;
|
|
|
|
waveFormatHeader.nAvgBytesPerSec = 8000;
|
|
waveFormatHeader.nSamplesPerSec = 8000;
|
|
waveFormatHeader.wBitsPerSample = 8;
|
|
waveFormatHeader.nBlockAlign = 1;
|
|
waveFormatHeader.wFormatTag = kWaveFormatALaw;
|
|
|
|
} else if (strncmp(audioCodecInst.plname, "L16", 3) == 0)
|
|
{
|
|
audioStreamHeader.dwSampleSize = 2;
|
|
audioStreamHeader.dwRate = audioCodecInst.plfreq;
|
|
audioStreamHeader.dwQuality = (WebRtc_UWord32)-1;
|
|
audioStreamHeader.dwSuggestedBufferSize =
|
|
(audioCodecInst.plfreq/100) * 2;
|
|
|
|
waveFormatHeader.nAvgBytesPerSec = audioCodecInst.plfreq * 2;
|
|
waveFormatHeader.nSamplesPerSec = audioCodecInst.plfreq;
|
|
waveFormatHeader.wBitsPerSample = 16;
|
|
waveFormatHeader.nBlockAlign = 2;
|
|
waveFormatHeader.wFormatTag = kWaveFormatPcm;
|
|
} else
|
|
{
|
|
return -1;
|
|
}
|
|
|
|
if(_aviOutFile->CreateAudioStream(
|
|
audioStreamHeader,
|
|
waveFormatHeader) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
|
|
|
|
if( InitWavCodec(waveFormatHeader.nSamplesPerSec,
|
|
waveFormatHeader.nChannels,
|
|
waveFormatHeader.wBitsPerSample,
|
|
waveFormatHeader.wFormatTag) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
_aviOutFile->Create(filename);
|
|
_writing = true;
|
|
return 0;
|
|
}
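
// Editor's note: both dwSuggestedBufferSize and biSizeImage computed in
// InitAviWriting() reduce to width * height * 3 / 2 bytes, i.e. one I420
// frame at 12 bits per pixel. A minimal sketch of that calculation
// (illustrative only; this helper is not part of the original module):
static WebRtc_UWord32 I420FrameSizeBytes(WebRtc_UWord32 width,
                                         WebRtc_UWord32 height)
{
    // Full-resolution Y plane plus quarter-resolution U and V planes.
    return width * height + 2 * ((width / 2) * (height / 2));
}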
|
|
|
|
WebRtc_Word32 ModuleFileUtility::WriteAviAudioData(
|
|
const WebRtc_Word8* buffer,
|
|
WebRtc_UWord32 bufferLengthInBytes)
|
|
{
|
|
if( _aviOutFile != 0)
|
|
{
|
|
return _aviOutFile->WriteAudio(
|
|
reinterpret_cast<const WebRtc_UWord8*>(buffer),
|
|
bufferLengthInBytes);
|
|
}
|
|
else
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id, "AVI file not initialized");
|
|
return -1;
|
|
}
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::WriteAviVideoData(
|
|
const WebRtc_Word8* buffer,
|
|
WebRtc_UWord32 bufferLengthInBytes)
|
|
{
|
|
if( _aviOutFile != 0)
|
|
{
|
|
return _aviOutFile->WriteVideo(
|
|
reinterpret_cast<const WebRtc_UWord8*>(buffer),
|
|
bufferLengthInBytes);
|
|
}
|
|
else
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id, "AVI file not initialized");
|
|
return -1;
|
|
}
|
|
}
|
|
|
|
|
|
WebRtc_Word32 ModuleFileUtility::CloseAviFile( )
|
|
{
|
|
if( _reading && _aviAudioInFile)
|
|
{
|
|
delete _aviAudioInFile;
|
|
_aviAudioInFile = 0;
|
|
}
|
|
|
|
if( _reading && _aviVideoInFile)
|
|
{
|
|
delete _aviVideoInFile;
|
|
_aviVideoInFile = 0;
|
|
}
|
|
|
|
if( _writing && _aviOutFile)
|
|
{
|
|
delete _aviOutFile;
|
|
_aviOutFile = 0;
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
|
|
WebRtc_Word32 ModuleFileUtility::InitAviReading(const WebRtc_Word8* filename,
|
|
bool videoOnly, bool loop)
|
|
{
|
|
_reading = false;
|
|
delete _aviVideoInFile;
|
|
_aviVideoInFile = new AviFile( );
|
|
|
|
if ((_aviVideoInFile != 0) && _aviVideoInFile->Open(AviFile::AVI_VIDEO,
|
|
filename, loop) == -1)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
|
|
"Unable to open AVI file (video)");
|
|
return -1;
|
|
}
|
|
|
|
|
|
AVISTREAMHEADER videoInStreamHeader;
|
|
BITMAPINFOHEADER bitmapInfo;
|
|
char codecConfigParameters[AviFile::CODEC_CONFIG_LENGTH] = {};
|
|
WebRtc_Word32 configLength = 0;
|
|
if( _aviVideoInFile->GetVideoStreamInfo(videoInStreamHeader, bitmapInfo,
|
|
codecConfigParameters,
|
|
configLength) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
_videoCodec.width = static_cast<WebRtc_UWord16>(
|
|
videoInStreamHeader.rcFrame.right);
|
|
_videoCodec.height = static_cast<WebRtc_UWord16>(
|
|
videoInStreamHeader.rcFrame.bottom);
|
|
_videoCodec.maxFramerate = static_cast<WebRtc_UWord8>(
|
|
videoInStreamHeader.dwRate);
|
|
|
|
const size_t plnameLen = sizeof(_videoCodec.plName) / sizeof(char);
|
|
if (bitmapInfo.biCompression == AviFile::MakeFourCc('M','4','S','2'))
|
|
{
|
|
strncpy(_videoCodec.plName, "MP4V-ES", plnameLen);
|
|
if (configLength > 0)
|
|
{
|
|
if (configLength < kConfigParameterSize)
|
|
{
|
|
_videoCodec.codecSpecific.MPEG4.configParametersSize =
|
|
(WebRtc_UWord8)configLength;
|
|
memcpy(_videoCodec.codecSpecific.MPEG4.configParameters,
|
|
&codecConfigParameters,
|
|
_videoCodec.codecSpecific.MPEG4.configParametersSize);
|
|
}
|
|
else
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
}
|
|
else if (bitmapInfo.biCompression == AviFile::MakeFourCc('I','4','2','0'))
|
|
{
|
|
strncpy(_videoCodec.plName, "I420", plnameLen);
|
|
_videoCodec.codecType = kVideoCodecI420;
|
|
}
|
|
else if (bitmapInfo.biCompression == AviFile::MakeFourCc('H','2','6','3'))
|
|
{
|
|
strncpy(_videoCodec.plName, "H263", plnameLen);
|
|
_videoCodec.codecType = kVideoCodecH263;
|
|
}
|
|
else if (bitmapInfo.biCompression ==
|
|
AviFile::MakeFourCc('V', 'P', '8', '0'))
|
|
{
|
|
strncpy(_videoCodec.plName, "VP8", plnameLen);
|
|
_videoCodec.codecType = kVideoCodecVP8;
|
|
}
|
|
else
|
|
{
|
|
return -1;
|
|
}
|
|
|
|
if(!videoOnly)
|
|
{
|
|
delete _aviAudioInFile;
|
|
_aviAudioInFile = new AviFile();
|
|
|
|
if ( (_aviAudioInFile != 0) &&
|
|
_aviAudioInFile->Open(AviFile::AVI_AUDIO, filename, loop) == -1)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
|
|
"Unable to open AVI file (audio)");
|
|
return -1;
|
|
}
|
|
|
|
WAVEFORMATEX waveHeader;
|
|
if(_aviAudioInFile->GetAudioStreamInfo(waveHeader) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
if(InitWavCodec(waveHeader.nSamplesPerSec, waveHeader.nChannels,
|
|
waveHeader.wBitsPerSample, waveHeader.wFormatTag) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
_reading = true;
|
|
return 0;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::ReadAviAudioData(
|
|
WebRtc_Word8* outBuffer,
|
|
const WebRtc_UWord32 bufferLengthInBytes)
|
|
{
|
|
if(_aviAudioInFile == 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "AVI file not opened.");
|
|
return -1;
|
|
}
|
|
|
|
WebRtc_Word32 length = bufferLengthInBytes;
|
|
if(_aviAudioInFile->ReadAudio(
|
|
reinterpret_cast<WebRtc_UWord8*>(outBuffer),
|
|
length) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
else
|
|
{
|
|
return length;
|
|
}
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::ReadAviVideoData(
|
|
WebRtc_Word8* outBuffer,
|
|
const WebRtc_UWord32 bufferLengthInBytes)
|
|
{
|
|
if(_aviVideoInFile == 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "AVI file not opened.");
|
|
return -1;
|
|
}
|
|
|
|
WebRtc_Word32 length = bufferLengthInBytes;
|
|
if( _aviVideoInFile->ReadVideo(
|
|
reinterpret_cast<WebRtc_UWord8*>(outBuffer),
|
|
length) != 0)
|
|
{
|
|
return -1;
|
|
} else {
|
|
return length;
|
|
}
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::VideoCodecInst(VideoCodec& codecInst)
|
|
{
|
|
WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
|
|
"ModuleFileUtility::CodecInst(codecInst= 0x%x)", &codecInst);
|
|
|
|
if(!_reading)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"CodecInst: not currently reading audio file!");
|
|
return -1;
|
|
}
|
|
memcpy(&codecInst,&_videoCodec,sizeof(VideoCodec));
|
|
return 0;
|
|
}
|
|
#endif
|
|
|
|
WebRtc_Word32 ModuleFileUtility::ReadWavHeader(InStream& wav)
|
|
{
|
|
WAVE_RIFF_header RIFFheaderObj;
|
|
WAVE_CHUNK_header CHUNKheaderObj;
|
|
    // TODO (hellner): tmpStr and tmpStr2 seem unnecessary here.
|
|
WebRtc_Word8 tmpStr[6] = "FOUR";
|
|
WebRtc_UWord8 tmpStr2[4];
|
|
WebRtc_Word32 i, len;
|
|
bool dataFound = false;
|
|
bool fmtFound = false;
|
|
WebRtc_Word8 dummyRead;
|
|
|
|
WEBRTC_TRACE(kTraceModuleCall, kTraceFile, _id,
|
|
"ModuleFileUtility::ReadWavHeader(wav= 0x%x)", &wav);
|
|
|
|
_dataSize = 0;
|
|
len = wav.Read(&RIFFheaderObj, sizeof(WAVE_RIFF_header));
|
|
if(len != sizeof(WAVE_RIFF_header))
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"Not a wave file (too short)");
|
|
return -1;
|
|
}
|
|
|
|
for (i = 0; i < 4; i++)
|
|
{
|
|
tmpStr[i] = RIFFheaderObj.ckID[i];
|
|
}
|
|
if(strcmp(tmpStr, "RIFF") != 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"Not a wave file (does not have RIFF)");
|
|
return -1;
|
|
}
|
|
for (i = 0; i < 4; i++)
|
|
{
|
|
tmpStr[i] = RIFFheaderObj.wave_ckID[i];
|
|
}
|
|
if(strcmp(tmpStr, "WAVE") != 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"Not a wave file (does not have WAVE)");
|
|
return -1;
|
|
}
|
|
|
|
len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));
|
|
|
|
// WAVE files are stored in little endian byte order. Make sure that the
|
|
// data can be read on big endian as well.
|
|
    // TODO (hellner): little endian to system byte order should be done in
    // a subroutine.
|
|
memcpy(tmpStr2, &CHUNKheaderObj.fmt_ckSize, 4);
|
|
CHUNKheaderObj.fmt_ckSize =
|
|
(WebRtc_Word32) ((WebRtc_UWord32) tmpStr2[0] +
|
|
(((WebRtc_UWord32)tmpStr2[1])<<8) +
|
|
(((WebRtc_UWord32)tmpStr2[2])<<16) +
|
|
(((WebRtc_UWord32)tmpStr2[3])<<24));
|
|
|
|
memcpy(tmpStr, CHUNKheaderObj.fmt_ckID, 4);
|
|
|
|
while ((len == sizeof(WAVE_CHUNK_header)) && (!fmtFound || !dataFound))
|
|
{
|
|
if(strcmp(tmpStr, "fmt ") == 0)
|
|
{
|
|
len = wav.Read(&_wavFormatObj, sizeof(WAVE_FMTINFO_header));
|
|
|
|
memcpy(tmpStr2, &_wavFormatObj.formatTag, 2);
|
|
_wavFormatObj.formatTag =
|
|
(WaveFormats) ((WebRtc_UWord32)tmpStr2[0] +
|
|
(((WebRtc_UWord32)tmpStr2[1])<<8));
|
|
memcpy(tmpStr2, &_wavFormatObj.nChannels, 2);
|
|
_wavFormatObj.nChannels =
|
|
(WebRtc_Word16) ((WebRtc_UWord32)tmpStr2[0] +
|
|
(((WebRtc_UWord32)tmpStr2[1])<<8));
|
|
memcpy(tmpStr2, &_wavFormatObj.nSamplesPerSec, 4);
|
|
_wavFormatObj.nSamplesPerSec =
|
|
(WebRtc_Word32) ((WebRtc_UWord32)tmpStr2[0] +
|
|
(((WebRtc_UWord32)tmpStr2[1])<<8) +
|
|
(((WebRtc_UWord32)tmpStr2[2])<<16) +
|
|
(((WebRtc_UWord32)tmpStr2[3])<<24));
|
|
memcpy(tmpStr2, &_wavFormatObj.nAvgBytesPerSec, 4);
|
|
_wavFormatObj.nAvgBytesPerSec =
|
|
(WebRtc_Word32) ((WebRtc_UWord32)tmpStr2[0] +
|
|
(((WebRtc_UWord32)tmpStr2[1])<<8) +
|
|
(((WebRtc_UWord32)tmpStr2[2])<<16) +
|
|
(((WebRtc_UWord32)tmpStr2[3])<<24));
|
|
memcpy(tmpStr2, &_wavFormatObj.nBlockAlign, 2);
|
|
_wavFormatObj.nBlockAlign =
|
|
(WebRtc_Word16) ((WebRtc_UWord32)tmpStr2[0] +
|
|
(((WebRtc_UWord32)tmpStr2[1])<<8));
|
|
memcpy(tmpStr2, &_wavFormatObj.nBitsPerSample, 2);
|
|
_wavFormatObj.nBitsPerSample =
|
|
(WebRtc_Word16) ((WebRtc_UWord32)tmpStr2[0] +
|
|
(((WebRtc_UWord32)tmpStr2[1])<<8));
|
|
|
|
for (i = 0;
|
|
i < (CHUNKheaderObj.fmt_ckSize -
|
|
(WebRtc_Word32)sizeof(WAVE_FMTINFO_header));
|
|
i++)
|
|
{
|
|
len = wav.Read(&dummyRead, 1);
|
|
if(len != 1)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"File corrupted, reached EOF (reading fmt)");
|
|
return -1;
|
|
}
|
|
}
|
|
fmtFound = true;
|
|
}
|
|
else if(strcmp(tmpStr, "data") == 0)
|
|
{
|
|
_dataSize = CHUNKheaderObj.fmt_ckSize;
|
|
dataFound = true;
|
|
break;
|
|
}
|
|
else
|
|
{
|
|
for (i = 0; i < (CHUNKheaderObj.fmt_ckSize); i++)
|
|
{
|
|
len = wav.Read(&dummyRead, 1);
|
|
if(len != 1)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"File corrupted, reached EOF (reading other)");
|
|
return -1;
|
|
}
|
|
}
|
|
}
|
|
|
|
len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));
|
|
|
|
memcpy(tmpStr2, &CHUNKheaderObj.fmt_ckSize, 4);
|
|
CHUNKheaderObj.fmt_ckSize =
|
|
(WebRtc_Word32) ((WebRtc_UWord32)tmpStr2[0] +
|
|
(((WebRtc_UWord32)tmpStr2[1])<<8) +
|
|
(((WebRtc_UWord32)tmpStr2[2])<<16) +
|
|
(((WebRtc_UWord32)tmpStr2[3])<<24));
|
|
|
|
memcpy(tmpStr, CHUNKheaderObj.fmt_ckID, 4);
|
|
}
|
|
|
|
    // Either a proper format chunk has been read or a data chunk has been
    // encountered.
|
|
if( (_wavFormatObj.formatTag != kWaveFormatPcm) &&
|
|
(_wavFormatObj.formatTag != kWaveFormatALaw) &&
|
|
(_wavFormatObj.formatTag != kWaveFormatMuLaw))
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"Coding formatTag value=%d not supported!",
|
|
_wavFormatObj.formatTag);
|
|
return -1;
|
|
}
|
|
if((_wavFormatObj.nChannels < 1) ||
|
|
(_wavFormatObj.nChannels > 2))
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"nChannels value=%d not supported!",
|
|
_wavFormatObj.nChannels);
|
|
return -1;
|
|
}
|
|
|
|
if((_wavFormatObj.nBitsPerSample != 8) &&
|
|
(_wavFormatObj.nBitsPerSample != 16))
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"nBitsPerSample value=%d not supported!",
|
|
_wavFormatObj.nBitsPerSample);
|
|
return -1;
|
|
}
|
|
|
|
// Calculate the number of bytes that 10 ms of audio data correspond to.
|
|
if(_wavFormatObj.formatTag == kWaveFormatPcm)
|
|
{
|
|
// TODO (hellner): integer division for 22050 and 11025 would yield
|
|
// the same result as the else statement. Remove those
|
|
// special cases?
|
|
if(_wavFormatObj.nSamplesPerSec == 44100)
|
|
{
|
|
_readSizeBytes = 440 * _wavFormatObj.nChannels *
|
|
(_wavFormatObj.nBitsPerSample / 8);
|
|
} else if(_wavFormatObj.nSamplesPerSec == 22050) {
|
|
_readSizeBytes = 220 * _wavFormatObj.nChannels *
|
|
(_wavFormatObj.nBitsPerSample / 8);
|
|
} else if(_wavFormatObj.nSamplesPerSec == 11025) {
|
|
_readSizeBytes = 110 * _wavFormatObj.nChannels *
|
|
(_wavFormatObj.nBitsPerSample / 8);
|
|
} else {
|
|
_readSizeBytes = (_wavFormatObj.nSamplesPerSec/100) *
|
|
_wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8);
|
|
}
|
|
|
|
} else {
|
|
_readSizeBytes = (_wavFormatObj.nSamplesPerSec/100) *
|
|
_wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8);
|
|
}
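
    // Editor's note (worked example): for 16 kHz, 16-bit, stereo PCM the
    // calculation above gives (16000 / 100) * 2 channels * 2 bytes =
    // 640 bytes per 10 ms of audio.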
|
|
WEBRTC_TRACE(
|
|
kTraceModuleCall,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::ReadWavHeader: format=PCM %d KHz, sampleSize=%d,\
|
|
nChannels=%d, readSize=%d, dataSize=%d, rate=%d",
|
|
_wavFormatObj.nSamplesPerSec/1000,
|
|
_wavFormatObj.nBitsPerSample,
|
|
_wavFormatObj.nChannels,
|
|
_readSizeBytes,
|
|
_dataSize,
|
|
_wavFormatObj.nAvgBytesPerSec * 8);
|
|
return 0;
|
|
}
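
// Editor's note: the TODO in ReadWavHeader() asks for the little-endian to
// host byte-order conversion to live in a subroutine. A minimal sketch of such
// a helper (the name and placement are the editor's assumption, not part of
// the original module):
static WebRtc_UWord32 ReadLittleEndianU32(const WebRtc_UWord8* p)
{
    // Assemble the 32-bit value byte by byte so the result is correct on both
    // little- and big-endian hosts.
    return (WebRtc_UWord32)p[0] |
           ((WebRtc_UWord32)p[1] << 8) |
           ((WebRtc_UWord32)p[2] << 16) |
           ((WebRtc_UWord32)p[3] << 24);
}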
|
|
|
|
WebRtc_Word32 ModuleFileUtility::InitWavCodec(WebRtc_UWord32 samplesPerSec,
|
|
WebRtc_UWord32 channels,
|
|
WebRtc_UWord32 bitsPerSample,
|
|
WebRtc_UWord32 formatTag)
|
|
{
|
|
codec_info_.pltype = -1;
|
|
codec_info_.plfreq = samplesPerSec;
|
|
codec_info_.channels = channels;
|
|
codec_info_.rate = bitsPerSample * samplesPerSec;
|
|
|
|
// Calculate the packet size for 10ms frames
|
|
switch(formatTag)
|
|
{
|
|
case kWaveFormatALaw:
|
|
strcpy(codec_info_.plname, "PCMA");
|
|
_codecId = kCodecPcma;
|
|
codec_info_.pltype = 8;
|
|
codec_info_.pacsize = codec_info_.plfreq / 100;
|
|
break;
|
|
case kWaveFormatMuLaw:
|
|
strcpy(codec_info_.plname, "PCMU");
|
|
_codecId = kCodecPcmu;
|
|
codec_info_.pltype = 0;
|
|
codec_info_.pacsize = codec_info_.plfreq / 100;
|
|
break;
|
|
case kWaveFormatPcm:
|
|
codec_info_.pacsize = (bitsPerSample * (codec_info_.plfreq / 100)) / 8;
|
|
if(samplesPerSec == 8000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_8Khz;
|
|
}
|
|
else if(samplesPerSec == 16000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_16kHz;
|
|
}
|
|
else if(samplesPerSec == 32000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_32Khz;
|
|
}
|
|
// Set the packet size for "odd" sampling frequencies so that it
|
|
// properly corresponds to _readSizeBytes.
|
|
else if(samplesPerSec == 11025)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_16kHz;
|
|
codec_info_.pacsize = 110;
|
|
codec_info_.plfreq = 11000;
|
|
}
|
|
else if(samplesPerSec == 22050)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_16kHz;
|
|
codec_info_.pacsize = 220;
|
|
codec_info_.plfreq = 22000;
|
|
}
|
|
else if(samplesPerSec == 44100)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_16kHz;
|
|
codec_info_.pacsize = 440;
|
|
codec_info_.plfreq = 44000;
|
|
}
|
|
else if(samplesPerSec == 48000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_16kHz;
|
|
codec_info_.pacsize = 480;
|
|
codec_info_.plfreq = 48000;
|
|
}
|
|
else
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"Unsupported PCM frequency!");
|
|
return -1;
|
|
}
|
|
break;
|
|
default:
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"unknown WAV format TAG!");
|
|
return -1;
|
|
break;
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::InitWavReading(InStream& wav,
|
|
const WebRtc_UWord32 start,
|
|
const WebRtc_UWord32 stop)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceModuleCall,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::InitWavReading(wav= 0x%x, start= %d, stop=%d)",
|
|
&wav,
|
|
start,
|
|
stop);
|
|
|
|
_reading = false;
|
|
|
|
if(ReadWavHeader(wav) == -1)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"failed to read WAV header!");
|
|
return -1;
|
|
}
|
|
|
|
_playoutPositionMs = 0;
|
|
_readPos = 0;
|
|
|
|
if(start > 0)
|
|
{
|
|
WebRtc_UWord8 dummy[WAV_MAX_BUFFER_SIZE];
|
|
WebRtc_Word32 readLength;
|
|
if(_readSizeBytes <= WAV_MAX_BUFFER_SIZE)
|
|
{
|
|
while (_playoutPositionMs < start)
|
|
{
|
|
readLength = wav.Read(dummy, _readSizeBytes);
|
|
if(readLength == _readSizeBytes)
|
|
{
|
|
_readPos += readLength;
|
|
_playoutPositionMs += 10;
|
|
}
|
|
else // Must have reached EOF before start position!
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"InitWavReading(), EOF before start position");
|
|
return -1;
|
|
}
|
|
}
|
|
}
|
|
else
|
|
{
|
|
return -1;
|
|
}
|
|
}
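
    // Editor's note (worked example): with start = 2000 ms and 8 kHz mono
    // 16-bit PCM, _readSizeBytes is 160, so the loop above discards 200
    // frames (32000 bytes) before playback begins at the 2-second mark.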
|
|
if( InitWavCodec(_wavFormatObj.nSamplesPerSec, _wavFormatObj.nChannels,
|
|
_wavFormatObj.nBitsPerSample,
|
|
_wavFormatObj.formatTag) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
_bytesPerSample = _wavFormatObj.nBitsPerSample / 8;
|
|
|
|
WEBRTC_TRACE(kTraceModuleCall, kTraceFile, _id,
|
|
"WAV header: codecName= %s, sampleSize= %d, freq= %d",
|
|
codec_info_.plname, _bytesPerSample, codec_info_.plfreq);
|
|
|
|
_startPointInMs = start;
|
|
_stopPointInMs = stop;
|
|
_reading = true;
|
|
return 0;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::ReadWavDataAsMono(
|
|
InStream& wav,
|
|
WebRtc_Word8* outData,
|
|
const WebRtc_UWord32 bufferSize)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceStream,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::ReadWavDataAsMono(wav= 0x%x, outData= 0x%d,\
|
|
bufSize= %ld)",
|
|
&wav,
|
|
outData,
|
|
bufferSize);
|
|
|
|
// The number of bytes that should be read from file.
|
|
const WebRtc_UWord32 totalBytesNeeded = _readSizeBytes;
|
|
// The number of bytes that will be written to outData.
|
|
const WebRtc_UWord32 bytesRequested = (codec_info_.channels == 2) ?
|
|
totalBytesNeeded >> 1 : totalBytesNeeded;
|
|
if(bufferSize < bytesRequested)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"ReadWavDataAsMono: output buffer is too short!");
|
|
return -1;
|
|
}
|
|
if(outData == NULL)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"ReadWavDataAsMono: output buffer NULL!");
|
|
return -1;
|
|
}
|
|
|
|
if(!_reading)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"ReadWavDataAsMono: no longer reading file.");
|
|
return -1;
|
|
}
|
|
|
|
WebRtc_Word32 bytesRead = ReadWavData(
|
|
wav,
|
|
(codec_info_.channels == 2) ? _tempData : (WebRtc_UWord8*)outData,
|
|
totalBytesNeeded);
|
|
if(bytesRead == 0)
|
|
{
|
|
return 0;
|
|
}
|
|
if(bytesRead < 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"ReadWavDataAsMono: failed to read data from WAV file.");
|
|
return -1;
|
|
}
|
|
    // Output data should be mono.
|
|
if(codec_info_.channels == 2)
|
|
{
|
|
for (WebRtc_UWord32 i = 0; i < bytesRequested; i++)
|
|
{
|
|
// Sample value is the average of left and right buffer rounded to
|
|
// closest integer value. Note samples can be either 1 or 2 byte.
|
|
if(_bytesPerSample == 1)
|
|
{
|
|
_tempData[i] = ((_tempData[2 * i] + _tempData[(2 * i) + 1] +
|
|
1) >> 1);
|
|
}
|
|
else
|
|
{
|
|
WebRtc_Word16* sampleData = (WebRtc_Word16*) _tempData;
|
|
sampleData[i] = ((sampleData[2 * i] + sampleData[(2 * i) + 1] +
|
|
1) >> 1);
|
|
}
|
|
}
|
|
memcpy(outData, _tempData, bytesRequested);
|
|
}
|
|
return bytesRequested;
|
|
}
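
// Editor's note: the stereo path in ReadWavDataAsMono() averages each
// left/right pair with rounding, i.e. (left + right + 1) >> 1. A standalone
// sketch of that downmix for 16-bit samples (illustrative only; this helper
// is not used by the original module):
static void DownmixInterleavedStereo(const WebRtc_Word16* interleaved,
                                     WebRtc_Word16* mono,
                                     WebRtc_UWord32 monoSampleCount)
{
    for (WebRtc_UWord32 i = 0; i < monoSampleCount; i++)
    {
        // Average with rounding, matching the in-place calculation above.
        mono[i] = (WebRtc_Word16)((interleaved[2 * i] +
                                   interleaved[2 * i + 1] + 1) >> 1);
    }
}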
|
|
|
|
WebRtc_Word32 ModuleFileUtility::ReadWavDataAsStereo(
|
|
InStream& wav,
|
|
WebRtc_Word8* outDataLeft,
|
|
WebRtc_Word8* outDataRight,
|
|
const WebRtc_UWord32 bufferSize)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceStream,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::ReadWavDataAsStereo(wav= 0x%x, outLeft= 0x%x,\
|
|
outRight= 0x%x, bufSize= %ld)",
|
|
&wav,
|
|
outDataLeft,
|
|
outDataRight,
|
|
bufferSize);
|
|
|
|
if((outDataLeft == NULL) ||
|
|
(outDataRight == NULL))
|
|
{
|
|
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavDataAsStereo: an output buffer is NULL!");
|
|
return -1;
|
|
}
|
|
if(codec_info_.channels != 2)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceError,
|
|
kTraceFile,
|
|
_id,
|
|
"ReadWavDataAsStereo: WAV file does not contain stereo data!");
|
|
return -1;
|
|
}
|
|
if(! _reading)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"ReadWavDataAsStereo: no longer reading file.");
|
|
return -1;
|
|
}
|
|
|
|
// The number of bytes that should be read from file.
|
|
const WebRtc_UWord32 totalBytesNeeded = _readSizeBytes;
|
|
// The number of bytes that will be written to the left and the right
|
|
// buffers.
|
|
const WebRtc_UWord32 bytesRequested = totalBytesNeeded >> 1;
|
|
if(bufferSize < bytesRequested)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"ReadWavData: Output buffers are too short!");
|
|
assert(false);
|
|
return -1;
|
|
}
|
|
|
|
WebRtc_Word32 bytesRead = ReadWavData(wav, _tempData, totalBytesNeeded);
|
|
if(bytesRead <= 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"ReadWavDataAsStereo: failed to read data from WAV file.");
|
|
return -1;
|
|
}
|
|
|
|
    // De-interleave the audio into the left and right buffers. Note that
    // samples can be either 1 or 2 bytes each.
|
|
if(_bytesPerSample == 1)
|
|
{
|
|
for (WebRtc_UWord32 i = 0; i < bytesRequested; i++)
|
|
{
|
|
outDataLeft[i] = _tempData[2 * i];
|
|
outDataRight[i] = _tempData[(2 * i) + 1];
|
|
}
|
|
}
|
|
else if(_bytesPerSample == 2)
|
|
{
|
|
WebRtc_Word16* sampleData = reinterpret_cast<WebRtc_Word16*>(_tempData);
|
|
WebRtc_Word16* outLeft = reinterpret_cast<WebRtc_Word16*>(outDataLeft);
|
|
WebRtc_Word16* outRight = reinterpret_cast<WebRtc_Word16*>(
|
|
outDataRight);
|
|
|
|
// Bytes requested to samples requested.
|
|
WebRtc_UWord32 sampleCount = bytesRequested >> 1;
|
|
for (WebRtc_UWord32 i = 0; i < sampleCount; i++)
|
|
{
|
|
outLeft[i] = sampleData[2 * i];
|
|
outRight[i] = sampleData[(2 * i) + 1];
|
|
}
|
|
} else {
|
|
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavDataAsStereo: unsupported sample size %d!",
                     _bytesPerSample);
|
|
assert(false);
|
|
return -1;
|
|
}
|
|
return bytesRequested;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::ReadWavData(
|
|
InStream& wav,
|
|
WebRtc_UWord8* buffer,
|
|
const WebRtc_UWord32 dataLengthInBytes)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceStream,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::ReadWavData(wav= 0x%x, buffer= 0x%x, dataLen= %ld)",
|
|
&wav,
|
|
buffer,
|
|
dataLengthInBytes);
|
|
|
|
|
|
if(buffer == NULL)
|
|
{
|
|
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadWavData: output buffer NULL!");
|
|
return -1;
|
|
}
|
|
|
|
// Make sure that a read won't return too few samples.
|
|
// TODO (hellner): why not read the remaining bytes needed from the start
|
|
// of the file?
|
|
if((_dataSize - _readPos) < (WebRtc_Word32)dataLengthInBytes)
|
|
{
|
|
        // A failed Rewind() may mean the file is not supposed to be looped.
|
|
if(wav.Rewind() == -1)
|
|
{
|
|
_reading = false;
|
|
return 0;
|
|
}
|
|
if(InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1)
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
}
|
|
|
|
WebRtc_Word32 bytesRead = wav.Read(buffer, dataLengthInBytes);
|
|
if(bytesRead < 0)
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
|
|
// This should never happen due to earlier sanity checks.
|
|
// TODO (hellner): change to an assert and fail here since this should
|
|
// never happen...
|
|
if(bytesRead < (WebRtc_Word32)dataLengthInBytes)
|
|
{
|
|
if((wav.Rewind() == -1) ||
|
|
(InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
else
|
|
{
|
|
bytesRead = wav.Read(buffer, dataLengthInBytes);
|
|
if(bytesRead < (WebRtc_Word32)dataLengthInBytes)
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
}
|
|
}
|
|
|
|
_readPos += bytesRead;
|
|
|
|
    // TODO (hellner): Why is dataLengthInBytes allowed to dictate the number
    // of bytes to read when exactly 10 ms should be read?
|
|
_playoutPositionMs += 10;
|
|
if((_stopPointInMs > 0) &&
|
|
(_playoutPositionMs >= _stopPointInMs))
|
|
{
|
|
if((wav.Rewind() == -1) ||
|
|
(InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
|
|
{
|
|
_reading = false;
|
|
}
|
|
}
|
|
return bytesRead;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::InitWavWriting(OutStream& wav,
|
|
const CodecInst& codecInst)
|
|
{
|
|
WEBRTC_TRACE(kTraceModuleCall, kTraceFile, _id,
|
|
"ModuleFileUtility::InitWavWriting(wav= 0x%x, codec=%s)",
|
|
&wav, codecInst.plname);
|
|
|
|
if(set_codec_info(codecInst) != 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"codecInst identifies unsupported codec!");
|
|
return -1;
|
|
}
|
|
_writing = false;
|
|
WebRtc_UWord32 channels = (codecInst.channels == 0) ?
|
|
1 : codecInst.channels;
|
|
|
|
if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
|
|
{
|
|
_bytesPerSample = 1;
|
|
if(WriteWavHeader(wav, 8000, _bytesPerSample, channels,
|
|
kWaveFormatMuLaw, 0) == -1)
|
|
{
|
|
return -1;
|
|
}
|
|
}else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
|
|
{
|
|
_bytesPerSample = 1;
|
|
if(WriteWavHeader(wav, 8000, _bytesPerSample, channels, kWaveFormatALaw,
|
|
0) == -1)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
|
|
{
|
|
_bytesPerSample = 2;
|
|
if(WriteWavHeader(wav, codecInst.plfreq, _bytesPerSample, channels,
|
|
kWaveFormatPcm, 0) == -1)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
else
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"codecInst identifies unsupported codec for WAV file!");
|
|
return -1;
|
|
}
|
|
_writing = true;
|
|
_bytesWritten = 0;
|
|
return 0;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::WriteWavData(OutStream& out,
|
|
const WebRtc_Word8* buffer,
|
|
const WebRtc_UWord32 dataLength)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceStream,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::WriteWavData(out= 0x%x, buf= 0x%x, dataLen= %d)",
|
|
&out,
|
|
buffer,
|
|
dataLength);
|
|
|
|
if(buffer == NULL)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"WriteWavData: input buffer NULL!");
|
|
return -1;
|
|
}
|
|
|
|
if(!out.Write(buffer, dataLength))
|
|
{
|
|
return -1;
|
|
}
|
|
_bytesWritten += dataLength;
|
|
return dataLength;
|
|
}
|
|
|
|
|
|
WebRtc_Word32 ModuleFileUtility::WriteWavHeader(
|
|
OutStream& wav,
|
|
const WebRtc_UWord32 freq,
|
|
const WebRtc_UWord32 bytesPerSample,
|
|
const WebRtc_UWord32 channels,
|
|
const WebRtc_UWord32 format,
|
|
const WebRtc_UWord32 lengthInBytes)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceModuleCall,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::WriteWavHeader(format= PCM %d KHz,\
|
|
bytesPerSample= %d, channels= %d, format= %d, dataLength= %d)",
|
|
freq / 1000,
|
|
bytesPerSample,
|
|
channels,
|
|
format,
|
|
lengthInBytes);
|
|
|
|
// Frame size in bytes for 10 ms of audio.
|
|
    // TODO (hellner): 44.1 kHz has a 440-sample frame size, which does not
    // seem to be taken into consideration here!
|
|
WebRtc_Word32 frameSize = (freq / 100) * bytesPerSample * channels;
|
|
|
|
    // Calculate the number of full frames that the WAV file contains.
|
|
const WebRtc_Word32 dataLengthInBytes = frameSize *
|
|
(lengthInBytes / frameSize);
|
|
|
|
WebRtc_Word8 tmpStr[4];
|
|
WebRtc_Word8 tmpChar;
|
|
WebRtc_UWord32 tmpLong;
|
|
|
|
memcpy(tmpStr, "RIFF", 4);
|
|
wav.Write(tmpStr, 4);
|
|
|
|
tmpLong = dataLengthInBytes + 36;
|
|
tmpChar = (WebRtc_Word8)(tmpLong);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 8);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 16);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 24);
|
|
wav.Write(&tmpChar, 1);
|
|
|
|
memcpy(tmpStr, "WAVE", 4);
|
|
wav.Write(tmpStr, 4);
|
|
|
|
memcpy(tmpStr, "fmt ", 4);
|
|
wav.Write(tmpStr, 4);
|
|
|
|
tmpChar = 16;
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = 0;
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = 0;
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = 0;
|
|
wav.Write(&tmpChar, 1);
|
|
|
|
tmpChar = (WebRtc_Word8)(format);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = 0;
|
|
wav.Write(&tmpChar, 1);
|
|
|
|
tmpChar = (WebRtc_Word8)(channels);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = 0;
|
|
wav.Write(&tmpChar, 1);
|
|
|
|
tmpLong = freq;
|
|
tmpChar = (WebRtc_Word8)(tmpLong);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 8);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 16);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 24);
|
|
wav.Write(&tmpChar, 1);
|
|
|
|
// nAverageBytesPerSec = Sample rate * Bytes per sample * Channels
|
|
tmpLong = bytesPerSample * freq * channels;
|
|
tmpChar = (WebRtc_Word8)(tmpLong);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 8);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 16);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 24);
|
|
wav.Write(&tmpChar, 1);
|
|
|
|
// nBlockAlign = Bytes per sample * Channels
|
|
tmpChar = (WebRtc_Word8)(bytesPerSample * channels);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = 0;
|
|
wav.Write(&tmpChar, 1);
|
|
|
|
tmpChar = (WebRtc_Word8)(bytesPerSample*8);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = 0;
|
|
wav.Write(&tmpChar, 1);
|
|
|
|
memcpy(tmpStr, "data", 4);
|
|
wav.Write(tmpStr, 4);
|
|
|
|
tmpLong = dataLengthInBytes;
|
|
tmpChar = (WebRtc_Word8)(tmpLong);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 8);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 16);
|
|
wav.Write(&tmpChar, 1);
|
|
tmpChar = (WebRtc_Word8)(tmpLong >> 24);
|
|
wav.Write(&tmpChar, 1);
|
|
|
|
return 0;
|
|
}
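
// Editor's note: the byte-by-byte writes in WriteWavHeader() produce the
// canonical 44-byte PCM WAV header. Summarized, with all multi-byte fields
// little endian:
//   offset  0: "RIFF"              offset 22: channels        (2 bytes)
//   offset  4: dataLength + 36     offset 24: sample rate     (4 bytes)
//   offset  8: "WAVE"              offset 28: avg bytes/sec   (4 bytes)
//   offset 12: "fmt "              offset 32: block align     (2 bytes)
//   offset 16: 16 (fmt chunk size) offset 34: bits per sample (2 bytes)
//   offset 20: format tag          offset 36: "data", then dataLength (4 bytes)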
|
|
|
|
WebRtc_Word32 ModuleFileUtility::UpdateWavHeader(OutStream& wav)
|
|
{
|
|
WebRtc_Word32 res = -1;
|
|
if(wav.Rewind() == -1)
|
|
{
|
|
return -1;
|
|
}
|
|
WebRtc_UWord32 channels = (codec_info_.channels == 0) ?
|
|
1 : codec_info_.channels;
|
|
|
|
if(STR_CASE_CMP(codec_info_.plname, "L16") == 0)
|
|
{
|
|
res = WriteWavHeader(wav, codec_info_.plfreq, 2, channels,
|
|
kWaveFormatPcm, _bytesWritten);
|
|
} else if(STR_CASE_CMP(codec_info_.plname, "PCMU") == 0) {
|
|
res = WriteWavHeader(wav, 8000, 1, channels, kWaveFormatMuLaw,
|
|
_bytesWritten);
|
|
    } else if(STR_CASE_CMP(codec_info_.plname, "PCMA") == 0) {
|
|
res = WriteWavHeader(wav, 8000, 1, channels, kWaveFormatALaw,
|
|
_bytesWritten);
|
|
} else {
|
|
// Allow calling this API even if not writing to a WAVE file.
|
|
// TODO (hellner): why?!
|
|
return 0;
|
|
}
|
|
return res;
|
|
}
|
|
|
|
|
|
WebRtc_Word32 ModuleFileUtility::InitPreEncodedReading(InStream& in,
|
|
const CodecInst& cinst)
|
|
{
|
|
WEBRTC_TRACE(kTraceModuleCall, kTraceFile, _id,
|
|
"ModuleFileUtility::InitPreEncodedReading(in=0x%x, codec='%s')",
|
|
&in, cinst.plname);
|
|
|
|
WebRtc_UWord8 preEncodedID;
|
|
in.Read(&preEncodedID, 1);
|
|
|
|
MediaFileUtility_CodecType codecType =
|
|
(MediaFileUtility_CodecType)preEncodedID;
|
|
|
|
if(set_codec_info(cinst) != 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"Pre-encoded file send codec mismatch!");
|
|
return -1;
|
|
}
|
|
if(codecType != _codecId)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"Pre-encoded file format codec mismatch!");
|
|
return -1;
|
|
}
|
|
memcpy(&codec_info_,&cinst,sizeof(CodecInst));
|
|
_reading = true;
|
|
return 0;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::ReadPreEncodedData(
|
|
InStream& in,
|
|
WebRtc_Word8* outData,
|
|
const WebRtc_UWord32 bufferSize)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceStream,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::ReadPreEncodedData(in= 0x%x, outData= 0x%x,\
|
|
bufferSize= %d)",
|
|
&in,
|
|
outData,
|
|
bufferSize);
|
|
|
|
if(outData == NULL)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id, "output buffer NULL");
|
|
}
|
|
|
|
WebRtc_UWord32 frameLen;
|
|
WebRtc_UWord8 buf[64];
|
|
// Each frame has a two byte header containing the frame length.
|
|
WebRtc_Word32 res = in.Read(buf, 2);
|
|
if(res != 2)
|
|
{
|
|
if(!in.Rewind())
|
|
{
|
|
// The first byte is the codec identifier.
|
|
in.Read(buf, 1);
|
|
res = in.Read(buf, 2);
|
|
}
|
|
else
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
frameLen = buf[0] + buf[1] * 256;
|
|
if(bufferSize < frameLen)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceError,
|
|
kTraceFile,
|
|
_id,
|
|
"buffer not large enough to read %d bytes of pre-encoded data!",
|
|
frameLen);
|
|
return -1;
|
|
}
|
|
return in.Read(outData, frameLen);
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::InitPreEncodedWriting(
|
|
OutStream& out,
|
|
const CodecInst& codecInst)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceModuleCall,
|
|
kTraceFile,
|
|
        _id,
        "ModuleFileUtility::InitPreEncodedWriting(out=0x%x, codecInst= %s)",
|
|
&out,
|
|
codecInst.plname);
|
|
|
|
if(set_codec_info(codecInst) != 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id, "CodecInst not recognized!");
|
|
return -1;
|
|
}
|
|
_writing = true;
|
|
_bytesWritten = 1;
|
|
out.Write(&_codecId, 1);
|
|
return 0;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::WritePreEncodedData(
|
|
OutStream& out,
|
|
const WebRtc_Word8* buffer,
|
|
const WebRtc_UWord32 dataLength)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceStream,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::WritePreEncodedData(out= 0x%x, inData= 0x%x,\
|
|
dataLen= %d)",
|
|
&out,
|
|
buffer,
|
|
dataLength);
|
|
|
|
if(buffer == NULL)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
|
|
}
|
|
|
|
WebRtc_Word32 bytesWritten = 0;
|
|
    // The first two bytes are the size of the frame.
|
|
WebRtc_Word16 lengthBuf;
|
|
lengthBuf = (WebRtc_Word16)dataLength;
|
|
if(!out.Write(&lengthBuf, 2))
|
|
{
|
|
return -1;
|
|
}
|
|
bytesWritten = 2;
|
|
|
|
if(!out.Write(buffer, dataLength))
|
|
{
|
|
return -1;
|
|
}
|
|
bytesWritten += dataLength;
|
|
return bytesWritten;
|
|
}
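
// Editor's note: taken together, InitPreEncodedWriting(), WritePreEncodedData()
// and the corresponding readers define the pre-encoded file layout as a single
// codec-identifier byte at the start of the file, followed by frames stored as
// a two-byte frame length (interpreted as little endian by
// ReadPreEncodedData()) and then that many payload bytes. Any tool parsing
// such files by hand would need to skip the leading identifier byte before
// reading the first length field.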
|
|
|
|
WebRtc_Word32 ModuleFileUtility::InitCompressedReading(
|
|
InStream& in,
|
|
const WebRtc_UWord32 start,
|
|
const WebRtc_UWord32 stop)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceDebug,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::InitCompressedReading(in= 0x%x, start= %d,\
|
|
stop= %d)",
|
|
&in,
|
|
start,
|
|
stop);
|
|
|
|
WebRtc_Word16 read_len = 0;
|
|
_codecId = kCodecNoCodec;
|
|
_playoutPositionMs = 0;
|
|
_reading = false;
|
|
|
|
_startPointInMs = start;
|
|
_stopPointInMs = stop;
|
|
|
|
#ifdef WEBRTC_CODEC_GSMAMR
|
|
WebRtc_Word32 AMRmode2bytes[9]={12,13,15,17,19,20,26,31,5};
|
|
#endif
|
|
#ifdef WEBRTC_CODEC_GSMAMRWB
|
|
WebRtc_Word32 AMRWBmode2bytes[10]={17,23,32,36,40,46,50,58,60,6};
|
|
#endif
|
|
|
|
// Read the codec name
|
|
WebRtc_Word32 cnt = 0;
|
|
WebRtc_Word8 buf[64];
|
|
do
|
|
{
|
|
in.Read(&buf[cnt++], 1);
|
|
} while ((buf[cnt-1] != '\n') && (64 > cnt));
|
|
|
|
if(cnt==64)
|
|
{
|
|
return -1;
|
|
} else {
|
|
buf[cnt]=0;
|
|
}
|
|
|
|
#ifdef WEBRTC_CODEC_GSMAMR
|
|
if(!strcmp("#!AMR\n", buf))
|
|
{
|
|
strcpy(codec_info_.plname, "amr");
|
|
codec_info_.pacsize = 160;
|
|
_codecId = kCodecAmr;
|
|
codec_info_.pltype = 112;
|
|
codec_info_.rate = 12200;
|
|
codec_info_.plfreq = 8000;
|
|
codec_info_.channels = 1;
|
|
|
|
WebRtc_Word16 mode = 0;
|
|
if(_startPointInMs > 0)
|
|
{
|
|
while (_playoutPositionMs <= _startPointInMs)
|
|
{
|
|
                // The first byte read contains the AMR mode.
|
|
read_len = in.Read(buf, 1);
|
|
if(read_len != 1)
|
|
{
|
|
return -1;
|
|
}
|
|
|
|
mode = (buf[0]>>3)&0xF;
|
|
if((mode < 0) || (mode > 8))
|
|
{
|
|
if(mode != 15)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
if(mode != 15)
|
|
{
|
|
read_len = in.Read(&buf[1], AMRmode2bytes[mode]);
|
|
if(read_len != AMRmode2bytes[mode])
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
_playoutPositionMs += 20;
|
|
}
|
|
}
|
|
}
|
|
#endif
|
|
#ifdef WEBRTC_CODEC_GSMAMRWB
|
|
if(!strcmp("#!AMRWB\n", buf))
|
|
{
|
|
strcpy(codec_info_.plname, "amr-wb");
|
|
codec_info_.pacsize = 320;
|
|
_codecId = kCodecAmrWb;
|
|
codec_info_.pltype = 120;
|
|
codec_info_.rate = 20000;
|
|
codec_info_.plfreq = 16000;
|
|
codec_info_.channels = 1;
|
|
|
|
WebRtc_Word16 mode = 0;
|
|
if(_startPointInMs > 0)
|
|
{
|
|
while (_playoutPositionMs <= _startPointInMs)
|
|
{
|
|
                // The first byte read contains the AMR-WB mode.
|
|
read_len = in.Read(buf, 1);
|
|
if(read_len != 1)
|
|
{
|
|
return -1;
|
|
}
|
|
|
|
mode = (buf[0]>>3)&0xF;
|
|
if((mode < 0) || (mode > 9))
|
|
{
|
|
if(mode != 15)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
if(mode != 15)
|
|
{
|
|
read_len = in.Read(&buf[1], AMRWBmode2bytes[mode]);
|
|
if(read_len != AMRWBmode2bytes[mode])
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
_playoutPositionMs += 20;
|
|
}
|
|
}
|
|
}
|
|
#endif
|
|
#ifdef WEBRTC_CODEC_ILBC
|
|
if(!strcmp("#!iLBC20\n", buf))
|
|
{
|
|
codec_info_.pltype = 102;
|
|
strcpy(codec_info_.plname, "ilbc");
|
|
codec_info_.plfreq = 8000;
|
|
codec_info_.pacsize = 160;
|
|
codec_info_.channels = 1;
|
|
codec_info_.rate = 13300;
|
|
_codecId = kCodecIlbc20Ms;
|
|
|
|
if(_startPointInMs > 0)
|
|
{
|
|
while (_playoutPositionMs <= _startPointInMs)
|
|
{
|
|
read_len = in.Read(buf, 38);
|
|
if(read_len == 38)
|
|
{
|
|
_playoutPositionMs += 20;
|
|
}
|
|
else
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
if(!strcmp("#!iLBC30\n", buf))
|
|
{
|
|
codec_info_.pltype = 102;
|
|
strcpy(codec_info_.plname, "ilbc");
|
|
codec_info_.plfreq = 8000;
|
|
codec_info_.pacsize = 240;
|
|
codec_info_.channels = 1;
|
|
codec_info_.rate = 13300;
|
|
_codecId = kCodecIlbc30Ms;
|
|
|
|
if(_startPointInMs > 0)
|
|
{
|
|
while (_playoutPositionMs <= _startPointInMs)
|
|
{
|
|
read_len = in.Read(buf, 50);
|
|
if(read_len == 50)
|
|
{
|
|
_playoutPositionMs += 20;
|
|
}
|
|
else
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
#endif
|
|
if(_codecId == kCodecNoCodec)
|
|
{
|
|
return -1;
|
|
}
|
|
_reading = true;
|
|
return 0;
|
|
}
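
// Editor's note: InitCompressedReading() recognizes files by a magic first
// line ("#!AMR\n", "#!AMRWB\n", "#!iLBC20\n" or "#!iLBC30\n") followed by raw
// frames: iLBC frames are a fixed 38 or 50 bytes, while AMR and AMR-WB frames
// start with a header byte whose bits 3..6 select the payload size via the
// mode-to-bytes tables above (mode 15 meaning a header-only NO_DATA frame).
// A minimal sketch of that mode extraction (illustrative only; this helper is
// not part of the original module):
static int CompressedAmrMode(WebRtc_UWord8 frameHeaderByte)
{
    // The frame type index occupies bits 3..6 of the first byte of each frame.
    return (frameHeaderByte >> 3) & 0xF;
}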
|
|
|
|
WebRtc_Word32 ModuleFileUtility::ReadCompressedData(InStream& in,
|
|
WebRtc_Word8* outData,
|
|
WebRtc_UWord32 bufferSize)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceStream,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::ReadCompressedData(in=0x%x, outData=0x%x,\
|
|
bytes=%ld)",
|
|
&in,
|
|
outData,
|
|
bufferSize);
|
|
|
|
#ifdef WEBRTC_CODEC_GSMAMR
|
|
WebRtc_UWord32 AMRmode2bytes[9]={12,13,15,17,19,20,26,31,5};
|
|
#endif
|
|
#ifdef WEBRTC_CODEC_GSMAMRWB
|
|
WebRtc_UWord32 AMRWBmode2bytes[10]={17,23,32,36,40,46,50,58,60,6};
|
|
#endif
|
|
WebRtc_UWord32 bytesRead = 0;
|
|
|
|
if(! _reading)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id, "not currently reading!");
|
|
return -1;
|
|
}
|
|
|
|
#ifdef WEBRTC_CODEC_GSMAMR
|
|
if(_codecId == kCodecAmr)
|
|
{
|
|
WebRtc_Word32 res = in.Read(outData, 1);
|
|
if(res != 1)
|
|
{
|
|
if(!in.Rewind())
|
|
{
|
|
InitCompressedReading(in, _startPointInMs, _stopPointInMs);
|
|
res = in.Read(outData, 1);
|
|
if(res != 1)
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
}
|
|
else
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
}
|
|
const WebRtc_Word16 mode = (outData[0]>>3)&0xF;
|
|
if((mode < 0) ||
|
|
(mode > 8))
|
|
{
|
|
if(mode != 15)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
if(mode != 15)
|
|
{
|
|
if(bufferSize < AMRmode2bytes[mode] + 1)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceError,
|
|
kTraceFile,
|
|
_id,
|
|
"output buffer is too short to read AMR compressed data.");
|
|
assert(false);
|
|
return -1;
|
|
}
|
|
bytesRead = in.Read(&outData[1], AMRmode2bytes[mode]);
|
|
if(bytesRead != AMRmode2bytes[mode])
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
// Count the mode byte to bytes read.
|
|
bytesRead++;
|
|
}
|
|
else
|
|
{
|
|
bytesRead = 1;
|
|
}
|
|
}
|
|
#endif
|
|
#ifdef WEBRTC_CODEC_GSMAMRWB
|
|
if(_codecId == kCodecAmrWb)
|
|
{
|
|
WebRtc_Word32 res = in.Read(outData, 1);
|
|
if(res != 1)
|
|
{
|
|
if(!in.Rewind())
|
|
{
|
|
InitCompressedReading(in, _startPointInMs, _stopPointInMs);
|
|
res = in.Read(outData, 1);
|
|
if(res != 1)
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
}
|
|
else
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
}
|
|
WebRtc_Word16 mode = (outData[0]>>3)&0xF;
|
|
if((mode < 0) ||
|
|
(mode > 8))
|
|
{
|
|
if(mode != 15)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
if(mode != 15)
|
|
{
|
|
if(bufferSize < AMRWBmode2bytes[mode] + 1)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"output buffer is too short to read AMRWB\
|
|
compressed.");
|
|
assert(false);
|
|
return -1;
|
|
}
|
|
bytesRead = in.Read(&outData[1], AMRWBmode2bytes[mode]);
|
|
if(bytesRead != AMRWBmode2bytes[mode])
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
bytesRead++;
|
|
}
|
|
else
|
|
{
|
|
bytesRead = 1;
|
|
}
|
|
}
|
|
#endif
|
|
#ifdef WEBRTC_CODEC_ILBC
|
|
if((_codecId == kCodecIlbc20Ms) ||
|
|
(_codecId == kCodecIlbc30Ms))
|
|
{
|
|
WebRtc_UWord32 byteSize = 0;
|
|
if(_codecId == kCodecIlbc30Ms)
|
|
{
|
|
byteSize = 50;
|
|
}
|
|
if(_codecId == kCodecIlbc20Ms)
|
|
{
|
|
byteSize = 38;
|
|
}
|
|
if(bufferSize < byteSize)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"output buffer is too short to read ILBC compressed\
|
|
data.");
|
|
assert(false);
|
|
return -1;
|
|
}
|
|
|
|
bytesRead = in.Read(outData, byteSize);
|
|
if(bytesRead != byteSize)
|
|
{
|
|
if(!in.Rewind())
|
|
{
|
|
InitCompressedReading(in, _startPointInMs, _stopPointInMs);
|
|
bytesRead = in.Read(outData, byteSize);
|
|
if(bytesRead != byteSize)
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
}
|
|
else
|
|
{
|
|
_reading = false;
|
|
return -1;
|
|
}
|
|
}
|
|
}
|
|
#endif
|
|
if(bytesRead == 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"ReadCompressedData() no bytes read, codec not supported");
|
|
return -1;
|
|
}
|
|
|
|
_playoutPositionMs += 20;
|
|
if((_stopPointInMs > 0) &&
|
|
(_playoutPositionMs >= _stopPointInMs))
|
|
{
|
|
if(!in.Rewind())
|
|
{
|
|
InitCompressedReading(in, _startPointInMs, _stopPointInMs);
|
|
}
|
|
else
|
|
{
|
|
_reading = false;
|
|
}
|
|
}
|
|
return bytesRead;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::InitCompressedWriting(
|
|
OutStream& out,
|
|
const CodecInst& codecInst)
|
|
{
|
|
WEBRTC_TRACE(kTraceDebug, kTraceFile, _id,
|
|
"ModuleFileUtility::InitCompressedWriting(out= 0x%x,\
|
|
codecName= %s)",
|
|
&out, codecInst.plname);
|
|
|
|
_writing = false;
|
|
|
|
#ifdef WEBRTC_CODEC_GSMAMR
|
|
if(STR_CASE_CMP(codecInst.plname, "amr") == 0)
|
|
{
|
|
if(codecInst.pacsize == 160)
|
|
{
|
|
memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
|
|
_codecId = kCodecAmr;
|
|
out.Write("#!AMR\n",6);
|
|
_writing = true;
|
|
return 0;
|
|
}
|
|
}
|
|
#endif
|
|
#ifdef WEBRTC_CODEC_GSMAMRWB
|
|
if(STR_CASE_CMP(codecInst.plname, "amr-wb") == 0)
|
|
{
|
|
if(codecInst.pacsize == 320)
|
|
{
|
|
memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
|
|
_codecId = kCodecAmrWb;
|
|
out.Write("#!AMRWB\n",8);
|
|
_writing = true;
|
|
return 0;
|
|
}
|
|
}
|
|
#endif
|
|
#ifdef WEBRTC_CODEC_ILBC
|
|
if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
|
|
{
|
|
if(codecInst.pacsize == 160)
|
|
{
|
|
_codecId = kCodecIlbc20Ms;
|
|
out.Write("#!iLBC20\n",9);
|
|
}
|
|
else if(codecInst.pacsize == 240)
|
|
{
|
|
_codecId = kCodecIlbc30Ms;
|
|
out.Write("#!iLBC30\n",9);
|
|
}
|
|
else
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"codecInst defines unsupported compression codec!");
|
|
return -1;
|
|
}
|
|
memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
|
|
_writing = true;
|
|
return 0;
|
|
}
|
|
#endif
|
|
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"codecInst defines unsupported compression codec!");
|
|
return -1;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::WriteCompressedData(
|
|
OutStream& out,
|
|
const WebRtc_Word8* buffer,
|
|
const WebRtc_UWord32 dataLength)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceStream,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::WriteCompressedData(out= 0x%x, buf= 0x%x,\
|
|
dataLen= %d)",
|
|
&out,
|
|
buffer,
|
|
dataLength);
|
|
|
|
if(buffer == NULL)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
|
|
}
|
|
|
|
if(!out.Write(buffer, dataLength))
|
|
{
|
|
return -1;
|
|
}
|
|
return dataLength;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::InitPCMReading(InStream& pcm,
|
|
const WebRtc_UWord32 start,
|
|
const WebRtc_UWord32 stop,
|
|
WebRtc_UWord32 freq)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceInfo,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::InitPCMReading(pcm= 0x%x, start=%d, stop=%d,\
|
|
freq=%d)",
|
|
&pcm,
|
|
start,
|
|
stop,
|
|
freq);
|
|
|
|
WebRtc_Word8 dummy[320];
|
|
WebRtc_Word32 read_len;
|
|
|
|
_playoutPositionMs = 0;
|
|
_startPointInMs = start;
|
|
_stopPointInMs = stop;
|
|
_reading = false;
|
|
|
|
if(freq == 8000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
codec_info_.pltype = -1;
|
|
codec_info_.plfreq = 8000;
|
|
codec_info_.pacsize = 160;
|
|
codec_info_.channels = 1;
|
|
codec_info_.rate = 128000;
|
|
_codecId = kCodecL16_8Khz;
|
|
}
|
|
else if(freq == 16000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
codec_info_.pltype = -1;
|
|
codec_info_.plfreq = 16000;
|
|
codec_info_.pacsize = 320;
|
|
codec_info_.channels = 1;
|
|
codec_info_.rate = 256000;
|
|
_codecId = kCodecL16_16kHz;
|
|
}
|
|
else if(freq == 32000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
codec_info_.pltype = -1;
|
|
codec_info_.plfreq = 32000;
|
|
codec_info_.pacsize = 320;
|
|
codec_info_.channels = 1;
|
|
codec_info_.rate = 512000;
|
|
_codecId = kCodecL16_32Khz;
|
|
}
|
|
|
|
// Readsize for 10ms of audio data (2 bytes per sample).
|
|
_readSizeBytes = 2 * codec_info_. plfreq / 100;
|
|
if(_startPointInMs > 0)
|
|
{
|
|
while (_playoutPositionMs < _startPointInMs)
|
|
{
|
|
read_len = pcm.Read(dummy, _readSizeBytes);
|
|
if(read_len == _readSizeBytes)
|
|
{
|
|
_playoutPositionMs += 10;
|
|
}
|
|
else // Must have reached EOF before start position!
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
}
|
|
_reading = true;
|
|
return 0;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::ReadPCMData(InStream& pcm,
|
|
WebRtc_Word8* outData,
|
|
WebRtc_UWord32 bufferSize)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceStream,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::ReadPCMData(pcm= 0x%x, outData= 0x%x, bufSize= %d)",
|
|
&pcm,
|
|
outData,
|
|
bufferSize);
|
|
|
|
if(outData == NULL)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
|
|
}
|
|
|
|
// Readsize for 10ms of audio data (2 bytes per sample).
|
|
WebRtc_UWord32 bytesRequested = 2 * codec_info_.plfreq / 100;
|
|
if(bufferSize < bytesRequested)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"ReadPCMData: buffer not long enough for a 10ms frame.");
|
|
assert(false);
|
|
return -1;
|
|
}
|
|
|
|
WebRtc_UWord32 bytesRead = pcm.Read(outData, bytesRequested);
|
|
if(bytesRead < bytesRequested)
|
|
{
|
|
if(pcm.Rewind() == -1)
|
|
{
|
|
_reading = false;
|
|
}
|
|
else
|
|
{
|
|
if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
|
|
codec_info_.plfreq) == -1)
|
|
{
|
|
_reading = false;
|
|
}
|
|
else
|
|
{
|
|
WebRtc_Word32 rest = bytesRequested - bytesRead;
|
|
WebRtc_Word32 len = pcm.Read(&(outData[bytesRead]), rest);
|
|
if(len == rest)
|
|
{
|
|
bytesRead += len;
|
|
}
|
|
else
|
|
{
|
|
_reading = false;
|
|
}
|
|
}
|
|
if(bytesRead <= 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"ReadPCMData: Failed to rewind audio file.");
|
|
return -1;
|
|
}
|
|
}
|
|
}
|
|
|
|
if(bytesRead <= 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
|
|
"ReadPCMData: end of file");
|
|
return -1;
|
|
}
|
|
_playoutPositionMs += 10;
|
|
if(_stopPointInMs && _playoutPositionMs >= _stopPointInMs)
|
|
{
|
|
if(!pcm.Rewind())
|
|
{
|
|
if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
|
|
codec_info_.plfreq) == -1)
|
|
{
|
|
_reading = false;
|
|
}
|
|
}
|
|
}
|
|
return bytesRead;
|
|
}
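
// Editor's note (worked example): ReadPCMData() always assumes 16-bit mono
// samples, so at 32 kHz it requests 2 * 32000 / 100 = 640 bytes per call,
// i.e. one 10 ms frame, and advances _playoutPositionMs by 10 on success.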
|
|
|
|
WebRtc_Word32 ModuleFileUtility::InitPCMWriting(OutStream& out,
|
|
WebRtc_UWord32 freq)
|
|
{
|
|
WEBRTC_TRACE(kTraceModuleCall, kTraceFile, _id,
|
|
"ModuleFileUtility::InitPCMWriting(out=0x%x, freq= %ld)", &out,
|
|
freq);
|
|
|
|
if(freq == 8000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
codec_info_.pltype = -1;
|
|
codec_info_.plfreq = 8000;
|
|
codec_info_.pacsize = 160;
|
|
codec_info_.channels = 1;
|
|
codec_info_.rate = 128000;
|
|
|
|
_codecId = kCodecL16_8Khz;
|
|
}
|
|
else if(freq == 16000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
codec_info_.pltype = -1;
|
|
codec_info_.plfreq = 16000;
|
|
codec_info_.pacsize = 320;
|
|
codec_info_.channels = 1;
|
|
codec_info_.rate = 256000;
|
|
|
|
_codecId = kCodecL16_16kHz;
|
|
}
|
|
else if(freq == 32000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
codec_info_.pltype = -1;
|
|
codec_info_.plfreq = 32000;
|
|
codec_info_.pacsize = 320;
|
|
codec_info_.channels = 1;
|
|
codec_info_.rate = 512000;
|
|
|
|
_codecId = kCodecL16_32Khz;
|
|
}
|
|
if((_codecId != kCodecL16_8Khz) &&
|
|
(_codecId != kCodecL16_16kHz) &&
|
|
(_codecId != kCodecL16_32Khz))
|
|
{
|
|
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "CodecInst is not 8, 16 or 32 kHz PCM!");
|
|
return -1;
|
|
}
|
|
_writing = true;
|
|
_bytesWritten = 0;
|
|
return 0;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::WritePCMData(OutStream& out,
|
|
const WebRtc_Word8* buffer,
|
|
const WebRtc_UWord32 dataLength)
|
|
{
|
|
WEBRTC_TRACE(
|
|
kTraceStream,
|
|
kTraceFile,
|
|
_id,
|
|
"ModuleFileUtility::WritePCMData(out= 0x%x, buf= 0x%x, dataLen= %d)",
|
|
&out,
|
|
buffer,
|
|
dataLength);
|
|
|
|
if(buffer == NULL)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
|
|
}
|
|
|
|
if(!out.Write(buffer, dataLength))
|
|
{
|
|
return -1;
|
|
}
|
|
|
|
_bytesWritten += dataLength;
|
|
return dataLength;
|
|
}
|
|
|
|
WebRtc_Word32 ModuleFileUtility::codec_info(CodecInst& codecInst)
|
|
{
|
|
WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
|
|
"ModuleFileUtility::codec_info(codecInst= 0x%x)", &codecInst);
|
|
|
|
if(!_reading && !_writing)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"CodecInst: not currently reading audio file!");
|
|
return -1;
|
|
}
|
|
memcpy(&codecInst,&codec_info_,sizeof(CodecInst));
|
|
return 0;
|
|
}

WebRtc_Word32 ModuleFileUtility::set_codec_info(const CodecInst& codecInst)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceFile, _id,
                 "ModuleFileUtility::set_codec_info(codecName= %s)",
                 codecInst.plname);

    _codecId = kCodecNoCodec;
    if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
    {
        _codecId = kCodecPcmu;
    }
    else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
    {
        _codecId = kCodecPcma;
    }
    else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
    {
        if(codecInst.plfreq == 8000)
        {
            _codecId = kCodecL16_8Khz;
        }
        else if(codecInst.plfreq == 16000)
        {
            _codecId = kCodecL16_16kHz;
        }
        else if(codecInst.plfreq == 32000)
        {
            _codecId = kCodecL16_32Khz;
        }
    }
#ifdef WEBRTC_CODEC_GSMAMR
    else if(STR_CASE_CMP(codecInst.plname, "amr") == 0)
    {
        _codecId = kCodecAmr;
    }
#endif
#ifdef WEBRTC_CODEC_GSMAMRWB
    else if(STR_CASE_CMP(codecInst.plname, "amr-wb") == 0)
    {
        _codecId = kCodecAmrWb;
    }
#endif
#ifdef WEBRTC_CODEC_ILBC
    else if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
    {
        if(codecInst.pacsize == 160)
        {
            _codecId = kCodecIlbc20Ms;
        }
        else if(codecInst.pacsize == 240)
        {
            _codecId = kCodecIlbc30Ms;
        }
    }
#endif
#if(defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
    else if(STR_CASE_CMP(codecInst.plname, "isac") == 0)
    {
        if(codecInst.plfreq == 16000)
        {
            _codecId = kCodecIsac;
        }
        else if(codecInst.plfreq == 32000)
        {
            _codecId = kCodecIsacSwb;
        }
    }
#endif
#ifdef WEBRTC_CODEC_ISACLC
    else if(STR_CASE_CMP(codecInst.plname, "isaclc") == 0)
    {
        _codecId = kCodecIsacLc;
    }
#endif
#ifdef WEBRTC_CODEC_G722
    else if(STR_CASE_CMP(codecInst.plname, "G722") == 0)
    {
        _codecId = kCodecG722;
    }
#endif
    else if(STR_CASE_CMP(codecInst.plname, "G7221") == 0)
    {
#ifdef WEBRTC_CODEC_G722_1
        if(codecInst.plfreq == 16000)
        {
            if(codecInst.rate == 16000)
            {
                _codecId = kCodecG722_1_16Kbps;
            }
            else if(codecInst.rate == 24000)
            {
                _codecId = kCodecG722_1_24Kbps;
            }
            else if(codecInst.rate == 32000)
            {
                _codecId = kCodecG722_1_32Kbps;
            }
        }
#endif
#ifdef WEBRTC_CODEC_G722_1C
        if(codecInst.plfreq == 32000)
        {
            if(codecInst.rate == 48000)
            {
                _codecId = kCodecG722_1c_48;
            }
            else if(codecInst.rate == 32000)
            {
                _codecId = kCodecG722_1c_32;
            }
            else if(codecInst.rate == 24000)
            {
                _codecId = kCodecG722_1c_24;
            }
        }
#endif
    }
#ifdef WEBRTC_CODEC_G726
    else if(STR_CASE_CMP(codecInst.plname, "G726-40") == 0)
    {
        _codecId = kCodecG726_40;
    }
    else if(STR_CASE_CMP(codecInst.plname, "G726-32") == 0)
    {
        _codecId = kCodecG726_32;
    }
    else if(STR_CASE_CMP(codecInst.plname, "G726-24") == 0)
    {
        _codecId = kCodecG726_24;
    }
    else if(STR_CASE_CMP(codecInst.plname, "G726-16") == 0)
    {
        _codecId = kCodecG726_16;
    }
#endif
#ifdef WEBRTC_CODEC_G729
    else if(STR_CASE_CMP(codecInst.plname, "G729") == 0)
    {
        _codecId = kCodecG729;
    }
#endif
#ifdef WEBRTC_CODEC_G729_1
    else if(STR_CASE_CMP(codecInst.plname, "G7291") == 0)
    {
        _codecId = kCodecG729_1;
    }
#endif
#ifdef WEBRTC_CODEC_SPEEX
    else if(STR_CASE_CMP(codecInst.plname, "speex") == 0)
    {
        if(codecInst.plfreq == 8000)
        {
            _codecId = kCodecSpeex8Khz;
        }
        else if(codecInst.plfreq == 16000)
        {
            _codecId = kCodecSpeex16Khz;
        }
    }
#endif
    if(_codecId == kCodecNoCodec)
    {
        return -1;
    }
    memcpy(&codec_info_, &codecInst, sizeof(CodecInst));
    return 0;
}
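
// A minimal configuration sketch (hypothetical values; `util` is an
// illustrative ModuleFileUtility instance): only the codec names matched
// above are accepted, anything else leaves _codecId at kCodecNoCodec and
// returns -1.
//
//   CodecInst inst;
//   memset(&inst, 0, sizeof(inst));
//   strcpy(inst.plname, "L16");
//   inst.plfreq   = 16000;
//   inst.pacsize  = 320;
//   inst.channels = 1;
//   inst.rate     = 256000;
//   if(util.set_codec_info(inst) == -1)
//   {
//       // Codec name/parameters not supported by this build.
//   }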

WebRtc_Word32 ModuleFileUtility::FileDurationMs(const WebRtc_Word8* fileName,
                                                const FileFormats fileFormat,
                                                const WebRtc_UWord32 freqInHz)
{
    WEBRTC_TRACE(
        kTraceModuleCall,
        kTraceFile,
        _id,
        "ModuleFileUtility::FileDuration(%s, format= %d, frequency %d)",
        fileName,
        fileFormat,
        freqInHz);

    if(fileName == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "filename NULL");
        return -1;
    }

    WebRtc_Word32 time_in_ms = -1;
    struct stat file_size;
    if(stat(fileName,&file_size) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "failed to retrieve file size with stat!");
        return -1;
    }
    FileWrapper* inStreamObj = FileWrapper::Create();
    if(inStreamObj == NULL)
    {
        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
                     "failed to create InStream object!");
        return -1;
    }
    if(inStreamObj->OpenFile(fileName, true) == -1)
    {
        delete inStreamObj;
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "failed to open file %s!", fileName);
        return -1;
    }

    switch (fileFormat)
    {
        case kFileFormatWavFile:
        {
            if(ReadWavHeader(*inStreamObj) == -1)
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "failed to read WAV file header!");
                delete inStreamObj;
                return -1;
            }
            time_in_ms = ((file_size.st_size - 44) /
                          (_wavFormatObj.nAvgBytesPerSec/1000));
            break;
        }
        case kFileFormatPcm16kHzFile:
        {
            // 16 samples per ms. 2 bytes per sample.
            WebRtc_Word32 denominator = 16*2;
            time_in_ms = (file_size.st_size)/denominator;
            break;
        }
        case kFileFormatPcm8kHzFile:
        {
            // 8 samples per ms. 2 bytes per sample.
            WebRtc_Word32 denominator = 8*2;
            time_in_ms = (file_size.st_size)/denominator;
            break;
        }
        case kFileFormatCompressedFile:
        {
            WebRtc_Word32 cnt = 0;
            WebRtc_Word32 read_len = 0;
            WebRtc_Word8 buf[64];
            do
            {
                read_len = inStreamObj->Read(&buf[cnt++], 1);
                if(read_len != 1)
                {
                    delete inStreamObj;
                    return -1;
                }
            } while ((buf[cnt-1] != '\n') && (64 > cnt));

            if(cnt == 64)
            {
                delete inStreamObj;
                return -1;
            }
            else
            {
                buf[cnt] = 0;
            }
#ifdef WEBRTC_CODEC_GSMAMR
            if(!strcmp("#!AMR\n", buf))
            {
                WebRtc_UWord8 dummy;
                read_len = inStreamObj->Read(&dummy, 1);
                if(read_len != 1)
                {
                    delete inStreamObj;
                    return -1;
                }

                WebRtc_Word16 AMRMode = (dummy>>3)&0xF;

                // TODO (hellner): use tables instead of hardcoding like this!
                // Additionally, this calculation does not
                // take octet alignment into consideration.
                switch (AMRMode)
                {
                    // Mode 0: 4.75 kbit/sec -> 95 bits per 20 ms frame.
                    // 20 ms = 95 bits ->
                    // file size in bytes * 8 / 95 is the number of
                    // 20 ms frames in the file ->
                    // time_in_ms = file size * 8 / 95 * 20
                    case 0:
                        time_in_ms = ((file_size.st_size)*160)/95;
                        break;
                    // Mode 1: 5.15 kbit/sec -> 103 bits per 20 ms frame.
                    case 1:
                        time_in_ms = ((file_size.st_size)*160)/103;
                        break;
                    // Mode 2: 5.90 kbit/sec -> 118 bits per 20 ms frame.
                    case 2:
                        time_in_ms = ((file_size.st_size)*160)/118;
                        break;
                    // Mode 3: 6.70 kbit/sec -> 134 bits per 20 ms frame.
                    case 3:
                        time_in_ms = ((file_size.st_size)*160)/134;
                        break;
                    // Mode 4: 7.40 kbit/sec -> 148 bits per 20 ms frame.
                    case 4:
                        time_in_ms = ((file_size.st_size)*160)/148;
                        break;
                    // Mode 5: 7.95 kbit/sec -> 159 bits per 20 ms frame.
                    case 5:
                        time_in_ms = ((file_size.st_size)*160)/159;
                        break;
                    // Mode 6: 10.2 kbit/sec -> 204 bits per 20 ms frame.
                    case 6:
                        time_in_ms = ((file_size.st_size)*160)/204;
                        break;
                    // Mode 7: 12.2 kbit/sec -> 244 bits per 20 ms frame.
                    case 7:
                        time_in_ms = ((file_size.st_size)*160)/244;
                        break;
                    // Mode 8: SID Mode -> 39 bits per 20 ms frame.
                    case 8:
                        time_in_ms = ((file_size.st_size)*160)/39;
                        break;
                    default:
                        break;
                }
            }
#endif
#ifdef WEBRTC_CODEC_GSMAMRWB
            if(!strcmp("#!AMRWB\n", buf))
            {
                WebRtc_UWord8 dummy;
                read_len = inStreamObj->Read(&dummy, 1);
                if(read_len != 1)
                {
                    delete inStreamObj;
                    return -1;
                }

                // TODO (hellner): use tables instead of hardcoding like this!
                WebRtc_Word16 AMRWBMode = (dummy>>3)&0xF;
                switch(AMRWBMode)
                {
                    // Mode 0: 6.6 kbit/sec -> 132 bits per 20 ms frame.
                    case 0:
                        time_in_ms = ((file_size.st_size)*160)/132;
                        break;
                    // Mode 1: 8.85 kbit/sec -> 177 bits per 20 ms frame.
                    case 1:
                        time_in_ms = ((file_size.st_size)*160)/177;
                        break;
                    // Mode 2: 12.65 kbit/sec -> 253 bits per 20 ms frame.
                    case 2:
                        time_in_ms = ((file_size.st_size)*160)/253;
                        break;
                    // Mode 3: 14.25 kbit/sec -> 285 bits per 20 ms frame.
                    case 3:
                        time_in_ms = ((file_size.st_size)*160)/285;
                        break;
                    // Mode 4: 15.85 kbit/sec -> 317 bits per 20 ms frame.
                    case 4:
                        time_in_ms = ((file_size.st_size)*160)/317;
                        break;
                    // Mode 5: 18.25 kbit/sec -> 365 bits per 20 ms frame.
                    case 5:
                        time_in_ms = ((file_size.st_size)*160)/365;
                        break;
                    // Mode 6: 19.85 kbit/sec -> 397 bits per 20 ms frame.
                    case 6:
                        time_in_ms = ((file_size.st_size)*160)/397;
                        break;
                    // Mode 7: 23.05 kbit/sec -> 461 bits per 20 ms frame.
                    case 7:
                        time_in_ms = ((file_size.st_size)*160)/461;
                        break;
                    // Mode 8: 23.85 kbit/sec -> 477 bits per 20 ms frame.
                    case 8:
                        time_in_ms = ((file_size.st_size)*160)/477;
                        break;
                    default:
                        delete inStreamObj;
                        return -1;
                }
            }
#endif
#ifdef WEBRTC_CODEC_ILBC
            if(!strcmp("#!iLBC20\n", buf))
            {
                // 20 ms is 304 bits
                time_in_ms = ((file_size.st_size)*160)/304;
                break;
            }
            if(!strcmp("#!iLBC30\n", buf))
            {
                // 30 ms takes 400 bits.
                // file size in bytes * 8 / 400 is the number of
                // 30 ms frames in the file ->
                // time_in_ms = file size * 8 / 400 * 30
                time_in_ms = ((file_size.st_size)*240)/400;
                break;
            }
#endif
            break;
        }
        case kFileFormatPreencodedFile:
        {
            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                         "cannot determine duration of Pre-Encoded file!");
            break;
        }
        default:
            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                         "unsupported file format %d!", fileFormat);
            break;
    }
    inStreamObj->CloseFile();
    delete inStreamObj;
    return time_in_ms;
}
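
// Worked example for the WAV branch above (illustrative numbers, assuming the
// canonical 44-byte RIFF/fmt/data header this code subtracts): a 16 kHz,
// 16-bit mono file of 324044 bytes has nAvgBytesPerSec = 32000, so the
// duration is (324044 - 44) / (32000 / 1000) = 324000 / 32 = 10125 ms.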

WebRtc_UWord32 ModuleFileUtility::PlayoutPositionMs()
{
    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
                 "ModuleFileUtility::PlayoutPosition()");

    if(_reading)
    {
        return _playoutPositionMs;
    }
    else
    {
        return 0;
    }
}
} // namespace webrtc