[DEV] add basic clone of RtAudio with code simplification review (tested alsa, pulse, jack)

2014-03-11 21:46:00 +01:00
commit b21b2f7413
27 changed files with 9861 additions and 0 deletions

1184
airtaudio/api/Alsa.cpp Normal file

File diff suppressed because it is too large

47
airtaudio/api/Alsa.h Normal file

@@ -0,0 +1,47 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if !defined(__AIRTAUDIO_API_ALSA_H__) && defined(__LINUX_ALSA__)
#define __AIRTAUDIO_API_ALSA_H__
namespace airtaudio {
namespace api {
class Alsa: public airtaudio::Api {
public:
Alsa();
~Alsa();
airtaudio::api::type getCurrentApi(void) {
return airtaudio::api::LINUX_ALSA;
}
uint32_t getDeviceCount(void);
airtaudio::DeviceInfo getDeviceInfo(uint32_t _device);
void closeStream(void);
void startStream(void);
void stopStream(void);
void abortStream(void);
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
void callbackEvent(void);
private:
std::vector<airtaudio::DeviceInfo> m_devices;
void saveDeviceInfo(void);
bool probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options);
};
};
};
#endif
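
All of the backend headers expose the same airtaudio::Api surface (getDeviceCount(), getDeviceInfo(), closeStream(), startStream(), stopStream(), abortStream()), so the backends are interchangeable behind the front-end. A minimal enumeration sketch, not part of this commit, assuming __LINUX_ALSA__ is defined, that airtaudio/Interface.h pulls in the enabled backend headers, and that the DeviceInfo fields (name, outputChannels, inputChannels) carry over from RtAudio:

#include <airtaudio/Interface.h>
#include <iostream>

// Illustrative only: probe the ALSA backend directly and list its devices.
int main(void) {
	airtaudio::api::Alsa alsa;
	for (uint32_t id = 0; id < alsa.getDeviceCount(); ++id) {
		airtaudio::DeviceInfo info = alsa.getDeviceInfo(id);
		std::cout << id << ": " << info.name
		          << " (out=" << info.outputChannels
		          << ", in=" << info.inputChannels << ")" << std::endl;
	}
	return 0;
}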

979
airtaudio/api/Asio.cpp Normal file

@@ -0,0 +1,979 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if defined(__WINDOWS_ASIO__) // ASIO API on Windows
#include <airtaudio/Interface.h>
// The ASIO API is designed around a callback scheme, so this
// implementation is similar to that used for OS-X CoreAudio and Linux
// Jack. The primary constraint with ASIO is that it only allows
// access to a single driver at a time. Thus, it is not possible to
// have more than one simultaneous RtAudio stream.
//
// This implementation also requires a number of external ASIO files
// and a few global variables. The ASIO callback scheme does not
// allow for the passing of user data, so we must create a global
// pointer to our callbackInfo structure.
//
// On unix systems, we make use of a pthread condition variable.
// Since there is no equivalent in Windows, I hacked something based
// on information found in
// http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
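// (Illustrative note, not in the original source: the manual-reset event
// created below plays the role of that condition variable. stopStream()
// blocks on it with WaitForSingleObject(condition, INFINITE) and
// callbackEvent() releases it with SetEvent(condition) once the output has
// drained; ResetEvent(condition) in startStream() re-arms it, e.g.:
//
//   HANDLE condition = CreateEvent(NULL, TRUE /*manual reset*/, FALSE, NULL);
//   // waiter:   WaitForSingleObject(condition, INFINITE);
//   // signaler: SetEvent(condition);
// )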
#include "asiosys.h"
#include "asio.h"
#include "iasiothiscallresolver.h"
#include "asiodrivers.h"
#include <cmath>
static AsioDrivers drivers;
static ASIOCallbacks asioCallbacks;
static ASIODriverInfo driverInfo;
static CallbackInfo *asioCallbackInfo;
static bool asioXRun;
struct AsioHandle {
int32_t drainCounter; // Tracks callback counts when draining
bool internalDrain; // Indicates if stop is initiated from callback or not.
ASIOBufferInfo *bufferInfos;
HANDLE condition;
AsioHandle()
:drainCounter(0), internalDrain(false), bufferInfos(0) {}
};
// Function declarations (definitions at end of section)
static const char* getAsioErrorString(ASIOError result);
static void sampleRateChanged(ASIOSampleRate sRate);
static long asioMessages(long selector, long value, void* message, double* opt);
airtaudio::api::Asio::Asio(void) {
// ASIO cannot run on a multi-threaded apartment. You can call
// CoInitialize beforehand, but it must be for apartment threading
// (in which case, CoInitialize will return S_FALSE here).
m_coInitialized = false;
HRESULT hr = CoInitialize(NULL);
if (FAILED(hr)) {
m_errorText = "airtaudio::api::Asio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
error(airtaudio::errorWarning);
}
m_coInitialized = true;
drivers.removeCurrentDriver();
driverInfo.asioVersion = 2;
// See note in DirectSound implementation about GetDesktopWindow().
driverInfo.sysRef = GetForegroundWindow();
}
airtaudio::api::Asio::~Asio(void) {
if (m_stream.state != STREAM_CLOSED) closeStream();
if (m_coInitialized) CoUninitialize();
}
uint32_t airtaudio::api::Asio::getDeviceCount(void)
{
return (uint32_t) drivers.asioGetNumDev();
}
airtaudio::DeviceInfo airtaudio::api::Asio::getDeviceInfo(uint32_t device)
{
airtaudio::DeviceInfo info;
info.probed = false;
// Get device ID
uint32_t nDevices = getDeviceCount();
if (nDevices == 0) {
m_errorText = "airtaudio::api::Asio::getDeviceInfo: no devices found!";
error(airtaudio::errorInvalidUse);
return info;
}
if (device >= nDevices) {
m_errorText = "airtaudio::api::Asio::getDeviceInfo: device ID is invalid!";
error(airtaudio::errorInvalidUse);
return info;
}
// If a stream is already open, we cannot probe other devices. Thus, use the saved results.
if (m_stream.state != STREAM_CLOSED) {
if (device >= m_devices.size()) {
m_errorText = "airtaudio::api::Asio::getDeviceInfo: device ID was not present before stream was opened.";
error(airtaudio::errorWarning);
return info;
}
return m_devices[ device ];
}
char driverName[32];
ASIOError result = drivers.asioGetDriverName((int) device, driverName, 32);
if (result != ASE_OK) {
m_errorStream << "airtaudio::api::Asio::getDeviceInfo: unable to get driver name (" << getAsioErrorString(result) << ").";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning);
return info;
}
info.name = driverName;
if (!drivers.loadDriver(driverName)) {
m_errorStream << "airtaudio::api::Asio::getDeviceInfo: unable to load driver (" << driverName << ").";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning);
return info;
}
result = ASIOInit(&driverInfo);
if (result != ASE_OK) {
m_errorStream << "airtaudio::api::Asio::getDeviceInfo: error (" << getAsioErrorString(result) << ") initializing driver (" << driverName << ").";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning);
return info;
}
// Determine the device channel information.
long inputChannels, outputChannels;
result = ASIOGetChannels(&inputChannels, &outputChannels);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
m_errorStream << "airtaudio::api::Asio::getDeviceInfo: error (" << getAsioErrorString(result) << ") getting channel count (" << driverName << ").";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning);
return info;
}
info.outputChannels = outputChannels;
info.inputChannels = inputChannels;
if (info.outputChannels > 0 && info.inputChannels > 0)
info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
// Determine the supported sample rates.
info.sampleRates.clear();
for (uint32_t i=0; i<MAX_SAMPLE_RATES; i++) {
result = ASIOCanSampleRate((ASIOSampleRate) SAMPLE_RATES[i]);
if (result == ASE_OK)
info.sampleRates.push_back(SAMPLE_RATES[i]);
}
// Determine supported data types ... just check first channel and assume rest are the same.
ASIOChannelInfo channelInfo;
channelInfo.channel = 0;
channelInfo.isInput = true;
if (info.inputChannels <= 0) channelInfo.isInput = false;
result = ASIOGetChannelInfo(&channelInfo);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
m_errorStream << "airtaudio::api::Asio::getDeviceInfo: error (" << getAsioErrorString(result) << ") getting driver channel info (" << driverName << ").";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning);
return info;
}
info.nativeFormats = 0;
if (channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB)
info.nativeFormats |= RTAUDIO_SINT16;
else if (channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB)
info.nativeFormats |= RTAUDIO_SINT32;
else if (channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB)
info.nativeFormats |= RTAUDIO_FLOAT32;
else if (channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB)
info.nativeFormats |= RTAUDIO_FLOAT64;
else if (channelInfo.type == ASIOSTInt24MSB || channelInfo.type == ASIOSTInt24LSB)
info.nativeFormats |= RTAUDIO_SINT24;
if (info.outputChannels > 0)
if (getDefaultOutputDevice() == device) info.isDefaultOutput = true;
if (info.inputChannels > 0)
if (getDefaultInputDevice() == device) info.isDefaultInput = true;
info.probed = true;
drivers.removeCurrentDriver();
return info;
}
static void bufferSwitch(long index, ASIOBool processNow)
{
airtaudio::api::Asio *object = (airtaudio::api::Asio *) asioCallbackInfo->object;
object->callbackEvent(index);
}
void airtaudio::api::Asio::saveDeviceInfo(void)
{
m_devices.clear();
uint32_t nDevices = getDeviceCount();
m_devices.resize(nDevices);
for (uint32_t i=0; i<nDevices; i++)
m_devices[i] = getDeviceInfo(i);
}
bool airtaudio::api::Asio::probeDeviceOpen(uint32_t device, StreamMode mode, uint32_t channels,
uint32_t firstChannel, uint32_t sampleRate,
airtaudio::format format, uint32_t *bufferSize,
airtaudio::StreamOptions *options)
{
// For ASIO, a duplex stream MUST use the same driver.
if (mode == INPUT && m_stream.mode == OUTPUT && m_stream.device[0] != device) {
m_errorText = "airtaudio::api::Asio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
return FAILURE;
}
char driverName[32];
ASIOError result = drivers.asioGetDriverName((int) device, driverName, 32);
if (result != ASE_OK) {
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString(result) << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Only load the driver once for duplex stream.
if (mode != INPUT || m_stream.mode != OUTPUT) {
// The getDeviceInfo() function will not work when a stream is open
// because ASIO does not allow multiple devices to run at the same
// time. Thus, we'll probe the system before opening a stream and
// save the results for use by getDeviceInfo().
this->saveDeviceInfo();
if (!drivers.loadDriver(driverName)) {
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: unable to load driver (" << driverName << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
result = ASIOInit(&driverInfo);
if (result != ASE_OK) {
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: error (" << getAsioErrorString(result) << ") initializing driver (" << driverName << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
}
// Check the device channel count.
long inputChannels, outputChannels;
result = ASIOGetChannels(&inputChannels, &outputChannels);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: error (" << getAsioErrorString(result) << ") getting channel count (" << driverName << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
if ((mode == OUTPUT && (channels+firstChannel) > (uint32_t) outputChannels) ||
(mode == INPUT && (channels+firstChannel) > (uint32_t) inputChannels)) {
drivers.removeCurrentDriver();
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
m_stream.nDeviceChannels[mode] = channels;
m_stream.nUserChannels[mode] = channels;
m_stream.channelOffset[mode] = firstChannel;
// Verify the sample rate is supported.
result = ASIOCanSampleRate((ASIOSampleRate) sampleRate);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Get the current sample rate
ASIOSampleRate currentRate;
result = ASIOGetSampleRate(&currentRate);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Set the sample rate only if necessary
if (currentRate != sampleRate) {
result = ASIOSetSampleRate((ASIOSampleRate) sampleRate);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
}
// Determine the driver data type.
ASIOChannelInfo channelInfo;
channelInfo.channel = 0;
if (mode == OUTPUT) channelInfo.isInput = false;
else channelInfo.isInput = true;
result = ASIOGetChannelInfo(&channelInfo);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString(result) << ") getting data format.";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Assuming WINDOWS host is always little-endian.
m_stream.doByteSwap[mode] = false;
m_stream.userFormat = format;
m_stream.deviceFormat[mode] = 0;
if (channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB) {
m_stream.deviceFormat[mode] = RTAUDIO_SINT16;
if (channelInfo.type == ASIOSTInt16MSB) m_stream.doByteSwap[mode] = true;
}
else if (channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB) {
m_stream.deviceFormat[mode] = RTAUDIO_SINT32;
if (channelInfo.type == ASIOSTInt32MSB) m_stream.doByteSwap[mode] = true;
}
else if (channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB) {
m_stream.deviceFormat[mode] = RTAUDIO_FLOAT32;
if (channelInfo.type == ASIOSTFloat32MSB) m_stream.doByteSwap[mode] = true;
}
else if (channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB) {
m_stream.deviceFormat[mode] = RTAUDIO_FLOAT64;
if (channelInfo.type == ASIOSTFloat64MSB) m_stream.doByteSwap[mode] = true;
}
else if (channelInfo.type == ASIOSTInt24MSB || channelInfo.type == ASIOSTInt24LSB) {
m_stream.deviceFormat[mode] = RTAUDIO_SINT24;
if (channelInfo.type == ASIOSTInt24MSB) m_stream.doByteSwap[mode] = true;
}
if (m_stream.deviceFormat[mode] == 0) {
drivers.removeCurrentDriver();
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Set the buffer size. For a duplex stream, this will end up
// setting the buffer size based on the input constraints, which
// should be ok.
long minSize, maxSize, preferSize, granularity;
result = ASIOGetBufferSize(&minSize, &maxSize, &preferSize, &granularity);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString(result) << ") getting buffer size.";
m_errorText = m_errorStream.str();
return FAILURE;
}
if (*bufferSize < (uint32_t) minSize) *bufferSize = (uint32_t) minSize;
else if (*bufferSize > (uint32_t) maxSize) *bufferSize = (uint32_t) maxSize;
else if (granularity == -1) {
// Make sure bufferSize is a power of two.
int32_t log2_of_min_size = 0;
int32_t log2_of_max_size = 0;
for (uint32_t i = 0; i < sizeof(long) * 8; i++) {
if (minSize & ((long)1 << i)) log2_of_min_size = i;
if (maxSize & ((long)1 << i)) log2_of_max_size = i;
}
long min_delta = std::abs((long)*bufferSize - ((long)1 << log2_of_min_size));
int32_t min_delta_num = log2_of_min_size;
for (int32_t i = log2_of_min_size + 1; i <= log2_of_max_size; i++) {
long current_delta = std::abs((long)*bufferSize - ((long)1 << i));
if (current_delta < min_delta) {
min_delta = current_delta;
min_delta_num = i;
}
}
*bufferSize = ((uint32_t)1 << min_delta_num);
if (*bufferSize < (uint32_t) minSize) *bufferSize = (uint32_t) minSize;
else if (*bufferSize > (uint32_t) maxSize) *bufferSize = (uint32_t) maxSize;
}
else if (granularity != 0) {
// Set to an even multiple of granularity, rounding up.
*bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
}
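// Worked example (illustrative): with minSize = 64, maxSize = 4096 and
// granularity == -1, a requested *bufferSize of 500 snaps to the nearest
// power of two in range, 512; with granularity == 96 it is instead rounded
// up to the next multiple, (500 + 95) / 96 * 96 = 576.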
if (mode == INPUT && m_stream.mode == OUTPUT && m_stream.bufferSize != *bufferSize) {
drivers.removeCurrentDriver();
m_errorText = "airtaudio::api::Asio::probeDeviceOpen: input/output buffersize discrepancy!";
return FAILURE;
}
m_stream.bufferSize = *bufferSize;
m_stream.nBuffers = 2;
if (options && options->flags & RTAUDIO_NONINTERLEAVED) m_stream.userInterleaved = false;
else m_stream.userInterleaved = true;
// ASIO always uses non-interleaved buffers.
m_stream.deviceInterleaved[mode] = false;
// Allocate, if necessary, our AsioHandle structure for the stream.
AsioHandle *handle = (AsioHandle *) m_stream.apiHandle;
if (handle == 0) {
try {
handle = new AsioHandle;
}
catch (std::bad_alloc&) {
//if (handle == NULL) {
drivers.removeCurrentDriver();
m_errorText = "airtaudio::api::Asio::probeDeviceOpen: error allocating AsioHandle memory.";
return FAILURE;
}
handle->bufferInfos = 0;
// Create a manual-reset event.
handle->condition = CreateEvent(NULL, // no security
TRUE, // manual-reset
FALSE, // non-signaled initially
NULL); // unnamed
m_stream.apiHandle = (void *) handle;
}
// Create the ASIO internal buffers. Since RtAudio sets up input
// and output separately, we'll have to dispose of previously
// created output buffers for a duplex stream.
long inputLatency, outputLatency;
if (mode == INPUT && m_stream.mode == OUTPUT) {
ASIODisposeBuffers();
if (handle->bufferInfos) free(handle->bufferInfos);
}
// Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
bool buffersAllocated = false;
uint32_t i, nChannels = m_stream.nDeviceChannels[0] + m_stream.nDeviceChannels[1];
handle->bufferInfos = (ASIOBufferInfo *) malloc(nChannels * sizeof(ASIOBufferInfo));
if (handle->bufferInfos == NULL) {
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
m_errorText = m_errorStream.str();
goto error;
}
ASIOBufferInfo *infos;
infos = handle->bufferInfos;
for (i=0; i<m_stream.nDeviceChannels[0]; i++, infos++) {
infos->isInput = ASIOFalse;
infos->channelNum = i + m_stream.channelOffset[0];
infos->buffers[0] = infos->buffers[1] = 0;
}
for (i=0; i<m_stream.nDeviceChannels[1]; i++, infos++) {
infos->isInput = ASIOTrue;
infos->channelNum = i + m_stream.channelOffset[1];
infos->buffers[0] = infos->buffers[1] = 0;
}
// Set up the ASIO callback structure and create the ASIO data buffers.
asioCallbacks.bufferSwitch = &bufferSwitch;
asioCallbacks.sampleRateDidChange = &sampleRateChanged;
asioCallbacks.asioMessage = &asioMessages;
asioCallbacks.bufferSwitchTimeInfo = NULL;
result = ASIOCreateBuffers(handle->bufferInfos, nChannels, m_stream.bufferSize, &asioCallbacks);
if (result != ASE_OK) {
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString(result) << ") creating buffers.";
m_errorText = m_errorStream.str();
goto error;
}
buffersAllocated = true;
// Set flags for buffer conversion.
m_stream.doConvertBuffer[mode] = false;
if (m_stream.userFormat != m_stream.deviceFormat[mode])
m_stream.doConvertBuffer[mode] = true;
if (m_stream.userInterleaved != m_stream.deviceInterleaved[mode] &&
m_stream.nUserChannels[mode] > 1)
m_stream.doConvertBuffer[mode] = true;
// Allocate necessary internal buffers
uint64_t bufferBytes;
bufferBytes = m_stream.nUserChannels[mode] * *bufferSize * formatBytes(m_stream.userFormat);
m_stream.userBuffer[mode] = (char *) calloc(bufferBytes, 1);
if (m_stream.userBuffer[mode] == NULL) {
m_errorText = "airtaudio::api::Asio::probeDeviceOpen: error allocating user buffer memory.";
goto error;
}
if (m_stream.doConvertBuffer[mode]) {
bool makeBuffer = true;
bufferBytes = m_stream.nDeviceChannels[mode] * formatBytes(m_stream.deviceFormat[mode]);
if (mode == INPUT) {
if (m_stream.mode == OUTPUT && m_stream.deviceBuffer) {
uint64_t bytesOut = m_stream.nDeviceChannels[0] * formatBytes(m_stream.deviceFormat[0]);
if (bufferBytes <= bytesOut) makeBuffer = false;
}
}
if (makeBuffer) {
bufferBytes *= *bufferSize;
if (m_stream.deviceBuffer) free(m_stream.deviceBuffer);
m_stream.deviceBuffer = (char *) calloc(bufferBytes, 1);
if (m_stream.deviceBuffer == NULL) {
m_errorText = "airtaudio::api::Asio::probeDeviceOpen: error allocating device buffer memory.";
goto error;
}
}
}
m_stream.sampleRate = sampleRate;
m_stream.device[mode] = device;
m_stream.state = STREAM_STOPPED;
asioCallbackInfo = &m_stream.callbackInfo;
m_stream.callbackInfo.object = (void *) this;
if (m_stream.mode == OUTPUT && mode == INPUT)
// We had already set up an output stream.
m_stream.mode = DUPLEX;
else
m_stream.mode = mode;
// Determine device latencies
result = ASIOGetLatencies(&inputLatency, &outputLatency);
if (result != ASE_OK) {
m_errorStream << "airtaudio::api::Asio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString(result) << ") getting latency.";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning); // warn but don't fail
}
else {
m_stream.latency[0] = outputLatency;
m_stream.latency[1] = inputLatency;
}
// Setup the buffer conversion information structure. We don't use
// buffers to do channel offsets, so we override that parameter
// here.
if (m_stream.doConvertBuffer[mode]) setConvertInfo(mode, 0);
return SUCCESS;
error:
if (buffersAllocated)
ASIODisposeBuffers();
drivers.removeCurrentDriver();
if (handle) {
CloseHandle(handle->condition);
if (handle->bufferInfos)
free(handle->bufferInfos);
delete handle;
m_stream.apiHandle = 0;
}
for (int32_t i=0; i<2; i++) {
if (m_stream.userBuffer[i]) {
free(m_stream.userBuffer[i]);
m_stream.userBuffer[i] = 0;
}
}
if (m_stream.deviceBuffer) {
free(m_stream.deviceBuffer);
m_stream.deviceBuffer = 0;
}
return FAILURE;
}
void airtaudio::api::Asio::closeStream()
{
if (m_stream.state == STREAM_CLOSED) {
m_errorText = "airtaudio::api::Asio::closeStream(): no open stream to close!";
error(airtaudio::errorWarning);
return;
}
if (m_stream.state == STREAM_RUNNING) {
m_stream.state = STREAM_STOPPED;
ASIOStop();
}
ASIODisposeBuffers();
drivers.removeCurrentDriver();
AsioHandle *handle = (AsioHandle *) m_stream.apiHandle;
if (handle) {
CloseHandle(handle->condition);
if (handle->bufferInfos)
free(handle->bufferInfos);
delete handle;
m_stream.apiHandle = 0;
}
for (int32_t i=0; i<2; i++) {
if (m_stream.userBuffer[i]) {
free(m_stream.userBuffer[i]);
m_stream.userBuffer[i] = 0;
}
}
if (m_stream.deviceBuffer) {
free(m_stream.deviceBuffer);
m_stream.deviceBuffer = 0;
}
m_stream.mode = UNINITIALIZED;
m_stream.state = STREAM_CLOSED;
}
bool stopThreadCalled = false;
void airtaudio::api::Asio::startStream()
{
verifyStream();
if (m_stream.state == STREAM_RUNNING) {
m_errorText = "airtaudio::api::Asio::startStream(): the stream is already running!";
error(airtaudio::errorWarning);
return;
}
AsioHandle *handle = (AsioHandle *) m_stream.apiHandle;
ASIOError result = ASIOStart();
if (result != ASE_OK) {
m_errorStream << "airtaudio::api::Asio::startStream: error (" << getAsioErrorString(result) << ") starting device.";
m_errorText = m_errorStream.str();
goto unlock;
}
handle->drainCounter = 0;
handle->internalDrain = false;
ResetEvent(handle->condition);
m_stream.state = STREAM_RUNNING;
asioXRun = false;
unlock:
stopThreadCalled = false;
if (result == ASE_OK) return;
error(airtaudio::errorSystemError);
}
void airtaudio::api::Asio::stopStream()
{
verifyStream();
if (m_stream.state == STREAM_STOPPED) {
m_errorText = "airtaudio::api::Asio::stopStream(): the stream is already stopped!";
error(airtaudio::errorWarning);
return;
}
AsioHandle *handle = (AsioHandle *) m_stream.apiHandle;
if (m_stream.mode == OUTPUT || m_stream.mode == DUPLEX) {
if (handle->drainCounter == 0) {
handle->drainCounter = 2;
WaitForSingleObject(handle->condition, INFINITE); // block until signaled
}
}
m_stream.state = STREAM_STOPPED;
ASIOError result = ASIOStop();
if (result != ASE_OK) {
m_errorStream << "airtaudio::api::Asio::stopStream: error (" << getAsioErrorString(result) << ") stopping device.";
m_errorText = m_errorStream.str();
}
if (result == ASE_OK) return;
error(airtaudio::errorSystemError);
}
void airtaudio::api::Asio::abortStream()
{
verifyStream();
if (m_stream.state == STREAM_STOPPED) {
m_errorText = "airtaudio::api::Asio::abortStream(): the stream is already stopped!";
error(airtaudio::errorWarning);
return;
}
// The following lines were commented-out because some behavior was
// noted where the device buffers need to be zeroed to avoid
// continuing sound, even when the device buffers are completely
// disposed. So now, calling abort is the same as calling stop.
// AsioHandle *handle = (AsioHandle *) m_stream.apiHandle;
// handle->drainCounter = 2;
stopStream();
}
// This function will be called by a spawned thread when the user
// callback function signals that the stream should be stopped or
// aborted. It is necessary to handle it this way because the
// callbackEvent() function must return before the ASIOStop()
// function will return.
static unsigned __stdcall asioStopStream(void *ptr)
{
CallbackInfo *info = (CallbackInfo *) ptr;
airtaudio::api::Asio *object = (airtaudio::api::Asio *) info->object;
object->stopStream();
_endthreadex(0);
return 0;
}
bool airtaudio::api::Asio::callbackEvent(long bufferIndex)
{
if (m_stream.state == STREAM_STOPPED || m_stream.state == STREAM_STOPPING) return SUCCESS;
if (m_stream.state == STREAM_CLOSED) {
m_errorText = "airtaudio::api::Asio::callbackEvent(): the stream is closed ... this shouldn't happen!";
error(airtaudio::errorWarning);
return FAILURE;
}
CallbackInfo *info = (CallbackInfo *) &m_stream.callbackInfo;
AsioHandle *handle = (AsioHandle *) m_stream.apiHandle;
// Check if we were draining the stream and signal if finished.
if (handle->drainCounter > 3) {
m_stream.state = STREAM_STOPPING;
if (handle->internalDrain == false)
SetEvent(handle->condition);
else { // spawn a thread to stop the stream
unsigned threadId;
m_stream.callbackInfo.thread = _beginthreadex(NULL, 0, &asioStopStream,
&m_stream.callbackInfo, 0, &threadId);
}
return SUCCESS;
}
// Invoke user callback to get fresh output data UNLESS we are
// draining stream.
if (handle->drainCounter == 0) {
airtaudio::AirTAudioCallback callback = (airtaudio::AirTAudioCallback) info->callback;
double streamTime = getStreamTime();
airtaudio::streamStatus status = 0;
if (m_stream.mode != INPUT && asioXRun == true) {
status |= RTAUDIO_OUTPUT_UNDERFLOW;
asioXRun = false;
}
if (m_stream.mode != OUTPUT && asioXRun == true) {
status |= RTAUDIO_INPUT_OVERFLOW;
asioXRun = false;
}
int32_t cbReturnValue = callback(m_stream.userBuffer[0], m_stream.userBuffer[1],
m_stream.bufferSize, streamTime, status, info->userData);
if (cbReturnValue == 2) {
m_stream.state = STREAM_STOPPING;
handle->drainCounter = 2;
unsigned threadId;
m_stream.callbackInfo.thread = _beginthreadex(NULL, 0, &asioStopStream,
&m_stream.callbackInfo, 0, &threadId);
return SUCCESS;
}
else if (cbReturnValue == 1) {
handle->drainCounter = 1;
handle->internalDrain = true;
}
}
uint32_t nChannels, bufferBytes, i, j;
nChannels = m_stream.nDeviceChannels[0] + m_stream.nDeviceChannels[1];
if (m_stream.mode == OUTPUT || m_stream.mode == DUPLEX) {
bufferBytes = m_stream.bufferSize * formatBytes(m_stream.deviceFormat[0]);
if (handle->drainCounter > 1) { // write zeros to the output stream
for (i=0, j=0; i<nChannels; i++) {
if (handle->bufferInfos[i].isInput != ASIOTrue)
memset(handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes);
}
}
else if (m_stream.doConvertBuffer[0]) {
convertBuffer(m_stream.deviceBuffer, m_stream.userBuffer[0], m_stream.convertInfo[0]);
if (m_stream.doByteSwap[0])
byteSwapBuffer(m_stream.deviceBuffer,
m_stream.bufferSize * m_stream.nDeviceChannels[0],
m_stream.deviceFormat[0]);
for (i=0, j=0; i<nChannels; i++) {
if (handle->bufferInfos[i].isInput != ASIOTrue)
memcpy(handle->bufferInfos[i].buffers[bufferIndex],
&m_stream.deviceBuffer[j++*bufferBytes], bufferBytes);
}
}
else {
if (m_stream.doByteSwap[0])
byteSwapBuffer(m_stream.userBuffer[0],
m_stream.bufferSize * m_stream.nUserChannels[0],
m_stream.userFormat);
for (i=0, j=0; i<nChannels; i++) {
if (handle->bufferInfos[i].isInput != ASIOTrue)
memcpy(handle->bufferInfos[i].buffers[bufferIndex],
&m_stream.userBuffer[0][bufferBytes*j++], bufferBytes);
}
}
if (handle->drainCounter) {
handle->drainCounter++;
goto unlock;
}
}
if (m_stream.mode == INPUT || m_stream.mode == DUPLEX) {
bufferBytes = m_stream.bufferSize * formatBytes(m_stream.deviceFormat[1]);
if (m_stream.doConvertBuffer[1]) {
// Always interleave ASIO input data.
for (i=0, j=0; i<nChannels; i++) {
if (handle->bufferInfos[i].isInput == ASIOTrue)
memcpy(&m_stream.deviceBuffer[j++*bufferBytes],
handle->bufferInfos[i].buffers[bufferIndex],
bufferBytes);
}
if (m_stream.doByteSwap[1])
byteSwapBuffer(m_stream.deviceBuffer,
m_stream.bufferSize * m_stream.nDeviceChannels[1],
m_stream.deviceFormat[1]);
convertBuffer(m_stream.userBuffer[1], m_stream.deviceBuffer, m_stream.convertInfo[1]);
}
else {
for (i=0, j=0; i<nChannels; i++) {
if (handle->bufferInfos[i].isInput == ASIOTrue) {
memcpy(&m_stream.userBuffer[1][bufferBytes*j++],
handle->bufferInfos[i].buffers[bufferIndex],
bufferBytes);
}
}
if (m_stream.doByteSwap[1])
byteSwapBuffer(m_stream.userBuffer[1],
m_stream.bufferSize * m_stream.nUserChannels[1],
m_stream.userFormat);
}
}
unlock:
// The following call was suggested by Malte Clasen. While the API
// documentation indicates it should not be required, some device
// drivers apparently do not function correctly without it.
ASIOOutputReady();
airtaudio::Api::tickStreamTime();
return SUCCESS;
}
static void sampleRateChanged(ASIOSampleRate sRate)
{
// The ASIO documentation says that this usually only happens during
// external sync. Audio processing is not stopped by the driver,
// actual sample rate might not have even changed, maybe only the
// sample rate status of an AES/EBU or S/PDIF digital input at the
// audio device.
airtaudio::Api *object = (airtaudio::Api *) asioCallbackInfo->object;
try {
object->stopStream();
}
catch (RtError &exception) {
std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
return;
}
std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
}
static long asioMessages(long selector, long value, void* message, double* opt)
{
long ret = 0;
switch(selector) {
case kAsioSelectorSupported:
if (value == kAsioResetRequest
|| value == kAsioEngineVersion
|| value == kAsioResyncRequest
|| value == kAsioLatenciesChanged
// The following three were added for ASIO 2.0, you don't
// necessarily have to support them.
|| value == kAsioSupportsTimeInfo
|| value == kAsioSupportsTimeCode
|| value == kAsioSupportsInputMonitor)
ret = 1L;
break;
case kAsioResetRequest:
// Defer the task and perform the reset of the driver during the
// next "safe" situation. You cannot reset the driver right now,
// as this code is called from the driver. Resetting the driver is
// done by completely destroying it, i.e. ASIOStop(),
// ASIODisposeBuffers(), destruction. Afterwards you initialize the
// driver again.
std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
ret = 1L;
break;
case kAsioResyncRequest:
// This informs the application that the driver encountered some
// non-fatal data loss. It is used for synchronization purposes
// of different media. Added mainly to work around the Win16Mutex
// problems in Windows 95/98 with the Windows Multimedia system,
// which could lose data because the Mutex was held too long by
// another thread. However a driver can issue it in other
// situations, too.
// std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
asioXRun = true;
ret = 1L;
break;
case kAsioLatenciesChanged:
// This will inform the host application that the driver's
// latencies have changed. Beware, this does not mean that the
// buffer sizes have changed! You might need to update internal
// delay data.
std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
ret = 1L;
break;
case kAsioEngineVersion:
// Return the supported ASIO version of the host application. If
// a host application does not implement this selector, ASIO 1.0
// is assumed by the driver.
ret = 2L;
break;
case kAsioSupportsTimeInfo:
// Informs the driver whether the
// asioCallbacks.bufferSwitchTimeInfo() callback is supported.
// For compatibility with ASIO 1.0 drivers the host application
// should always support the "old" bufferSwitch method, too.
ret = 0;
break;
case kAsioSupportsTimeCode:
// Informs the driver whether application is interested in time
// code info. If an application does not need to know about time
// code, the driver has less work to do.
ret = 0;
break;
}
return ret;
}
static const char* getAsioErrorString(ASIOError result)
{
struct Messages
{
ASIOError value;
const char*message;
};
static const Messages m[] =
{
{ ASE_NotPresent, "Hardware input or output is not present or available." },
{ ASE_HWMalfunction, "Hardware is malfunctioning." },
{ ASE_InvalidParameter, "Invalid input parameter." },
{ ASE_InvalidMode, "Invalid mode." },
{ ASE_SPNotAdvancing, "Sample position not advancing." },
{ ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
{ ASE_NoMemory, "Not enough memory to complete the request." }
};
for (uint32_t i = 0; i < sizeof(m)/sizeof(m[0]); ++i)
if (m[i].value == result) return m[i].message;
return "Unknown error.";
}
//******************** End of __WINDOWS_ASIO__ *********************//
#endif
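
The drain handling in callbackEvent() above keys off the user callback's return value: 0 keeps the stream running, 1 requests a drain (the internalDrain path) and 2 stops the stream immediately via the spawned asioStopStream() thread. A sketch of a compatible callback, with the exact airtaudio::AirTAudioCallback parameter types assumed from the way callbackEvent() invokes it:

#include <cstring>

// Illustrative only: writes silence (assuming 2 channels of float samples)
// and asks for a drain-and-stop after five seconds of stream time.
int32_t myCallback(void* outputBuffer, void* inputBuffer, uint32_t nFrames,
                   double streamTime, airtaudio::streamStatus status, void* userData) {
	(void)inputBuffer; (void)status; (void)userData;
	memset(outputBuffer, 0, nFrames * 2 * sizeof(float));
	if (streamTime > 5.0) {
		return 1; // drain pending output, then stop
	}
	return 0; // keep streaming
}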

49
airtaudio/api/Asio.h Normal file

@@ -0,0 +1,49 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if !defined(__AIRTAUDIO_API_ASIO_H__) && defined(__WINDOWS_ASIO__)
#define __AIRTAUDIO_API_ASIO_H__
namespace airtaudio {
namespace api {
class Asio: public airtaudio::Api {
public:
Asio();
~Asio();
airtaudio::api::type getCurrentApi(void) {
return airtaudio::api::WINDOWS_ASIO;
}
uint32_t getDeviceCount(void);
airtaudio::DeviceInfo getDeviceInfo(uint32_t _device);
void closeStream(void);
void startStream(void);
void stopStream(void);
void abortStream(void);
long getStreamLatency(void);
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
bool callbackEvent(long _bufferIndex);
private:
std::vector<airtaudio::DeviceInfo> m_devices;
void saveDeviceInfo(void);
bool m_coInitialized;
bool probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options);
};
};
};
#endif

1405
airtaudio/api/Core.cpp Normal file

File diff suppressed because it is too large

54
airtaudio/api/Core.h Normal file

@@ -0,0 +1,54 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if !defined(__AIRTAUDIO_API_CORE_H__) && defined(__MACOSX_CORE__)
#define __AIRTAUDIO_API_CORE_H__
#include <CoreAudio/AudioHardware.h>
namespace airtaudio {
namespace api {
class Core: public airtaudio::Api {
public:
Core();
~Core();
airtaudio::api::type getCurrentApi(void) {
return airtaudio::api::MACOSX_CORE;
}
uint32_t getDeviceCount(void);
airtaudio::DeviceInfo getDeviceInfo(uint32_t _device);
uint32_t getDefaultOutputDevice(void);
uint32_t getDefaultInputDevice(void);
void closeStream(void);
void startStream(void);
void stopStream(void);
void abortStream(void);
long getStreamLatency(void);
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
bool callbackEvent(AudioDeviceID _deviceId,
const AudioBufferList *_inBufferList,
const AudioBufferList *_outBufferList);
private:
bool probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options);
static const char* getErrorCode(OSStatus _code);
};
};
};
#endif

1621
airtaudio/api/Ds.cpp Normal file

File diff suppressed because it is too large

52
airtaudio/api/Ds.h Normal file

@@ -0,0 +1,52 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if !defined(__AIRTAUDIO_API_DS_H__) && defined(__WINDOWS_DS__)
#define __AIRTAUDIO_API_DS_H__
namespace airtaudio {
namespace api {
class Ds: public airtaudio::Api {
public:
Ds(void);
~Ds(void);
airtaudio::api::type getCurrentApi(void) {
return airtaudio::api::WINDOWS_DS;
}
uint32_t getDeviceCount(void);
uint32_t getDefaultOutputDevice(void);
uint32_t getDefaultInputDevice(void);
airtaudio::DeviceInfo getDeviceInfo(uint32_t _device);
void closeStream(void);
void startStream(void);
void stopStream(void);
void abortStream(void);
long getStreamLatency(void);
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
void callbackEvent(void);
private:
bool m_coInitialized;
bool m_buffersRolling;
long m_duplexPrerollBytes;
std::vector<struct DsDevice> dsDevices;
bool probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options);
};
};
};
#endif

54
airtaudio/api/Dummy.cpp Normal file

@@ -0,0 +1,54 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if defined(__AIRTAUDIO_DUMMY__)
#include <airtaudio/Interface.h>
airtaudio::api::Dummy::Dummy(void) {
m_errorText = "airtaudio::api::Dummy: This class provides no functionality.";
error(airtaudio::errorWarning);
}
uint32_t airtaudio::api::Dummy::getDeviceCount(void) {
return 0;
}
airtaudio::DeviceInfo airtaudio::api::Dummy::getDeviceInfo(uint32_t _device) {
(void)_device;
airtaudio::DeviceInfo info;
return info;
}
void airtaudio::api::Dummy::closeStream(void) {
}
void airtaudio::api::Dummy::startStream(void) {
}
void airtaudio::api::Dummy::stopStream(void) {
}
void airtaudio::api::Dummy::abortStream(void) {
}
bool airtaudio::api::Dummy::probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options) {
return false;
}
#endif

41
airtaudio/api/Dummy.h Normal file

@@ -0,0 +1,41 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if !defined(__AIRTAUDIO_API_DUMMY_H__) && defined(__AIRTAUDIO_DUMMY__)
#define __AIRTAUDIO_API_DUMMY_H__
#include <airtaudio/Interface.h>
namespace airtaudio {
namespace api {
class Dummy: public airtaudio::Api {
public:
Dummy(void);
airtaudio::api::type getCurrentApi(void) {
return airtaudio::api::RTAUDIO_DUMMY;
}
uint32_t getDeviceCount(void);
airtaudio::DeviceInfo getDeviceInfo(uint32_t _device);
void closeStream(void);
void startStream(void);
void stopStream(void);
void abortStream(void);
private:
bool probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options);
};
};
};
#endif

792
airtaudio/api/Jack.cpp Normal file

@@ -0,0 +1,792 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if defined(__UNIX_JACK__)
#include <unistd.h>
#include <limits.h>
#include <iostream>
#include <airtaudio/Interface.h>
#include <string.h>
// JACK is a low-latency audio server, originally written for the
// GNU/Linux operating system and now also ported to OS-X. It can
// connect a number of different applications to an audio device, as
// well as allowing them to share audio between themselves.
//
// When using JACK with RtAudio, "devices" refer to JACK clients that
// have ports connected to the server. The JACK server is typically
// started in a terminal as follows:
//
// .jackd -d alsa -d hw:0
//
// or through an interface program such as qjackctl. Many of the
// parameters normally set for a stream are fixed by the JACK server
// and can be specified when the JACK server is started. In
// particular,
//
// .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
//
// specifies a sample rate of 44100 Hz, a buffer size of 512 sample
// frames, and number of buffers = 4. Once the server is running, it
// is not possible to override these values. If the values are not
// specified in the command-line, the JACK server uses default values.
//
// The JACK server does not have to be running when an instance of
// RtApiJack is created, though the function getDeviceCount() will
// report 0 devices found until JACK has been started. When no
// devices are available (i.e., the JACK server is not running), a
// stream cannot be opened.
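// (Illustrative note, not in the original comment: the probing code below
// derives "devices" by truncating each port name at the first colon, so a
// server exposing the ports
//   system:capture_1, system:playback_1, mplayer:out_0
// is reported as two devices, "system" and "mplayer".)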
#include <jack/jack.h>
#include <unistd.h>
#include <cstdio>
// A structure to hold various information related to the Jack API
// implementation.
struct JackHandle {
jack_client_t *client;
jack_port_t **ports[2];
std::string deviceName[2];
bool xrun[2];
std::condition_variable condition;
int32_t drainCounter; // Tracks callback counts when draining
bool internalDrain; // Indicates if stop is initiated from callback or not.
JackHandle(void) :
client(0),
drainCounter(0),
internalDrain(false) {
ports[0] = 0;
ports[1] = 0;
xrun[0] = false;
xrun[1] = false;
}
};
static void jackSilentError(const char *) {};
airtaudio::api::Jack::Jack(void) {
// Nothing to do here.
#if !defined(__RTAUDIO_DEBUG__)
// Turn off Jack's internal error reporting.
jack_set_error_function(&jackSilentError);
#endif
}
airtaudio::api::Jack::~Jack(void) {
if (m_stream.state != STREAM_CLOSED) {
closeStream();
}
}
uint32_t airtaudio::api::Jack::getDeviceCount(void) {
// See if we can become a jack client.
jack_options_t options = (jack_options_t) (JackNoStartServer); //JackNullOption;
jack_status_t *status = NULL;
jack_client_t *client = jack_client_open("RtApiJackCount", options, status);
if (client == NULL) {
return 0;
}
const char **ports;
std::string port, previousPort;
uint32_t nChannels = 0, nDevices = 0;
ports = jack_get_ports(client, NULL, NULL, 0);
if (ports) {
// Parse the port names up to the first colon (:).
size_t iColon = 0;
do {
port = (char *) ports[ nChannels ];
iColon = port.find(":");
if (iColon != std::string::npos) {
port = port.substr(0, iColon + 1);
if (port != previousPort) {
nDevices++;
previousPort = port;
}
}
} while (ports[++nChannels]);
free(ports);
}
jack_client_close(client);
return nDevices;
}
airtaudio::DeviceInfo airtaudio::api::Jack::getDeviceInfo(uint32_t _device)
{
airtaudio::DeviceInfo info;
info.probed = false;
jack_options_t options = (jack_options_t) (JackNoStartServer); //JackNullOption
jack_status_t *status = NULL;
jack_client_t *client = jack_client_open("RtApiJackInfo", options, status);
if (client == NULL) {
m_errorText = "airtaudio::api::Jack::getDeviceInfo: Jack server not found or connection error!";
error(airtaudio::errorWarning);
return info;
}
const char **ports;
std::string port, previousPort;
uint32_t nPorts = 0, nDevices = 0;
ports = jack_get_ports(client, NULL, NULL, 0);
if (ports) {
// Parse the port names up to the first colon (:).
size_t iColon = 0;
do {
port = (char *) ports[ nPorts ];
iColon = port.find(":");
if (iColon != std::string::npos) {
port = port.substr(0, iColon);
if (port != previousPort) {
if (nDevices == _device) {
info.name = port;
}
nDevices++;
previousPort = port;
}
}
} while (ports[++nPorts]);
free(ports);
}
if (_device >= nDevices) {
jack_client_close(client);
m_errorText = "airtaudio::api::Jack::getDeviceInfo: device ID is invalid!";
error(airtaudio::errorInvalidUse);
return info;
}
// Get the current jack server sample rate.
info.sampleRates.clear();
info.sampleRates.push_back(jack_get_sample_rate(client));
// Count the available ports containing the client name as device
// channels. Jack "input ports" equal RtAudio output channels.
uint32_t nChannels = 0;
ports = jack_get_ports(client, info.name.c_str(), NULL, JackPortIsInput);
if (ports) {
while (ports[ nChannels ]) {
nChannels++;
}
free(ports);
info.outputChannels = nChannels;
}
// Jack "output ports" equal RtAudio input channels.
nChannels = 0;
ports = jack_get_ports(client, info.name.c_str(), NULL, JackPortIsOutput);
if (ports) {
while (ports[ nChannels ]) {
nChannels++;
}
free(ports);
info.inputChannels = nChannels;
}
if (info.outputChannels == 0 && info.inputChannels == 0) {
jack_client_close(client);
m_errorText = "airtaudio::api::Jack::getDeviceInfo: error determining Jack input/output channels!";
error(airtaudio::errorWarning);
return info;
}
// If device opens for both playback and capture, we determine the channels.
if (info.outputChannels > 0 && info.inputChannels > 0) {
info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
}
// Jack always uses 32-bit floats.
info.nativeFormats = airtaudio::FLOAT32;
// Jack doesn't provide default devices so we'll use the first available one.
if ( _device == 0
&& info.outputChannels > 0) {
info.isDefaultOutput = true;
}
if ( _device == 0
&& info.inputChannels > 0) {
info.isDefaultInput = true;
}
jack_client_close(client);
info.probed = true;
return info;
}
static int32_t jackCallbackHandler(jack_nframes_t _nframes, void *_infoPointer) {
airtaudio::CallbackInfo* info = (airtaudio::CallbackInfo*)_infoPointer;
airtaudio::api::Jack* object = (airtaudio::api::Jack*)info->object;
if (object->callbackEvent((uint64_t)_nframes) == false) {
return 1;
}
return 0;
}
// This function will be called by a spawned thread when the Jack
// server signals that it is shutting down. It is necessary to handle
// it this way because the jackShutdown() function must return before
// the jack_deactivate() function (in closeStream()) will return.
static void *jackCloseStream(void *_ptr) {
airtaudio::CallbackInfo* info = (airtaudio::CallbackInfo*)_ptr;
airtaudio::api::Jack* object = (airtaudio::api::Jack*)info->object;
object->closeStream();
pthread_exit(NULL);
}
static void jackShutdown(void* _infoPointer) {
airtaudio::CallbackInfo* info = (airtaudio::CallbackInfo*)_infoPointer;
airtaudio::api::Jack* object = (airtaudio::api::Jack*)info->object;
// Check current stream state. If stopped, then we'll assume this
// was called as a result of a call to airtaudio::api::Jack::stopStream (the
// deactivation of a client handle causes this function to be called).
// If not, we'll assume the Jack server is shutting down or some
// other problem occurred and we should close the stream.
if (object->isStreamRunning() == false) {
return;
}
pthread_t threadId;
pthread_create(&threadId, NULL, jackCloseStream, info);
std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
}
static int32_t jackXrun(void* _infoPointer) {
JackHandle* handle = (JackHandle*)_infoPointer;
if (handle->ports[0]) {
handle->xrun[0] = true;
}
if (handle->ports[1]) {
handle->xrun[1] = true;
}
return 0;
}
bool airtaudio::api::Jack::probeDeviceOpen(uint32_t device,
airtaudio::api::StreamMode mode,
uint32_t channels,
uint32_t firstChannel,
uint32_t sampleRate,
airtaudio::format format,
uint32_t *bufferSize,
airtaudio::StreamOptions *options) {
JackHandle *handle = (JackHandle *) m_stream.apiHandle;
// Look for jack server and try to become a client (only do once per stream).
jack_client_t *client = 0;
if (mode == OUTPUT || (mode == INPUT && m_stream.mode != OUTPUT)) {
jack_options_t jackoptions = (jack_options_t) (JackNoStartServer); //JackNullOption;
jack_status_t *status = NULL;
if (options && !options->streamName.empty()) {
client = jack_client_open(options->streamName.c_str(), jackoptions, status);
} else {
client = jack_client_open("RtApiJack", jackoptions, status);
}
if (client == 0) {
m_errorText = "airtaudio::api::Jack::probeDeviceOpen: Jack server not found or connection error!";
error(airtaudio::errorWarning);
return FAILURE;
}
}
else {
// The handle must have been created on an earlier pass.
client = handle->client;
}
const char **ports;
std::string port, previousPort, deviceName;
uint32_t nPorts = 0, nDevices = 0;
ports = jack_get_ports(client, NULL, NULL, 0);
if (ports) {
// Parse the port names up to the first colon (:).
size_t iColon = 0;
do {
port = (char *) ports[ nPorts ];
iColon = port.find(":");
if (iColon != std::string::npos) {
port = port.substr(0, iColon);
if (port != previousPort) {
if (nDevices == device) deviceName = port;
nDevices++;
previousPort = port;
}
}
} while (ports[++nPorts]);
free(ports);
}
if (device >= nDevices) {
m_errorText = "airtaudio::api::Jack::probeDeviceOpen: device ID is invalid!";
return FAILURE;
}
// Count the available ports containing the client name as device
// channels. Jack "input ports" equal RtAudio output channels.
uint32_t nChannels = 0;
uint64_t flag = JackPortIsInput;
if (mode == INPUT) flag = JackPortIsOutput;
ports = jack_get_ports(client, deviceName.c_str(), NULL, flag);
if (ports) {
while (ports[ nChannels ]) nChannels++;
free(ports);
}
// Compare the jack ports for specified client to the requested number of channels.
if (nChannels < (channels + firstChannel)) {
m_errorStream << "airtaudio::api::Jack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Check the jack server sample rate.
uint32_t jackRate = jack_get_sample_rate(client);
if (sampleRate != jackRate) {
jack_client_close(client);
m_errorStream << "airtaudio::api::Jack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
m_stream.sampleRate = jackRate;
// Get the latency of the JACK port.
ports = jack_get_ports(client, deviceName.c_str(), NULL, flag);
if (ports[ firstChannel ]) {
// Added by Ge Wang
jack_latency_callback_mode_t cbmode = (mode == INPUT ? JackCaptureLatency : JackPlaybackLatency);
// the range (usually the min and max are equal)
jack_latency_range_t latrange; latrange.min = latrange.max = 0;
// get the latency range
jack_port_get_latency_range(jack_port_by_name(client, ports[firstChannel]), cbmode, &latrange);
// be optimistic, use the min!
m_stream.latency[mode] = latrange.min;
//m_stream.latency[mode] = jack_port_get_latency(jack_port_by_name(client, ports[ firstChannel ]));
}
free(ports);
// The jack server always uses 32-bit floating-point data.
m_stream.deviceFormat[mode] = FLOAT32;
m_stream.userFormat = format;
if (options && options->flags & NONINTERLEAVED) m_stream.userInterleaved = false;
else m_stream.userInterleaved = true;
// Jack always uses non-interleaved buffers.
m_stream.deviceInterleaved[mode] = false;
// Jack always provides host byte-ordered data.
m_stream.doByteSwap[mode] = false;
// Get the buffer size. The buffer size and number of buffers
// (periods) is set when the jack server is started.
m_stream.bufferSize = (int) jack_get_buffer_size(client);
*bufferSize = m_stream.bufferSize;
m_stream.nDeviceChannels[mode] = channels;
m_stream.nUserChannels[mode] = channels;
// Set flags for buffer conversion.
m_stream.doConvertBuffer[mode] = false;
if (m_stream.userFormat != m_stream.deviceFormat[mode])
m_stream.doConvertBuffer[mode] = true;
if (m_stream.userInterleaved != m_stream.deviceInterleaved[mode] &&
m_stream.nUserChannels[mode] > 1)
m_stream.doConvertBuffer[mode] = true;
// Allocate our JackHandle structure for the stream.
if (handle == 0) {
try {
handle = new JackHandle;
}
catch (std::bad_alloc&) {
m_errorText = "airtaudio::api::Jack::probeDeviceOpen: error allocating JackHandle memory.";
goto error;
}
m_stream.apiHandle = (void *) handle;
handle->client = client;
}
handle->deviceName[mode] = deviceName;
// Allocate necessary internal buffers.
uint64_t bufferBytes;
bufferBytes = m_stream.nUserChannels[mode] * *bufferSize * formatBytes(m_stream.userFormat);
m_stream.userBuffer[mode] = (char *) calloc(bufferBytes, 1);
if (m_stream.userBuffer[mode] == NULL) {
m_errorText = "airtaudio::api::Jack::probeDeviceOpen: error allocating user buffer memory.";
goto error;
}
if (m_stream.doConvertBuffer[mode]) {
bool makeBuffer = true;
if (mode == OUTPUT)
bufferBytes = m_stream.nDeviceChannels[0] * formatBytes(m_stream.deviceFormat[0]);
else { // mode == INPUT
bufferBytes = m_stream.nDeviceChannels[1] * formatBytes(m_stream.deviceFormat[1]);
if (m_stream.mode == OUTPUT && m_stream.deviceBuffer) {
uint64_t bytesOut = m_stream.nDeviceChannels[0] * formatBytes(m_stream.deviceFormat[0]);
if (bufferBytes < bytesOut) makeBuffer = false;
}
}
if (makeBuffer) {
bufferBytes *= *bufferSize;
if (m_stream.deviceBuffer) free(m_stream.deviceBuffer);
m_stream.deviceBuffer = (char *) calloc(bufferBytes, 1);
if (m_stream.deviceBuffer == NULL) {
m_errorText = "airtaudio::api::Jack::probeDeviceOpen: error allocating device buffer memory.";
goto error;
}
}
}
// Allocate memory for the Jack ports (channels) identifiers.
handle->ports[mode] = (jack_port_t **) malloc (sizeof (jack_port_t *) * channels);
if (handle->ports[mode] == NULL) {
m_errorText = "airtaudio::api::Jack::probeDeviceOpen: error allocating port memory.";
goto error;
}
m_stream.device[mode] = device;
m_stream.channelOffset[mode] = firstChannel;
m_stream.state = STREAM_STOPPED;
m_stream.callbackInfo.object = (void *) this;
if (m_stream.mode == OUTPUT && mode == INPUT)
// We had already set up the stream for output.
m_stream.mode = DUPLEX;
else {
m_stream.mode = mode;
jack_set_process_callback(handle->client, jackCallbackHandler, (void *) &m_stream.callbackInfo);
jack_set_xrun_callback(handle->client, jackXrun, (void *) handle);
jack_on_shutdown(handle->client, jackShutdown, (void *) &m_stream.callbackInfo);
}
// Register our ports.
char label[64];
if (mode == OUTPUT) {
for (uint32_t i=0; i<m_stream.nUserChannels[0]; i++) {
snprintf(label, 64, "outport %d", i);
handle->ports[0][i] = jack_port_register(handle->client,
(const char *)label,
JACK_DEFAULT_AUDIO_TYPE,
JackPortIsOutput,
0);
}
} else {
for (uint32_t i=0; i<m_stream.nUserChannels[1]; i++) {
snprintf(label, 64, "inport %d", i);
handle->ports[1][i] = jack_port_register(handle->client,
(const char *)label,
JACK_DEFAULT_AUDIO_TYPE,
JackPortIsInput,
0);
}
}
// Setup the buffer conversion information structure. We don't use
// buffers to do channel offsets, so we override that parameter
// here.
if (m_stream.doConvertBuffer[mode]) {
setConvertInfo(mode, 0);
}
return SUCCESS;
error:
if (handle) {
jack_client_close(handle->client);
if (handle->ports[0]) {
free(handle->ports[0]);
}
if (handle->ports[1]) {
free(handle->ports[1]);
}
delete handle;
m_stream.apiHandle = 0;
}
for (int32_t iii=0; iii<2; ++iii) {
if (m_stream.userBuffer[iii]) {
free(m_stream.userBuffer[iii]);
m_stream.userBuffer[iii] = 0;
}
}
if (m_stream.deviceBuffer) {
free(m_stream.deviceBuffer);
m_stream.deviceBuffer = 0;
}
return FAILURE;
}
void airtaudio::api::Jack::closeStream(void)
{
if (m_stream.state == STREAM_CLOSED) {
m_errorText = "airtaudio::api::Jack::closeStream(): no open stream to close!";
error(airtaudio::errorWarning);
return;
}
JackHandle *handle = (JackHandle *) m_stream.apiHandle;
if (handle) {
if (m_stream.state == STREAM_RUNNING)
jack_deactivate(handle->client);
jack_client_close(handle->client);
}
if (handle) {
if (handle->ports[0]) {
free(handle->ports[0]);
}
if (handle->ports[1]) {
free(handle->ports[1]);
}
delete handle;
m_stream.apiHandle = 0;
}
for (int32_t i=0; i<2; i++) {
if (m_stream.userBuffer[i]) {
free(m_stream.userBuffer[i]);
m_stream.userBuffer[i] = 0;
}
}
if (m_stream.deviceBuffer) {
free(m_stream.deviceBuffer);
m_stream.deviceBuffer = 0;
}
m_stream.mode = UNINITIALIZED;
m_stream.state = STREAM_CLOSED;
}
void airtaudio::api::Jack::startStream(void)
{
verifyStream();
if (m_stream.state == STREAM_RUNNING) {
m_errorText = "airtaudio::api::Jack::startStream(): the stream is already running!";
error(airtaudio::errorWarning);
return;
}
JackHandle *handle = (JackHandle *) m_stream.apiHandle;
int32_t result = jack_activate(handle->client);
if (result) {
m_errorText = "airtaudio::api::Jack::startStream(): unable to activate JACK client!";
goto unlock;
}
const char **ports;
// Get the list of available ports.
if (m_stream.mode == OUTPUT || m_stream.mode == DUPLEX) {
result = 1;
ports = jack_get_ports(handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
if (ports == NULL) {
m_errorText = "airtaudio::api::Jack::startStream(): error determining available JACK input ports!";
goto unlock;
}
// Now make the port connections. Since RtAudio wasn't designed to
// allow the user to select particular channels of a device, we'll
// just open the first "nChannels" ports with offset.
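// For example, with firstChannel = 2 and two user channels, device ports [2] and [3]
// are connected to our first and second registered ports.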
for (uint32_t i=0; i<m_stream.nUserChannels[0]; i++) {
result = 1;
if (ports[ m_stream.channelOffset[0] + i ])
result = jack_connect(handle->client, jack_port_name(handle->ports[0][i]), ports[ m_stream.channelOffset[0] + i ]);
if (result) {
free(ports);
m_errorText = "airtaudio::api::Jack::startStream(): error connecting output ports!";
goto unlock;
}
}
free(ports);
}
if (m_stream.mode == INPUT || m_stream.mode == DUPLEX) {
result = 1;
ports = jack_get_ports(handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput);
if (ports == NULL) {
m_errorText = "airtaudio::api::Jack::startStream(): error determining available JACK output ports!";
goto unlock;
}
// Now make the port connections. See note above.
for (uint32_t i=0; i<m_stream.nUserChannels[1]; i++) {
result = 1;
if (ports[ m_stream.channelOffset[1] + i ])
result = jack_connect(handle->client, ports[ m_stream.channelOffset[1] + i ], jack_port_name(handle->ports[1][i]));
if (result) {
free(ports);
m_errorText = "airtaudio::api::Jack::startStream(): error connecting input ports!";
goto unlock;
}
}
free(ports);
}
handle->drainCounter = 0;
handle->internalDrain = false;
m_stream.state = STREAM_RUNNING;
unlock:
if (result == 0) return;
error(airtaudio::errorSystemError);
}
void airtaudio::api::Jack::stopStream(void) {
verifyStream();
if (m_stream.state == STREAM_STOPPED) {
m_errorText = "airtaudio::api::Jack::stopStream(): the stream is already stopped!";
error(airtaudio::errorWarning);
return;
}
JackHandle *handle = (JackHandle *) m_stream.apiHandle;
if ( m_stream.mode == OUTPUT
|| m_stream.mode == DUPLEX) {
if (handle->drainCounter == 0) {
handle->drainCounter = 2;
std::unique_lock<std::mutex> lck(m_stream.mutex);
handle->condition.wait(lck);
}
}
jack_deactivate(handle->client);
m_stream.state = STREAM_STOPPED;
}
void airtaudio::api::Jack::abortStream(void)
{
verifyStream();
if (m_stream.state == STREAM_STOPPED) {
m_errorText = "airtaudio::api::Jack::abortStream(): the stream is already stopped!";
error(airtaudio::errorWarning);
return;
}
JackHandle *handle = (JackHandle *) m_stream.apiHandle;
handle->drainCounter = 2;
stopStream();
}
// This function will be called by a spawned thread when the user
// callback function signals that the stream should be stopped or
// aborted. It is necessary to handle it this way because the
// callbackEvent() function must return before the jack_deactivate()
// function will return.
static void jackStopStream(void *_ptr) {
airtaudio::CallbackInfo *info = (airtaudio::CallbackInfo *) _ptr;
airtaudio::api::Jack *object = (airtaudio::api::Jack *) info->object;
object->stopStream();
}
bool airtaudio::api::Jack::callbackEvent(uint64_t nframes)
{
if (m_stream.state == STREAM_STOPPED || m_stream.state == STREAM_STOPPING) return SUCCESS;
if (m_stream.state == STREAM_CLOSED) {
m_errorText = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
error(airtaudio::errorWarning);
return FAILURE;
}
if (m_stream.bufferSize != nframes) {
m_errorText = "RtApiCore::callbackEvent(): the JACK buffer size has changed ... cannot process!";
error(airtaudio::errorWarning);
return FAILURE;
}
CallbackInfo *info = (CallbackInfo *) &m_stream.callbackInfo;
JackHandle *handle = (JackHandle *) m_stream.apiHandle;
// Check if we were draining the stream and signal is finished.
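// drainCounter appears to act as a small state machine: 0 = normal operation,
// 1 = drain requested by the user callback's return value, 2 = drain requested by
// stopStream()/abortStream(); it is then incremented on each callback and, once it
// passes 3, the zero-filled output is considered flushed.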
if (handle->drainCounter > 3) {
m_stream.state = STREAM_STOPPING;
if (handle->internalDrain == true) {
new std::thread(jackStopStream, info);
} else {
handle->condition.notify_one();
}
return SUCCESS;
}
// Invoke user callback first, to get fresh output data.
if (handle->drainCounter == 0) {
airtaudio::AirTAudioCallback callback = (airtaudio::AirTAudioCallback) info->callback;
double streamTime = getStreamTime();
airtaudio::streamStatus status = 0;
if (m_stream.mode != INPUT && handle->xrun[0] == true) {
status |= OUTPUT_UNDERFLOW;
handle->xrun[0] = false;
}
if (m_stream.mode != OUTPUT && handle->xrun[1] == true) {
status |= INPUT_OVERFLOW;
handle->xrun[1] = false;
}
int32_t cbReturnValue = callback(m_stream.userBuffer[0], m_stream.userBuffer[1],
m_stream.bufferSize, streamTime, status, info->userData);
if (cbReturnValue == 2) {
m_stream.state = STREAM_STOPPING;
handle->drainCounter = 2;
new std::thread(jackStopStream, info);
return SUCCESS;
}
else if (cbReturnValue == 1) {
handle->drainCounter = 1;
handle->internalDrain = true;
}
}
jack_default_audio_sample_t *jackbuffer;
uint64_t bufferBytes = nframes * sizeof(jack_default_audio_sample_t);
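// bufferBytes is the size of one JACK port buffer, i.e. one channel of 32-bit float samples.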
if (m_stream.mode == OUTPUT || m_stream.mode == DUPLEX) {
if (handle->drainCounter > 1) { // write zeros to the output stream
for (uint32_t i=0; i<m_stream.nDeviceChannels[0]; i++) {
jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][i], (jack_nframes_t) nframes);
memset(jackbuffer, 0, bufferBytes);
}
}
else if (m_stream.doConvertBuffer[0]) {
convertBuffer(m_stream.deviceBuffer, m_stream.userBuffer[0], m_stream.convertInfo[0]);
for (uint32_t i=0; i<m_stream.nDeviceChannels[0]; i++) {
jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][i], (jack_nframes_t) nframes);
memcpy(jackbuffer, &m_stream.deviceBuffer[i*bufferBytes], bufferBytes);
}
}
else { // no buffer conversion
for (uint32_t i=0; i<m_stream.nUserChannels[0]; i++) {
jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][i], (jack_nframes_t) nframes);
memcpy(jackbuffer, &m_stream.userBuffer[0][i*bufferBytes], bufferBytes);
}
}
if (handle->drainCounter) {
handle->drainCounter++;
goto unlock;
}
}
if ( m_stream.mode == INPUT
|| m_stream.mode == DUPLEX) {
if (m_stream.doConvertBuffer[1]) {
for (uint32_t i=0; i<m_stream.nDeviceChannels[1]; i++) {
jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][i], (jack_nframes_t) nframes);
memcpy(&m_stream.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes);
}
convertBuffer(m_stream.userBuffer[1], m_stream.deviceBuffer, m_stream.convertInfo[1]);
} else {
// no buffer conversion
for (uint32_t i=0; i<m_stream.nUserChannels[1]; i++) {
jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][i], (jack_nframes_t) nframes);
memcpy(&m_stream.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes);
}
}
}
unlock:
airtaudio::Api::tickStreamTime();
return SUCCESS;
}
//******************** End of __UNIX_JACK__ *********************//
#endif

46
airtaudio/api/Jack.h Normal file
View File

@@ -0,0 +1,46 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if !defined(__AIRTAUDIO_API_JACK_H__) && defined(__UNIX_JACK__)
#define __AIRTAUDIO_API_JACK_H__
namespace airtaudio {
namespace api {
class Jack: public airtaudio::Api {
public:
Jack(void);
~Jack(void);
airtaudio::api::type getCurrentApi(void) {
return airtaudio::api::UNIX_JACK;
}
uint32_t getDeviceCount(void);
airtaudio::DeviceInfo getDeviceInfo(uint32_t _device);
void closeStream(void);
void startStream(void);
void stopStream(void);
void abortStream(void);
long getStreamLatency(void);
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
bool callbackEvent(uint64_t _nframes);
private:
bool probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options);
};
};
};
#endif

945
airtaudio/api/Oss.cpp Normal file
View File

@@ -0,0 +1,945 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if defined(__LINUX_OSS__)
#include <airtaudio/Interface.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <fcntl.h>
#include "soundcard.h"
#include <errno.h>
#include <math.h>
static void *ossCallbackHandler(void * ptr);
// A structure to hold various information related to the OSS API
// implementation.
struct OssHandle {
int32_t id[2]; // device ids
bool xrun[2];
bool triggered;
pthread_cond_t runnable;
OssHandle()
:triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
};
airtaudio::api::Oss::Oss(void) {
// Nothing to do here.
}
airtaudio::api::Oss::~Oss(void) {
if (m_stream.state != STREAM_CLOSED) {
closeStream();
}
}
uint32_t airtaudio::api::Oss::getDeviceCount(void)
{
int32_t mixerfd = open("/dev/mixer", O_RDWR, 0);
if (mixerfd == -1) {
m_errorText = "airtaudio::api::Oss::getDeviceCount: error opening '/dev/mixer'.";
error(airtaudio::errorWarning);
return 0;
}
oss_sysinfo sysinfo;
if (ioctl(mixerfd, SNDCTL_SYSINFO, &sysinfo) == -1) {
close(mixerfd);
m_errorText = "airtaudio::api::Oss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
error(airtaudio::errorWarning);
return 0;
}
close(mixerfd);
return sysinfo.numaudios;
}
airtaudio::DeviceInfo airtaudio::api::Oss::getDeviceInfo(uint32_t device)
{
airtaudio::DeviceInfo info;
info.probed = false;
int32_t mixerfd = open("/dev/mixer", O_RDWR, 0);
if (mixerfd == -1) {
m_errorText = "airtaudio::api::Oss::getDeviceInfo: error opening '/dev/mixer'.";
error(airtaudio::errorWarning);
return info;
}
oss_sysinfo sysinfo;
int32_t result = ioctl(mixerfd, SNDCTL_SYSINFO, &sysinfo);
if (result == -1) {
close(mixerfd);
m_errorText = "airtaudio::api::Oss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
error(airtaudio::errorWarning);
return info;
}
unsigned nDevices = sysinfo.numaudios;
if (nDevices == 0) {
close(mixerfd);
m_errorText = "airtaudio::api::Oss::getDeviceInfo: no devices found!";
error(airtaudio::errorInvalidUse);
return info;
}
if (device >= nDevices) {
close(mixerfd);
m_errorText = "airtaudio::api::Oss::getDeviceInfo: device ID is invalid!";
error(airtaudio::errorInvalidUse);
return info;
}
oss_audioinfo ainfo;
ainfo.dev = device;
result = ioctl(mixerfd, SNDCTL_AUDIOINFO, &ainfo);
close(mixerfd);
if (result == -1) {
m_errorStream << "airtaudio::api::Oss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning);
return info;
}
// Probe channels
if (ainfo.caps & PCM_CAP_OUTPUT) info.outputChannels = ainfo.max_channels;
if (ainfo.caps & PCM_CAP_INPUT) info.inputChannels = ainfo.max_channels;
if (ainfo.caps & PCM_CAP_DUPLEX) {
if (info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX)
info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
}
// Probe data formats ... do for input
uint64_t mask = ainfo.iformats;
if (mask & AFMT_S16_LE || mask & AFMT_S16_BE)
info.nativeFormats |= RTAUDIO_SINT16;
if (mask & AFMT_S8)
info.nativeFormats |= RTAUDIO_SINT8;
if (mask & AFMT_S32_LE || mask & AFMT_S32_BE)
info.nativeFormats |= RTAUDIO_SINT32;
if (mask & AFMT_FLOAT)
info.nativeFormats |= RTAUDIO_FLOAT32;
if (mask & AFMT_S24_LE || mask & AFMT_S24_BE)
info.nativeFormats |= RTAUDIO_SINT24;
// Check that we have at least one supported format
if (info.nativeFormats == 0) {
m_errorStream << "airtaudio::api::Oss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning);
return info;
}
// Probe the supported sample rates.
info.sampleRates.clear();
if (ainfo.nrates) {
for (uint32_t i=0; i<ainfo.nrates; i++) {
for (uint32_t k=0; k<MAX_SAMPLE_RATES; k++) {
if (ainfo.rates[i] == SAMPLE_RATES[k]) {
info.sampleRates.push_back(SAMPLE_RATES[k]);
break;
}
}
}
}
else {
// Check min and max rate values;
for (uint32_t k=0; k<MAX_SAMPLE_RATES; k++) {
if (ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k])
info.sampleRates.push_back(SAMPLE_RATES[k]);
}
}
if (info.sampleRates.size() == 0) {
m_errorStream << "airtaudio::api::Oss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning);
}
else {
info.probed = true;
info.name = ainfo.name;
}
return info;
}
bool airtaudio::api::Oss::probeDeviceOpen(uint32_t device, StreamMode mode, uint32_t channels,
uint32_t firstChannel, uint32_t sampleRate,
airtaudio::format format, uint32_t *bufferSize,
airtaudio::StreamOptions *options)
{
int32_t mixerfd = open("/dev/mixer", O_RDWR, 0);
if (mixerfd == -1) {
m_errorText = "airtaudio::api::Oss::probeDeviceOpen: error opening '/dev/mixer'.";
return FAILURE;
}
oss_sysinfo sysinfo;
int32_t result = ioctl(mixerfd, SNDCTL_SYSINFO, &sysinfo);
if (result == -1) {
close(mixerfd);
m_errorText = "airtaudio::api::Oss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
return FAILURE;
}
unsigned nDevices = sysinfo.numaudios;
if (nDevices == 0) {
// This should not happen because a check is made before this function is called.
close(mixerfd);
m_errorText = "airtaudio::api::Oss::probeDeviceOpen: no devices found!";
return FAILURE;
}
if (device >= nDevices) {
// This should not happen because a check is made before this function is called.
close(mixerfd);
m_errorText = "airtaudio::api::Oss::probeDeviceOpen: device ID is invalid!";
return FAILURE;
}
oss_audioinfo ainfo;
ainfo.dev = device;
result = ioctl(mixerfd, SNDCTL_AUDIOINFO, &ainfo);
close(mixerfd);
if (result == -1) {
m_errorStream << "airtaudio::api::Oss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Check if device supports input or output
if ((mode == OUTPUT && !(ainfo.caps & PCM_CAP_OUTPUT)) ||
(mode == INPUT && !(ainfo.caps & PCM_CAP_INPUT))) {
if (mode == OUTPUT)
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
else
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
m_errorText = m_errorStream.str();
return FAILURE;
}
int32_t flags = 0;
OssHandle *handle = (OssHandle *) m_stream.apiHandle;
if (mode == OUTPUT)
flags |= O_WRONLY;
else { // mode == INPUT
if (m_stream.mode == OUTPUT && m_stream.device[0] == device) {
// We just set the same device for playback ... close and reopen for duplex (OSS only).
close(handle->id[0]);
handle->id[0] = 0;
if (!(ainfo.caps & PCM_CAP_DUPLEX)) {
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Check that the number of channels previously set is the same.
if (m_stream.nUserChannels[0] != channels) {
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
flags |= O_RDWR;
}
else
flags |= O_RDONLY;
}
// Set exclusive access if specified.
if (options && options->flags & RTAUDIO_HOG_DEVICE) flags |= O_EXCL;
// Try to open the device.
int32_t fd;
fd = open(ainfo.devnode, flags, 0);
if (fd == -1) {
if (errno == EBUSY)
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
else
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
// For duplex operation, specifically set this mode (this doesn't seem to work).
/*
if (flags | O_RDWR) {
result = ioctl(fd, SNDCTL_DSP_SETDUPLEX, NULL);
if (result == -1) {
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
}
*/
// Check the device channel support.
m_stream.nUserChannels[mode] = channels;
if (ainfo.max_channels < (int)(channels + firstChannel)) {
close(fd);
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Set the number of channels.
int32_t deviceChannels = channels + firstChannel;
result = ioctl(fd, SNDCTL_DSP_CHANNELS, &deviceChannels);
if (result == -1 || deviceChannels < (int)(channels + firstChannel)) {
close(fd);
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
m_stream.nDeviceChannels[mode] = deviceChannels;
// Get the data format mask
int32_t mask;
result = ioctl(fd, SNDCTL_DSP_GETFMTS, &mask);
if (result == -1) {
close(fd);
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Determine how to set the device format.
m_stream.userFormat = format;
int32_t deviceFormat = -1;
m_stream.doByteSwap[mode] = false;
if (format == RTAUDIO_SINT8) {
if (mask & AFMT_S8) {
deviceFormat = AFMT_S8;
m_stream.deviceFormat[mode] = RTAUDIO_SINT8;
}
}
else if (format == RTAUDIO_SINT16) {
if (mask & AFMT_S16_NE) {
deviceFormat = AFMT_S16_NE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT16;
}
else if (mask & AFMT_S16_OE) {
deviceFormat = AFMT_S16_OE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT16;
m_stream.doByteSwap[mode] = true;
}
}
else if (format == RTAUDIO_SINT24) {
if (mask & AFMT_S24_NE) {
deviceFormat = AFMT_S24_NE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT24;
}
else if (mask & AFMT_S24_OE) {
deviceFormat = AFMT_S24_OE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT24;
m_stream.doByteSwap[mode] = true;
}
}
else if (format == RTAUDIO_SINT32) {
if (mask & AFMT_S32_NE) {
deviceFormat = AFMT_S32_NE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT32;
}
else if (mask & AFMT_S32_OE) {
deviceFormat = AFMT_S32_OE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT32;
m_stream.doByteSwap[mode] = true;
}
}
if (deviceFormat == -1) {
// The user requested format is not natively supported by the device.
if (mask & AFMT_S16_NE) {
deviceFormat = AFMT_S16_NE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT16;
}
else if (mask & AFMT_S32_NE) {
deviceFormat = AFMT_S32_NE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT32;
}
else if (mask & AFMT_S24_NE) {
deviceFormat = AFMT_S24_NE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT24;
}
else if (mask & AFMT_S16_OE) {
deviceFormat = AFMT_S16_OE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT16;
m_stream.doByteSwap[mode] = true;
}
else if (mask & AFMT_S32_OE) {
deviceFormat = AFMT_S32_OE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT32;
m_stream.doByteSwap[mode] = true;
}
else if (mask & AFMT_S24_OE) {
deviceFormat = AFMT_S24_OE;
m_stream.deviceFormat[mode] = RTAUDIO_SINT24;
m_stream.doByteSwap[mode] = true;
}
else if (mask & AFMT_S8) {
deviceFormat = AFMT_S8;
m_stream.deviceFormat[mode] = RTAUDIO_SINT8;
}
}
if (m_stream.deviceFormat[mode] == 0) {
// This really shouldn't happen ...
close(fd);
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Set the data format.
int32_t temp = deviceFormat;
result = ioctl(fd, SNDCTL_DSP_SETFMT, &deviceFormat);
if (result == -1 || deviceFormat != temp) {
close(fd);
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Attempt to set the buffer size. According to OSS, the minimum
// number of buffers is two. The supposed minimum buffer size is 16
// bytes, so that will be our lower bound. The argument to this
// call is in the form 0xMMMMSSSS (hex), where the buffer size (in
// bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
// We'll check the actual value used near the end of the setup
// procedure.
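// Illustrative example (hypothetical values): 512 frames of 16-bit stereo give
// ossBufferBytes = 512 * 2 * 2 = 2048 = 2^11, so with 3 fragments the argument
// computed below is (3 << 16) + 11 = 0x0003000B.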
int32_t ossBufferBytes = *bufferSize * formatBytes(m_stream.deviceFormat[mode]) * deviceChannels;
if (ossBufferBytes < 16) ossBufferBytes = 16;
int32_t buffers = 0;
if (options) buffers = options->numberOfBuffers;
if (options && options->flags & RTAUDIO_MINIMIZE_LATENCY) buffers = 2;
if (buffers < 2) buffers = 3;
temp = ((int) buffers << 16) + (int)(log10((double)ossBufferBytes) / log10(2.0));
result = ioctl(fd, SNDCTL_DSP_SETFRAGMENT, &temp);
if (result == -1) {
close(fd);
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
m_stream.nBuffers = buffers;
// Save buffer size (in sample frames).
*bufferSize = ossBufferBytes / (formatBytes(m_stream.deviceFormat[mode]) * deviceChannels);
m_stream.bufferSize = *bufferSize;
// Set the sample rate.
int32_t srate = sampleRate;
result = ioctl(fd, SNDCTL_DSP_SPEED, &srate);
if (result == -1) {
close(fd);
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
// Verify the sample rate setup worked.
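// A deviation of up to 100 Hz is tolerated, since the driver may round the requested rate.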
if (abs(srate - sampleRate) > 100) {
close(fd);
m_errorStream << "airtaudio::api::Oss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
m_errorText = m_errorStream.str();
return FAILURE;
}
m_stream.sampleRate = sampleRate;
if (mode == INPUT && m_stream.mode == OUTPUT && m_stream.device[0] == device) {
// We're doing duplex setup here.
m_stream.deviceFormat[0] = m_stream.deviceFormat[1];
m_stream.nDeviceChannels[0] = deviceChannels;
}
// Set interleaving parameters.
m_stream.userInterleaved = true;
m_stream.deviceInterleaved[mode] = true;
if (options && options->flags & RTAUDIO_NONINTERLEAVED)
m_stream.userInterleaved = false;
// Set flags for buffer conversion
m_stream.doConvertBuffer[mode] = false;
if (m_stream.userFormat != m_stream.deviceFormat[mode])
m_stream.doConvertBuffer[mode] = true;
if (m_stream.nUserChannels[mode] < m_stream.nDeviceChannels[mode])
m_stream.doConvertBuffer[mode] = true;
if (m_stream.userInterleaved != m_stream.deviceInterleaved[mode] &&
m_stream.nUserChannels[mode] > 1)
m_stream.doConvertBuffer[mode] = true;
// Allocate the stream handles if necessary and then save.
if (m_stream.apiHandle == 0) {
try {
handle = new OssHandle;
}
catch (std::bad_alloc&) {
m_errorText = "airtaudio::api::Oss::probeDeviceOpen: error allocating OssHandle memory.";
goto error;
}
if (pthread_cond_init(&handle->runnable, NULL)) {
m_errorText = "airtaudio::api::Oss::probeDeviceOpen: error initializing pthread condition variable.";
goto error;
}
m_stream.apiHandle = (void *) handle;
}
else {
handle = (OssHandle *) m_stream.apiHandle;
}
handle->id[mode] = fd;
// Allocate necessary internal buffers.
uint64_t bufferBytes;
bufferBytes = m_stream.nUserChannels[mode] * *bufferSize * formatBytes(m_stream.userFormat);
m_stream.userBuffer[mode] = (char *) calloc(bufferBytes, 1);
if (m_stream.userBuffer[mode] == NULL) {
m_errorText = "airtaudio::api::Oss::probeDeviceOpen: error allocating user buffer memory.";
goto error;
}
if (m_stream.doConvertBuffer[mode]) {
bool makeBuffer = true;
bufferBytes = m_stream.nDeviceChannels[mode] * formatBytes(m_stream.deviceFormat[mode]);
if (mode == INPUT) {
if (m_stream.mode == OUTPUT && m_stream.deviceBuffer) {
uint64_t bytesOut = m_stream.nDeviceChannels[0] * formatBytes(m_stream.deviceFormat[0]);
if (bufferBytes <= bytesOut) makeBuffer = false;
}
}
if (makeBuffer) {
bufferBytes *= *bufferSize;
if (m_stream.deviceBuffer) free(m_stream.deviceBuffer);
m_stream.deviceBuffer = (char *) calloc(bufferBytes, 1);
if (m_stream.deviceBuffer == NULL) {
m_errorText = "airtaudio::api::Oss::probeDeviceOpen: error allocating device buffer memory.";
goto error;
}
}
}
m_stream.device[mode] = device;
m_stream.state = STREAM_STOPPED;
// Setup the buffer conversion information structure.
if (m_stream.doConvertBuffer[mode]) setConvertInfo(mode, firstChannel);
// Setup thread if necessary.
if (m_stream.mode == OUTPUT && mode == INPUT) {
// We had already set up an output stream.
m_stream.mode = DUPLEX;
if (m_stream.device[0] == device) handle->id[0] = fd;
}
else {
m_stream.mode = mode;
// Setup callback thread.
m_stream.callbackInfo.object = (void *) this;
// Set the thread attributes for joinable and realtime scheduling
// priority. The higher priority will only take effect if the
// program is run as root or suid.
pthread_attr_t attr;
pthread_attr_init(&attr);
pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
if (options && options->flags & RTAUDIO_SCHEDULE_REALTIME) {
struct sched_param param;
int32_t priority = options->priority;
int32_t min = sched_get_priority_min(SCHED_RR);
int32_t max = sched_get_priority_max(SCHED_RR);
if (priority < min) priority = min;
else if (priority > max) priority = max;
param.sched_priority = priority;
pthread_attr_setschedparam(&attr, &param);
pthread_attr_setschedpolicy(&attr, SCHED_RR);
}
else
pthread_attr_setschedpolicy(&attr, SCHED_OTHER);
#else
pthread_attr_setschedpolicy(&attr, SCHED_OTHER);
#endif
m_stream.callbackInfo.isRunning = true;
result = pthread_create(&m_stream.callbackInfo.thread, &attr, ossCallbackHandler, &m_stream.callbackInfo);
pthread_attr_destroy(&attr);
if (result) {
m_stream.callbackInfo.isRunning = false;
m_errorText = "airtaudio::api::Oss::error creating callback thread!";
goto error;
}
}
return SUCCESS;
error:
if (handle) {
pthread_cond_destroy(&handle->runnable);
if (handle->id[0]) close(handle->id[0]);
if (handle->id[1]) close(handle->id[1]);
delete handle;
m_stream.apiHandle = 0;
}
for (int32_t i=0; i<2; i++) {
if (m_stream.userBuffer[i]) {
free(m_stream.userBuffer[i]);
m_stream.userBuffer[i] = 0;
}
}
if (m_stream.deviceBuffer) {
free(m_stream.deviceBuffer);
m_stream.deviceBuffer = 0;
}
return FAILURE;
}
void airtaudio::api::Oss::closeStream()
{
if (m_stream.state == STREAM_CLOSED) {
m_errorText = "airtaudio::api::Oss::closeStream(): no open stream to close!";
error(airtaudio::errorWarning);
return;
}
OssHandle *handle = (OssHandle *) m_stream.apiHandle;
m_stream.callbackInfo.isRunning = false;
m_stream.mutex.lock();
if (m_stream.state == STREAM_STOPPED)
pthread_cond_signal(&handle->runnable);
m_stream.mutex.unlock();
pthread_join(m_stream.callbackInfo.thread, NULL);
if (m_stream.state == STREAM_RUNNING) {
if (m_stream.mode == OUTPUT || m_stream.mode == DUPLEX)
ioctl(handle->id[0], SNDCTL_DSP_HALT, 0);
else
ioctl(handle->id[1], SNDCTL_DSP_HALT, 0);
m_stream.state = STREAM_STOPPED;
}
if (handle) {
pthread_cond_destroy(&handle->runnable);
if (handle->id[0]) close(handle->id[0]);
if (handle->id[1]) close(handle->id[1]);
delete handle;
m_stream.apiHandle = 0;
}
for (int32_t i=0; i<2; i++) {
if (m_stream.userBuffer[i]) {
free(m_stream.userBuffer[i]);
m_stream.userBuffer[i] = 0;
}
}
if (m_stream.deviceBuffer) {
free(m_stream.deviceBuffer);
m_stream.deviceBuffer = 0;
}
m_stream.mode = UNINITIALIZED;
m_stream.state = STREAM_CLOSED;
}
void airtaudio::api::Oss::startStream()
{
verifyStream();
if (m_stream.state == STREAM_RUNNING) {
m_errorText = "airtaudio::api::Oss::startStream(): the stream is already running!";
error(airtaudio::errorWarning);
return;
}
m_stream.mutex.lock();
m_stream.state = STREAM_RUNNING;
// No need to do anything else here ... OSS automatically starts
// when fed samples.
m_stream.mutex.unlock();
OssHandle *handle = (OssHandle *) m_stream.apiHandle;
pthread_cond_signal(&handle->runnable);
}
void airtaudio::api::Oss::stopStream()
{
verifyStream();
if (m_stream.state == STREAM_STOPPED) {
m_errorText = "airtaudio::api::Oss::stopStream(): the stream is already stopped!";
error(airtaudio::errorWarning);
return;
}
m_stream.mutex.lock();
// The state might change while waiting on a mutex.
if (m_stream.state == STREAM_STOPPED) {
m_stream.mutex.unlock();
return;
}
int32_t result = 0;
OssHandle *handle = (OssHandle *) m_stream.apiHandle;
if (m_stream.mode == OUTPUT || m_stream.mode == DUPLEX) {
// Flush the output with zeros a few times.
char *buffer;
int32_t samples;
airtaudio::format format;
if (m_stream.doConvertBuffer[0]) {
buffer = m_stream.deviceBuffer;
samples = m_stream.bufferSize * m_stream.nDeviceChannels[0];
format = m_stream.deviceFormat[0];
}
else {
buffer = m_stream.userBuffer[0];
samples = m_stream.bufferSize * m_stream.nUserChannels[0];
format = m_stream.userFormat;
}
memset(buffer, 0, samples * formatBytes(format));
for (uint32_t i=0; i<m_stream.nBuffers+1; i++) {
result = write(handle->id[0], buffer, samples * formatBytes(format));
if (result == -1) {
m_errorText = "airtaudio::api::Oss::stopStream: audio write error.";
error(airtaudio::errorWarning);
}
}
result = ioctl(handle->id[0], SNDCTL_DSP_HALT, 0);
if (result == -1) {
m_errorStream << "airtaudio::api::Oss::stopStream: system error stopping callback procedure on device (" << m_stream.device[0] << ").";
m_errorText = m_errorStream.str();
goto unlock;
}
handle->triggered = false;
}
if (m_stream.mode == INPUT || (m_stream.mode == DUPLEX && handle->id[0] != handle->id[1])) {
result = ioctl(handle->id[1], SNDCTL_DSP_HALT, 0);
if (result == -1) {
m_errorStream << "airtaudio::api::Oss::stopStream: system error stopping input callback procedure on device (" << m_stream.device[0] << ").";
m_errorText = m_errorStream.str();
goto unlock;
}
}
unlock:
m_stream.state = STREAM_STOPPED;
m_stream.mutex.unlock();
if (result != -1) return;
error(airtaudio::errorSystemError);
}
void airtaudio::api::Oss::abortStream()
{
verifyStream();
if (m_stream.state == STREAM_STOPPED) {
m_errorText = "airtaudio::api::Oss::abortStream(): the stream is already stopped!";
error(airtaudio::errorWarning);
return;
}
m_stream.mutex.lock();
// The state might change while waiting on a mutex.
if (m_stream.state == STREAM_STOPPED) {
m_stream.mutex.unlock();
return;
}
int32_t result = 0;
OssHandle *handle = (OssHandle *) m_stream.apiHandle;
if (m_stream.mode == OUTPUT || m_stream.mode == DUPLEX) {
result = ioctl(handle->id[0], SNDCTL_DSP_HALT, 0);
if (result == -1) {
m_errorStream << "airtaudio::api::Oss::abortStream: system error stopping callback procedure on device (" << m_stream.device[0] << ").";
m_errorText = m_errorStream.str();
goto unlock;
}
handle->triggered = false;
}
if (m_stream.mode == INPUT || (m_stream.mode == DUPLEX && handle->id[0] != handle->id[1])) {
result = ioctl(handle->id[1], SNDCTL_DSP_HALT, 0);
if (result == -1) {
m_errorStream << "airtaudio::api::Oss::abortStream: system error stopping input callback procedure on device (" << m_stream.device[0] << ").";
m_errorText = m_errorStream.str();
goto unlock;
}
}
unlock:
m_stream.state = STREAM_STOPPED;
m_stream.mutex.unlock();
if (result != -1) return;
error(airtaudio::errorSystemError);
}
void airtaudio::api::Oss::callbackEvent()
{
OssHandle *handle = (OssHandle *) m_stream.apiHandle;
if (m_stream.state == STREAM_STOPPED) {
m_stream.mutex.lock();
pthread_cond_wait(&handle->runnable, &m_stream.mutex);
if (m_stream.state != STREAM_RUNNING) {
m_stream.mutex.unlock();
return;
}
m_stream.mutex.unlock();
}
if (m_stream.state == STREAM_CLOSED) {
m_errorText = "airtaudio::api::Oss::callbackEvent(): the stream is closed ... this shouldn't happen!";
error(airtaudio::errorWarning);
return;
}
// Invoke user callback to get fresh output data.
int32_t doStopStream = 0;
airtaudio::AirTAudioCallback callback = (airtaudio::AirTAudioCallback) m_stream.callbackInfo.callback;
double streamTime = getStreamTime();
airtaudio::streamStatus status = 0;
if (m_stream.mode != INPUT && handle->xrun[0] == true) {
status |= OUTPUT_UNDERFLOW;
handle->xrun[0] = false;
}
if (m_stream.mode != OUTPUT && handle->xrun[1] == true) {
status |= INPUT_OVERFLOW;
handle->xrun[1] = false;
}
doStopStream = callback(m_stream.userBuffer[0], m_stream.userBuffer[1],
m_stream.bufferSize, streamTime, status, m_stream.callbackInfo.userData);
if (doStopStream == 2) {
this->abortStream();
return;
}
m_stream.mutex.lock();
// The state might change while waiting on a mutex.
if (m_stream.state == STREAM_STOPPED) goto unlock;
int32_t result;
char *buffer;
int32_t samples;
airtaudio::format format;
if (m_stream.mode == OUTPUT || m_stream.mode == DUPLEX) {
// Setup parameters and do buffer conversion if necessary.
if (m_stream.doConvertBuffer[0]) {
buffer = m_stream.deviceBuffer;
convertBuffer(buffer, m_stream.userBuffer[0], m_stream.convertInfo[0]);
samples = m_stream.bufferSize * m_stream.nDeviceChannels[0];
format = m_stream.deviceFormat[0];
}
else {
buffer = m_stream.userBuffer[0];
samples = m_stream.bufferSize * m_stream.nUserChannels[0];
format = m_stream.userFormat;
}
// Do byte swapping if necessary.
if (m_stream.doByteSwap[0])
byteSwapBuffer(buffer, samples, format);
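// For duplex streams the very first write primes the device: both triggers are
// cleared, one buffer is written, then input and output are re-enabled together so
// capture and playback start in sync.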
if (m_stream.mode == DUPLEX && handle->triggered == false) {
int32_t trig = 0;
ioctl(handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig);
result = write(handle->id[0], buffer, samples * formatBytes(format));
trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
ioctl(handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig);
handle->triggered = true;
}
else
// Write samples to device.
result = write(handle->id[0], buffer, samples * formatBytes(format));
if (result == -1) {
// We'll assume this is an underrun, though there isn't a
// specific means for determining that.
handle->xrun[0] = true;
m_errorText = "airtaudio::api::Oss::callbackEvent: audio write error.";
error(airtaudio::errorWarning);
// Continue on to input section.
}
}
if (m_stream.mode == INPUT || m_stream.mode == DUPLEX) {
// Setup parameters.
if (m_stream.doConvertBuffer[1]) {
buffer = m_stream.deviceBuffer;
samples = m_stream.bufferSize * m_stream.nDeviceChannels[1];
format = m_stream.deviceFormat[1];
}
else {
buffer = m_stream.userBuffer[1];
samples = m_stream.bufferSize * m_stream.nUserChannels[1];
format = m_stream.userFormat;
}
// Read samples from device.
result = read(handle->id[1], buffer, samples * formatBytes(format));
if (result == -1) {
// We'll assume this is an overrun, though there isn't a
// specific means for determining that.
handle->xrun[1] = true;
m_errorText = "airtaudio::api::Oss::callbackEvent: audio read error.";
error(airtaudio::errorWarning);
goto unlock;
}
// Do byte swapping if necessary.
if (m_stream.doByteSwap[1])
byteSwapBuffer(buffer, samples, format);
// Do buffer conversion if necessary.
if (m_stream.doConvertBuffer[1])
convertBuffer(m_stream.userBuffer[1], m_stream.deviceBuffer, m_stream.convertInfo[1]);
}
unlock:
m_stream.mutex.unlock();
airtaudio::Api::tickStreamTime();
if (doStopStream == 1) this->stopStream();
}
static void *ossCallbackHandler(void *ptr)
{
airtaudio::CallbackInfo *info = (airtaudio::CallbackInfo *) ptr;
airtaudio::api::Oss *object = (airtaudio::api::Oss *) info->object;
bool *isRunning = &info->isRunning;
while (*isRunning == true) {
pthread_testcancel();
object->callbackEvent();
}
pthread_exit(NULL);
}
//******************** End of __LINUX_OSS__ *********************//
#endif

45
airtaudio/api/Oss.h Normal file
View File

@@ -0,0 +1,45 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if !defined(__AIRTAUDIO_API_OSS_H__) && defined(__LINUX_OSS__)
#define __AIRTAUDIO_API_OSS_H__
namespace airtaudio {
namespace api {
class Oss: public airtaudio::Api {
public:
Oss(void);
~Oss(void);
airtaudio::api::type getCurrentApi(void) {
return airtaudio::api::LINUX_OSS;
}
uint32_t getDeviceCount(void);
airtaudio::DeviceInfo getDeviceInfo(uint32_t _device);
void closeStream(void);
void startStream(void);
void stopStream(void);
void abortStream(void);
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
void callbackEvent(void);
private:
bool probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options);
};
};
};
#endif

446
airtaudio/api/Pulse.cpp Normal file
View File

@@ -0,0 +1,446 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if defined(__LINUX_PULSE__)
#include <unistd.h>
#include <limits.h>
#include <airtaudio/Interface.h>
// Code written by Peter Meerwald, pmeerw@pmeerw.net
// and Tristan Matthews.
#include <pulse/error.h>
#include <pulse/simple.h>
#include <cstdio>
static const uint32_t SUPPORTED_SAMPLERATES[] = {
8000,
16000,
22050,
32000,
44100,
48000,
96000,
0
};
struct rtaudio_pa_format_mapping_t {
airtaudio::format airtaudio_format;
pa_sample_format_t pa_format;
};
static const rtaudio_pa_format_mapping_t supported_sampleformats[] = {
{airtaudio::SINT16, PA_SAMPLE_S16LE},
{airtaudio::SINT32, PA_SAMPLE_S32LE},
{airtaudio::FLOAT32, PA_SAMPLE_FLOAT32LE},
{0, PA_SAMPLE_INVALID}};
struct PulseAudioHandle {
pa_simple *s_play;
pa_simple *s_rec;
std::thread* thread;
std::condition_variable runnable_cv;
bool runnable;
PulseAudioHandle() : s_play(0), s_rec(0), thread(nullptr), runnable(false) { }
};
airtaudio::api::Pulse::~Pulse()
{
if (m_stream.state != STREAM_CLOSED)
closeStream();
}
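// The PulseAudio 'simple' API only connects to the server's default sink/source, so a
// single logical device with fixed capabilities is reported below.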
uint32_t airtaudio::api::Pulse::getDeviceCount(void) {
return 1;
}
airtaudio::DeviceInfo airtaudio::api::Pulse::getDeviceInfo(uint32_t _device) {
airtaudio::DeviceInfo info;
info.probed = true;
info.name = "PulseAudio";
info.outputChannels = 2;
info.inputChannels = 2;
info.duplexChannels = 2;
info.isDefaultOutput = true;
info.isDefaultInput = true;
for (const uint32_t *sr = SUPPORTED_SAMPLERATES; *sr; ++sr) {
info.sampleRates.push_back(*sr);
}
info.nativeFormats = SINT16 | SINT32 | FLOAT32;
return info;
}
static void pulseaudio_callback(void* _user) {
airtaudio::CallbackInfo *cbi = static_cast<airtaudio::CallbackInfo *>(_user);
airtaudio::api::Pulse *context = static_cast<airtaudio::api::Pulse*>(cbi->object);
volatile bool *isRunning = &cbi->isRunning;
while (*isRunning) {
context->callbackEvent();
}
}
void airtaudio::api::Pulse::closeStream(void) {
PulseAudioHandle *pah = static_cast<PulseAudioHandle *>(m_stream.apiHandle);
m_stream.callbackInfo.isRunning = false;
if (pah) {
m_stream.mutex.lock();
if (m_stream.state == STREAM_STOPPED) {
pah->runnable = true;
pah->runnable_cv.notify_one();
}
m_stream.mutex.unlock();
pah->thread->join();
if (pah->s_play) {
pa_simple_flush(pah->s_play, NULL);
pa_simple_free(pah->s_play);
}
if (pah->s_rec) {
pa_simple_free(pah->s_rec);
}
delete pah;
m_stream.apiHandle = 0;
}
if (m_stream.userBuffer[0] != NULL) {
free(m_stream.userBuffer[0]);
m_stream.userBuffer[0] = NULL;
}
if (m_stream.userBuffer[1] != NULL) {
free(m_stream.userBuffer[1]);
m_stream.userBuffer[1] = NULL;
}
m_stream.state = STREAM_CLOSED;
m_stream.mode = UNINITIALIZED;
}
void airtaudio::api::Pulse::callbackEvent(void) {
PulseAudioHandle *pah = static_cast<PulseAudioHandle *>(m_stream.apiHandle);
if (m_stream.state == STREAM_STOPPED) {
std::unique_lock<std::mutex> lck(m_stream.mutex);
while (!pah->runnable) {
pah->runnable_cv.wait(lck);
}
if (m_stream.state != STREAM_RUNNING) {
// lck still owns the mutex here; it is released automatically when lck goes out of scope.
return;
}
}
if (m_stream.state == STREAM_CLOSED) {
m_errorText = "airtaudio::api::Pulse::callbackEvent(): the stream is closed ... "
"this shouldn't happen!";
error(airtaudio::errorWarning);
return;
}
airtaudio::AirTAudioCallback callback = (airtaudio::AirTAudioCallback) m_stream.callbackInfo.callback;
double streamTime = getStreamTime();
airtaudio::streamStatus status = 0;
int32_t doStopStream = callback(m_stream.userBuffer[OUTPUT],
m_stream.userBuffer[INPUT],
m_stream.bufferSize,
streamTime,
status,
m_stream.callbackInfo.userData);
if (doStopStream == 2) {
abortStream();
return;
}
m_stream.mutex.lock();
void *pulse_in = m_stream.doConvertBuffer[INPUT] ? m_stream.deviceBuffer : m_stream.userBuffer[INPUT];
void *pulse_out = m_stream.doConvertBuffer[OUTPUT] ? m_stream.deviceBuffer : m_stream.userBuffer[OUTPUT];
if (m_stream.state != STREAM_RUNNING) {
goto unlock;
}
int32_t pa_error;
size_t bytes;
if ( m_stream.mode == OUTPUT
|| m_stream.mode == DUPLEX) {
if (m_stream.doConvertBuffer[OUTPUT]) {
convertBuffer(m_stream.deviceBuffer,
m_stream.userBuffer[OUTPUT],
m_stream.convertInfo[OUTPUT]);
bytes = m_stream.nDeviceChannels[OUTPUT] * m_stream.bufferSize * formatBytes(m_stream.deviceFormat[OUTPUT]);
} else {
bytes = m_stream.nUserChannels[OUTPUT] * m_stream.bufferSize * formatBytes(m_stream.userFormat);
}
if (pa_simple_write(pah->s_play, pulse_out, bytes, &pa_error) < 0) {
m_errorStream << "airtaudio::api::Pulse::callbackEvent: audio write error, " << pa_strerror(pa_error) << ".";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning);
}
}
if (m_stream.mode == INPUT || m_stream.mode == DUPLEX) {
if (m_stream.doConvertBuffer[INPUT]) {
bytes = m_stream.nDeviceChannels[INPUT] * m_stream.bufferSize * formatBytes(m_stream.deviceFormat[INPUT]);
} else {
bytes = m_stream.nUserChannels[INPUT] * m_stream.bufferSize * formatBytes(m_stream.userFormat);
}
if (pa_simple_read(pah->s_rec, pulse_in, bytes, &pa_error) < 0) {
m_errorStream << "airtaudio::api::Pulse::callbackEvent: audio read error, " << pa_strerror(pa_error) << ".";
m_errorText = m_errorStream.str();
error(airtaudio::errorWarning);
}
if (m_stream.doConvertBuffer[INPUT]) {
convertBuffer(m_stream.userBuffer[INPUT],
m_stream.deviceBuffer,
m_stream.convertInfo[INPUT]);
}
}
unlock:
m_stream.mutex.unlock();
airtaudio::Api::tickStreamTime();
if (doStopStream == 1) {
stopStream();
}
}
void airtaudio::api::Pulse::startStream(void) {
PulseAudioHandle *pah = static_cast<PulseAudioHandle *>(m_stream.apiHandle);
if (m_stream.state == STREAM_CLOSED) {
m_errorText = "airtaudio::api::Pulse::startStream(): the stream is not open!";
error(airtaudio::errorInvalidUse);
return;
}
if (m_stream.state == STREAM_RUNNING) {
m_errorText = "airtaudio::api::Pulse::startStream(): the stream is already running!";
error(airtaudio::errorWarning);
return;
}
m_stream.mutex.lock();
m_stream.state = STREAM_RUNNING;
pah->runnable = true;
pah->runnable_cv.notify_one();
m_stream.mutex.unlock();
}
void airtaudio::api::Pulse::stopStream(void) {
PulseAudioHandle *pah = static_cast<PulseAudioHandle *>(m_stream.apiHandle);
if (m_stream.state == STREAM_CLOSED) {
m_errorText = "airtaudio::api::Pulse::stopStream(): the stream is not open!";
error(airtaudio::errorInvalidUse);
return;
}
if (m_stream.state == STREAM_STOPPED) {
m_errorText = "airtaudio::api::Pulse::stopStream(): the stream is already stopped!";
error(airtaudio::errorWarning);
return;
}
m_stream.state = STREAM_STOPPED;
m_stream.mutex.lock();
if (pah && pah->s_play) {
int32_t pa_error;
if (pa_simple_drain(pah->s_play, &pa_error) < 0) {
m_errorStream << "airtaudio::api::Pulse::stopStream: error draining output device, " <<
pa_strerror(pa_error) << ".";
m_errorText = m_errorStream.str();
m_stream.mutex.unlock();
error(airtaudio::errorSystemError);
return;
}
}
m_stream.state = STREAM_STOPPED;
m_stream.mutex.unlock();
}
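// abortStream() differs from stopStream() only in that pending output is discarded
// with pa_simple_flush() instead of being played out with pa_simple_drain().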
void airtaudio::api::Pulse::abortStream(void) {
PulseAudioHandle *pah = static_cast<PulseAudioHandle*>(m_stream.apiHandle);
if (m_stream.state == STREAM_CLOSED) {
m_errorText = "airtaudio::api::Pulse::abortStream(): the stream is not open!";
error(airtaudio::errorInvalidUse);
return;
}
if (m_stream.state == STREAM_STOPPED) {
m_errorText = "airtaudio::api::Pulse::abortStream(): the stream is already stopped!";
error(airtaudio::errorWarning);
return;
}
m_stream.state = STREAM_STOPPED;
m_stream.mutex.lock();
if (pah && pah->s_play) {
int32_t pa_error;
if (pa_simple_flush(pah->s_play, &pa_error) < 0) {
m_errorStream << "airtaudio::api::Pulse::abortStream: error flushing output device, " <<
pa_strerror(pa_error) << ".";
m_errorText = m_errorStream.str();
m_stream.mutex.unlock();
error(airtaudio::errorSystemError);
return;
}
}
m_stream.state = STREAM_STOPPED;
m_stream.mutex.unlock();
}
bool airtaudio::api::Pulse::probeDeviceOpen(uint32_t device,
airtaudio::api::StreamMode mode,
uint32_t channels,
uint32_t firstChannel,
uint32_t sampleRate,
airtaudio::format format,
uint32_t *bufferSize,
airtaudio::StreamOptions *options) {
PulseAudioHandle *pah = 0;
uint64_t bufferBytes = 0;
pa_sample_spec ss;
if (device != 0) {
return false;
}
if (mode != INPUT && mode != OUTPUT) {
return false;
}
if (channels != 1 && channels != 2) {
m_errorText = "airtaudio::api::Pulse::probeDeviceOpen: unsupported number of channels.";
return false;
}
ss.channels = channels;
if (firstChannel != 0) {
return false;
}
bool sr_found = false;
for (const uint32_t *sr = SUPPORTED_SAMPLERATES; *sr; ++sr) {
if (sampleRate == *sr) {
sr_found = true;
m_stream.sampleRate = sampleRate;
ss.rate = sampleRate;
break;
}
}
if (!sr_found) {
m_errorText = "airtaudio::api::Pulse::probeDeviceOpen: unsupported sample rate.";
return false;
}
bool sf_found = false;
for (const rtaudio_pa_format_mapping_t *sf = supported_sampleformats;
sf->airtaudio_format && sf->pa_format != PA_SAMPLE_INVALID;
++sf) {
if (format == sf->airtaudio_format) {
sf_found = true;
m_stream.userFormat = sf->airtaudio_format;
ss.format = sf->pa_format;
break;
}
}
if (!sf_found) {
m_errorText = "airtaudio::api::Pulse::probeDeviceOpen: unsupported sample format.";
return false;
}
// Set interleaving parameters.
if (options && options->flags & NONINTERLEAVED) {
m_stream.userInterleaved = false;
} else {
m_stream.userInterleaved = true;
}
m_stream.deviceInterleaved[mode] = true;
m_stream.nBuffers = 1;
m_stream.doByteSwap[mode] = false;
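// pa_simple_read()/pa_simple_write() exchange interleaved frames only, so a conversion
// buffer is needed whenever the user asked for non-interleaved data with more than one channel.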
m_stream.doConvertBuffer[mode] = channels > 1 && !m_stream.userInterleaved;
m_stream.deviceFormat[mode] = m_stream.userFormat;
m_stream.nUserChannels[mode] = channels;
m_stream.nDeviceChannels[mode] = channels + firstChannel;
m_stream.channelOffset[mode] = 0;
// Allocate necessary internal buffers.
bufferBytes = m_stream.nUserChannels[mode] * *bufferSize * formatBytes(m_stream.userFormat);
m_stream.userBuffer[mode] = (char *) calloc(bufferBytes, 1);
if (m_stream.userBuffer[mode] == NULL) {
m_errorText = "airtaudio::api::Pulse::probeDeviceOpen: error allocating user buffer memory.";
goto error;
}
m_stream.bufferSize = *bufferSize;
if (m_stream.doConvertBuffer[mode]) {
bool makeBuffer = true;
bufferBytes = m_stream.nDeviceChannels[mode] * formatBytes(m_stream.deviceFormat[mode]);
if (mode == INPUT) {
if (m_stream.mode == OUTPUT && m_stream.deviceBuffer) {
uint64_t bytesOut = m_stream.nDeviceChannels[0] * formatBytes(m_stream.deviceFormat[0]);
if (bufferBytes <= bytesOut) makeBuffer = false;
}
}
if (makeBuffer) {
bufferBytes *= *bufferSize;
if (m_stream.deviceBuffer) free(m_stream.deviceBuffer);
m_stream.deviceBuffer = (char *) calloc(bufferBytes, 1);
if (m_stream.deviceBuffer == NULL) {
m_errorText = "airtaudio::api::Pulse::probeDeviceOpen: error allocating device buffer memory.";
goto error;
}
}
}
m_stream.device[mode] = device;
// Setup the buffer conversion information structure.
if (m_stream.doConvertBuffer[mode]) {
setConvertInfo(mode, firstChannel);
}
if (!m_stream.apiHandle) {
PulseAudioHandle *pah = new PulseAudioHandle;
if (!pah) {
m_errorText = "airtaudio::api::Pulse::probeDeviceOpen: error allocating memory for handle.";
goto error;
}
m_stream.apiHandle = pah;
}
pah = static_cast<PulseAudioHandle *>(m_stream.apiHandle);
int32_t pa_error;
switch (mode) {
case INPUT:
pah->s_rec = pa_simple_new(NULL, "RtAudio", PA_STREAM_RECORD, NULL, "Record", &ss, NULL, NULL, &pa_error);
if (!pah->s_rec) {
m_errorText = "airtaudio::api::Pulse::probeDeviceOpen: error connecting input to PulseAudio server.";
goto error;
}
break;
case OUTPUT:
pah->s_play = pa_simple_new(NULL, "RtAudio", PA_STREAM_PLAYBACK, NULL, "Playback", &ss, NULL, NULL, &pa_error);
if (!pah->s_play) {
m_errorText = "airtaudio::api::Pulse::probeDeviceOpen: error connecting output to PulseAudio server.";
goto error;
}
break;
default:
goto error;
}
if (m_stream.mode == UNINITIALIZED) {
m_stream.mode = mode;
} else if (m_stream.mode == mode) {
goto error;
} else {
m_stream.mode = DUPLEX;
}
if (!m_stream.callbackInfo.isRunning) {
m_stream.callbackInfo.object = this;
m_stream.callbackInfo.isRunning = true;
pah->thread = new std::thread(pulseaudio_callback, (void *)&m_stream.callbackInfo);
if (pah->thread == NULL) {
m_errorText = "airtaudio::api::Pulse::probeDeviceOpen: error creating thread.";
goto error;
}
}
m_stream.state = STREAM_STOPPED;
return true;
error:
if (pah && m_stream.callbackInfo.isRunning) {
delete pah;
m_stream.apiHandle = 0;
}
for (int32_t i=0; i<2; i++) {
if (m_stream.userBuffer[i]) {
free(m_stream.userBuffer[i]);
m_stream.userBuffer[i] = 0;
}
}
if (m_stream.deviceBuffer) {
free(m_stream.deviceBuffer);
m_stream.deviceBuffer = 0;
}
return FAILURE;
}
#endif

46
airtaudio/api/Pulse.h Normal file
View File

@@ -0,0 +1,46 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all rights reserved
*
* @license like MIT (see license file)
*/
#if !defined(__AIRTAUDIO_API_PULSE_H__) && defined(__LINUX_PULSE__)
#define __AIRTAUDIO_API_PULSE_H__
namespace airtaudio {
namespace api {
class Pulse: public airtaudio::Api {
public:
~Pulse(void);
airtaudio::api::type getCurrentApi(void) {
return airtaudio::api::LINUX_PULSE;
}
uint32_t getDeviceCount(void);
airtaudio::DeviceInfo getDeviceInfo(uint32_t _device);
void closeStream(void);
void startStream(void);
void stopStream(void);
void abortStream(void);
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
void callbackEvent(void);
private:
std::vector<airtaudio::DeviceInfo> m_devices;
void saveDeviceInfo(void);
bool probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options);
};
};
};
#endif