[DEV] functional Android version with ewol

This commit is contained in:
Edouard DUPIN 2014-03-20 10:25:05 +01:00
parent 56836c7876
commit a1f0b0f81c
10 changed files with 362 additions and 48 deletions

View File

@ -6,6 +6,7 @@
* @license like MIT (see license file)
*/
//#include <etk/types.h>
#include <airtaudio/Interface.h>
#include <airtaudio/debug.h>
#include <iostream>
@ -14,6 +15,27 @@
#include <climits>
etk::CCout& operator <<(etk::CCout& _os, const airtaudio::api::type& _obj){
switch (_obj) {
default:
case airtaudio::api::UNSPECIFIED: _os << "UNSPECIFIED"; break;
case airtaudio::api::LINUX_ALSA: _os << "LINUX_ALSA"; break;
case airtaudio::api::LINUX_PULSE: _os << "LINUX_PULSE"; break;
case airtaudio::api::LINUX_OSS: _os << "LINUX_OSS"; break;
case airtaudio::api::UNIX_JACK: _os << "UNIX_JACK"; break;
case airtaudio::api::MACOSX_CORE: _os << "MACOSX_CORE"; break;
case airtaudio::api::WINDOWS_ASIO: _os << "WINDOWS_ASIO"; break;
case airtaudio::api::WINDOWS_DS: _os << "WINDOWS_DS"; break;
case airtaudio::api::RTAUDIO_DUMMY: _os << "RTAUDIO_DUMMY"; break;
case airtaudio::api::ANDROID_JAVA: _os << "ANDROID_JAVA"; break;
case airtaudio::api::USER_INTERFACE_1: _os << "USER_INTERFACE_1"; break;
case airtaudio::api::USER_INTERFACE_2: _os << "USER_INTERFACE_2"; break;
case airtaudio::api::USER_INTERFACE_3: _os << "USER_INTERFACE_3"; break;
case airtaudio::api::USER_INTERFACE_4: _os << "USER_INTERFACE_4"; break;
}
return _os;
}
// Static variable definitions.
const uint32_t airtaudio::api::MAX_SAMPLE_RATES = 14;
const uint32_t airtaudio::api::SAMPLE_RATES[] = {

View File

@ -10,6 +10,8 @@
#define __AIRTAUDIO_API_H__
#include <sstream>
#include <etk/types.h>
#include <etk/Stream.h>
namespace airtaudio {
namespace api {
@ -174,7 +176,7 @@ namespace airtaudio {
* @brief Protected method used to perform format, channel number, and/or interleaving
* conversions between the user and device buffers.
*/
void convertBuffer(char *_outBuffer, char *_inBuffer, airtaudio::api::ConvertInfo &_info);
void convertBuffer(char *_outBuffer, char *_inBuffer, airtaudio::api::ConvertInfo& _info);
//! Protected common method used to perform byte-swapping on buffers.
void byteSwapBuffer(char *_buffer, uint32_t _samples, airtaudio::format _format);
@ -186,5 +188,9 @@ namespace airtaudio {
void setConvertInfo(airtaudio::api::StreamMode _mode, uint32_t _firstChannel);
};
};
/**
* @brief Debug operator to display the current element in a human-readable form
*/
etk::CCout& operator <<(etk::CCout& _os, const airtaudio::api::type& _obj);
#endif

View File

@ -6,6 +6,7 @@
* @license like MIT (see license file)
*/
//#include <etk/types.h>
#include <airtaudio/Interface.h>
#include <airtaudio/debug.h>
#include <iostream>
@ -28,7 +29,9 @@ void airtaudio::Interface::openRtApi(airtaudio::api::type _api) {
m_rtapi = NULL;
}
for (auto &it :m_apiAvaillable) {
ATA_ERROR("try open " << it.first);
if (_api == it.first) {
ATA_ERROR(" ==> call it");
m_rtapi = it.second();
if (m_rtapi != NULL) {
return;
@ -36,6 +39,7 @@ void airtaudio::Interface::openRtApi(airtaudio::api::type _api) {
}
}
// TODO : An error occurred ...
ATA_ERROR("Error in open API ...");
}
@ -62,6 +66,9 @@ airtaudio::Interface::Interface(void) :
#if defined(__MACOSX_CORE__)
addInterface(airtaudio::api::MACOSX_CORE, airtaudio::api::Core::Create);
#endif
#if defined(__ANDROID_JAVA__)
addInterface(airtaudio::api::ANDROID_JAVA, airtaudio::api::Android::Create);
#endif
#if defined(__AIRTAUDIO_DUMMY__)
addInterface(airtaudio::api::RTAUDIO_DUMMY, airtaudio::api::Dummy::Create);
#endif
@ -72,11 +79,13 @@ void airtaudio::Interface::addInterface(airtaudio::api::type _api, Api* (*_callb
}
enum airtaudio::errorType airtaudio::Interface::instanciate(airtaudio::api::type _api) {
ATA_INFO("Instanciate API ...");
if (m_rtapi != NULL) {
std::cerr << "\nInterface already started ...!\n" << std::endl;
ATA_WARNING("Interface already started ...!");
return airtaudio::errorNone;
}
if (_api != airtaudio::api::UNSPECIFIED) {
ATA_ERROR("API specified ...");
// Attempt to open the specified API.
openRtApi(_api);
if (m_rtapi != NULL) {
@ -84,26 +93,35 @@ enum airtaudio::errorType airtaudio::Interface::instanciate(airtaudio::api::type
}
// No compiled support for specified API value. Issue a debug
// warning and continue as if no API was specified.
std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
ATA_ERROR("RtAudio: no compiled support for specified API argument!");
return airtaudio::errorFail;
}
ATA_INFO("Auto choice API :");
// Iterate through the compiled APIs and return as soon as we find
// one with at least one device or we reach the end of the list.
std::vector<airtaudio::api::type> apis = getCompiledApi();
ATA_INFO(" find : " << apis.size() << " apis.");
for (auto &it : apis) {
ATA_INFO("try open ...");
openRtApi(it);
if(m_rtapi == NULL) {
ATA_ERROR(" ==> can not create ...");
continue;
}
if (m_rtapi->getDeviceCount() != 0) {
ATA_INFO(" ==> api open");
break;
}
}
if (m_rtapi != NULL) {
return airtaudio::errorNone;
}
std::cout << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
ATA_ERROR("RtAudio: no compiled API support found ... critical error!!");
return airtaudio::errorFail;
}
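// NOTE (editor sketch, not part of this commit): typical use of the
// auto-selection path above. Passing UNSPECIFIED makes instanciate() walk the
// compiled APIs until one of them reports at least one device.
static void exampleAutoInstanciate(void) {
	airtaudio::Interface audio;
	if (audio.instanciate(airtaudio::api::UNSPECIFIED) != airtaudio::errorNone) {
		ATA_ERROR("no usable audio API found");
	}
}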
airtaudio::Interface::~Interface(void) {
ATA_INFO("Remove interface");
if (m_rtapi != NULL) {
delete m_rtapi;
m_rtapi = NULL;

View File

@ -4,81 +4,217 @@
* @license like MIT (see license file)
*/
#if defined(__ANDROID_JAVA__)
#ifdef __ANDROID_JAVA__
#include <alsa/asoundlib.h>
#include <ewol/context/Context.h>
#include <unistd.h>
#include <airtaudio/Interface.h>
#include <airtaudio/debug.h>
#include <limits.h>
airtaudio::api::Android(void) {
// On android, we set a static device ...
airtaudio::DeviceInfo tmp;
tmp.name = "speaker";
tmp.outputChannels = 2;
tmp.inputChannels = 0;
tmp.duplexChannels = 1;
tmp.isDefaultOutput = true;
tmp.isDefaultInput = false;
sampleRates.pushBack(44100);
nativeFormats = SINT16;
m_devices.push_back(tmp);
ATA_INFO("Create Android interface");
airtaudio::Api* airtaudio::api::Android::Create(void) {
ATA_INFO("Create Android device ... ");
return new airtaudio::api::Android();
}
airtaudio::api::~Android(void) {
airtaudio::api::Android::Android(void) {
ATA_INFO("new Android");
// On android, we set a static device ...
ATA_INFO("get context");
ewol::Context& tmpContext = ewol::getContext();
ATA_INFO("done p=" << (int64_t)&tmpContext);
int32_t deviceCount = tmpContext.audioGetDeviceCount();
ATA_ERROR("Get count devices : " << deviceCount);
for (int32_t iii=0; iii<deviceCount; ++iii) {
std::string property = tmpContext.audioGetDeviceProperty(iii);
ATA_ERROR("Get devices property : " << property);
std::vector<std::string> listProperty = std::split(property, ':');
airtaudio::DeviceInfo tmp;
tmp.name = listProperty[0];
std::vector<std::string> listFreq = std::split(listProperty[2], ',');
for(size_t fff=0; fff<listFreq.size(); ++fff) {
tmp.sampleRates.push_back(std::stoi(listFreq[fff]));
}
tmp.outputChannels = 0;
tmp.inputChannels = 0;
tmp.duplexChannels = 0;
if (listProperty[1] == "out") {
tmp.isDefaultOutput = true;
tmp.isDefaultInput = false;
tmp.outputChannels = std::stoi(listProperty[3]);
} else if (listProperty[1] == "in") {
tmp.isDefaultOutput = false;
tmp.isDefaultInput = true;
tmp.inputChannels = std::stoi(listProperty[3]);
} else {
/* duplex */
tmp.isDefaultOutput = true;
tmp.isDefaultInput = true;
tmp.duplexChannels = std::stoi(listProperty[3]);
}
std::vector<std::string> listFormat = std::split(listProperty[4], ',');
tmp.nativeFormats = 0;
for(size_t fff=0; fff<listFormat.size(); ++fff) {
if (listFormat[fff] == "float") {
tmp.nativeFormats |= FLOAT32;
} else if (listFormat[fff] == "double") {
tmp.nativeFormats |= FLOAT64;
} else if (listFormat[fff] == "s32") {
tmp.nativeFormats |= SINT32;
} else if (listFormat[fff] == "s24") {
tmp.nativeFormats |= SINT24;
} else if (listFormat[fff] == "s16") {
tmp.nativeFormats |= SINT16;
} else if (listFormat[fff] == "s8") {
tmp.nativeFormats |= SINT8;
}
}
m_devices.push_back(tmp);
}
ATA_INFO("Create Android interface (end)");
}
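// NOTE (editor sketch, not part of this commit): the constructor above assumes
// audioGetDeviceProperty() returns a string of the form
//     "<name>:<in|out|duplex>:<rate,rate,...>:<channelCount>:<format,format,...>"
// e.g. "speaker:out:44100,48000:2:s16,float". The std::split used above is not
// standard C++; a minimal stand-alone helper following the same convention:
#include <sstream>
#include <string>
#include <vector>
static std::vector<std::string> splitString(const std::string& _input, char _separator) {
	std::vector<std::string> out;
	std::stringstream flow(_input);
	std::string element;
	while (std::getline(flow, element, _separator)) {
		out.push_back(element);
	}
	return out;
}
// usage: splitString("speaker:out:44100,48000:2:s16,float", ':')
//        ==> {"speaker", "out", "44100,48000", "2", "s16,float"}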
airtaudio::api::Android::~Android(void) {
ATA_INFO("Destroy Android interface");
}
uint32_t airtaudio::api::getDeviceCount(void) {
ATA_INFO("Get device count:"<< m_devices.size());
uint32_t airtaudio::api::Android::getDeviceCount(void) {
//ATA_INFO("Get device count:"<< m_devices.size());
return m_devices.size();
}
airtaudio::DeviceInfo airtaudio::api::getDeviceInfo(uint32_t _device) {
ATA_INFO("Get device info ...");
airtaudio::DeviceInfo airtaudio::api::Android::getDeviceInfo(uint32_t _device) {
//ATA_INFO("Get device info ...");
return m_devices[_device];
}
enum airtaudio::errorType airtaudio::api::closeStream(void) {
enum airtaudio::errorType airtaudio::api::Android::closeStream(void) {
ATA_INFO("Clese Stream");
// Can not close the stream now...
return airtaudio::errorNone;
}
enum airtaudio::errorType airtaudio::api::startStream(void) {
enum airtaudio::errorType airtaudio::api::Android::startStream(void) {
ATA_INFO("Start Stream");
// Can not start the stream now...
return airtaudio::errorNone;
}
enum airtaudio::errorType airtaudio::api::stopStream(void) {
enum airtaudio::errorType airtaudio::api::Android::stopStream(void) {
ATA_INFO("Stop stream");
ewol::Context& tmpContext = ewol::getContext();
tmpContext.audioCloseDevice(0);
// Can not close the stream now...
return airtaudio::errorNone;
}
enum airtaudio::errorType airtaudio::api::abortStream(void) {
enum airtaudio::errorType airtaudio::api::Android::abortStream(void) {
ATA_INFO("Abort Stream");
ewol::Context& tmpContext = ewol::getContext();
tmpContext.audioCloseDevice(0);
// Can not close the stream now...
return airtaudio::errorNone;
}
void airtaudio::api::callbackEvent(void) {
ATA_INFO("callback event ...");
void airtaudio::api::Android::callBackEvent(void* _data,
int32_t _frameRate) {
int32_t doStopStream = 0;
airtaudio::AirTAudioCallback callback = (airtaudio::AirTAudioCallback) m_stream.callbackInfo.callback;
double streamTime = getStreamTime();
airtaudio::streamStatus status = 0;
if (m_stream.doConvertBuffer[OUTPUT] == true) {
doStopStream = callback(m_stream.userBuffer[OUTPUT],
NULL,
_frameRate,
streamTime,
status,
m_stream.callbackInfo.userData);
convertBuffer((char*)_data, (char*)m_stream.userBuffer[OUTPUT], m_stream.convertInfo[OUTPUT]);
} else {
doStopStream = callback(_data,
NULL,
_frameRate,
streamTime,
status,
m_stream.callbackInfo.userData);
}
if (doStopStream == 2) {
abortStream();
return;
}
airtaudio::Api::tickStreamTime();
}
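// NOTE (editor sketch, not part of this commit): a minimal user callback as
// callBackEvent() above would invoke it. The parameter list is assumed from
// the call site (output buffer, input buffer, frame count, stream time,
// status, user data); returning 2 makes the wrapper call abortStream(), any
// other value lets the stream keep running.
static int exampleOutputCallback(void* _outputBuffer,
                                 void* _inputBuffer,
                                 int32_t _nbFrames,
                                 double _streamTime,
                                 airtaudio::streamStatus _status,
                                 void* _userData) {
	// fill the output with silence; SINT16 stereo is assumed to match the
	// hard-coded device format set in probeDeviceOpen() below
	int16_t* out = static_cast<int16_t*>(_outputBuffer);
	for (int32_t iii=0; iii<_nbFrames*2; ++iii) {
		out[iii] = 0;
	}
	return 0; // 0 == keep running, 2 == abort the stream
}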
bool airtaudio::api::probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options) {
void airtaudio::api::Android::androidCallBackEvent(void* _data,
int32_t _frameRate,
void* _userData) {
if (_userData == NULL) {
ATA_INFO("callback event ... NULL pointer");
return;
}
airtaudio::api::Android* myClass = static_cast<airtaudio::api::Android*>(_userData);
myClass->callBackEvent(_data, _frameRate/2);
}
bool airtaudio::api::Android::probeDeviceOpen(uint32_t _device,
airtaudio::api::StreamMode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options) {
ATA_INFO("Probe : device=" << _device << " channels=" << _channels << " firstChannel=" << _firstChannel << " sampleRate=" << _sampleRate);
if (_mode != OUTPUT) {
ATA_ERROR("Can not start a device input or duplex for Android ...");
return false;
}
m_stream.userFormat = _format;
m_stream.nUserChannels[_mode] = _channels;
ewol::Context& tmpContext = ewol::getContext();
bool ret = false;
if (_format == SINT8) {
ret = tmpContext.audioOpenDevice(_device, _sampleRate, _channels, 0, androidCallBackEvent, this);
} else {
ret = tmpContext.audioOpenDevice(_device, _sampleRate, _channels, 1, androidCallBackEvent, this);
}
m_stream.bufferSize = 256;
m_stream.sampleRate = _sampleRate;
m_stream.doByteSwap[_mode] = false; // for endianness ...
// TODO : For now, these values are hard-coded ==> to be updated later ...
m_stream.deviceFormat[_mode] = SINT16;
m_stream.nDeviceChannels[_mode] = 2;
m_stream.deviceInterleaved[_mode] = true;
m_stream.doConvertBuffer[_mode] = false;
if (m_stream.userFormat != m_stream.deviceFormat[_mode]) {
m_stream.doConvertBuffer[_mode] = true;
}
if (m_stream.nUserChannels[_mode] < m_stream.nDeviceChannels[_mode]) {
m_stream.doConvertBuffer[_mode] = true;
}
if ( m_stream.userInterleaved != m_stream.deviceInterleaved[_mode]
&& m_stream.nUserChannels[_mode] > 1) {
m_stream.doConvertBuffer[_mode] = true;
}
if (m_stream.doConvertBuffer[_mode] == true) {
// Allocate necessary internal buffers.
uint64_t bufferBytes = m_stream.nUserChannels[_mode] * m_stream.bufferSize * formatBytes(m_stream.userFormat);
m_stream.userBuffer[_mode] = (char *) calloc(bufferBytes, 1);
if (m_stream.userBuffer[_mode] == NULL) {
ATA_ERROR("airtaudio::api::Android::probeDeviceOpen: error allocating user buffer memory.");
}
setConvertInfo(_mode, _firstChannel);
}
ATA_INFO("device format : " << m_stream.deviceFormat[_mode] << " user format : " << m_stream.userFormat);
ATA_INFO("device channels : " << m_stream.nDeviceChannels[_mode] << " user channels : " << m_stream.nUserChannels[_mode]);
ATA_INFO("do convert buffer : " << m_stream.doConvertBuffer[_mode]);
if (ret == false) {
ATA_ERROR("Can not open device.");
}
return ret;
}
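// NOTE (editor sketch, not part of this commit): with the hard-coded device
// settings above (deviceFormat = SINT16, nDeviceChannels = 2, interleaved), a
// caller requesting FLOAT32 mono output triggers both convert conditions:
//   userFormat (FLOAT32) != deviceFormat (SINT16)   ==> doConvertBuffer = true
//   nUserChannels (1)    <  nDeviceChannels (2)     ==> doConvertBuffer = true
// so a user buffer of nUserChannels * bufferSize * formatBytes(FLOAT32) bytes
// is allocated here and convertBuffer() is applied in callBackEvent().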
#endif

View File

@ -40,6 +40,12 @@ namespace airtaudio {
airtaudio::format _format,
uint32_t *_bufferSize,
airtaudio::StreamOptions *_options);
private:
void callBackEvent(void* _data,
int32_t _frameRate);
static void androidCallBackEvent(void* _data,
int32_t _frameRate,
void* _userData);
};
};
};

airtaudio/base.cpp (new file, 105 lines)
View File

@ -0,0 +1,105 @@
/**
* @author Gary P. SCAVONE
*
* @copyright 2001-2013 Gary P. Scavone, all right reserved
*
* @license like MIT (see license file)
*/
#if 0
#include <airtaudio/base.h>
etk::CCout& airtaudio::operator <<(etk::CCout& _os, enum errorType _obj) {
switch(_obj) {
case errorNone:
_os << "errorNone";
break;
case errorFail:
_os << "errorFail";
break;
case errorWarning:
_os << "errorWarning";
break;
case errorInputNull:
_os << "errorInputNull";
break;
case errorInvalidUse:
_os << "errorInvalidUse";
break;
case errorSystemError:
_os << "errorSystemError";
break;
default:
_os << "UNKNOW...";
break;
}
return _os;
}
etk::CCout& airtaudio::operator <<(etk::CCout& _os, const airtaudio::format& _obj) {
switch(_obj) {
case SINT8:
_os << "SINT8";
break;
case SINT16:
_os << "SINT16";
break;
case SINT24:
_os << "SINT24";
break;
case SINT32:
_os << "SINT32";
break;
case FLOAT32:
_os << "FLOAT32";
break;
case FLOAT64:
_os << "FLOAT64";
break;
default:
_os << "UNKNOW...";
break;
}
return _os;
}
etk::CCout& airtaudio::operator <<(etk::CCout& _os, const airtaudio::streamFlags& _obj) {
switch(_obj) {
case NONINTERLEAVED:
_os << "NONINTERLEAVED";
break;
case MINIMIZE_LATENCY:
_os << "MINIMIZE_LATENCY";
break;
case HOG_DEVICE:
_os << "HOG_DEVICE";
break;
case SCHEDULE_REALTIME:
_os << "SCHEDULE_REALTIME";
break;
case ALSA_USE_DEFAULT:
_os << "ALSA_USE_DEFAULT";
break;
default:
_os << "UNKNOW...";
break;
}
return _os;
}
etk::CCout& airtaudio::operator <<(etk::CCout& _os, const airtaudio::streamStatus& _obj) {
switch(_obj) {
case INPUT_OVERFLOW:
_os << "INPUT_OVERFLOW";
break;
case OUTPUT_UNDERFLOW:
_os << "OUTPUT_UNDERFLOW";
break;
default:
_os << "UNKNOW...";
break;
}
return _os;
}
#endif

View File

@ -21,6 +21,7 @@
#if defined(HAVE_GETTIMEOFDAY)
#include <sys/time.h>
#endif
//#include <etk/Stream.h>
namespace airtaudio {
//! Defined RtError types.
@ -35,6 +36,10 @@ namespace airtaudio {
// airtaudio version
static const std::string VERSION("4.0.12");
/**
* @brief Debug operator to display the current element in a human-readable form
*/
//etk::CCout& operator <<(etk::CCout& _os, enum errorType _obj);
/**
* @typedef typedef uint64_t format;
* @brief airtaudio data format type.
@ -60,6 +65,11 @@ namespace airtaudio {
static const format FLOAT32 = 0x10; // Normalized between plus/minus 1.0.
static const format FLOAT64 = 0x20; // Normalized between plus/minus 1.0.
/**
* @brief Debug operator to display the current element in a human-readable form
*/
//etk::CCout& operator <<(etk::CCout& _os, const airtaudio::format& _obj);
/**
* @typedef typedef uint64_t streamFlags;
* @brief RtAudio stream option flags.
@ -110,6 +120,11 @@ namespace airtaudio {
static const streamFlags SCHEDULE_REALTIME = 0x8; // Try to select realtime scheduling for callback thread.
static const streamFlags ALSA_USE_DEFAULT = 0x10; // Use the "default" PCM device (ALSA only).
/**
* @brief Debug operator to display the current element in a human-readable form
*/
//etk::CCout& operator <<(etk::CCout& _os, const airtaudio::streamFlags& _obj);
/**
* @typedef typedef uint64_t rtaudio::streamStatus;
* @brief RtAudio stream status (over- or underflow) flags.
@ -126,6 +141,11 @@ namespace airtaudio {
static const streamStatus INPUT_OVERFLOW = 0x1; // Input data was discarded because of an overflow condition at the driver.
static const streamStatus OUTPUT_UNDERFLOW = 0x2; // The output buffer ran low, likely causing a gap in the output sound.
/**
* @brief Debug operator to display the current element in a human-readable form
*/
//etk::CCout& operator <<(etk::CCout& _os, const airtaudio::streamStatus& _obj);
/**
* @brief RtAudio callback function prototype.
*

View File

@ -3,7 +3,7 @@
*
* @copyright 2011, Edouard DUPIN, all right reserved
*
* @license BSD v3 (see license file)
* @license BSD 3 clauses (see license file)
*/
#include <airtaudio/debug.h>

View File

@ -3,7 +3,7 @@
*
* @copyright 2011, Edouard DUPIN, all right reserved
*
* @license BSD v3 (see license file)
* @license BSD 3 clauses (see license file)
*/
#ifndef __EAUDIOFX_DEBUG_H__

View File

@ -12,6 +12,7 @@ def create(target):
myModule.add_src_file([
'airtaudio/debug.cpp',
'airtaudio/base.cpp',
'airtaudio/Interface.cpp',
'airtaudio/Api.cpp',
'airtaudio/api/Alsa.cpp',
@ -36,20 +37,20 @@ def create(target):
myModule.add_export_flag_CC(['-D__LINUX_ALSA__'])
myModule.add_export_flag_LD("-lasound")
# Linux Jack API
myModule.add_export_flag_CC(['-D__UNIX_JACK__'])
myModule.add_export_flag_LD("-ljack")
#myModule.add_export_flag_CC(['-D__UNIX_JACK__'])
#myModule.add_export_flag_LD("-ljack")
# Linux PulseAudio API
myModule.add_export_flag_CC(['-D__LINUX_PULSE__'])
myModule.add_export_flag_LD("-lpulse-simple")
myModule.add_export_flag_LD("-lpulse")
#myModule.add_export_flag_CC(['-D__LINUX_PULSE__'])
#myModule.add_export_flag_LD("-lpulse-simple")
#myModule.add_export_flag_LD("-lpulse")
elif target.name=="MacOs":
# MacOsX core
myModule.add_export_flag_CC(['__MACOSX_CORE__'])
myModule.add_export_flag_CC(['-D__MACOSX_CORE__'])
myModule.add_export_flag_LD("-framework CoreAudio")
myModule.add_export_flag_LD("-framework CoreMIDI")
elif target.name=="Android":
# Android Java API
myModule.add_export_flag_CC(['__ANDROID_JAVA__'])
myModule.add_export_flag_CC(['-D__ANDROID_JAVA__'])
else:
debug.warning("unknow target for RTAudio : " + target.name);