Compare commits

...

No commits in common. "main" and "gh-pages" have entirely different histories.

329 changed files with 8500 additions and 11401 deletions

.gitignore vendored

@@ -1,64 +0,0 @@
###################################
# folders
###################################
CVS
.svn
Object_*
doxygen/API/
doxygen/ALL/
###################################
# backup files
###################################
*~
*.swp
*.old
*.bck
###################################
# Compiled source #
###################################
*.com
*.class
*.dll
*.exe
*.o
*.so
*.pyc
tags
#ewol
out
ewol_debug
ewol_release
###################################
# Packages #
###################################
# it's better to unpack these files and commit the raw source
# git has its own built in compression methods
*.7z
*.dmg
*.gz
*.iso
*.jar
*.rar
*.tar
*.zip
###################################
# Logs and databases #
###################################
*.log
*.sql
*.sqlite
###################################
# OS generated files #
###################################
.DS_Store?
ehthumbs.db
Icon?
Thumbs.db
Sources/libewol/ewol/os/AndroidAbstraction.cpp
org_ewol_EwolConstants.h


@@ -1,7 +1,11 @@
language: cpp
language:
- cpp
sudo: required
dist: trusty
sudo: false
os:
- linux
- osx
branches:
only:
@@ -14,81 +18,33 @@ addons:
- ubuntu-toolchain-r-test
packages:
- g++-4.9
- expect
- binutils-mingw-w64-x86-64 # 64bit MinGW
- gcc-mingw-w64-x86-64
- g++-mingw-w64-x86-64
matrix:
include:
- os: linux
env: CONF=release BUILDER=gcc TARGET=Linux TAG=Linux COMPILATOR_OPTION="--compilator-version=4.9" GCOV=--gcov
compiler: gcc
- os: linux
env: CONF=debug BUILDER=clang TARGET=Linux
compiler: clang
- os: linux
env: CONF=release BUILDER=gcc TARGET=Windows TAG=Mingw
compiler: x86_64-w64-mingw32-gcc
- os: linux
env: CONF=release BUILDER=gcc TARGET=Android TAG=Android DISABLE_PACKAGE=-p
compiler: gcc
- os: osx
env: CONF=release BUILDER=clang TARGET=MacOs TAG=MacOs
compiler: clang
- os: osx
env: CONF=release BUILDER=clang TARGET=IOs TAG=IOs
compiler: clang
install:
- cd ..
- pip install --user lutin
- if [ "$TAG" == "Android" ]; then
git clone --depth 1 --branch master https://github.com/HeeroYui/android-download-tool;
./android-download-tool/dl-android.sh;
fi
- git clone --depth 1 --branch master https://github.com/atria-soft/ci.git
- cd -
env:
- CONF=debug BOARD=Linux BUILDER=clang GCOV=
- CONF=release BOARD=Linux BUILDER=clang GCOV=
- CONF=debug BOARD=Linux BUILDER=gcc GCOV=
- CONF=release BOARD=Linux BUILDER=gcc GCOV=
- CONF=debug BOARD=Linux BUILDER=gcc GCOV=--gcov
before_script:
- cd ..
- git clone https://github.com/atria-soft/etk.git -b $TRAVIS_BRANCH
- git clone https://github.com/atria-soft/elog.git -b $TRAVIS_BRANCH
- git clone https://github.com/atria-soft/ememory.git -b $TRAVIS_BRANCH
- git clone https://github.com/atria-soft/echrono.git -b $TRAVIS_BRANCH
- git clone https://github.com/atria-soft/ethread.git -b $TRAVIS_BRANCH
- git clone https://github.com/atria-soft/ejson.git -b $TRAVIS_BRANCH
- git clone https://github.com/atria-soft/jvm-basics.git -b $TRAVIS_BRANCH
- git clone https://github.com/musicdsp/audio.git -b $TRAVIS_BRANCH
- git clone https://github.com/generic-library/gtest-lutin.git --recursive
- git clone https://github.com/generic-library/z-lutin.git --recursive
- wget http://atria-soft.com/ci/coverage_send.py
- wget http://atria-soft.com/ci/test_send.py
- wget http://atria-soft.com/ci/warning_send.py
- git clone https://github.com/atria-soft/etk.git
- git clone https://github.com/musicdsp/audio.git
- pwd
- ls -l
- if [ "$TRAVIS_OS_NAME" == "osx" ]; then
export PATH=$PATH:/Users/travis/Library/Python/2.7/bin/;
fi
- ./ci/build_send.py --tag=$TAG --status=START;
- if [ "$BUILDER" == "gcc" ]; then COMPILATOR_OPTION="--compilator-version=4.9"; else COMPILATOR_OPTION=""; fi
script:
- lutin -w -j4 -C -P -t$TARGET -c $BUILDER $COMPILATOR_OPTION $BUS -m $CONF $GCOV $DISABLE_PACKAGE audio-orchestra; STATUS=$?
- ./ci/build_send.py --tag=$TAG --status="$STATUS";
after_script:
- if [ "$GCOV" != "" ]; then
./ci/warning_send.py --find-path ./out/Linux_x86_64/$CONF/build/$BUILDER/audio-orchestra/;
fi
#- lutin -w -j4 -C -P -t$TARGET -c $BUILDER $COMPILATOR_OPTION $BUS -m $CONF $GCOV $DISABLE_PACKAGE audio-orchestra-test?run:--elog-level=3 | tee out_test.txt
#- if [ "$GCOV" != "" ]; then
# ./ci/test_send.py --file=out_test.txt;
# lutin -C -P -t $TARGET -c $BUILDER $COMPILATOR_OPTION $BUS -m $CONF -p audio-orchestra?gcov;
# ./ci/coverage_send.py --json=out/Linux_x86_64/$CONF/build/$BUILDER/audio-orchestra/audio-orchestra_coverage.json;
# fi
- lutin -w -j4 -C -P -c $BUILDER $COMPILATOR_OPTION -m $CONF $GCOV -p audio-orchestra
# - ./out/Linux_x86_64/$CONF/staging/$BUILDER/audio-orchestra/usr/bin/audio-orchestra -l6
notifications:
email:
- yui.heero@gmail.com


@@ -1,4 +1,4 @@
# audio-orchestra
(MIT) audio: backend for hardware audio access (fork of the original RTAudio lib)
------------------------------
Nothing else ...
[![Build Status](https://travis-ci.org/musicdsp/audio-orchestra.svg?branch=master)](https://travis-ci.org/musicdsp/audio-orchestra)


@@ -1,13 +0,0 @@
/**
* @author Edouard DUPIN
*
* @copyright 2015, Edouard DUPIN, all right reserved
*
* @license APACHE v2.0 (see license file)
*/
package org.musicdsp.orchestra;
public interface OrchestraConstants {
public static final int BUFFER_SIZE = 512;
}


@@ -1,119 +0,0 @@
/**
* @author Edouard DUPIN, Kevin BILLONNEAU
*
* @copyright 2015, Edouard DUPIN, all right reserved
*
* @license APACHE v2.0 (see license file)
*/
package org.musicdsp.orchestra;
import android.media.AudioRecord;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.util.Log;
public class OrchestraInterfaceInput implements Runnable, OrchestraConstants {
private Thread m_thread = null;
private int m_uid = -1;
private OrchestraNative m_orchestraNativeHandle;
private boolean m_stop = false;
private boolean m_suspend = false;
private AudioRecord m_audio = null;
private int m_sampleRate = 48000;
private int m_nbChannel = 2;
private int m_format = 1;
private int m_bufferSize = BUFFER_SIZE;
public OrchestraInterfaceInput(int _id, OrchestraNative _instance, int _idDevice, int _sampleRate, int _nbChannel, int _format) {
Log.d("InterfaceInput", "new: Input");
m_uid = _id;
m_orchestraNativeHandle = _instance;
m_stop = false;
m_suspend = false;
m_sampleRate = _sampleRate;
m_nbChannel = _nbChannel;
m_format = _format;
m_bufferSize = BUFFER_SIZE * m_nbChannel;
}
public int getUId() {
return m_uid;
}
public void run() {
Log.e("InterfaceInput", "RUN (start)");
int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
// we keep the minimum buffer size, otherwise the delay is too big ...
// TODO : int bufferSize = AudioRecord.getMinBufferSize(m_sampleRate, channelConfig, audioFormat);
int config = 0;
if (m_nbChannel == 1) {
config = AudioFormat.CHANNEL_IN_MONO;
} else {
config = AudioFormat.CHANNEL_IN_STEREO;
}
// Create a streaming AudioRecord for capture
short[] streamBuffer = new short[m_bufferSize];
m_audio = new AudioRecord(MediaRecorder.AudioSource.MIC,
m_sampleRate,
config,
audioFormat,
m_bufferSize);
m_audio.startRecording();
while ( m_stop == false
&& m_suspend == false) {
// Stream PCM data into the local buffer
m_audio.read(streamBuffer, 0, m_bufferSize);
// Send it to C++
m_orchestraNativeHandle.record(m_uid, streamBuffer, m_bufferSize/m_nbChannel);
}
m_audio.stop();
m_audio = null;
streamBuffer = null;
Log.e("InterfaceInput", "RUN (stop)");
}
public void autoStart() {
m_stop=false;
if (m_suspend == false) {
Log.e("InterfaceInput", "Create thread");
m_thread = new Thread(this);
Log.e("InterfaceInput", "start thread");
m_thread.start();
Log.e("InterfaceInput", "start thread (done)");
}
}
public void autoStop() {
if(m_audio == null) {
return;
}
m_stop=true;
m_thread = null;
/*
try {
super.join();
} catch(InterruptedException e) { }
*/
}
public void activityResume() {
m_suspend = false;
if (m_stop == false) {
Log.i("InterfaceInput", "Resume audio stream : " + m_uid);
m_thread = new Thread(this);
m_thread.start();
}
}
public void activityPause() {
if(m_audio == null) {
return;
}
m_suspend = true;
Log.i("InterfaceInput", "Pause audio stream : " + m_uid);
m_thread = null;
}
}


@@ -1,108 +0,0 @@
/**
* @author Edouard DUPIN, Kevin BILLONNEAU
*
* @copyright 2015, Edouard DUPIN, all right reserved
*
* @license APACHE v2.0 (see license file)
*/
package org.musicdsp.orchestra;
import android.media.AudioTrack;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.util.Log;
public class OrchestraInterfaceOutput extends Thread implements OrchestraConstants {
private int m_uid = -1;
private OrchestraNative m_orchestraNativeHandle;
private boolean m_stop = false;
private boolean m_suspend = false;
private AudioTrack m_audio = null;
private int m_sampleRate = 48000;
private int m_nbChannel = 2;
private int m_format = 1;
private int m_bufferSize = BUFFER_SIZE;
public OrchestraInterfaceOutput(int _id, OrchestraNative _instance, int _idDevice, int _sampleRate, int _nbChannel, int _format) {
Log.d("InterfaceOutput", "new: output");
m_uid = _id;
m_orchestraNativeHandle = _instance;
m_stop = true;
m_sampleRate = _sampleRate;
m_nbChannel = _nbChannel;
m_format = _format;
m_bufferSize = BUFFER_SIZE * m_nbChannel;
}
public int getUId() {
return m_uid;
}
public void run() {
Log.e("InterfaceOutput", "RUN (start)");
int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
// we keep the minimum buffer size, otherwise the delay is too big ...
//int bufferSize = AudioTrack.getMinBufferSize(m_sampleRate, channelConfig, audioFormat);
int config = 0;
if (m_nbChannel == 1) {
config = AudioFormat.CHANNEL_OUT_MONO;
} else if (m_nbChannel == 4) {
config = AudioFormat.CHANNEL_OUT_QUAD;
} else {
config = AudioFormat.CHANNEL_OUT_STEREO;
}
// Create a streaming AudioTrack for music playback
short[] streamBuffer = new short[m_bufferSize];
m_audio = new AudioTrack(AudioManager.STREAM_MUSIC,
m_sampleRate,
config,
audioFormat,
m_bufferSize,
AudioTrack.MODE_STREAM);
m_audio.play();
//m_audio.setPositionNotificationPeriod(2048);
while (m_stop == false) {
// Fill buffer with PCM data from C++
m_orchestraNativeHandle.playback(m_uid, streamBuffer, m_bufferSize/m_nbChannel);
// Stream PCM data into the music AudioTrack
m_audio.write(streamBuffer, 0, m_bufferSize);
}
m_audio.flush();
m_audio.stop();
m_audio = null;
streamBuffer = null;
Log.e("InterfaceOutput", "RUN (stop)");
}
public void autoStart() {
m_stop=false;
this.start();
}
public void autoStop() {
if(m_audio == null) {
return;
}
m_stop=true;
try {
super.join();
} catch(InterruptedException e) { }
}
public void activityResume() {
if (m_audio != null) {
Log.i("InterfaceOutput", "Resume audio stream : " + m_uid);
m_audio.play();
}
}
public void activityPause() {
if(m_audio == null) {
return;
}
if (m_audio != null) {
Log.i("InterfaceOutput", "Pause audio stream : " + m_uid);
m_audio.pause();
}
}
}


@@ -1,262 +0,0 @@
/**
* @author Edouard DUPIN
*
* @copyright 2015, Edouard DUPIN, all right reserved
*
* @license APACHE v2.0 (see license file)
*/
package org.musicdsp.orchestra;
import android.util.Log;
import java.util.Vector;
//import org.musicdsp.orchestra.Constants;
//import org.musicdsp.orchestra.ManagerCallback;
//import org.musicdsp.orchestra.Orchestra;
//import org.musicdsp.orchestra.InterfaceOutput;
//import org.musicdsp.orchestra.InterfaceInput;
/**
* @brief Class :
*
*/
public class OrchestraManager implements OrchestraManagerCallback, OrchestraConstants {
private OrchestraNative m_orchestraHandle;
private int m_uid = 0;
private Vector<OrchestraInterfaceOutput> m_outputList;
private Vector<OrchestraInterfaceInput> m_inputList;
public OrchestraManager() {
// set the java environment in the C sources:
m_orchestraHandle = new OrchestraNative(this);
m_outputList = new Vector<OrchestraInterfaceOutput>();
m_inputList = new Vector<OrchestraInterfaceInput>();
}
public int getDeviceCount() {
Log.e("Manager", "Get device List");
return 2;
}
public String getDeviceProperty(int _idDevice) {
if (_idDevice == 0) {
return "{\n"
+ " name:'speaker',\n"
+ " type:'output',\n"
+ " sample-rate:[8000,16000,24000,32000,48000,96000],\n"
+ " channels:['front-left','front-right'],\n"
+ " format:['int16'],\n"
+ " default:true\n"
+ "}";
} else if (_idDevice == 1) {
return "{\n"
+ " name:'microphone',\n"
+ " type:'input',\n"
+ " sample-rate:[8000,16000,24000,32000,48000,96000],\n"
+ " channels:['front-left','front-right'],\n"
+ " format:['int16'],\n"
+ " default:true\n"
+ "}";
} else {
return "{}";
}
}
public int openDeviceOutput(int _idDevice, int _freq, int _nbChannel, int _format) {
OrchestraInterfaceOutput iface = new OrchestraInterfaceOutput(m_uid, m_orchestraHandle, _idDevice, _freq, _nbChannel, _format);
m_uid++;
Log.e("Manager", "Open device Output: " + _idDevice + " with m_uid=" + (m_uid-1));
if (iface != null) {
m_outputList.add(iface);
Log.e("Manager", "Added element count=" + m_outputList.size());
return m_uid-1;
}
return -1;
}
public int openDeviceInput(int _idDevice, int _freq, int _nbChannel, int _format) {
OrchestraInterfaceInput iface = new OrchestraInterfaceInput(m_uid, m_orchestraHandle, _idDevice, _freq, _nbChannel, _format);
m_uid++;
Log.e("Manager", "Open device Input: " + _idDevice + " with m_uid=" + (m_uid-1));
if (iface != null) {
m_inputList.add(iface);
return m_uid-1;
}
return -1;
}
public boolean closeDevice(int _uniqueID) {
Log.e("Manager", "Close device : " + _uniqueID);
if (_uniqueID<0) {
Log.e("Manager", "Can not Close device with m_uid: " + _uniqueID);
return false;
}
// find the element with its ID:
if (m_inputList != null) {
for (int iii=0; iii<m_inputList.size(); iii++) {
if (m_inputList.get(iii) == null) {
Log.e("Manager", "Null input element: " + iii);
continue;
}
if (m_inputList.get(iii).getUId() == _uniqueID) {
// find it ...
m_inputList.remove(iii);
return true;
}
}
}
if (m_outputList != null) {
for (int iii=0; iii<m_outputList.size(); iii++) {
if (m_outputList.get(iii) == null) {
Log.e("Manager", "Null output element: " + iii);
continue;
}
if (m_outputList.get(iii).getUId() == _uniqueID) {
// find it ...
m_outputList.remove(iii);
return true;
}
}
}
Log.e("Manager", "Can not close device with m_uid: " + _uniqueID + " Element does not exist ...");
return false;
}
public boolean start(int _uniqueID) {
Log.e("Manager", "start device : " + _uniqueID);
if (_uniqueID<0) {
Log.e("Manager", "Can not start device with m_uid: " + _uniqueID);
return false;
}
// find the element with its ID:
if (m_inputList != null) {
for (int iii=0; iii<m_inputList.size(); iii++) {
if (m_inputList.get(iii) == null) {
Log.e("Manager", "Null input element: " + iii);
continue;
}
if (m_inputList.get(iii).getUId() == _uniqueID) {
// find it ...
m_inputList.get(iii).autoStart();
return true;
}
}
}
if (m_outputList != null) {
for (int iii=0; iii<m_outputList.size(); iii++) {
if (m_outputList.get(iii) == null) {
Log.e("Manager", "Null output element: " + iii);
continue;
}
if (m_outputList.get(iii).getUId() == _uniqueID) {
// find it ...
m_outputList.get(iii).autoStart();
return true;
}
}
}
Log.e("Manager", "Can not start device with UID: " + _uniqueID + " Element does not exist ...");
return false;
}
public boolean stop(int _uniqueID) {
Log.e("Manager", "stop device : " + _uniqueID);
if (_uniqueID<0) {
Log.e("Manager", "Can not stop device with UID: " + _uniqueID);
return false;
}
// find the element with its ID:
if (m_inputList != null) {
for (int iii=0; iii<m_inputList.size(); iii++) {
if (m_inputList.get(iii) == null) {
Log.e("Manager", "Null input element: " + iii);
continue;
}
if (m_inputList.get(iii).getUId() == _uniqueID) {
// find it ...
m_inputList.get(iii).autoStop();
return true;
}
}
}
if (m_outputList != null) {
for (int iii=0; iii<m_outputList.size(); iii++) {
if (m_outputList.get(iii) == null) {
Log.e("Manager", "Null output element: " + iii);
continue;
}
if (m_outputList.get(iii).getUId() == _uniqueID) {
// find it ...
m_outputList.get(iii).autoStop();
return true;
}
}
}
Log.e("Manager", "Can not stop device with UID: " + _uniqueID + " Element does not exist ...");
return false;
}
public void onCreate() {
Log.w("Manager", "onCreate ...");
// nothing to do ...
}
public void onStart() {
Log.w("Manager", "onStart ...");
// nothing to do ...
}
public void onRestart() {
Log.w("Manager", "onRestart ...");
// nothing to do ...
}
public void onResume() {
Log.w("Manager", "onResume ...");
// notify all registered interfaces:
if (m_inputList != null) {
for (int iii=0; iii<m_inputList.size(); iii++) {
if (m_inputList.get(iii) == null) {
Log.e("Manager", "Null input element: " + iii);
continue;
}
m_inputList.get(iii).activityResume();
}
}
if (m_outputList != null) {
for (int iii=0; iii<m_outputList.size(); iii++) {
if (m_outputList.get(iii) == null) {
Log.e("Manager", "Null output element: " + iii);
continue;
}
m_outputList.get(iii).activityResume();
}
}
}
public void onPause() {
Log.w("Manager", "onPause ...");
// notify all registered interfaces:
if (m_inputList != null) {
for (int iii=0; iii<m_inputList.size(); iii++) {
if (m_inputList.get(iii) == null) {
Log.e("Manager", "Null input element: " + iii);
continue;
}
m_inputList.get(iii).activityPause();
}
}
if (m_outputList != null) {
for (int iii=0; iii<m_outputList.size(); iii++) {
if (m_outputList.get(iii) == null) {
Log.e("Manager", "Null output element: " + iii);
continue;
}
m_outputList.get(iii).activityPause();
}
}
}
public void onStop() {
Log.w("Manager", "onStop ...");
}
public void onDestroy() {
Log.w("Manager", "onDestroy ...");
}
}


@@ -1,19 +0,0 @@
/**
* @author Edouard DUPIN, Kevin BILLONNEAU
*
* @copyright 2015, Edouard DUPIN, all right reserved
*
* @license APACHE v2.0 (see license file)
*/
package org.musicdsp.orchestra;
public interface OrchestraManagerCallback {
public int getDeviceCount();
public String getDeviceProperty(int _idDevice);
public int openDeviceInput(int _idDevice, int _sampleRate, int _nbChannel, int _format);
public int openDeviceOutput(int _idDevice, int _sampleRate, int _nbChannel, int _format);
public boolean closeDevice(int _uniqueID);
public boolean start(int _uniqueID);
public boolean stop(int _uniqueID);
}


@@ -1,43 +0,0 @@
/**
* @author Edouard DUPIN, Kevin BILLONNEAU
*
* @copyright 2015, Edouard DUPIN, all right reserved
*
* @license APACHE v2.0 (see license file)
*/
package org.musicdsp.orchestra;
import java.lang.UnsatisfiedLinkError;
import java.lang.RuntimeException;
import android.util.Log;
public class OrchestraNative {
public <T extends OrchestraManagerCallback> OrchestraNative(T _managerInstance) {
try {
NNsetJavaManager(_managerInstance);
} catch (java.lang.UnsatisfiedLinkError e) {
Log.e("Orchestra", "JNI binding not present ...");
throw new RuntimeException("Orchestra binding not present ...");
}
Log.d("Orchestra", "new ...");
}
public void setManagerRemove() {
NNsetJavaManagerRemove();
}
public void playback(int _flowId, short[] _bufferData, int _nbChunk) {
NNPlayback(_flowId, _bufferData, _nbChunk);
}
public void record(int _flowId, short[] _bufferData, int _nbChunk) {
NNRecord(_flowId, _bufferData, _nbChunk);
}
private native <T extends OrchestraManagerCallback> void NNsetJavaManager(T _managerInstance);
private native void NNsetJavaManagerRemove();
private native void NNPlayback(int _flowId, short[] _bufferData, int _nbChunk);
private native void NNRecord(int _flowId, short[] _bufferData, int _nbChunk);
}


@@ -1,478 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
//#include <etk/types.hpp>
#include <audio/orchestra/Interface.hpp>
#include <audio/orchestra/debug.hpp>
#include <etk/types.hpp>
// Static variable definitions.
const etk::Vector<uint32_t>& audio::orchestra::genericSampleRate() {
static etk::Vector<uint32_t> list;
if (list.size() == 0) {
list.pushBack(4000);
list.pushBack(5512);
list.pushBack(8000);
list.pushBack(9600);
list.pushBack(11025);
list.pushBack(16000);
list.pushBack(22050);
list.pushBack(32000);
list.pushBack(44100);
list.pushBack(48000);
list.pushBack(64000);
list.pushBack(88200);
list.pushBack(96000);
list.pushBack(128000);
list.pushBack(176400);
list.pushBack(192000);
list.pushBack(256000);
}
return list;
};
audio::orchestra::Api::Api() :
m_callback(null),
m_deviceBuffer(null) {
m_device[0] = 11111;
m_device[1] = 11111;
m_state = audio::orchestra::state::closed;
m_mode = audio::orchestra::mode_unknow;
}
audio::orchestra::Api::~Api() {
}
enum audio::orchestra::error audio::orchestra::Api::startStream() {
ATA_VERBOSE("Start Stream");
m_startTime = audio::Time::now();
m_duration = echrono::microseconds(0);
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::Api::openStream(audio::orchestra::StreamParameters* _oParams,
audio::orchestra::StreamParameters* _iParams,
enum audio::format _format,
uint32_t _sampleRate,
uint32_t* _bufferFrames,
audio::orchestra::AirTAudioCallback _callback,
const audio::orchestra::StreamOptions& _options) {
if (m_state != audio::orchestra::state::closed) {
ATA_ERROR("a stream is already open!");
return audio::orchestra::error_invalidUse;
}
if ( _oParams != null
&& _oParams->nChannels < 1) {
ATA_ERROR("a non-null output StreamParameters structure cannot have an nChannels value less than one.");
return audio::orchestra::error_invalidUse;
}
if ( _iParams != null
&& _iParams->nChannels < 1) {
ATA_ERROR("a non-null input StreamParameters structure cannot have an nChannels value less than one.");
return audio::orchestra::error_invalidUse;
}
if ( _oParams == null
&& _iParams == null) {
ATA_ERROR("input and output StreamParameters structures are both null!");
return audio::orchestra::error_invalidUse;
}
if (audio::getFormatBytes(_format) == 0) {
ATA_ERROR("'format' parameter value is undefined.");
return audio::orchestra::error_invalidUse;
}
uint32_t nDevices = getDeviceCount();
uint32_t oChannels = 0;
if (_oParams != null) {
oChannels = _oParams->nChannels;
if ( _oParams->deviceId >= nDevices
&& _oParams->deviceName == "") {
ATA_ERROR("output device parameter value is invalid.");
return audio::orchestra::error_invalidUse;
}
}
uint32_t iChannels = 0;
if (_iParams != null) {
iChannels = _iParams->nChannels;
if ( _iParams->deviceId >= nDevices
&& _iParams->deviceName == "") {
ATA_ERROR("input device parameter value is invalid.");
return audio::orchestra::error_invalidUse;
}
}
clearStreamInfo();
bool result;
if (oChannels > 0) {
if (_oParams->deviceId == -1) {
result = openName(_oParams->deviceName,
audio::orchestra::mode_output,
oChannels,
_oParams->firstChannel,
_sampleRate,
_format,
_bufferFrames,
_options);
} else {
result = open(_oParams->deviceId,
audio::orchestra::mode_output,
oChannels,
_oParams->firstChannel,
_sampleRate,
_format,
_bufferFrames,
_options);
}
if (result == false) {
ATA_ERROR("system ERROR");
return audio::orchestra::error_systemError;
}
}
if (iChannels > 0) {
if (_iParams->deviceId == -1) {
result = openName(_iParams->deviceName,
audio::orchestra::mode_input,
iChannels,
_iParams->firstChannel,
_sampleRate,
_format,
_bufferFrames,
_options);
} else {
result = open(_iParams->deviceId,
audio::orchestra::mode_input,
iChannels,
_iParams->firstChannel,
_sampleRate,
_format,
_bufferFrames,
_options);
}
if (result == false) {
if (oChannels > 0) {
closeStream();
}
ATA_ERROR("system error");
return audio::orchestra::error_systemError;
}
}
m_callback = _callback;
//_options.numberOfBuffers = m_nBuffers;
m_state = audio::orchestra::state::stopped;
return audio::orchestra::error_none;
}
uint32_t audio::orchestra::Api::getDefaultInputDevice() {
// Should be implemented in subclasses if possible.
return 0;
}
uint32_t audio::orchestra::Api::getDefaultOutputDevice() {
// Should be implemented in subclasses if possible.
return 0;
}
enum audio::orchestra::error audio::orchestra::Api::closeStream() {
ATA_VERBOSE("Close Stream");
// MUST be implemented in subclasses!
return audio::orchestra::error_none;
}
bool audio::orchestra::Api::open(uint32_t /*device*/,
audio::orchestra::mode /*mode*/,
uint32_t /*channels*/,
uint32_t /*firstChannel*/,
uint32_t /*sampleRate*/,
audio::format /*format*/,
uint32_t * /*bufferSize*/,
const audio::orchestra::StreamOptions& /*options*/) {
// MUST be implemented in subclasses!
return false;
}
void audio::orchestra::Api::tickStreamTime() {
//ATA_WARNING("tick : size=" << m_bufferSize << " rate=" << m_sampleRate << " time=" << audio::Duration((int64_t(m_bufferSize) * int64_t(1000000000)) / int64_t(m_sampleRate)).count());
//ATA_WARNING(" one element=" << audio::Duration((int64_t(1000000000)) / int64_t(m_sampleRate)).count());
m_duration += audio::Duration((int64_t(m_bufferSize) * int64_t(1000000000)) / int64_t(m_sampleRate));
}
long audio::orchestra::Api::getStreamLatency() {
if (verifyStream() != audio::orchestra::error_none) {
return 0;
}
long totalLatency = 0;
if ( m_mode == audio::orchestra::mode_output
|| m_mode == audio::orchestra::mode_duplex) {
totalLatency = m_latency[0];
}
if ( m_mode == audio::orchestra::mode_input
|| m_mode == audio::orchestra::mode_duplex) {
totalLatency += m_latency[1];
}
return totalLatency;
}
audio::Time audio::orchestra::Api::getStreamTime() {
if (verifyStream() != audio::orchestra::error_none) {
return audio::Time();
}
return m_startTime + m_duration;
}
uint32_t audio::orchestra::Api::getStreamSampleRate() {
if (verifyStream() != audio::orchestra::error_none) {
return 0;
}
return m_sampleRate;
}
enum audio::orchestra::error audio::orchestra::Api::verifyStream() {
if (m_state == audio::orchestra::state::closed) {
ATA_ERROR("a stream is not open!");
return audio::orchestra::error_invalidUse;
}
return audio::orchestra::error_none;
}
void audio::orchestra::Api::clearStreamInfo() {
m_mode = audio::orchestra::mode_unknow;
m_state = audio::orchestra::state::closed;
m_sampleRate = 0;
m_bufferSize = 0;
m_nBuffers = 0;
m_userFormat = audio::format_unknow;
m_startTime = audio::Time();
m_duration = audio::Duration(0);
m_deviceBuffer = null;
m_callback = null;
for (int32_t iii=0; iii<2; ++iii) {
m_device[iii] = 11111;
m_doConvertBuffer[iii] = false;
m_deviceInterleaved[iii] = true;
m_doByteSwap[iii] = false;
m_nUserChannels[iii] = 0;
m_nDeviceChannels[iii] = 0;
m_channelOffset[iii] = 0;
m_deviceFormat[iii] = audio::format_unknow;
m_latency[iii] = 0;
m_userBuffer[iii].clear();
m_convertInfo[iii].channels = 0;
m_convertInfo[iii].inJump = 0;
m_convertInfo[iii].outJump = 0;
m_convertInfo[iii].inFormat = audio::format_unknow;
m_convertInfo[iii].outFormat = audio::format_unknow;
m_convertInfo[iii].inOffset.clear();
m_convertInfo[iii].outOffset.clear();
}
}
void audio::orchestra::Api::setConvertInfo(audio::orchestra::mode _mode, uint32_t _firstChannel) {
int32_t idTable = audio::orchestra::modeToIdTable(_mode);
if (_mode == audio::orchestra::mode_input) { // convert device to user buffer
m_convertInfo[idTable].inJump = m_nDeviceChannels[1];
m_convertInfo[idTable].outJump = m_nUserChannels[1];
m_convertInfo[idTable].inFormat = m_deviceFormat[1];
m_convertInfo[idTable].outFormat = m_userFormat;
} else { // convert user to device buffer
m_convertInfo[idTable].inJump = m_nUserChannels[0];
m_convertInfo[idTable].outJump = m_nDeviceChannels[0];
m_convertInfo[idTable].inFormat = m_userFormat;
m_convertInfo[idTable].outFormat = m_deviceFormat[0];
}
if (m_convertInfo[idTable].inJump < m_convertInfo[idTable].outJump) {
m_convertInfo[idTable].channels = m_convertInfo[idTable].inJump;
} else {
m_convertInfo[idTable].channels = m_convertInfo[idTable].outJump;
}
// Set up the interleave/deinterleave offsets.
if (m_deviceInterleaved[idTable] == false) {
if (_mode == audio::orchestra::mode_input) {
for (int32_t kkk=0; kkk<m_convertInfo[idTable].channels; ++kkk) {
m_convertInfo[idTable].inOffset.pushBack(kkk * m_bufferSize);
m_convertInfo[idTable].outOffset.pushBack(kkk);
m_convertInfo[idTable].inJump = 1;
}
} else {
for (int32_t kkk=0; kkk<m_convertInfo[idTable].channels; ++kkk) {
m_convertInfo[idTable].inOffset.pushBack(kkk);
m_convertInfo[idTable].outOffset.pushBack(kkk * m_bufferSize);
m_convertInfo[idTable].outJump = 1;
}
}
} else { // no (de)interleaving
for (int32_t kkk=0; kkk<m_convertInfo[idTable].channels; ++kkk) {
m_convertInfo[idTable].inOffset.pushBack(kkk);
m_convertInfo[idTable].outOffset.pushBack(kkk);
}
}
// Add channel offset.
if (_firstChannel > 0) {
if (m_deviceInterleaved[idTable]) {
if (_mode == audio::orchestra::mode_output) {
for (int32_t kkk=0; kkk<m_convertInfo[idTable].channels; ++kkk) {
m_convertInfo[idTable].outOffset[kkk] += _firstChannel;
}
} else {
for (int32_t kkk=0; kkk<m_convertInfo[idTable].channels; ++kkk) {
m_convertInfo[idTable].inOffset[kkk] += _firstChannel;
}
}
} else {
if (_mode == audio::orchestra::mode_output) {
for (int32_t kkk=0; kkk<m_convertInfo[idTable].channels; ++kkk) {
m_convertInfo[idTable].outOffset[kkk] += (_firstChannel * m_bufferSize);
}
} else {
for (int32_t kkk=0; kkk<m_convertInfo[idTable].channels; ++kkk) {
m_convertInfo[idTable].inOffset[kkk] += (_firstChannel * m_bufferSize);
}
}
}
}
}
void audio::orchestra::Api::convertBuffer(char *_outBuffer, char *_inBuffer, audio::orchestra::ConvertInfo &_info) {
// This function does format conversion, input/output channel compensation, and
// data interleaving/deinterleaving. 24-bit integers are assumed to occupy
// the lower three bytes of a 32-bit integer.
// Clear our device buffer when in/out duplex device channels are different
if ( _outBuffer == m_deviceBuffer
&& m_mode == audio::orchestra::mode_duplex
&& m_nDeviceChannels[0] < m_nDeviceChannels[1]) {
memset(_outBuffer, 0, m_bufferSize * _info.outJump * audio::getFormatBytes(_info.outFormat));
}
switch (audio::getFormatBytes(_info.outFormat)) {
case 1:
{
uint8_t *out = reinterpret_cast<uint8_t*>(_outBuffer);
uint8_t *in = reinterpret_cast<uint8_t*>(_inBuffer);
for (size_t iii=0; iii<m_bufferSize; ++iii) {
for (size_t jjj=0; jjj<_info.channels; jjj++) {
out[_info.outOffset[jjj]] = in[_info.inOffset[jjj]];
}
in += _info.inJump;
out += _info.outJump;
}
}
break;
case 2:
{
uint16_t *out = reinterpret_cast<uint16_t*>(_outBuffer);
uint16_t *in = reinterpret_cast<uint16_t*>(_inBuffer);
for (size_t iii=0; iii<m_bufferSize; ++iii) {
for (size_t jjj=0; jjj<_info.channels; jjj++) {
out[_info.outOffset[jjj]] = in[_info.inOffset[jjj]];
}
in += _info.inJump;
out += _info.outJump;
}
}
break;
case 4:
{
uint32_t *out = reinterpret_cast<uint32_t*>(_outBuffer);
uint32_t *in = reinterpret_cast<uint32_t*>(_inBuffer);
for (size_t iii=0; iii<m_bufferSize; ++iii) {
for (size_t jjj=0; jjj<_info.channels; jjj++) {
out[_info.outOffset[jjj]] = in[_info.inOffset[jjj]];
}
in += _info.inJump;
out += _info.outJump;
}
}
break;
case 8:
{
uint64_t *out = reinterpret_cast<uint64_t*>(_outBuffer);
uint64_t *in = reinterpret_cast<uint64_t*>(_inBuffer);
for (size_t iii=0; iii<m_bufferSize; ++iii) {
for (size_t jjj=0; jjj<_info.channels; jjj++) {
out[_info.outOffset[jjj]] = in[_info.inOffset[jjj]];
}
in += _info.inJump;
out += _info.outJump;
}
}
break;
}
}
void audio::orchestra::Api::byteSwapBuffer(char *_buffer, uint32_t _samples, audio::format _format) {
char val;
char *ptr;
ptr = _buffer;
if (_format == audio::format_int16) {
for (uint32_t iii=0; iii<_samples; ++iii) {
// Swap 1st and 2nd bytes.
val = *(ptr);
*(ptr) = *(ptr+1);
*(ptr+1) = val;
// Increment 2 bytes.
ptr += 2;
}
} else if ( _format == audio::format_int32
|| _format == audio::format_float) {
for (uint32_t iii=0; iii<_samples; ++iii) {
// Swap 1st and 4th bytes.
val = *(ptr);
*(ptr) = *(ptr+3);
*(ptr+3) = val;
// Swap 2nd and 3rd bytes.
ptr += 1;
val = *(ptr);
*(ptr) = *(ptr+1);
*(ptr+1) = val;
// Increment 3 more bytes.
ptr += 3;
}
} else if (_format == audio::format_int24) {
for (uint32_t iii=0; iii<_samples; ++iii) {
// Swap 1st and 3rd bytes.
val = *(ptr);
*(ptr) = *(ptr+2);
*(ptr+2) = val;
// Increment 2 more bytes.
ptr += 2;
}
} else if (_format == audio::format_double) {
for (uint32_t iii=0; iii<_samples; ++iii) {
// Swap 1st and 8th bytes
val = *(ptr);
*(ptr) = *(ptr+7);
*(ptr+7) = val;
// Swap 2nd and 7th bytes
ptr += 1;
val = *(ptr);
*(ptr) = *(ptr+5);
*(ptr+5) = val;
// Swap 3rd and 6th bytes
ptr += 1;
val = *(ptr);
*(ptr) = *(ptr+3);
*(ptr+3) = val;
// Swap 4th and 5th bytes
ptr += 1;
val = *(ptr);
*(ptr) = *(ptr+1);
*(ptr+1) = val;
// Increment 5 more bytes.
ptr += 5;
}
}
}
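For illustration, a standalone sketch of the 16-bit swap performed by the audio::format_int16 branch of byteSwapBuffer() above; the helper name and test values are hypothetical and not taken from the repository:

#include <cstdint>
#include <cstdio>

// Swap the two bytes of every 16-bit sample in place, following the same
// pointer walk as the int16 branch of audio::orchestra::Api::byteSwapBuffer().
static void byteSwapInt16(char* _buffer, uint32_t _samples) {
    char* ptr = _buffer;
    for (uint32_t iii = 0; iii < _samples; ++iii) {
        char val = *ptr;
        *ptr = *(ptr + 1);
        *(ptr + 1) = val;
        ptr += 2; // advance to the next sample
    }
}

int main() {
    int16_t samples[2] = {int16_t(0x1234), int16_t(0x5678)};
    byteSwapInt16(reinterpret_cast<char*>(samples), 2);
    // On a little-endian machine this prints 0x3412 0x7856.
    printf("0x%04x 0x%04x\n",
           (unsigned)(uint16_t)samples[0],
           (unsigned)(uint16_t)samples[1]);
    return 0;
}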


@@ -1,180 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <etk/Stream.hpp>
#include <audio/orchestra/debug.hpp>
#include <audio/orchestra/type.hpp>
#include <audio/orchestra/state.hpp>
#include <audio/orchestra/mode.hpp>
#include <audio/Time.hpp>
#include <audio/Duration.hpp>
#include <ememory/memory.hpp>
/**
* @brief Audio library namespace
*/
namespace audio {
/**
* @brief Audio-orchestra library namespace
*/
namespace orchestra {
const etk::Vector<uint32_t>& genericSampleRate();
/**
* @brief airtaudio callback function prototype.
* @param _inputBuffer For input (or duplex) streams, this buffer will hold _nbChunk of input audio chunk (null if no data).
* @param _timeInput Timestamp of the first buffer sample (recording time).
* @param _outputBuffer For output (or duplex) streams, the client should write _nbChunk of audio chunk into this buffer (null if no data).
* @param _timeOutput Timestamp of the first buffer sample (playing time).
* @param _nbChunk The number of chunk of input or output chunk in the buffer (same size).
* @param _status List of errors that occurred in this lapse of time.
*/
typedef etk::Function<int32_t (const void* _inputBuffer,
const audio::Time& _timeInput,
void* _outputBuffer,
const audio::Time& _timeOutput,
uint32_t _nbChunk,
const etk::Vector<audio::orchestra::status>& _status)> AirTAudioCallback;
// A protected structure used for buffer conversion.
class ConvertInfo {
public:
int32_t channels;
int32_t inJump;
int32_t outJump;
enum audio::format inFormat;
enum audio::format outFormat;
etk::Vector<int> inOffset;
etk::Vector<int> outOffset;
};
class Api : public ememory::EnableSharedFromThis<Api>{
protected:
etk::String m_name;
public:
Api();
virtual ~Api();
void setName(const etk::String& _name) {
m_name = _name;
}
virtual const etk::String& getCurrentApi() = 0;
virtual uint32_t getDeviceCount() = 0;
virtual audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device) = 0;
// TODO : Check API ...
virtual bool getNamedDeviceInfo(const etk::String& _deviceName, audio::orchestra::DeviceInfo& _info) {
return false;
}
virtual uint32_t getDefaultInputDevice();
virtual uint32_t getDefaultOutputDevice();
enum audio::orchestra::error openStream(audio::orchestra::StreamParameters* _outputParameters,
audio::orchestra::StreamParameters* _inputParameters,
audio::format _format,
uint32_t _sampleRate,
uint32_t* _nbChunk,
audio::orchestra::AirTAudioCallback _callback,
const audio::orchestra::StreamOptions& _options);
virtual enum audio::orchestra::error closeStream();
virtual enum audio::orchestra::error startStream();
virtual enum audio::orchestra::error stopStream() = 0;
virtual enum audio::orchestra::error abortStream() = 0;
long getStreamLatency();
uint32_t getStreamSampleRate();
virtual audio::Time getStreamTime();
bool isStreamOpen() const {
return m_state != audio::orchestra::state::closed;
}
bool isStreamRunning() const {
return m_state == audio::orchestra::state::running;
}
protected:
mutable ethread::Mutex m_mutex;
audio::orchestra::AirTAudioCallback m_callback;
uint32_t m_device[2]; // Playback and record, respectively.
enum audio::orchestra::mode m_mode; // audio::orchestra::mode_output, audio::orchestra::mode_input, or audio::orchestra::mode_duplex.
enum audio::orchestra::state m_state; // STOPPED, RUNNING, or CLOSED
etk::Vector<char> m_userBuffer[2]; // Playback and record, respectively.
char *m_deviceBuffer;
bool m_doConvertBuffer[2]; // Playback and record, respectively.
bool m_deviceInterleaved[2]; // Playback and record, respectively.
bool m_doByteSwap[2]; // Playback and record, respectively.
uint32_t m_sampleRate; // TODO : Rename frequency
uint32_t m_bufferSize;
uint32_t m_nBuffers;
uint32_t m_nUserChannels[2]; // Playback and record, respectively. // TODO : set only one config (open inout with the same number of channels (limitation)
uint32_t m_nDeviceChannels[2]; // Playback and record channels, respectively.
uint32_t m_channelOffset[2]; // Playback and record, respectively.
uint64_t m_latency[2]; // Playback and record, respectively.
enum audio::format m_userFormat; // TODO : Remove this ==> user can only open in the Hardware format ...
enum audio::format m_deviceFormat[2]; // Playback and record, respectively.
audio::orchestra::ConvertInfo m_convertInfo[2];
//audio::Time
audio::Time m_startTime; //!< start time of the stream (restart at every stop, pause ...)
audio::Duration m_duration; //!< duration since the stream was started
/**
* @brief api-specific method that attempts to open a device
* with the given parameters. This function MUST be implemented by
* all subclasses. If an error is encountered during the probe, a
* "warning" message is reported and false is returned. A
* successful probe is indicated by a return value of true.
*/
virtual bool open(uint32_t _device,
enum audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
enum audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
virtual bool openName(const etk::String& _deviceName,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options) { return false; }
/**
* @brief Increment the stream time.
*/
void tickStreamTime();
/**
* @brief Clear an RtApiStream structure.
*/
void clearStreamInfo();
/**
* @brief Check the current stream status
*/
enum audio::orchestra::error verifyStream();
/**
* @brief Protected method used to perform format, channel number, and/or interleaving
* conversions between the user and device buffers.
*/
void convertBuffer(char *_outBuffer,
char *_inBuffer,
audio::orchestra::ConvertInfo& _info);
/**
* @brief Perform byte-swapping on buffers.
*/
void byteSwapBuffer(char *_buffer,
uint32_t _samples,
enum audio::format _format);
/**
* @brief Sets up the parameters for buffer conversion.
*/
void setConvertInfo(enum audio::orchestra::mode _mode,
uint32_t _firstChannel);
public:
virtual bool isMasterOf(ememory::SharedPtr<audio::orchestra::Api> _api) {
return false;
};
};
}
}
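A hedged sketch of a callback matching the AirTAudioCallback signature declared above, filling the output with int16 stereo silence; it assumes the etk and audio headers from this repository are available and that etk::Function accepts a plain function, so treat it as an illustration rather than a verified build:

#include <audio/orchestra/Api.hpp>
#include <cstring>
#include <cstdint>

static int32_t silenceCallback(const void* _inputBuffer,
                               const audio::Time& _timeInput,
                               void* _outputBuffer,
                               const audio::Time& _timeOutput,
                               uint32_t _nbChunk,
                               const etk::Vector<audio::orchestra::status>& _status) {
    (void)_inputBuffer; (void)_timeInput; (void)_timeOutput; (void)_status;
    if (_outputBuffer != nullptr) {
        // 2 channels of int16: _nbChunk frames * 2 channels * 2 bytes each.
        memset(_outputBuffer, 0, _nbChunk * 2 * sizeof(int16_t));
    }
    return 0; // keep the stream running
}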


@@ -1,61 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
//#include <etk/types.hpp>
#include <audio/orchestra/debug.hpp>
#include <audio/orchestra/DeviceInfo.hpp>
#include <etk/stdTools.hpp>
void audio::orchestra::DeviceInfo::display(int32_t _tabNumber) const {
etk::String space;
for (int32_t iii=0; iii<_tabNumber; ++iii) {
space += " ";
}
if (isCorrect == false) {
ATA_PRINT(space + "NOT CORRECT INFORMATION");
return;
}
ATA_PRINT(space + "mode=" << (input==true?"input":"output"));
ATA_PRINT(space + "name=" << name);
if (desc.size() != 0) {
ATA_PRINT(space + "desc=" << desc);
}
ATA_PRINT(space + "channel" << (channels.size()>1?"s":"") << "=" << channels.size() << " : " << channels);
ATA_PRINT(space + "rate" << (sampleRates.size()>1?"s":"") << "=" << sampleRates);
ATA_PRINT(space + "native Format" << (nativeFormats.size()>1?"s":"") << ": " << nativeFormats);
ATA_PRINT(space + "default=" << (isDefault==true?"true":"false"));
}
void audio::orchestra::DeviceInfo::clear() {
isCorrect = false;
input = false;
name = "";
desc = "";
channels.clear();
sampleRates.clear();
nativeFormats.clear();
isDefault = false;
}
etk::Stream& audio::orchestra::operator <<(etk::Stream& _os, const audio::orchestra::DeviceInfo& _obj) {
_os << "{";
if (_obj.isCorrect == false) {
_os << "NOT CORRECT INFORMATION";
} else {
_os << "name=" << _obj.name << ", ";
if (_obj.desc.size() != 0) {
_os << "description=" << _obj.desc << ", ";
}
_os << "channels=" << _obj.channels << ", ";
_os << "default=" << _obj.isDefault << ", ";
_os << "rates=" << _obj.sampleRates << ", ";
_os << "native Format: " << _obj.nativeFormats;
}
_os << "}";
return _os;
}


@@ -1,49 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <audio/format.hpp>
#include <audio/channel.hpp>
namespace audio {
namespace orchestra {
/**
* @brief The public device information structure for returning queried values.
*/
class DeviceInfo {
public:
bool isCorrect; //!< the information is correct (the system can return incorrect information).
bool input; //!< true if the device in an input; false: output.
etk::String name; //!< Character string device identifier.
etk::String desc; //!< description of the device
etk::Vector<audio::channel> channels; //!< Channels interfaces.
etk::Vector<uint32_t> sampleRates; //!< Supported sample rates (queried from list of standard rates).
etk::Vector<audio::format> nativeFormats; //!< Bit mask of supported data formats.
bool isDefault; //! is default input/output
// Default constructor.
DeviceInfo() :
isCorrect(false),
input(false),
name(),
desc(),
channels(),
sampleRates(),
nativeFormats(),
isDefault(false) {}
/**
* @brief Display the current information of the device (on console)
*/
void display(int32_t _tabNumber = 1) const;
/**
* @brief Clear all internal data
*/
void clear();
};
etk::Stream& operator <<(etk::Stream& _os, const audio::orchestra::DeviceInfo& _obj);
}
}
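A hedged sketch that fills the DeviceInfo structure declared above by hand and prints it; the field values are purely illustrative and the build assumes the repository headers are present:

#include <audio/orchestra/DeviceInfo.hpp>

int main() {
    audio::orchestra::DeviceInfo info;
    info.clear();                              // reset every field to its "unknown" state
    info.isCorrect = true;
    info.input = false;                        // describes an output device
    info.name = "speaker";
    info.desc = "illustrative output device";
    info.sampleRates.pushBack(48000);
    info.nativeFormats.pushBack(audio::format_int16);
    info.isDefault = true;
    info.display();                            // dumps the fields through ATA_PRINT
    return 0;
}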


@@ -1,9 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#include <audio/orchestra/Flags.hpp>
#include <audio/orchestra/debug.hpp>


@@ -1,22 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <etk/types.hpp>
namespace audio {
namespace orchestra {
class Flags {
public:
bool m_minimizeLatency; // Simple example ==> TODO ...
Flags() :
m_minimizeLatency(false) {
// nothing to do ...
}
};
}
}


@@ -1,186 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
//#include <etk/types.hpp>
#include <audio/orchestra/Interface.hpp>
#include <audio/orchestra/debug.hpp>
#include <audio/orchestra/api/Alsa.hpp>
#include <audio/orchestra/api/Android.hpp>
#include <audio/orchestra/api/Asio.hpp>
#include <audio/orchestra/api/Core.hpp>
#include <audio/orchestra/api/CoreIos.hpp>
#include <audio/orchestra/api/Ds.hpp>
#include <audio/orchestra/api/Dummy.hpp>
#include <audio/orchestra/api/Jack.hpp>
#include <audio/orchestra/api/Pulse.hpp>
etk::Vector<etk::String> audio::orchestra::Interface::getListApi() {
etk::Vector<etk::String> apis;
// The order here will control the order of RtAudio's API search in
// the constructor.
for (size_t iii=0; iii<m_apiAvaillable.size(); ++iii) {
apis.pushBack(m_apiAvaillable[iii].first);
}
return apis;
}
void audio::orchestra::Interface::openApi(const etk::String& _api) {
m_api.reset();
for (size_t iii=0; iii<m_apiAvaillable.size(); ++iii) {
ATA_INFO("try open " << m_apiAvaillable[iii].first);
if (_api == m_apiAvaillable[iii].first) {
ATA_INFO(" ==> call it");
m_api = m_apiAvaillable[iii].second();
if (m_api != null) {
return;
}
}
}
// TODO : An error occurred ...
ATA_ERROR("Error in open API ...");
}
audio::orchestra::Interface::Interface() :
m_api(null) {
ATA_DEBUG("Add interface:");
#if defined(ORCHESTRA_BUILD_JACK)
addInterface(audio::orchestra::typeJack, audio::orchestra::api::Jack::create);
#endif
#if defined(ORCHESTRA_BUILD_ALSA)
addInterface(audio::orchestra::typeAlsa, audio::orchestra::api::Alsa::create);
#endif
#if defined(ORCHESTRA_BUILD_PULSE)
addInterface(audio::orchestra::typePulse, audio::orchestra::api::Pulse::create);
#endif
#if defined(ORCHESTRA_BUILD_ASIO)
addInterface(audio::orchestra::typeAsio, audio::orchestra::api::Asio::create);
#endif
#if defined(ORCHESTRA_BUILD_DS)
addInterface(audio::orchestra::typeDs, audio::orchestra::api::Ds::create);
#endif
#if defined(ORCHESTRA_BUILD_MACOSX_CORE)
addInterface(audio::orchestra::typeCoreOSX, audio::orchestra::api::Core::create);
#endif
#if defined(ORCHESTRA_BUILD_IOS_CORE)
addInterface(audio::orchestra::typeCoreIOS, audio::orchestra::api::CoreIos::create);
#endif
#if defined(ORCHESTRA_BUILD_JAVA)
addInterface(audio::orchestra::typeJava, audio::orchestra::api::Android::create);
#endif
#if defined(ORCHESTRA_BUILD_DUMMY)
addInterface(audio::orchestra::typeDummy, audio::orchestra::api::Dummy::create);
#endif
}
void audio::orchestra::Interface::addInterface(const etk::String& _api, ememory::SharedPtr<Api> (*_callbackCreate)()) {
m_apiAvaillable.pushBack(etk::Pair<etk::String, ememory::SharedPtr<Api> (*)()>(_api, _callbackCreate));
}
enum audio::orchestra::error audio::orchestra::Interface::clear() {
ATA_INFO("Clear API ...");
if (m_api == null) {
ATA_WARNING("Interface NOT started!");
return audio::orchestra::error_none;
}
m_api.reset();
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::Interface::instanciate(const etk::String& _api) {
ATA_INFO("Instanciate API ...");
if (m_api != null) {
ATA_WARNING("Interface already started!");
return audio::orchestra::error_none;
}
if (_api != audio::orchestra::typeUndefined) {
ATA_INFO("API specified : " << _api);
// Attempt to open the specified API.
openApi(_api);
if (m_api != null) {
if (m_api->getDeviceCount() != 0) {
ATA_INFO(" ==> api open");
}
return audio::orchestra::error_none;
}
// No compiled support for specified API value. Issue a debug
// warning and continue as if no API was specified.
ATA_ERROR("API NOT Supported '" << _api << "' not in " << getListApi());
return audio::orchestra::error_fail;
}
ATA_INFO("Auto choice API :");
// Iterate through the compiled APIs and return as soon as we find
// one with at least one device or we reach the end of the list.
etk::Vector<etk::String> apis = getListApi();
ATA_INFO(" find : " << apis.size() << " apis.");
for (size_t iii=0; iii<apis.size(); ++iii) {
ATA_INFO("try open ...");
openApi(apis[iii]);
if(m_api == null) {
ATA_ERROR(" ==> can not create ...");
continue;
}
if (m_api->getDeviceCount() != 0) {
ATA_INFO(" ==> api open");
break;
} else {
ATA_INFO(" ==> Interface exists, but has no devices: " << m_api->getDeviceCount());
}
}
if (m_api != null) {
return audio::orchestra::error_none;
}
ATA_ERROR("API NOT Supported '" << _api << "' not in " << getListApi());
return audio::orchestra::error_fail;
}
audio::orchestra::Interface::~Interface() {
ATA_INFO("Remove interface");
m_api.reset();
}
enum audio::orchestra::error audio::orchestra::Interface::openStream(audio::orchestra::StreamParameters* _outputParameters,
audio::orchestra::StreamParameters* _inputParameters,
audio::format _format,
uint32_t _sampleRate,
uint32_t* _bufferFrames,
audio::orchestra::AirTAudioCallback _callback,
const audio::orchestra::StreamOptions& _options) {
if (m_api == null) {
return audio::orchestra::error_inputNull;
}
return m_api->openStream(_outputParameters,
_inputParameters,
_format,
_sampleRate,
_bufferFrames,
_callback,
_options);
}
bool audio::orchestra::Interface::isMasterOf(audio::orchestra::Interface& _interface) {
if (m_api == null) {
ATA_ERROR("Current Master API is null ...");
return false;
}
if (_interface.m_api == null) {
ATA_ERROR("Current Slave API is null ...");
return false;
}
if (m_api->getCurrentApi() != _interface.m_api->getCurrentApi()) {
ATA_ERROR("Can not link 2 Interfaces that do not have the same low-level type (?)");//" << _interface.m_adac->getCurrentApi() << " != " << m_adac->getCurrentApi() << ")");
return false;
}
if (m_api->getCurrentApi() != audio::orchestra::typeAlsa) {
ATA_ERROR("Linking 2 devices together works only if the interface is ALSA");// << audio::orchestra::type::alsa << " not for " << m_api->getCurrentApi());
return false;
}
return m_api->isMasterOf(_interface.m_api);
}


@@ -1,308 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <etk/String.hpp>
#include <etk/Vector.hpp>
#include <etk/Pair.hpp>
#include <audio/orchestra/base.hpp>
#include <audio/orchestra/CallbackInfo.hpp>
#include <audio/orchestra/Api.hpp>
namespace audio {
namespace orchestra {
/**
* @brief audio::orchestra::Interface class declaration.
*
* audio::orchestra::Interface is a "controller" used to select an available audio i/o
* interface. It presents a common API for the user to call but all
* functionality is implemented by the class RtApi and its
* subclasses. RtAudio creates an instance of an RtApi subclass
* based on the user's API choice. If no choice is made, RtAudio
* attempts to make a "logical" API selection.
*/
class Interface {
protected:
etk::Vector<etk::Pair<etk::String, ememory::SharedPtr<Api> (*)()> > m_apiAvaillable;
protected:
ememory::SharedPtr<audio::orchestra::Api> m_api;
public:
void setName(const etk::String& _name) {
if (m_api == null) {
return;
}
m_api->setName(_name);
}
/**
* @brief Get the list of all available APIs in the system.
* @return the list of all APIs
*/
etk::Vector<etk::String> getListApi();
/**
* @brief Add an interface to the list of possible APIs.
* @param[in] _api Type of the interface.
* @param[in] _callbackCreate API creation callback.
*/
void addInterface(const etk::String& _api, ememory::SharedPtr<Api> (*_callbackCreate)());
/**
* @brief The class constructor.
* @note the creation of the basic instance is done by instanciate()
*/
Interface();
/**
* @brief The destructor.
*
* If a stream is running or open, it will be stopped and closed
* automatically.
*/
virtual ~Interface();
/**
* @brief Clear the current Interface
*/
enum audio::orchestra::error clear();
/**
* @brief Create an interface instance
*/
enum audio::orchestra::error instanciate(const etk::String& _api = audio::orchestra::typeUndefined);
/**
* @return the audio API specifier for the current instance of airtaudio.
*/
const etk::String& getCurrentApi() {
if (m_api == null) {
return audio::orchestra::typeUndefined;
}
return m_api->getCurrentApi();
}
/**
* @brief A public function that queries for the number of audio devices available.
*
* This function performs a system query of available devices each time it
* is called, thus supporting devices connected \e after instantiation. If
* a system error occurs during processing, a warning will be issued.
*/
uint32_t getDeviceCount() {
if (m_api == null) {
return 0;
}
return m_api->getDeviceCount();
}
/**
* @brief Any device integer between 0 and getDeviceCount() - 1 is valid.
* If an invalid argument is provided, an RtError (type = INVALID_USE)
* will be thrown. If a device is busy or otherwise unavailable, the
* structure member "probed" will have a value of "false" and all
* other members are undefined. If the specified device is the
* current default input or output device, the corresponding
* "isDefault" member will have a value of "true".
*
* @return An audio::orchestra::DeviceInfo structure for a specified device number.
*/
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device) {
if (m_api == null) {
return audio::orchestra::DeviceInfo();
}
return m_api->getDeviceInfo(_device);
}
audio::orchestra::DeviceInfo getDeviceInfo(const etk::String& _deviceName) {
if (m_api == null) {
return audio::orchestra::DeviceInfo();
}
audio::orchestra::DeviceInfo info;
m_api->getNamedDeviceInfo(_deviceName, info);
return info;
}
/**
* @brief A function that returns the index of the default output device.
*
* If the underlying audio API does not provide a "default
* device", or if no devices are available, the return value will be
* 0. Note that this is a valid device identifier and it is the
* client's responsibility to verify that a device is available
* before attempting to open a stream.
*/
uint32_t getDefaultOutputDevice() {
if (m_api == null) {
return 0;
}
return m_api->getDefaultOutputDevice();
}
/**
* @brief A function that returns the index of the default input device.
*
* If the underlying audio API does not provide a "default
* device", or if no devices are available, the return value will be
* 0. Note that this is a valid device identifier and it is the
* client's responsibility to verify that a device is available
* before attempting to open a stream.
*/
uint32_t getDefaultInputDevice() {
if (m_api == null) {
return 0;
}
return m_api->getDefaultInputDevice();
}
/**
* @brief A public function for opening a stream with the specified parameters.
*
* An RtError (type = SYSTEM_ERROR) is thrown if a stream cannot be
* opened with the specified parameters or an error occurs during
* processing. An RtError (type = INVALID_USE) is thrown if any
* invalid device ID or channel number parameters are specified.
* @param _outputParameters Specifies output stream parameters to use
* when opening a stream, including a device ID, number of channels,
* and starting channel number. For input-only streams, this
* argument should be null. The device ID is an index value between
* 0 and getDeviceCount() - 1.
* @param _inputParameters Specifies input stream parameters to use
* when opening a stream, including a device ID, number of channels,
* and starting channel number. For output-only streams, this
* argument should be null. The device ID is an index value between
* 0 and getDeviceCount() - 1.
* @param _format An audio::format specifying the desired sample data format.
* @param _sampleRate The desired sample rate (sample frames per second).
* @param _bufferFrames A pointer to a value indicating the desired
* internal buffer size in sample frames. The actual value
* used by the device is returned via the same pointer. A
* value of zero can be specified, in which case the lowest
* allowable value is determined.
* @param _callback A client-defined function that will be invoked
* when input data is available and/or output data is needed.
* @param _options An optional pointer to a structure containing various
* global stream options, including a list of OR'ed audio::orchestra::streamFlags
* and a suggested number of stream buffers that can be used to
* control stream latency. More buffers typically result in more
* robust performance, though at a cost of greater latency. If a
* value of zero is specified, a system-specific median value is
* chosen. If the airtaudio_MINIMIZE_LATENCY flag bit is set, the
* lowest allowable value is used. The actual value used is
* returned via the structure argument. The parameter is API dependent.
* @param _errorCallback A client-defined function that will be invoked
* when an error has occurred.
*/
enum audio::orchestra::error openStream(audio::orchestra::StreamParameters *_outputParameters,
audio::orchestra::StreamParameters *_inputParameters,
enum audio::format _format,
uint32_t _sampleRate,
uint32_t* _bufferFrames,
audio::orchestra::AirTAudioCallback _callback,
const audio::orchestra::StreamOptions& _options = audio::orchestra::StreamOptions());
/**
* @brief A function that closes a stream and frees any associated stream memory.
*
* If a stream is not open, this function issues a warning and
* returns (no exception is thrown).
*/
enum audio::orchestra::error closeStream() {
if (m_api == null) {
return audio::orchestra::error_inputNull;
}
return m_api->closeStream();
}
/**
* @brief A function that starts a stream.
*
* An RtError (type = SYSTEM_ERROR) is thrown if an error occurs
* during processing. An RtError (type = INVALID_USE) is thrown if a
* stream is not open. A warning is issued if the stream is already
* running.
*/
enum audio::orchestra::error startStream() {
if (m_api == null) {
return audio::orchestra::error_inputNull;
}
return m_api->startStream();
}
/**
* @brief Stop a stream, allowing any samples remaining in the output queue to be played.
*
* An RtError (type = SYSTEM_ERROR) is thrown if an error occurs
* during processing. An RtError (type = INVALID_USE) is thrown if a
* stream is not open. A warning is issued if the stream is already
* stopped.
*/
enum audio::orchestra::error stopStream() {
if (m_api == null) {
return audio::orchestra::error_inputNull;
}
return m_api->stopStream();
}
/**
* @brief Stop a stream, discarding any samples remaining in the input/output queue.
* An RtError (type = SYSTEM_ERROR) is thrown if an error occurs
* during processing. An RtError (type = INVALID_USE) is thrown if a
* stream is not open. A warning is issued if the stream is already
* stopped.
*/
enum audio::orchestra::error abortStream() {
if (m_api == null) {
return audio::orchestra::error_inputNull;
}
return m_api->abortStream();
}
/**
* @return true if a stream is open and false if not.
*/
bool isStreamOpen() const {
if (m_api == null) {
return false;
}
return m_api->isStreamOpen();
}
/**
* @return true if the stream is running and false if it is stopped or not open.
*/
bool isStreamRunning() const {
if (m_api == null) {
return false;
}
return m_api->isStreamRunning();
}
/**
* @brief If a stream is not open, an RtError (type = INVALID_USE) will be thrown.
* @return the number of elapsed seconds since the stream was started.
*/
audio::Time getStreamTime() {
if (m_api == null) {
return audio::Time();
}
return m_api->getStreamTime();
}
/**
* @brief The stream latency refers to delay in audio input and/or output
* caused by internal buffering by the audio system and/or hardware.
* For duplex streams, the returned value will represent the sum of
* the input and output latencies. If a stream is not open, an
* RtError (type = INVALID_USE) will be thrown. If the API does not
* report latency, the return value will be zero.
* @return The internal stream latency in sample frames.
*/
long getStreamLatency() {
if (m_api == null) {
return 0;
}
return m_api->getStreamLatency();
}
/**
* @brief On some systems, the sample rate used may be slightly different
* than that specified in the stream parameters. If a stream is not
* open, an RtError (type = INVALID_USE) will be thrown.
* @return Returns actual sample rate in use by the stream.
*/
uint32_t getStreamSampleRate() {
if (m_api == null) {
return 0;
}
return m_api->getStreamSampleRate();
}
bool isMasterOf(audio::orchestra::Interface& _interface);
protected:
void openApi(const etk::String& _api);
};
}
}
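To make the calls documented above concrete, here is a minimal, hedged usage sketch: the callback signature is inferred from how m_callback is invoked by the backends shown later in this diff, the Interface is assumed to select a backend when openStream() is called, and names such as myCallback are illustrative only.

#include <audio/orchestra/Interface.hpp>

// Sketch only: signature inferred from the backend call sites
// (input buffer + time, output buffer + time, frame count, status list).
static int32_t myCallback(void* _inputBuffer, const audio::Time& _inputTime,
                          void* _outputBuffer, const audio::Time& _outputTime,
                          uint32_t _nbChunk,
                          const etk::Vector<enum audio::orchestra::status>& _status) {
	if (_outputBuffer != nullptr) {
		// write _nbChunk frames of audio into _outputBuffer here
	}
	return 0; // 0: keep running, 1: drain then stop, 2: abort immediately
}

int main() {
	audio::orchestra::Interface orchestra;
	audio::orchestra::StreamParameters outputParams;
	outputParams.deviceId = 0;       // any index in [0, getDeviceCount() - 1]
	outputParams.nChannels = 2;
	outputParams.firstChannel = 0;
	uint32_t bufferFrames = 256;     // 0 would request the lowest allowable size
	enum audio::orchestra::error err;
	err = orchestra.openStream(&outputParams, nullptr,
	                           audio::format_int16, 48000, &bufferFrames,
	                           myCallback);
	if (err != audio::orchestra::error_none) {
		return -1;
	}
	orchestra.startStream();
	// ... let the callback run for a while ...
	orchestra.stopStream();
	orchestra.closeStream();
	return 0;
}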

View File

@ -1,45 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#include <audio/orchestra/StreamOptions.hpp>
#include <etk/stdTools.hpp>
#include <audio/orchestra/debug.hpp>
static const char* listValue[] = {
"hardware",
"trigered",
"soft"
};
etk::Stream& audio::orchestra::operator <<(etk::Stream& _os, enum audio::orchestra::timestampMode _obj) {
_os << listValue[_obj];
return _os;
}
namespace etk {
template <> bool from_string<enum audio::orchestra::timestampMode>(enum audio::orchestra::timestampMode& _variableRet, const etk::String& _value) {
if (_value == "hardware") {
_variableRet = audio::orchestra::timestampMode_Hardware;
return true;
}
if (_value == "trigered") {
_variableRet = audio::orchestra::timestampMode_trigered;
return true;
}
if (_value == "soft") {
_variableRet = audio::orchestra::timestampMode_soft;
return true;
}
return false;
}
template <> etk::String toString<enum audio::orchestra::timestampMode>(const enum audio::orchestra::timestampMode& _variable) {
return listValue[_variable];
}
}

View File

@ -1,35 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <audio/orchestra/Flags.hpp>
#include <etk/String.hpp>
namespace audio {
namespace orchestra {
enum timestampMode {
timestampMode_Hardware, //!< enable hardware timestamps
timestampMode_trigered, //!< get the hardware trigger timestamp and increment it with the stream duration
timestampMode_soft, //!< simulate all timestamps
};
etk::Stream& operator <<(etk::Stream& _os, enum audio::orchestra::timestampMode _obj);
class StreamOptions {
public:
audio::orchestra::Flags flags; //!< A bit-mask of stream flags
uint32_t numberOfBuffers; //!< Number of stream buffers.
etk::String streamName; //!< A stream name (currently used only in Jack).
enum timestampMode mode; //!< mode of timestamping data...
// Default constructor.
StreamOptions() :
flags(),
numberOfBuffers(0),
mode(timestampMode_Hardware) {}
};
}
}
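A short sketch of filling in the options declared above; the only assumption beyond these declarations is that etk::from_string is available through etk/stdTools.hpp, as in StreamOptions.cpp (note the registered string value for the triggered mode is spelled "trigered"):

#include <audio/orchestra/StreamOptions.hpp>
#include <etk/stdTools.hpp>

audio::orchestra::StreamOptions makeOptions(const etk::String& _modeName) {
	audio::orchestra::StreamOptions options;
	options.numberOfBuffers = 4;      // more buffers: more robust, but higher latency
	options.streamName = "my-stream"; // currently only used by the Jack backend
	enum audio::orchestra::timestampMode mode;
	if (etk::from_string(mode, _modeName) == true) {
		// accepts "hardware", "trigered" or "soft"
		options.mode = mode;
	}
	return options;
}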

View File

@ -1,30 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
namespace audio {
namespace orchestra {
/**
* @brief The structure for specifying input or output stream parameters.
*/
class StreamParameters {
public:
int32_t deviceId; //!< Device index (0 to getDeviceCount() - 1, or -1 to select the device by name).
etk::String deviceName; //!< Name of the device (must be non-empty when deviceId == -1, and empty otherwise).
uint32_t nChannels; //!< Number of channels.
uint32_t firstChannel; //!< First channel index on device (default = 0).
// Default constructor.
StreamParameters() :
deviceId(-1),
nChannels(0),
firstChannel(0) {
}
};
}
}
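The two selection modes described by the deviceId/deviceName comments above, side by side (the device name used is purely illustrative):

audio::orchestra::StreamParameters byIndex;
byIndex.deviceId = 0;          // index in [0, getDeviceCount() - 1]
byIndex.nChannels = 2;
byIndex.firstChannel = 0;

audio::orchestra::StreamParameters byName;
byName.deviceId = -1;          // -1 means: resolve the device by name instead
byName.deviceName = "default"; // illustrative name; must be non-empty here
byName.nChannels = 2;
byName.firstChannel = 0;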

File diff suppressed because it is too large

View File

@ -1,79 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#ifdef ORCHESTRA_BUILD_ALSA
namespace audio {
namespace orchestra {
namespace api {
class AlsaPrivate;
class Alsa: public audio::orchestra::Api {
public:
static ememory::SharedPtr<audio::orchestra::Api> create();
public:
Alsa();
virtual ~Alsa();
const etk::String& getCurrentApi() {
return audio::orchestra::typeAlsa;
}
uint32_t getDeviceCount();
private:
bool getNamedDeviceInfoLocal(const etk::String& _deviceName,
audio::orchestra::DeviceInfo& _info,
int32_t _cardId=-1, // Alsa card ID
int32_t _subdevice=-1, // alsa subdevice ID
int32_t _localDeviceId=-1,// local ID of the device found
bool _input=false);
public:
bool getNamedDeviceInfo(const etk::String& _deviceName, audio::orchestra::DeviceInfo& _info) {
return getNamedDeviceInfoLocal(_deviceName, _info);
}
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device);
enum audio::orchestra::error closeStream();
enum audio::orchestra::error startStream();
enum audio::orchestra::error stopStream();
enum audio::orchestra::error abortStream();
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of this class. External use of this function
// will most likely produce highly undesirable results!
void callbackEvent();
void callbackEventOneCycleRead();
void callbackEventOneCycleWrite();
void callbackEventOneCycleMMAPRead();
void callbackEventOneCycleMMAPWrite();
private:
ememory::SharedPtr<AlsaPrivate> m_private;
etk::Vector<audio::orchestra::DeviceInfo> m_devices;
void saveDeviceInfo();
bool open(uint32_t _device,
enum audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
enum audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
bool openName(const etk::String& _deviceName,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
virtual audio::Time getStreamTime();
public:
bool isMasterOf(ememory::SharedPtr<audio::orchestra::Api> _api);
};
}
}
}
#endif

View File

@ -1,195 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#ifdef ORCHESTRA_BUILD_JAVA
//#include <ewol/context/Context.h>
#include <audio/orchestra/Interface.hpp>
#include <audio/orchestra/debug.hpp>
#include <audio/orchestra/api/AndroidNativeInterface.hpp>
#include <audio/orchestra/api/Android.hpp>
extern "C" {
#include <limits.h>
}
ememory::SharedPtr<audio::orchestra::Api> audio::orchestra::api::Android::create() {
ATA_INFO("Create Android device ... ");
return ememory::SharedPtr<audio::orchestra::api::Android>(ETK_NEW(audio::orchestra::api::Android));
}
audio::orchestra::api::Android::Android() :
m_uid(-1) {
ATA_INFO("Create Android interface");
}
audio::orchestra::api::Android::~Android() {
ATA_INFO("Destroy Android interface");
}
uint32_t audio::orchestra::api::Android::getDeviceCount() {
//ATA_INFO("Get device count:"<< m_devices.size());
return audio::orchestra::api::android::getDeviceCount();
}
audio::orchestra::DeviceInfo audio::orchestra::api::Android::getDeviceInfo(uint32_t _device) {
//ATA_INFO("Get device info ...");
return audio::orchestra::api::android::getDeviceInfo(_device);
}
enum audio::orchestra::error audio::orchestra::api::Android::closeStream() {
ATA_INFO("Close Stream");
// Can not close the stream now...
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::Android::startStream() {
ATA_INFO("Start Stream");
// TODO : Check return ...
audio::orchestra::Api::startStream();
// Can not close the stream now...
return audio::orchestra::api::android::startStream(m_uid);
}
enum audio::orchestra::error audio::orchestra::api::Android::stopStream() {
ATA_INFO("Stop stream");
// Can not close the stream now...
return audio::orchestra::api::android::stopStream(m_uid);
}
enum audio::orchestra::error audio::orchestra::api::Android::abortStream() {
ATA_INFO("Abort Stream");
// Can not close the stream now...
return audio::orchestra::error_none;
}
void audio::orchestra::api::Android::playback(int16_t* _dst, int32_t _nbChunk) {
// clear output buffer:
if (_dst != null) {
memset(_dst, 0, _nbChunk*audio::getFormatBytes(m_deviceFormat[modeToIdTable(m_mode)])*m_nDeviceChannels[modeToIdTable(m_mode)]);
}
int32_t doStopStream = 0;
audio::Time streamTime = getStreamTime();
etk::Vector<enum audio::orchestra::status> status;
if (m_doConvertBuffer[modeToIdTable(m_mode)] == true) {
ATA_VERBOSE("Need playback data " << int32_t(_nbChunk) << " userbuffer size = " << m_userBuffer[audio::orchestra::mode_output].size() << "pointer=" << int64_t(&m_userBuffer[audio::orchestra::mode_output][0]));
doStopStream = m_callback(null,
audio::Time(),
&m_userBuffer[m_mode][0],
streamTime,
uint32_t(_nbChunk),
status);
convertBuffer((char*)_dst, (char*)&m_userBuffer[audio::orchestra::mode_output][0], m_convertInfo[audio::orchestra::mode_output]);
} else {
ATA_VERBOSE("Need playback data " << int32_t(_nbChunk) << " pointer=" << int64_t(_dst));
doStopStream = m_callback(null,
audio::Time(),
_dst,
streamTime,
uint32_t(_nbChunk),
status);
}
if (doStopStream == 2) {
abortStream();
return;
}
audio::orchestra::Api::tickStreamTime();
}
void audio::orchestra::api::Android::record(int16_t* _dst, int32_t _nbChunk) {
int32_t doStopStream = 0;
audio::Time streamTime = getStreamTime();
etk::Vector<enum audio::orchestra::status> status;
if (m_doConvertBuffer[modeToIdTable(m_mode)] == true) {
ATA_VERBOSE("Need playback data " << int32_t(_nbChunk) << " userbuffer size = " << m_userBuffer[audio::orchestra::mode_output].size() << "pointer=" << int64_t(&m_userBuffer[audio::orchestra::mode_output][0]));
convertBuffer((char*)&m_userBuffer[audio::orchestra::mode_input][0], (char*)_dst, m_convertInfo[audio::orchestra::mode_input]);
doStopStream = m_callback(&m_userBuffer[m_mode][0],
streamTime,
null,
audio::Time(),
uint32_t(_nbChunk),
status);
} else {
ATA_VERBOSE("Need playback data " << int32_t(_nbChunk) << " pointer=" << int64_t(_dst));
doStopStream = m_callback(_dst,
streamTime,
null,
audio::Time(),
uint32_t(_nbChunk),
status);
}
if (doStopStream == 2) {
abortStream();
return;
}
audio::orchestra::Api::tickStreamTime();
}
bool audio::orchestra::api::Android::open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options) {
bool ret = false;
ATA_INFO("Probe : device=" << _device << " channels=" << _channels << " firstChannel=" << _firstChannel << " sampleRate=" << _sampleRate);
m_mode = _mode;
m_userFormat = _format;
m_nUserChannels[modeToIdTable(m_mode)] = _channels;
m_uid = audio::orchestra::api::android::open(_device, m_mode, _channels, _firstChannel, _sampleRate, _format, _bufferSize, _options, ememory::staticPointerCast<audio::orchestra::api::Android>(sharedFromThis()));
if (m_uid < 0) {
ret = false;
} else {
ret = true;
}
m_bufferSize = 256;
m_sampleRate = _sampleRate;
m_doByteSwap[modeToIdTable(m_mode)] = false; // for endianness ...
// TODO : for now these values are hard-coded ==> to be updated later ...
m_deviceFormat[modeToIdTable(m_mode)] = audio::format_int16;
m_nDeviceChannels[modeToIdTable(m_mode)] = 2;
m_deviceInterleaved[modeToIdTable(m_mode)] = true;
m_doConvertBuffer[modeToIdTable(m_mode)] = false;
if (m_userFormat != m_deviceFormat[modeToIdTable(m_mode)]) {
m_doConvertBuffer[modeToIdTable(m_mode)] = true;
}
if (m_nUserChannels[modeToIdTable(m_mode)] < m_nDeviceChannels[modeToIdTable(m_mode)]) {
m_doConvertBuffer[modeToIdTable(m_mode)] = true;
}
if ( m_deviceInterleaved[modeToIdTable(m_mode)] == false
&& m_nUserChannels[modeToIdTable(m_mode)] > 1) {
m_doConvertBuffer[modeToIdTable(m_mode)] = true;
}
if (m_doConvertBuffer[modeToIdTable(m_mode)] == true) {
// Allocate necessary internal buffers.
uint64_t bufferBytes = m_nUserChannels[modeToIdTable(m_mode)] * m_bufferSize * audio::getFormatBytes(m_userFormat);
m_userBuffer[modeToIdTable(m_mode)].resize(bufferBytes);
if (m_userBuffer[modeToIdTable(m_mode)].size() == 0) {
ATA_ERROR("error allocating user buffer memory.");
}
setConvertInfo(m_mode, _firstChannel);
}
ATA_INFO("device format : " << m_deviceFormat[modeToIdTable(m_mode)] << " user format : " << m_userFormat);
ATA_INFO("device channels : " << m_nDeviceChannels[modeToIdTable(m_mode)] << " user channels : " << m_nUserChannels[modeToIdTable(m_mode)]);
ATA_INFO("do convert buffer : " << m_doConvertBuffer[modeToIdTable(m_mode)]);
if (ret == false) {
ATA_ERROR("Can not open device.");
}
return ret;
}
#endif
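For reference, the user-buffer allocation in open() above is plain channels × frames × bytes-per-sample arithmetic; with two user channels, the 256-frame buffer size set in open(), and int16 samples it gives, for example:

// 2 channels * 256 frames * audio::getFormatBytes(audio::format_int16) (= 2 bytes)
// = 1024 bytes for one period of the user buffer.
uint64_t bufferBytes = 2u * 256u * audio::getFormatBytes(audio::format_int16); // 1024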

View File

@ -1,60 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
*/
#pragma once
#ifdef ORCHESTRA_BUILD_JAVA
#include <audio/orchestra/Interface.hpp>
namespace audio {
namespace orchestra {
namespace api {
class Android: public audio::orchestra::Api {
public:
static ememory::SharedPtr<audio::orchestra::Api> create();
public:
Android();
virtual ~Android();
const etk::String& getCurrentApi() {
return audio::orchestra::typeJava;
}
uint32_t getDeviceCount();
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device);
enum audio::orchestra::error closeStream();
enum audio::orchestra::error startStream();
enum audio::orchestra::error stopStream();
enum audio::orchestra::error abortStream();
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of this class. External use of this function
// will most likely produce highly undesirable results!
void callbackEvent();
private:
int32_t m_uid;
public:
int32_t getUId() {
return m_uid;
}
private:
etk::Vector<audio::orchestra::DeviceInfo> m_devices;
void saveDeviceInfo();
bool open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
public:
void playback(int16_t* _dst, int32_t _nbChunk);
void record(int16_t* _dst, int32_t _nbChunk);
};
}
}
}
#endif

View File

@ -1,542 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
*/
extern "C" {
#include <jni.h>
#include <pthread.h>
}
#include <ethread/Mutex.hpp>
#include <audio/orchestra/debug.hpp>
#include <audio/orchestra/error.hpp>
#include <audio/orchestra/api/AndroidNativeInterface.hpp>
#include <audio/orchestra/api/Android.hpp>
/* include auto generated file */
#include <org_musicdsp_orchestra_OrchestraConstants.h>
#include <jvm-basics/jvm-basics.hpp>
#include <ememory/memory.hpp>
#include <ejson/ejson.hpp>
class AndroidOrchestraContext {
public:
// get resources from the Java environment:
JNIEnv* m_JavaVirtualMachinePointer; //!< the JVM
jclass m_javaClassOrchestra; //!< main activity class (android ...)
jclass m_javaClassOrchestraCallback;
jobject m_javaObjectOrchestraCallback;
jmethodID m_javaMethodOrchestraActivityAudioGetDeviceCount;
jmethodID m_javaMethodOrchestraActivityAudioGetDeviceProperty;
jmethodID m_javaMethodOrchestraActivityAudioOpenDeviceInput;
jmethodID m_javaMethodOrchestraActivityAudioOpenDeviceOutput;
jmethodID m_javaMethodOrchestraActivityAudioCloseDevice;
jmethodID m_javaMethodOrchestraActivityAudioStart;
jmethodID m_javaMethodOrchestraActivityAudioStop;
jclass m_javaDefaultClassString; //!< default string class
private:
bool safeInitMethodID(jmethodID& _mid, jclass& _cls, const char* _name, const char* _sign) {
_mid = m_JavaVirtualMachinePointer->GetMethodID(_cls, _name, _sign);
if(_mid == null) {
ATA_ERROR("C->java : Can't find the method " << _name);
/* remove access on the virtual machine : */
m_JavaVirtualMachinePointer = null;
return false;
}
return true;
}
bool java_attach_current_thread(int *_rstatus) {
ATA_DEBUG("C->java : call java");
if (jvm_basics::getJavaVM() == null) {
ATA_ERROR("C->java : JVM not initialised");
m_JavaVirtualMachinePointer = null;
return false;
}
*_rstatus = jvm_basics::getJavaVM()->GetEnv((void **) &m_JavaVirtualMachinePointer, JNI_VERSION_1_6);
if (*_rstatus == JNI_EDETACHED) {
JavaVMAttachArgs lJavaVMAttachArgs;
lJavaVMAttachArgs.version = JNI_VERSION_1_6;
lJavaVMAttachArgs.name = "EwolNativeThread";
lJavaVMAttachArgs.group = null;
int status = jvm_basics::getJavaVM()->AttachCurrentThread(&m_JavaVirtualMachinePointer, &lJavaVMAttachArgs);
jvm_basics::checkExceptionJavaVM(m_JavaVirtualMachinePointer);
if (status != JNI_OK) {
ATA_ERROR("C->java : AttachCurrentThread failed : " << status);
m_JavaVirtualMachinePointer = null;
return false;
}
}
return true;
}
void java_detach_current_thread(int _status) {
if(_status == JNI_EDETACHED) {
jvm_basics::getJavaVM()->DetachCurrentThread();
m_JavaVirtualMachinePointer = null;
}
}
public:
AndroidOrchestraContext(JNIEnv* _env, jclass _classBase, jobject _objCallback) :
m_JavaVirtualMachinePointer(null),
m_javaClassOrchestra(0),
m_javaClassOrchestraCallback(0),
m_javaObjectOrchestraCallback(0),
m_javaMethodOrchestraActivityAudioGetDeviceCount(0),
m_javaMethodOrchestraActivityAudioGetDeviceProperty(0),
m_javaMethodOrchestraActivityAudioOpenDeviceInput(0),
m_javaMethodOrchestraActivityAudioOpenDeviceOutput(0),
m_javaMethodOrchestraActivityAudioCloseDevice(0),
m_javaMethodOrchestraActivityAudioStart(0),
m_javaMethodOrchestraActivityAudioStop(0),
m_javaDefaultClassString(0) {
ATA_DEBUG("*******************************************");
ATA_DEBUG("** set JVM Pointer (orchestra) **");
ATA_DEBUG("*******************************************");
m_JavaVirtualMachinePointer = _env;
// get default needed all time elements :
if (m_JavaVirtualMachinePointer == null) {
ATA_ERROR("C->java: NULLPTR jvm interface");
return;
}
ATA_DEBUG("C->java: try load org/musicdsp/orchestra/OrchestraNative class");
m_javaClassOrchestra = m_JavaVirtualMachinePointer->FindClass("org/musicdsp/orchestra/OrchestraNative" );
if (m_javaClassOrchestra == 0) {
ATA_ERROR("C->java : Can't find org/musicdsp/orchestra/OrchestraNative class");
// remove access on the virtual machine :
m_JavaVirtualMachinePointer = null;
return;
}
/* The object field extends Activity and implement OrchestraCallback */
m_javaClassOrchestraCallback = m_JavaVirtualMachinePointer->GetObjectClass(_objCallback);
if(m_javaClassOrchestraCallback == null) {
ATA_ERROR("C->java : Can't find org/musicdsp/orchestra/OrchestraManagerCallback class");
// remove access on the virtual machine :
m_JavaVirtualMachinePointer = null;
return;
}
bool functionCallbackIsMissing = false;
bool ret= false;
ret = safeInitMethodID(m_javaMethodOrchestraActivityAudioGetDeviceCount,
m_javaClassOrchestraCallback,
"getDeviceCount",
"()I");
if (ret == false) {
jvm_basics::checkExceptionJavaVM(_env);
ATA_ERROR("system can not start without function : getDeviceCount");
functionCallbackIsMissing = true;
}
ret = safeInitMethodID(m_javaMethodOrchestraActivityAudioGetDeviceProperty,
m_javaClassOrchestraCallback,
"getDeviceProperty",
"(I)Ljava/lang/String;");
if (ret == false) {
jvm_basics::checkExceptionJavaVM(_env);
ATA_ERROR("system can not start without function : getDeviceProperty");
functionCallbackIsMissing = true;
}
ret = safeInitMethodID(m_javaMethodOrchestraActivityAudioOpenDeviceInput,
m_javaClassOrchestraCallback,
"openDeviceInput",
"(IIII)I");
if (ret == false) {
jvm_basics::checkExceptionJavaVM(_env);
ATA_ERROR("system can not start without function : openDeviceInput");
functionCallbackIsMissing = true;
}
ret = safeInitMethodID(m_javaMethodOrchestraActivityAudioOpenDeviceOutput,
m_javaClassOrchestraCallback,
"openDeviceOutput",
"(IIII)I");
if (ret == false) {
jvm_basics::checkExceptionJavaVM(_env);
ATA_ERROR("system can not start without function : openDeviceOutput");
functionCallbackIsMissing = true;
}
ret = safeInitMethodID(m_javaMethodOrchestraActivityAudioCloseDevice,
m_javaClassOrchestraCallback,
"closeDevice",
"(I)Z");
if (ret == false) {
jvm_basics::checkExceptionJavaVM(_env);
ATA_ERROR("system can not start without function : closeDevice");
functionCallbackIsMissing = true;
}
ret = safeInitMethodID(m_javaMethodOrchestraActivityAudioStart,
m_javaClassOrchestraCallback,
"start",
"(I)Z");
if (ret == false) {
jvm_basics::checkExceptionJavaVM(_env);
ATA_ERROR("system can not start without function : start");
functionCallbackIsMissing = true;
}
ret = safeInitMethodID(m_javaMethodOrchestraActivityAudioStop,
m_javaClassOrchestraCallback,
"stop",
"(I)Z");
if (ret == false) {
jvm_basics::checkExceptionJavaVM(_env);
ATA_ERROR("system can not start without function : stop");
functionCallbackIsMissing = true;
}
m_javaObjectOrchestraCallback = _env->NewGlobalRef(_objCallback);
if (m_javaObjectOrchestraCallback == null) {
functionCallbackIsMissing = true;
}
m_javaDefaultClassString = m_JavaVirtualMachinePointer->FindClass("java/lang/String" );
if (m_javaDefaultClassString == 0) {
ATA_ERROR("C->java : Can't find java/lang/String" );
// remove access on the virtual machine :
m_JavaVirtualMachinePointer = null;
functionCallbackIsMissing = true;
}
if (functionCallbackIsMissing == true) {
ATA_CRITICAL(" mission one function ==> system can not work withut it...");
}
}
~AndroidOrchestraContext() {
// TODO ...
}
void unInit(JNIEnv* _env) {
_env->DeleteGlobalRef(m_javaObjectOrchestraCallback);
m_javaObjectOrchestraCallback = null;
}
uint32_t getDeviceCount() {
// Request the device count from the Java side:
ATA_WARNING("C->java : audio get device count");
int status;
if(!java_attach_current_thread(&status)) {
return 0;
}
ATA_DEBUG("Call CallIntMethod ...");
//Call java ...
jint ret = m_JavaVirtualMachinePointer->CallIntMethod(m_javaObjectOrchestraCallback, m_javaMethodOrchestraActivityAudioGetDeviceCount);
// manage exception:
jvm_basics::checkExceptionJavaVM(m_JavaVirtualMachinePointer);
java_detach_current_thread(status);
ATA_WARNING(" find " << (uint32_t)ret << " IO");
return (uint32_t)ret;
}
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _idDevice) {
audio::orchestra::DeviceInfo info;
// Request the device information from the Java side:
ATA_WARNING("C->java : audio get device info " << _idDevice);
int status;
if(!java_attach_current_thread(&status)) {
return info;
}
//Call java ...
jstring returnString = (jstring) m_JavaVirtualMachinePointer->CallObjectMethod(m_javaObjectOrchestraCallback, m_javaMethodOrchestraActivityAudioGetDeviceProperty, _idDevice);
const char *js = m_JavaVirtualMachinePointer->GetStringUTFChars(returnString, null);
etk::String retString(js);
m_JavaVirtualMachinePointer->ReleaseStringUTFChars(returnString, js);
//m_JavaVirtualMachinePointer->DeleteLocalRef(returnString);
// manage exception:
jvm_basics::checkExceptionJavaVM(m_JavaVirtualMachinePointer);
java_detach_current_thread(status);
ATA_WARNING("get device information : " << retString);
ejson::Document doc;
if (doc.parse(retString) == false) {
return info;
}
info.name = doc["name"].toString().get("no-name");
if (doc["type"].toString().get("output") == "output") {
info.input = false;
} else {
info.input = true;
}
ejson::Array list = doc["sample-rate"].toArray();
if (list.exist() == true) {
for (auto it : list) {
info.sampleRates.pushBack(int32_t(it.toNumber().get(48000)));
}
}
list = doc["channels"].toArray();
if (list.exist() == true) {
for (auto it : list) {
info.channels.pushBack(audio::getChannelFromString(it.toString().get("???")));
}
}
list = doc["format"].toArray();
if (list.exist() == true) {
for (auto it : list) {
info.nativeFormats.pushBack(audio::getFormatFromString(it.toString().get("???")));
}
}
info.isDefault = doc["default"].toBoolean().get(false);
info.isCorrect = true;
return info;
}
private:
etk::Vector<ememory::WeakPtr<audio::orchestra::api::Android> > m_instanceList; // list of connected handle ...
//AndroidAudioCallback m_audioCallBack;
//void* m_audioCallBackUserData;
public:
int32_t open(uint32_t _idDevice,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options,
ememory::SharedPtr<audio::orchestra::api::Android> _instance) {
ATA_DEBUG("C->java : audio open device");
int status;
if(!java_attach_current_thread(&status)) {
return -1;
}
//Call java ...
jint ret = false;
if (_mode == audio::orchestra::mode_output) {
ret = m_JavaVirtualMachinePointer->CallIntMethod(m_javaObjectOrchestraCallback, m_javaMethodOrchestraActivityAudioOpenDeviceOutput, _idDevice, _sampleRate, _channels, /*_format*/ 1);
} else {
ret = m_JavaVirtualMachinePointer->CallIntMethod(m_javaObjectOrchestraCallback, m_javaMethodOrchestraActivityAudioOpenDeviceInput, _idDevice, _sampleRate, _channels, /*_format*/ 1);
}
// manage exception:
jvm_basics::checkExceptionJavaVM(m_JavaVirtualMachinePointer);
java_detach_current_thread(status);
if (int32_t(ret) >= 0) {
m_instanceList.pushBack(_instance);
return int32_t(ret);
}
return -1;
}
public:
enum audio::orchestra::error closeStream(int32_t _id) {
ATA_DEBUG("C->java : audio close device");
int status;
if(!java_attach_current_thread(&status)) {
return audio::orchestra::error_fail;
}
//Call java ...
jboolean ret = m_JavaVirtualMachinePointer->CallBooleanMethod(m_javaObjectOrchestraCallback, m_javaMethodOrchestraActivityAudioCloseDevice, _id);
// manage exception:
jvm_basics::checkExceptionJavaVM(m_JavaVirtualMachinePointer);
java_detach_current_thread(status);
if (bool(ret) == false) {
return audio::orchestra::error_fail;
}
return audio::orchestra::error_none;
}
enum audio::orchestra::error startStream(int32_t _id) {
ATA_DEBUG("C->java : audio start device");
int status;
if(!java_attach_current_thread(&status)) {
return audio::orchestra::error_fail;
}
//Call java ...
jboolean ret = m_JavaVirtualMachinePointer->CallBooleanMethod(m_javaObjectOrchestraCallback, m_javaMethodOrchestraActivityAudioStart, _id);
// manage exception:
jvm_basics::checkExceptionJavaVM(m_JavaVirtualMachinePointer);
java_detach_current_thread(status);
if (bool(ret) == false) {
return audio::orchestra::error_fail;
}
return audio::orchestra::error_none;
}
enum audio::orchestra::error stopStream(int32_t _id) {
ATA_DEBUG("C->java : audio close device");
int status;
if(!java_attach_current_thread(&status)) {
return audio::orchestra::error_fail;
}
//Call java ...
jboolean ret = m_JavaVirtualMachinePointer->CallBooleanMethod(m_javaObjectOrchestraCallback, m_javaMethodOrchestraActivityAudioStop, _id);
// manage exception:
jvm_basics::checkExceptionJavaVM(m_JavaVirtualMachinePointer);
java_detach_current_thread(status);
if (bool(ret) == false) {
return audio::orchestra::error_fail;
}
return audio::orchestra::error_none;
}
enum audio::orchestra::error abortStream(int32_t _id) {
return audio::orchestra::error_fail;
}
void playback(int32_t _id, int16_t* _dst, int32_t _nbChunk) {
auto it = m_instanceList.begin();
while (it != m_instanceList.end()) {
auto elem = it->lock();
if (elem == null) {
it = m_instanceList.erase(it);
continue;
}
if (elem->getUId() == _id) {
elem->playback(_dst, _nbChunk);
}
++it;
}
}
void record(int32_t _id, int16_t* _dst, int32_t _nbChunk) {
auto it = m_instanceList.begin();
while (it != m_instanceList.end()) {
auto elem = it->lock();
if (elem == null) {
it = m_instanceList.erase(it);
continue;
}
if (elem->getUId() == _id) {
elem->record(_dst, _nbChunk);
}
++it;
}
}
};
static ememory::SharedPtr<AndroidOrchestraContext> s_localContext;
static int32_t s_nbContextRequested(0);
uint32_t audio::orchestra::api::android::getDeviceCount() {
if (s_localContext == null) {
ATA_ERROR("Have no Orchertra API instanciate in JAVA ...");
return 0;
}
return s_localContext->getDeviceCount();
}
audio::orchestra::DeviceInfo audio::orchestra::api::android::getDeviceInfo(uint32_t _device) {
if (s_localContext == null) {
return audio::orchestra::DeviceInfo();
}
return s_localContext->getDeviceInfo(_device);
}
int32_t audio::orchestra::api::android::open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options,
ememory::SharedPtr<audio::orchestra::api::Android> _instance) {
if (s_localContext == null) {
return -1;
}
return s_localContext->open(_device, _mode, _channels, _firstChannel, _sampleRate, _format, _bufferSize, _options, _instance);
}
enum audio::orchestra::error audio::orchestra::api::android::closeStream(int32_t _id) {
if (s_localContext == null) {
return audio::orchestra::error_fail;
}
return s_localContext->closeStream(_id);
}
enum audio::orchestra::error audio::orchestra::api::android::startStream(int32_t _id) {
if (s_localContext == null) {
return audio::orchestra::error_fail;
}
return s_localContext->startStream(_id);
}
enum audio::orchestra::error audio::orchestra::api::android::stopStream(int32_t _id) {
if (s_localContext == null) {
return audio::orchestra::error_fail;
}
return s_localContext->stopStream(_id);
}
enum audio::orchestra::error audio::orchestra::api::android::abortStream(int32_t _id) {
if (s_localContext == null) {
return audio::orchestra::error_fail;
}
return s_localContext->abortStream(_id);
}
extern "C" {
void Java_org_musicdsp_orchestra_OrchestraNative_NNsetJavaManager(JNIEnv* _env,
jclass _classBase,
jobject _objCallback) {
ethread::UniqueLock lock(jvm_basics::getMutexJavaVM());
ATA_INFO("*******************************************");
ATA_INFO("** Creating Orchestra context **");
ATA_INFO("*******************************************");
if (s_localContext != null) {
s_nbContextRequested++;
}
s_localContext = ememory::makeShared<AndroidOrchestraContext>(_env, _classBase, _objCallback);
if (s_localContext == null) {
ATA_ERROR("Can not allocate the orchestra main context instance");
return;
}
s_nbContextRequested++;
}
void Java_org_musicdsp_orchestra_OrchestraNative_NNsetJavaManagerRemove(JNIEnv* _env, jclass _cls) {
ethread::UniqueLock lock(jvm_basics::getMutexJavaVM());
ATA_INFO("*******************************************");
ATA_INFO("** remove Orchestra Pointer **");
ATA_INFO("*******************************************");
if (s_nbContextRequested == 0) {
ATA_ERROR("Request remove orchestra interface from Android, but no more interface availlable");
return;
}
s_nbContextRequested--;
if (s_nbContextRequested == 0) {
s_localContext.reset();
}
}
void Java_org_musicdsp_orchestra_OrchestraNative_NNPlayback(JNIEnv* _env,
void* _reserved,
jint _id,
jshortArray _location,
jint _nbChunk) {
ethread::UniqueLock lock(jvm_basics::getMutexJavaVM());
if (s_localContext == null) {
ATA_ERROR("Call audio with no more Low level interface");
return;
}
// get the short* pointer from the Java array
jboolean isCopy;
jshort* dst = _env->GetShortArrayElements(_location, &isCopy);
if (dst != null) {
//ATA_INFO("Need audioData " << int32_t(_nbChunk));
s_localContext->playback(int32_t(_id), static_cast<short*>(dst), int32_t(_nbChunk));
}
// TODO : understand why it did not work correctly ...
//if (isCopy == JNI_TRUE) {
// release the short* pointer
_env->ReleaseShortArrayElements(_location, dst, 0);
//}
}
void Java_org_musicdsp_orchestra_OrchestraNative_NNRecord(JNIEnv* _env,
void* _reserved,
jint _id,
jshortArray _location,
jint _nbChunk) {
ethread::UniqueLock lock(jvm_basics::getMutexJavaVM());
if (s_localContext == null) {
ATA_ERROR("Call audio with no more Low level interface");
return;
}
// get the short* pointer from the Java array
jboolean isCopy;
jshort* dst = _env->GetShortArrayElements(_location, &isCopy);
if (dst != null) {
//ATA_INFO("Need audioData " << int32_t(_nbChunk));
s_localContext->record(int32_t(_id), static_cast<short*>(dst), int32_t(_nbChunk));
}
// TODO : understand why it did not work correctly ...
//if (isCopy == JNI_TRUE) {
// release the short* pointer
_env->ReleaseShortArrayElements(_location, dst, 0);
//}
}
}
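The getDeviceCount()/getDeviceInfo()/open() methods above all repeat the same attach-call-detach JNI dance (java_attach_current_thread / java_detach_current_thread, always balanced). A hypothetical RAII wrapper, shown only to illustrate that pattern and not part of this codebase, could look like this:

#include <jni.h>

// Hypothetical sketch: the JNI calls (GetEnv / AttachCurrentThread /
// DetachCurrentThread) are standard; the class itself is not in the project.
class ScopedJvmAttach {
	public:
		ScopedJvmAttach(JavaVM* _vm) :
		  m_vm(_vm),
		  m_env(nullptr),
		  m_detachNeeded(false) {
			int status = m_vm->GetEnv((void**)&m_env, JNI_VERSION_1_6);
			if (status == JNI_EDETACHED) {
				// this native thread is not known to the JVM yet: attach it
				if (m_vm->AttachCurrentThread(&m_env, nullptr) == JNI_OK) {
					m_detachNeeded = true;
				} else {
					m_env = nullptr;
				}
			}
		}
		~ScopedJvmAttach() {
			// only detach a thread that this guard attached itself
			if (m_detachNeeded == true) {
				m_vm->DetachCurrentThread();
			}
		}
		JNIEnv* env() const {
			return m_env;
		}
	private:
		JavaVM* m_vm;
		JNIEnv* m_env;
		bool m_detachNeeded;
};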

View File

@ -1,42 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
*/
#pragma once
#ifdef ORCHESTRA_BUILD_JAVA
#include <audio/orchestra/DeviceInfo.hpp>
#include <audio/orchestra/mode.hpp>
#include <audio/orchestra/error.hpp>
#include <audio/orchestra/StreamOptions.hpp>
#include <audio/format.hpp>
#include <ememory/memory.hpp>
namespace audio {
namespace orchestra {
namespace api {
class Android;
namespace android {
uint32_t getDeviceCount();
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device);
int32_t open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options,
ememory::SharedPtr<audio::orchestra::api::Android> _instance);
enum audio::orchestra::error closeStream(int32_t _id);
enum audio::orchestra::error startStream(int32_t _id);
enum audio::orchestra::error stopStream(int32_t _id);
enum audio::orchestra::error abortStream(int32_t _id);
}
}
}
}
#endif

View File

@ -1,921 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#if defined(ORCHESTRA_BUILD_ASIO)
#include <audio/orchestra/Interface.hpp>
#include <audio/orchestra/debug.hpp>
ememory::SharedPtr<audio::orchestra::Api> audio::orchestra::api::Asio::create() {
return ememory::SharedPtr<audio::orchestra::api::Asio>(ETK_NEW(audio::orchestra::api::Asio));
}
// The ASIO API is designed around a callback scheme, so this
// implementation is similar to that used for OS-X CoreAudio and Linux
// Jack. The primary constraint with ASIO is that it only allows
// access to a single driver at a time. Thus, it is not possible to
// have more than one simultaneous RtAudio stream.
//
// This implementation also requires a number of external ASIO files
// and a few global variables. The ASIO callback scheme does not
// allow for the passing of user data, so we must create a global
// pointer to our callbackInfo structure.
//
// On unix systems, we make use of a pthread condition variable.
// Since there is no equivalent in Windows, I hacked something based
// on information found in
// http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
extern "C" {
#include "asiosys.h"
#include "asio.h"
#include "iasiothiscallresolver.h"
#include "asiodrivers.h"
#include <math.h>
}
static AsioDrivers drivers;
static ASIOCallbacks asioCallbacks;
static ASIODriverInfo driverInfo;
static CallbackInfo *asioCallbackInfo;
static bool asioXRun;
namespace audio {
namespace orchestra {
namespace api {
class AsioPrivate {
public:
int32_t drainCounter; // Tracks callback counts when draining
bool internalDrain; // Indicates if stop is initiated from callback or not.
ASIOBufferInfo *bufferInfos;
HANDLE condition;
AsioPrivate() :
drainCounter(0),
internalDrain(false),
bufferInfos(0) {
}
};
}
}
}
// Function declarations (definitions at end of section)
static const char* getAsioErrorString(ASIOError _result);
static void sampleRateChanged(ASIOSampleRate _sRate);
static long asioMessages(long _selector, long _value, void* _message, double* _opt);
audio::orchestra::api::Asio::Asio() :
m_private(ETK_NEW(audio::orchestra::api::AsioPrivate)) {
// ASIO cannot run on a multi-threaded apartment. You can call
// CoInitialize beforehand, but it must be for apartment threading
// (in which case, CoInitialize will return S_FALSE here).
m_coInitialized = false;
HRESULT hr = CoInitialize(null);
if (FAILED(hr)) {
ATA_ERROR("requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)");
}
m_coInitialized = true;
drivers.removeCurrentDriver();
driverInfo.asioVersion = 2;
// See note in DirectSound implementation about GetDesktopWindow().
driverInfo.sysRef = GetForegroundWindow();
}
audio::orchestra::api::Asio::~Asio() {
if (m_state != audio::orchestra::state::closed) {
closeStream();
}
if (m_coInitialized) {
CoUninitialize();
}
}
uint32_t audio::orchestra::api::Asio::getDeviceCount() {
return (uint32_t) drivers.asioGetNumDev();
}
audio::orchestra::DeviceInfo audio::orchestra::api::Asio::getDeviceInfo(uint32_t _device) {
audio::orchestra::DeviceInfo info;
info.probed = false;
// Get device ID
uint32_t nDevices = getDeviceCount();
if (nDevices == 0) {
ATA_ERROR("no devices found!");
return info;
}
if (_device >= nDevices) {
ATA_ERROR("device ID is invalid!");
return info;
}
// If a stream is already open, we cannot probe other devices. Thus, use the saved results.
if (m_state != audio::orchestra::state::closed) {
if (_device >= m_devices.size()) {
ATA_ERROR("device ID was not present before stream was opened.");
return info;
}
return m_devices[ _device ];
}
char driverName[32];
ASIOError result = drivers.asioGetDriverName((int) _device, driverName, 32);
if (result != ASE_OK) {
ATA_ERROR("unable to get driver name (" << getAsioErrorString(result) << ").");
return info;
}
info.name = driverName;
if (!drivers.loadDriver(driverName)) {
ATA_ERROR("unable to load driver (" << driverName << ").");
return info;
}
result = ASIOInit(&driverInfo);
if (result != ASE_OK) {
ATA_ERROR("error (" << getAsioErrorString(result) << ") initializing driver (" << driverName << ").");
return info;
}
// Determine the device channel information.
long inputChannels, outputChannels;
result = ASIOGetChannels(&inputChannels, &outputChannels);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
ATA_ERROR("error (" << getAsioErrorString(result) << ") getting channel count (" << driverName << ").");
return info;
}
info.outputChannels = outputChannels;
info.inputChannels = inputChannels;
if (info.outputChannels > 0 && info.inputChannels > 0) {
info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
}
// Determine the supported sample rates.
info.sampleRates.clear();
for (uint32_t i=0; i<MAX_SAMPLE_RATES; i++) {
result = ASIOCanSampleRate((ASIOSampleRate) SAMPLE_RATES[i]);
if (result == ASE_OK) {
info.sampleRates.pushBack(SAMPLE_RATES[i]);
}
}
// Determine supported data types ... just check first channel and assume rest are the same.
ASIOChannelInfo channelInfo;
channelInfo.channel = 0;
channelInfo.isInput = true;
if (info.inputChannels <= 0) {
channelInfo.isInput = false;
}
result = ASIOGetChannelInfo(&channelInfo);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
ATA_ERROR("error (" << getAsioErrorString(result) << ") getting driver channel info (" << driverName << ").");
return info;
}
info.nativeFormats.clear();
if ( channelInfo.type == ASIOSTInt16MSB
|| channelInfo.type == ASIOSTInt16LSB) {
info.nativeFormats.pushBack(audio::format_int16);
} else if ( channelInfo.type == ASIOSTInt32MSB
|| channelInfo.type == ASIOSTInt32LSB) {
info.nativeFormats.pushBack(audio::format_int32);
} else if ( channelInfo.type == ASIOSTFloat32MSB
|| channelInfo.type == ASIOSTFloat32LSB) {
info.nativeFormats.pushBack(audio::format_float);
} else if ( channelInfo.type == ASIOSTFloat64MSB
|| channelInfo.type == ASIOSTFloat64LSB) {
info.nativeFormats.pushBack(audio::format_double);
} else if ( channelInfo.type == ASIOSTInt24MSB
|| channelInfo.type == ASIOSTInt24LSB) {
info.nativeFormats.pushBack(audio::format_int24);
}
if (info.outputChannels > 0){
if (getDefaultOutputDevice() == _device) {
info.isDefaultOutput = true;
}
}
if (info.inputChannels > 0) {
if (getDefaultInputDevice() == _device) {
info.isDefaultInput = true;
}
}
info.probed = true;
drivers.removeCurrentDriver();
return info;
}
static void bufferSwitch(long _index, ASIOBool _processNow) {
audio::orchestra::api::Asio* object = (audio::orchestra::api::Asio*)asioCallbackInfo->object;
object->callbackEvent(_index);
}
void audio::orchestra::api::Asio::saveDeviceInfo() {
m_devices.clear();
uint32_t nDevices = getDeviceCount();
m_devices.resize(nDevices);
for (uint32_t i=0; i<nDevices; i++) {
m_devices[i] = getDeviceInfo(i);
}
}
bool audio::orchestra::api::Asio::open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t* _bufferSize,
const audio::orchestra::StreamOptions& _options) {
// For ASIO, a duplex stream MUST use the same driver.
if ( _mode == audio::orchestra::mode_input
&& m_mode == audio::orchestra::mode_output
&& m_device[0] != _device) {
ATA_ERROR("an ASIO duplex stream must use the same device for input and output!");
return false;
}
char driverName[32];
ASIOError result = drivers.asioGetDriverName((int) _device, driverName, 32);
if (result != ASE_OK) {
ATA_ERROR("unable to get driver name (" << getAsioErrorString(result) << ").");
return false;
}
// Only load the driver once for duplex stream.
if ( _mode != audio::orchestra::mode_input
|| m_mode != audio::orchestra::mode_output) {
// The getDeviceInfo() function will not work when a stream is open
// because ASIO does not allow multiple devices to run at the same
// time. Thus, we'll probe the system before opening a stream and
// save the results for use by getDeviceInfo().
this->saveDeviceInfo();
if (!drivers.loadDriver(driverName)) {
ATA_ERROR("unable to load driver (" << driverName << ").");
return false;
}
result = ASIOInit(&driverInfo);
if (result != ASE_OK) {
ATA_ERROR("error (" << getAsioErrorString(result) << ") initializing driver (" << driverName << ").");
return false;
}
}
// Check the device channel count.
long inputChannels, outputChannels;
result = ASIOGetChannels(&inputChannels, &outputChannels);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
ATA_ERROR("error (" << getAsioErrorString(result) << ") getting channel count (" << driverName << ").");
return false;
}
if ( ( _mode == audio::orchestra::mode_output
&& (_channels+_firstChannel) > (uint32_t) outputChannels)
|| ( _mode == audio::orchestra::mode_input
&& (_channels+_firstChannel) > (uint32_t) inputChannels)) {
drivers.removeCurrentDriver();
ATA_ERROR("driver (" << driverName << ") does not support requested channel count (" << _channels << ") + offset (" << _firstChannel << ").");
return false;
}
m_nDeviceChannels[modeToIdTable(_mode)] = _channels;
m_nUserChannels[modeToIdTable(_mode)] = _channels;
m_channelOffset[modeToIdTable(_mode)] = _firstChannel;
// Verify the sample rate is supported.
result = ASIOCanSampleRate((ASIOSampleRate) _sampleRate);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
ATA_ERROR("driver (" << driverName << ") does not support requested sample rate (" << _sampleRate << ").");
return false;
}
// Get the current sample rate
ASIOSampleRate currentRate;
result = ASIOGetSampleRate(&currentRate);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
ATA_ERROR("driver (" << driverName << ") error getting sample rate.");
return false;
}
// Set the sample rate only if necessary
if (currentRate != _sampleRate) {
result = ASIOSetSampleRate((ASIOSampleRate) _sampleRate);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
ATA_ERROR("driver (" << driverName << ") error setting sample rate (" << _sampleRate << ").");
return false;
}
}
// Determine the driver data type.
ASIOChannelInfo channelInfo;
channelInfo.channel = 0;
if (_mode == audio::orchestra::mode_output) {
channelInfo.isInput = false;
} else {
channelInfo.isInput = true;
}
result = ASIOGetChannelInfo(&channelInfo);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
ATA_ERROR("driver (" << driverName << ") error (" << getAsioErrorString(result) << ") getting data format.");
return false;
}
// Assuming WINDOWS host is always little-endian.
m_doByteSwap[modeToIdTable(_mode)] = false;
m_userFormat = _format;
m_deviceFormat[modeToIdTable(_mode)] = 0;
if ( channelInfo.type == ASIOSTInt16MSB
|| channelInfo.type == ASIOSTInt16LSB) {
m_deviceFormat[modeToIdTable(_mode)] = audio::format_int16;
if (channelInfo.type == ASIOSTInt16MSB) {
m_doByteSwap[modeToIdTable(_mode)] = true;
}
} else if ( channelInfo.type == ASIOSTInt32MSB
|| channelInfo.type == ASIOSTInt32LSB) {
m_deviceFormat[modeToIdTable(_mode)] = audio::format_int32;
if (channelInfo.type == ASIOSTInt32MSB) {
m_doByteSwap[modeToIdTable(_mode)] = true;
}
} else if ( channelInfo.type == ASIOSTFloat32MSB
|| channelInfo.type == ASIOSTFloat32LSB) {
m_deviceFormat[modeToIdTable(_mode)] = audio::format_float;
if (channelInfo.type == ASIOSTFloat32MSB) {
m_doByteSwap[modeToIdTable(_mode)] = true;
}
} else if ( channelInfo.type == ASIOSTFloat64MSB
|| channelInfo.type == ASIOSTFloat64LSB) {
m_deviceFormat[modeToIdTable(_mode)] = audio::format_double;
if (channelInfo.type == ASIOSTFloat64MSB) {
m_doByteSwap[modeToIdTable(_mode)] = true;
}
} else if ( channelInfo.type == ASIOSTInt24MSB
|| channelInfo.type == ASIOSTInt24LSB) {
m_deviceFormat[modeToIdTable(_mode)] = audio::format_int24;
if (channelInfo.type == ASIOSTInt24MSB) {
m_doByteSwap[modeToIdTable(_mode)] = true;
}
}
if (m_deviceFormat[modeToIdTable(_mode)] == 0) {
drivers.removeCurrentDriver();
ATA_ERROR("driver (" << driverName << ") data format not supported by RtAudio.");
return false;
}
// Set the buffer size. For a duplex stream, this will end up
// setting the buffer size based on the input constraints, which
// should be ok.
long minSize, maxSize, preferSize, granularity;
result = ASIOGetBufferSize(&minSize, &maxSize, &preferSize, &granularity);
if (result != ASE_OK) {
drivers.removeCurrentDriver();
ATA_ERROR("driver (" << driverName << ") error (" << getAsioErrorString(result) << ") getting buffer size.");
return false;
}
if (*_bufferSize < (uint32_t) minSize) {
*_bufferSize = (uint32_t) minSize;
} else if (*_bufferSize > (uint32_t) maxSize) {
*_bufferSize = (uint32_t) maxSize;
} else if (granularity == -1) {
// Make sure bufferSize is a power of two.
int32_t log2_of_min_size = 0;
int32_t log2_of_max_size = 0;
for (uint32_t i = 0; i < sizeof(long) * 8; i++) {
if (minSize & ((long)1 << i)) {
log2_of_min_size = i;
}
if (maxSize & ((long)1 << i)) {
log2_of_max_size = i;
}
}
long min_delta = etk::abs((long)*_bufferSize - ((long)1 << log2_of_min_size));
int32_t min_delta_num = log2_of_min_size;
for (int32_t i = log2_of_min_size + 1; i <= log2_of_max_size; i++) {
long current_delta = etk::abs((long)*_bufferSize - ((long)1 << i));
if (current_delta < min_delta) {
min_delta = current_delta;
min_delta_num = i;
}
}
*_bufferSize = ((uint32_t)1 << min_delta_num);
if (*_bufferSize < (uint32_t) minSize) {
*_bufferSize = (uint32_t) minSize;
} else if (*_bufferSize > (uint32_t) maxSize) {
*_bufferSize = (uint32_t) maxSize;
}
} else if (granularity != 0) {
// Set to an even multiple of granularity, rounding up.
*_bufferSize = (*_bufferSize + granularity-1) / granularity * granularity;
}
if ( _mode == audio::orchestra::mode_input
&& m_mode == audio::orchestra::mode_output
&& m_bufferSize != *_bufferSize) {
drivers.removeCurrentDriver();
ATA_ERROR("input/output buffersize discrepancy!");
return false;
}
m_bufferSize = *_bufferSize;
m_nBuffers = 2;
// ASIO always uses non-interleaved buffers.
m_deviceInterleaved[modeToIdTable(_mode)] = false;
m_private->bufferInfos = 0;
// Create a manual-reset event.
m_private->condition = CreateEvent(null, // no security
TRUE, // manual-reset
FALSE, // non-signaled initially
null); // unnamed
// Create the ASIO internal buffers. Since RtAudio sets up input
// and output separately, we'll have to dispose of previously
// created output buffers for a duplex stream.
long inputLatency, outputLatency;
if ( _mode == audio::orchestra::mode_input
&& m_mode == audio::orchestra::mode_output) {
ASIODisposeBuffers();
if (m_private->bufferInfos != null) {
free(m_private->bufferInfos);
m_private->bufferInfos = null;
}
}
// Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
bool buffersAllocated = false;
uint32_t i, nChannels = m_nDeviceChannels[0] + m_nDeviceChannels[1];
m_private->bufferInfos = (ASIOBufferInfo *) malloc(nChannels * sizeof(ASIOBufferInfo));
if (m_private->bufferInfos == null) {
ATA_ERROR("error allocating bufferInfo memory for driver (" << driverName << ").");
goto error;
}
ASIOBufferInfo *infos;
infos = m_private->bufferInfos;
for (i=0; i<m_nDeviceChannels[0]; i++, infos++) {
infos->isInput = ASIOFalse;
infos->channelNum = i + m_channelOffset[0];
infos->buffers[0] = infos->buffers[1] = 0;
}
for (i=0; i<m_nDeviceChannels[1]; i++, infos++) {
infos->isInput = ASIOTrue;
infos->channelNum = i + m_channelOffset[1];
infos->buffers[0] = infos->buffers[1] = 0;
}
// Set up the ASIO callback structure and create the ASIO data buffers.
asioCallbacks.bufferSwitch = &bufferSwitch;
asioCallbacks.sampleRateDidChange = &sampleRateChanged;
asioCallbacks.asioMessage = &asioMessages;
asioCallbacks.bufferSwitchTimeInfo = null;
result = ASIOCreateBuffers(m_private->bufferInfos, nChannels, m_bufferSize, &asioCallbacks);
if (result != ASE_OK) {
ATA_ERROR("driver (" << driverName << ") error (" << getAsioErrorString(result) << ") creating buffers.");
goto error;
}
buffersAllocated = true;
// Set flags for buffer conversion.
m_doConvertBuffer[modeToIdTable(_mode)] = false;
if (m_userFormat != m_deviceFormat[modeToIdTable(_mode)]) {
m_doConvertBuffer[modeToIdTable(_mode)] = true;
}
if ( m_deviceInterleaved[modeToIdTable(_mode)] == false
&& m_nUserChannels[modeToIdTable(_mode)] > 1) {
m_doConvertBuffer[modeToIdTable(_mode)] = true;
}
// Allocate necessary internal buffers
uint64_t bufferBytes;
bufferBytes = m_nUserChannels[modeToIdTable(_mode)] * *_bufferSize * audio::getFormatBytes(m_userFormat);
m_userBuffer[modeToIdTable(_mode)] = (char *) calloc(bufferBytes, 1);
if (m_userBuffer[modeToIdTable(_mode)] == null) {
ATA_ERROR("error allocating user buffer memory.");
goto error;
}
if (m_doConvertBuffer[modeToIdTable(_mode)]) {
bool makeBuffer = true;
bufferBytes = m_nDeviceChannels[modeToIdTable(_mode)] * audio::getFormatBytes(m_deviceFormat[modeToIdTable(_mode)]);
if (_mode == audio::orchestra::mode_input) {
if (m_mode == audio::orchestra::mode_output && m_deviceBuffer) {
uint64_t bytesOut = m_nDeviceChannels[0] * audio::getFormatBytes(m_deviceFormat[0]);
if (bufferBytes <= bytesOut) {
makeBuffer = false;
}
}
}
if (makeBuffer) {
bufferBytes *= *_bufferSize;
if (m_deviceBuffer) {
free(m_deviceBuffer);
m_deviceBuffer = null;
}
m_deviceBuffer = (char *) calloc(bufferBytes, 1);
if (m_deviceBuffer == null) {
ATA_ERROR("error allocating device buffer memory.");
goto error;
}
}
}
m_sampleRate = _sampleRate;
m_device[modeToIdTable(_mode)] = _device;
m_state = audio::orchestra::state::stopped;
if ( _mode == audio::orchestra::mode_output
&& _mode == audio::orchestra::mode_input) {
// We had already set up an output stream.
m_mode = audio::orchestra::mode_duplex;
} else {
m_mode = _mode;
}
// Determine device latencies
result = ASIOGetLatencies(&inputLatency, &outputLatency);
if (result != ASE_OK) {
ATA_ERROR("driver (" << driverName << ") error (" << getAsioErrorString(result) << ") getting latency.");
} else {
m_latency[0] = outputLatency;
m_latency[1] = inputLatency;
}
// Setup the buffer conversion information structure. We don't use
// buffers to do channel offsets, so we override that parameter
// here.
if (m_doConvertBuffer[modeToIdTable(_mode)]) {
setConvertInfo(_mode, 0);
}
return true;
error:
if (buffersAllocated) {
ASIODisposeBuffers();
}
drivers.removeCurrentDriver();
CloseHandle(m_private->condition);
if (m_private->bufferInfos != null) {
free(m_private->bufferInfos);
m_private->bufferInfos = null;
}
for (int32_t i=0; i<2; i++) {
if (m_userBuffer[i]) {
free(m_userBuffer[i]);
m_userBuffer[i] = 0;
}
}
if (m_deviceBuffer) {
free(m_deviceBuffer);
m_deviceBuffer = 0;
}
return false;
}
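// Illustrative helper (not part of the original file): the buffer-size choice made
// above when granularity == -1, i.e. pick the power of two inside the driver's
// [minSize, maxSize] range that is closest to the requested size, then clamp.
static uint32_t asioNearestPowerOfTwoInRange(uint32_t _requested, long _minSize, long _maxSize) {
	int32_t log2min = 0;
	int32_t log2max = 0;
	for (uint32_t i = 0; i < sizeof(long) * 8; i++) {
		if (_minSize & ((long)1 << i)) {
			log2min = i;
		}
		if (_maxSize & ((long)1 << i)) {
			log2max = i;
		}
	}
	long bestDelta = etk::abs((long)_requested - ((long)1 << log2min));
	int32_t bestExp = log2min;
	for (int32_t i = log2min + 1; i <= log2max; i++) {
		long delta = etk::abs((long)_requested - ((long)1 << i));
		if (delta < bestDelta) {
			bestDelta = delta;
			bestExp = i;
		}
	}
	uint32_t out = ((uint32_t)1 << bestExp);
	if (out < (uint32_t)_minSize) {
		out = (uint32_t)_minSize;
	} else if (out > (uint32_t)_maxSize) {
		out = (uint32_t)_maxSize;
	}
	return out;
}
// Example: asioNearestPowerOfTwoInRange(300, 64, 2048) returns 256.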
enum audio::orchestra::error audio::orchestra::api::Asio::closeStream() {
if (m_state == audio::orchestra::state::closed) {
ATA_ERROR("no open stream to close!");
return audio::orchestra::error_warning;
}
if (m_state == audio::orchestra::state::running) {
m_state = audio::orchestra::state::stopped;
ASIOStop();
}
ASIODisposeBuffers();
drivers.removeCurrentDriver();
CloseHandle(m_private->condition);
if (m_private->bufferInfos) {
free(m_private->bufferInfos);
}
for (int32_t i=0; i<2; i++) {
if (m_userBuffer[i]) {
free(m_userBuffer[i]);
m_userBuffer[i] = 0;
}
}
if (m_deviceBuffer) {
free(m_deviceBuffer);
m_deviceBuffer = 0;
}
m_mode = audio::orchestra::mode_unknow;
m_state = audio::orchestra::state::closed;
return audio::orchestra::error_none;
}
bool stopThreadCalled = false;
enum audio::orchestra::error audio::orchestra::api::Asio::startStream() {
// TODO : Check return ...
audio::orchestra::Api::startStream();
if (verifyStream() != audio::orchestra::error_none) {
return audio::orchestra::error_fail;
}
if (m_state == audio::orchestra::state::running) {
ATA_ERROR("the stream is already running!");
return audio::orchestra::error_warning;
}
ASIOError result = ASIOStart();
if (result != ASE_OK) {
ATA_ERROR("error (" << getAsioErrorString(result) << ") starting device.");
goto unlock;
}
m_private->drainCounter = 0;
m_private->internalDrain = false;
ResetEvent(m_private->condition);
m_state = audio::orchestra::state::running;
asioXRun = false;
unlock:
stopThreadCalled = false;
if (result == ASE_OK) {
return audio::orchestra::error_none;
}
return audio::orchestra::error_systemError;
}
enum audio::orchestra::error audio::orchestra::api::Asio::stopStream() {
if (verifyStream() != audio::orchestra::error_none) {
return audio::orchestra::error_fail;
}
if (m_state == audio::orchestra::state::stopped) {
ATA_ERROR("the stream is already stopped!");
return audio::orchestra::error_warning;
}
if (m_mode == audio::orchestra::mode_output || m_mode == audio::orchestra::mode_duplex) {
if (m_private->drainCounter == 0) {
m_private->drainCounter = 2;
WaitForSingleObject(m_private->condition, INFINITE); // block until signaled
}
}
m_state = audio::orchestra::state::stopped;
ASIOError result = ASIOStop();
if (result != ASE_OK) {
ATA_ERROR("error (" << getAsioErrorString(result) << ") stopping device.");
}
if (result == ASE_OK) {
return audio::orchestra::error_none;
}
return audio::orchestra::error_systemError;
}
enum audio::orchestra::error audio::orchestra::api::Asio::abortStream() {
if (verifyStream() != audio::orchestra::error_none) {
return audio::orchestra::error_fail;
}
if (m_state == audio::orchestra::state::stopped) {
ATA_ERROR("the stream is already stopped!");
return audio::orchestra::error_warning;
}
// The following lines were commented-out because some behavior was
// noted where the device buffers need to be zeroed to avoid
// continuing sound, even when the device buffers are completely
// disposed. So now, calling abort is the same as calling stop.
// handle->drainCounter = 2;
return stopStream();
}
// This function will be called by a spawned thread when the user
// callback function signals that the stream should be stopped or
// aborted. It is necessary to handle it this way because the
// callbackEvent() function must return before the ASIOStop()
// function will return.
static unsigned __stdcall asioStopStream(void *_ptr) {
CallbackInfo* info = (CallbackInfo*)_ptr;
audio::orchestra::api::Asio* object = (audio::orchestra::api::Asio*)info->object;
object->stopStream();
_endthreadex(0);
return 0;
}
bool audio::orchestra::api::Asio::callbackEvent(long bufferIndex) {
if ( m_state == audio::orchestra::state::stopped
|| m_state == audio::orchestra::state::stopping) {
return true;
}
if (m_state == audio::orchestra::state::closed) {
ATA_ERROR("the stream is closed ... this shouldn't happen!");
return false;
}
CallbackInfo *info = (CallbackInfo *) &m_callbackInfo;
// Check if we were draining the stream and signal if finished.
if (m_private->drainCounter > 3) {
m_state = audio::orchestra::state::stopping;
if (m_private->internalDrain == false) {
SetEvent(m_private->condition);
} else { // spawn a thread to stop the stream
unsigned threadId;
m_callbackInfo.thread = _beginthreadex(null,
0,
&asioStopStream,
&m_callbackInfo,
0,
&threadId);
}
return true;
}
// Invoke user callback to get fresh output data UNLESS we are
// draining stream.
if (m_private->drainCounter == 0) {
audio::Time streamTime = getStreamTime();
etk::Vector<enum audio::orchestra::status> status;
if (m_mode != audio::orchestra::mode_input && asioXRun == true) {
status.pushBack(audio::orchestra::status::underflow);
asioXRun = false;
}
if (m_mode != audio::orchestra::mode_output && asioXRun == true) {
status.pushBack(audio::orchestra::status::overflow);
asioXRun = false;
}
int32_t cbReturnValue = info->callback(m_userBuffer[1],
streamTime,
m_userBuffer[0],
streamTime,
m_bufferSize,
status);
if (cbReturnValue == 2) {
m_state = audio::orchestra::state::stopping;
m_private->drainCounter = 2;
unsigned threadId;
m_callbackInfo.thread = _beginthreadex(null,
0,
&asioStopStream,
&m_callbackInfo,
0,
&threadId);
return true;
} else if (cbReturnValue == 1) {
m_private->drainCounter = 1;
m_private->internalDrain = true;
}
}
uint32_t nChannels, bufferBytes, i, j;
nChannels = m_nDeviceChannels[0] + m_nDeviceChannels[1];
if ( m_mode == audio::orchestra::mode_output
|| m_mode == audio::orchestra::mode_duplex) {
bufferBytes = m_bufferSize * audio::getFormatBytes(m_deviceFormat[0]);
if (m_private->drainCounter > 1) { // write zeros to the output stream
for (i=0, j=0; i<nChannels; i++) {
if (m_private->bufferInfos[i].isInput != ASIOTrue) {
memset(m_private->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes);
}
}
} else if (m_doConvertBuffer[0]) {
convertBuffer(m_deviceBuffer, m_userBuffer[0], m_convertInfo[0]);
if (m_doByteSwap[0]) {
byteSwapBuffer(m_deviceBuffer,
m_bufferSize * m_nDeviceChannels[0],
m_deviceFormat[0]);
}
for (i=0, j=0; i<nChannels; i++) {
if (m_private->bufferInfos[i].isInput != ASIOTrue) {
memcpy(m_private->bufferInfos[i].buffers[bufferIndex],
&m_deviceBuffer[j++*bufferBytes],
bufferBytes);
}
}
} else {
if (m_doByteSwap[0]) {
byteSwapBuffer(m_userBuffer[0],
m_bufferSize * m_nUserChannels[0],
m_userFormat);
}
for (i=0, j=0; i<nChannels; i++) {
if (m_private->bufferInfos[i].isInput != ASIOTrue) {
memcpy(m_private->bufferInfos[i].buffers[bufferIndex],
&m_userBuffer[0][bufferBytes*j++],
bufferBytes);
}
}
}
if (m_private->drainCounter) {
m_private->drainCounter++;
goto unlock;
}
}
if ( m_mode == audio::orchestra::mode_input
|| m_mode == audio::orchestra::mode_duplex) {
bufferBytes = m_bufferSize * audio::getFormatBytes(m_deviceFormat[1]);
if (m_doConvertBuffer[1]) {
// Always interleave ASIO input data.
for (i=0, j=0; i<nChannels; i++) {
if (m_private->bufferInfos[i].isInput == ASIOTrue) {
memcpy(&m_deviceBuffer[j++*bufferBytes],
m_private->bufferInfos[i].buffers[bufferIndex],
bufferBytes);
}
}
if (m_doByteSwap[1]) {
byteSwapBuffer(m_deviceBuffer,
m_bufferSize * m_nDeviceChannels[1],
m_deviceFormat[1]);
}
convertBuffer(m_userBuffer[1],
m_deviceBuffer,
m_convertInfo[1]);
} else {
for (i=0, j=0; i<nChannels; i++) {
if (m_private->bufferInfos[i].isInput == ASIOTrue) {
memcpy(&m_userBuffer[1][bufferBytes*j++],
m_private->bufferInfos[i].buffers[bufferIndex],
bufferBytes);
}
}
if (m_doByteSwap[1]) {
byteSwapBuffer(m_userBuffer[1],
m_bufferSize * m_nUserChannels[1],
m_userFormat);
}
}
}
unlock:
// The following call was suggested by Malte Clasen. While the API
// documentation indicates it should not be required, some device
// drivers apparently do not function correctly without it.
ASIOOutputReady();
audio::orchestra::Api::tickStreamTime();
return true;
}
static void sampleRateChanged(ASIOSampleRate _sRate) {
// The ASIO documentation says that this usually only happens during
// external sync. Audio processing is not stopped by the driver,
// actual sample rate might not have even changed, maybe only the
// sample rate status of an AES/EBU or S/PDIF digital input at the
// audio device.
audio::orchestra::api::Asio* object = (audio::orchestra::api::Asio*)asioCallbackInfo->object;
enum audio::orchestra::error ret = object->stopStream();
if (ret != audio::orchestra::error_none) {
ATA_ERROR("error stop stream!");
} else {
ATA_ERROR("driver reports sample rate changed to " << _sRate << " ... stream stopped!!!");
}
}
static long asioMessages(long _selector, long _value, void* _message, double* _opt) {
long ret = 0;
switch(_selector) {
case kAsioSelectorSupported:
if ( _value == kAsioResetRequest
|| _value == kAsioEngineVersion
|| _value == kAsioResyncRequest
|| _value == kAsioLatenciesChanged
// The following three were added for ASIO 2.0, you don't
// necessarily have to support them.
|| _value == kAsioSupportsTimeInfo
|| _value == kAsioSupportsTimeCode
|| _value == kAsioSupportsInputMonitor) {
ret = 1L;
}
break;
case kAsioResetRequest:
// Defer the task and perform the reset of the driver during the
// next "safe" situation. You cannot reset the driver right now,
// as this code is called from the driver. Resetting the driver
// means destroying it completely: ASIOStop(), ASIODisposeBuffers(),
// destruction. Afterwards you initialize the driver again.
ATA_ERROR("driver reset requested!!!");
ret = 1L;
break;
case kAsioResyncRequest:
// This informs the application that the driver encountered some
// non-fatal data loss. It is used for synchronization purposes
// of different media. Added mainly to work around the Win16Mutex
// problems in Windows 95/98 with the Windows Multimedia system,
// which could lose data because the Mutex was held too long by
// another thread. However a driver can issue it in other
// situations, too.
// ATA_ERROR("driver resync requested!!!");
asioXRun = true;
ret = 1L;
break;
case kAsioLatenciesChanged:
// This informs the host application that the driver's latencies
// have changed. Beware, this does not mean that the buffer
// sizes have changed! You might need to update internal
// delay data.
ATA_ERROR("driver latency may have changed!!!");
ret = 1L;
break;
case kAsioEngineVersion:
// Return the supported ASIO version of the host application. If
// a host application does not implement this selector, ASIO 1.0
// is assumed by the driver.
ret = 2L;
break;
case kAsioSupportsTimeInfo:
// Informs the driver whether the
// asioCallbacks.bufferSwitchTimeInfo() callback is supported.
// For compatibility with ASIO 1.0 drivers the host application
// should always support the "old" bufferSwitch method, too.
ret = 0;
break;
case kAsioSupportsTimeCode:
// Informs the driver whether the application is interested in time
// code info. If an application does not need to know about time
// code, the driver has less work to do.
ret = 0;
break;
}
return ret;
}
static const char* getAsioErrorString(ASIOError _result) {
struct Messages {
ASIOError value;
const char*message;
};
static const Messages m[] = {
{ ASE_NotPresent, "Hardware input or output is not present or available." },
{ ASE_HWMalfunction, "Hardware is malfunctioning." },
{ ASE_InvalidParameter, "Invalid input parameter." },
{ ASE_InvalidMode, "Invalid mode." },
{ ASE_SPNotAdvancing, "Sample position not advancing." },
{ ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
{ ASE_NoMemory, "Not enough memory to complete the request." }
};
for (uint32_t i = 0; i < sizeof(m)/sizeof(m[0]); ++i) {
if (m[i].value == _result) {
return m[i].message;
}
}
return "Unknown error.";
}
#endif
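
For reference, the callbackEvent() implementation above interprets the user callback's return value: 0 keeps the stream running, 1 asks for an internal drain followed by a stop, and 2 stops immediately through the spawned asioStopStream() thread. A minimal sketch of a callback following that convention (hypothetical user code; the parameter order and types are inferred from the call sites in this diff, not taken from the real callback typedef):

// Hypothetical callback sketch: requests a drain-then-stop after ~10 s at 48 kHz.
int32_t myAudioCallback(void* _inputBuffer, const audio::Time& _inputTime,
                        void* _outputBuffer, const audio::Time& _outputTime,
                        uint32_t _nbChunk,
                        const etk::Vector<enum audio::orchestra::status>& _status) {
	static uint64_t processedChunks = 0;
	processedChunks += _nbChunk;
	// ... fill _outputBuffer / consume _inputBuffer here ...
	if (processedChunks > 480000) {
		return 1; // drain the remaining output, then stop the stream
	}
	return 0; // keep running (returning 2 would abort without draining)
}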


@ -1,53 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#ifdef ORCHESTRA_BUILD_ASIO
namespace audio {
namespace orchestra {
namespace api {
class AsioPrivate;
class Asio: public audio::orchestra::Api {
public:
static ememory::SharedPtr<audio::orchestra::Api> create();
public:
Asio();
virtual ~Asio();
const etk::String& getCurrentApi() {
return audio::orchestra::typeAsio;
}
uint32_t getDeviceCount();
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device);
enum audio::orchestra::error closeStream();
enum audio::orchestra::error startStream();
enum audio::orchestra::error stopStream();
enum audio::orchestra::error abortStream();
long getStreamLatency();
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
bool callbackEvent(long _bufferIndex);
private:
ememory::SharedPtr<AsioPrivate> m_private;
etk::Vector<audio::orchestra::DeviceInfo> m_devices;
void saveDeviceInfo();
bool m_coInitialized;
bool open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
};
}
}
}
#endif
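
The header above, like the other backend headers in this diff, exposes the same Api surface: a create() factory plus getDeviceCount()/getDeviceInfo() and the start/stop/abort/close entry points. A minimal enumeration sketch against that surface (hypothetical caller code; it assumes these methods are virtual in the Api base class, as in the RtAudio design this code forks, and only uses members visible elsewhere in this diff such as DeviceInfo::name):

#ifdef ORCHESTRA_BUILD_ASIO
void listAsioDevices() {
	// create() returns the backend wrapped in the shared Api base class.
	ememory::SharedPtr<audio::orchestra::Api> api = audio::orchestra::api::Asio::create();
	for (uint32_t iii = 0; iii < api->getDeviceCount(); ++iii) {
		audio::orchestra::DeviceInfo info = api->getDeviceInfo(iii);
		ATA_INFO("device " << iii << " : '" << info.name << "' input=" << info.input);
	}
}
#endif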

File diff suppressed because it is too large.


@ -1,68 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#ifdef ORCHESTRA_BUILD_MACOSX_CORE
#include <CoreAudio/AudioHardware.h>
namespace audio {
namespace orchestra {
namespace api {
class CorePrivate;
class Core: public audio::orchestra::Api {
public:
static ememory::SharedPtr<audio::orchestra::Api> create();
public:
Core();
virtual ~Core();
const etk::String& getCurrentApi() {
return audio::orchestra::typeCoreOSX;
}
uint32_t getDeviceCount();
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device);
uint32_t getDefaultOutputDevice();
uint32_t getDefaultInputDevice();
enum audio::orchestra::error closeStream();
enum audio::orchestra::error startStream();
enum audio::orchestra::error stopStream();
enum audio::orchestra::error abortStream();
long getStreamLatency();
bool callbackEvent(AudioDeviceID _deviceId,
const AudioBufferList *_inBufferList,
const audio::Time& _inTime,
const AudioBufferList *_outBufferList,
const audio::Time& _outTime);
static OSStatus callbackEvent(AudioDeviceID _inDevice,
const AudioTimeStamp* _inNow,
const AudioBufferList* _inInputData,
const AudioTimeStamp* _inInputTime,
AudioBufferList* _outOutputData,
const AudioTimeStamp* _inOutputTime,
void* _infoPointer);
static void coreStopStream(void *_userData);
private:
ememory::SharedPtr<CorePrivate> m_private;
bool open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
static const char* getErrorCode(OSStatus _code);
static OSStatus xrunListener(AudioObjectID _inDevice,
uint32_t _nAddresses,
const AudioObjectPropertyAddress _properties[],
void* _userData);
};
}
}
}
#endif


@ -1,58 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#ifdef ORCHESTRA_BUILD_IOS_CORE
namespace audio {
namespace orchestra {
namespace api {
class CoreIosPrivate;
class CoreIos: public audio::orchestra::Api {
public:
static ememory::SharedPtr<audio::orchestra::Api> create();
public:
CoreIos();
virtual ~CoreIos();
const etk::String& getCurrentApi() {
return audio::orchestra::typeCoreIOS;
}
uint32_t getDeviceCount();
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device);
enum audio::orchestra::error closeStream();
enum audio::orchestra::error startStream();
enum audio::orchestra::error stopStream();
enum audio::orchestra::error abortStream();
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
void callbackEvent();
private:
etk::Vector<audio::orchestra::DeviceInfo> m_devices;
void saveDeviceInfo();
bool open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
public:
void callBackEvent(void* _data,
int32_t _nbChunk,
const audio::Time& _time);
public:
ememory::SharedPtr<CoreIosPrivate> m_private;
uint32_t getDefaultInputDevice();
uint32_t getDefaultOutputDevice();
};
}
}
}
#endif


@ -1,336 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#ifdef ORCHESTRA_BUILD_IOS_CORE
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#include <audio/orchestra/Interface.hpp>
#include <audio/orchestra/debug.hpp>
extern "C" {
#include <limits.h>
}
#include <audio/orchestra/api/CoreIos.hpp>
ememory::SharedPtr<audio::orchestra::Api> audio::orchestra::api::CoreIos::create() {
ATA_INFO("Create CoreIos device ... ");
return ememory::SharedPtr<audio::orchestra::api::CoreIos>(ETK_NEW(audio::orchestra::api::CoreIos));
}
#define kOutputBus 0
#define kInputBus 1
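// Bus numbering follows the RemoteIO convention: element 0 drives the output (speaker), element 1 reads the input (microphone).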
namespace audio {
namespace orchestra {
namespace api {
class CoreIosPrivate {
public:
AudioComponentInstance audioUnit;
};
}
}
}
audio::orchestra::api::CoreIos::CoreIos() :
m_private(ETK_NEW(audio::orchestra::api::CoreIosPrivate)) {
ATA_INFO("new CoreIos");
int32_t deviceCount = 2;
ATA_INFO("Get count devices : " << deviceCount);
audio::orchestra::DeviceInfo tmp;
// Add default output format :
tmp.name = "speaker";
tmp.input = false;
tmp.sampleRates.push_back(48000);
tmp.channels.push_back(audio::channel_frontRight);
tmp.channels.push_back(audio::channel_frontLeft);
tmp.nativeFormats.push_back(audio::format_int16);
tmp.isDefault = true;
tmp.isCorrect = true;
m_devices.push_back(tmp);
// add default input format:
tmp.name = "microphone";
tmp.input = true;
tmp.sampleRates.push_back(48000);
tmp.channels.push_back(audio::channel_frontRight);
tmp.channels.push_back(audio::channel_frontLeft);
tmp.nativeFormats.push_back(audio::format_int16);
tmp.isDefault = true;
tmp.isCorrect = true;
m_devices.push_back(tmp);
ATA_INFO("Create CoreIOs interface (end)");
}
uint32_t audio::orchestra::api::CoreIos::getDefaultInputDevice() {
// Should be implemented in subclasses if possible.
return 1;
}
uint32_t audio::orchestra::api::CoreIos::getDefaultOutputDevice() {
// Should be implemented in subclasses if possible.
return 0;
}
audio::orchestra::api::CoreIos::~CoreIos() {
ATA_INFO("Destroy CoreIOs interface");
AudioUnitUninitialize(m_private->audioUnit);
}
uint32_t audio::orchestra::api::CoreIos::getDeviceCount() {
//ATA_INFO("Get device count:"<< m_devices.size());
return m_devices.size();
}
audio::orchestra::DeviceInfo audio::orchestra::api::CoreIos::getDeviceInfo(uint32_t _device) {
//ATA_INFO("Get device info ...");
if (_device >= m_devices.size()) {
audio::orchestra::DeviceInfo tmp;
tmp.sampleRates.push_back(0);
tmp.channels.push_back(audio::channel_frontCenter);
tmp.isDefault = false;
tmp.nativeFormats.push_back(audio::format_int8);
return tmp;
}
return m_devices[_device];
}
enum audio::orchestra::error audio::orchestra::api::CoreIos::closeStream() {
ATA_INFO("Close Stream");
// Can not close the stream now...
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::CoreIos::startStream() {
ATA_INFO("Start Stream");
// TODO : Check return ...
audio::orchestra::Api::startStream();
OSStatus status = AudioOutputUnitStart(m_private->audioUnit);
if (status != 0) {
ATA_ERROR("can not start the audio unit...");
return audio::orchestra::error_systemError;
}
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::CoreIos::stopStream() {
ATA_INFO("Stop stream");
OSStatus status = AudioOutputUnitStop(m_private->audioUnit);
if (status != 0) {
ATA_ERROR("can not stop the audio unit...");
}
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::CoreIos::abortStream() {
ATA_INFO("Abort Stream");
OSStatus status = AudioOutputUnitStop(m_private->audioUnit);
if (status != 0) {
ATA_ERROR("can not stop the audio unit...");
}
return audio::orchestra::error_none;
}
void audio::orchestra::api::CoreIos::callBackEvent(void* _data,
int32_t _nbChunk,
const audio::Time& _time) {
int32_t doStopStream = 0;
etk::Vector<enum audio::orchestra::status> status;
if ( m_mode == audio::orchestra::mode_output
|| m_mode == audio::orchestra::mode_duplex) {
if (m_doConvertBuffer[modeToIdTable(audio::orchestra::mode_output)] == true) {
ATA_INFO("get output DATA : " << uint64_t(&m_userBuffer[modeToIdTable(audio::orchestra::mode_output)][0]));
doStopStream = m_callback(null,
audio::Time(),
&m_userBuffer[modeToIdTable(audio::orchestra::mode_output)][0],
_time,
_nbChunk,
status);
convertBuffer((char*)_data, &m_userBuffer[modeToIdTable(audio::orchestra::mode_output)][0], m_convertInfo[modeToIdTable(audio::orchestra::mode_output)]);
} else {
ATA_INFO("have output DATA : " << uint64_t(_data));
doStopStream = m_callback(null,
_time,
_data,
audio::Time(),
_nbChunk,
status);
}
}
if ( m_mode == audio::orchestra::mode_input
|| m_mode == audio::orchestra::mode_duplex) {
ATA_INFO("have input DATA : " << uint64_t(_data));
doStopStream = m_callback(_data,
_time,
null,
audio::Time(),
_nbChunk,
status);
}
if (doStopStream == 2) {
abortStream();
return;
}
audio::orchestra::Api::tickStreamTime();
}
static OSStatus playbackCallback(void *_userData,
AudioUnitRenderActionFlags* _ioActionFlags,
const AudioTimeStamp* _inTime,
uint32_t _inBusNumber,
uint32_t _inNumberFrames,
AudioBufferList* _ioData) {
if (_userData == null) {
ATA_ERROR("callback event ... null pointer");
return -1;
}
audio::Time tmpTime;
if (_inTime != null) {
tmpTime = audio::Time(_inTime->mHostTime/1000000000LL, _inTime->mHostTime%1000000000LL);
}
audio::orchestra::api::CoreIos* myClass = static_cast<audio::orchestra::api::CoreIos*>(_userData);
// get all requested buffer :
for (int32_t iii=0; iii < _ioData->mNumberBuffers; iii++) {
AudioBuffer buffer = _ioData->mBuffers[iii];
int32_t numberFrame = buffer.mDataByteSize/2/*stereo*/ /sizeof(int16_t);
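// e.g. a 4096-byte stereo int16 buffer yields 4096 / 2 (channels) / 2 (bytes per sample) = 1024 frames.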
ATA_INFO("request data size: " << numberFrame << " busNumber=" << _inBusNumber);
myClass->callBackEvent(buffer.mData, numberFrame, tmpTime);
}
return noErr;
}
bool audio::orchestra::api::CoreIos::open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options) {
ATA_INFO("Probe : device=" << _device << " channels=" << _channels << " firstChannel=" << _firstChannel << " sampleRate=" << _sampleRate);
if (_mode != audio::orchestra::mode_output) {
ATA_ERROR("Can not start a device input or duplex for CoreIos ...");
return false;
}
bool ret = true;
// TODO : This is a bad hack ....
m_mode = audio::orchestra::mode_output;
// configure Airtaudio internal configuration:
m_userFormat = _format;
m_nUserChannels[modeToIdTable(_mode)] = _channels;
m_bufferSize = 8192;
m_sampleRate = _sampleRate;
m_doByteSwap[modeToIdTable(_mode)] = false; // for endianness ...
// TODO : For now, we write it in hard ==> to be update later ...
m_deviceFormat[modeToIdTable(_mode)] = audio::format_int16;
m_nDeviceChannels[modeToIdTable(_mode)] = 2;
m_deviceInterleaved[modeToIdTable(_mode)] = true;
m_doConvertBuffer[modeToIdTable(_mode)] = false;
if (m_userFormat != m_deviceFormat[modeToIdTable(_mode)]) {
m_doConvertBuffer[modeToIdTable(_mode)] = true;
}
if (m_nUserChannels[modeToIdTable(_mode)] < m_nDeviceChannels[modeToIdTable(_mode)]) {
m_doConvertBuffer[modeToIdTable(_mode)] = true;
}
if ( m_deviceInterleaved[modeToIdTable(_mode)] == false
&& m_nUserChannels[modeToIdTable(_mode)] > 1) {
m_doConvertBuffer[modeToIdTable(_mode)] = true;
}
if (m_doConvertBuffer[modeToIdTable(_mode)] == true) {
// Allocate necessary internal buffers.
uint64_t bufferBytes = m_nUserChannels[modeToIdTable(_mode)] * m_bufferSize * audio::getFormatBytes(m_userFormat);
m_userBuffer[modeToIdTable(_mode)].resize(bufferBytes, 0);
if (m_userBuffer[modeToIdTable(_mode)].size() == 0) {
ATA_ERROR("error allocating user buffer memory.");
}
setConvertInfo(_mode, _firstChannel);
}
ATA_INFO("device format : " << m_deviceFormat[modeToIdTable(_mode)] << " user format : " << m_userFormat);
ATA_INFO("device channels : " << m_nDeviceChannels[modeToIdTable(_mode)] << " user channels : " << m_nUserChannels[modeToIdTable(_mode)]);
ATA_INFO("do convert buffer : " << m_doConvertBuffer[modeToIdTable(_mode)]);
if (ret == false) {
ATA_ERROR("Can not open device.");
}
// Configure IOs interface:
OSStatus status;
// Describe audio component
AudioComponentDescription desc;
desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_RemoteIO;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
// Get component
AudioComponent inputComponent = AudioComponentFindNext(null, &desc);
// Get audio units
status = AudioComponentInstanceNew(inputComponent, &m_private->audioUnit);
if (status != 0) {
ATA_ERROR("can not create an audio instance...");
}
uint32_t flag = 1;
// Enable IO for playback
status = AudioUnitSetProperty(m_private->audioUnit,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Output,
kOutputBus,
&flag,
sizeof(flag));
if (status != 0) {
ATA_ERROR("can not request audio authorization...");
}
// Describe format
AudioStreamBasicDescription audioFormat;
audioFormat.mSampleRate = 48000.00;
audioFormat.mFormatID = kAudioFormatLinearPCM;
audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormat.mFramesPerPacket = 1; //
audioFormat.mChannelsPerFrame = 2; // stereo
audioFormat.mBitsPerChannel = sizeof(short) * 8;
audioFormat.mBytesPerPacket = sizeof(short) * audioFormat.mChannelsPerFrame;
audioFormat.mBytesPerFrame = sizeof(short) * audioFormat.mChannelsPerFrame;
audioFormat.mReserved = 0;
// Apply format
status = AudioUnitSetProperty(m_private->audioUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input,
kOutputBus,
&audioFormat,
sizeof(audioFormat));
if (status != 0) {
ATA_ERROR("can not set stream properties...");
}
// Set output callback
AURenderCallbackStruct callbackStruct;
callbackStruct.inputProc = &playbackCallback;
callbackStruct.inputProcRefCon = this;
status = AudioUnitSetProperty(m_private->audioUnit,
kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Global,
kOutputBus,
&callbackStruct,
sizeof(callbackStruct));
if (status != 0) {
ATA_ERROR("can not set Callback...");
}
// Initialise
status = AudioUnitInitialize(m_private->audioUnit);
if (status != 0) {
ATA_ERROR("can not initialize...");
}
return ret;
}
#endif

File diff suppressed because it is too large.


@ -1,54 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#ifdef ORCHESTRA_BUILD_DS
namespace audio {
namespace orchestra {
namespace api {
class DsPrivate;
class Ds: public audio::orchestra::Api {
public:
static ememory::SharedPtr<audio::orchestra::Api> create();
public:
Ds();
virtual ~Ds();
const etk::String& getCurrentApi() {
return audio::orchestra::typeDs;
}
uint32_t getDeviceCount();
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device);
enum audio::orchestra::error closeStream();
enum audio::orchestra::error startStream();
enum audio::orchestra::error stopStream();
enum audio::orchestra::error abortStream();
long getStreamLatency();
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
void callbackEvent();
private:
static void dsCallbackEvent(void *_userData);
ememory::SharedPtr<DsPrivate> m_private;
bool m_coInitialized;
bool m_buffersRolling;
long m_duplexPrerollBytes;
bool open(uint32_t _device,
enum audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
enum audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
};
}
}
}
#endif


@ -1,60 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#if defined(ORCHESTRA_BUILD_DUMMY)
#include <audio/orchestra/api/Dummy.hpp>
#include <audio/orchestra/debug.hpp>
ememory::SharedPtr<audio::orchestra::Api> audio::orchestra::api::Dummy::create() {
return ememory::SharedPtr<audio::orchestra::api::Dummy>(ETK_NEW(audio::orchestra::api::Dummy));
}
audio::orchestra::api::Dummy::Dummy() {
ATA_WARNING("This class provides no functionality.");
}
uint32_t audio::orchestra::api::Dummy::getDeviceCount() {
return 0;
}
audio::orchestra::DeviceInfo audio::orchestra::api::Dummy::getDeviceInfo(uint32_t _device) {
(void)_device;
return audio::orchestra::DeviceInfo();
}
enum audio::orchestra::error audio::orchestra::api::Dummy::closeStream() {
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::Dummy::startStream() {
// TODO : Check return ...
audio::orchestra::Api::startStream();
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::Dummy::stopStream() {
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::Dummy::abortStream() {
return audio::orchestra::error_none;
}
bool audio::orchestra::api::Dummy::open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options) {
return false;
}
#endif


@ -1,44 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#ifdef ORCHESTRA_BUILD_DUMMY
#include <audio/orchestra/Interface.hpp>
namespace audio {
namespace orchestra {
namespace api {
class Dummy: public audio::orchestra::Api {
public:
static ememory::SharedPtr<audio::orchestra::Api> create();
public:
Dummy();
const etk::String& getCurrentApi() {
return audio::orchestra::typeDummy;
}
uint32_t getDeviceCount();
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device);
enum audio::orchestra::error closeStream();
enum audio::orchestra::error startStream();
enum audio::orchestra::error stopStream();
enum audio::orchestra::error abortStream();
private:
bool open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
};
}
}
}
#endif


@ -1,712 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
// must run before :
#if defined(ORCHESTRA_BUILD_JACK)
extern "C" {
#include <limits.h>
#include <string.h>
}
#include <audio/orchestra/Interface.hpp>
#include <audio/orchestra/debug.hpp>
#include <ethread/tools.hpp>
#include <audio/orchestra/api/Jack.hpp>
ememory::SharedPtr<audio::orchestra::Api> audio::orchestra::api::Jack::create() {
return ememory::SharedPtr<audio::orchestra::api::Jack>(ETK_NEW(audio::orchestra::api::Jack));
}
// JACK is a low-latency audio server, originally written for the
// GNU/Linux operating system and now also ported to OS-X. It can
// connect a number of different applications to an audio device, as
// well as allowing them to share audio between themselves.
//
// When using JACK with RtAudio, "devices" refer to JACK clients that
// have ports connected to the server. The JACK server is typically
// started in a terminal as follows:
//
// jackd -d alsa -d hw:0
//
// or through an interface program such as qjackctl. Many of the
// parameters normally set for a stream are fixed by the JACK server
// and can be specified when the JACK server is started. In
// particular,
//
// jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
// jackd -r -d alsa -r 48000
//
// specifies a sample rate of 44100 Hz, a buffer size of 512 sample
// frames, and number of buffers = 4. Once the server is running, it
// is not possible to override these values. If the values are not
// specified in the command-line, the JACK server uses default values.
//
// The JACK server does not have to be running when an instance of
// audio::orchestra::Jack is created, though the function getDeviceCount() will
// report 0 devices found until JACK has been started. When no
// devices are available (i.e., the JACK server is not running), a
// stream cannot be opened.
#include <jack/jack.h>
extern "C" {
#include <stdio.h>
}
namespace audio {
namespace orchestra {
namespace api {
class JackPrivate {
public:
jack_client_t *client;
jack_port_t **ports[2];
etk::String deviceName[2];
bool xrun[2];
ethread::Semaphore m_semaphore;
int32_t drainCounter; // Tracks callback counts when draining
bool internalDrain; // Indicates if stop is initiated from callback or not.
JackPrivate() :
client(0),
drainCounter(0),
internalDrain(false) {
ports[0] = 0;
ports[1] = 0;
xrun[0] = false;
xrun[1] = false;
}
};
}
}
}
audio::orchestra::api::Jack::Jack() :
m_private(ETK_NEW(audio::orchestra::api::JackPrivate)) {
// Nothing to do here.
}
audio::orchestra::api::Jack::~Jack() {
if (m_state != audio::orchestra::state::closed) {
closeStream();
}
}
uint32_t audio::orchestra::api::Jack::getDeviceCount() {
// See if we can become a jack client.
jack_options_t options = (jack_options_t) (JackNoStartServer); //JackNullOption;
jack_status_t *status = null;
jack_client_t *client = jack_client_open("orchestraJackCount", options, status);
if (client == null) {
return 0;
}
const char **ports;
etk::String port, previousPort;
uint32_t nChannels = 0, nDevices = 0;
ports = jack_get_ports(client, null, null, 0);
if (ports) {
// Parse the port names up to the first colon (:).
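// e.g. "system:capture_1" and "system:capture_2" both reduce to the client name "system" and are counted as one device.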
size_t iColon = 0;
do {
port = (char *) ports[ nChannels ];
iColon = port.find(":");
if (iColon != etk::String::npos) {
port = port.extract(0, iColon + 1);
if (port != previousPort) {
nDevices++;
previousPort = port;
}
}
} while (ports[++nChannels]);
free(ports);
ports = null;
}
jack_client_close(client);
return nDevices*2;
}
audio::orchestra::DeviceInfo audio::orchestra::api::Jack::getDeviceInfo(uint32_t _device) {
audio::orchestra::DeviceInfo info;
jack_options_t options = (jack_options_t) (JackNoStartServer); //JackNullOption
jack_status_t *status = null;
jack_client_t *client = jack_client_open("orchestraJackInfo", options, status);
if (client == null) {
ATA_ERROR("Jack server not found or connection error!");
// TODO : audio::orchestra::error_warning;
info.clear();
return info;
}
const char **ports;
etk::String port, previousPort;
uint32_t nPorts = 0, nDevices = 0;
ports = jack_get_ports(client, null, null, 0);
int32_t deviceID = _device/2;
info.input = _device%2==0?true:false; // even device IDs are inputs (the JACK port direction is inverted relative to ours)
if (ports) {
// Parse the port names up to the first colon (:).
size_t iColon = 0;
do {
port = (char *) ports[nPorts];
iColon = port.find(":");
if (iColon != etk::String::npos) {
port = port.extract(0, iColon);
if (port != previousPort) {
if (nDevices == deviceID) {
info.name = port;
}
nDevices++;
previousPort = port;
}
}
} while (ports[++nPorts]);
free(ports);
}
if (deviceID >= nDevices) {
jack_client_close(client);
ATA_ERROR("device ID is invalid!");
// TODO : audio::orchestra::error_invalidUse;
return info;
}
// Get the current jack server sample rate.
info.sampleRates.clear();
info.sampleRates.pushBack(jack_get_sample_rate(client));
if (info.input == true) {
ports = jack_get_ports(client, info.name.c_str(), null, JackPortIsOutput);
if (ports) {
int32_t iii=0;
while (ports[iii]) {
ATA_VERBOSE("    port='" << ports[iii] << "'");
info.channels.pushBack(audio::channel_unknow);
iii++;
}
free(ports);
}
} else {
ports = jack_get_ports(client, info.name.c_str(), null, JackPortIsInput);
if (ports) {
int32_t iii=0;
while (ports[iii]) {
ATA_VERBOSE("    port='" << ports[iii] << "'");
info.channels.pushBack(audio::channel_unknow);
iii++;
}
free(ports);
}
}
if (info.channels.size() == 0) {
jack_client_close(client);
ATA_ERROR("error determining Jack input/output channels!");
// TODO : audio::orchestra::error_warning;
info.clear();
return info;
}
// Jack always uses 32-bit floats.
info.nativeFormats.pushBack(audio::format_float);
// Jack doesn't provide default devices so we'll use the first available one.
if (deviceID == 0) {
info.isDefault = true;
}
jack_client_close(client);
info.isCorrect = true;
return info;
}
int32_t audio::orchestra::api::Jack::jackCallbackHandler(jack_nframes_t _nframes, void* _userData) {
ATA_VERBOSE("Jack callback: [BEGIN] " << uint64_t(_userData));
audio::orchestra::api::Jack* myClass = reinterpret_cast<audio::orchestra::api::Jack*>(_userData);
if (myClass->callbackEvent((uint64_t)_nframes) == false) {
ATA_VERBOSE("Jack callback: [END] 1");
return 1;
}
ATA_VERBOSE("Jack callback: [END] 0");
return 0;
}
void audio::orchestra::api::Jack::jackShutdown(void* _userData) {
audio::orchestra::api::Jack* myClass = reinterpret_cast<audio::orchestra::api::Jack*>(_userData);
// Check current stream state. If stopped, then we'll assume this
// was called as a result of a call to audio::orchestra::api::Jack::stopStream (the
// deactivation of a client handle causes this function to be called).
// If not, we'll assume the Jack server is shutting down or some
// other problem occurred and we should close the stream.
if (myClass->isStreamRunning() == false) {
return;
}
ETK_NEW(ethread::Thread, [=](){myClass->closeStream();});
ATA_ERROR("The Jack server is shutting down this client ... stream stopped and closed!!");
}
int32_t audio::orchestra::api::Jack::jackXrun(void* _userData) {
audio::orchestra::api::Jack* myClass = reinterpret_cast<audio::orchestra::api::Jack*>(_userData);
if (myClass->m_private->ports[0]) {
myClass->m_private->xrun[0] = true;
}
if (myClass->m_private->ports[1]) {
myClass->m_private->xrun[1] = true;
}
return 0;
}
bool audio::orchestra::api::Jack::open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t* _bufferSize,
const audio::orchestra::StreamOptions& _options) {
// Look for jack server and try to become a client (only do once per stream).
jack_client_t *client = 0;
if ( _mode == audio::orchestra::mode_output
|| ( _mode == audio::orchestra::mode_input
&& m_mode != audio::orchestra::mode_output)) {
jack_options_t jackoptions = (jack_options_t) (JackNoStartServer); //JackNullOption;
jack_status_t *status = null;
if (_options.streamName.size() != 0) {
client = jack_client_open(_options.streamName.c_str(), jackoptions, status);
} else {
client = jack_client_open("orchestraJack", jackoptions, status);
}
if (client == 0) {
ATA_ERROR("Jack server not found or connection error!");
return false;
}
} else {
// The handle must have been created on an earlier pass.
client = m_private->client;
}
const char **ports;
etk::String port, previousPort, deviceName;
uint32_t nPorts = 0, nDevices = 0;
int32_t deviceID = _device/2;
bool isInput = _device%2==0?true:false;
ports = jack_get_ports(client, null, null, 0);
if (ports) {
// Parse the port names up to the first colon (:).
size_t iColon = 0;
do {
port = (char *) ports[ nPorts ];
iColon = port.find(":");
if (iColon != etk::String::npos) {
port = port.extract(0, iColon);
if (port != previousPort) {
if (nDevices == deviceID) {
deviceName = port;
}
nDevices++;
previousPort = port;
}
}
} while (ports[++nPorts]);
free(ports);
}
if (_device >= nDevices) {
ATA_ERROR("device ID is invalid!");
return false;
}
// Count the available ports containing the client name as device
// channels. Jack "input ports" equal RtAudio output channels.
uint32_t nChannels = 0;
uint64_t flag = JackPortIsInput;
if (_mode == audio::orchestra::mode_input) {
flag = JackPortIsOutput;
}
ports = jack_get_ports(client, deviceName.c_str(), null, flag);
if (ports) {
while (ports[ nChannels ]) {
nChannels++;
}
free(ports);
}
// Compare the jack ports for specified client to the requested number of channels.
if (nChannels < (_channels + _firstChannel)) {
ATA_ERROR("requested number of channels (" << _channels << ") + offset (" << _firstChannel << ") not found for specified device (" << _device << ":" << deviceName << ").");
return false;
}
// Check the jack server sample rate.
uint32_t jackRate = jack_get_sample_rate(client);
if (_sampleRate != jackRate) {
jack_client_close(client);
ATA_ERROR("the requested sample rate (" << _sampleRate << ") is different than the JACK server rate (" << jackRate << ").");
return false;
}
m_sampleRate = jackRate;
// Get the latency of the JACK port.
ports = jack_get_ports(client, deviceName.c_str(), null, flag);
if (ports[ _firstChannel ]) {
// Added by Ge Wang
jack_latency_callback_mode_t cbmode = (_mode == audio::orchestra::mode_input ? JackCaptureLatency : JackPlaybackLatency);
// the range (usually the min and max are equal)
jack_latency_range_t latrange; latrange.min = latrange.max = 0;
// get the latency range
jack_port_get_latency_range(jack_port_by_name(client, ports[_firstChannel]), cbmode, &latrange);
// be optimistic, use the min!
m_latency[modeToIdTable(_mode)] = latrange.min;
//m_latency[modeToIdTable(_mode)] = jack_port_get_latency(jack_port_by_name(client, ports[ _firstChannel ]));
}
free(ports);
// The jack server always uses 32-bit floating-point data.
m_deviceFormat[modeToIdTable(_mode)] = audio::format_float;
m_userFormat = _format;
// Jack always uses non-interleaved buffers.
m_deviceInterleaved[modeToIdTable(_mode)] = false;
// Jack always provides host byte-ordered data.
m_doByteSwap[modeToIdTable(_mode)] = false;
// Get the buffer size. The buffer size and number of buffers
// (periods) is set when the jack server is started.
m_bufferSize = (int) jack_get_buffer_size(client);
*_bufferSize = m_bufferSize;
m_nDeviceChannels[modeToIdTable(_mode)] = _channels;
m_nUserChannels[modeToIdTable(_mode)] = _channels;
// Set flags for buffer conversion.
m_doConvertBuffer[modeToIdTable(_mode)] = false;
if (m_userFormat != m_deviceFormat[modeToIdTable(_mode)]) {
m_doConvertBuffer[modeToIdTable(_mode)] = true;
ATA_CRITICAL("Can not update format ==> use RIVER lib for this ...");
}
if ( m_deviceInterleaved[modeToIdTable(_mode)] == false
&& m_nUserChannels[modeToIdTable(_mode)] > 1) {
ATA_ERROR("Reorder channel for the interleaving properties ...");
m_doConvertBuffer[modeToIdTable(_mode)] = true;
}
// Allocate our JackHandle structure for the stream.
m_private->client = client;
m_private->deviceName[modeToIdTable(_mode)] = deviceName;
// Allocate necessary internal buffers.
uint64_t bufferBytes;
bufferBytes = m_nUserChannels[modeToIdTable(_mode)] * *_bufferSize * audio::getFormatBytes(m_deviceFormat[modeToIdTable(_mode)]);
ATA_VERBOSE("allocate : nbChannel=" << m_nUserChannels[modeToIdTable(_mode)] << " bufferSize=" << *_bufferSize << " format=" << m_deviceFormat[modeToIdTable(_mode)] << "=" << audio::getFormatBytes(m_deviceFormat[modeToIdTable(_mode)]));
m_userBuffer[modeToIdTable(_mode)].resize(bufferBytes, 0);
if (m_userBuffer[modeToIdTable(_mode)].size() == 0) {
ATA_ERROR("error allocating user buffer memory.");
goto error;
}
if (m_doConvertBuffer[modeToIdTable(_mode)]) {
bool makeBuffer = true;
if (_mode == audio::orchestra::mode_output) {
bufferBytes = m_nDeviceChannels[0] * audio::getFormatBytes(m_deviceFormat[0]);
} else { // _mode == audio::orchestra::mode_input
bufferBytes = m_nDeviceChannels[1] * audio::getFormatBytes(m_deviceFormat[1]);
if (m_mode == audio::orchestra::mode_output && m_deviceBuffer) {
uint64_t bytesOut = m_nDeviceChannels[0] * audio::getFormatBytes(m_deviceFormat[0]);
if (bufferBytes < bytesOut) {
makeBuffer = false;
}
}
}
if (makeBuffer) {
bufferBytes *= *_bufferSize;
if (m_deviceBuffer) free(m_deviceBuffer);
m_deviceBuffer = (char *) calloc(bufferBytes, 1);
if (m_deviceBuffer == null) {
ATA_ERROR("error allocating device buffer memory.");
goto error;
}
}
}
// Allocate memory for the Jack ports (channels) identifiers.
m_private->ports[modeToIdTable(_mode)] = (jack_port_t **) malloc (sizeof (jack_port_t *) * _channels);
if (m_private->ports[modeToIdTable(_mode)] == null) {
ATA_ERROR("error allocating port memory.");
goto error;
}
m_device[modeToIdTable(_mode)] = _device;
m_channelOffset[modeToIdTable(_mode)] = _firstChannel;
m_state = audio::orchestra::state::stopped;
if ( m_mode == audio::orchestra::mode_output
&& _mode == audio::orchestra::mode_input) {
// We had already set up the stream for output.
m_mode = audio::orchestra::mode_duplex;
} else {
m_mode = _mode;
jack_set_process_callback(m_private->client, &audio::orchestra::api::Jack::jackCallbackHandler, this);
jack_set_xrun_callback(m_private->client, &audio::orchestra::api::Jack::jackXrun, this);
jack_on_shutdown(m_private->client, &audio::orchestra::api::Jack::jackShutdown, this);
}
// Register our ports.
char label[64];
if (_mode == audio::orchestra::mode_output) {
for (uint32_t i=0; i<m_nUserChannels[0]; i++) {
snprintf(label, 64, "outport %d", i);
m_private->ports[0][i] = jack_port_register(m_private->client,
(const char *)label,
JACK_DEFAULT_AUDIO_TYPE,
JackPortIsOutput,
0);
}
} else {
for (uint32_t i=0; i<m_nUserChannels[1]; i++) {
snprintf(label, 64, "inport %d", i);
m_private->ports[1][i] = jack_port_register(m_private->client,
(const char *)label,
JACK_DEFAULT_AUDIO_TYPE,
JackPortIsInput,
0);
}
}
// Setup the buffer conversion information structure. We don't use
// buffers to do channel offsets, so we override that parameter
// here.
if (m_doConvertBuffer[modeToIdTable(_mode)]) {
setConvertInfo(_mode, 0);
}
return true;
error:
jack_client_close(m_private->client);
if (m_private->ports[0] != null) {
free(m_private->ports[0]);
m_private->ports[0] = null;
}
if (m_private->ports[1] != null) {
free(m_private->ports[1]);
m_private->ports[1] = null;
}
for (int32_t iii=0; iii<2; ++iii) {
m_userBuffer[iii].clear();
}
if (m_deviceBuffer) {
free(m_deviceBuffer);
m_deviceBuffer = null;
}
return false;
}
enum audio::orchestra::error audio::orchestra::api::Jack::closeStream() {
if (m_state == audio::orchestra::state::closed) {
ATA_ERROR("no open stream to close!");
return audio::orchestra::error_warning;
}
if (m_private != null) {
if (m_state == audio::orchestra::state::running) {
jack_deactivate(m_private->client);
}
jack_client_close(m_private->client);
}
if (m_private->ports[0] != null) {
free(m_private->ports[0]);
m_private->ports[0] = null;
}
if (m_private->ports[1] != null) {
free(m_private->ports[1]);
m_private->ports[1] = null;
}
for (int32_t i=0; i<2; i++) {
m_userBuffer[i].clear();
}
if (m_deviceBuffer) {
free(m_deviceBuffer);
m_deviceBuffer = null;
}
m_mode = audio::orchestra::mode_unknow;
m_state = audio::orchestra::state::closed;
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::Jack::startStream() {
// TODO : Check return ...
audio::orchestra::Api::startStream();
if (verifyStream() != audio::orchestra::error_none) {
return audio::orchestra::error_fail;
}
if (m_state == audio::orchestra::state::running) {
ATA_ERROR("the stream is already running!");
return audio::orchestra::error_warning;
}
int32_t result = jack_activate(m_private->client);
if (result) {
ATA_ERROR("unable to activate JACK client!");
goto unlock;
}
const char **ports;
// Get the list of available ports.
if ( m_mode == audio::orchestra::mode_output
|| m_mode == audio::orchestra::mode_duplex) {
result = 1;
ports = jack_get_ports(m_private->client, m_private->deviceName[0].c_str(), null, JackPortIsInput);
if (ports == null) {
ATA_ERROR("error determining available JACK input ports!");
goto unlock;
}
// Now make the port connections. Since RtAudio wasn't designed to
// allow the user to select particular channels of a device, we'll
// just open the first "nChannels" ports with offset.
for (uint32_t i=0; i<m_nUserChannels[0]; i++) {
result = 1;
if (ports[ m_channelOffset[0] + i ])
result = jack_connect(m_private->client, jack_port_name(m_private->ports[0][i]), ports[ m_channelOffset[0] + i ]);
if (result) {
free(ports);
ATA_ERROR("error connecting output ports!");
goto unlock;
}
}
free(ports);
}
if ( m_mode == audio::orchestra::mode_input
|| m_mode == audio::orchestra::mode_duplex) {
result = 1;
ports = jack_get_ports(m_private->client, m_private->deviceName[1].c_str(), null, JackPortIsOutput);
if (ports == null) {
ATA_ERROR("error determining available JACK output ports!");
goto unlock;
}
// Now make the port connections. See note above.
for (uint32_t i=0; i<m_nUserChannels[1]; i++) {
result = 1;
if (ports[ m_channelOffset[1] + i ]) {
result = jack_connect(m_private->client, ports[ m_channelOffset[1] + i ], jack_port_name(m_private->ports[1][i]));
}
if (result) {
free(ports);
ATA_ERROR("error connecting input ports!");
goto unlock;
}
}
free(ports);
}
m_private->drainCounter = 0;
m_private->internalDrain = false;
m_state = audio::orchestra::state::running;
unlock:
if (result == 0) {
return audio::orchestra::error_none;
}
return audio::orchestra::error_systemError;
}
enum audio::orchestra::error audio::orchestra::api::Jack::stopStream() {
if (verifyStream() != audio::orchestra::error_none) {
return audio::orchestra::error_fail;
}
if (m_state == audio::orchestra::state::stopped) {
ATA_ERROR("the stream is already stopped!");
return audio::orchestra::error_warning;
}
if ( m_mode == audio::orchestra::mode_output
|| m_mode == audio::orchestra::mode_duplex) {
if (m_private->drainCounter == 0) {
m_private->drainCounter = 2;
m_private->m_semaphore.wait();
}
}
jack_deactivate(m_private->client);
m_state = audio::orchestra::state::stopped;
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::Jack::abortStream() {
if (verifyStream() != audio::orchestra::error_none) {
return audio::orchestra::error_fail;
}
if (m_state == audio::orchestra::state::stopped) {
ATA_ERROR("the stream is already stopped!");
return audio::orchestra::error_warning;
}
m_private->drainCounter = 2;
return stopStream();
}
bool audio::orchestra::api::Jack::callbackEvent(uint64_t _nframes) {
if ( m_state == audio::orchestra::state::stopped
|| m_state == audio::orchestra::state::stopping) {
return true;
}
if (m_state == audio::orchestra::state::closed) {
ATA_ERROR("the stream is closed ... this shouldn't happen!");
return false;
}
if (m_bufferSize != _nframes) {
ATA_ERROR("the JACK buffer size has changed ... cannot process!");
return false;
}
// Check if we were draining the stream and signal if finished.
if (m_private->drainCounter > 3) {
m_state = audio::orchestra::state::stopping;
if (m_private->internalDrain == true) {
ETK_NEW(ethread::Thread, [&](){stopStream();}, "Jack_stopStream");
} else {
m_private->m_semaphore.post();
}
return true;
}
// Invoke user callback first, to get fresh output data.
if (m_private->drainCounter == 0) {
audio::Time streamTime = getStreamTime();
etk::Vector<enum audio::orchestra::status> status;
if (m_mode != audio::orchestra::mode_input && m_private->xrun[0] == true) {
status.pushBack(audio::orchestra::status::underflow);
m_private->xrun[0] = false;
}
if (m_mode != audio::orchestra::mode_output && m_private->xrun[1] == true) {
status.pushBack(audio::orchestra::status::overflow);
m_private->xrun[1] = false;
}
int32_t cbReturnValue = m_callback(&m_userBuffer[1][0],
streamTime,
&m_userBuffer[0][0],
streamTime,
m_bufferSize,
status);
if (cbReturnValue == 2) {
m_state = audio::orchestra::state::stopping;
m_private->drainCounter = 2;
ETK_NEW(ethread::Thread, [&](){stopStream();}, "Jack_stopStream2");
return true;
}
else if (cbReturnValue == 1) {
m_private->drainCounter = 1;
m_private->internalDrain = true;
}
}
jack_default_audio_sample_t *jackbuffer;
uint64_t bufferBytes = _nframes * sizeof(jack_default_audio_sample_t);
if ( m_mode == audio::orchestra::mode_output
|| m_mode == audio::orchestra::mode_duplex) {
if (m_private->drainCounter > 1) { // write zeros to the output stream
for (uint32_t i=0; i<m_nDeviceChannels[0]; i++) {
jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(m_private->ports[0][i], (jack_nframes_t) _nframes);
memset(jackbuffer, 0, bufferBytes);
}
} else if (m_doConvertBuffer[0]) {
convertBuffer(m_deviceBuffer, &m_userBuffer[0][0], m_convertInfo[0]);
for (uint32_t i=0; i<m_nDeviceChannels[0]; i++) {
jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(m_private->ports[0][i], (jack_nframes_t) _nframes);
memcpy(jackbuffer, &m_deviceBuffer[i*bufferBytes], bufferBytes);
}
} else { // no buffer conversion
for (uint32_t i=0; i<m_nUserChannels[0]; i++) {
jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(m_private->ports[0][i], (jack_nframes_t) _nframes);
memcpy(jackbuffer, &m_userBuffer[0][i*bufferBytes], bufferBytes);
}
}
if (m_private->drainCounter) {
m_private->drainCounter++;
goto unlock;
}
}
if ( m_mode == audio::orchestra::mode_input
|| m_mode == audio::orchestra::mode_duplex) {
if (m_doConvertBuffer[1]) {
for (uint32_t i=0; i<m_nDeviceChannels[1]; i++) {
jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(m_private->ports[1][i], (jack_nframes_t) _nframes);
memcpy(&m_deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes);
}
convertBuffer(&m_userBuffer[1][0], m_deviceBuffer, m_convertInfo[1]);
} else {
// no buffer conversion
for (uint32_t i=0; i<m_nUserChannels[1]; i++) {
jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(m_private->ports[1][i], (jack_nframes_t) _nframes);
memcpy(&m_userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes);
}
}
}
unlock:
audio::orchestra::Api::tickStreamTime();
return true;
}
#endif


@ -1,56 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#ifdef ORCHESTRA_BUILD_JACK
#include <jack/jack.h>
namespace audio {
namespace orchestra {
namespace api {
class JackPrivate;
class Jack: public audio::orchestra::Api {
public:
static ememory::SharedPtr<audio::orchestra::Api> create();
public:
Jack();
virtual ~Jack();
const etk::String& getCurrentApi() {
return audio::orchestra::typeJack;
}
uint32_t getDeviceCount();
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device);
enum audio::orchestra::error closeStream();
enum audio::orchestra::error startStream();
enum audio::orchestra::error stopStream();
enum audio::orchestra::error abortStream();
long getStreamLatency();
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
bool callbackEvent(uint64_t _nframes);
private:
static int32_t jackXrun(void* _userData);
static void jackShutdown(void* _userData);
static int32_t jackCallbackHandler(jack_nframes_t _nframes, void* _userData);
private:
ememory::SharedPtr<JackPrivate> m_private;
bool open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
};
}
}
}
#endif


@ -1,412 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#if defined(ORCHESTRA_BUILD_PULSE)
extern "C" {
#include <limits.h>
#include <stdio.h>
}
#include <audio/orchestra/Interface.hpp>
#include <audio/orchestra/debug.hpp>
#include <pulse/error.h>
#include <pulse/simple.h>
#include <ethread/tools.hpp>
#include <audio/orchestra/api/PulseDeviceList.hpp>
#include <audio/orchestra/api/Pulse.hpp>
ememory::SharedPtr<audio::orchestra::Api> audio::orchestra::api::Pulse::create() {
return ememory::SharedPtr<audio::orchestra::api::Pulse>(ETK_NEW(audio::orchestra::api::Pulse));
}
static const uint32_t SUPPORTED_SAMPLERATES[] = {
8000,
16000,
22050,
32000,
44100,
48000,
96000,
0
};
struct rtaudio_pa_format_mapping_t {
enum audio::format airtaudio_format;
pa_sample_format_t pa_format;
};
static const rtaudio_pa_format_mapping_t supported_sampleformats[] = {
{audio::format_int16, PA_SAMPLE_S16LE},
{audio::format_int32, PA_SAMPLE_S32LE},
{audio::format_float, PA_SAMPLE_FLOAT32LE},
{audio::format_unknow, PA_SAMPLE_INVALID}};
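// Both tables end with a sentinel entry (sample rate 0, PA_SAMPLE_INVALID) so the probing loops below can stop without knowing the table sizes.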
namespace audio {
namespace orchestra {
namespace api {
class PulsePrivate {
public:
pa_simple* handle;
ememory::SharedPtr<ethread::Thread> thread;
bool threadRunning;
ethread::Semaphore m_semaphore;
bool runnable;
PulsePrivate() :
handle(0),
threadRunning(false),
runnable(false) {
}
};
}
}
}
audio::orchestra::api::Pulse::Pulse() :
m_private(ETK_NEW(audio::orchestra::api::PulsePrivate)) {
}
audio::orchestra::api::Pulse::~Pulse() {
if (m_state != audio::orchestra::state::closed) {
closeStream();
}
}
uint32_t audio::orchestra::api::Pulse::getDeviceCount() {
#if 1
etk::Vector<audio::orchestra::DeviceInfo> list = audio::orchestra::api::pulse::getDeviceList();
return list.size();
#else
return 1;
#endif
}
audio::orchestra::DeviceInfo audio::orchestra::api::Pulse::getDeviceInfo(uint32_t _device) {
etk::Vector<audio::orchestra::DeviceInfo> list = audio::orchestra::api::pulse::getDeviceList();
if (_device >= list.size()) {
ATA_ERROR("Request device out of IDs:" << _device << " >= " << list.size());
return audio::orchestra::DeviceInfo();
}
return list[_device];
}
void audio::orchestra::api::Pulse::callbackEvent() {
ethread::setName("Pulse IO-" + m_name);
while (m_private->threadRunning == true) {
callbackEventOneCycle();
}
}
enum audio::orchestra::error audio::orchestra::api::Pulse::closeStream() {
m_private->threadRunning = false;
m_mutex.lock();
if (m_state == audio::orchestra::state::stopped) {
m_private->runnable = true;
m_private->m_semaphore.post();
}
m_mutex.unLock();
m_private->thread->join();
if (m_mode == audio::orchestra::mode_output) {
pa_simple_flush(m_private->handle, null);
}
pa_simple_free(m_private->handle);
m_private->handle = null;
m_userBuffer[0].clear();
m_userBuffer[1].clear();
m_state = audio::orchestra::state::closed;
m_mode = audio::orchestra::mode_unknow;
return audio::orchestra::error_none;
}
void audio::orchestra::api::Pulse::callbackEventOneCycle() {
if (m_state == audio::orchestra::state::stopped) {
while (!m_private->runnable) {
m_private->m_semaphore.wait();
}
if (m_state != audio::orchestra::state::running) {
m_mutex.unLock();
return;
}
}
if (m_state == audio::orchestra::state::closed) {
ATA_ERROR("the stream is closed ... this shouldn't happen!");
return;
}
audio::Time streamTime = getStreamTime();
etk::Vector<enum audio::orchestra::status> status;
int32_t doStopStream = m_callback(&m_userBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_input)][0],
streamTime,
&m_userBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_output)][0],
streamTime,
m_bufferSize,
status);
if (doStopStream == 2) {
abortStream();
return;
}
m_mutex.lock();
void *pulse_in = m_doConvertBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_input)] ? m_deviceBuffer : &m_userBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_input)][0];
void *pulse_out = m_doConvertBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_output)] ? m_deviceBuffer : &m_userBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_output)][0];
if (m_state != audio::orchestra::state::running) {
goto unLock;
}
int32_t pa_error;
size_t bytes;
if (m_mode == audio::orchestra::mode_output) {
if (m_doConvertBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_output)]) {
convertBuffer(m_deviceBuffer,
&m_userBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_output)][0],
m_convertInfo[audio::orchestra::modeToIdTable(audio::orchestra::mode_output)]);
bytes = m_nDeviceChannels[audio::orchestra::modeToIdTable(audio::orchestra::mode_output)] * m_bufferSize * audio::getFormatBytes(m_deviceFormat[audio::orchestra::modeToIdTable(audio::orchestra::mode_output)]);
} else {
bytes = m_nUserChannels[audio::orchestra::modeToIdTable(audio::orchestra::mode_output)] * m_bufferSize * audio::getFormatBytes(m_userFormat);
}
if (pa_simple_write(m_private->handle, pulse_out, bytes, &pa_error) < 0) {
ATA_ERROR("audio write error, " << pa_strerror(pa_error) << ".");
m_mutex.unLock();
return;
}
}
if (m_mode == audio::orchestra::mode_input) {
if (m_doConvertBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_input)]) {
bytes = m_nDeviceChannels[audio::orchestra::modeToIdTable(audio::orchestra::mode_input)] * m_bufferSize * audio::getFormatBytes(m_deviceFormat[audio::orchestra::modeToIdTable(audio::orchestra::mode_input)]);
} else {
bytes = m_nUserChannels[audio::orchestra::modeToIdTable(audio::orchestra::mode_input)] * m_bufferSize * audio::getFormatBytes(m_userFormat);
}
if (pa_simple_read(m_private->handle, pulse_in, bytes, &pa_error) < 0) {
ATA_ERROR("audio read error, " << pa_strerror(pa_error) << ".");
m_mutex.unLock();
return;
}
if (m_doConvertBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_input)]) {
convertBuffer(&m_userBuffer[audio::orchestra::modeToIdTable(audio::orchestra::mode_input)][0],
m_deviceBuffer,
m_convertInfo[audio::orchestra::modeToIdTable(audio::orchestra::mode_input)]);
}
}
unLock:
m_mutex.unLock();
audio::orchestra::Api::tickStreamTime();
if (doStopStream == 1) {
stopStream();
return;
}
return;
}
enum audio::orchestra::error audio::orchestra::api::Pulse::startStream() {
// TODO : Check return ...
audio::orchestra::Api::startStream();
if (m_state == audio::orchestra::state::closed) {
ATA_ERROR("the stream is not open!");
return audio::orchestra::error_invalidUse;
}
if (m_state == audio::orchestra::state::running) {
ATA_ERROR("the stream is already running!");
return audio::orchestra::error_warning;
}
m_mutex.lock();
m_state = audio::orchestra::state::running;
m_private->runnable = true;
m_private->m_semaphore.post();
m_mutex.unLock();
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::Pulse::stopStream() {
if (m_state == audio::orchestra::state::closed) {
ATA_ERROR("the stream is not open!");
return audio::orchestra::error_invalidUse;
}
if (m_state == audio::orchestra::state::stopped) {
ATA_ERROR("the stream is already stopped!");
return audio::orchestra::error_warning;
}
m_state = audio::orchestra::state::stopped;
m_mutex.lock();
if ( m_private != null
&& m_private->handle != null
&& m_mode == audio::orchestra::mode_output) {
int32_t pa_error;
if (pa_simple_drain(m_private->handle, &pa_error) < 0) {
ATA_ERROR("error draining output device, " << pa_strerror(pa_error) << ".");
m_mutex.unLock();
return audio::orchestra::error_systemError;
}
}
m_state = audio::orchestra::state::stopped;
m_mutex.unLock();
return audio::orchestra::error_none;
}
enum audio::orchestra::error audio::orchestra::api::Pulse::abortStream() {
if (m_state == audio::orchestra::state::closed) {
ATA_ERROR("the stream is not open!");
return audio::orchestra::error_invalidUse;
}
if (m_state == audio::orchestra::state::stopped) {
ATA_ERROR("the stream is already stopped!");
return audio::orchestra::error_warning;
}
m_state = audio::orchestra::state::stopped;
m_mutex.lock();
if ( m_private != null
&& m_private->handle != null
&& m_mode == audio::orchestra::mode_output) {
int32_t pa_error;
if (pa_simple_flush(m_private->handle, &pa_error) < 0) {
ATA_ERROR("error flushing output device, " << pa_strerror(pa_error) << ".");
m_mutex.unLock();
return audio::orchestra::error_systemError;
}
}
m_state = audio::orchestra::state::stopped;
m_mutex.unLock();
return audio::orchestra::error_none;
}
bool audio::orchestra::api::Pulse::open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options) {
uint64_t bufferBytes = 0;
pa_sample_spec ss;
if (_device != 0) {
return false;
}
if (_mode != audio::orchestra::mode_input && _mode != audio::orchestra::mode_output) {
return false;
}
if (_channels != 1 && _channels != 2) {
ATA_ERROR("unsupported number of channels.");
return false;
}
ss.channels = _channels;
if (_firstChannel != 0) {
return false;
}
bool sr_found = false;
for (const uint32_t *sr = SUPPORTED_SAMPLERATES; *sr; ++sr) {
if (_sampleRate == *sr) {
sr_found = true;
m_sampleRate = _sampleRate;
ss.rate = _sampleRate;
break;
}
}
if (!sr_found) {
ATA_ERROR("unsupported sample rate.");
return false;
}
bool sf_found = false;
for (const rtaudio_pa_format_mapping_t *sf = supported_sampleformats;
sf->airtaudio_format && sf->pa_format != PA_SAMPLE_INVALID;
++sf) {
if (_format == sf->airtaudio_format) {
sf_found = true;
m_userFormat = sf->airtaudio_format;
ss.format = sf->pa_format;
break;
}
}
if (!sf_found) {
ATA_ERROR("unsupported sample format.");
return false;
}
m_deviceInterleaved[modeToIdTable(_mode)] = true;
m_nBuffers = 1;
m_doByteSwap[modeToIdTable(_mode)] = false;
m_doConvertBuffer[modeToIdTable(_mode)] = false;
m_deviceFormat[modeToIdTable(_mode)] = m_userFormat;
m_nUserChannels[modeToIdTable(_mode)] = _channels;
m_nDeviceChannels[modeToIdTable(_mode)] = _channels + _firstChannel;
m_channelOffset[modeToIdTable(_mode)] = 0;
// Allocate necessary internal buffers.
bufferBytes = m_nUserChannels[modeToIdTable(_mode)] * *_bufferSize * audio::getFormatBytes(m_userFormat);
m_userBuffer[modeToIdTable(_mode)].resize(bufferBytes, 0);
if (m_userBuffer[modeToIdTable(_mode)].size() == 0) {
ATA_ERROR("error allocating user buffer memory.");
goto error;
}
m_bufferSize = *_bufferSize;
if (m_doConvertBuffer[modeToIdTable(_mode)]) {
bool makeBuffer = true;
bufferBytes = m_nDeviceChannels[modeToIdTable(_mode)] * audio::getFormatBytes(m_deviceFormat[modeToIdTable(_mode)]);
if (_mode == audio::orchestra::mode_input) {
if (m_mode == audio::orchestra::mode_output && m_deviceBuffer) {
uint64_t bytesOut = m_nDeviceChannels[0] * audio::getFormatBytes(m_deviceFormat[0]);
if (bufferBytes <= bytesOut) makeBuffer = false;
}
}
if (makeBuffer) {
bufferBytes *= *_bufferSize;
if (m_deviceBuffer) free(m_deviceBuffer);
m_deviceBuffer = (char *) calloc(bufferBytes, 1);
if (m_deviceBuffer == null) {
ATA_ERROR("error allocating device buffer memory.");
goto error;
}
}
}
m_device[modeToIdTable(_mode)] = _device;
// Setup the buffer conversion information structure.
if (m_doConvertBuffer[modeToIdTable(_mode)]) {
setConvertInfo(_mode, _firstChannel);
}
int32_t error;
switch (_mode) {
case audio::orchestra::mode_input:
m_private->handle = pa_simple_new(null, "orchestra", PA_STREAM_RECORD, null, "Record", &ss, null, null, &error);
if (m_private->handle == null) {
ATA_ERROR("error connecting input to PulseAudio server.");
goto error;
}
break;
case audio::orchestra::mode_output:
m_private->handle = pa_simple_new(null, "orchestra", PA_STREAM_PLAYBACK, null, "Playback", &ss, null, null, &error);
if (m_private->handle == null) {
ATA_ERROR("error connecting output to PulseAudio server.");
goto error;
}
break;
default:
goto error;
}
if (m_mode == audio::orchestra::mode_unknow) {
m_mode = _mode;
} else {
goto error;
}
if (m_private->threadRunning == false) {
m_private->threadRunning = true;
m_private->thread = ememory::makeShared<ethread::Thread>([&](){callbackEvent();}, "pulseCallback");
if (m_private->thread == null) {
ATA_ERROR("error creating thread.");
goto error;
}
}
m_state = audio::orchestra::state::stopped;
return true;
error:
for (int32_t iii=0; iii<2; ++iii) {
m_userBuffer[iii].clear();
}
if (m_deviceBuffer) {
free(m_deviceBuffer);
m_deviceBuffer = 0;
}
return false;
}
#endif

View File

@ -1,52 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#ifdef ORCHESTRA_BUILD_PULSE
namespace audio {
namespace orchestra {
namespace api {
class PulsePrivate;
class Pulse: public audio::orchestra::Api {
public:
static ememory::SharedPtr<audio::orchestra::Api> create();
public:
Pulse();
virtual ~Pulse();
const etk::String& getCurrentApi() {
return audio::orchestra::typePulse;
}
uint32_t getDeviceCount();
audio::orchestra::DeviceInfo getDeviceInfo(uint32_t _device);
enum audio::orchestra::error closeStream();
enum audio::orchestra::error startStream();
enum audio::orchestra::error stopStream();
enum audio::orchestra::error abortStream();
// This function is intended for internal use only. It must be
// public because it is called by the internal callback handler,
// which is not a member of RtAudio. External use of this function
// will most likely produce highly undesirable results!
void callbackEventOneCycle();
void callbackEvent();
private:
ememory::SharedPtr<PulsePrivate> m_private;
etk::Vector<audio::orchestra::DeviceInfo> m_devices;
void saveDeviceInfo();
bool open(uint32_t _device,
audio::orchestra::mode _mode,
uint32_t _channels,
uint32_t _firstChannel,
uint32_t _sampleRate,
audio::format _format,
uint32_t *_bufferSize,
const audio::orchestra::StreamOptions& _options);
};
}
}
}
#endif

View File

@ -1,363 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#if defined(ORCHESTRA_BUILD_PULSE)
extern "C" {
#include <stdio.h>
#include <string.h>
}
#include <pulse/pulseaudio.h>
#include <audio/orchestra/api/PulseDeviceList.hpp>
#include <audio/orchestra/debug.hpp>
#include <audio/Time.hpp>
#include <audio/Duration.hpp>
#include <audio/format.hpp>
#include <etk/stdTools.hpp>
// This callback gets called when our context changes state. We really only
// care about when it's ready or if it has failed
static void callbackStateMachine(pa_context* _contex, void *_userdata) {
pa_context_state_t state;
int *pulseAudioReady = static_cast<int*>(_userdata);
state = pa_context_get_state(_contex);
switch (state) {
// These are just here for reference
case PA_CONTEXT_UNCONNECTED:
ATA_VERBOSE("pulse state: PA_CONTEXT_UNCONNECTED");
break;
case PA_CONTEXT_CONNECTING:
ATA_VERBOSE("pulse state: PA_CONTEXT_CONNECTING");
break;
case PA_CONTEXT_AUTHORIZING:
ATA_VERBOSE("pulse state: PA_CONTEXT_AUTHORIZING");
break;
case PA_CONTEXT_SETTING_NAME:
ATA_VERBOSE("pulse state: PA_CONTEXT_SETTING_NAME");
break;
default:
ATA_VERBOSE("pulse state: default");
break;
case PA_CONTEXT_FAILED:
*pulseAudioReady = 2;
ATA_VERBOSE("pulse state: PA_CONTEXT_FAILED");
break;
case PA_CONTEXT_TERMINATED:
*pulseAudioReady = 2;
ATA_VERBOSE("pulse state: PA_CONTEXT_TERMINATED");
break;
case PA_CONTEXT_READY:
*pulseAudioReady = 1;
ATA_VERBOSE("pulse state: PA_CONTEXT_READY");
break;
}
}
static audio::format getFormatFromPulseFormat(enum pa_sample_format _format) {
switch (_format) {
case PA_SAMPLE_U8:
return audio::format_int8;
break;
case PA_SAMPLE_ALAW:
ATA_ERROR("Not supported: uint8_t a-law");
return audio::format_unknow;
case PA_SAMPLE_ULAW:
ATA_ERROR("Not supported: uint8_t mu-law");
return audio::format_unknow;
case PA_SAMPLE_S16LE:
return audio::format_int16;
break;
case PA_SAMPLE_S16BE:
return audio::format_int16;
break;
case PA_SAMPLE_FLOAT32LE:
return audio::format_float;
break;
case PA_SAMPLE_FLOAT32BE:
return audio::format_float;
break;
case PA_SAMPLE_S32LE:
return audio::format_int32;
break;
case PA_SAMPLE_S32BE:
return audio::format_int32;
break;
case PA_SAMPLE_S24LE:
return audio::format_int24;
break;
case PA_SAMPLE_S24BE:
return audio::format_int24;
break;
case PA_SAMPLE_S24_32LE:
return audio::format_int24_on_int32;
break;
case PA_SAMPLE_S24_32BE:
return audio::format_int24_on_int32;
break;
case PA_SAMPLE_INVALID:
case PA_SAMPLE_MAX:
ATA_ERROR("Not supported: invalid");
return audio::format_unknow;
}
ATA_ERROR("Not supported: UNKNOW flag...");
return audio::format_unknow;
}
static etk::Vector<audio::channel> getChannelOrderFromPulseChannel(const struct pa_channel_map& _map) {
etk::Vector<audio::channel> out;
for (int32_t iii=0; iii<_map.channels; ++iii) {
switch(_map.map[iii]) {
default:
case PA_CHANNEL_POSITION_MAX:
case PA_CHANNEL_POSITION_INVALID:
out.pushBack(audio::channel_unknow);
break;
case PA_CHANNEL_POSITION_MONO:
case PA_CHANNEL_POSITION_FRONT_CENTER:
out.pushBack(audio::channel_frontCenter);
break;
case PA_CHANNEL_POSITION_FRONT_LEFT:
out.pushBack(audio::channel_frontLeft);
break;
case PA_CHANNEL_POSITION_FRONT_RIGHT:
out.pushBack(audio::channel_frontRight);
break;
case PA_CHANNEL_POSITION_REAR_CENTER:
out.pushBack(audio::channel_rearCenter);
break;
case PA_CHANNEL_POSITION_REAR_LEFT:
out.pushBack(audio::channel_rearLeft);
break;
case PA_CHANNEL_POSITION_REAR_RIGHT:
out.pushBack(audio::channel_rearRight);
break;
case PA_CHANNEL_POSITION_LFE:
out.pushBack(audio::channel_lfe);
break;
case PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER:
out.pushBack(audio::channel_centerLeft);
break;
case PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER:
out.pushBack(audio::channel_centerRight);
break;
case PA_CHANNEL_POSITION_SIDE_LEFT:
out.pushBack(audio::channel_topCenterLeft);
break;
case PA_CHANNEL_POSITION_SIDE_RIGHT:
out.pushBack(audio::channel_topCenterRight);
break;
case PA_CHANNEL_POSITION_TOP_CENTER:
case PA_CHANNEL_POSITION_TOP_FRONT_CENTER:
out.pushBack(audio::channel_topFrontCenter);
break;
case PA_CHANNEL_POSITION_TOP_FRONT_LEFT:
out.pushBack(audio::channel_topFrontLeft);
break;
case PA_CHANNEL_POSITION_TOP_FRONT_RIGHT:
out.pushBack(audio::channel_topFrontRight);
break;
case PA_CHANNEL_POSITION_TOP_REAR_LEFT:
out.pushBack(audio::channel_topRearLeft);
break;
case PA_CHANNEL_POSITION_TOP_REAR_RIGHT:
out.pushBack(audio::channel_topRearRight);
break;
case PA_CHANNEL_POSITION_TOP_REAR_CENTER:
out.pushBack(audio::channel_topRearCenter);
break;
case PA_CHANNEL_POSITION_AUX0: out.pushBack(audio::channel_aux0); break;
case PA_CHANNEL_POSITION_AUX1: out.pushBack(audio::channel_aux1); break;
case PA_CHANNEL_POSITION_AUX2: out.pushBack(audio::channel_aux2); break;
case PA_CHANNEL_POSITION_AUX3: out.pushBack(audio::channel_aux3); break;
case PA_CHANNEL_POSITION_AUX4: out.pushBack(audio::channel_aux4); break;
case PA_CHANNEL_POSITION_AUX5: out.pushBack(audio::channel_aux5); break;
case PA_CHANNEL_POSITION_AUX6: out.pushBack(audio::channel_aux6); break;
case PA_CHANNEL_POSITION_AUX7: out.pushBack(audio::channel_aux7); break;
case PA_CHANNEL_POSITION_AUX8: out.pushBack(audio::channel_aux8); break;
case PA_CHANNEL_POSITION_AUX9: out.pushBack(audio::channel_aux9); break;
case PA_CHANNEL_POSITION_AUX10: out.pushBack(audio::channel_aux10); break;
case PA_CHANNEL_POSITION_AUX11: out.pushBack(audio::channel_aux11); break;
case PA_CHANNEL_POSITION_AUX12: out.pushBack(audio::channel_aux12); break;
case PA_CHANNEL_POSITION_AUX13: out.pushBack(audio::channel_aux13); break;
case PA_CHANNEL_POSITION_AUX14: out.pushBack(audio::channel_aux14); break;
case PA_CHANNEL_POSITION_AUX15: out.pushBack(audio::channel_aux15); break;
case PA_CHANNEL_POSITION_AUX16: out.pushBack(audio::channel_aux16); break;
case PA_CHANNEL_POSITION_AUX17: out.pushBack(audio::channel_aux17); break;
case PA_CHANNEL_POSITION_AUX18: out.pushBack(audio::channel_aux18); break;
case PA_CHANNEL_POSITION_AUX19: out.pushBack(audio::channel_aux19); break;
case PA_CHANNEL_POSITION_AUX20: out.pushBack(audio::channel_aux20); break;
case PA_CHANNEL_POSITION_AUX21: out.pushBack(audio::channel_aux21); break;
case PA_CHANNEL_POSITION_AUX22: out.pushBack(audio::channel_aux22); break;
case PA_CHANNEL_POSITION_AUX23: out.pushBack(audio::channel_aux23); break;
case PA_CHANNEL_POSITION_AUX24: out.pushBack(audio::channel_aux24); break;
case PA_CHANNEL_POSITION_AUX25: out.pushBack(audio::channel_aux25); break;
case PA_CHANNEL_POSITION_AUX26: out.pushBack(audio::channel_aux26); break;
case PA_CHANNEL_POSITION_AUX27: out.pushBack(audio::channel_aux27); break;
case PA_CHANNEL_POSITION_AUX28: out.pushBack(audio::channel_aux28); break;
case PA_CHANNEL_POSITION_AUX29: out.pushBack(audio::channel_aux29); break;
case PA_CHANNEL_POSITION_AUX30: out.pushBack(audio::channel_aux30); break;
case PA_CHANNEL_POSITION_AUX31: out.pushBack(audio::channel_aux31); break;
}
}
return out;
}
// Callback on getting data from pulseaudio:
static void callbackGetSinkList(pa_context* _contex, const pa_sink_info* _info, int _eol, void* _userdata) {
etk::Vector<audio::orchestra::DeviceInfo>* list = static_cast<etk::Vector<audio::orchestra::DeviceInfo>*>(_userdata);
// If eol is set to a positive number, you're at the end of the list
if (_eol > 0) {
return;
}
audio::orchestra::DeviceInfo info;
info.isCorrect = true;
info.input = false;
info.name = _info->name;
info.desc = _info->description;
info.sampleRates.pushBack(_info->sample_spec.rate);
info.nativeFormats.pushBack(getFormatFromPulseFormat(_info->sample_spec.format));
info.channels = getChannelOrderFromPulseChannel(_info->channel_map);
ATA_VERBOSE("plop=" << _info->index << " " << _info->name);
//ATA_DEBUG(" ports=" << _info->n_ports);
list->pushBack(info);
}
// Callback to get data from pulseaudio:
static void callbackGetSourceList(pa_context* _contex, const pa_source_info* _info, int _eol, void* _userdata) {
etk::Vector<audio::orchestra::DeviceInfo>* list = static_cast<etk::Vector<audio::orchestra::DeviceInfo>*>(_userdata);
if (_eol > 0) {
return;
}
audio::orchestra::DeviceInfo info;
info.isCorrect = true;
info.input = true;
info.name = _info->name;
info.desc = _info->description;
info.sampleRates.pushBack(_info->sample_spec.rate);
info.nativeFormats.pushBack(getFormatFromPulseFormat(_info->sample_spec.format));
info.channels = getChannelOrderFromPulseChannel(_info->channel_map);
ATA_VERBOSE("plop=" << _info->index << " " << _info->name);
list->pushBack(info);
}
// Cache the device list so the PulseAudio server is not queried on every call:
static etk::Vector<audio::orchestra::DeviceInfo> pulseAudioListOfDevice;
static audio::Time pulseAudioListOfDeviceTime;
etk::Vector<audio::orchestra::DeviceInfo> audio::orchestra::api::pulse::getDeviceList() {
audio::Duration delta = audio::Time::now() - pulseAudioListOfDeviceTime;
if (delta < audio::Duration(30,0)) {
return pulseAudioListOfDevice;
}
// Define our pulse audio loop and connection variables
pa_mainloop* pulseAudioMainLoop;
pa_mainloop_api* pulseAudioMainLoopAPI;
pa_operation* pulseAudioOperation;
pa_context* pulseAudioContex;
pa_context_flags_t pulseAudioFlags = PA_CONTEXT_NOAUTOSPAWN;
etk::Vector<audio::orchestra::DeviceInfo>& out = pulseAudioListOfDevice;
out.clear();
// We'll need these state variables to keep track of our requests
int state = 0;
int pulseAudioReady = 0;
// Create a mainloop API and connection to the default server
pulseAudioMainLoop = pa_mainloop_new();
pulseAudioMainLoopAPI = pa_mainloop_get_api(pulseAudioMainLoop);
pulseAudioContex = pa_context_new(pulseAudioMainLoopAPI, "orchestraPulseCount");
// If there's an error, the callback will set pulseAudioReady
pa_context_set_state_callback(pulseAudioContex, callbackStateMachine, &pulseAudioReady);
// This function connects to the pulse server
pa_context_connect(pulseAudioContex, NULL, pulseAudioFlags, NULL);
bool playLoop = true;
while (playLoop == true) {
// We can't do anything until PA is ready, so just iterate the mainloop
// and continue
if (pulseAudioReady == 0) {
pa_mainloop_iterate(pulseAudioMainLoop, 1, null);
continue;
}
// We couldn't get a connection to the server, so exit out
if (pulseAudioReady == 2) {
pa_context_disconnect(pulseAudioContex);
pa_context_unref(pulseAudioContex);
pa_mainloop_free(pulseAudioMainLoop);
ATA_ERROR("Pulse interface error: Can not connect to the pulseaudio iterface...");
return out;
}
// At this point, we're connected to the server and ready to make
// requests
switch (state) {
// State 0: we haven't done anything yet
case 0:
ATA_DEBUG("Request sink list");
pulseAudioOperation = pa_context_get_sink_info_list(pulseAudioContex,
callbackGetSinkList,
&out);
state++;
break;
case 1:
// Now we wait for our operation to complete. When it's
// complete our pa_output_devicelist is filled out, and we move
// along to the next state
if (pa_operation_get_state(pulseAudioOperation) == PA_OPERATION_DONE) {
pa_operation_unref(pulseAudioOperation);
ATA_DEBUG("Request sources list");
pulseAudioOperation = pa_context_get_source_info_list(pulseAudioContex,
callbackGetSourceList,
&out);
state++;
}
break;
case 2:
if (pa_operation_get_state(pulseAudioOperation) == PA_OPERATION_DONE) {
ATA_DEBUG("All is done");
// Now we're done, clean up and disconnect and return
pa_operation_unref(pulseAudioOperation);
pa_context_disconnect(pulseAudioContex);
pa_context_unref(pulseAudioContex);
pa_mainloop_free(pulseAudioMainLoop);
playLoop = false;
break;
}
break;
default:
// We should never see this state
ATA_ERROR("Error in getting the devices list ...");
return out;
}
// Iterate the main loop ..
if (playLoop == true) {
pa_mainloop_iterate(pulseAudioMainLoop, 1, null);
}
}
// TODO: need to do it better ...
// set default device:
int32_t idInput = -1;
int32_t idOutput = -1;
for (int32_t iii=0; iii<out.size(); ++iii) {
if (out[iii].input == true) {
if (idInput != -1) {
continue;
}
if (etk::end_with(out[iii].name, ".monitor", false) == false) {
idInput = iii;
out[iii].isDefault = true;
}
} else {
if (idOutput != -1) {
continue;
}
if (etk::end_with(out[iii].name, ".monitor", false) == false) {
idOutput = iii;
out[iii].isDefault = true;
}
}
}
return out;
}
#endif

View File

@ -1,23 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#ifdef ORCHESTRA_BUILD_PULSE
#include <etk/types.hpp>
#include <audio/orchestra/DeviceInfo.hpp>
namespace audio {
namespace orchestra {
namespace api {
namespace pulse {
etk::Vector<audio::orchestra::DeviceInfo> getDeviceList();
}
}
}
}
#endif

View File

@ -1,6 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/

View File

@ -1,25 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <ethread/Thread.hpp>
#include <ethread/Semaphore.hpp>
#include <ethread/Mutex.hpp>
#include <echrono/Steady.hpp>
#include <etk/Function.hpp>
#include <ememory/memory.hpp>
#include <audio/channel.hpp>
#include <audio/format.hpp>
#include <audio/orchestra/error.hpp>
#include <audio/orchestra/status.hpp>
#include <audio/orchestra/Flags.hpp>
#include <audio/orchestra/CallbackInfo.hpp>
#include <audio/orchestra/DeviceInfo.hpp>
#include <audio/orchestra/StreamOptions.hpp>
#include <audio/orchestra/StreamParameters.hpp>

View File

@ -1,13 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#include <audio/orchestra/debug.hpp>
int32_t audio::orchestra::getLogId() {
static int32_t g_val = elog::registerInstance("audio-orchestra");
return g_val;
}

View File

@ -1,41 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <elog/log.hpp>
namespace audio {
namespace orchestra {
int32_t getLogId();
}
}
#define ATA_BASE(info,data) ELOG_BASE(audio::orchestra::getLogId(),info,data)
#define ATA_PRINT(data) ATA_BASE(-1, data)
#define ATA_CRITICAL(data) ATA_BASE(1, data)
#define ATA_ERROR(data) ATA_BASE(2, data)
#define ATA_WARNING(data) ATA_BASE(3, data)
#ifdef DEBUG
#define ATA_INFO(data) ATA_BASE(4, data)
#define ATA_DEBUG(data) ATA_BASE(5, data)
#define ATA_VERBOSE(data) ATA_BASE(6, data)
#define ATA_TODO(data) ATA_BASE(4, "TODO : " << data)
#else
#define ATA_INFO(data) do { } while(false)
#define ATA_DEBUG(data) do { } while(false)
#define ATA_VERBOSE(data) do { } while(false)
#define ATA_TODO(data) do { } while(false)
#endif
#define ATA_ASSERT(cond,data) \
do { \
if (!(cond)) { \
ATA_CRITICAL(data); \
assert(!#cond); \
} \
} while (0)

View File

@ -1,9 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#include <audio/orchestra/error.hpp>
#include <audio/orchestra/debug.hpp>

View File

@ -1,22 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <etk/types.hpp>
namespace audio {
namespace orchestra {
enum error {
error_none, //!< No error
error_fail, //!< An error occurred in the operation
error_warning, //!< A non-critical error.
error_inputNull, //!< null input or internal error
error_invalidUse, //!< The function was called incorrectly.
error_systemError //!< A system error occurred.
};
}
}

View File

@ -1,39 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#include <audio/orchestra/mode.hpp>
#include <audio/orchestra/debug.hpp>
int32_t audio::orchestra::modeToIdTable(enum mode _mode) {
switch (_mode) {
case mode_unknow:
case mode_duplex:
case mode_output:
return 0;
case mode_input:
return 1;
}
return 0;
}
etk::Stream& audio::operator <<(etk::Stream& _os, enum audio::orchestra::mode _obj) {
switch (_obj) {
case audio::orchestra::mode_unknow:
_os << "unknow";
break;
case audio::orchestra::mode_duplex:
_os << "duplex";
break;
case audio::orchestra::mode_output:
_os << "output";
break;
case audio::orchestra::mode_input:
_os << "input";
break;
}
return _os;
}

View File

@ -1,24 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <etk/types.hpp>
#include <etk/Stream.hpp>
namespace audio {
namespace orchestra {
enum mode {
mode_unknow,
mode_output,
mode_input,
mode_duplex
};
int32_t modeToIdTable(enum mode _mode);
}
etk::Stream& operator <<(etk::Stream& _os, enum audio::orchestra::mode _obj);
}

View File

@ -1,6 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/

View File

@ -1,21 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <etk/types.hpp>
namespace audio {
namespace orchestra {
enum class state {
closed,
stopped,
stopping,
running
};
}
}

View File

@ -1,32 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#include <audio/orchestra/status.hpp>
#include <audio/orchestra/debug.hpp>
static const char* listValue[] = {
"ok",
"overflow",
"underflow"
};
etk::Stream& audio::orchestra::operator <<(etk::Stream& _os, enum audio::orchestra::status _obj) {
_os << listValue[int32_t(_obj)];
return _os;
}
etk::Stream& audio::orchestra::operator <<(etk::Stream& _os, const etk::Vector<enum audio::orchestra::status>& _obj) {
_os << etk::String("{");
for (size_t iii=0; iii<_obj.size(); ++iii) {
if (iii!=0) {
_os << etk::String(";");
}
_os << _obj[iii];
}
_os << etk::String("}");
return _os;
}

View File

@ -1,23 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <etk/types.hpp>
#include <etk/Vector.hpp>
namespace audio {
namespace orchestra {
enum class status {
ok, //!< nothing...
overflow, //!< The internal buffer has more data than it can accept
underflow //!< The internal buffer is empty
};
etk::Stream& operator <<(etk::Stream& _os, enum audio::orchestra::status _obj);
etk::Stream& operator <<(etk::Stream& _os, const etk::Vector<enum audio::orchestra::status>& _obj);
}
}

View File

@ -1,21 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#include <audio/orchestra/type.hpp>
#include <audio/orchestra/debug.hpp>
const etk::String audio::orchestra::typeUndefined = "undefined";
const etk::String audio::orchestra::typeAlsa = "alsa";
const etk::String audio::orchestra::typePulse = "pulse";
const etk::String audio::orchestra::typeOss = "oss";
const etk::String audio::orchestra::typeJack = "jack";
const etk::String audio::orchestra::typeCoreOSX = "coreOSX";
const etk::String audio::orchestra::typeCoreIOS = "coreIOS";
const etk::String audio::orchestra::typeAsio = "asio";
const etk::String audio::orchestra::typeDs = "ds";
const etk::String audio::orchestra::typeJava = "java";
const etk::String audio::orchestra::typeDummy = "dummy";

View File

@ -1,30 +0,0 @@
/** @file
* @author Edouard DUPIN
* @copyright 2011, Edouard DUPIN, all right reserved
* @license APACHE v2.0 (see license file)
* @fork from RTAudio
*/
#pragma once
#include <etk/types.hpp>
#include <etk/stdTools.hpp>
namespace audio {
namespace orchestra {
/**
* @brief Audio API specifier arguments.
*/
extern const etk::String typeUndefined; //!< Error API.
extern const etk::String typeAlsa; //!< LINUX The Advanced Linux Sound Architecture.
extern const etk::String typePulse; //!< LINUX The Linux PulseAudio.
extern const etk::String typeOss; //!< LINUX The Linux Open Sound System.
extern const etk::String typeJack; //!< UNIX The Jack Low-Latency Audio Server.
extern const etk::String typeCoreOSX; //!< Macintosh OSX Core Audio.
extern const etk::String typeCoreIOS; //!< Macintosh iOS Core Audio.
extern const etk::String typeAsio; //!< WINDOWS The Steinberg Audio Stream I/O.
extern const etk::String typeDs; //!< WINDOWS The Microsoft Direct Sound.
extern const etk::String typeJava; //!< ANDROID Interface.
extern const etk::String typeDummy; //!< Empty wrapper (non-functional).
}
}

184
audio_orchestra_build.html Normal file
View File

@ -0,0 +1,184 @@
<!-- HTML header for doxygen 1.8.8-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<!-- For Mobile Devices -->
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
<title>Orchestra: audio interface wrapper: Build lib &amp; build sample</title>
<!--<link href="tabs.css" rel="stylesheet" type="text/css"/>-->
<script type="text/javascript" src="dynsections.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
<link href="customdoxygen.css" rel="stylesheet" type="text/css"/>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap.min.css">
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/js/bootstrap.min.js"></script>
<script type="text/javascript" src="doxy-boot.js"></script>
</head>
<body>
<nav class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<a class="navbar-brand">Orchestra: audio interface wrapper 0.4.0</a>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li><a href="index.html">Main&nbsp;Page</a></li>
<li><a href="pages.html">Related&nbsp;Pages</a></li>
<li><a href="namespaces.html">Namespaces</a></li>
<li><a href="annotated.html">Classes</a></li>
<li><a href="files.html">Files</a></li>
</ul>
<ul class="nav navbar-nav navbar-right">
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">
Link-libs<span class="caret"></span>
</a>
<ul class="dropdown-menu">
<li><a href="http://HeeroYui.github.io/lutin">lutin</a></li>
<li><a href="http://atria-soft.github.io/ewol">ewol</a></li>
<li><a href="http://atria-soft.github.io/echrono">echrono</a></li>
<li><a href="http://atria-soft.github.io/etk">etk</a></li>
<li><a href="http://atria-soft.github.io/ejson">ejson</a></li>
<li><a href="http://atria-soft.github.io/exml">exml</a></li>
<li><a href="http://atria-soft.github.io/esvg">esvg</a></li>
<li><a href="http://atria-soft.github.io/egami">egami</a></li>
<li><a href="http://atria-soft.github.io/gale">gale</a></li>
<li><a href="http://atria-soft.github.io/ege">ege</a></li>
<li><a href="http://atria-soft.github.io/elog">elog</a></li>
<li><a href="http://atria-soft.github.io/ememory">ememory</a></li>
<li><a href="http://atria-soft.github.io/enet">enet</a></li>
<li><a href="http://atria-soft.github.io/eproperty">eproperty</a></li>
<li><a href="http://atria-soft.github.io/esignal">esignal</a></li>
<li><a href="http://atria-soft.github.io/etranslate">etranslate</a></li>
<li><a href="http://atria-soft.github.io/zeus">zeus</a></li>
<li><a href="http://musicdsp.github.io/audio-ess">audio-ess</a></li>
<li><a href="http://musicdsp.github.io/audio">audio</a></li>
<li><a href="http://musicdsp.github.io/audio-drain">audio-drain</a></li>
<li><a href="http://musicdsp.github.io/audio-orchestra">audio-orchestra</a></li>
<li><a href="http://musicdsp.github.io/audio-river">audio-river</a></li>
</ul>
</li>
</ul>
<div id="search-box" class="input-group">
<div class="input-group-btn">
<button aria-expanded="false" type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">
<span class="glyphicon glyphicon-search"></span>
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li><a href="#">All</a></li>
<li><a href="#">Classes</a></li>
<li><a href="#">Namespaces</a></li>
<li><a href="#">Files</a></li>
<li><a href="#">Functions</a></li>
<li><a href="#">Variables</a></li>
<li><a href="#">Typedefs</a></li>
<li><a href="#">Enumerations</a></li>
<li><a href="#">Enumerator</a></li>
<li><a href="#">Friends</a></li>
<li><a href="#">Macros</a></li>
<li><a href="#">Pages</a></li>
</ul>
</div>
<button id="search-close" type="button" class="close" aria-label="Close">
<span aria-hidden="true"></span>
</button>
<input id="search-field" class="form-control" accesskey="S" onkeydown="searchBox.OnSearchFieldChange(event);" placeholder="Search ..." type="text">
</div>
</div><!--/.nav-collapse -->
</div>
</nav>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div class="content" id="content">
<div class="container">
<div class="row">
<div class="col-sm-12 panel panel-default" style="padding-bottom: 15px;">
<div style="margin-bottom: 15px;margin-top: 60px;">
<!-- end header part -->
<!-- Generated by Doxygen 1.8.12 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
$(function() {
initMenu('',true,false,'search.php','Search');
$(document).ready(function() { init_search(); });
});
</script>
<div id="main-nav"></div>
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
</div><!-- top -->
<div class="header">
<div class="headertitle">
<div class="title">Build lib &amp; build sample </div> </div>
</div><!--header-->
<div class="contents">
<div class="toc"><h3>Table of Contents</h3>
<ul><li class="level1"><a href="#audio_orchestra_build_download">Download: </a><ul><li class="level2"><a href="#audio_orchestra_build_download_repo">need google repo: </a></li>
<li class="level2"><a href="#audio_orchestra_build_download_lutin">lutin (build-system): </a></li>
<li class="level2"><a href="#audio_orchestra_build_download_dependency">dependency: </a></li>
<li class="level2"><a href="#audio_orchestra_build_download_sources">sources: </a></li>
</ul>
</li>
<li class="level1"><a href="#audio_orchestra_build_build">Build: </a><ul><li class="level2"><a href="#audio_orchestra_build_build_library">library: </a></li>
<li class="level2"><a href="#audio_orchestra_build_build_sample">Sample: </a></li>
</ul>
</li>
</ul>
</div>
<div class="textblock"><h1><a class="anchor" id="audio_orchestra_build_download"></a>
Download: </h1>
<p>audio-orchestra uses some tools to manage its sources and build them:</p>
<h2><a class="anchor" id="audio_orchestra_build_download_repo"></a>
need google repo: </h2>
<p>see: <a href="http://source.android.com/source/downloading.html#installing-repo">http://source.android.com/source/downloading.html#installing-repo</a></p>
<p>On all platforms: </p><div class="fragment"><div class="line">mkdir ~/.bin</div><div class="line">PATH=~/.bin:$PATH</div><div class="line">curl https://storage.googleapis.com/git-repo-downloads/repo &gt; ~/.bin/repo</div><div class="line">chmod a+x ~/.bin/repo</div></div><!-- fragment --><p>On Ubuntu </p><div class="fragment"><div class="line">sudo apt-get install repo</div></div><!-- fragment --><p>On Arch Linux </p><div class="fragment"><div class="line">sudo pacman -S repo</div></div><!-- fragment --><h2><a class="anchor" id="audio_orchestra_build_download_lutin"></a>
lutin (build-system): </h2>
<div class="fragment"><div class="line">pip install lutin --user</div><div class="line"># optionnal dependency of lutin (manage image changing size for application release)</div><div class="line">pip install pillow --user</div></div><!-- fragment --><h2><a class="anchor" id="audio_orchestra_build_download_dependency"></a>
dependency: </h2>
<div class="fragment"><div class="line">mkdir -p WORKING_DIRECTORY/framework</div><div class="line">cd WORKING_DIRECTORY/framework</div><div class="line">repo init -u git://github.com/atria-soft/manifest.git</div><div class="line">repo sync -j8</div><div class="line">cd ../..</div></div><!-- fragment --><h2><a class="anchor" id="audio_orchestra_build_download_sources"></a>
sources: </h2>
<p>They are already downloaded by the repo manifest into:</p>
<div class="fragment"><div class="line">cd WORKING_DIRECTORY/framework/atria-soft/audio_orchestra</div></div><!-- fragment --><h1><a class="anchor" id="audio_orchestra_build_build"></a>
Build: </h1>
<p>You must stay in your working directory... </p><div class="fragment"><div class="line">cd WORKING_DIRECTORY</div></div><!-- fragment --><h2><a class="anchor" id="audio_orchestra_build_build_library"></a>
library: </h2>
<div class="fragment"><div class="line">lutin -mdebug audio-orchestra</div></div><!-- fragment --><h2><a class="anchor" id="audio_orchestra_build_build_sample"></a>
Sample: </h2>
<p>No sample available for now ... </p>
</div></div><!-- contents -->
<!-- HTML footer for doxygen 1.8.8-->
<!-- start footer part -->
</div>
</div>
</div>
</div>
</div>
<hr class="footer"/><address class="footer"><small>
Generated on Mon Oct 24 2016 15:35:48 for Orchestra: audio interface wrapper by &#160;<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.12
</small></address>
</body>
</html>

View File

@ -1 +0,0 @@
MR Edouard DUPIN <yui.heero@gmail.com>

BIN
bc_s.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 676 B

BIN
bdwn.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 147 B

BIN
closed.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 132 B

377
customdoxygen.css Normal file
View File

@ -0,0 +1,377 @@
#navrow1, #navrow2, #navrow3, #navrow4, #navrow5{
border-bottom: 1px solid #EEEEEE;
}
.adjust-right {
margin-left: 30px !important;
font-size: 1.15em !important;
}
.navbar{
border: 0px solid #222 !important;
}
/* Sticky footer styles
-------------------------------------------------- */
html,
body {
counter-reset: h1counter;
height: 100%;
/* The html and body elements cannot have any padding or margin. */
}
h1, .h1, h2, .h2, h3, .h3{
font-weight: bold !important;
}
h1:before {
content: counter(h1counter) ".\0000a0\0000a0";
counter-increment: h1counter;
counter-reset: h2counter;
}
h2:before {
content: counter(h1counter) "." counter(h2counter) ".\0000a0\0000a0";
counter-increment: h2counter;
counter-reset: h3counter;
}
h3:before {
content: counter(h1counter) "." counter(h2counter) "." counter(h3counter) ".\0000a0\0000a0";
counter-increment: h3counter;
}
/* Wrapper for page content to push down footer */
#wrap {
min-height: 100%;
height: auto;
/* Negative indent footer by its height */
margin: 0 auto -60px;
/* Pad bottom by footer height */
padding: 0 0 60px;
}
/* Set the fixed height of the footer here */
#footer {
font-size: 0.9em;
padding: 8px 0px;
background-color: #f5f5f5;
}
.footer-row {
line-height: 44px;
}
#footer > .container {
padding-left: 15px;
padding-right: 15px;
}
.footer-follow-icon {
margin-left: 3px;
text-decoration: none !important;
}
.footer-follow-icon img {
width: 20px;
}
.footer-link {
padding-top: 5px;
display: inline-block;
color: #999999;
text-decoration: none;
}
.footer-copyright {
text-align: center;
}
@media (min-width: 992px) {
.footer-row {
text-align: left;
}
.footer-icons {
text-align: right;
}
}
@media (max-width: 991px) {
.footer-row {
text-align: center;
}
.footer-icons {
text-align: center;
}
}
/* DOXYGEN Code Styles
----------------------------------- */
a.qindex {
font-weight: bold;
}
a.qindexHL {
font-weight: bold;
background-color: #9CAFD4;
color: #ffffff;
border: 1px double #869DCA;
}
.contents a.qindexHL:visited {
color: #ffffff;
}
a.code, a.code:visited, a.line, a.line:visited {
color: #4665A2;
}
a.codeRef, a.codeRef:visited, a.lineRef, a.lineRef:visited {
color: #4665A2;
}
/* @end */
dl.el {
margin-left: -1cm;
}
pre.fragment {
border: 1px solid #C4CFE5;
background-color: #FBFCFD;
padding: 4px 6px;
margin: 4px 8px 4px 2px;
overflow: auto;
word-wrap: break-word;
font-size: 9pt;
line-height: 125%;
font-family: monospace, fixed;
font-size: 105%;
}
div.fragment {
padding: 4px 6px;
margin: 4px 8px 4px 2px;
border: 1px solid #C4CFE5;
}
div.line {
font-family: monospace, fixed;
font-size: 13px;
min-height: 13px;
line-height: 1.0;
text-wrap: unrestricted;
white-space: -moz-pre-wrap; /* Moz */
white-space: -pre-wrap; /* Opera 4-6 */
white-space: -o-pre-wrap; /* Opera 7 */
white-space: pre-wrap; /* CSS3 */
word-wrap: break-word; /* IE 5.5+ */
text-indent: -53px;
padding-left: 53px;
padding-bottom: 0px;
margin: 0px;
-webkit-transition-property: background-color, box-shadow;
-webkit-transition-duration: 0.5s;
-moz-transition-property: background-color, box-shadow;
-moz-transition-duration: 0.5s;
-ms-transition-property: background-color, box-shadow;
-ms-transition-duration: 0.5s;
-o-transition-property: background-color, box-shadow;
-o-transition-duration: 0.5s;
transition-property: background-color, box-shadow;
transition-duration: 0.5s;
}
div.line.glow {
background-color: cyan;
box-shadow: 0 0 10px cyan;
}
span.lineno {
padding-right: 4px;
text-align: right;
border-right: 2px solid #0F0;
background-color: #E8E8E8;
white-space: pre;
}
span.lineno a {
background-color: #D8D8D8;
}
span.lineno a:hover {
background-color: #C8C8C8;
}
div.groupHeader {
margin-left: 16px;
margin-top: 12px;
font-weight: bold;
}
div.groupText {
margin-left: 16px;
font-style: italic;
}
/* @group Code Colorization */
span.keyword {
color: #008000
}
span.keywordtype {
color: #604020
}
span.keywordflow {
color: #e08000
}
span.comment {
color: #800000
}
span.preprocessor {
color: #806020
}
span.stringliteral {
color: #002080
}
span.charliteral {
color: #008080
}
span.vhdldigit {
color: #ff00ff
}
span.vhdlchar {
color: #000000
}
span.vhdlkeyword {
color: #700070
}
span.vhdllogic {
color: #ff0000
}
blockquote {
background-color: #F7F8FB;
border-left: 2px solid #9CAFD4;
margin: 0 24px 0 4px;
padding: 0 12px 0 16px;
}
/*---------------- Search Box */
#search-box {
margin: 10px 0px;
}
#search-box .close {
display: none;
position: absolute;
right: 0px;
padding: 6px 12px;
z-index: 5;
}
/*---------------- Search results window */
#search-results-window {
display: none;
}
iframe#MSearchResults {
width: 100%;
height: 15em;
}
.SRChildren {
padding-left: 3ex; padding-bottom: .5em
}
.SRPage .SRChildren {
display: none;
}
a.SRScope {
display: block;
}
a.SRSymbol:focus, a.SRSymbol:active,
a.SRScope:focus, a.SRScope:active {
text-decoration: underline;
}
span.SRScope {
padding-left: 4px;
}
.SRResult {
display: none;
}
/* class and file list */
.directory .icona,
.directory .arrow {
height: auto;
}
.directory .icona .icon {
height: 16px;
}
.directory .icondoc {
background-position: 0px 0px;
height: 20px;
}
.directory .iconfopen {
background-position: 0px 0px;
}
.directory td.entry {
padding: 7px 8px 6px 8px;
}
.table > tbody > tr > td.memSeparator {
line-height: 0;
padding: 0;
}
.memItemLeft, .memTemplItemLeft {
white-space: normal;
}
/* enumerations */
.panel-body thead > tr {
background-color: #e0e0e0;
}
/* todo lists */
.todoname,
.todoname a {
font-weight: bold;
}
/* Class title */
.summary {
margin-top: 25px;
}
.page-header {
margin: 20px 0px !important;
}
.page-header {
#display: inline-block;
}
.title {
text-align: center;
color: orange;
}
.page-header .pull-right {
margin-top: 0.3em;
margin-left: 0.5em;
}
.page-header .label {
font-size: 50%;
}
#main-nav {
display: none;
}

BIN
doc.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 746 B

View File

@ -1,83 +0,0 @@
Build lib & build sample {#audio_orchestra_build}
========================
@tableofcontents
Download: {#audio_orchestra_build_download}
=========
audio-orchestra uses some tools to manage its sources and build them:
need google repo: {#audio_orchestra_build_download_repo}
-----------------
see: http://source.android.com/source/downloading.html#installing-repo
On all platforms:
```{.sh}
mkdir ~/.bin
PATH=~/.bin:$PATH
curl https://storage.googleapis.com/git-repo-downloads/repo > ~/.bin/repo
chmod a+x ~/.bin/repo
```
On Ubuntu
```{.sh}
sudo apt-get install repo
```
On Arch Linux
```{.sh}
sudo pacman -S repo
```
lutin (build-system): {#audio_orchestra_build_download_lutin}
---------------------
```{.sh}
pip install lutin --user
# optional dependency of lutin (manages image resizing for application release)
pip install pillow --user
```
dependency: {#audio_orchestra_build_download_dependency}
-----------
```{.sh}
mkdir -p WORKING_DIRECTORY/framework
cd WORKING_DIRECTORY/framework
repo init -u git://github.com/atria-soft/manifest.git
repo sync -j8
cd ../..
```
sources: {#audio_orchestra_build_download_sources}
--------
They are already downloaded by the repo manifest into:
```{.sh}
cd WORKING_DIRECTORY/framework/atria-soft/audio_orchestra
```
Build: {#audio_orchestra_build_build}
======
You must stay in your working directory...
```{.sh}
cd WORKING_DIRECTORY
```
library: {#audio_orchestra_build_build_library}
--------
```{.sh}
lutin -mdebug audio-orchestra
```
Sample: {#audio_orchestra_build_build_sample}
-------
No sample available for now ...

View File

@ -1,50 +0,0 @@
AUDIO-ORCHESTRA library {#mainpage}
=======================
@tableofcontents
What is AUDIO-ORCHESTRA: {#audio_orchestra_mainpage_what}
========================
AUDIO-ORCHESTRA is a fork of the RTAudio lib (with ports for Android and iOS).
It is a cross-platform audio API that wraps the hardware.
This library is not friendly to use directly; use audio-river for a correct and simple multiple-flow API.
What it does: {#audio_orchestra_mainpage_what_it_does}
-------------
- Open an audio stream in input or output
- Synchronize 2 flows
- Run on several platforms: Android, Linux, MacOs, iOS, Windows
AUDIO-ORCHESTRA depends on the STL (compatible with the MacOs STL (CXX)); a minimal device-listing sketch is shown below.
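As a quick illustration, here is a minimal, hypothetical sketch (not an official sample of this project) that enumerates the audio devices through the PulseAudio backend helper `audio::orchestra::api::pulse::getDeviceList()` declared in this repository; the `DeviceInfo` fields used (`name`, `input`, `isDefault`) are the ones that backend fills in:
```{.cpp}
// Hypothetical sketch (not part of the library sources): list the devices seen
// by the PulseAudio backend. Assumes a build with ORCHESTRA_BUILD_PULSE defined.
#include <audio/orchestra/api/PulseDeviceList.hpp>
#include <audio/orchestra/debug.hpp>

void listPulseDevices() {
	etk::Vector<audio::orchestra::DeviceInfo> devices = audio::orchestra::api::pulse::getDeviceList();
	for (size_t iii=0; iii<devices.size(); ++iii) {
		// 'input' is true for capture sources and false for playback sinks.
		ATA_PRINT("device " << iii << ": " << devices[iii].name
		          << (devices[iii].input == true ? " (input)" : " (output)")
		          << (devices[iii].isDefault == true ? " [default]" : ""));
	}
}
```
The same information is also reachable through the generic backend entry points `getDeviceCount()` / `getDeviceInfo()`.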
What languages are supported? {#audio_orchestra_mainpage_language}
=============================
AUDIO-ORCHESTRA is written in C++.
Are there any licensing restrictions? {#audio_orchestra_mainpage_license_restriction}
=====================================
AUDIO-ORCHESTRA is **FREE software** and _all sub-libraries are FREE and statically linkable!!!_
License (MIT) {#audio_orchestra_mainpage_license}
=============
Copyright AUDIO-ORCHESTRA Edouard DUPIN
MIT ...
Other pages {#audio_orchestra_mainpage_sub_page}
===========
- @ref audio_orchestra_build
- [**ewol coding style**](http://atria-soft.github.io/ewol/ewol_coding_style.html)

264
doxy-boot.js Normal file
View File

@ -0,0 +1,264 @@
$( document ).ready(function() {
$("div.headertitle").addClass("page-header");
$("div.title").addClass("h1");
$('li > a[href="index.html"] > span').before("<i class='fa fa-cog'></i> ");
$('li > a[href="modules.html"] > span').before("<i class='fa fa-square'></i> ");
$('li > a[href="namespaces.html"] > span').before("<i class='fa fa-bars'></i> ");
$('li > a[href="annotated.html"] > span').before("<i class='fa fa-list-ul'></i> ");
$('li > a[href="classes.html"] > span').before("<i class='fa fa-book'></i> ");
$('li > a[href="inherits.html"] > span').before("<i class='fa fa-sitemap'></i> ");
$('li > a[href="functions.html"] > span').before("<i class='fa fa-list'></i> ");
$('li > a[href="functions_func.html"] > span').before("<i class='fa fa-list'></i> ");
$('li > a[href="functions_vars.html"] > span').before("<i class='fa fa-list'></i> ");
$('li > a[href="functions_enum.html"] > span').before("<i class='fa fa-list'></i> ");
$('li > a[href="functions_eval.html"] > span').before("<i class='fa fa-list'></i> ");
$('img[src="ftv2ns.png"]').replaceWith('<span class="label label-danger">N</span> ');
$('img[src="ftv2cl.png"]').replaceWith('<span class="label label-danger">C</span> ');
$("ul.tablist").addClass("nav nav-pills nav-justified");
$("ul.tablist").css("margin-top", "0.5em");
$("ul.tablist").css("margin-bottom", "0.5em");
$("li.current").addClass("active");
$("iframe").attr("scrolling", "yes");
$("#nav-path > ul").addClass("breadcrumb");
$("table.params").addClass("table");
$("div.ingroups").wrapInner("<small></small>");
$("div.levels").css("margin", "0.5em");
$("div.levels > span").addClass("btn btn-default btn-xs");
$("div.levels > span").css("margin-right", "0.25em");
$("table.directory").addClass("table table-striped");
$("div.summary > a").addClass("btn btn-default btn-xs");
$("table.fieldtable").addClass("table");
$(".fragment").addClass("well");
$(".memitem").addClass("panel panel-default");
$(".memproto").addClass("panel-heading");
$(".memdoc").addClass("panel-body");
$("span.mlabel").addClass("label label-info");
$("table.memberdecls").addClass("table");
$("[class^=memitem]").addClass("active");
$("div.ah").addClass("btn btn-default");
$("span.mlabels").addClass("pull-right");
$("table.mlabels").css("width", "100%")
$("td.mlabels-right").addClass("pull-right");
$("div.ttc").addClass("panel panel-primary");
$("div.ttname").addClass("panel-heading");
$("div.ttname a").css("color", 'white');
$("div.ttdef,div.ttdoc,div.ttdeci").addClass("panel-body");
$('div.fragment.well div.line:first').css('margin-top', '15px');
$('div.fragment.well div.line:last').css('margin-bottom', '15px');
$('table.doxtable').removeClass('doxtable').addClass('table table-striped table-bordered').each(function(){
$(this).prepend('<thead></thead>');
$(this).find('tbody > tr:first').prependTo($(this).find('thead'));
$(this).find('td > span.success').parent().addClass('success');
$(this).find('td > span.warning').parent().addClass('warning');
$(this).find('td > span.danger').parent().addClass('danger');
});
if($('div.fragment.well div.ttc').length > 0)
{
$('div.fragment.well div.line:first').parent().removeClass('fragment well');
}
// merge the left and right elements in the function table item
/*
$('table.memberdecls').find('.memItemRight').each(function(){
$(this).contents().appendTo($(this).siblings('.memItemLeft'));
$(this).siblings('.memItemLeft').attr('align', 'left');
});
*/
function getOriginalWidthOfImg(img_element) {
var t = new Image();
t.src = (img_element.getAttribute ? img_element.getAttribute("src") : false) || img_element.src;
return t.width;
}
$('div.dyncontent').find('img').each(function(){
if(getOriginalWidthOfImg($(this)[0]) > $('#content>div.container').width())
$(this).css('width', '100%');
});
/* responsive search box */
$('#MSearchBox').parent().remove();
var nav_container = $('<div class="row"></div>');
$('#navrow1').parent().prepend(nav_container);
var left_nav = $('<div class="col-md-9"></div>');
for (i = 0; i < 6; i++) {
var navrow = $('#navrow' + i + ' > ul.tablist').detach();
left_nav.append(navrow);
$('#navrow' + i).remove();
}
var right_nav = $('<div class="col-md-3"></div>').append('\
<div id="search-box" class="input-group">\
<div class="input-group-btn">\
<button aria-expanded="false" type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">\
<span class="glyphicon glyphicon-search"></span> <span class="caret"></span>\
</button>\
<ul class="dropdown-menu">\
</ul>\
</div>\
<button id="search-close" type="button" class="close" aria-label="Close"><span aria-hidden="true">&times;</span></button>\
<input id="search-field" class="form-control" accesskey="S" onkeydown="searchBox.OnSearchFieldChange(event);" placeholder="Search ..." type="text">\
</div>');
$(nav_container).append(left_nav);
$(nav_container).append(right_nav);
$('#MSearchSelectWindow .SelectionMark').remove();
var search_selectors = $('#MSearchSelectWindow .SelectItem');
for (var i = 0; i < search_selectors.length; i += 1) {
var element_a = $('<a href="#"></a>').text($(search_selectors[i]).text());
element_a.click(function(){
$('#search-box .dropdown-menu li').removeClass('active');
$(this).parent().addClass('active');
searchBox.OnSelectItem($('#search-box li a').index(this));
searchBox.Search();
return false;
});
var element = $('<li></li>').append(element_a);
$('#search-box .dropdown-menu').append(element);
}
$('#MSearchSelectWindow').remove();
$('#search-box .close').click(function (){
searchBox.CloseResultsWindow();
});
$('body').append('<div id="MSearchClose"></div>');
$('body').append('<div id="MSearchBox"></div>');
$('body').append('<div id="MSearchSelectWindow"></div>');
searchBox.searchLabel = '';
searchBox.DOMSearchField = function() {
return document.getElementById("search-field");
}
searchBox.DOMSearchClose = function(){
return document.getElementById("search-close");
}
/* search results */
var results_iframe = $('#MSearchResults').detach();
$('#MSearchResultsWindow')
.attr('id', 'search-results-window')
.addClass('panel panel-default')
.append(
'<div class="panel-heading">\
<h3 class="panel-title">Search Results</h3>\
</div>\
<div class="panel-body"></div>'
);
$('#search-results-window .panel-body').append(results_iframe);
searchBox.DOMPopupSearchResultsWindow = function() {
return document.getElementById("search-results-window");
}
function update_search_results_window() {
$('#search-results-window').removeClass('panel-default panel-success panel-warning panel-danger')
var status = $('#MSearchResults').contents().find('.SRStatus:visible');
if (status.length > 0) {
switch(status.attr('id')) {
case 'Loading':
case 'Searching':
$('#search-results-window').addClass('panel-warning');
break;
case 'NoMatches':
$('#search-results-window').addClass('panel-danger');
break;
default:
$('#search-results-window').addClass('panel-default');
}
} else {
$('#search-results-window').addClass('panel-success');
}
}
$('#MSearchResults').load(function() {
$('#MSearchResults').contents().find('link[href="search.css"]').attr('href','../doxygen.css');
$('#MSearchResults').contents().find('head').append(
'<link href="../customdoxygen.css" rel="stylesheet" type="text/css">');
update_search_results_window();
// detect status changes (only for search with external search backend)
var observer = new MutationObserver(function(mutations) {
update_search_results_window();
});
var config = {
attributes: true
};
var targets = $('#MSearchResults').contents().find('.SRStatus');
for (i = 0; i < targets.length; i++) {
observer.observe(targets[i], config);
}
});
/* enumerations */
$('table.fieldtable').removeClass('fieldtable').addClass('table table-striped table-bordered').each(function(){
$(this).prepend('<thead></thead>');
$(this).find('tbody > tr:first').prependTo($(this).find('thead'));
$(this).find('td > span.success').parent().addClass('success');
$(this).find('td > span.warning').parent().addClass('warning');
$(this).find('td > span.danger').parent().addClass('danger');
});
/* todo list */
var todoelements = $('.contents > .textblock > dl.reflist > dt, .contents > .textblock > dl.reflist > dd');
for (var i = 0; i < todoelements.length; i += 2) {
$('.contents > .textblock').append(
'<div class="panel panel-default active">'
+ "<div class=\"panel-heading todoname\">" + $(todoelements[i]).html() + "</div>"
+ "<div class=\"panel-body\">" + $(todoelements[i+1]).html() + "</div>"
+ '</div>');
}
$('.contents > .textblock > dl').remove();
$(".memitem").removeClass('memitem');
$(".memproto").removeClass('memproto');
$(".memdoc").removeClass('memdoc');
$("span.mlabel").removeClass('mlabel');
$("table.memberdecls").removeClass('memberdecls');
$("[class^=memitem]").removeClass('memitem');
$("span.mlabels").removeClass('mlabels');
$("table.mlabels").removeClass('mlabels');
$("td.mlabels-right").removeClass('mlabels-right');
$(".navpath").removeClass('navpath');
$("li.navelem").removeClass('navelem');
$("a.el").removeClass('el');
$("div.ah").removeClass('ah');
$("div.header").removeClass("header");
$('.mdescLeft').each(function(){
if($(this).html()=="&nbsp;") {
$(this).siblings('.mdescRight').attr('colspan', 2);
$(this).remove();
}
});
$('td.memItemLeft').each(function(){
if($(this).siblings('.memItemRight').html()=="") {
$(this).attr('colspan', 2);
$(this).siblings('.memItemRight').remove();
}
});
});

View File

@ -1,32 +0,0 @@
#!/usr/bin/python
import os
import doxy.module as module
import doxy.debug as debug
import doxy.tools as tools
def create(target, module_name):
my_module = module.Module(__file__, module_name)
my_module.set_version("version.txt")
my_module.set_title("Orchestra: audio interface wrapper")
my_module.set_website("http://musicdsp.github.io/" + module_name)
my_module.set_website_sources("http://github.com/musicdsp/" + module_name)
my_module.add_path([
"audio",
"doc"
])
my_module.add_depend([
'etk',
'audio',
])
my_module.add_exclude_symbols([
'*operator<<*',
])
my_module.add_exclude_file([
'debug.h',
])
my_module.add_file_patterns([
'*.h',
'*.md',
])
return my_module

1508
doxygen.css Normal file

File diff suppressed because it is too large Load Diff

BIN
doxygen.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.7 KiB

97
dynsections.js Normal file
View File

@ -0,0 +1,97 @@
function toggleVisibility(linkObj)
{
var base = $(linkObj).attr('id');
var summary = $('#'+base+'-summary');
var content = $('#'+base+'-content');
var trigger = $('#'+base+'-trigger');
var src=$(trigger).attr('src');
if (content.is(':visible')===true) {
content.hide();
summary.show();
$(linkObj).addClass('closed').removeClass('opened');
$(trigger).attr('src',src.substring(0,src.length-8)+'closed.png');
} else {
content.show();
summary.hide();
$(linkObj).removeClass('closed').addClass('opened');
$(trigger).attr('src',src.substring(0,src.length-10)+'open.png');
}
return false;
}
function updateStripes()
{
$('table.directory tr').
removeClass('even').filter(':visible:even').addClass('even');
}
function toggleLevel(level)
{
$('table.directory tr').each(function() {
var l = this.id.split('_').length-1;
var i = $('#img'+this.id.substring(3));
var a = $('#arr'+this.id.substring(3));
if (l<level+1) {
i.removeClass('iconfopen iconfclosed').addClass('iconfopen');
a.html('&#9660;');
$(this).show();
} else if (l==level+1) {
i.removeClass('iconfclosed iconfopen').addClass('iconfclosed');
a.html('&#9658;');
$(this).show();
} else {
$(this).hide();
}
});
updateStripes();
}
function toggleFolder(id)
{
// the clicked row
var currentRow = $('#row_'+id);
// all rows after the clicked row
var rows = currentRow.nextAll("tr");
var re = new RegExp('^row_'+id+'\\d+_$', "i"); //only one sub
// only match elements AFTER this one (can't hide elements before)
var childRows = rows.filter(function() { return this.id.match(re); });
// first row is visible we are HIDING
if (childRows.filter(':first').is(':visible')===true) {
// replace down arrow by right arrow for current row
var currentRowSpans = currentRow.find("span");
currentRowSpans.filter(".iconfopen").removeClass("iconfopen").addClass("iconfclosed");
currentRowSpans.filter(".arrow").html('&#9658;');
rows.filter("[id^=row_"+id+"]").hide(); // hide all children
} else { // we are SHOWING
// replace right arrow by down arrow for current row
var currentRowSpans = currentRow.find("span");
currentRowSpans.filter(".iconfclosed").removeClass("iconfclosed").addClass("iconfopen");
currentRowSpans.filter(".arrow").html('&#9660;');
// replace down arrows by right arrows for child rows
var childRowsSpans = childRows.find("span");
childRowsSpans.filter(".iconfopen").removeClass("iconfopen").addClass("iconfclosed");
childRowsSpans.filter(".arrow").html('&#9658;');
childRows.show(); //show all children
}
updateStripes();
}
function toggleInherit(id)
{
var rows = $('tr.inherit.'+id);
var img = $('tr.inherit_header.'+id+' img');
var src = $(img).attr('src');
if (rows.filter(':first').is(':visible')===true) {
rows.css('display','none');
$(img).attr('src',src.substring(0,src.length-8)+'closed.png');
} else {
rows.css('display','table-row'); // using show() causes jump in firefox
$(img).attr('src',src.substring(0,src.length-10)+'open.png');
}
}

BIN
folderclosed.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 616 B

BIN
folderopen.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 597 B

190
index.html Normal file
View File

@ -0,0 +1,190 @@
<!-- HTML header for doxygen 1.8.8-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<!-- For Mobile Devices -->
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
<title>Orchestra: audio interface wrapper: AUDIO-ORCHESTRA library</title>
<!--<link href="tabs.css" rel="stylesheet" type="text/css"/>-->
<script type="text/javascript" src="dynsections.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
<link href="customdoxygen.css" rel="stylesheet" type="text/css"/>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap.min.css">
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/js/bootstrap.min.js"></script>
<script type="text/javascript" src="doxy-boot.js"></script>
</head>
<body>
<nav class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<a class="navbar-brand">Orchestra: audio interface wrapper 0.4.0</a>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li><a href="index.html">Main&nbsp;Page</a></li>
<li><a href="pages.html">Related&nbsp;Pages</a></li>
<li><a href="namespaces.html">Namespaces</a></li>
<li><a href="annotated.html">Classes</a></li>
<li><a href="files.html">Files</a></li>
</ul>
<ul class="nav navbar-nav navbar-right">
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">
Link-libs<span class="caret"></span>
</a>
<ul class="dropdown-menu">
<li><a href="http://HeeroYui.github.io/lutin">lutin</a></li>
<li><a href="http://atria-soft.github.io/ewol">ewol</a></li>
<li><a href="http://atria-soft.github.io/echrono">echrono</a></li>
<li><a href="http://atria-soft.github.io/etk">etk</a></li>
<li><a href="http://atria-soft.github.io/ejson">ejson</a></li>
<li><a href="http://atria-soft.github.io/exml">exml</a></li>
<li><a href="http://atria-soft.github.io/esvg">esvg</a></li>
<li><a href="http://atria-soft.github.io/egami">egami</a></li>
<li><a href="http://atria-soft.github.io/gale">gale</a></li>
<li><a href="http://atria-soft.github.io/ege">ege</a></li>
<li><a href="http://atria-soft.github.io/elog">elog</a></li>
<li><a href="http://atria-soft.github.io/ememory">ememory</a></li>
<li><a href="http://atria-soft.github.io/enet">enet</a></li>
<li><a href="http://atria-soft.github.io/eproperty">eproperty</a></li>
<li><a href="http://atria-soft.github.io/esignal">esignal</a></li>
<li><a href="http://atria-soft.github.io/etranslate">etranslate</a></li>
<li><a href="http://atria-soft.github.io/zeus">zeus</a></li>
<li><a href="http://musicdsp.github.io/audio-ess">audio-ess</a></li>
<li><a href="http://musicdsp.github.io/audio">audio</a></li>
<li><a href="http://musicdsp.github.io/audio-drain">audio-drain</a></li>
<li><a href="http://musicdsp.github.io/audio-orchestra">audio-orchestra</a></li>
<li><a href="http://musicdsp.github.io/audio-river">audio-river</a></li>
</ul>
</li>
</ul>
<div id="search-box" class="input-group">
<div class="input-group-btn">
<button aria-expanded="false" type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">
<span class="glyphicon glyphicon-search"></span>
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li><a href="#">All</a></li>
<li><a href="#">Classes</a></li>
<li><a href="#">Namespaces</a></li>
<li><a href="#">Files</a></li>
<li><a href="#">Functions</a></li>
<li><a href="#">Variables</a></li>
<li><a href="#">Typedefs</a></li>
<li><a href="#">Enumerations</a></li>
<li><a href="#">Enumerator</a></li>
<li><a href="#">Friends</a></li>
<li><a href="#">Macros</a></li>
<li><a href="#">Pages</a></li>
</ul>
</div>
<button id="search-close" type="button" class="close" aria-label="Close">
<span aria-hidden="true"></span>
</button>
<input id="search-field" class="form-control" accesskey="S" onkeydown="searchBox.OnSearchFieldChange(event);" placeholder="Search ..." type="text">
</div>
</div><!--/.nav-collapse -->
</div>
</nav>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div class="content" id="content">
<div class="container">
<div class="row">
<div class="col-sm-12 panel panel-default" style="padding-bottom: 15px;">
<div style="margin-bottom: 15px;margin-top: 60px;">
<!-- end header part -->
<!-- Generated by Doxygen 1.8.12 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
$(function() {
initMenu('',true,false,'search.php','Search');
$(document).ready(function() { init_search(); });
});
</script>
<div id="main-nav"></div>
</div><!-- top -->
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="header">
<div class="headertitle">
<div class="title">AUDIO-ORCHESTRA library </div> </div>
</div><!--header-->
<div class="contents">
<div class="toc"><h3>Table of Contents</h3>
<ul><li class="level1"><a href="#audio_orchestra_mainpage_what">What is AUDIO-ORCHESTRA: </a><ul><li class="level2"><a href="#audio_orchestra_mainpage_what_it_does">What it does: </a></li>
</ul>
</li>
<li class="level1"><a href="#audio_orchestra_mainpage_language">What languages are supported? </a></li>
<li class="level1"><a href="#audio_orchestra_mainpage_license_restriction">Are there any licensing restrictions? </a></li>
<li class="level1"><a href="#audio_orchestra_mainpage_license">License (MIT) </a></li>
<li class="level1"><a href="#audio_orchestra_mainpage_sub_page">Other pages </a></li>
</ul>
</div>
<div class="textblock"><h1><a class="anchor" id="audio_orchestra_mainpage_what"></a>
What is AUDIO-ORCHESTRA: </h1>
<p>AUDIO-ORCHESTRA is a fork of the RtAudio lib (with ports for Android and IOs).</p>
<p>It is a cross-platform audio API wrapping the hardware.</p>
<p>This library is not user-friendly on its own; use audio-river for a correct and simple multiple-flow API.</p>
<h2><a class="anchor" id="audio_orchestra_mainpage_what_it_does"></a>
What it does: </h2>
<ul>
<li>Open an audio stream for input or output</li>
<li>Synchronize 2 flows</li>
<li>Run on several platforms: Android, Linux, MacOs, IOs, Windows</li>
</ul>
<p>AUDIO-ORCHESTRA depends on the STL (compatible with the MacOs STL (CXX)).</p>
<h1><a class="anchor" id="audio_orchestra_mainpage_language"></a>
What languages are supported? </h1>
<p>AUDIO-ORCHESTRA is written in C++.</p>
<h1><a class="anchor" id="audio_orchestra_mainpage_license_restriction"></a>
Are there any licensing restrictions? </h1>
<p>AUDIO-ORCHESTRA is <b>FREE software</b> and <em>all sub-libraries are FREE and statically linkable!</em></p>
<h1><a class="anchor" id="audio_orchestra_mainpage_license"></a>
License (MIT) </h1>
<p>Copyright AUDIO-ORCHESTRA Edouard DUPIN</p>
<p>MIT ...</p>
<h1><a class="anchor" id="audio_orchestra_mainpage_sub_page"></a>
Other pages </h1>
<ul>
<li><a class="el" href="audio_orchestra_build.html">Build lib &amp; build sample</a></li>
<li><a href="http://atria-soft.github.io/ewol/ewol_coding_style.html"><b>ewol coding style</b></a> </li>
</ul>
</div></div><!-- contents -->
<!-- HTML footer for doxygen 1.8.8-->
<!-- start footer part -->
</div>
</div>
</div>
</div>
</div>
<hr class="footer"/><address class="footer"><small>
Generated on Mon Oct 24 2016 15:35:48 for Orchestra: audio interface wrapper by &#160;<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.12
</small></address>
</body>
</html>

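The index.html main page above says the library opens an audio stream for input or output and drives it with a callback; since AUDIO-ORCHESTRA is a fork of RtAudio, a minimal sketch in the upstream RtAudio style gives an idea of the intended usage. This is an assumption-heavy illustration: the audio-orchestra headers listed in the build file further down (Interface.hpp, StreamParameters.hpp) presumably expose an analogous openStream/startStream API under audio::orchestra, but the exact names and signatures are not shown on this page and may differ.

// Minimal sketch (upstream RtAudio naming; audio-orchestra equivalents assumed, not verified here).
#include "RtAudio.h"
#include <cmath>
#include <chrono>
#include <thread>

// Callback: fill the interleaved stereo float output buffer with a 440 Hz sine.
static int sineCallback(void* outputBuffer, void* /*inputBuffer*/,
                        unsigned int nFrames, double /*streamTime*/,
                        RtAudioStreamStatus /*status*/, void* userData) {
	const double kTwoPi = 6.283185307179586;
	double* phase = static_cast<double*>(userData);
	float* out = static_cast<float*>(outputBuffer);
	for (unsigned int iii = 0; iii < nFrames; ++iii) {
		float sample = static_cast<float>(std::sin(*phase));
		*phase += kTwoPi * 440.0 / 48000.0;
		out[2 * iii] = sample;     // left channel
		out[2 * iii + 1] = sample; // right channel
	}
	return 0; // 0 keeps the stream running
}

int main() {
	RtAudio dac;
	RtAudio::StreamParameters params;
	params.deviceId = dac.getDefaultOutputDevice();
	params.nChannels = 2;
	unsigned int bufferFrames = 256;
	double phase = 0.0;
	// Output-only stream: no input parameters, float32 samples at 48 kHz.
	dac.openStream(&params, nullptr, RTAUDIO_FLOAT32, 48000, &bufferFrames,
	               &sineCallback, &phase);
	dac.startStream();
	std::this_thread::sleep_for(std::chrono::seconds(2));
	dac.stopStream();
	dac.closeStream();
	return 0;
}

Building such a sample against audio-orchestra itself would go through the lutin module shown further down rather than a plain compiler invocation.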
87
jquery.js vendored Normal file

File diff suppressed because one or more lines are too long

View File

@ -1,182 +0,0 @@
#!/usr/bin/python
import lutin.tools as tools
import realog.debug as debug
def get_type():
return "LIBRARY"
def get_desc():
return "Generic wrapper on all audio interface"
def get_licence():
return "APACHE-2"
def get_compagny_type():
return "com"
def get_compagny_name():
return "atria-soft"
def get_maintainer():
return "authors.txt"
def get_version():
return "version.txt"
def configure(target, my_module):
my_module.add_src_file([
'audio/orchestra/debug.cpp',
'audio/orchestra/status.cpp',
'audio/orchestra/type.cpp',
'audio/orchestra/mode.cpp',
'audio/orchestra/state.cpp',
'audio/orchestra/error.cpp',
'audio/orchestra/base.cpp',
'audio/orchestra/Interface.cpp',
'audio/orchestra/Flags.cpp',
'audio/orchestra/Api.cpp',
'audio/orchestra/DeviceInfo.cpp',
'audio/orchestra/StreamOptions.cpp',
'audio/orchestra/api/Dummy.cpp'
])
my_module.add_header_file([
'audio/orchestra/debug.hpp',
'audio/orchestra/status.hpp',
'audio/orchestra/type.hpp',
'audio/orchestra/mode.hpp',
'audio/orchestra/state.hpp',
'audio/orchestra/error.hpp',
'audio/orchestra/base.hpp',
'audio/orchestra/Interface.hpp',
'audio/orchestra/Flags.hpp',
'audio/orchestra/Api.hpp',
'audio/orchestra/DeviceInfo.hpp',
'audio/orchestra/StreamOptions.hpp',
'audio/orchestra/CallbackInfo.hpp',
'audio/orchestra/StreamParameters.hpp'
])
my_module.add_depend([
'audio',
'etk'
])
# always add the dummy interface
my_module.add_flag('c++', ['-DORCHESTRA_BUILD_DUMMY'], export=True)
# TODO : Add a FILE interface:
if "Windows" in target.get_type():
my_module.add_src_file([
'audio/orchestra/api/Asio.cpp',
'audio/orchestra/api/Ds.cpp',
])
# load optional APIs:
my_module.add_optionnal_depend('asio', ["c++", "-DORCHESTRA_BUILD_ASIO"])
my_module.add_optionnal_depend('ds', ["c++", "-DORCHESTRA_BUILD_DS"])
my_module.add_optionnal_depend('wasapi', ["c++", "-DORCHESTRA_BUILD_WASAPI"])
elif "Linux" in target.get_type():
my_module.add_src_file([
'audio/orchestra/api/Alsa.cpp',
'audio/orchestra/api/Jack.cpp',
'audio/orchestra/api/Pulse.cpp',
'audio/orchestra/api/PulseDeviceList.cpp'
])
my_module.add_optionnal_depend('alsa', ["c++", "-DORCHESTRA_BUILD_ALSA"])
my_module.add_optionnal_depend('jack', ["c++", "-DORCHESTRA_BUILD_JACK"])
my_module.add_optionnal_depend('pulse', ["c++", "-DORCHESTRA_BUILD_PULSE"])
elif "MacOs" in target.get_type():
my_module.add_src_file([
'audio/orchestra/api/Core.cpp'
])
# MacOsX core
my_module.add_optionnal_depend('CoreAudio', ["c++", "-DORCHESTRA_BUILD_MACOSX_CORE"])
elif "IOs" in target.get_type():
my_module.add_src_file('audio/orchestra/api/CoreIos.mm')
# IOs core
my_module.add_optionnal_depend('CoreAudio', ["c++", "-DORCHESTRA_BUILD_IOS_CORE"])
elif "Android" in target.get_type():
my_module.add_src_file('android/org/musicdsp/orchestra/OrchestraConstants.java')
my_module.add_src_file('android/org/musicdsp/orchestra/OrchestraManagerCallback.java')
my_module.add_src_file('android/org/musicdsp/orchestra/OrchestraNative.java')
my_module.add_src_file('android/org/musicdsp/orchestra/OrchestraInterfaceInput.java')
my_module.add_src_file('android/org/musicdsp/orchestra/OrchestraInterfaceOutput.java')
my_module.add_src_file('android/org/musicdsp/orchestra/OrchestraManager.java')
# create the inter-language interface
my_module.add_src_file('org.musicdsp.orchestra.OrchestraConstants.javah')
my_module.add_path('android', type='java')
my_module.add_depend(['SDK', 'jvm-basics', 'ejson'])
my_module.add_flag('c++', ['-DORCHESTRA_BUILD_JAVA'], export=True)
my_module.add_src_file('audio/orchestra/api/Android.cpp')
my_module.add_src_file('audio/orchestra/api/AndroidNativeInterface.cpp')
# add the creator of the basic java class ...
target.add_action("BINARY", 11, "audio-orchestra-out-wrapper", tool_generate_add_java_section_in_class)
else:
debug.warning("unknown target for audio_orchestra: " + target.name)
my_module.add_path(".")
return True
##################################################################
##
## Android specific section
##
##################################################################
def tool_generate_add_java_section_in_class(target, module, package_name):
module.add_pkg("GENERATE_SECTION__IMPORT", [
"import org.musicdsp.orchestra.OrchestraManager;"
])
module.add_pkg("GENERATE_SECTION__DECLARE", [
"private OrchestraManager m_audioManagerHandle;"
])
module.add_pkg("GENERATE_SECTION__CONSTRUCTOR", [
"// load the audio manager; if it does not work, it is not critical ...",
"try {",
" m_audioManagerHandle = new OrchestraManager();",
"} catch (RuntimeException e) {",
" Log.e(\"" + package_name + "\", \"Can not load Audio interface (maybe not really needed) :\" + e);",
"}"
])
module.add_pkg("GENERATE_SECTION__ON_CREATE", [
"if (m_audioManagerHandle != null) {",
" m_audioManagerHandle.onCreate();",
"}"
])
module.add_pkg("GENERATE_SECTION__ON_START", [
"if (m_audioManagerHandle != null) {",
" m_audioManagerHandle.onStart();",
"}"
])
module.add_pkg("GENERATE_SECTION__ON_RESTART", [
"if (m_audioManagerHandle != null) {",
" m_audioManagerHandle.onRestart();",
"}"
])
module.add_pkg("GENERATE_SECTION__ON_RESUME", [
"if (m_audioManagerHandle != null) {",
" m_audioManagerHandle.onResume();",
"}"
])
module.add_pkg("GENERATE_SECTION__ON_PAUSE", [
"if (m_audioManagerHandle != null) {",
" m_audioManagerHandle.onPause();",
"}"
])
module.add_pkg("GENERATE_SECTION__ON_STOP", [
"if (m_audioManagerHandle != null) {",
" m_audioManagerHandle.onStop();",
"}"
])
module.add_pkg("GENERATE_SECTION__ON_DESTROY", [
"// destroy the audio manager.",
"if (m_audioManagerHandle != null) {",
" m_audioManagerHandle.onDestroy();",
"}"
])

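The build script above always exports -DORCHESTRA_BUILD_DUMMY and, per target platform, one -DORCHESTRA_BUILD_* define for each optional backend it resolves (ALSA, Pulse, Jack, CoreAudio, ASIO, DS, WASAPI, Java). Below is a hedged sketch of how such exported compile flags are typically consumed on the C++ side; it is illustrative only, the real backend selection lives in Api.cpp and the api/*.cpp files:

// Illustrative only: the -DORCHESTRA_BUILD_* flags exported by the lutin module
// gate backend-specific code at compile time.
#include <iostream>

int main() {
#ifdef ORCHESTRA_BUILD_ALSA
	std::cout << "ALSA backend compiled in" << std::endl;
#endif
#ifdef ORCHESTRA_BUILD_PULSE
	std::cout << "PulseAudio backend compiled in" << std::endl;
#endif
#ifdef ORCHESTRA_BUILD_DUMMY
	std::cout << "Dummy backend is always compiled in" << std::endl;
#endif
	return 0;
}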
26
menu.js Normal file
View File

@ -0,0 +1,26 @@
function initMenu(relPath,searchEnabled,serverSide,searchPage,search) {
function makeTree(data,relPath) {
var result='';
if ('children' in data) {
result+='<ul>';
for (var i in data.children) {
result+='<li><a href="'+relPath+data.children[i].url+'">'+
data.children[i].text+'</a>'+
makeTree(data.children[i],relPath)+'</li>';
}
result+='</ul>';
}
return result;
}
$('#main-nav').append(makeTree(menudata,relPath));
$('#main-nav').children(':first').addClass('sm sm-dox').attr('id','main-menu');
if (searchEnabled) {
if (serverSide) {
$('#main-menu').append('<li style="float:right"><div id="MSearchBox" class="MSearchBoxInactive"><div class="left"><form id="FSearchBox" action="'+searchPage+'" method="get"><img id="MSearchSelect" src="'+relPath+'search/mag.png" alt=""/><input type="text" id="MSearchField" name="query" value="'+search+'" size="20" accesskey="S" onfocus="searchBox.OnSearchFieldFocus(true)" onblur="searchBox.OnSearchFieldFocus(false)"></form></div><div class="right"></div></div></li>');
} else {
$('#main-menu').append('<li style="float:right"><div id="MSearchBox" class="MSearchBoxInactive"><span class="left"><img id="MSearchSelect" src="'+relPath+'search/mag_sel.png" onmouseover="return searchBox.OnSearchSelectShow()" onmouseout="return searchBox.OnSearchSelectHide()" alt=""/><input type="text" id="MSearchField" value="'+search+'" accesskey="S" onfocus="searchBox.OnSearchFieldFocus(true)" onblur="searchBox.OnSearchFieldFocus(false)" onkeyup="searchBox.OnSearchFieldChange(event)"/></span><span class="right"><a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="'+relPath+'search/close.png" alt=""/></a></span></div></li>');
}
}
$('#main-menu').smartmenus();
}

3
menudata.js Normal file
View File

@ -0,0 +1,3 @@
var menudata={children:[
{text:'Main Page',url:'index.html'},
{text:'Related Pages',url:'pages.html'}]}

BIN
nav_f.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 153 B

BIN
nav_g.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 95 B

BIN
nav_h.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 98 B

BIN
open.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 123 B

156
pages.html Normal file
View File

@ -0,0 +1,156 @@
<!-- HTML header for doxygen 1.8.8-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<!-- For Mobile Devices -->
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
<title>Orchestra: audio interface wrapper: Related Pages</title>
<!--<link href="tabs.css" rel="stylesheet" type="text/css"/>-->
<script type="text/javascript" src="dynsections.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
<link href="customdoxygen.css" rel="stylesheet" type="text/css"/>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap.min.css">
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/js/bootstrap.min.js"></script>
<script type="text/javascript" src="doxy-boot.js"></script>
</head>
<body>
<nav class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<a class="navbar-brand">Orchestra: audio interface wrapper 0.4.0</a>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li><a href="index.html">Main&nbsp;Page</a></li>
<li><a href="pages.html">Related&nbsp;Pages</a></li>
<li><a href="namespaces.html">Namespaces</a></li>
<li><a href="annotated.html">Classes</a></li>
<li><a href="files.html">Files</a></li>
</ul>
<ul class="nav navbar-nav navbar-right">
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">
Link-libs<span class="caret"></span>
</a>
<ul class="dropdown-menu">
<li><a href="http://HeeroYui.github.io/lutin">lutin</a></li>
<li><a href="http://atria-soft.github.io/ewol">ewol</a></li>
<li><a href="http://atria-soft.github.io/echrono">echrono</a></li>
<li><a href="http://atria-soft.github.io/etk">etk</a></li>
<li><a href="http://atria-soft.github.io/ejson">ejson</a></li>
<li><a href="http://atria-soft.github.io/exml">exml</a></li>
<li><a href="http://atria-soft.github.io/esvg">esvg</a></li>
<li><a href="http://atria-soft.github.io/egami">egami</a></li>
<li><a href="http://atria-soft.github.io/gale">gale</a></li>
<li><a href="http://atria-soft.github.io/ege">ege</a></li>
<li><a href="http://atria-soft.github.io/elog">elog</a></li>
<li><a href="http://atria-soft.github.io/ememory">ememory</a></li>
<li><a href="http://atria-soft.github.io/enet">enet</a></li>
<li><a href="http://atria-soft.github.io/eproperty">eproperty</a></li>
<li><a href="http://atria-soft.github.io/esignal">esignal</a></li>
<li><a href="http://atria-soft.github.io/etranslate">etranslate</a></li>
<li><a href="http://atria-soft.github.io/zeus">zeus</a></li>
<li><a href="http://musicdsp.github.io/audio-ess">audio-ess</a></li>
<li><a href="http://musicdsp.github.io/audio">audio</a></li>
<li><a href="http://musicdsp.github.io/audio-drain">audio-drain</a></li>
<li><a href="http://musicdsp.github.io/audio-orchestra">audio-orchestra</a></li>
<li><a href="http://musicdsp.github.io/audio-river">audio-river</a></li>
</ul>
</li>
</ul>
<div id="search-box" class="input-group">
<div class="input-group-btn">
<button aria-expanded="false" type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">
<span class="glyphicon glyphicon-search"></span>
<span class="caret"></span>
</button>
<ul class="dropdown-menu">
<li><a href="#">All</a></li>
<li><a href="#">Classes</a></li>
<li><a href="#">Namespaces</a></li>
<li><a href="#">Files</a></li>
<li><a href="#">Functions</a></li>
<li><a href="#">Variables</a></li>
<li><a href="#">Typedefs</a></li>
<li><a href="#">Enumerations</a></li>
<li><a href="#">Enumerator</a></li>
<li><a href="#">Friends</a></li>
<li><a href="#">Macros</a></li>
<li><a href="#">Pages</a></li>
</ul>
</div>
<button id="search-close" type="button" class="close" aria-label="Close">
<span aria-hidden="true"></span>
</button>
<input id="search-field" class="form-control" accesskey="S" onkeydown="searchBox.OnSearchFieldChange(event);" placeholder="Search ..." type="text">
</div>
</div><!--/.nav-collapse -->
</div>
</nav>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div class="content" id="content">
<div class="container">
<div class="row">
<div class="col-sm-12 panel panel-default" style="padding-bottom: 15px;">
<div style="margin-bottom: 15px;margin-top: 60px;">
<!-- end header part -->
<!-- Generated by Doxygen 1.8.12 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<script type="text/javascript" src="menudata.js"></script>
<script type="text/javascript" src="menu.js"></script>
<script type="text/javascript">
$(function() {
initMenu('',true,false,'search.php','Search');
$(document).ready(function() { init_search(); });
});
</script>
<div id="main-nav"></div>
</div><!-- top -->
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="header">
<div class="headertitle">
<div class="title">Related Pages</div> </div>
</div><!--header-->
<div class="contents">
<div class="textblock">Here is a list of all related documentation pages:</div><div class="directory">
<table class="directory">
<tr id="row_0_" class="even"><td class="entry"><span style="width:16px;display:inline-block;">&#160;</span><a class="el" href="audio_orchestra_build.html" target="_self">Build lib &amp; build sample</a></td><td class="desc"></td></tr>
</table>
</div><!-- directory -->
</div><!-- contents -->
<!-- HTML footer for doxygen 1.8.8-->
<!-- start footer part -->
</div>
</div>
</div>
</div>
</div>
<hr class="footer"/><address class="footer"><small>
Generated on Mon Oct 24 2016 15:35:48 for Orchestra: audio interface wrapper by &#160;<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.12
</small></address>
</body>
</html>

26
search/all_0.html Normal file
View File

@ -0,0 +1,26 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="all_0.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
createResults();
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>

21
search/all_0.js Normal file
View File

@ -0,0 +1,21 @@
var searchData=
[
['a',['a',['http://atria-soft.github.io/etk/classetk_1_1_color.html#a772e2882d3a9f91c6f73c0c70c8f3130',1,'etk::Color']]],
['absolute',['absolute',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a0f1734db865e5a9c69d896bc237de133',1,'etk::Vector2D::absolute()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a0f1734db865e5a9c69d896bc237de133',1,'Vector2D&lt; int32_t &gt;::absolute()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a0f1734db865e5a9c69d896bc237de133',1,'Vector2D&lt; uint32_t &gt;::absolute()'],['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a5b9733cd0928920ccc8e34d89e980b82',1,'etk::Vector3D::absolute()'],['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#ab8a9feace5794c5a8a02e4b0e124408a',1,'etk::Vector4D::absolute()']]],
['add',['add',['http://atria-soft.github.io/etk/classetk_1_1_hash.html#a723e93ae06d3d612dc4ebf2e1e7ffd5a',1,'etk::Hash']]],
['aliceblue',['aliceBlue',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#adcf86ef9c7918dcb094a0783761899f1',1,'etk::color']]],
['angle',['angle',['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a3b403b529d49082265f3655aff656a7e',1,'etk::Vector3D']]],
['antiquewhite',['antiqueWhite',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a88abb6eca9c1ba0f346fb697683fa836',1,'etk::color']]],
['applyscalerotation',['applyScaleRotation',['http://atria-soft.github.io/etk/classetk_1_1_matrix2.html#aaafd3d4f83d6fd021b76a54f6a31b010',1,'etk::Matrix2']]],
['aqua',['aqua',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a2cef110e71ce12876fcfd6605e57f716',1,'etk::color']]],
['aquamarine',['aquamarine',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a8b69f67ed3e2db9915acde652a1cff80',1,'etk::color']]],
['archive',['Archive',['http://atria-soft.github.io/etk/classetk_1_1_archive.html',1,'etk']]],
['archive',['Archive',['http://atria-soft.github.io/etk/classetk_1_1_archive.html#a5991c05535749c9ed04eeec185f675ac',1,'etk::Archive']]],
['archive_2ehpp',['Archive.hpp',['http://atria-soft.github.io/etk/__archive__8hpp.html',1,'']]],
['archivecontent',['ArchiveContent',['http://atria-soft.github.io/etk/classetk_1_1_archive_content.html',1,'etk']]],
['archivecontent',['ArchiveContent',['http://atria-soft.github.io/etk/classetk_1_1_archive_content.html#aac29f8265fa496ca9576f21e13a704d4',1,'etk::ArchiveContent']]],
['audio',['audio',['http://musicdsp.github.io/audio/namespaceaudio.html',1,'']]],
['avg',['avg',['http://atria-soft.github.io/etk/namespaceetk.html#a4397d293209affffd0c2f6832ebe2aea',1,'etk']]],
['azure',['azure',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a51cb04e4a5b927ec82ceef2676b781f7',1,'etk::color']]],
['audio_2dorchestra_20library',['AUDIO-ORCHESTRA library',['../index.html',1,'']]]
];

26
search/all_1.html Normal file
View File

@ -0,0 +1,26 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="all_1.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
createResults();
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>

18
search/all_1.js Normal file
View File

@ -0,0 +1,18 @@
var searchData=
[
['build_20lib_20_26_20build_20sample',['Build lib &amp; build sample',['http://musicdsp.github.io/audio/audio_build.html',1,'']]],
['build_20lib_20_26_20build_20sample',['Build lib &amp; build sample',['../audio_orchestra_build.html',1,'']]],
['b',['b',['http://atria-soft.github.io/etk/classetk_1_1_color.html#ac6be77ac33983d040a2ede58bc631d94',1,'etk::Color']]],
['basenoise',['BaseNoise',['http://atria-soft.github.io/etk/classetk_1_1_base_noise.html',1,'etk']]],
['basenoise',['BaseNoise',['http://atria-soft.github.io/etk/classetk_1_1_base_noise.html#ad5cbfcc2d967af185c264744de04cf15',1,'etk::BaseNoise']]],
['beige',['beige',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a68244b2fa52245487cec1154155d0e03',1,'etk::color']]],
['bisque',['bisque',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#ad03e04b97263a2c64dedfc405ff983ee',1,'etk::color']]],
['black',['black',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a4198b330ccb2e9008665733eee338f73',1,'etk::color']]],
['blanchedalmond',['blanchedAlmond',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a12e8ba075411585a68aece7d0fead4cc',1,'etk::color']]],
['blue',['blue',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a50f114c6849684e9984ae1322493572c',1,'etk::color']]],
['blueviolet',['blueViolet',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#ac91bcec9a0a115d1070397f86cfdee4d',1,'etk::color']]],
['brown',['brown',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#af3dff0347662115abb89c6ddb8447227',1,'etk::color']]],
['burlywood',['burlyWood',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a1920dd335710c842ea4706e2383ef784',1,'etk::color']]],
['build_20lib_20_26_20build_20sample',['Build lib &amp; build sample',['http://atria-soft.github.io/elog/elog_build.html',1,'']]],
['build_20lib_20_26_20build_20sample',['Build lib &amp; build sample',['http://atria-soft.github.io/etk/etk_build.html',1,'']]]
];

26
search/all_10.html Normal file
View File

@ -0,0 +1,26 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="all_10.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
createResults();
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>

72
search/all_10.js Normal file
View File

@ -0,0 +1,72 @@
var searchData=
[
['saddlebrown',['saddleBrown',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a5407ca00d7f7bf3815a72616e0be95ae',1,'etk::color']]],
['safenormalize',['safeNormalize',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a245ccd3b57812316fc1ec98ea5c19434',1,'etk::Vector2D::safeNormalize()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a245ccd3b57812316fc1ec98ea5c19434',1,'Vector2D&lt; int32_t &gt;::safeNormalize()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a245ccd3b57812316fc1ec98ea5c19434',1,'Vector2D&lt; uint32_t &gt;::safeNormalize()'],['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#abee6d22d509043f24a5aedb208ba1019',1,'etk::Vector3D::safeNormalize()']]],
['salmon',['salmon',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#ad324737686a8c6ec9208a93e727710d8',1,'etk::color']]],
['sandybrown',['sandyBrown',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#affd1d87686e7d4fb1a720d4cb5c354cb',1,'etk::color']]],
['scale',['scale',['http://atria-soft.github.io/etk/classetk_1_1_matrix2.html#aacc34d6c0be39b22fed735cd09ffed84',1,'etk::Matrix2::scale(const vec2 &amp;_vect)'],['http://atria-soft.github.io/etk/classetk_1_1_matrix2.html#adaa317393ef799d0ab49c3b10cf47231',1,'etk::Matrix2::scale(float _value)'],['http://atria-soft.github.io/etk/classetk_1_1_matrix4.html#aa3121f90430c2e2d80bc967d4b94c114',1,'etk::Matrix4::scale(const vec3 &amp;_vect)'],['http://atria-soft.github.io/etk/classetk_1_1_matrix4.html#a41eb56d343978f32caa64dfda50cd6a5',1,'etk::Matrix4::scale(float _sx, float _sy, float _sz)']]],
['seagreen',['seaGreen',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a3d3d8140130f2383e7740b4cea443470',1,'etk::color']]],
['seashell',['seaShell',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a6b50105504f9be5dcf7ae59cbb115051',1,'etk::color']]],
['seeknode',['seekNode',['http://atria-soft.github.io/etk/namespaceetk.html#a4a0133c254ab2433999c1b61fd9d993e',1,'etk']]],
['seeknode_5fcurrent',['seekNode_current',['http://atria-soft.github.io/etk/namespaceetk.html#a4a0133c254ab2433999c1b61fd9d993ea8da2a40c899dc80a97a96999766d1598',1,'etk']]],
['seeknode_5fend',['seekNode_end',['http://atria-soft.github.io/etk/namespaceetk.html#a4a0133c254ab2433999c1b61fd9d993ea981349cc2910e974472575409d19f0b6',1,'etk']]],
['seeknode_5fstart',['seekNode_start',['http://atria-soft.github.io/etk/namespaceetk.html#a4a0133c254ab2433999c1b61fd9d993ea9019d83f9a109a54ab84067c2aa8abcd',1,'etk']]],
['select',['select',['http://atria-soft.github.io/etk/classetk_1_1_matrix.html#a1c8f1bb6f8d14c0fb9d4d39dac67b07b',1,'etk::Matrix']]],
['set',['set',['http://atria-soft.github.io/etk/classetk_1_1_color.html#a0955ac7d80e3886afa872d47e0cc1415',1,'etk::Color::set(MY_TYPE _r, MY_TYPE _g, MY_TYPE _b, MY_TYPE _a)'],['http://atria-soft.github.io/etk/classetk_1_1_color.html#a0f663138f780f134ae07957f5a9fef57',1,'etk::Color::set(MY_TYPE _r, MY_TYPE _g, MY_TYPE _b)'],['http://atria-soft.github.io/etk/classetk_1_1_color.html#a7de7eef4b78f10829066af98be02f27b',1,'etk::Color::set(MY_TYPE _r, MY_TYPE _g)'],['http://atria-soft.github.io/etk/classetk_1_1_color.html#a5063a9ee59f18fbeb7172f833617d8eb',1,'etk::Color::set(MY_TYPE _r)'],['http://atria-soft.github.io/etk/classetk_1_1_hash.html#af372b733bfbccd0d67f4df1fbfb62ad7',1,'etk::Hash::set()']]],
['seta',['setA',['http://atria-soft.github.io/etk/classetk_1_1_color.html#a1d24c054b6d64d2a73c6cef57d91c31a',1,'etk::Color']]],
['setargzero',['setArgZero',['http://atria-soft.github.io/etk/namespaceetk.html#a5eda91763c9f02f0e7e16c099092695d',1,'etk']]],
['setb',['setB',['http://atria-soft.github.io/etk/classetk_1_1_color.html#ae8078ca64701dbc1b5080589037743f1',1,'etk::Color']]],
['setbacktrace',['setBackTrace',['http://atria-soft.github.io/elog/namespaceelog.html#aae6ff218b5e851513cfc29d030b865fb',1,'elog']]],
['setbasefoldercache',['setBaseFolderCache',['http://atria-soft.github.io/etk/namespaceetk.html#a6cdcbe0aeba288278c89a995481b9e6f',1,'etk']]],
['setbasefolderdata',['setBaseFolderData',['http://atria-soft.github.io/etk/namespaceetk.html#ab8546560f91d95e2df5f176f5ec0fc99',1,'etk']]],
['setbasefolderdatauser',['setBaseFolderDataUser',['http://atria-soft.github.io/etk/namespaceetk.html#a91fb8f53bc0d3ad4ed2061b3f5af7734',1,'etk']]],
['setcallbacklog',['setCallbackLog',['http://atria-soft.github.io/elog/namespaceelog.html#a8c02ad11df203c22c7e65c759b43efdd',1,'elog']]],
['setcolor',['setColor',['http://atria-soft.github.io/elog/namespaceelog.html#a157a0ce2993c115906b8a2607f6a9133',1,'elog']]],
['setfrompoints',['setFromPoints',['http://atria-soft.github.io/etk/classetk_1_1_plane.html#a69b4871efd25fa204a3917360bfb8cc9',1,'etk::Plane']]],
['setfunction',['setFunction',['http://atria-soft.github.io/elog/namespaceelog.html#aed76a2bba4f8c0f51633fa64ab08362d',1,'elog']]],
['setg',['setG',['http://atria-soft.github.io/etk/classetk_1_1_color.html#a724b243ffc1ba471eab6b04f6ca6ecd4',1,'etk::Color']]],
['setgroupreadable',['setGroupReadable',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node_right.html#acca31813653f621d4859d5c837f057a5',1,'etk::FSNodeRight']]],
['setgrouprunable',['setGroupRunable',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node_right.html#a5184205ed51108dd9bf3ed4ad141588e',1,'etk::FSNodeRight']]],
['setgroupwritable',['setGroupWritable',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node_right.html#acc834eeef2e84b9e7cceac38b3a59389',1,'etk::FSNodeRight']]],
['setintercept',['setIntercept',['http://atria-soft.github.io/etk/classetk_1_1_plane.html#a691582e68a6ee38b0c7c9ccf2a6734f3',1,'etk::Plane']]],
['setinterpolate3',['setInterpolate3',['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a3e59820b3cc3f9138e69d8aee9a448c8',1,'etk::Vector3D']]],
['setlevel',['setLevel',['http://atria-soft.github.io/elog/namespaceelog.html#a3afe4089acb36e88d7266c1ce85ddc7b',1,'elog::setLevel(const std::string &amp;_name, enum elog::level _level)'],['http://atria-soft.github.io/elog/namespaceelog.html#a423b95c78b78e98de35c8f176b9efbdc',1,'elog::setLevel(int32_t _id, enum elog::level _level)'],['http://atria-soft.github.io/elog/namespaceelog.html#a600189d0be9885f63d651201ab0c3688',1,'elog::setLevel(enum elog::level _level)']]],
['setlibname',['setLibName',['http://atria-soft.github.io/elog/namespaceelog.html#a98f690a8538d4726fa0060331e021a08',1,'elog']]],
['setline',['setLine',['http://atria-soft.github.io/elog/namespaceelog.html#a2591d4ba7e3136ff84c0b81289000b79',1,'elog']]],
['setloginfile',['setLogInFile',['http://atria-soft.github.io/elog/namespaceelog.html#a78083b9d6ef033e329b788f8fa22f3f3',1,'elog']]],
['setmax',['setMax',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a72ae52d19423c46bad955ad1a9f59041',1,'etk::Vector2D::setMax()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a72ae52d19423c46bad955ad1a9f59041',1,'Vector2D&lt; int32_t &gt;::setMax()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a72ae52d19423c46bad955ad1a9f59041',1,'Vector2D&lt; uint32_t &gt;::setMax()'],['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a7b9a94f71854f5ad89ef23ef22dcb321',1,'etk::Vector3D::setMax()'],['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#a2469c42527ead691f479a9a573709e5f',1,'etk::Vector4D::setMax()']]],
['setmin',['setMin',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#aa087eeec653b538d166c0d2794737fb1',1,'etk::Vector2D::setMin()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#aa087eeec653b538d166c0d2794737fb1',1,'Vector2D&lt; int32_t &gt;::setMin()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#aa087eeec653b538d166c0d2794737fb1',1,'Vector2D&lt; uint32_t &gt;::setMin()'],['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a7c2e3d0042a62791b1c653d5081577c2',1,'etk::Vector3D::setMin()'],['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#aa647808a65613af0e1e757f27444f0a6',1,'etk::Vector4D::setMin()']]],
['setname',['setName',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node.html#a6e787e7f3148dae0c74139be7b05ab43',1,'etk::FSNode']]],
['setnormal',['setNormal',['http://atria-soft.github.io/etk/classetk_1_1_plane.html#a202b3335b2b71cc84726a1b57b8e1e70',1,'etk::Plane']]],
['setotherreadable',['setOtherReadable',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node_right.html#a6ad46a56d871f5925a826a6fd3071b78',1,'etk::FSNodeRight']]],
['setotherrunable',['setOtherRunable',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node_right.html#a290708c849f5b58714dad5a1926cfe1c',1,'etk::FSNodeRight']]],
['setotherwritable',['setOtherWritable',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node_right.html#a931050b25db28423f1a2899f8ec188a0',1,'etk::FSNodeRight']]],
['setr',['setR',['http://atria-soft.github.io/etk/classetk_1_1_color.html#a96c8b12779776562c2fa0dfdc4d1b242',1,'etk::Color']]],
['setright',['setRight',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node.html#a65bce1c8887edad87a90c8c7ffb861d3',1,'etk::FSNode']]],
['setthreadid',['setThreadId',['http://atria-soft.github.io/elog/namespaceelog.html#a9b835d4980949026a8883570ea3837af',1,'elog']]],
['setthreadnameenable',['setThreadNameEnable',['http://atria-soft.github.io/elog/namespaceelog.html#ae64b5abf2ea03562679668e6242c49a2',1,'elog']]],
['settime',['setTime',['http://atria-soft.github.io/elog/namespaceelog.html#a15e30e61e8db5a43e72358d2c02be6a4',1,'elog']]],
['setuserreadable',['setUserReadable',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node_right.html#acadd7b9c2c632f9805569ff4f592bda9',1,'etk::FSNodeRight']]],
['setuserrunable',['setUserRunable',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node_right.html#a0ee76ec4897c406ab67ea25659953070',1,'etk::FSNodeRight']]],
['setuserwritable',['setUserWritable',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node_right.html#a9e5e2e4c7926c22101e6955b3d8c9139',1,'etk::FSNodeRight']]],
['setvalue',['setValue',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ade86675814738c6b7a6a797ee128a2b2',1,'etk::Vector2D::setValue()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ade86675814738c6b7a6a797ee128a2b2',1,'Vector2D&lt; int32_t &gt;::setValue()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ade86675814738c6b7a6a797ee128a2b2',1,'Vector2D&lt; uint32_t &gt;::setValue()'],['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a96d02449aaa2dfeb4e60320da667ab92',1,'etk::Vector3D::setValue()'],['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#a9b164290093d948905fab0f56fbe22fc',1,'etk::Vector4D::setValue()']]],
['setw',['setW',['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#a1750c9d1b91d67b8b2bc9d0cce759944',1,'etk::Vector4D']]],
['setx',['setX',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ae2acd7c10cdd510ce23ff11839c95c04',1,'etk::Vector2D::setX()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ae2acd7c10cdd510ce23ff11839c95c04',1,'Vector2D&lt; int32_t &gt;::setX()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ae2acd7c10cdd510ce23ff11839c95c04',1,'Vector2D&lt; uint32_t &gt;::setX()'],['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#ab7ab9d9ce1138ffafebaff3001bb7d29',1,'etk::Vector3D::setX()'],['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#a238d966b077394ff118f2088479fb620',1,'etk::Vector4D::setX()']]],
['sety',['setY',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a35a3f35ed049b7193ca67ea815efd465',1,'etk::Vector2D::setY()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a35a3f35ed049b7193ca67ea815efd465',1,'Vector2D&lt; int32_t &gt;::setY()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a35a3f35ed049b7193ca67ea815efd465',1,'Vector2D&lt; uint32_t &gt;::setY()'],['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a10c41fb516fb33ef56201f06992462d1',1,'etk::Vector3D::setY()'],['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#a7489a0c8d592c9464a8e378bbb7e570e',1,'etk::Vector4D::setY()']]],
['setz',['setZ',['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a9a5c1d69fd9066daae0a759831ba0e30',1,'etk::Vector3D::setZ()'],['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#ae5bbc387ea4199ea535d4d033cfc40d1',1,'etk::Vector4D::setZ()']]],
['setzero',['setZero',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ab2e921e0009f0e0de78d06d16f6a78e0',1,'etk::Vector2D::setZero()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ab2e921e0009f0e0de78d06d16f6a78e0',1,'Vector2D&lt; int32_t &gt;::setZero()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ab2e921e0009f0e0de78d06d16f6a78e0',1,'Vector2D&lt; uint32_t &gt;::setZero()'],['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a7ddb707a7a1609bcbd8c092186a6db19',1,'etk::Vector3D::setZero()'],['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#a978b0511ade11701ffdbd7974de6932e',1,'etk::Vector4D::setZero()']]],
['sienna',['sienna',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a0a8eaf790795f7b5fc63c81ade8652ce',1,'etk::color']]],
['silver',['silver',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#aca25e934d9d1ac1538a4a0c7011c6d0c',1,'etk::color']]],
['simplifypath',['simplifyPath',['http://atria-soft.github.io/etk/namespaceetk.html#a0087446ff0e9b533ea70b3043ae2addc',1,'etk']]],
['size',['size',['http://atria-soft.github.io/etk/classetk_1_1_archive.html#a17d06497d98be15f6080cf84010d35a0',1,'etk::Archive::size()'],['http://atria-soft.github.io/etk/classetk_1_1_archive_content.html#a36dc83c35ebdacdce11e04dbcba4334c',1,'etk::ArchiveContent::size()'],['http://atria-soft.github.io/etk/classetk_1_1_hash.html#a4d639eb19c25b1ff76d34c225a132deb',1,'etk::Hash::size()']]],
['skyblue',['skyBlue',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a1576fd8ed2c3fe5ed5a10f8147d0ed1d',1,'etk::color']]],
['slateblue',['slateBlue',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a4f605302a82a6173de726e797c3edf0d',1,'etk::color']]],
['slategray',['slateGray',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a5d3c48d14e696bc1b8d9d43158dfa0b2',1,'etk::color']]],
['slategrey',['slateGrey',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a9cefc035f467790176ba159bb6255c6e',1,'etk::color']]],
['snow',['snow',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#ae312c38fc6be5178a4ff63921f162723',1,'etk::color']]],
['space',['Space',['http://atria-soft.github.io/etk/namespaceu32char.html#a7a114cf0424bb72fd7f3f10c9cd017e8',1,'u32char']]],
['springgreen',['springGreen',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#ab3b6e2028b00791f15ddff7f9a1fe703',1,'etk::color']]],
['stdtools_2ehpp',['stdTools.hpp',['http://atria-soft.github.io/etk/std__tools__8hpp.html',1,'']]],
['steelblue',['steelBlue',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a9162b466a59a0bbe420b49c565e9dd6f',1,'etk::color']]],
['suppress',['Suppress',['http://atria-soft.github.io/etk/namespaceu32char.html#a9cff086787b8b0321e36251a27c40321',1,'u32char']]]
];

26
search/all_11.html Normal file
View File

@ -0,0 +1,26 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="all_11.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
createResults();
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>

36
search/all_11.js Normal file
View File

@ -0,0 +1,36 @@
var searchData=
[
['tutorials',['Tutorials',['http://atria-soft.github.io/elog/elog_tutorial.html',1,'']]],
['tabulation',['Tabulation',['http://atria-soft.github.io/etk/namespaceu32char.html#ad7a0c42ea443e20bfceb69f863bebc6a',1,'u32char']]],
['tan',['tan',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a923b2869775837088eabdb48681f4b1e',1,'etk::color']]],
['teal',['teal',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a9b28dace6aa8c81c0c59a7343875d2b5',1,'etk::color']]],
['theoricfirst',['theoricFirst',['http://atria-soft.github.io/etk/namespaceutf8.html#a52043c6c7cf75da5f8e8812ffb4ffc1c',1,'utf8']]],
['theoriclen',['theoricLen',['http://atria-soft.github.io/etk/namespaceutf8.html#ad408da64c12fe3345b9576ab487bd7e3',1,'utf8']]],
['thistle',['thistle',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a729d8532a3b7c147fcaa726ce2d887b8',1,'etk::color']]],
['time_2ehpp',['Time.hpp',['http://musicdsp.github.io/audio/__time__8hpp.html',1,'']]],
['timeaccessed',['timeAccessed',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node.html#a43c3b946ef91744dd2991734166c1d87',1,'etk::FSNode']]],
['timeaccessedstring',['timeAccessedString',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node.html#a17022f6598927b1b9f169cbcd99099e6',1,'etk::FSNode']]],
['timecreated',['timeCreated',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node.html#a46fe9a1916400ca932d31a847dff0f21',1,'etk::FSNode']]],
['timecreatedstring',['timeCreatedString',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node.html#a30a497f2d095469b5846756a1b51a749',1,'etk::FSNode']]],
['timemodified',['timeModified',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node.html#afa84341a6764252f5fb70eb48540f496',1,'etk::FSNode']]],
['timemodifiedstring',['timeModifiedString',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node.html#a3cdee551ca80df9597a408b42b4f7210',1,'etk::FSNode']]],
['to_5fstring',['to_string',['http://atria-soft.github.io/etk/namespaceetk.html#a48ed31a00f989f5b188d7254e6945a74',1,'etk::to_string(const TYPE &amp;_variable)'],['http://atria-soft.github.io/etk/namespaceetk.html#a17aa4febea213096442bcb8e7ca80805',1,'etk::to_string(const std::vector&lt; TYPE &gt; &amp;_list)']]],
['toint',['toInt',['http://atria-soft.github.io/etk/namespaceu32char.html#afc40d527459e3869ea8dc54d3b5ef225',1,'u32char']]],
['tomato',['tomato',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a9f3fe621ef99b1b452a5a11af1a06ae0',1,'etk::color']]],
['tool_2ehpp',['tool.hpp',['http://atria-soft.github.io/etk/tool__8hpp.html',1,'']]],
['touch',['touch',['http://atria-soft.github.io/etk/classetk_1_1_f_s_node.html#a6fd01aeb7f84399ad99573e0a7e2f8fc',1,'etk::FSNode']]],
['translate',['translate',['http://atria-soft.github.io/etk/classetk_1_1_matrix2.html#abd22ba1cbe8dc53c0446abcb55d8551a',1,'etk::Matrix2::translate()'],['http://atria-soft.github.io/etk/classetk_1_1_matrix4.html#aa6cd07f3a802c5a57bddbf5a6db95c4d',1,'etk::Matrix4::translate()']]],
['transpose',['transpose',['http://atria-soft.github.io/etk/classetk_1_1_matrix.html#ab05c8b14ec68367d0391c23b77870a3b',1,'etk::Matrix::transpose()'],['http://atria-soft.github.io/etk/classetk_1_1_matrix4.html#a8214b2300ebdc4b42bde1efe93e84fae',1,'etk::Matrix4::transpose()']]],
['triple',['triple',['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#af1e591712d4a15bd21ad6fd22d72df97',1,'etk::Vector3D']]],
['turquoise',['turquoise',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a8162f21fcdb858a4c5017a20db4c5830',1,'etk::color']]],
['typenode',['typeNode',['http://atria-soft.github.io/etk/namespaceetk.html#a99c2dd948d6da85ed816fa0c267862e8',1,'etk']]],
['typenode_5fblock',['typeNode_block',['http://atria-soft.github.io/etk/namespaceetk.html#a99c2dd948d6da85ed816fa0c267862e8a18f1c41a5c9bc4842a512954af23d630',1,'etk']]],
['typenode_5fcharacter',['typeNode_character',['http://atria-soft.github.io/etk/namespaceetk.html#a99c2dd948d6da85ed816fa0c267862e8a07f61c5191a0e393871ecf69f4f32eed',1,'etk']]],
['typenode_5ffifo',['typeNode_fifo',['http://atria-soft.github.io/etk/namespaceetk.html#a99c2dd948d6da85ed816fa0c267862e8af5a5a8cef27d1c2abf4c6b7b9a893890',1,'etk']]],
['typenode_5ffile',['typeNode_file',['http://atria-soft.github.io/etk/namespaceetk.html#a99c2dd948d6da85ed816fa0c267862e8a4bb596537f408d12f89da085cac752c6',1,'etk']]],
['typenode_5ffolder',['typeNode_folder',['http://atria-soft.github.io/etk/namespaceetk.html#a99c2dd948d6da85ed816fa0c267862e8a67fbbfd55ff1568df12ca9719bc73680',1,'etk']]],
['typenode_5flink',['typeNode_link',['http://atria-soft.github.io/etk/namespaceetk.html#a99c2dd948d6da85ed816fa0c267862e8a73f8fdd1c9f08183be017f372935cf14',1,'etk']]],
['typenode_5fsocket',['typeNode_socket',['http://atria-soft.github.io/etk/namespaceetk.html#a99c2dd948d6da85ed816fa0c267862e8a9b46f1f9d230b9490b2c17af379c61bc',1,'etk']]],
['typenode_5funknow',['typeNode_unknow',['http://atria-soft.github.io/etk/namespaceetk.html#a99c2dd948d6da85ed816fa0c267862e8a4c85a6b7dccfd424a20b383e5dbe63bf',1,'etk']]],
['types_2ehpp',['types.hpp',['http://atria-soft.github.io/etk/types__8hpp.html',1,'(Global Namespace)'],['http://musicdsp.github.io/audio/types__8hpp.html',1,'(Global Namespace)']]]
];

26
search/all_12.html Normal file

@@ -0,0 +1,26 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="all_12.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
createResults();
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>

6
search/all_12.js Normal file

@@ -0,0 +1,6 @@
var searchData=
[
['u32char',['u32char',['http://atria-soft.github.io/etk/namespaceu32char.html',1,'']]],
['unsetloginfile',['unsetLogInFile',['http://atria-soft.github.io/elog/namespaceelog.html#a496120feb0c59449a46057559c6c8a1a',1,'elog']]],
['utf8',['utf8',['http://atria-soft.github.io/etk/namespaceutf8.html',1,'']]]
];

26
search/all_13.html Normal file

@@ -0,0 +1,26 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="all_13.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
createResults();
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>

15
search/all_13.js Normal file

@@ -0,0 +1,15 @@
var searchData=
[
['vector2d',['Vector2D',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html',1,'etk']]],
['vector2d',['Vector2D',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a09b5e26eacb50a8059d0e0c65405eb82',1,'etk::Vector2D::Vector2D(T _xxx, T _yyy)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#aaece962e3caa1d70afe0b1682ce8212e',1,'etk::Vector2D::Vector2D(const Vector2D&lt; double &gt; &amp;_obj)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#acab36ec2f778107bc89f4e5c9463191b',1,'etk::Vector2D::Vector2D(const Vector2D&lt; float &gt; &amp;_obj)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ab07ef273334d86b96dd13a4ce4c19137',1,'etk::Vector2D::Vector2D(const Vector2D&lt; int32_t &gt; &amp;_obj)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a187bb96fc68cb5d5dadc99f573674b98',1,'etk::Vector2D::Vector2D(const std::string &amp;_str)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a09b5e26eacb50a8059d0e0c65405eb82',1,'Vector2D&lt; int32_t &gt;::Vector2D(int32_t _xxx, int32_t _yyy)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#aaece962e3caa1d70afe0b1682ce8212e',1,'Vector2D&lt; int32_t &gt;::Vector2D(const Vector2D&lt; double &gt; &amp;_obj)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#acab36ec2f778107bc89f4e5c9463191b',1,'Vector2D&lt; int32_t &gt;::Vector2D(const Vector2D&lt; float &gt; &amp;_obj)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ab07ef273334d86b96dd13a4ce4c19137',1,'Vector2D&lt; int32_t &gt;::Vector2D(const Vector2D&lt; int32_t &gt; &amp;_obj)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a187bb96fc68cb5d5dadc99f573674b98',1,'Vector2D&lt; int32_t &gt;::Vector2D(const std::string &amp;_str)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a09b5e26eacb50a8059d0e0c65405eb82',1,'Vector2D&lt; uint32_t &gt;::Vector2D(uint32_t _xxx, uint32_t _yyy)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#aaece962e3caa1d70afe0b1682ce8212e',1,'Vector2D&lt; uint32_t &gt;::Vector2D(const Vector2D&lt; double &gt; &amp;_obj)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#acab36ec2f778107bc89f4e5c9463191b',1,'Vector2D&lt; uint32_t &gt;::Vector2D(const Vector2D&lt; float &gt; &amp;_obj)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#ab07ef273334d86b96dd13a4ce4c19137',1,'Vector2D&lt; uint32_t &gt;::Vector2D(const Vector2D&lt; int32_t &gt; &amp;_obj)'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a187bb96fc68cb5d5dadc99f573674b98',1,'Vector2D&lt; uint32_t &gt;::Vector2D(const std::string &amp;_str)']]],
['vector2d_2ehpp',['Vector2D.hpp',['http://atria-soft.github.io/etk/__vector2__d__8hpp.html',1,'']]],
['vector2d_3c_20int32_5ft_20_3e',['Vector2D&lt; int32_t &gt;',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html',1,'']]],
['vector2d_3c_20uint32_5ft_20_3e',['Vector2D&lt; uint32_t &gt;',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html',1,'']]],
['vector3d',['Vector3D',['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html',1,'etk']]],
['vector3d',['Vector3D',['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a409169c1781cae5220b9f7bd078e05d9',1,'etk::Vector3D::Vector3D()'],['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#a7f5d02301a551e7f686092d7b9a5b269',1,'etk::Vector3D::Vector3D(const T &amp;_xxx, const T &amp;_yyy, const T &amp;_zzz)']]],
['vector3d_2ehpp',['Vector3D.hpp',['http://atria-soft.github.io/etk/__vector3__d__8hpp.html',1,'']]],
['vector4d',['Vector4D',['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html',1,'etk']]],
['vector4d',['Vector4D',['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#a0e140dec4eca4f3695f19fb92dc3e1ae',1,'etk::Vector4D::Vector4D()'],['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#a7a41fd47b438cc43849a2b2c0f308db2',1,'etk::Vector4D::Vector4D(const T &amp;_xxx, const T &amp;_yyy, const T &amp;_zzz, const T &amp;_www)']]],
['vector4d_2ehpp',['Vector4D.hpp',['http://atria-soft.github.io/etk/__vector4__d__8hpp.html',1,'']]],
['violet',['violet',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#ac86b1f752bb46992b83000f1b48957ec',1,'etk::color']]]
];

26
search/all_14.html Normal file

@@ -0,0 +1,26 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="all_14.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
createResults();
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>

8
search/all_14.js Normal file

@@ -0,0 +1,8 @@
var searchData=
[
['w',['w',['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#a0ede53650faf9183b87a3592e8be142d',1,'etk::Vector4D']]],
['wait',['wait',['http://atria-soft.github.io/etk/classetk_1_1_fifo.html#a0693f87b2886e553ccdafdac112ebbd5',1,'etk::Fifo::wait(MY_TYPE &amp;_data)'],['http://atria-soft.github.io/etk/classetk_1_1_fifo.html#aa08e5c57d91bc54ea08f377a6e2653e3',1,'etk::Fifo::wait(MY_TYPE &amp;_data, uint32_t _timeOutInUs)']]],
['wheat',['wheat',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a0fee5d2cd67adc3c4a5d820616854cde',1,'etk::color']]],
['white',['white',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a6559ba632982f84ab6215281bd431b9c',1,'etk::color']]],
['whitesmoke',['whiteSmoke',['http://atria-soft.github.io/etk/namespaceetk_1_1color.html#a1b012cf56fb3a1c63e55aa2d83f7472b',1,'etk::color']]]
];

26
search/all_15.html Normal file

@@ -0,0 +1,26 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="all_15.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
createResults();
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>

4
search/all_15.js Normal file

@@ -0,0 +1,4 @@
var searchData=
[
['x',['x',['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a8409de5d430e5cdcb7326aee94176873',1,'etk::Vector2D::x()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a8409de5d430e5cdcb7326aee94176873',1,'Vector2D&lt; int32_t &gt;::x()'],['http://atria-soft.github.io/etk/classetk_1_1_vector2_d.html#a8409de5d430e5cdcb7326aee94176873',1,'Vector2D&lt; uint32_t &gt;::x()'],['http://atria-soft.github.io/etk/classetk_1_1_vector3_d.html#af2bda50fefea08fb49c04b15a61bcb6a',1,'etk::Vector3D::x()'],['http://atria-soft.github.io/etk/classetk_1_1_vector4_d.html#a98cef087b46916c257dc7b7e39230e74',1,'etk::Vector4D::x()']]]
];

26
search/all_16.html Normal file

@@ -0,0 +1,26 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta name="generator" content="Doxygen 1.8.12"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="all_16.js"></script>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="Loading">Loading...</div>
<div id="SRResults"></div>
<script type="text/javascript"><!--
createResults();
--></script>
<div class="SRStatus" id="Searching">Searching...</div>
<div class="SRStatus" id="NoMatches">No Matches</div>
<script type="text/javascript"><!--
document.getElementById("Loading").style.display="none";
document.getElementById("NoMatches").style.display="none";
var searchResults = new SearchResults("searchResults");
searchResults.Search();
--></script>
</div>
</body>
</html>

Some files were not shown because too many files have changed in this diff.