1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), Macintosh OS X (CoreAudio and Jack), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2009 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 asked to send the modifications to the original developer so that
28 they can be incorporated into the canonical version. This is,
29 however, not a binding provision of this license.
31 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
34 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
35 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
36 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
39 /************************************************************************/
41 // RtAudio: Version 4.0.6
49 // Static variable definitions.
// Table of candidate sample rates probed when querying device capabilities.
// MAX_SAMPLE_RATES must equal the number of entries in SAMPLE_RATES[].
// NOTE(review): this chunk is a line-numbered, partially sampled listing
// (gaps in the embedded numbering); some closing braces and #endif lines,
// including this array's closing "};", are not visible here.
50 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
51 const unsigned int RtApi::SAMPLE_RATES[] = {
52 4000, 5512, 8000, 9600, 11025, 16000, 22050,
53 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform mutex abstraction used to guard stream_ state:
//  - Win32 critical sections for the DirectSound/ASIO builds;
//  - pthread mutexes for the ALSA/JACK/OSS/CoreAudio builds;
//  - dummy no-op definitions otherwise (the abs(*A) forms below are the
//    fallback branch; its #else line is not visible in this listing --
//    confirm against the full source).
56 #if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
57 #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
58 #define MUTEX_DESTROY(A) DeleteCriticalSection(A)
59 #define MUTEX_LOCK(A) EnterCriticalSection(A)
60 #define MUTEX_UNLOCK(A) LeaveCriticalSection(A)
61 #elif defined(__LINUX_ALSA__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
63 #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
64 #define MUTEX_DESTROY(A) pthread_mutex_destroy(A)
65 #define MUTEX_LOCK(A) pthread_mutex_lock(A)
66 #define MUTEX_UNLOCK(A) pthread_mutex_unlock(A)
68 #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
69 #define MUTEX_DESTROY(A) abs(*A) // dummy definitions
72 // *************************************************** //
74 // RtAudio definitions.
76 // *************************************************** //
// Fill 'apis' with the identifiers of every audio API compiled into this
// build.  The push_back order below determines the order in which the
// RtAudio constructor searches for a usable API.  Never throws.
78 void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()
82 // The order here will control the order of RtAudio's API search in
84 #if defined(__UNIX_JACK__)
85 apis.push_back( UNIX_JACK );
87 #if defined(__LINUX_ALSA__)
88 apis.push_back( LINUX_ALSA );
90 #if defined(__LINUX_OSS__)
91 apis.push_back( LINUX_OSS );
93 #if defined(__WINDOWS_ASIO__)
94 apis.push_back( WINDOWS_ASIO );
96 #if defined(__WINDOWS_DS__)
97 apis.push_back( WINDOWS_DS );
99 #if defined(__MACOSX_CORE__)
100 apis.push_back( MACOSX_CORE );
102 #if defined(__RTAUDIO_DUMMY__)
103 apis.push_back( RTAUDIO_DUMMY );
// Instantiate the concrete RtApi subclass corresponding to 'api' and store
// it in rtapi_.  Each candidate is compiled in only when its preprocessor
// symbol is defined, so an unsupported value leaves rtapi_ unchanged
// (presumably still 0 -- the initialization line is not visible here).
107 void RtAudio :: openRtApi( RtAudio::Api api )
109 #if defined(__UNIX_JACK__)
110 if ( api == UNIX_JACK )
111 rtapi_ = new RtApiJack();
113 #if defined(__LINUX_ALSA__)
114 if ( api == LINUX_ALSA )
115 rtapi_ = new RtApiAlsa();
117 #if defined(__LINUX_OSS__)
118 if ( api == LINUX_OSS )
119 rtapi_ = new RtApiOss();
121 #if defined(__WINDOWS_ASIO__)
122 if ( api == WINDOWS_ASIO )
123 rtapi_ = new RtApiAsio();
125 #if defined(__WINDOWS_DS__)
126 if ( api == WINDOWS_DS )
127 rtapi_ = new RtApiDs();
129 #if defined(__MACOSX_CORE__)
130 if ( api == MACOSX_CORE )
131 rtapi_ = new RtApiCore();
133 #if defined(__RTAUDIO_DUMMY__)
134 if ( api == RTAUDIO_DUMMY )
135 rtapi_ = new RtApiDummy();
// Constructor.  If a specific API was requested, try it first; otherwise
// (or if that request had no compiled support) walk the compiled-API list
// in getCompiledApi() order and keep the first API that reports at least
// one device.  Never throws; failure is reported via std::cerr only.
139 RtAudio :: RtAudio( RtAudio::Api api ) throw()
143 if ( api != UNSPECIFIED ) {
144 // Attempt to open the specified API.
146 if ( rtapi_ ) return;
148 // No compiled support for specified API value. Issue a debug
149 // warning and continue as if no API was specified.
150 std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
153 // Iterate through the compiled APIs and return as soon as we find
154 // one with at least one device or we reach the end of the list.
155 std::vector< RtAudio::Api > apis;
156 getCompiledApi( apis );
157 for ( unsigned int i=0; i<apis.size(); i++ ) {
158 openRtApi( apis[i] );
159 if ( rtapi_->getDeviceCount() ) break;
162 if ( rtapi_ ) return;
164 // It should not be possible to get here because the preprocessor
165 // definition __RTAUDIO_DUMMY__ is automatically defined if no
166 // API-specific definitions are passed to the compiler. But just in
167 // case something weird happens, we'll print out an error message.
168 std::cerr << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
// Destructor.  Body not visible in this listing; presumably releases the
// rtapi_ instance allocated by openRtApi() -- confirm against full source.
171 RtAudio :: ~RtAudio() throw()
// Thin public wrapper: forwards all stream-open arguments to the selected
// API implementation (rtapi_->openStream).  The tail of the forwarded call
// (userData/options) is not visible in this listing.
176 void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
177 RtAudio::StreamParameters *inputParameters,
178 RtAudioFormat format, unsigned int sampleRate,
179 unsigned int *bufferFrames,
180 RtAudioCallback callback, void *userData,
181 RtAudio::StreamOptions *options )
183 return rtapi_->openStream( outputParameters, inputParameters, format,
184 sampleRate, bufferFrames, callback,
188 // *************************************************** //
190 // Public RtApi definitions (see end of file for
191 // private or protected utility functions).
193 // *************************************************** //
// Interior of the RtApi constructor (its signature line is not visible in
// this listing): put the stream bookkeeping into a known-closed state and
// create the mutex guarding it.
197 stream_.state = STREAM_CLOSED;
198 stream_.mode = UNINITIALIZED;
199 stream_.apiHandle = 0;
200 stream_.userBuffer[0] = 0;
201 stream_.userBuffer[1] = 0;
202 MUTEX_INITIALIZE( &stream_.mutex );
203 showWarnings_ = true;
// Interior of the RtApi destructor (signature not visible): release the
// stream mutex created above.
208 MUTEX_DESTROY( &stream_.mutex );
// Validate all openStream() arguments, probe the requested device(s), and
// leave the stream in the STOPPED state.  error() is invoked with
// INVALID_USE for caller mistakes and SYSTEM_ERROR when a probe fails.
211 void RtApi :: openStream( RtAudio::StreamParameters *oParams,
212 RtAudio::StreamParameters *iParams,
213 RtAudioFormat format, unsigned int sampleRate,
214 unsigned int *bufferFrames,
215 RtAudioCallback callback, void *userData,
216 RtAudio::StreamOptions *options )
// Refuse to open on top of an already-open stream.
218 if ( stream_.state != STREAM_CLOSED ) {
219 errorText_ = "RtApi::openStream: a stream is already open!";
220 error( RtError::INVALID_USE );
// Each non-NULL parameter structure must request at least one channel.
223 if ( oParams && oParams->nChannels < 1 ) {
224 errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
225 error( RtError::INVALID_USE );
228 if ( iParams && iParams->nChannels < 1 ) {
229 errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
230 error( RtError::INVALID_USE );
// At least one direction must be specified.
233 if ( oParams == NULL && iParams == NULL ) {
234 errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
235 error( RtError::INVALID_USE );
// formatBytes() returns 0 for unrecognized format codes.
238 if ( formatBytes(format) == 0 ) {
239 errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
240 error( RtError::INVALID_USE );
// Validate the requested device indices against the probed device count.
243 unsigned int nDevices = getDeviceCount();
244 unsigned int oChannels = 0;
246 oChannels = oParams->nChannels;
247 if ( oParams->deviceId >= nDevices ) {
248 errorText_ = "RtApi::openStream: output device parameter value is invalid.";
249 error( RtError::INVALID_USE );
253 unsigned int iChannels = 0;
255 iChannels = iParams->nChannels;
256 if ( iParams->deviceId >= nDevices ) {
257 errorText_ = "RtApi::openStream: input device parameter value is invalid.";
258 error( RtError::INVALID_USE );
// Probe/open the output half first ...
265 if ( oChannels > 0 ) {
267 result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
268 sampleRate, format, bufferFrames, options );
269 if ( result == false ) error( RtError::SYSTEM_ERROR );
// ... then the input half; on input failure, tear down any output half
// opened above before reporting the error.
272 if ( iChannels > 0 ) {
274 result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
275 sampleRate, format, bufferFrames, options );
276 if ( result == false ) {
277 if ( oChannels > 0 ) closeStream();
278 error( RtError::SYSTEM_ERROR );
// Record the user callback/data and report the actual buffer count back
// through the options structure; the stream is now open but stopped.
282 stream_.callbackInfo.callback = (void *) callback;
283 stream_.callbackInfo.userData = userData;
285 if ( options ) options->numberOfBuffers = stream_.nBuffers;
286 stream_.state = STREAM_STOPPED;
// Base-class default-device queries.  Bodies are not visible in this
// listing; per the comments they are trivial fallbacks (presumably
// returning 0) that API subclasses override when the backend can report a
// real default device.
289 unsigned int RtApi :: getDefaultInputDevice( void )
291 // Should be implemented in subclasses if possible.
295 unsigned int RtApi :: getDefaultOutputDevice( void )
297 // Should be implemented in subclasses if possible.
// Pure-in-spirit base implementations: every API subclass must provide its
// own closeStream() and probeDeviceOpen().  Bodies are not visible in this
// listing (probeDeviceOpen presumably returns a failure value -- confirm).
301 void RtApi :: closeStream( void )
303 // MUST be implemented in subclasses!
307 bool RtApi :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
308 unsigned int firstChannel, unsigned int sampleRate,
309 RtAudioFormat format, unsigned int *bufferSize,
310 RtAudio::StreamOptions *options )
312 // MUST be implemented in subclasses!
// Advance the stream clock by one buffer's duration (bufferSize /
// sampleRate seconds) and, when gettimeofday() is available, timestamp the
// tick so getStreamTime() can interpolate between callbacks.
316 void RtApi :: tickStreamTime( void )
318 // Subclasses that do not provide their own implementation of
319 // getStreamTime should call this function once per buffer I/O to
320 // provide basic stream time support.
322 stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
324 #if defined( HAVE_GETTIMEOFDAY )
325 gettimeofday( &stream_.lastTickTimestamp, NULL );
// Report total stream latency in frames: the output latency (latency[0])
// plus, for input or duplex streams, the input latency (latency[1]).
329 long RtApi :: getStreamLatency( void )
333 long totalLatency = 0;
334 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
335 totalLatency = stream_.latency[0];
336 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
337 totalLatency += stream_.latency[1];
// Return the elapsed stream time in seconds.  With HAVE_GETTIMEOFDAY the
// value is refined by adding the wall-clock time elapsed since the last
// tickStreamTime() call; otherwise (and for stopped or just-started
// streams) the raw accumulated streamTime is returned.  The declarations
// of the local timeval variables 'now'/'then' are not visible in this
// listing.
342 double RtApi :: getStreamTime( void )
346 #if defined( HAVE_GETTIMEOFDAY )
347 // Return a very accurate estimate of the stream time by
348 // adding in the elapsed time since the last tick.
352 if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
353 return stream_.streamTime;
355 gettimeofday( &now, NULL );
356 then = stream_.lastTickTimestamp;
357 return stream_.streamTime +
358 ((now.tv_sec + 0.000001 * now.tv_usec) -
359 (then.tv_sec + 0.000001 * then.tv_usec));
361 return stream_.streamTime;
// Accessor: the sample rate the open stream is actually running at.
365 unsigned int RtApi :: getStreamSampleRate( void )
369 return stream_.sampleRate;
373 // *************************************************** //
375 // OS/API-specific methods.
377 // *************************************************** //
379 #if defined(__MACOSX_CORE__)
381 // The OS X CoreAudio API is designed to use a separate callback
382 // procedure for each of its audio devices. A single RtAudio duplex
383 // stream using two different devices is supported here, though it
384 // cannot be guaranteed to always behave correctly because we cannot
385 // synchronize these two callbacks.
387 // A property listener is installed for over/underrun information.
388 // However, no functionality is currently provided to allow property
389 // listeners to trigger user handlers because it is unclear what could
390 // be done if a critical stream parameter (buffer size, sample rate,
391 // device disconnect) notification arrived. The listeners entail
392 // quite a bit of extra code and most likely, a user program wouldn't
393 // be prepared for the result anyway. However, we do provide a flag
394 // to the client callback function to inform of an over/underrun.
396 // The mechanism for querying and setting system parameters was
397 // updated (and perhaps simplified) in OS-X version 10.4. However,
398 // since 10.4 support is not necessarily available to all users, I've
399 // decided not to update the respective code at this time. Perhaps
400 // this will happen when Apple makes 10.4 free for everyone. :-)
402 // A structure to hold various information related to the CoreAudio API
// implementation.  The struct's opening line (and some members, e.g. the
// xrun flags and deviceBuffer initialized in the constructor below) are
// not visible in this listing.
405 AudioDeviceID id[2]; // device ids
406 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
407 AudioDeviceIOProcID procId[2];
409 UInt32 iStream[2]; // device stream index (or first if using multiple)
410 UInt32 nStreams[2]; // number of streams to use
413 pthread_cond_t condition;
414 int drainCounter; // Tracks callback counts when draining
415 bool internalDrain; // Indicates if stop is initiated from callback or not.
// Default constructor: one stream per direction, no devices, no xruns.
418 :deviceBuffer(0), drainCounter(0), internalDrain(false) { nStreams[0] = 1; nStreams[1] = 1; id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
// CoreAudio API implementation: trivial constructor; the destructor closes
// any open stream first because the base-class destructor (which frees
// shared stream resources) runs after this one.
421 RtApiCore :: RtApiCore()
423 // Nothing to do here.
426 RtApiCore :: ~RtApiCore()
428 // The subclass destructor gets called before the base class
429 // destructor, so close an existing stream before deallocating
430 // apiDeviceId memory.
431 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count audio devices by querying the size of the kAudioHardware device
// list and dividing by sizeof(AudioDeviceID).  Returns via error(WARNING)
// path on failure (the dataSize declaration and the failure return are not
// visible in this listing).
434 unsigned int RtApiCore :: getDeviceCount( void )
436 // Find out how many audio devices there are, if any.
438 OSStatus result = AudioHardwareGetPropertyInfo( kAudioHardwarePropertyDevices, &dataSize, NULL );
439 if ( result != noErr ) {
440 errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
441 error( RtError::WARNING );
445 return dataSize / sizeof( AudioDeviceID );
// Map CoreAudio's default *input* AudioDeviceID onto RtAudio's 0-based
// device index: fetch the default device id, fetch the full device list,
// and return the matching position.  Emits WARNING (and falls through to a
// return not visible here) on any failure.  The declaration of the local
// 'id' receiving the default device is not visible in this listing.
450 unsigned int RtApiCore :: getDefaultInputDevice( void )
452 unsigned int nDevices = getDeviceCount();
// With zero or one device, index 0 is trivially the default.
451 if ( nDevices <= 1 ) return 0;
454 UInt32 dataSize = sizeof( AudioDeviceID );
455 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
458 if ( result != noErr ) {
459 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
460 error( RtError::WARNING );
464 dataSize *= nDevices;
465 AudioDeviceID deviceList[ nDevices ];
466 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
467 if ( result != noErr ) {
468 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
469 error( RtError::WARNING );
// Linear search for the default id within the device list.
473 for ( unsigned int i=0; i<nDevices; i++ )
474 if ( id == deviceList[i] ) return i;
476 errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
477 error( RtError::WARNING );
// Mirror of getDefaultInputDevice() for the default *output* device: map
// CoreAudio's default output AudioDeviceID to RtAudio's 0-based index.
// The declaration of the local 'id' is not visible in this listing.
483 unsigned int RtApiCore :: getDefaultOutputDevice( void )
485 unsigned int nDevices = getDeviceCount();
484 if ( nDevices <= 1 ) return 0;
487 UInt32 dataSize = sizeof( AudioDeviceID );
488 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
491 if ( result != noErr ) {
492 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
493 error( RtError::WARNING );
497 dataSize *= nDevices;
498 AudioDeviceID deviceList[ nDevices ];
499 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
500 if ( result != noErr ) {
501 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
502 error( RtError::WARNING );
// Linear search for the default id within the device list.
506 for ( unsigned int i=0; i<nDevices; i++ )
507 if ( id == deviceList[i] ) return i;
509 errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
510 error( RtError::WARNING );
// Build an RtAudio::DeviceInfo for device index 'device': name
// (manufacturer + device name), output/input/duplex channel counts taken
// from the stream configurations, supported sample rates intersected with
// the static SAMPLE_RATES table, native format (always FLOAT32 on
// CoreAudio), and default-device flags.  Most failures are reported as
// WARNINGs; the 'name' buffer declaration and the free()/early-return
// lines are among those not visible in this listing.
514 RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
516 RtAudio::DeviceInfo info;
520 unsigned int nDevices = getDeviceCount();
521 if ( nDevices == 0 ) {
522 errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
523 error( RtError::INVALID_USE );
526 if ( device >= nDevices ) {
527 errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
528 error( RtError::INVALID_USE );
// Translate the RtAudio index into a CoreAudio AudioDeviceID.
531 AudioDeviceID deviceList[ nDevices ];
532 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
533 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
534 if ( result != noErr ) {
535 errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
536 error( RtError::WARNING );
540 AudioDeviceID id = deviceList[ device ];
542 // Get the device name.
546 result = AudioDeviceGetProperty( id, 0, false,
547 kAudioDevicePropertyDeviceManufacturer,
550 if ( result != noErr ) {
551 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
552 errorText_ = errorStream_.str();
553 error( RtError::WARNING );
// Compose "<manufacturer>: <device name>".
556 info.name.append( (const char *)name, strlen(name) );
557 info.name.append( ": " );
560 result = AudioDeviceGetProperty( id, 0, false,
561 kAudioDevicePropertyDeviceName,
563 if ( result != noErr ) {
564 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
565 errorText_ = errorStream_.str();
566 error( RtError::WARNING );
569 info.name.append( (const char *)name, strlen(name) );
571 // Get the output stream "configuration".
572 AudioBufferList *bufferList = nil;
573 result = AudioDeviceGetPropertyInfo( id, 0, false,
574 kAudioDevicePropertyStreamConfiguration,
576 if (result != noErr || dataSize == 0) {
577 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
578 errorText_ = errorStream_.str();
579 error( RtError::WARNING );
583 // Allocate the AudioBufferList.
584 bufferList = (AudioBufferList *) malloc( dataSize );
585 if ( bufferList == NULL ) {
586 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
587 error( RtError::WARNING );
591 result = AudioDeviceGetProperty( id, 0, false,
592 kAudioDevicePropertyStreamConfiguration,
593 &dataSize, bufferList );
594 if ( result != noErr ) {
596 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
597 errorText_ = errorStream_.str();
598 error( RtError::WARNING );
602 // Get output channel information.
// Sum channels across every output stream of the device.
603 unsigned int i, nStreams = bufferList->mNumberBuffers;
604 for ( i=0; i<nStreams; i++ )
605 info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
608 // Get the input stream "configuration".
// Same query with isInput == true; bufferList is presumably freed and
// re-malloc'd between these two passes (free() lines not visible here).
609 result = AudioDeviceGetPropertyInfo( id, 0, true,
610 kAudioDevicePropertyStreamConfiguration,
612 if (result != noErr || dataSize == 0) {
613 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
614 errorText_ = errorStream_.str();
615 error( RtError::WARNING );
619 // Allocate the AudioBufferList.
620 bufferList = (AudioBufferList *) malloc( dataSize );
621 if ( bufferList == NULL ) {
622 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
623 error( RtError::WARNING );
627 result = AudioDeviceGetProperty( id, 0, true,
628 kAudioDevicePropertyStreamConfiguration,
629 &dataSize, bufferList );
630 if ( result != noErr ) {
632 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
633 errorText_ = errorStream_.str();
634 error( RtError::WARNING );
638 // Get input channel information.
639 nStreams = bufferList->mNumberBuffers;
640 for ( i=0; i<nStreams; i++ )
641 info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
644 // If device opens for both playback and capture, we determine the channels.
// Duplex channel count is the smaller of the two directions.
645 if ( info.outputChannels > 0 && info.inputChannels > 0 )
646 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
648 // Probe the device sample rates.
// Query rates on the input side only when the device has no outputs.
649 bool isInput = false;
650 if ( info.outputChannels == 0 ) isInput = true;
652 // Determine the supported sample rates.
653 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
654 kAudioDevicePropertyAvailableNominalSampleRates,
657 if ( result != kAudioHardwareNoError || dataSize == 0 ) {
658 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
659 errorText_ = errorStream_.str();
660 error( RtError::WARNING );
664 UInt32 nRanges = dataSize / sizeof( AudioValueRange );
665 AudioValueRange rangeList[ nRanges ];
666 result = AudioDeviceGetProperty( id, 0, isInput,
667 kAudioDevicePropertyAvailableNominalSampleRates,
668 &dataSize, &rangeList );
670 if ( result != kAudioHardwareNoError ) {
671 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
672 errorText_ = errorStream_.str();
673 error( RtError::WARNING );
// Collapse the reported ranges into one overall [minimumRate, maximumRate]
// envelope, then keep every entry of the static SAMPLE_RATES table that
// falls inside it.
677 Float64 minimumRate = 100000000.0, maximumRate = 0.0;
678 for ( UInt32 i=0; i<nRanges; i++ ) {
679 if ( rangeList[i].mMinimum < minimumRate ) minimumRate = rangeList[i].mMinimum;
680 if ( rangeList[i].mMaximum > maximumRate ) maximumRate = rangeList[i].mMaximum;
683 info.sampleRates.clear();
684 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
685 if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate )
686 info.sampleRates.push_back( SAMPLE_RATES[k] );
689 if ( info.sampleRates.size() == 0 ) {
690 errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";
691 errorText_ = errorStream_.str();
692 error( RtError::WARNING );
696 // CoreAudio always uses 32-bit floating point data for PCM streams.
697 // Thus, any other "physical" formats supported by the device are of
698 // no interest to the client.
699 info.nativeFormats = RTAUDIO_FLOAT32;
701 if ( getDefaultOutputDevice() == device )
702 info.isDefaultOutput = true;
703 if ( getDefaultInputDevice() == device )
704 info.isDefaultInput = true;
// CoreAudio IOProc trampoline: recover the RtApiCore instance from the
// client-data pointer and dispatch to its callbackEvent().  A false return
// from callbackEvent() is mapped to kAudioHardwareUnspecifiedError so
// CoreAudio knows the callback failed.  (The client-data parameter
// 'infoPointer' in the signature is not visible in this listing.)
710 OSStatus callbackHandler( AudioDeviceID inDevice,
711 const AudioTimeStamp* inNow,
712 const AudioBufferList* inInputData,
713 const AudioTimeStamp* inInputTime,
714 AudioBufferList* outOutputData,
715 const AudioTimeStamp* inOutputTime,
718 CallbackInfo *info = (CallbackInfo *) infoPointer;
720 RtApiCore *object = (RtApiCore *) info->object;
721 if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
722 return kAudioHardwareUnspecifiedError;
724 return kAudioHardwareNoError;
// Property listener: on a processor-overload notification, record an
// over/underrun in the CoreHandle xrun flags (index 1 vs 0 is selected by
// a condition on a line not visible in this listing -- presumably the
// listener's isInput argument, i.e. [1] = input, [0] = output; confirm).
727 OSStatus deviceListener( AudioDeviceID inDevice,
730 AudioDevicePropertyID propertyID,
731 void* handlePointer )
733 CoreHandle *handle = (CoreHandle *) handlePointer;
734 if ( propertyID == kAudioDeviceProcessorOverload ) {
736 handle->xrun[1] = true;
738 handle->xrun[0] = true;
741 return kAudioHardwareNoError;
// File-local helper: report whether a device exposes the given property by
// issuing a size-only query (the return statement is not visible in this
// listing; presumably it tests 'result' against noErr).
744 static bool hasProperty( AudioDeviceID id, UInt32 channel, bool isInput, AudioDevicePropertyID property )
746 OSStatus result = AudioDeviceGetPropertyInfo( id, channel, isInput, property, NULL, NULL );
// Open/configure one direction of a CoreAudio stream: resolve the device,
// optionally set "hog" mode, locate the stream(s) carrying the requested
// channels, negotiate buffer size, then set virtual and physical stream
// formats.  This function continues beyond the end of this chunk; the
// error-return lines after most errorText_ assignments are not visible in
// this listing.
750 bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
751 unsigned int firstChannel, unsigned int sampleRate,
752 RtAudioFormat format, unsigned int *bufferSize,
753 RtAudio::StreamOptions *options )
756 unsigned int nDevices = getDeviceCount();
757 if ( nDevices == 0 ) {
758 // This should not happen because a check is made before this function is called.
759 errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
763 if ( device >= nDevices ) {
764 // This should not happen because a check is made before this function is called.
765 errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
// Translate the RtAudio index into a CoreAudio AudioDeviceID.
769 AudioDeviceID deviceList[ nDevices ];
770 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
771 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
772 if ( result != noErr ) {
773 errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
777 AudioDeviceID id = deviceList[ device ];
779 // Setup for stream mode.
780 bool isInput = false;
781 if ( mode == INPUT ) isInput = true;
783 // Set or disable "hog" mode.
// (Declaration of 'doHog' is not visible in this listing.)
784 dataSize = sizeof( UInt32 );
786 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) doHog = 1;
787 result = AudioHardwareSetProperty( kAudioHardwarePropertyHogModeIsAllowed, dataSize, &doHog );
788 if ( result != noErr ) {
789 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
790 errorText_ = errorStream_.str();
794 // Get the stream "configuration".
795 AudioBufferList *bufferList;
796 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
797 kAudioDevicePropertyStreamConfiguration,
799 if (result != noErr || dataSize == 0) {
800 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
801 errorText_ = errorStream_.str();
805 // Allocate the AudioBufferList.
806 bufferList = (AudioBufferList *) malloc( dataSize );
807 if ( bufferList == NULL ) {
808 errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
812 result = AudioDeviceGetProperty( id, 0, isInput,
813 kAudioDevicePropertyStreamConfiguration,
814 &dataSize, bufferList );
815 if ( result != noErr ) {
817 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
818 errorText_ = errorStream_.str();
822 // Search for one or more streams that contain the desired number of
823 // channels. CoreAudio devices can have an arbitrary number of
824 // streams and each stream can have an arbitrary number of channels.
825 // For each stream, a single buffer of interleaved samples is
826 // provided. RtAudio prefers the use of one stream of interleaved
827 // data or multiple consecutive single-channel streams. However, we
828 // now support multiple consecutive multi-channel streams of
829 // interleaved data as well.
830 UInt32 iStream, offsetCounter = firstChannel;
831 UInt32 nStreams = bufferList->mNumberBuffers;
832 bool monoMode = false;
833 bool foundStream = false;
835 // First check that the device supports the requested number of
837 UInt32 deviceChannels = 0;
838 for ( iStream=0; iStream<nStreams; iStream++ )
839 deviceChannels += bufferList->mBuffers[iStream].mNumberChannels;
841 if ( deviceChannels < ( channels + firstChannel ) ) {
843 errorStream_ << "RtApiCore::probeDeviceOpen: the device (" << device << ") does not support the requested channel count.";
844 errorText_ = errorStream_.str();
848 // Look for a single stream meeting our needs.
849 UInt32 firstStream, streamCount = 1, streamChannels = 0, channelOffset = 0;
850 for ( iStream=0; iStream<nStreams; iStream++ ) {
851 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
852 if ( streamChannels >= channels + offsetCounter ) {
853 firstStream = iStream;
854 channelOffset = offsetCounter;
// Stop once the requested first channel lies within this stream;
// otherwise skip past this stream's channels.
858 if ( streamChannels > offsetCounter ) break;
859 offsetCounter -= streamChannels;
862 // If we didn't find a single stream above, then we should be able
863 // to meet the channel specification with multiple streams.
864 if ( foundStream == false ) {
866 offsetCounter = firstChannel;
867 for ( iStream=0; iStream<nStreams; iStream++ ) {
868 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
869 if ( streamChannels > offsetCounter ) break;
870 offsetCounter -= streamChannels;
873 firstStream = iStream;
874 channelOffset = offsetCounter;
875 Int32 channelCounter = channels + offsetCounter - streamChannels;
877 if ( streamChannels > 1 ) monoMode = false;
// Consume consecutive streams until the channel request is satisfied.
878 while ( channelCounter > 0 ) {
879 streamChannels = bufferList->mBuffers[++iStream].mNumberChannels;
880 if ( streamChannels > 1 ) monoMode = false;
881 channelCounter -= streamChannels;
888 // Determine the buffer size.
889 AudioValueRange bufferRange;
890 dataSize = sizeof( AudioValueRange );
891 result = AudioDeviceGetProperty( id, 0, isInput,
892 kAudioDevicePropertyBufferFrameSizeRange,
893 &dataSize, &bufferRange );
894 if ( result != noErr ) {
895 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
896 errorText_ = errorStream_.str();
// Clamp the caller's requested buffer size into the device's range.
900 if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
901 else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
902 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
904 // Set the buffer size. For multiple streams, I'm assuming we only
905 // need to make this setting for the master channel.
906 UInt32 theSize = (UInt32) *bufferSize;
907 dataSize = sizeof( UInt32 );
908 result = AudioDeviceSetProperty( id, NULL, 0, isInput,
909 kAudioDevicePropertyBufferFrameSize,
910 dataSize, &theSize );
912 if ( result != noErr ) {
913 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
914 errorText_ = errorStream_.str();
918 // If attempting to setup a duplex stream, the bufferSize parameter
919 // MUST be the same in both directions!
920 *bufferSize = theSize;
921 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
922 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
923 errorText_ = errorStream_.str();
927 stream_.bufferSize = *bufferSize;
928 stream_.nBuffers = 1;
930 // Get the stream ID(s) so we can set the stream format. We'll have
931 // to do this for each stream.
932 AudioStreamID streamIDs[ nStreams ];
933 dataSize = nStreams * sizeof( AudioStreamID );
934 result = AudioDeviceGetProperty( id, 0, isInput,
935 kAudioDevicePropertyStreams,
936 &dataSize, &streamIDs );
937 if ( result != noErr ) {
938 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream ID(s) for device (" << device << ").";
939 errorText_ = errorStream_.str();
943 // Now set the stream format. Also, check the physical format of the
944 // device and change that if necessary.
945 AudioStreamBasicDescription description;
946 dataSize = sizeof( AudioStreamBasicDescription );
// (Declaration of 'updateFormat' is not visible in this listing.)
949 for ( UInt32 i=0; i<streamCount; i++ ) {
951 result = AudioStreamGetProperty( streamIDs[firstStream+i], 0,
952 kAudioStreamPropertyVirtualFormat,
953 &dataSize, &description );
955 if ( result != noErr ) {
956 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
957 errorText_ = errorStream_.str();
961 // Set the sample rate and data format id. However, only make the
962 // change if the sample rate is not within 1.0 of the desired
963 // rate and the format is not linear pcm.
964 updateFormat = false;
965 if ( fabs( description.mSampleRate - (double)sampleRate ) > 1.0 ) {
966 description.mSampleRate = (double) sampleRate;
970 if ( description.mFormatID != kAudioFormatLinearPCM ) {
971 description.mFormatID = kAudioFormatLinearPCM;
975 if ( updateFormat ) {
976 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0,
977 kAudioStreamPropertyVirtualFormat,
978 dataSize, &description );
979 if ( result != noErr ) {
980 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
981 errorText_ = errorStream_.str();
986 // Now check the physical format.
987 result = AudioStreamGetProperty( streamIDs[firstStream+i], 0,
988 kAudioStreamPropertyPhysicalFormat,
989 &dataSize, &description );
990 if ( result != noErr ) {
991 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
992 errorText_ = errorStream_.str();
// Only touch the physical format when it is not already linear PCM of at
// least 24 bits; try candidates from highest to lowest bit depth.
996 if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 24 ) {
997 description.mFormatID = kAudioFormatLinearPCM;
998 AudioStreamBasicDescription testDescription = description;
999 unsigned long formatFlags;
1001 // We'll try higher bit rates first and then work our way down.
1002 testDescription.mBitsPerChannel = 32;
// NOTE(review): operator-precedence bug.  '&' binds tighter than '|', so
// this evaluates as mFormatFlags | (IsFloat & ~IsSignedInteger), which
// does NOT clear the signed-integer flag.  The signed-integer attempt at
// original line 1010 parenthesizes the OR explicitly; the intended
// expression here is almost certainly
// (description.mFormatFlags | kLinearPCMFormatFlagIsFloat) & ~kLinearPCMFormatFlagIsSignedInteger.
1003 formatFlags = description.mFormatFlags | kLinearPCMFormatFlagIsFloat & ~kLinearPCMFormatFlagIsSignedInteger;
1004 testDescription.mFormatFlags = formatFlags;
1005 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1006 if ( result == noErr ) continue;
// 32-bit signed integer attempt.
1008 testDescription = description;
1009 testDescription.mBitsPerChannel = 32;
1010 formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger) & ~kLinearPCMFormatFlagIsFloat;
1011 testDescription.mFormatFlags = formatFlags;
1012 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1013 if ( result == noErr ) continue;
// 24-, 16- and 8-bit integer attempts reuse the signed-integer flags.
1015 testDescription = description;
1016 testDescription.mBitsPerChannel = 24;
1017 testDescription.mFormatFlags = formatFlags;
1018 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1019 if ( result == noErr ) continue;
1021 testDescription = description;
1022 testDescription.mBitsPerChannel = 16;
1023 testDescription.mFormatFlags = formatFlags;
1024 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1025 if ( result == noErr ) continue;
1027 testDescription = description;
1028 testDescription.mBitsPerChannel = 8;
1029 testDescription.mFormatFlags = formatFlags;
1030 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1031 if ( result != noErr ) {
1032 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
1033 errorText_ = errorStream_.str();
1039 // Get the stream latency. There can be latency in both the device
1040 // and the stream. First, attempt to get the device latency on the
1041 // master channel or the first open channel. Errors that might
1042 // occur here are not deemed critical.
1044 // ***** CHECK THIS ***** //
1045 UInt32 latency, channel = 0;
1046 dataSize = sizeof( UInt32 );
1047 AudioDevicePropertyID property = kAudioDevicePropertyLatency;
1048 if ( hasProperty( id, channel, isInput, property ) == true ) {
1049 result = AudioDeviceGetProperty( id, channel, isInput, property, &dataSize, &latency );
1050 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
1052 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
1053 errorText_ = errorStream_.str();
1054 error( RtError::WARNING );
1058 // Now try to get the stream latency. For multiple streams, I assume the
1059 // latency is equal for each.
1060 result = AudioStreamGetProperty( streamIDs[firstStream], 0, property, &dataSize, &latency );
1061 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] += latency;
1063 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream latency for device (" << device << ").";
1064 errorText_ = errorStream_.str();
1065 error( RtError::WARNING );
1068 // Byte-swapping: According to AudioHardware.h, the stream data will
1069 // always be presented in native-endian format, so we should never
1070 // need to byte swap.
1071 stream_.doByteSwap[mode] = false;
1073 // From the CoreAudio documentation, PCM data must be supplied as
1075 stream_.userFormat = format;
1076 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1078 if ( streamCount == 1 )
1079 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
1080 else // multiple streams
1081 stream_.nDeviceChannels[mode] = channels;
1082 stream_.nUserChannels[mode] = channels;
1083 stream_.channelOffset[mode] = channelOffset; // offset within a CoreAudio stream
1084 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1085 else stream_.userInterleaved = true;
1086 stream_.deviceInterleaved[mode] = true;
1087 if ( monoMode == true ) stream_.deviceInterleaved[mode] = false;
1089 // Set flags for buffer conversion.
1090 stream_.doConvertBuffer[mode] = false;
1091 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1092 stream_.doConvertBuffer[mode] = true;
1093 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
1094 stream_.doConvertBuffer[mode] = true;
1095 if ( streamCount == 1 ) {
1096 if ( stream_.nUserChannels[mode] > 1 &&
1097 stream_.userInterleaved != stream_.deviceInterleaved[mode] )
1098 stream_.doConvertBuffer[mode] = true;
1100 else if ( monoMode && stream_.userInterleaved )
1101 stream_.doConvertBuffer[mode] = true;
1103 // Allocate our CoreHandle structure for the stream.
1104 CoreHandle *handle = 0;
1105 if ( stream_.apiHandle == 0 ) {
1107 handle = new CoreHandle;
1109 catch ( std::bad_alloc& ) {
1110 errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
1114 if ( pthread_cond_init( &handle->condition, NULL ) ) {
1115 errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
1118 stream_.apiHandle = (void *) handle;
1121 handle = (CoreHandle *) stream_.apiHandle;
1122 handle->iStream[mode] = firstStream;
1123 handle->nStreams[mode] = streamCount;
1124 handle->id[mode] = id;
1126 // Allocate necessary internal buffers.
1127 unsigned long bufferBytes;
1128 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1129 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1130 if ( stream_.userBuffer[mode] == NULL ) {
1131 errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
1135 // If possible, we will make use of the CoreAudio stream buffers as
1136 // "device buffers". However, we can't do this if using multiple
1138 if ( stream_.doConvertBuffer[mode] && handle->nStreams[mode] > 1 ) {
1140 bool makeBuffer = true;
1141 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
1142 if ( mode == INPUT ) {
1143 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1144 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1145 if ( bufferBytes <= bytesOut ) makeBuffer = false;
1150 bufferBytes *= *bufferSize;
1151 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1152 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1153 if ( stream_.deviceBuffer == NULL ) {
1154 errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
1160 stream_.sampleRate = sampleRate;
1161 stream_.device[mode] = device;
1162 stream_.state = STREAM_STOPPED;
1163 stream_.callbackInfo.object = (void *) this;
1165 // Setup the buffer conversion information structure.
1166 if ( stream_.doConvertBuffer[mode] ) {
1167 if ( streamCount > 1 ) setConvertInfo( mode, 0 );
1168 else setConvertInfo( mode, channelOffset );
1171 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
1172 // Only one callback procedure per device.
1173 stream_.mode = DUPLEX;
1175 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1176 result = AudioDeviceCreateIOProcID( id, callbackHandler, (void *) &stream_.callbackInfo, &handle->procId[mode] );
1178 // deprecated in favor of AudioDeviceCreateIOProcID()
1179 result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
1181 if ( result != noErr ) {
1182 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
1183 errorText_ = errorStream_.str();
1186 if ( stream_.mode == OUTPUT && mode == INPUT )
1187 stream_.mode = DUPLEX;
1189 stream_.mode = mode;
1192 // Setup the device property listener for over/underload.
1193 result = AudioDeviceAddPropertyListener( id, 0, isInput,
1194 kAudioDeviceProcessorOverload,
1195 deviceListener, (void *) handle );
1201 pthread_cond_destroy( &handle->condition );
1203 stream_.apiHandle = 0;
1206 for ( int i=0; i<2; i++ ) {
1207 if ( stream_.userBuffer[i] ) {
1208 free( stream_.userBuffer[i] );
1209 stream_.userBuffer[i] = 0;
1213 if ( stream_.deviceBuffer ) {
1214 free( stream_.deviceBuffer );
1215 stream_.deviceBuffer = 0;
1221 void RtApiCore :: closeStream( void )
// Close an open CoreAudio stream: stop any running IOProc(s), remove
// them from the device(s), free the user/device buffers and the
// CoreHandle, and reset stream state to CLOSED.  Calling this with no
// open stream is a non-fatal WARNING.
1223 if ( stream_.state == STREAM_CLOSED ) {
1224 errorText_ = "RtApiCore::closeStream(): no open stream to close!";
1225 error( RtError::WARNING );
1229 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
// Tear down the output-side IOProc (device handle->id[0]).
1230 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1231 if ( stream_.state == STREAM_RUNNING )
1232 AudioDeviceStop( handle->id[0], callbackHandler );
1233 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1234 AudioDeviceDestroyIOProcID( handle->id[0], handle->procId[0] );
1236 // deprecated in favor of AudioDeviceDestroyIOProcID()
1237 AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
// Tear down the input-side IOProc, but only when input uses a
// different device than output (a duplex stream on a single device
// shares one IOProc, installed on handle->id[0]).
1241 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1242 if ( stream_.state == STREAM_RUNNING )
1243 AudioDeviceStop( handle->id[1], callbackHandler );
1244 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1245 AudioDeviceDestroyIOProcID( handle->id[1], handle->procId[1] );
1247 // deprecated in favor of AudioDeviceDestroyIOProcID()
1248 AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
// Release the per-mode user buffers (index 0 = output, 1 = input) ...
1252 for ( int i=0; i<2; i++ ) {
1253 if ( stream_.userBuffer[i] ) {
1254 free( stream_.userBuffer[i] );
1255 stream_.userBuffer[i] = 0;
// ... and the shared internal device buffer, if one was allocated.
1259 if ( stream_.deviceBuffer ) {
1260 free( stream_.deviceBuffer );
1261 stream_.deviceBuffer = 0;
1264 // Destroy pthread condition variable.
1265 pthread_cond_destroy( &handle->condition );
1267 stream_.apiHandle = 0;
1269 stream_.mode = UNINITIALIZED;
1270 stream_.state = STREAM_CLOSED;
1273 void RtApiCore :: startStream( void )
// Start the CoreAudio IOProc(s) for the open stream.  A stream that
// is already running produces a non-fatal WARNING; a failure from
// AudioDeviceStart() raises RtError::SYSTEM_ERROR after the mutex is
// released.
1276 if ( stream_.state == STREAM_RUNNING ) {
1277 errorText_ = "RtApiCore::startStream(): the stream is already running!";
1278 error( RtError::WARNING );
1282 MUTEX_LOCK( &stream_.mutex );
1284 OSStatus result = noErr;
1285 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
// Start the output device first ...
1286 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1288 result = AudioDeviceStart( handle->id[0], callbackHandler );
1289 if ( result != noErr ) {
1290 errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
1291 errorText_ = errorStream_.str();
// ... then the input device, when it is distinct from the output
// device (same-device duplex shares one IOProc).
1296 if ( stream_.mode == INPUT ||
1297 ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1299 result = AudioDeviceStart( handle->id[1], callbackHandler );
1300 if ( result != noErr ) {
1301 errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
1302 errorText_ = errorStream_.str();
// Reset the drain bookkeeping consumed by stopStream()/abortStream()
// and the callback, then mark the stream running.
1307 handle->drainCounter = 0;
1308 handle->internalDrain = false;
1309 stream_.state = STREAM_RUNNING;
1312 MUTEX_UNLOCK( &stream_.mutex );
1314 if ( result == noErr ) return;
1315 error( RtError::SYSTEM_ERROR );
1318 void RtApiCore :: stopStream( void )
// Stop the stream after letting buffered output drain.  If the
// stream produces output and no drain is already in progress,
// drainCounter is set to 1 to ask the callback to play out its
// remaining data; this thread then blocks on the condition variable
// until the callback signals completion (see callbackEvent()).
1321 if ( stream_.state == STREAM_STOPPED ) {
1322 errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
1323 error( RtError::WARNING );
1327 MUTEX_LOCK( &stream_.mutex );
1329 if ( stream_.state == STREAM_STOPPED ) {
1330 MUTEX_UNLOCK( &stream_.mutex );
1334 OSStatus result = noErr;
1335 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1336 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter != 0 here means abortStream() (or the callback itself)
// already initiated a drain, so the hand-shake wait is skipped.
1338 if ( handle->drainCounter == 0 ) {
1339 handle->drainCounter = 1;
1340 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
1343 result = AudioDeviceStop( handle->id[0], callbackHandler );
1344 if ( result != noErr ) {
1345 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
1346 errorText_ = errorStream_.str();
// Stop the input device separately only when it differs from the
// output device.
1351 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1353 result = AudioDeviceStop( handle->id[1], callbackHandler );
1354 if ( result != noErr ) {
1355 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
1356 errorText_ = errorStream_.str();
1361 stream_.state = STREAM_STOPPED;
1364 MUTEX_UNLOCK( &stream_.mutex );
1366 if ( result == noErr ) return;
1367 error( RtError::SYSTEM_ERROR );
1370 void RtApiCore :: abortStream( void )
// Stop the stream immediately, without draining buffered output.
1373 if ( stream_.state == STREAM_STOPPED ) {
1374 errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
1375 error( RtError::WARNING );
1379 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
// Pre-setting drainCounter non-zero makes the subsequent stop skip
// the pthread_cond_wait() drain hand-shake in stopStream().
1380 handle->drainCounter = 1;
1385 bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
1386 const AudioBufferList *inBufferList,
1387 const AudioBufferList *outBufferList )
// Per-buffer IOProc workhorse, invoked from the CoreAudio HAL thread
// via callbackHandler.  Runs the user callback (unless draining or
// this is the input-device call of a two-device duplex stream),
// copies/converts user data into the CoreAudio output buffer list,
// and copies/converts the input buffer list back to the user buffer.
// Returns SUCCESS when the stream should continue.
1389 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
1390 if ( stream_.state == STREAM_CLOSED ) {
1391 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
1392 error( RtError::WARNING );
1396 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1397 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1399 // Check if we were draining the stream and signal is finished.
// drainCounter > 3 means several zero-filled buffers have already
// been emitted; wake the thread blocked in stopStream() unless the
// drain was started from inside the callback (internalDrain).
1400 if ( handle->drainCounter > 3 ) {
1401 if ( handle->internalDrain == false )
1402 pthread_cond_signal( &handle->condition );
1408 MUTEX_LOCK( &stream_.mutex );
1410 // The state might change while waiting on a mutex.
1411 if ( stream_.state == STREAM_STOPPED ) {
1412 MUTEX_UNLOCK( &stream_.mutex );
1416 AudioDeviceID outputDevice = handle->id[0];
1418 // Invoke user callback to get fresh output data UNLESS we are
1419 // draining stream or duplex mode AND the input/output devices are
1420 // different AND this function is called for the input device.
1421 if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
1422 RtAudioCallback callback = (RtAudioCallback) info->callback;
1423 double streamTime = getStreamTime();
// Report and clear any xrun flags recorded by deviceListener.
1424 RtAudioStreamStatus status = 0;
1425 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
1426 status |= RTAUDIO_OUTPUT_UNDERFLOW;
1427 handle->xrun[0] = false;
1429 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
1430 status |= RTAUDIO_INPUT_OVERFLOW;
1431 handle->xrun[1] = false;
// Callback return value doubles as the drain request: 0 = continue,
// 1 = stop after draining, 2 = abort immediately.
1433 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
1434 stream_.bufferSize, streamTime, status, info->userData );
1435 if ( handle->drainCounter == 2 ) {
1436 MUTEX_UNLOCK( &stream_.mutex );
1440 else if ( handle->drainCounter == 1 )
1441 handle->internalDrain = true;
// ----- Output side: fill the CoreAudio output buffer list. -----
1444 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
1446 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
1448 if ( handle->nStreams[0] == 1 ) {
1449 memset( outBufferList->mBuffers[handle->iStream[0]].mData,
1451 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1453 else { // fill multiple streams with zeros
1454 for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
1455 memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1457 outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
1461 else if ( handle->nStreams[0] == 1 ) {
1462 if ( stream_.doConvertBuffer[0] ) { // convert directly to CoreAudio stream buffer
1463 convertBuffer( (char *) outBufferList->mBuffers[handle->iStream[0]].mData,
1464 stream_.userBuffer[0], stream_.convertInfo[0] );
1466 else { // copy from user buffer
1467 memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
1468 stream_.userBuffer[0],
1469 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1472 else { // fill multiple streams
1473 Float32 *inBuffer = (Float32 *) stream_.userBuffer[0];
1474 if ( stream_.doConvertBuffer[0] ) {
1475 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
1476 inBuffer = (Float32 *) stream_.deviceBuffer;
1479 if ( stream_.deviceInterleaved[0] == false ) { // mono mode
1480 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1481 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
1482 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1483 (void *)&inBuffer[i*stream_.bufferSize], bufferBytes );
1486 else { // fill multiple multi-channel streams with interleaved data
1487 UInt32 streamChannels, channelsLeft, inJump, outJump, inOffset;
1490 bool inInterleaved = ( stream_.userInterleaved ) ? true : false;
1491 UInt32 inChannels = stream_.nUserChannels[0];
1492 if ( stream_.doConvertBuffer[0] ) {
1493 inInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
1494 inChannels = stream_.nDeviceChannels[0];
// inOffset: distance between successive samples of one channel in
// the source buffer (1 when interleaved, bufferSize when planar).
1497 if ( inInterleaved ) inOffset = 1;
1498 else inOffset = stream_.bufferSize;
1500 channelsLeft = inChannels;
1501 for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
1503 out = (Float32 *) outBufferList->mBuffers[handle->iStream[0]+i].mData;
1504 streamChannels = outBufferList->mBuffers[handle->iStream[0]+i].mNumberChannels;
1507 // Account for possible channel offset in first stream
1508 if ( i == 0 && stream_.channelOffset[0] > 0 ) {
1509 streamChannels -= stream_.channelOffset[0];
1510 outJump = stream_.channelOffset[0];
1514 // Account for possible unfilled channels at end of the last stream
1515 if ( streamChannels > channelsLeft ) {
1516 outJump = streamChannels - channelsLeft;
1517 streamChannels = channelsLeft;
1520 // Determine input buffer offsets and skips
1521 if ( inInterleaved ) {
1522 inJump = inChannels;
1523 in += inChannels - channelsLeft;
1527 in += (inChannels - channelsLeft) * inOffset;
1530 for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
1531 for ( unsigned int j=0; j<streamChannels; j++ ) {
1532 *out++ = in[j*inOffset];
1537 channelsLeft -= streamChannels;
// While draining, count the buffers emitted since the drain request
// so the signal block at the top can fire once enough have played.
1542 if ( handle->drainCounter ) {
1543 handle->drainCounter++;
// ----- Input side: read the CoreAudio input buffer list. -----
1548 AudioDeviceID inputDevice;
1549 inputDevice = handle->id[1];
1550 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
1552 if ( handle->nStreams[1] == 1 ) {
1553 if ( stream_.doConvertBuffer[1] ) { // convert directly from CoreAudio stream buffer
1554 convertBuffer( stream_.userBuffer[1],
1555 (char *) inBufferList->mBuffers[handle->iStream[1]].mData,
1556 stream_.convertInfo[1] );
1558 else { // copy to user buffer
1559 memcpy( stream_.userBuffer[1],
1560 inBufferList->mBuffers[handle->iStream[1]].mData,
1561 inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
1564 else { // read from multiple streams
1565 Float32 *outBuffer = (Float32 *) stream_.userBuffer[1];
1566 if ( stream_.doConvertBuffer[1] ) outBuffer = (Float32 *) stream_.deviceBuffer;
1568 if ( stream_.deviceInterleaved[1] == false ) { // mono mode
1569 UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
1570 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
1571 memcpy( (void *)&outBuffer[i*stream_.bufferSize],
1572 inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
1575 else { // read from multiple multi-channel streams
1576 UInt32 streamChannels, channelsLeft, inJump, outJump, outOffset;
1579 bool outInterleaved = ( stream_.userInterleaved ) ? true : false;
1580 UInt32 outChannels = stream_.nUserChannels[1];
1581 if ( stream_.doConvertBuffer[1] ) {
1582 outInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
1583 outChannels = stream_.nDeviceChannels[1];
// outOffset mirrors inOffset above, but for the destination buffer.
1586 if ( outInterleaved ) outOffset = 1;
1587 else outOffset = stream_.bufferSize;
1589 channelsLeft = outChannels;
1590 for ( unsigned int i=0; i<handle->nStreams[1]; i++ ) {
1592 in = (Float32 *) inBufferList->mBuffers[handle->iStream[1]+i].mData;
1593 streamChannels = inBufferList->mBuffers[handle->iStream[1]+i].mNumberChannels;
1596 // Account for possible channel offset in first stream
1597 if ( i == 0 && stream_.channelOffset[1] > 0 ) {
1598 streamChannels -= stream_.channelOffset[1];
1599 inJump = stream_.channelOffset[1];
1603 // Account for possible unread channels at end of the last stream
1604 if ( streamChannels > channelsLeft ) {
1605 inJump = streamChannels - channelsLeft;
1606 streamChannels = channelsLeft;
1609 // Determine output buffer offsets and skips
1610 if ( outInterleaved ) {
1611 outJump = outChannels;
1612 out += outChannels - channelsLeft;
1616 out += (outChannels - channelsLeft) * outOffset;
1619 for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
1620 for ( unsigned int j=0; j<streamChannels; j++ ) {
1621 out[j*outOffset] = *in++;
1626 channelsLeft -= streamChannels;
1630 if ( stream_.doConvertBuffer[1] ) { // convert from our internal "device" buffer
1631 convertBuffer( stream_.userBuffer[1],
1632 stream_.deviceBuffer,
1633 stream_.convertInfo[1] );
1639 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream's running time by one buffer period.
1641 RtApi::tickStreamTime();
1645 const char* RtApiCore :: getErrorCode( OSStatus code )
// Map a CoreAudio OSStatus error code to the corresponding constant
// name as a printable string, for inclusion in error messages.
// Unrecognized codes yield "CoreAudio unknown error".
1649 case kAudioHardwareNotRunningError:
1650 return "kAudioHardwareNotRunningError";
1652 case kAudioHardwareUnspecifiedError:
1653 return "kAudioHardwareUnspecifiedError";
1655 case kAudioHardwareUnknownPropertyError:
1656 return "kAudioHardwareUnknownPropertyError";
1658 case kAudioHardwareBadPropertySizeError:
1659 return "kAudioHardwareBadPropertySizeError";
1661 case kAudioHardwareIllegalOperationError:
1662 return "kAudioHardwareIllegalOperationError";
1664 case kAudioHardwareBadObjectError:
1665 return "kAudioHardwareBadObjectError";
1667 case kAudioHardwareBadDeviceError:
1668 return "kAudioHardwareBadDeviceError";
1670 case kAudioHardwareBadStreamError:
1671 return "kAudioHardwareBadStreamError";
1673 case kAudioHardwareUnsupportedOperationError:
1674 return "kAudioHardwareUnsupportedOperationError";
1676 case kAudioDeviceUnsupportedFormatError:
1677 return "kAudioDeviceUnsupportedFormatError";
1679 case kAudioDevicePermissionsError:
1680 return "kAudioDevicePermissionsError";
1683 return "CoreAudio unknown error";
1687 //******************** End of __MACOSX_CORE__ *********************//
1690 #if defined(__UNIX_JACK__)
1692 // JACK is a low-latency audio server, originally written for the
1693 // GNU/Linux operating system and now also ported to OS-X. It can
1694 // connect a number of different applications to an audio device, as
1695 // well as allowing them to share audio between themselves.
1697 // When using JACK with RtAudio, "devices" refer to JACK clients that
1698 // have ports connected to the server. The JACK server is typically
1699 // started in a terminal as follows:
1701 // jackd -d alsa -d hw:0
1703 // or through an interface program such as qjackctl. Many of the
1704 // parameters normally set for a stream are fixed by the JACK server
1705 // and can be specified when the JACK server is started. In
1708 // jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
1710 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
1711 // frames, and number of buffers = 4. Once the server is running, it
1712 // is not possible to override these values. If the values are not
1713 // specified in the command-line, the JACK server uses default values.
1715 // The JACK server does not have to be running when an instance of
1716 // RtApiJack is created, though the function getDeviceCount() will
1717 // report 0 devices found until JACK has been started. When no
1718 // devices are available (i.e., the JACK server is not running), a
1719 // stream cannot be opened.
1721 #include <jack/jack.h>
1724 // A structure to hold various information related to the Jack API
1727 jack_client_t *client; // Connection to the Jack server (0 until opened)
1728 jack_port_t **ports[2]; // Registered ports per mode: [0] = playback, [1] = capture
1729 std::string deviceName[2]; // Jack client (device) name per mode
1731 pthread_cond_t condition; // Signals stopStream() when an output drain completes
1732 int drainCounter; // Tracks callback counts when draining
1733 bool internalDrain; // Indicates if stop is initiated from callback or not.
// Default state: no client, no ports, no drain in progress, xrun
// flags cleared.
1736 :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
// Error callback handed to jack_set_error_function() to suppress
// Jack's internal error reporting in non-debug builds.  The message
// argument is intentionally ignored.
// (Fix: dropped the stray ';' after the function body, which formed
// an empty declaration and triggers -Wextra-semi style warnings.)
void jackSilentError( const char * ) {}
1741 RtApiJack :: RtApiJack()
// Constructor: no stream resources are acquired here.  In non-debug
// builds, Jack's internal error printing is silenced by installing
// the no-op jackSilentError handler.
1743 // Nothing to do here.
1744 #if !defined(__RTAUDIO_DEBUG__)
1745 // Turn off Jack's internal error reporting.
1746 jack_set_error_function( &jackSilentError );
1750 RtApiJack :: ~RtApiJack()
// Destructor: ensure any open stream is shut down and its resources
// released before the object goes away.
1752 if ( stream_.state != STREAM_CLOSED ) closeStream();
1755 unsigned int RtApiJack :: getDeviceCount( void )
// Count Jack "devices" by connecting as a temporary client and
// tallying distinct client-name prefixes (text before the first ':')
// among all registered ports.  Returns 0 when no Jack server is
// reachable.
1757 // See if we can become a jack client.
1758 jack_options_t options = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption;
1759 jack_status_t *status = NULL;
1760 jack_client_t *client = jack_client_open( "RtApiJackCount", options, status );
1761 if ( client == 0 ) return 0;
1764 std::string port, previousPort;
1765 unsigned int nChannels = 0, nDevices = 0;
// NOTE(review): no free of the jack_get_ports() result is visible in
// this function — verify against jack_free()/free requirements.
1766 ports = jack_get_ports( client, NULL, NULL, 0 );
1768 // Parse the port names up to the first colon (:).
1771 port = (char *) ports[ nChannels ];
1772 iColon = port.find(":");
1773 if ( iColon != std::string::npos ) {
// Keep the colon here (substr to iColon + 1); a new prefix means a
// new device.
1774 port = port.substr( 0, iColon + 1 );
1775 if ( port != previousPort ) {
1777 previousPort = port;
1780 } while ( ports[++nChannels] );
1784 jack_client_close( client );
1788 RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
// Probe one Jack "device" (client-name prefix) and fill a DeviceInfo:
// name, channel counts, the server's (single) sample rate, and the
// native format (always FLOAT32 for Jack).  Connects as a temporary
// client; warns and returns an unprobed info on connection failure.
1790 RtAudio::DeviceInfo info;
1791 info.probed = false;
1793 jack_options_t options = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption
1794 jack_status_t *status = NULL;
1795 jack_client_t *client = jack_client_open( "RtApiJackInfo", options, status );
1796 if ( client == 0 ) {
1797 errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
1798 error( RtError::WARNING );
1803 std::string port, previousPort;
1804 unsigned int nPorts = 0, nDevices = 0;
1805 ports = jack_get_ports( client, NULL, NULL, 0 );
1807 // Parse the port names up to the first colon (:).
// Walk distinct client-name prefixes until the requested device
// index is reached; that prefix becomes the device name.
1810 port = (char *) ports[ nPorts ];
1811 iColon = port.find(":");
1812 if ( iColon != std::string::npos ) {
1813 port = port.substr( 0, iColon );
1814 if ( port != previousPort ) {
1815 if ( nDevices == device ) info.name = port;
1817 previousPort = port;
1820 } while ( ports[++nPorts] );
1824 if ( device >= nDevices ) {
1825 errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
1826 error( RtError::INVALID_USE );
1829 // Get the current jack server sample rate.
// Jack fixes the rate server-wide, so it is the only entry offered.
1830 info.sampleRates.clear();
1831 info.sampleRates.push_back( jack_get_sample_rate( client ) );
1833 // Count the available ports containing the client name as device
1834 // channels. Jack "input ports" equal RtAudio output channels.
1835 unsigned int nChannels = 0;
1836 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );
1838 while ( ports[ nChannels ] ) nChannels++;
1840 info.outputChannels = nChannels;
1843 // Jack "output ports" equal RtAudio input channels.
1845 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );
1847 while ( ports[ nChannels ] ) nChannels++;
1849 info.inputChannels = nChannels;
1852 if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
1853 jack_client_close(client);
1854 errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
1855 error( RtError::WARNING );
1859 // If device opens for both playback and capture, we determine the channels.
1860 if ( info.outputChannels > 0 && info.inputChannels > 0 )
1861 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
1863 // Jack always uses 32-bit floats.
1864 info.nativeFormats = RTAUDIO_FLOAT32;
1866 // Jack doesn't provide default devices so we'll use the first available one.
1867 if ( device == 0 && info.outputChannels > 0 )
1868 info.isDefaultOutput = true;
1869 if ( device == 0 && info.inputChannels > 0 )
1870 info.isDefaultInput = true;
1872 jack_client_close(client);
1877 int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
// Jack process callback: forward each buffer of nframes frames to
// RtApiJack::callbackEvent().  A non-zero return tells the Jack
// server to stop calling this client.
1879 CallbackInfo *info = (CallbackInfo *) infoPointer;
1881 RtApiJack *object = (RtApiJack *) info->object;
1882 if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;
1887 void jackShutdown( void *infoPointer )
// Jack server-shutdown callback: close the stream unless it was
// already stopped deliberately (see comment below).
1889 CallbackInfo *info = (CallbackInfo *) infoPointer;
1890 RtApiJack *object = (RtApiJack *) info->object;
1892 // Check current stream state. If stopped, then we'll assume this
1893 // was called as a result of a call to RtApiJack::stopStream (the
1894 // deactivation of a client handle causes this function to be called).
1895 // If not, we'll assume the Jack server is shutting down or some
1896 // other problem occurred and we should close the stream.
1897 if ( object->isStreamRunning() == false ) return;
1899 object->closeStream();
1900 std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
1903 int jackXrun( void *infoPointer )
// Jack xrun callback: record an over/underrun flag for whichever
// mode(s) have ports registered ([0] = playback, [1] = capture), so
// the next callbackEvent() can report it to the user callback.
1905 JackHandle *handle = (JackHandle *) infoPointer;
1907 if ( handle->ports[0] ) handle->xrun[0] = true;
1908 if ( handle->ports[1] ) handle->xrun[1] = true;
1913 bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
1914 unsigned int firstChannel, unsigned int sampleRate,
1915 RtAudioFormat format, unsigned int *bufferSize,
1916 RtAudio::StreamOptions *options )
1918 JackHandle *handle = (JackHandle *) stream_.apiHandle;
1920 // Look for jack server and try to become a client (only do once per stream).
1921 jack_client_t *client = 0;
1922 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
1923 jack_options_t jackoptions = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption;
1924 jack_status_t *status = NULL;
1925 if ( options && !options->streamName.empty() )
1926 client = jack_client_open( options->streamName.c_str(), jackoptions, status );
1928 client = jack_client_open( "RtApiJack", jackoptions, status );
1929 if ( client == 0 ) {
1930 errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
1931 error( RtError::WARNING );
1936 // The handle must have been created on an earlier pass.
1937 client = handle->client;
1941 std::string port, previousPort, deviceName;
1942 unsigned int nPorts = 0, nDevices = 0;
1943 ports = jack_get_ports( client, NULL, NULL, 0 );
1945 // Parse the port names up to the first colon (:).
1948 port = (char *) ports[ nPorts ];
1949 iColon = port.find(":");
1950 if ( iColon != std::string::npos ) {
1951 port = port.substr( 0, iColon );
1952 if ( port != previousPort ) {
1953 if ( nDevices == device ) deviceName = port;
1955 previousPort = port;
1958 } while ( ports[++nPorts] );
1962 if ( device >= nDevices ) {
1963 errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
1967 // Count the available ports containing the client name as device
1968 // channels. Jack "input ports" equal RtAudio output channels.
1969 unsigned int nChannels = 0;
1970 unsigned long flag = JackPortIsInput;
1971 if ( mode == INPUT ) flag = JackPortIsOutput;
1972 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1974 while ( ports[ nChannels ] ) nChannels++;
1978 // Compare the jack ports for specified client to the requested number of channels.
1979 if ( nChannels < (channels + firstChannel) ) {
1980 errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
1981 errorText_ = errorStream_.str();
1985 // Check the jack server sample rate.
1986 unsigned int jackRate = jack_get_sample_rate( client );
1987 if ( sampleRate != jackRate ) {
1988 jack_client_close( client );
1989 errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
1990 errorText_ = errorStream_.str();
1993 stream_.sampleRate = jackRate;
1995 // Get the latency of the JACK port.
1996 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1997 if ( ports[ firstChannel ] )
1998 stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
2001 // The jack server always uses 32-bit floating-point data.
2002 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2003 stream_.userFormat = format;
2005 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2006 else stream_.userInterleaved = true;
2008 // Jack always uses non-interleaved buffers.
2009 stream_.deviceInterleaved[mode] = false;
2011 // Jack always provides host byte-ordered data.
2012 stream_.doByteSwap[mode] = false;
2014 // Get the buffer size. The buffer size and number of buffers
2015 // (periods) is set when the jack server is started.
2016 stream_.bufferSize = (int) jack_get_buffer_size( client );
2017 *bufferSize = stream_.bufferSize;
2019 stream_.nDeviceChannels[mode] = channels;
2020 stream_.nUserChannels[mode] = channels;
2022 // Set flags for buffer conversion.
2023 stream_.doConvertBuffer[mode] = false;
2024 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2025 stream_.doConvertBuffer[mode] = true;
2026 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2027 stream_.nUserChannels[mode] > 1 )
2028 stream_.doConvertBuffer[mode] = true;
2030 // Allocate our JackHandle structure for the stream.
2031 if ( handle == 0 ) {
2033 handle = new JackHandle;
2035 catch ( std::bad_alloc& ) {
2036 errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
2040 if ( pthread_cond_init(&handle->condition, NULL) ) {
2041 errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
2044 stream_.apiHandle = (void *) handle;
2045 handle->client = client;
2047 handle->deviceName[mode] = deviceName;
2049 // Allocate necessary internal buffers.
2050 unsigned long bufferBytes;
2051 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2052 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2053 if ( stream_.userBuffer[mode] == NULL ) {
2054 errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
2058 if ( stream_.doConvertBuffer[mode] ) {
2060 bool makeBuffer = true;
2061 if ( mode == OUTPUT )
2062 bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2063 else { // mode == INPUT
2064 bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
2065 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2066 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2067 if ( bufferBytes < bytesOut ) makeBuffer = false;
2072 bufferBytes *= *bufferSize;
2073 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2074 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2075 if ( stream_.deviceBuffer == NULL ) {
2076 errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
2082 // Allocate memory for the Jack ports (channels) identifiers.
2083 handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
2084 if ( handle->ports[mode] == NULL ) {
2085 errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
2089 stream_.device[mode] = device;
2090 stream_.channelOffset[mode] = firstChannel;
2091 stream_.state = STREAM_STOPPED;
2092 stream_.callbackInfo.object = (void *) this;
2094 if ( stream_.mode == OUTPUT && mode == INPUT )
2095 // We had already set up the stream for output.
2096 stream_.mode = DUPLEX;
2098 stream_.mode = mode;
2099 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
2100 jack_set_xrun_callback( handle->client, jackXrun, (void *) &handle );
2101 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
2104 // Register our ports.
2106 if ( mode == OUTPUT ) {
2107 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2108 snprintf( label, 64, "outport %d", i );
2109 handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
2110 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
2114 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2115 snprintf( label, 64, "inport %d", i );
2116 handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
2117 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
2121 // Setup the buffer conversion information structure. We don't use
2122 // buffers to do channel offsets, so we override that parameter
2124 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
2130 pthread_cond_destroy( &handle->condition );
2131 jack_client_close( handle->client );
2133 if ( handle->ports[0] ) free( handle->ports[0] );
2134 if ( handle->ports[1] ) free( handle->ports[1] );
2137 stream_.apiHandle = 0;
2140 for ( int i=0; i<2; i++ ) {
2141 if ( stream_.userBuffer[i] ) {
2142 free( stream_.userBuffer[i] );
2143 stream_.userBuffer[i] = 0;
2147 if ( stream_.deviceBuffer ) {
2148 free( stream_.deviceBuffer );
2149 stream_.deviceBuffer = 0;
2155 void RtApiJack :: closeStream( void )
// Close the stream: deactivate and disconnect the JACK client, then
// release all per-stream resources (port id arrays, the drain condition
// variable, user/device buffers) and reset the stream bookkeeping.
// Closing an already-closed stream is only a warning, not an error.
2157 if ( stream_.state == STREAM_CLOSED ) {
2158 errorText_ = "RtApiJack::closeStream(): no open stream to close!";
2159 error( RtError::WARNING );
2163 JackHandle *handle = (JackHandle *) stream_.apiHandle;
// Deactivate first if we are still running, then drop the server connection.
2166 if ( stream_.state == STREAM_RUNNING )
2167 jack_deactivate( handle->client );
2169 jack_client_close( handle->client );
2173 if ( handle->ports[0] ) free( handle->ports[0] );
2174 if ( handle->ports[1] ) free( handle->ports[1] );
2175 pthread_cond_destroy( &handle->condition );
2177 stream_.apiHandle = 0;
// Free the per-mode user buffers (index 0 = output, 1 = input) ...
2180 for ( int i=0; i<2; i++ ) {
2181 if ( stream_.userBuffer[i] ) {
2182 free( stream_.userBuffer[i] );
2183 stream_.userBuffer[i] = 0;
// ... and the shared device (format-conversion) buffer.
2187 if ( stream_.deviceBuffer ) {
2188 free( stream_.deviceBuffer );
2189 stream_.deviceBuffer = 0;
2192 stream_.mode = UNINITIALIZED;
2193 stream_.state = STREAM_CLOSED;
2196 void RtApiJack :: startStream( void )
// Start the stream: activate the JACK client, then connect each of our
// registered ports to the corresponding physical port of the selected
// device, honoring the channel offset chosen at open time.
2199 if ( stream_.state == STREAM_RUNNING ) {
2200 errorText_ = "RtApiJack::startStream(): the stream is already running!";
2201 error( RtError::WARNING );
2205 MUTEX_LOCK(&stream_.mutex);
2207 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2208 int result = jack_activate( handle->client );
2210 errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
2216 // Get the list of available ports.
2217 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Our playback ports connect to the device's *input* ports.
2219 ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
2220 if ( ports == NULL) {
2221 errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
2225 // Now make the port connections. Since RtAudio wasn't designed to
2226 // allow the user to select particular channels of a device, we'll
2227 // just open the first "nChannels" ports with offset.
2228 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2230 if ( ports[ stream_.channelOffset[0] + i ] )
2231 result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
2234 errorText_ = "RtApiJack::startStream(): error connecting output ports!";
2241 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// For capture, the device's *output* ports feed our registered input ports.
2243 ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );
2244 if ( ports == NULL) {
2245 errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
2249 // Now make the port connections. See note above.
2250 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2252 if ( ports[ stream_.channelOffset[1] + i ] )
2253 result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
2256 errorText_ = "RtApiJack::startStream(): error connecting input ports!";
// Reset the drain bookkeeping and mark the stream running.
2263 handle->drainCounter = 0;
2264 handle->internalDrain = false;
2265 stream_.state = STREAM_RUNNING;
2268 MUTEX_UNLOCK(&stream_.mutex);
// result reflects the last jack_* call; nonzero falls through to the error.
2270 if ( result == 0 ) return;
2271 error( RtError::SYSTEM_ERROR );
2274 void RtApiJack :: stopStream( void )
// Stop the stream. For an output or duplex stream, if a drain has not
// already been requested (drainCounter == 0), arm the drain and block on
// the handle's condition variable until the process callback signals that
// the final buffers have played out; then deactivate the JACK client.
2277 if ( stream_.state == STREAM_STOPPED ) {
2278 errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
2279 error( RtError::WARNING );
2283 MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: the callback thread may have stopped the
// stream while we were waiting for the mutex.
2285 if ( stream_.state == STREAM_STOPPED ) {
2286 MUTEX_UNLOCK( &stream_.mutex );
2290 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2291 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2293 if ( handle->drainCounter == 0 ) {
2294 handle->drainCounter = 1;
2295 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
2299 jack_deactivate( handle->client );
2300 stream_.state = STREAM_STOPPED;
2302 MUTEX_UNLOCK( &stream_.mutex );
2305 void RtApiJack :: abortStream( void )
// Abort is stopStream() without the output drain: pre-setting
// drainCounter nonzero makes the process callback write zeros to the
// output ports instead of waiting for queued user data to play out.
2308 if ( stream_.state == STREAM_STOPPED ) {
2309 errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
2310 error( RtError::WARNING );
2314 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2315 handle->drainCounter = 1;
// NOTE(review): this listing appears truncated here; a stopStream()
// call presumably follows -- confirm against the canonical source.
2320 // This function will be called by a spawned thread when the user
2321 // callback function signals that the stream should be stopped or
2322 // aborted. It is necessary to handle it this way because the
2323 // callbackEvent() function must return before the jack_deactivate()
2324 // function will return.
2325 extern "C" void *jackStopStream( void *ptr )
2327 CallbackInfo *info = (CallbackInfo *) ptr;
2328 RtApiJack *object = (RtApiJack *) info->object;
2330 object->stopStream();
2332 pthread_exit( NULL );
2335 bool RtApiJack :: callbackEvent( unsigned long nframes )
// Body of the JACK process callback: invoke the user callback, then move
// audio between the user buffers and the per-channel JACK port buffers,
// converting formats where required. Also drives the drain/stop handshake
// used by stopStream()/abortStream().
2337 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2338 if ( stream_.state == STREAM_CLOSED ) {
// NOTE(review): the two messages below say "RtApiCore" -- a copy/paste
// from the CoreAudio section; they should read "RtApiJack".
2339 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
2340 error( RtError::WARNING );
// A server-side buffer-size change mid-stream is not supported here.
2343 if ( stream_.bufferSize != nframes ) {
2344 errorText_ = "RtApiCore::callbackEvent(): the JACK buffer size has changed ... cannot process!";
2345 error( RtError::WARNING );
2349 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2350 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2352 // Check if we were draining the stream and signal is finished.
2353 if ( handle->drainCounter > 3 ) {
// Internally-initiated drain: spawn a thread to call stopStream(),
// since jack_deactivate() cannot complete from inside this callback.
2354 if ( handle->internalDrain == true ) {
2356 pthread_create( &id, NULL, jackStopStream, info );
// Externally-initiated drain: wake the thread blocked in stopStream().
2359 pthread_cond_signal( &handle->condition );
2363 MUTEX_LOCK( &stream_.mutex );
2365 // The state might change while waiting on a mutex.
2366 if ( stream_.state == STREAM_STOPPED ) {
2367 MUTEX_UNLOCK( &stream_.mutex );
2371 // Invoke user callback first, to get fresh output data.
2372 if ( handle->drainCounter == 0 ) {
2373 RtAudioCallback callback = (RtAudioCallback) info->callback;
2374 double streamTime = getStreamTime();
// Report (and clear) any xrun flags latched by the jackXrun callback.
2375 RtAudioStreamStatus status = 0;
2376 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
2377 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2378 handle->xrun[0] = false;
2380 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
2381 status |= RTAUDIO_INPUT_OVERFLOW;
2382 handle->xrun[1] = false;
// The callback's return value doubles as a stop request:
// 1 = stop after draining output, 2 = abort immediately.
2384 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2385 stream_.bufferSize, streamTime, status, info->userData );
2386 if ( handle->drainCounter == 2 ) {
2387 MUTEX_UNLOCK( &stream_.mutex );
2389 pthread_create( &id, NULL, jackStopStream, info );
2392 else if ( handle->drainCounter == 1 )
2393 handle->internalDrain = true;
2396 jack_default_audio_sample_t *jackbuffer;
2397 unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
2398 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// When draining, play silence rather than stale user data.
2400 if ( handle->drainCounter > 0 ) { // write zeros to the output stream
2402 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2403 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2404 memset( jackbuffer, 0, bufferBytes );
// Convert the user buffer into the non-interleaved device buffer, then
// copy one channel slice at a time into the port buffers.
2408 else if ( stream_.doConvertBuffer[0] ) {
2410 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
2412 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2413 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2414 memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
2417 else { // no buffer conversion
2418 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2419 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2420 memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
// While draining, count callbacks so the "> 3" test above eventually fires.
2424 if ( handle->drainCounter ) {
2425 handle->drainCounter++;
2430 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// Capture path: gather each port buffer, then convert to the user format.
2432 if ( stream_.doConvertBuffer[1] ) {
2433 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2434 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2435 memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
2437 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
2439 else { // no buffer conversion
2440 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2441 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2442 memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
2448 MUTEX_UNLOCK(&stream_.mutex);
// Advance the stream-time counter by one buffer period.
2450 RtApi::tickStreamTime();
2453 //******************** End of __UNIX_JACK__ *********************//
2456 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
2458 // The ASIO API is designed around a callback scheme, so this
2459 // implementation is similar to that used for OS-X CoreAudio and Linux
2460 // Jack. The primary constraint with ASIO is that it only allows
2461 // access to a single driver at a time. Thus, it is not possible to
2462 // have more than one simultaneous RtAudio stream.
2464 // This implementation also requires a number of external ASIO files
2465 // and a few global variables. The ASIO callback scheme does not
2466 // allow for the passing of user data, so we must create a global
2467 // pointer to our callbackInfo structure.
2469 // On unix systems, we make use of a pthread condition variable.
2470 // Since there is no equivalent in Windows, I hacked something based
2471 // on information found in
2472 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
2474 #include "asiosys.h"
2476 #include "iasiothiscallresolver.h"
2477 #include "asiodrivers.h"
2480 AsioDrivers drivers;
2481 ASIOCallbacks asioCallbacks;
2482 ASIODriverInfo driverInfo;
2483 CallbackInfo *asioCallbackInfo;
2487 int drainCounter; // Tracks callback counts when draining
2488 bool internalDrain; // Indicates if stop is initiated from callback or not.
2489 ASIOBufferInfo *bufferInfos;
2493 :drainCounter(0), internalDrain(false), bufferInfos(0) {}
2496 // Function declarations (definitions at end of section)
2497 static const char* getAsioErrorString( ASIOError result );
2498 void sampleRateChanged( ASIOSampleRate sRate );
2499 long asioMessages( long selector, long value, void* message, double* opt );
2501 RtApiAsio :: RtApiAsio()
// Constructor: initialize COM (single-threaded apartment, which ASIO
// requires), reset the global driver list, and prime the ASIO driver info
// structure used by ASIOInit().
2503 // ASIO cannot run in a multi-threaded apartment. You can call
2504 // CoInitialize beforehand, but it must be for apartment threading
2505 // (in which case, CoInitialize will return S_FALSE here).
2506 coInitialized_ = false;
2507 HRESULT hr = CoInitialize( NULL );
// NOTE(review): the string below misspells "apartment" as "appartment";
// left untouched here since it is a runtime message.
2509 errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
2510 error( RtError::WARNING );
2512 coInitialized_ = true;
2514 drivers.removeCurrentDriver();
2515 driverInfo.asioVersion = 2;
2517 // See note in DirectSound implementation about GetDesktopWindow().
2518 driverInfo.sysRef = GetForegroundWindow();
2521 RtApiAsio :: ~RtApiAsio()
2523 if ( stream_.state != STREAM_CLOSED ) closeStream();
2524 if ( coInitialized_ ) CoUninitialize();
2527 unsigned int RtApiAsio :: getDeviceCount( void )
2529 return (unsigned int) drivers.asioGetNumDev();
2532 RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
// Probe one ASIO driver for its capabilities: channel counts, supported
// sample rates, and native data format. Loads the driver, queries it,
// and unloads it again before returning. Most failures produce a WARNING
// and an info struct with probed == false.
2534 RtAudio::DeviceInfo info;
2535 info.probed = false;
2538 unsigned int nDevices = getDeviceCount();
2539 if ( nDevices == 0 ) {
2540 errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
2541 error( RtError::INVALID_USE );
2544 if ( device >= nDevices ) {
2545 errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
2546 error( RtError::INVALID_USE );
2549 // If a stream is already open, we cannot probe other devices. Thus, use the saved results.
2550 if ( stream_.state != STREAM_CLOSED ) {
2551 if ( device >= devices_.size() ) {
2552 errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";
2553 error( RtError::WARNING );
2556 return devices_[ device ];
// From here on we probe the driver directly.
2559 char driverName[32];
2560 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2561 if ( result != ASE_OK ) {
2562 errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
2563 errorText_ = errorStream_.str();
2564 error( RtError::WARNING );
2568 info.name = driverName;
2570 if ( !drivers.loadDriver( driverName ) ) {
2571 errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
2572 errorText_ = errorStream_.str();
2573 error( RtError::WARNING );
2577 result = ASIOInit( &driverInfo );
2578 if ( result != ASE_OK ) {
2579 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2580 errorText_ = errorStream_.str();
2581 error( RtError::WARNING );
2585 // Determine the device channel information.
2586 long inputChannels, outputChannels;
2587 result = ASIOGetChannels( &inputChannels, &outputChannels );
2588 if ( result != ASE_OK ) {
2589 drivers.removeCurrentDriver();
2590 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2591 errorText_ = errorStream_.str();
2592 error( RtError::WARNING );
2596 info.outputChannels = outputChannels;
2597 info.inputChannels = inputChannels;
// Duplex capability is limited by the smaller of the two counts.
2598 if ( info.outputChannels > 0 && info.inputChannels > 0 )
2599 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
2601 // Determine the supported sample rates.
2602 info.sampleRates.clear();
2603 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
2604 result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
2605 if ( result == ASE_OK )
2606 info.sampleRates.push_back( SAMPLE_RATES[i] );
2609 // Determine supported data types ... just check first channel and assume rest are the same.
2610 ASIOChannelInfo channelInfo;
2611 channelInfo.channel = 0;
2612 channelInfo.isInput = true;
2613 if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
2614 result = ASIOGetChannelInfo( &channelInfo );
2615 if ( result != ASE_OK ) {
2616 drivers.removeCurrentDriver();
2617 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
2618 errorText_ = errorStream_.str();
2619 error( RtError::WARNING );
// Map the ASIO sample type onto RtAudio format flags; both byte orders
// of each width map to the same RtAudio format.
2623 info.nativeFormats = 0;
2624 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
2625 info.nativeFormats |= RTAUDIO_SINT16;
2626 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
2627 info.nativeFormats |= RTAUDIO_SINT32;
2628 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
2629 info.nativeFormats |= RTAUDIO_FLOAT32;
2630 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
2631 info.nativeFormats |= RTAUDIO_FLOAT64;
2633 if ( getDefaultOutputDevice() == device )
2634 info.isDefaultOutput = true;
2635 if ( getDefaultInputDevice() == device )
2636 info.isDefaultInput = true;
// Unload the driver before returning; ASIO allows only one loaded driver.
2639 drivers.removeCurrentDriver();
2643 void bufferSwitch( long index, ASIOBool processNow )
2645 RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
2646 object->callbackEvent( index );
2649 void RtApiAsio :: saveDeviceInfo( void )
2653 unsigned int nDevices = getDeviceCount();
2654 devices_.resize( nDevices );
2655 for ( unsigned int i=0; i<nDevices; i++ )
2656 devices_[i] = getDeviceInfo( i );
2659 bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
2660 unsigned int firstChannel, unsigned int sampleRate,
2661 RtAudioFormat format, unsigned int *bufferSize,
2662 RtAudio::StreamOptions *options )
2664 // For ASIO, a duplex stream MUST use the same driver.
2665 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
2666 errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
2670 char driverName[32];
2671 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2672 if ( result != ASE_OK ) {
2673 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
2674 errorText_ = errorStream_.str();
2678 // The getDeviceInfo() function will not work when a stream is open
2679 // because ASIO does not allow multiple devices to run at the same
2680 // time. Thus, we'll probe the system before opening a stream and
2681 // save the results for use by getDeviceInfo().
2682 this->saveDeviceInfo();
2684 // Only load the driver once for duplex stream.
2685 if ( mode != INPUT || stream_.mode != OUTPUT ) {
2686 if ( !drivers.loadDriver( driverName ) ) {
2687 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
2688 errorText_ = errorStream_.str();
2692 result = ASIOInit( &driverInfo );
2693 if ( result != ASE_OK ) {
2694 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2695 errorText_ = errorStream_.str();
2700 // Check the device channel count.
2701 long inputChannels, outputChannels;
2702 result = ASIOGetChannels( &inputChannels, &outputChannels );
2703 if ( result != ASE_OK ) {
2704 drivers.removeCurrentDriver();
2705 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2706 errorText_ = errorStream_.str();
2710 if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
2711 ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
2712 drivers.removeCurrentDriver();
2713 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
2714 errorText_ = errorStream_.str();
2717 stream_.nDeviceChannels[mode] = channels;
2718 stream_.nUserChannels[mode] = channels;
2719 stream_.channelOffset[mode] = firstChannel;
2721 // Verify the sample rate is supported.
2722 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
2723 if ( result != ASE_OK ) {
2724 drivers.removeCurrentDriver();
2725 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
2726 errorText_ = errorStream_.str();
2730 // Get the current sample rate
2731 ASIOSampleRate currentRate;
2732 result = ASIOGetSampleRate( &currentRate );
2733 if ( result != ASE_OK ) {
2734 drivers.removeCurrentDriver();
2735 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
2736 errorText_ = errorStream_.str();
2740 // Set the sample rate only if necessary
2741 if ( currentRate != sampleRate ) {
2742 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
2743 if ( result != ASE_OK ) {
2744 drivers.removeCurrentDriver();
2745 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
2746 errorText_ = errorStream_.str();
2751 // Determine the driver data type.
2752 ASIOChannelInfo channelInfo;
2753 channelInfo.channel = 0;
2754 if ( mode == OUTPUT ) channelInfo.isInput = false;
2755 else channelInfo.isInput = true;
2756 result = ASIOGetChannelInfo( &channelInfo );
2757 if ( result != ASE_OK ) {
2758 drivers.removeCurrentDriver();
2759 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
2760 errorText_ = errorStream_.str();
2764 // Assuming WINDOWS host is always little-endian.
2765 stream_.doByteSwap[mode] = false;
2766 stream_.userFormat = format;
2767 stream_.deviceFormat[mode] = 0;
2768 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
2769 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
2770 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
2772 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
2773 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
2774 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
2776 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
2777 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2778 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
2780 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
2781 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
2782 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
2785 if ( stream_.deviceFormat[mode] == 0 ) {
2786 drivers.removeCurrentDriver();
2787 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
2788 errorText_ = errorStream_.str();
2792 // Set the buffer size. For a duplex stream, this will end up
2793 // setting the buffer size based on the input constraints, which
2795 long minSize, maxSize, preferSize, granularity;
2796 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
2797 if ( result != ASE_OK ) {
2798 drivers.removeCurrentDriver();
2799 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
2800 errorText_ = errorStream_.str();
2804 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2805 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2806 else if ( granularity == -1 ) {
2807 // Make sure bufferSize is a power of two.
2808 int log2_of_min_size = 0;
2809 int log2_of_max_size = 0;
2811 for ( unsigned int i = 0; i < sizeof(long) * 8; i++ ) {
2812 if ( minSize & ((long)1 << i) ) log2_of_min_size = i;
2813 if ( maxSize & ((long)1 << i) ) log2_of_max_size = i;
2816 long min_delta = std::abs( (long)*bufferSize - ((long)1 << log2_of_min_size) );
2817 int min_delta_num = log2_of_min_size;
2819 for (int i = log2_of_min_size + 1; i <= log2_of_max_size; i++) {
2820 long current_delta = std::abs( (long)*bufferSize - ((long)1 << i) );
2821 if (current_delta < min_delta) {
2822 min_delta = current_delta;
2827 *bufferSize = ( (unsigned int)1 << min_delta_num );
2828 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2829 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2831 else if ( granularity != 0 ) {
2832 // Set to an even multiple of granularity, rounding up.
2833 *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
2836 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) {
2837 drivers.removeCurrentDriver();
2838 errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
2842 stream_.bufferSize = *bufferSize;
2843 stream_.nBuffers = 2;
2845 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2846 else stream_.userInterleaved = true;
2848 // ASIO always uses non-interleaved buffers.
2849 stream_.deviceInterleaved[mode] = false;
2851 // Allocate, if necessary, our AsioHandle structure for the stream.
2852 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2853 if ( handle == 0 ) {
2855 handle = new AsioHandle;
2857 catch ( std::bad_alloc& ) {
2858 //if ( handle == NULL ) {
2859 drivers.removeCurrentDriver();
2860 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
2863 handle->bufferInfos = 0;
2865 // Create a manual-reset event.
2866 handle->condition = CreateEvent( NULL, // no security
2867 TRUE, // manual-reset
2868 FALSE, // non-signaled initially
2870 stream_.apiHandle = (void *) handle;
2873 // Create the ASIO internal buffers. Since RtAudio sets up input
2874 // and output separately, we'll have to dispose of previously
2875 // created output buffers for a duplex stream.
2876 long inputLatency, outputLatency;
2877 if ( mode == INPUT && stream_.mode == OUTPUT ) {
2878 ASIODisposeBuffers();
2879 if ( handle->bufferInfos ) free( handle->bufferInfos );
2882 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
2883 bool buffersAllocated = false;
2884 unsigned int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2885 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
2886 if ( handle->bufferInfos == NULL ) {
2887 errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
2888 errorText_ = errorStream_.str();
2892 ASIOBufferInfo *infos;
2893 infos = handle->bufferInfos;
2894 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
2895 infos->isInput = ASIOFalse;
2896 infos->channelNum = i + stream_.channelOffset[0];
2897 infos->buffers[0] = infos->buffers[1] = 0;
2899 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
2900 infos->isInput = ASIOTrue;
2901 infos->channelNum = i + stream_.channelOffset[1];
2902 infos->buffers[0] = infos->buffers[1] = 0;
2905 // Set up the ASIO callback structure and create the ASIO data buffers.
2906 asioCallbacks.bufferSwitch = &bufferSwitch;
2907 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
2908 asioCallbacks.asioMessage = &asioMessages;
2909 asioCallbacks.bufferSwitchTimeInfo = NULL;
2910 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
2911 if ( result != ASE_OK ) {
2912 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
2913 errorText_ = errorStream_.str();
2916 buffersAllocated = true;
2918 // Set flags for buffer conversion.
2919 stream_.doConvertBuffer[mode] = false;
2920 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2921 stream_.doConvertBuffer[mode] = true;
2922 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2923 stream_.nUserChannels[mode] > 1 )
2924 stream_.doConvertBuffer[mode] = true;
2926 // Allocate necessary internal buffers
2927 unsigned long bufferBytes;
2928 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2929 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2930 if ( stream_.userBuffer[mode] == NULL ) {
2931 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
2935 if ( stream_.doConvertBuffer[mode] ) {
2937 bool makeBuffer = true;
2938 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
2939 if ( mode == INPUT ) {
2940 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2941 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2942 if ( bufferBytes <= bytesOut ) makeBuffer = false;
2947 bufferBytes *= *bufferSize;
2948 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2949 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2950 if ( stream_.deviceBuffer == NULL ) {
2951 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
2957 stream_.sampleRate = sampleRate;
2958 stream_.device[mode] = device;
2959 stream_.state = STREAM_STOPPED;
2960 asioCallbackInfo = &stream_.callbackInfo;
2961 stream_.callbackInfo.object = (void *) this;
2962 if ( stream_.mode == OUTPUT && mode == INPUT )
2963 // We had already set up an output stream.
2964 stream_.mode = DUPLEX;
2966 stream_.mode = mode;
2968 // Determine device latencies
2969 result = ASIOGetLatencies( &inputLatency, &outputLatency );
2970 if ( result != ASE_OK ) {
2971 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
2972 errorText_ = errorStream_.str();
2973 error( RtError::WARNING); // warn but don't fail
2976 stream_.latency[0] = outputLatency;
2977 stream_.latency[1] = inputLatency;
2980 // Setup the buffer conversion information structure. We don't use
2981 // buffers to do channel offsets, so we override that parameter
2983 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
2988 if ( buffersAllocated )
2989 ASIODisposeBuffers();
2990 drivers.removeCurrentDriver();
2993 CloseHandle( handle->condition );
2994 if ( handle->bufferInfos )
2995 free( handle->bufferInfos );
2997 stream_.apiHandle = 0;
3000 for ( int i=0; i<2; i++ ) {
3001 if ( stream_.userBuffer[i] ) {
3002 free( stream_.userBuffer[i] );
3003 stream_.userBuffer[i] = 0;
3007 if ( stream_.deviceBuffer ) {
3008 free( stream_.deviceBuffer );
3009 stream_.deviceBuffer = 0;
3015 void RtApiAsio :: closeStream()
3017 if ( stream_.state == STREAM_CLOSED ) {
3018 errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
3019 error( RtError::WARNING );
3023 if ( stream_.state == STREAM_RUNNING ) {
3024 stream_.state = STREAM_STOPPED;
3027 ASIODisposeBuffers();
3028 drivers.removeCurrentDriver();
3030 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3032 CloseHandle( handle->condition );
3033 if ( handle->bufferInfos )
3034 free( handle->bufferInfos );
3036 stream_.apiHandle = 0;
3039 for ( int i=0; i<2; i++ ) {
3040 if ( stream_.userBuffer[i] ) {
3041 free( stream_.userBuffer[i] );
3042 stream_.userBuffer[i] = 0;
3046 if ( stream_.deviceBuffer ) {
3047 free( stream_.deviceBuffer );
3048 stream_.deviceBuffer = 0;
3051 stream_.mode = UNINITIALIZED;
3052 stream_.state = STREAM_CLOSED;
3055 void RtApiAsio :: startStream()
3058 if ( stream_.state == STREAM_RUNNING ) {
3059 errorText_ = "RtApiAsio::startStream(): the stream is already running!";
3060 error( RtError::WARNING );
3064 MUTEX_LOCK( &stream_.mutex );
3066 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3067 ASIOError result = ASIOStart();
3068 if ( result != ASE_OK ) {
3069 errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
3070 errorText_ = errorStream_.str();
3074 handle->drainCounter = 0;
3075 handle->internalDrain = false;
3076 stream_.state = STREAM_RUNNING;
3080 MUTEX_UNLOCK( &stream_.mutex );
3082 if ( result == ASE_OK ) return;
3083 error( RtError::SYSTEM_ERROR );
3086 void RtApiAsio :: stopStream()
3089 if ( stream_.state == STREAM_STOPPED ) {
3090 errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
3091 error( RtError::WARNING );
3095 MUTEX_LOCK( &stream_.mutex );
3097 if ( stream_.state == STREAM_STOPPED ) {
3098 MUTEX_UNLOCK( &stream_.mutex );
3102 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3103 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3104 if ( handle->drainCounter == 0 ) {
3105 handle->drainCounter = 1;
3106 MUTEX_UNLOCK( &stream_.mutex );
3107 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
3108 ResetEvent( handle->condition );
3109 MUTEX_LOCK( &stream_.mutex );
3113 ASIOError result = ASIOStop();
3114 if ( result != ASE_OK ) {
3115 errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
3116 errorText_ = errorStream_.str();
3119 stream_.state = STREAM_STOPPED;
3120 MUTEX_UNLOCK( &stream_.mutex );
3122 if ( result == ASE_OK ) return;
3123 error( RtError::SYSTEM_ERROR );
3126 void RtApiAsio :: abortStream()
3129 if ( stream_.state == STREAM_STOPPED ) {
3130 errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
3131 error( RtError::WARNING );
3135 // The following lines were commented-out because some behavior was
3136 // noted where the device buffers need to be zeroed to avoid
3137 // continuing sound, even when the device buffers are completely
3138 // disposed. So now, calling abort is the same as calling stop.
3139 // AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3140 // handle->drainCounter = 1;
// ASIO buffer-switch worker: invoked by the driver each time it flips
// double-buffers.  Runs the user callback, converts/byte-swaps data
// between the user buffers and the per-channel ASIO buffers indicated by
// bufferIndex, and manages drain bookkeeping when the stream is stopping.
// NOTE(review): several structural lines (closing braces, early returns,
// goto targets) are missing from this copy of the file — flagged below.
bool RtApiAsio :: callbackEvent( long bufferIndex )
{
  if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
  if ( stream_.state == STREAM_CLOSED ) {
    errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
    error( RtError::WARNING );
    // NOTE(review): a failure return and closing brace appear to be
    // missing here in this copy — confirm against the canonical source.

  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;

  // Check if we were draining the stream and signal is finished.
  if ( handle->drainCounter > 3 ) {
    if ( handle->internalDrain == false )
      SetEvent( handle->condition );
    // NOTE(review): the stream-stop/return portion of this branch is
    // missing in this copy.

  MUTEX_LOCK( &stream_.mutex );

  // The state might change while waiting on a mutex.
  if ( stream_.state == STREAM_STOPPED ) goto unlock;

  // Invoke user callback to get fresh output data UNLESS we are
  // draining the stream.
  if ( handle->drainCounter == 0 ) {
    RtAudioCallback callback = (RtAudioCallback) info->callback;
    double streamTime = getStreamTime();
    RtAudioStreamStatus status = 0;
    if ( stream_.mode != INPUT && asioXRun == true ) {
      status |= RTAUDIO_OUTPUT_UNDERFLOW;
    if ( stream_.mode != OUTPUT && asioXRun == true ) {
      status |= RTAUDIO_INPUT_OVERFLOW;
    // drainCounter becomes nonzero when the callback asks to stop (1)
    // or abort (2) the stream.
    handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
                                     stream_.bufferSize, streamTime, status, info->userData );
    if ( handle->drainCounter == 2 ) {
      MUTEX_UNLOCK( &stream_.mutex );
      // NOTE(review): the abort-and-return lines of this branch are
      // missing in this copy.
    else if ( handle->drainCounter == 1 )
      handle->internalDrain = true;

  unsigned int nChannels, bufferBytes, i, j;
  nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {

    bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );

    if ( handle->drainCounter > 1 ) { // write zeros to the output stream

      for ( i=0, j=0; i<nChannels; i++ ) {
        if ( handle->bufferInfos[i].isInput != ASIOTrue )
          memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );
    // User data needs conversion: convert (and optionally byte-swap)
    // into the interleaved device buffer, then scatter per channel.
    else if ( stream_.doConvertBuffer[0] ) {

      convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
      if ( stream_.doByteSwap[0] )
        byteSwapBuffer( stream_.deviceBuffer,
                        stream_.bufferSize * stream_.nDeviceChannels[0],
                        stream_.deviceFormat[0] );
      for ( i=0, j=0; i<nChannels; i++ ) {
        if ( handle->bufferInfos[i].isInput != ASIOTrue )
          memcpy( handle->bufferInfos[i].buffers[bufferIndex],
                  &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
      // No conversion needed: copy straight from the user buffer,
      // byte-swapping in place first if required.
      if ( stream_.doByteSwap[0] )
        byteSwapBuffer( stream_.userBuffer[0],
                        stream_.bufferSize * stream_.nUserChannels[0],
                        stream_.userFormat );
      for ( i=0, j=0; i<nChannels; i++ ) {
        if ( handle->bufferInfos[i].isInput != ASIOTrue )
          memcpy( handle->bufferInfos[i].buffers[bufferIndex],
                  &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );

    // While draining, count callbacks so the drain-complete test
    // (drainCounter > 3) above eventually fires.
    if ( handle->drainCounter ) {
      handle->drainCounter++;

  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {

    bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);

    if (stream_.doConvertBuffer[1]) {

      // Always interleave ASIO input data.
      for ( i=0, j=0; i<nChannels; i++ ) {
        if ( handle->bufferInfos[i].isInput == ASIOTrue )
          memcpy( &stream_.deviceBuffer[j++*bufferBytes],
                  handle->bufferInfos[i].buffers[bufferIndex],
      // NOTE(review): the trailing "bufferBytes );" argument of this
      // memcpy is missing in this copy.
      if ( stream_.doByteSwap[1] )
        byteSwapBuffer( stream_.deviceBuffer,
                        stream_.bufferSize * stream_.nDeviceChannels[1],
                        stream_.deviceFormat[1] );
      convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
      // Gather each ASIO input channel directly into the user buffer.
      for ( i=0, j=0; i<nChannels; i++ ) {
        if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
          memcpy( &stream_.userBuffer[1][bufferBytes*j++],
                  handle->bufferInfos[i].buffers[bufferIndex],
      // NOTE(review): as above, the final memcpy argument is missing.
      if ( stream_.doByteSwap[1] )
        byteSwapBuffer( stream_.userBuffer[1],
                        stream_.bufferSize * stream_.nUserChannels[1],
                        stream_.userFormat );

  // The following call was suggested by Malte Clasen.  While the API
  // documentation indicates it should not be required, some device
  // drivers apparently do not function correctly without it.
  // NOTE(review): the ASIOOutputReady() call itself appears to be
  // missing from this copy.

  MUTEX_UNLOCK( &stream_.mutex );

  RtApi::tickStreamTime();
3294 void sampleRateChanged( ASIOSampleRate sRate )
3296 // The ASIO documentation says that this usually only happens during
3297 // external sync. Audio processing is not stopped by the driver,
3298 // actual sample rate might not have even changed, maybe only the
3299 // sample rate status of an AES/EBU or S/PDIF digital input at the
3302 RtApi *object = (RtApi *) asioCallbackInfo->object;
3304 object->stopStream();
3306 catch ( RtError &exception ) {
3307 std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
3311 std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
3314 long asioMessages( long selector, long value, void* message, double* opt )
3318 switch( selector ) {
3319 case kAsioSelectorSupported:
3320 if ( value == kAsioResetRequest
3321 || value == kAsioEngineVersion
3322 || value == kAsioResyncRequest
3323 || value == kAsioLatenciesChanged
3324 // The following three were added for ASIO 2.0, you don't
3325 // necessarily have to support them.
3326 || value == kAsioSupportsTimeInfo
3327 || value == kAsioSupportsTimeCode
3328 || value == kAsioSupportsInputMonitor)
3331 case kAsioResetRequest:
3332 // Defer the task and perform the reset of the driver during the
3333 // next "safe" situation. You cannot reset the driver right now,
3334 // as this code is called from the driver. Reset the driver is
3335 // done by completely destruct is. I.e. ASIOStop(),
3336 // ASIODisposeBuffers(), Destruction Afterwards you initialize the
3338 std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
3341 case kAsioResyncRequest:
3342 // This informs the application that the driver encountered some
3343 // non-fatal data loss. It is used for synchronization purposes
3344 // of different media. Added mainly to work around the Win16Mutex
3345 // problems in Windows 95/98 with the Windows Multimedia system,
3346 // which could lose data because the Mutex was held too long by
3347 // another thread. However a driver can issue it in other
3349 // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
3353 case kAsioLatenciesChanged:
3354 // This will inform the host application that the drivers were
3355 // latencies changed. Beware, it this does not mean that the
3356 // buffer sizes have changed! You might need to update internal
3358 std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
3361 case kAsioEngineVersion:
3362 // Return the supported ASIO version of the host application. If
3363 // a host application does not implement this selector, ASIO 1.0
3364 // is assumed by the driver.
3367 case kAsioSupportsTimeInfo:
3368 // Informs the driver whether the
3369 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
3370 // For compatibility with ASIO 1.0 drivers the host application
3371 // should always support the "old" bufferSwitch method, too.
3374 case kAsioSupportsTimeCode:
3375 // Informs the driver whether application is interested in time
3376 // code info. If an application does not need to know about time
3377 // code, the driver has less work to do.
3384 static const char* getAsioErrorString( ASIOError result )
3392 static Messages m[] =
3394 { ASE_NotPresent, "Hardware input or output is not present or available." },
3395 { ASE_HWMalfunction, "Hardware is malfunctioning." },
3396 { ASE_InvalidParameter, "Invalid input parameter." },
3397 { ASE_InvalidMode, "Invalid mode." },
3398 { ASE_SPNotAdvancing, "Sample position not advancing." },
3399 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
3400 { ASE_NoMemory, "Not enough memory to complete the request." }
3403 for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
3404 if ( m[i].value == result ) return m[i].message;
3406 return "Unknown error.";
3408 //******************** End of __WINDOWS_ASIO__ *********************//
3412 #if defined(__WINDOWS_DS__) // Windows DirectSound API
3414 // Modified by Robin Davies, October 2005
3415 // - Improvements to DirectX pointer chasing.
3416 // - Backdoor RtDsStatistics hook provides DirectX performance information.
3417 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
3418 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
3419 // Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
3424 #if defined(__MINGW32__)
3425 // missing from latest mingw winapi
3426 #define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
3427 #define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
3428 #define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
3429 #define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
3432 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
3434 #ifdef _MSC_VER // if Microsoft Visual C++
3435 #pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
3438 static inline DWORD dsPointerDifference( DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3440 if ( laterPointer > earlierPointer )
3441 return laterPointer - earlierPointer;
3443 return laterPointer - earlierPointer + bufferSize;
3446 static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3448 if ( pointer > bufferSize ) pointer -= bufferSize;
3449 if ( laterPointer < earlierPointer ) laterPointer += bufferSize;
3450 if ( pointer < earlierPointer ) pointer += bufferSize;
3451 return pointer >= earlierPointer && pointer < laterPointer;
// A structure to hold various information related to the DirectSound
// API implementation.
// NOTE(review): the enclosing "struct DsHandle {" header and several
// members (device ids, buffer pointers, xrun flags) are missing from
// this copy of the file — confirm against the canonical source.
  unsigned int drainCounter; // Tracks callback counts when draining
  bool internalDrain;        // Indicates if stop is initiated from callback or not.
  // Per-direction bookkeeping; index 0 = playback, 1 = capture.
  UINT bufferPointer[2];
  DWORD dsBufferSize[2];
  DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.
  // Default constructor: zero the counters, ids, buffers and xrun flags.
  :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
3472 RtApiDs::RtDsStatistics RtApiDs::statistics;
3474 // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
3475 RtApiDs::RtDsStatistics RtApiDs::getDsStatistics()
3477 RtDsStatistics s = statistics;
3479 // update the calculated fields.
3480 if ( s.inputFrameSize != 0 )
3481 s.latency += s.readDeviceSafeLeadBytes * 1.0 / s.inputFrameSize / s.sampleRate;
3483 if ( s.outputFrameSize != 0 )
3484 s.latency += (s.writeDeviceSafeLeadBytes + s.writeDeviceBufferLeadBytes) * 1.0 / s.outputFrameSize / s.sampleRate;
// Declarations for utility functions, callbacks, and structures
// specific to the DirectSound implementation.
// Enumeration callback passed to DirectSound(Capture)Enumerate; counts
// devices and captures name/id for the index being probed.
// NOTE(review): the remaining parameters of this prototype (module,
// context) are missing from this copy of the file.
static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
                                          LPCTSTR description,
// Maps a DirectSound HRESULT to a printable message.
static char* getErrorString( int code );
// Thread entry point that drives the blocking DirectSound callback loop.
extern "C" unsigned __stdcall callbackHandler( void *ptr );
// NOTE(review): these two lines are the tail of an enumeration-info
// struct (EnumInfo) whose header and other members are missing here.
  unsigned int counter;
  : isInput(false), getDefault(false), findIndex(false), counter(0), index(0) {}
3514 RtApiDs :: RtApiDs()
3516 // Dsound will run both-threaded. If CoInitialize fails, then just
3517 // accept whatever the mainline chose for a threading model.
3518 coInitialized_ = false;
3519 HRESULT hr = CoInitialize( NULL );
3520 if ( !FAILED( hr ) ) coInitialized_ = true;
3523 RtApiDs :: ~RtApiDs()
3525 if ( coInitialized_ ) CoUninitialize(); // balanced call.
3526 if ( stream_.state != STREAM_CLOSED ) closeStream();
3529 unsigned int RtApiDs :: getDefaultInputDevice( void )
3531 // Count output devices.
3533 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3534 if ( FAILED( result ) ) {
3535 errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") counting output devices!";
3536 errorText_ = errorStream_.str();
3537 error( RtError::WARNING );
3541 // Now enumerate input devices until we find the id = NULL.
3542 info.isInput = true;
3543 info.getDefault = true;
3544 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3545 if ( FAILED( result ) ) {
3546 errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") enumerating input devices!";
3547 errorText_ = errorStream_.str();
3548 error( RtError::WARNING );
3552 if ( info.counter > 0 ) return info.counter - 1;
3556 unsigned int RtApiDs :: getDefaultOutputDevice( void )
3558 // Enumerate output devices until we find the id = NULL.
3560 info.getDefault = true;
3561 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3562 if ( FAILED( result ) ) {
3563 errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") enumerating output devices!";
3564 errorText_ = errorStream_.str();
3565 error( RtError::WARNING );
3569 if ( info.counter > 0 ) return info.counter - 1;
3573 unsigned int RtApiDs :: getDeviceCount( void )
3575 // Count DirectSound devices.
3577 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3578 if ( FAILED( result ) ) {
3579 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
3580 errorText_ = errorStream_.str();
3581 error( RtError::WARNING );
3584 // Count DirectSoundCapture devices.
3585 info.isInput = true;
3586 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3587 if ( FAILED( result ) ) {
3588 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
3589 errorText_ = errorStream_.str();
3590 error( RtError::WARNING );
3593 return info.counter;
// Probe a DirectSound device by index and fill an RtAudio::DeviceInfo
// (channels, supported sample rates, native formats, default flags).
// Output devices are probed first; if the index is not an output device
// control jumps to the capture-probing section.
// NOTE(review): several lines are missing from this copy — the
// "EnumInfo dsinfo;" declaration, the "probeInput:" label, DSCAPS/DSCCAPS
// declarations, Release() calls and early returns — flagged below.
RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )
  // Because DirectSound always enumerates input and output devices
  // separately (and because we don't attempt to combine devices
  // internally), none of our "devices" will ever be duplex.

  RtAudio::DeviceInfo info;
  info.probed = false;

  // Enumerate through devices to find the id (if it exists).  Note
  // that we have to do the output enumeration first, even if this is
  // an input device, in order for the device counter to be correct.
  // NOTE(review): "EnumInfo dsinfo;" appears to be missing just above.
  dsinfo.findIndex = true;
  dsinfo.index = device;
  HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
  if ( FAILED( result ) ) {
    errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating output devices!";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );

  // Not an output device at this index: try the capture list instead.
  if ( dsinfo.name.empty() ) goto probeInput;

  LPDIRECTSOUND output;
  result = DirectSoundCreate( dsinfo.id, &output, NULL );
  if ( FAILED( result ) ) {
    errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );

  // NOTE(review): the "DSCAPS outCaps;" declaration is missing here.
  outCaps.dwSize = sizeof( outCaps );
  result = output->GetCaps( &outCaps );
  if ( FAILED( result ) ) {
    errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );

  // Get output channel information.
  info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;

  // Get sample rate information.
  info.sampleRates.clear();
  for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
    if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
         SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate )
      info.sampleRates.push_back( SAMPLE_RATES[k] );

  // Get format information.
  if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
  if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;

  if ( getDefaultOutputDevice() == device )
    info.isDefaultOutput = true;

  // Copy name and return.
  info.name = dsinfo.name;

  // NOTE(review): the "probeInput:" label belongs here in the full source.
  dsinfo.isInput = true;
  result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
  if ( FAILED( result ) ) {
    errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating input devices!";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );

  if ( dsinfo.name.empty() ) return info;

  LPDIRECTSOUNDCAPTURE input;
  result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
  if ( FAILED( result ) ) {
    errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );

  // NOTE(review): the "DSCCAPS inCaps;" declaration is missing here.
  inCaps.dwSize = sizeof( inCaps );
  result = input->GetCaps( &inCaps );
  if ( FAILED( result ) ) {
    errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsinfo.name << ")!";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );

  // Get input channel information.
  info.inputChannels = inCaps.dwChannels;

  // Get sample rate and format information.
  if ( inCaps.dwChannels == 2 ) {
    if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
    if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
    if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
    if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
    if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
    if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
    if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
    if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;

    if ( info.nativeFormats & RTAUDIO_SINT16 ) {
      if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.sampleRates.push_back( 11025 );
      if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.sampleRates.push_back( 22050 );
      if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.sampleRates.push_back( 44100 );
      if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.sampleRates.push_back( 96000 );
    else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
      if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.sampleRates.push_back( 11025 );
      if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.sampleRates.push_back( 22050 );
      if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.sampleRates.push_back( 44100 );
      // NOTE(review): likely a bug — WAVE_FORMAT_96S08 is the 96 kHz
      // stereo 8-bit flag, so this should presumably push 96000 (compare
      // the mono branch below).  Confirm before changing.
      if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.sampleRates.push_back( 44100 );
  else if ( inCaps.dwChannels == 1 ) {
    if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
    if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
    if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
    if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
    if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
    if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
    if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
    if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;

    if ( info.nativeFormats & RTAUDIO_SINT16 ) {
      if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.sampleRates.push_back( 11025 );
      if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.sampleRates.push_back( 22050 );
      if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.sampleRates.push_back( 44100 );
      if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.sampleRates.push_back( 96000 );
    else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
      if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.sampleRates.push_back( 11025 );
      if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.sampleRates.push_back( 22050 );
      if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.sampleRates.push_back( 44100 );
      if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.sampleRates.push_back( 96000 );
  else info.inputChannels = 0; // technically, this would be an error

  if ( info.inputChannels == 0 ) return info;

  if ( getDefaultInputDevice() == device )
    info.isDefaultInput = true;

  // Copy name and return.
  info.name = dsinfo.name;
3763 bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
3764 unsigned int firstChannel, unsigned int sampleRate,
3765 RtAudioFormat format, unsigned int *bufferSize,
3766 RtAudio::StreamOptions *options )
3768 if ( channels + firstChannel > 2 ) {
3769 errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
3773 // Enumerate through devices to find the id (if it exists). Note
3774 // that we have to do the output enumeration first, even if this is
3775 // an input device, in order for the device counter to be correct.
3777 dsinfo.findIndex = true;
3778 dsinfo.index = device;
3779 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3780 if ( FAILED( result ) ) {
3781 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating output devices!";
3782 errorText_ = errorStream_.str();
3786 if ( mode == OUTPUT ) {
3787 if ( dsinfo.name.empty() ) {
3788 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
3789 errorText_ = errorStream_.str();
3793 else { // mode == INPUT
3794 dsinfo.isInput = true;
3795 HRESULT result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3796 if ( FAILED( result ) ) {
3797 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating input devices!";
3798 errorText_ = errorStream_.str();
3801 if ( dsinfo.name.empty() ) {
3802 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
3803 errorText_ = errorStream_.str();
3808 // According to a note in PortAudio, using GetDesktopWindow()
3809 // instead of GetForegroundWindow() is supposed to avoid problems
3810 // that occur when the application's window is not the foreground
3811 // window. Also, if the application window closes before the
3812 // DirectSound buffer, DirectSound can crash. However, for console
3813 // applications, no sound was produced when using GetDesktopWindow().
3814 HWND hWnd = GetForegroundWindow();
3816 // Check the numberOfBuffers parameter and limit the lowest value to
3817 // two. This is a judgement call and a value of two is probably too
3818 // low for capture, but it should work for playback.
3820 if ( options ) nBuffers = options->numberOfBuffers;
3821 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
3822 if ( nBuffers < 2 ) nBuffers = 3;
3824 // Create the wave format structure. The data format setting will
3825 // be determined later.
3826 WAVEFORMATEX waveFormat;
3827 ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
3828 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
3829 waveFormat.nChannels = channels + firstChannel;
3830 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
3832 // Determine the device buffer size. By default, 32k, but we will
3833 // grow it to make allowances for very large software buffer sizes.
3834 DWORD dsBufferSize = 0;
3835 DWORD dsPointerLeadTime = 0;
3836 long bufferBytes = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this
3838 void *ohandle = 0, *bhandle = 0;
3839 if ( mode == OUTPUT ) {
3841 LPDIRECTSOUND output;
3842 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3843 if ( FAILED( result ) ) {
3844 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3845 errorText_ = errorStream_.str();
3850 outCaps.dwSize = sizeof( outCaps );
3851 result = output->GetCaps( &outCaps );
3852 if ( FAILED( result ) ) {
3854 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsinfo.name << ")!";
3855 errorText_ = errorStream_.str();
3859 // Check channel information.
3860 if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
3861 errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsinfo.name << ") does not support stereo playback.";
3862 errorText_ = errorStream_.str();
3866 // Check format information. Use 16-bit format unless not
3867 // supported or user requests 8-bit.
3868 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
3869 !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
3870 waveFormat.wBitsPerSample = 16;
3871 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3874 waveFormat.wBitsPerSample = 8;
3875 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3877 stream_.userFormat = format;
3879 // Update wave format structure and buffer information.
3880 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3881 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3882 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
3884 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
3885 while ( dsPointerLeadTime * 2U > (DWORD) bufferBytes )
3888 // Set cooperative level to DSSCL_EXCLUSIVE ... sound stops when window focus changes.
3889 // result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
3890 // Set cooperative level to DSSCL_PRIORITY ... sound remains when window focus changes.
3891 result = output->SetCooperativeLevel( hWnd, DSSCL_PRIORITY );
3892 if ( FAILED( result ) ) {
3894 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsinfo.name << ")!";
3895 errorText_ = errorStream_.str();
3899 // Even though we will write to the secondary buffer, we need to
3900 // access the primary buffer to set the correct output format
3901 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
3902 // buffer description.
3903 DSBUFFERDESC bufferDescription;
3904 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3905 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3906 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
3908 // Obtain the primary buffer
3909 LPDIRECTSOUNDBUFFER buffer;
3910 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3911 if ( FAILED( result ) ) {
3913 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsinfo.name << ")!";
3914 errorText_ = errorStream_.str();
3918 // Set the primary DS buffer sound format.
3919 result = buffer->SetFormat( &waveFormat );
3920 if ( FAILED( result ) ) {
3922 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsinfo.name << ")!";
3923 errorText_ = errorStream_.str();
3927 // Setup the secondary DS buffer description.
3928 dsBufferSize = (DWORD) bufferBytes;
3929 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3930 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3931 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3932 DSBCAPS_GLOBALFOCUS |
3933 DSBCAPS_GETCURRENTPOSITION2 |
3934 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
3935 bufferDescription.dwBufferBytes = bufferBytes;
3936 bufferDescription.lpwfxFormat = &waveFormat;
3938 // Try to create the secondary DS buffer. If that doesn't work,
3939 // try to use software mixing. Otherwise, there's a problem.
3940 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3941 if ( FAILED( result ) ) {
3942 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3943 DSBCAPS_GLOBALFOCUS |
3944 DSBCAPS_GETCURRENTPOSITION2 |
3945 DSBCAPS_LOCSOFTWARE ); // Force software mixing
3946 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3947 if ( FAILED( result ) ) {
3949 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsinfo.name << ")!";
3950 errorText_ = errorStream_.str();
3955 // Get the buffer size ... might be different from what we specified.
3957 dsbcaps.dwSize = sizeof( DSBCAPS );
3958 result = buffer->GetCaps( &dsbcaps );
3959 if ( FAILED( result ) ) {
3962 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsinfo.name << ")!";
3963 errorText_ = errorStream_.str();
3967 bufferBytes = dsbcaps.dwBufferBytes;
3969 // Lock the DS buffer
3972 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3973 if ( FAILED( result ) ) {
3976 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsinfo.name << ")!";
3977 errorText_ = errorStream_.str();
3981 // Zero the DS buffer
3982 ZeroMemory( audioPtr, dataLen );
3984 // Unlock the DS buffer
3985 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3986 if ( FAILED( result ) ) {
3989 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsinfo.name << ")!";
3990 errorText_ = errorStream_.str();
3994 dsBufferSize = bufferBytes;
3995 ohandle = (void *) output;
3996 bhandle = (void *) buffer;
3999 if ( mode == INPUT ) {
4001 LPDIRECTSOUNDCAPTURE input;
4002 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
4003 if ( FAILED( result ) ) {
4004 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
4005 errorText_ = errorStream_.str();
4010 inCaps.dwSize = sizeof( inCaps );
4011 result = input->GetCaps( &inCaps );
4012 if ( FAILED( result ) ) {
4014 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsinfo.name << ")!";
4015 errorText_ = errorStream_.str();
4019 // Check channel information.
4020 if ( inCaps.dwChannels < channels + firstChannel ) {
4021 errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";
4025 // Check format information. Use 16-bit format unless user
4027 DWORD deviceFormats;
4028 if ( channels + firstChannel == 2 ) {
4029 deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
4030 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
4031 waveFormat.wBitsPerSample = 8;
4032 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
4034 else { // assume 16-bit is supported
4035 waveFormat.wBitsPerSample = 16;
4036 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4039 else { // channel == 1
4040 deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
4041 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
4042 waveFormat.wBitsPerSample = 8;
4043 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
4045 else { // assume 16-bit is supported
4046 waveFormat.wBitsPerSample = 16;
4047 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4050 stream_.userFormat = format;
4052 // Update wave format structure and buffer information.
4053 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
4054 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
4055 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
4057 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
4058 while ( dsPointerLeadTime * 2U > (DWORD) bufferBytes )
4061 // Setup the secondary DS buffer description.
4062 dsBufferSize = bufferBytes;
4063 DSCBUFFERDESC bufferDescription;
4064 ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
4065 bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
4066 bufferDescription.dwFlags = 0;
4067 bufferDescription.dwReserved = 0;
4068 bufferDescription.dwBufferBytes = bufferBytes;
4069 bufferDescription.lpwfxFormat = &waveFormat;
4071 // Create the capture buffer.
4072 LPDIRECTSOUNDCAPTUREBUFFER buffer;
4073 result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
4074 if ( FAILED( result ) ) {
4076 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsinfo.name << ")!";
4077 errorText_ = errorStream_.str();
4081 // Get the buffer size ... might be different from what we specified.
4083 dscbcaps.dwSize = sizeof( DSCBCAPS );
4084 result = buffer->GetCaps( &dscbcaps );
4085 if ( FAILED( result ) ) {
4088 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsinfo.name << ")!";
4089 errorText_ = errorStream_.str();
4093 bufferBytes = dscbcaps.dwBufferBytes;
4095 // Lock the capture buffer
4098 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
4099 if ( FAILED( result ) ) {
4102 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsinfo.name << ")!";
4103 errorText_ = errorStream_.str();
4108 ZeroMemory( audioPtr, dataLen );
4110 // Unlock the buffer
4111 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4112 if ( FAILED( result ) ) {
4115 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsinfo.name << ")!";
4116 errorText_ = errorStream_.str();
4120 dsBufferSize = bufferBytes;
4121 ohandle = (void *) input;
4122 bhandle = (void *) buffer;
4125 // Set various stream parameters
4126 DsHandle *handle = 0;
4127 stream_.nDeviceChannels[mode] = channels + firstChannel;
4128 stream_.nUserChannels[mode] = channels;
4129 stream_.bufferSize = *bufferSize;
4130 stream_.channelOffset[mode] = firstChannel;
4131 stream_.deviceInterleaved[mode] = true;
4132 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
4133 else stream_.userInterleaved = true;
4135 // Set flag for buffer conversion
4136 stream_.doConvertBuffer[mode] = false;
4137 if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
4138 stream_.doConvertBuffer[mode] = true;
4139 if (stream_.userFormat != stream_.deviceFormat[mode])
4140 stream_.doConvertBuffer[mode] = true;
4141 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
4142 stream_.nUserChannels[mode] > 1 )
4143 stream_.doConvertBuffer[mode] = true;
4145 // Allocate necessary internal buffers
4146 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
4147 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
4148 if ( stream_.userBuffer[mode] == NULL ) {
4149 errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
4153 if ( stream_.doConvertBuffer[mode] ) {
4155 bool makeBuffer = true;
4156 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
4157 if ( mode == INPUT ) {
4158 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
4159 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
4160 if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
4165 bufferBytes *= *bufferSize;
4166 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
4167 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
4168 if ( stream_.deviceBuffer == NULL ) {
4169 errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
4175 // Allocate our DsHandle structures for the stream.
4176 if ( stream_.apiHandle == 0 ) {
4178 handle = new DsHandle;
4180 catch ( std::bad_alloc& ) {
4181 errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";
4185 // Create a manual-reset event.
4186 handle->condition = CreateEvent( NULL, // no security
4187 TRUE, // manual-reset
4188 FALSE, // non-signaled initially
4190 stream_.apiHandle = (void *) handle;
4193 handle = (DsHandle *) stream_.apiHandle;
4194 handle->id[mode] = ohandle;
4195 handle->buffer[mode] = bhandle;
4196 handle->dsBufferSize[mode] = dsBufferSize;
4197 handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
4199 stream_.device[mode] = device;
4200 stream_.state = STREAM_STOPPED;
4201 if ( stream_.mode == OUTPUT && mode == INPUT )
4202 // We had already set up an output stream.
4203 stream_.mode = DUPLEX;
4205 stream_.mode = mode;
4206 stream_.nBuffers = nBuffers;
4207 stream_.sampleRate = sampleRate;
4209 // Setup the buffer conversion information structure.
4210 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
4212 // Setup the callback thread.
4214 stream_.callbackInfo.object = (void *) this;
4215 stream_.callbackInfo.isRunning = true;
4216 stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
4217 &stream_.callbackInfo, 0, &threadId );
4218 if ( stream_.callbackInfo.thread == 0 ) {
4219 errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
4223 // Boost DS thread priority
4224 SetThreadPriority( (HANDLE) stream_.callbackInfo.thread, THREAD_PRIORITY_HIGHEST );
4229 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4230 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4231 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4232 if ( buffer ) buffer->Release();
4235 if ( handle->buffer[1] ) {
4236 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4237 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4238 if ( buffer ) buffer->Release();
4241 CloseHandle( handle->condition );
4243 stream_.apiHandle = 0;
4246 for ( int i=0; i<2; i++ ) {
4247 if ( stream_.userBuffer[i] ) {
4248 free( stream_.userBuffer[i] );
4249 stream_.userBuffer[i] = 0;
4253 if ( stream_.deviceBuffer ) {
4254 free( stream_.deviceBuffer );
4255 stream_.deviceBuffer = 0;
// Close the currently open stream: stop the callback thread, release the
// DirectSound playback/capture objects, and free all internal buffers.
// Issues only a WARNING (and returns) if no stream is open.
// NOTE(review): several lines of this function are elided in this view
// (early return, buffer/object Release() calls, closing braces) — the
// comments below describe only what the visible code demonstrates.
4261 void RtApiDs :: closeStream()
4263   if ( stream_.state == STREAM_CLOSED ) {
4264     errorText_ = "RtApiDs::closeStream(): no open stream to close!";
4265     error( RtError::WARNING );
4269   // Stop the callback thread.
// Clearing isRunning signals the callback loop to exit; then block until
// the thread terminates and release its handle.
4270   stream_.callbackInfo.isRunning = false;
4271   WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );
4272   CloseHandle( (HANDLE) stream_.callbackInfo.thread );
4274   DsHandle *handle = (DsHandle *) stream_.apiHandle;
// Tear down the output (index 0) DirectSound objects, if any.
4276     if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4277       LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4278       LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
// Tear down the input (index 1) DirectSound capture objects, if any.
4285     if ( handle->buffer[1] ) {
4286       LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4287       LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
// Release the signaling event created in probeDeviceOpen().
4294     CloseHandle( handle->condition );
4296   stream_.apiHandle = 0;
// Free the per-mode user buffers (0 = output, 1 = input).
4299   for ( int i=0; i<2; i++ ) {
4300     if ( stream_.userBuffer[i] ) {
4301       free( stream_.userBuffer[i] );
4302       stream_.userBuffer[i] = 0;
// Free the shared device (conversion) buffer, if one was allocated.
4306   if ( stream_.deviceBuffer ) {
4307     free( stream_.deviceBuffer );
4308     stream_.deviceBuffer = 0;
// Mark the stream object as fully closed.
4311   stream_.mode = UNINITIALIZED;
4312   stream_.state = STREAM_CLOSED;
// Start the open stream: begin looping playback on the output buffer and/or
// looping capture on the input buffer, and mark the stream RUNNING.
// Issues only a WARNING (and returns) if the stream is already running.
// NOTE(review): some lines (early return, closing braces, goto-style error
// exits) are elided in this view; comments reflect only the visible code.
4315 void RtApiDs :: startStream()
4318   if ( stream_.state == STREAM_RUNNING ) {
4319     errorText_ = "RtApiDs::startStream(): the stream is already running!";
4320     error( RtError::WARNING );
4324   // Increase scheduler frequency on lesser windows (a side-effect of
4325   // increasing timer accuracy). On greater windows (Win2K or later),
4326   // this is already in effect.
4328   MUTEX_LOCK( &stream_.mutex );
4330   DsHandle *handle = (DsHandle *) stream_.apiHandle;
// 1 ms multimedia timer resolution; reverted via timeEndPeriod() in stopStream().
4332   timeBeginPeriod( 1 );
// Reset the (debug/statistics) counters for this run.
4335   memset( &statistics, 0, sizeof( statistics ) );
4336   statistics.sampleRate = stream_.sampleRate;
4337   statistics.writeDeviceBufferLeadBytes = handle->dsPointerLeadTime[0];
// Pointer-sync state consumed by callbackEvent(): buffers are not yet
// rolling and no duplex preroll has been scheduled.
4340   buffersRolling = false;
4341   duplexPrerollBytes = 0;
4343   if ( stream_.mode == DUPLEX ) {
4344     // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
4345     duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );
// Kick off looping playback on the output (index 0) buffer.
4349   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4350     //statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0] ) * stream_.nDeviceChannels[0];
4352     LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4353     result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
4354     if ( FAILED( result ) ) {
4355       errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
4356       errorText_ = errorStream_.str();
// Kick off looping capture on the input (index 1) buffer.
4361   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4362     //statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1]) * stream_.nDeviceChannels[1];
4364     LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4365     result = buffer->Start( DSCBSTART_LOOPING );
4366     if ( FAILED( result ) ) {
4367       errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
4368       errorText_ = errorStream_.str();
// Reset drain bookkeeping and flag the stream as running.
4373   handle->drainCounter = 0;
4374   handle->internalDrain = false;
4375   stream_.state = STREAM_RUNNING;
4378   MUTEX_UNLOCK( &stream_.mutex );
// Escalate any DirectSound failure recorded above to a SYSTEM_ERROR.
4380   if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Stop the running stream, optionally waiting for the output buffer to
// drain first, then stop and zero the DirectSound buffers so a restart
// does not replay stale data. Issues only a WARNING if already stopped.
// NOTE(review): some lines (early return, audioPtr/dataLen declarations,
// goto-style error exits, closing braces) are elided in this view; the
// comments below describe only the visible code.
4383 void RtApiDs :: stopStream()
4386   if ( stream_.state == STREAM_STOPPED ) {
4387     errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
4388     error( RtError::WARNING );
4392   MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: the state may have changed while waiting.
4394   if ( stream_.state == STREAM_STOPPED ) {
4395     MUTEX_UNLOCK( &stream_.mutex );
4402   DsHandle *handle = (DsHandle *) stream_.apiHandle;
4403   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// If no drain was requested yet, request one (drainCounter = 1) and block
// on the manual-reset event until callbackEvent() signals the drain is
// complete; the mutex is released while waiting so the callback can run.
4404     if ( handle->drainCounter == 0 ) {
4405       handle->drainCounter = 1;
4406       MUTEX_UNLOCK( &stream_.mutex );
4407       WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
4408       ResetEvent( handle->condition );
4409       MUTEX_LOCK( &stream_.mutex );
4412     // Stop the buffer and clear memory
4413     LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4414     result = buffer->Stop();
4415     if ( FAILED( result ) ) {
4416       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping output buffer!";
4417       errorText_ = errorStream_.str();
4421     // Lock the buffer and clear it so that if we start to play again,
4422     // we won't have old data playing.
4423     result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
4424     if ( FAILED( result ) ) {
4425       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking output buffer!";
4426       errorText_ = errorStream_.str();
4430     // Zero the DS buffer
4431     ZeroMemory( audioPtr, dataLen );
4433     // Unlock the DS buffer
4434     result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4435     if ( FAILED( result ) ) {
4436       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
4437       errorText_ = errorStream_.str();
4441     // If we start playing again, we must begin at beginning of buffer.
4442     handle->bufferPointer[0] = 0;
// Same stop-and-zero sequence for the capture (input, index 1) buffer.
4445   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4446     LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4450     result = buffer->Stop();
4451     if ( FAILED( result ) ) {
4452       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping input buffer!";
4453       errorText_ = errorStream_.str();
4457     // Lock the buffer and clear it so that if we start to play again,
4458     // we won't have old data playing.
4459     result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
4460     if ( FAILED( result ) ) {
4461       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking input buffer!";
4462       errorText_ = errorStream_.str();
4466     // Zero the DS buffer
4467     ZeroMemory( audioPtr, dataLen );
4469     // Unlock the DS buffer
4470     result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4471     if ( FAILED( result ) ) {
4472       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
4473       errorText_ = errorStream_.str();
4477     // If we start recording again, we must begin at beginning of buffer.
4478     handle->bufferPointer[1] = 0;
4482   timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
4483   stream_.state = STREAM_STOPPED;
4484   MUTEX_UNLOCK( &stream_.mutex );
// Escalate any DirectSound failure recorded above to a SYSTEM_ERROR.
4486   if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Abort the running stream without draining: setting drainCounter to 1
// before the stop path means callbackEvent()/stopStream() will not wait
// for queued output to finish playing. Issues only a WARNING (and
// returns) if the stream is already stopped.
// NOTE(review): the tail of this function (presumably the actual stop
// call and closing brace) is elided in this view — confirm against the
// full source.
4489 void RtApiDs :: abortStream()
4492   if ( stream_.state == STREAM_STOPPED ) {
4493     errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
4494     error( RtError::WARNING );
4498   DsHandle *handle = (DsHandle *) stream_.apiHandle;
4499   handle->drainCounter = 1;
4504 void RtApiDs :: callbackEvent()
4506 if ( stream_.state == STREAM_STOPPED ) {
4507 Sleep(50); // sleep 50 milliseconds
4511 if ( stream_.state == STREAM_CLOSED ) {
4512 errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
4513 error( RtError::WARNING );
4517 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
4518 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4520 // Check if we were draining the stream and signal is finished.
4521 if ( handle->drainCounter > stream_.nBuffers + 2 ) {
4522 if ( handle->internalDrain == false )
4523 SetEvent( handle->condition );
4529 MUTEX_LOCK( &stream_.mutex );
4531 // The state might change while waiting on a mutex.
4532 if ( stream_.state == STREAM_STOPPED ) {
4533 MUTEX_UNLOCK( &stream_.mutex );
4537 // Invoke user callback to get fresh output data UNLESS we are
4539 if ( handle->drainCounter == 0 ) {
4540 RtAudioCallback callback = (RtAudioCallback) info->callback;
4541 double streamTime = getStreamTime();
4542 RtAudioStreamStatus status = 0;
4543 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
4544 status |= RTAUDIO_OUTPUT_UNDERFLOW;
4545 handle->xrun[0] = false;
4547 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
4548 status |= RTAUDIO_INPUT_OVERFLOW;
4549 handle->xrun[1] = false;
4551 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
4552 stream_.bufferSize, streamTime, status, info->userData );
4553 if ( handle->drainCounter == 2 ) {
4554 MUTEX_UNLOCK( &stream_.mutex );
4558 else if ( handle->drainCounter == 1 )
4559 handle->internalDrain = true;
4563 DWORD currentWritePos, safeWritePos;
4564 DWORD currentReadPos, safeReadPos;
4568 #ifdef GENERATE_DEBUG_LOG
4569 DWORD writeTime, readTime;
4572 LPVOID buffer1 = NULL;
4573 LPVOID buffer2 = NULL;
4574 DWORD bufferSize1 = 0;
4575 DWORD bufferSize2 = 0;
4580 if ( stream_.mode == DUPLEX && !buffersRolling ) {
4581 //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4583 // It takes a while for the devices to get rolling. As a result,
4584 // there's no guarantee that the capture and write device pointers
4585 // will move in lockstep. Wait here for both devices to start
4586 // rolling, and then set our buffer pointers accordingly.
4587 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
4588 // bytes later than the write buffer.
4590 // Stub: a serious risk of having a pre-emptive scheduling round
4591 // take place between the two GetCurrentPosition calls... but I'm
4592 // really not sure how to solve the problem. Temporarily boost to
4593 // Realtime priority, maybe; but I'm not sure what priority the
4594 // DirectSound service threads run at. We *should* be roughly
4595 // within a ms or so of correct.
4597 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4598 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4600 DWORD initialWritePos, initialSafeWritePos;
4601 DWORD initialReadPos, initialSafeReadPos;
4603 result = dsWriteBuffer->GetCurrentPosition( &initialWritePos, &initialSafeWritePos );
4604 if ( FAILED( result ) ) {
4605 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4606 errorText_ = errorStream_.str();
4607 error( RtError::SYSTEM_ERROR );
4609 result = dsCaptureBuffer->GetCurrentPosition( &initialReadPos, &initialSafeReadPos );
4610 if ( FAILED( result ) ) {
4611 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4612 errorText_ = errorStream_.str();
4613 error( RtError::SYSTEM_ERROR );
4616 result = dsWriteBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos );
4617 if ( FAILED( result ) ) {
4618 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4619 errorText_ = errorStream_.str();
4620 error( RtError::SYSTEM_ERROR );
4622 result = dsCaptureBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4623 if ( FAILED( result ) ) {
4624 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4625 errorText_ = errorStream_.str();
4626 error( RtError::SYSTEM_ERROR );
4628 if ( safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos ) break;
4632 //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4634 buffersRolling = true;
4635 handle->bufferPointer[0] = ( safeWritePos + handle->dsPointerLeadTime[0] );
4636 handle->bufferPointer[1] = safeReadPos;
4639 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4641 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4643 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
4644 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4645 bufferBytes *= formatBytes( stream_.userFormat );
4646 memset( stream_.userBuffer[0], 0, bufferBytes );
4649 // Setup parameters and do buffer conversion if necessary.
4650 if ( stream_.doConvertBuffer[0] ) {
4651 buffer = stream_.deviceBuffer;
4652 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
4653 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
4654 bufferBytes *= formatBytes( stream_.deviceFormat[0] );
4657 buffer = stream_.userBuffer[0];
4658 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4659 bufferBytes *= formatBytes( stream_.userFormat );
4662 // No byte swapping necessary in DirectSound implementation.
4664 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
4665 // unsigned. So, we need to convert our signed 8-bit data here to
4667 if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
4668 for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
4670 DWORD dsBufferSize = handle->dsBufferSize[0];
4671 nextWritePos = handle->bufferPointer[0];
4675 // Find out where the read and "safe write" pointers are.
4676 result = dsBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos );
4677 if ( FAILED( result ) ) {
4678 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4679 errorText_ = errorStream_.str();
4680 error( RtError::SYSTEM_ERROR );
4683 leadPos = safeWritePos + handle->dsPointerLeadTime[0];
4684 if ( leadPos > dsBufferSize ) leadPos -= dsBufferSize;
4685 if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
4686 endWrite = nextWritePos + bufferBytes;
4688 // Check whether the entire write region is behind the play pointer.
4689 if ( leadPos >= endWrite ) break;
4691 // If we are here, then we must wait until the play pointer gets
4692 // beyond the write region. The approach here is to use the
4693 // Sleep() function to suspend operation until safePos catches
4694 // up. Calculate number of milliseconds to wait as:
4695 // time = distance * (milliseconds/second) * fudgefactor /
4696 // ((bytes/sample) * (samples/second))
4697 // A "fudgefactor" less than 1 is used because it was found
4698 // that sleeping too long was MUCH worse than sleeping for
4699 // several shorter periods.
4700 double millis = ( endWrite - leadPos ) * 900.0;
4701 millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
4702 if ( millis < 1.0 ) millis = 1.0;
4703 if ( millis > 50.0 ) {
4704 static int nOverruns = 0;
4707 Sleep( (DWORD) millis );
4710 //if ( statistics.writeDeviceSafeLeadBytes < dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] ) ) {
4711 // statistics.writeDeviceSafeLeadBytes = dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] );
4714 if ( dsPointerBetween( nextWritePos, safeWritePos, currentWritePos, dsBufferSize )
4715 || dsPointerBetween( endWrite, safeWritePos, currentWritePos, dsBufferSize ) ) {
4716 // We've strayed into the forbidden zone ... resync the read pointer.
4717 //++statistics.numberOfWriteUnderruns;
4718 handle->xrun[0] = true;
4719 nextWritePos = safeWritePos + handle->dsPointerLeadTime[0] - bufferBytes + dsBufferSize;
4720 while ( nextWritePos >= dsBufferSize ) nextWritePos -= dsBufferSize;
4721 handle->bufferPointer[0] = nextWritePos;
4722 endWrite = nextWritePos + bufferBytes;
4725 // Lock free space in the buffer
4726 result = dsBuffer->Lock( nextWritePos, bufferBytes, &buffer1,
4727 &bufferSize1, &buffer2, &bufferSize2, 0 );
4728 if ( FAILED( result ) ) {
4729 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
4730 errorText_ = errorStream_.str();
4731 error( RtError::SYSTEM_ERROR );
4734 // Copy our buffer into the DS buffer
4735 CopyMemory( buffer1, buffer, bufferSize1 );
4736 if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
4738 // Update our buffer offset and unlock sound buffer
4739 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4740 if ( FAILED( result ) ) {
4741 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
4742 errorText_ = errorStream_.str();
4743 error( RtError::SYSTEM_ERROR );
4745 nextWritePos = ( nextWritePos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4746 handle->bufferPointer[0] = nextWritePos;
4748 if ( handle->drainCounter ) {
4749 handle->drainCounter++;
4754 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4756 // Setup parameters.
4757 if ( stream_.doConvertBuffer[1] ) {
4758 buffer = stream_.deviceBuffer;
4759 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
4760 bufferBytes *= formatBytes( stream_.deviceFormat[1] );
4763 buffer = stream_.userBuffer[1];
4764 bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
4765 bufferBytes *= formatBytes( stream_.userFormat );
4768 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4769 long nextReadPos = handle->bufferPointer[1];
4770 DWORD dsBufferSize = handle->dsBufferSize[1];
4772 // Find out where the write and "safe read" pointers are.
4773 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4774 if ( FAILED( result ) ) {
4775 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4776 errorText_ = errorStream_.str();
4777 error( RtError::SYSTEM_ERROR );
4780 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4781 DWORD endRead = nextReadPos + bufferBytes;
4783 // Handling depends on whether we are INPUT or DUPLEX.
4784 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
4785 // then a wait here will drag the write pointers into the forbidden zone.
4787 // In DUPLEX mode, rather than wait, we will back off the read pointer until
4788 // it's in a safe position. This causes dropouts, but it seems to be the only
4789 // practical way to sync up the read and write pointers reliably, given the
4790 // the very complex relationship between phase and increment of the read and write
4793 // In order to minimize audible dropouts in DUPLEX mode, we will
4794 // provide a pre-roll period of 0.5 seconds in which we return
4795 // zeros from the read buffer while the pointers sync up.
4797 if ( stream_.mode == DUPLEX ) {
4798 if ( safeReadPos < endRead ) {
4799 if ( duplexPrerollBytes <= 0 ) {
4800 // Pre-roll time over. Be more agressive.
4801 int adjustment = endRead-safeReadPos;
4803 handle->xrun[1] = true;
4804 //++statistics.numberOfReadOverruns;
4806 // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
4807 // and perform fine adjustments later.
4808 // - small adjustments: back off by twice as much.
4809 if ( adjustment >= 2*bufferBytes )
4810 nextReadPos = safeReadPos-2*bufferBytes;
4812 nextReadPos = safeReadPos-bufferBytes-adjustment;
4814 //statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos;
4815 //if ( statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize;
4816 if ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4820 // In pre=roll time. Just do it.
4821 nextReadPos = safeReadPos-bufferBytes;
4822 while ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4824 endRead = nextReadPos + bufferBytes;
4827 else { // mode == INPUT
4828 while ( safeReadPos < endRead ) {
4829 // See comments for playback.
4830 double millis = (endRead - safeReadPos) * 900.0;
4831 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
4832 if ( millis < 1.0 ) millis = 1.0;
4833 Sleep( (DWORD) millis );
4835 // Wake up, find out where we are now
4836 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4837 if ( FAILED( result ) ) {
4838 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4839 errorText_ = errorStream_.str();
4840 error( RtError::SYSTEM_ERROR );
4843 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4847 //if (statistics.readDeviceSafeLeadBytes < dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize ) )
4848 // statistics.readDeviceSafeLeadBytes = dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize );
4850 // Lock free space in the buffer
4851 result = dsBuffer->Lock( nextReadPos, bufferBytes, &buffer1,
4852 &bufferSize1, &buffer2, &bufferSize2, 0 );
4853 if ( FAILED( result ) ) {
4854 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
4855 errorText_ = errorStream_.str();
4856 error( RtError::SYSTEM_ERROR );
4859 if ( duplexPrerollBytes <= 0 ) {
4860 // Copy our buffer into the DS buffer
4861 CopyMemory( buffer, buffer1, bufferSize1 );
4862 if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
4865 memset( buffer, 0, bufferSize1 );
4866 if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
4867 duplexPrerollBytes -= bufferSize1 + bufferSize2;
4870 // Update our buffer offset and unlock sound buffer
4871 nextReadPos = ( nextReadPos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4872 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4873 if ( FAILED( result ) ) {
4874 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
4875 errorText_ = errorStream_.str();
4876 error( RtError::SYSTEM_ERROR );
4878 handle->bufferPointer[1] = nextReadPos;
4880 // No byte swapping necessary in DirectSound implementation.
4882 // If necessary, convert 8-bit data from unsigned to signed.
4883 if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
4884 for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
4886 // Do buffer conversion if necessary.
4887 if ( stream_.doConvertBuffer[1] )
4888 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
4890 #ifdef GENERATE_DEBUG_LOG
4891 if ( currentDebugLogEntry < debugLog.size() )
4893 TTickRecord &r = debugLog[currentDebugLogEntry++];
4894 r.currentReadPointer = currentReadPos;
4895 r.safeReadPointer = safeReadPos;
4896 r.currentWritePointer = currentWritePos;
4897 r.safeWritePointer = safeWritePos;
4898 r.readTime = readTime;
4899 r.writeTime = writeTime;
4900 r.nextReadPointer = handles[1].bufferPointer;
4901 r.nextWritePointer = handles[0].bufferPointer;
4906 MUTEX_UNLOCK( &stream_.mutex );
4908 RtApi::tickStreamTime();
4911 // Definitions for utility functions and callbacks
4912 // specific to the DirectSound implementation.
4914 extern "C" unsigned __stdcall callbackHandler( void *ptr )
4916 CallbackInfo *info = (CallbackInfo *) ptr;
4917 RtApiDs *object = (RtApiDs *) info->object;
4918 bool* isRunning = &info->isRunning;
4920 while ( *isRunning == true ) {
4921 object->callbackEvent();
4930 std::string convertTChar( LPCTSTR name )
4934 #if defined( UNICODE ) || defined( _UNICODE )
4935 // Yes, this conversion doesn't make sense for two-byte characters
4936 // but RtAudio is currently written to return an std::string of
4937 // one-byte chars for the device name.
4938 for ( unsigned int i=0; i<wcslen( name ); i++ )
4939 s.push_back( name[i] );
4941 s.append( std::string( name ) );
4947 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
4948 LPCTSTR description,
4952 EnumInfo *info = (EnumInfo *) lpContext;
4955 if ( info->isInput == true ) {
4957 LPDIRECTSOUNDCAPTURE object;
4959 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
4960 if ( hr != DS_OK ) return TRUE;
4962 caps.dwSize = sizeof(caps);
4963 hr = object->GetCaps( &caps );
4964 if ( hr == DS_OK ) {
4965 if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
4972 LPDIRECTSOUND object;
4973 hr = DirectSoundCreate( lpguid, &object, NULL );
4974 if ( hr != DS_OK ) return TRUE;
4976 caps.dwSize = sizeof(caps);
4977 hr = object->GetCaps( &caps );
4978 if ( hr == DS_OK ) {
4979 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
4985 if ( info->getDefault && lpguid == NULL ) return FALSE;
4987 if ( info->findIndex && info->counter > info->index ) {
4989 info->name = convertTChar( description );
4996 static char* getErrorString( int code )
5000 case DSERR_ALLOCATED:
5001 return "Already allocated";
5003 case DSERR_CONTROLUNAVAIL:
5004 return "Control unavailable";
5006 case DSERR_INVALIDPARAM:
5007 return "Invalid parameter";
5009 case DSERR_INVALIDCALL:
5010 return "Invalid call";
5013 return "Generic error";
5015 case DSERR_PRIOLEVELNEEDED:
5016 return "Priority level needed";
5018 case DSERR_OUTOFMEMORY:
5019 return "Out of memory";
5021 case DSERR_BADFORMAT:
5022 return "The sample rate or the channel format is not supported";
5024 case DSERR_UNSUPPORTED:
5025 return "Not supported";
5027 case DSERR_NODRIVER:
5030 case DSERR_ALREADYINITIALIZED:
5031 return "Already initialized";
5033 case DSERR_NOAGGREGATION:
5034 return "No aggregation";
5036 case DSERR_BUFFERLOST:
5037 return "Buffer lost";
5039 case DSERR_OTHERAPPHASPRIO:
5040 return "Another application already has priority";
5042 case DSERR_UNINITIALIZED:
5043 return "Uninitialized";
5046 return "DirectSound unknown error";
5049 //******************** End of __WINDOWS_DS__ *********************//
5053 #if defined(__LINUX_ALSA__)
5055 #include <alsa/asoundlib.h>
5058 // A structure to hold various information related to the ALSA API
5061 snd_pcm_t *handles[2];
5064 pthread_cond_t runnable;
5067 :synchronized(false) { xrun[0] = false; xrun[1] = false; }
5070 extern "C" void *alsaCallbackHandler( void * ptr );
5072 RtApiAlsa :: RtApiAlsa()
5074 // Nothing to do here.
5077 RtApiAlsa :: ~RtApiAlsa()
5079 if ( stream_.state != STREAM_CLOSED ) closeStream();
5082 unsigned int RtApiAlsa :: getDeviceCount( void )
5084 unsigned nDevices = 0;
5085 int result, subdevice, card;
5089 // Count cards and devices
5091 snd_card_next( &card );
5092 while ( card >= 0 ) {
5093 sprintf( name, "hw:%d", card );
5094 result = snd_ctl_open( &handle, name, 0 );
5096 errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5097 errorText_ = errorStream_.str();
5098 error( RtError::WARNING );
5103 result = snd_ctl_pcm_next_device( handle, &subdevice );
5105 errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
5106 errorText_ = errorStream_.str();
5107 error( RtError::WARNING );
5110 if ( subdevice < 0 )
5115 snd_ctl_close( handle );
5116 snd_card_next( &card );
5122 RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
5124 RtAudio::DeviceInfo info;
5125 info.probed = false;
5127 unsigned nDevices = 0;
5128 int result, subdevice, card;
5132 // Count cards and devices
5134 snd_card_next( &card );
5135 while ( card >= 0 ) {
5136 sprintf( name, "hw:%d", card );
5137 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5139 errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5140 errorText_ = errorStream_.str();
5141 error( RtError::WARNING );
5146 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5148 errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
5149 errorText_ = errorStream_.str();
5150 error( RtError::WARNING );
5153 if ( subdevice < 0 ) break;
5154 if ( nDevices == device ) {
5155 sprintf( name, "hw:%d,%d", card, subdevice );
5161 snd_ctl_close( chandle );
5162 snd_card_next( &card );
5165 if ( nDevices == 0 ) {
5166 errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
5167 error( RtError::INVALID_USE );
5170 if ( device >= nDevices ) {
5171 errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
5172 error( RtError::INVALID_USE );
5177 // If a stream is already open, we cannot probe the stream devices.
5178 // Thus, use the saved results.
5179 if ( stream_.state != STREAM_CLOSED &&
5180 ( stream_.device[0] == device || stream_.device[1] == device ) ) {
5181 if ( device >= devices_.size() ) {
5182 errorText_ = "RtApiAlsa::getDeviceInfo: device ID was not present before stream was opened.";
5183 error( RtError::WARNING );
5186 return devices_[ device ];
5189 int openMode = SND_PCM_ASYNC;
5190 snd_pcm_stream_t stream;
5191 snd_pcm_info_t *pcminfo;
5192 snd_pcm_info_alloca( &pcminfo );
5194 snd_pcm_hw_params_t *params;
5195 snd_pcm_hw_params_alloca( &params );
5197 // First try for playback
5198 stream = SND_PCM_STREAM_PLAYBACK;
5199 snd_pcm_info_set_device( pcminfo, subdevice );
5200 snd_pcm_info_set_subdevice( pcminfo, 0 );
5201 snd_pcm_info_set_stream( pcminfo, stream );
5203 result = snd_ctl_pcm_info( chandle, pcminfo );
5205 // Device probably doesn't support playback.
5209 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
5211 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5212 errorText_ = errorStream_.str();
5213 error( RtError::WARNING );
5217 // The device is open ... fill the parameter structure.
5218 result = snd_pcm_hw_params_any( phandle, params );
5220 snd_pcm_close( phandle );
5221 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5222 errorText_ = errorStream_.str();
5223 error( RtError::WARNING );
5227 // Get output channel information.
5229 result = snd_pcm_hw_params_get_channels_max( params, &value );
5231 snd_pcm_close( phandle );
5232 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
5233 errorText_ = errorStream_.str();
5234 error( RtError::WARNING );
5237 info.outputChannels = value;
5238 snd_pcm_close( phandle );
5241 // Now try for capture
5242 stream = SND_PCM_STREAM_CAPTURE;
5243 snd_pcm_info_set_stream( pcminfo, stream );
5245 result = snd_ctl_pcm_info( chandle, pcminfo );
5246 snd_ctl_close( chandle );
5248 // Device probably doesn't support capture.
5249 if ( info.outputChannels == 0 ) return info;
5250 goto probeParameters;
5253 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5255 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5256 errorText_ = errorStream_.str();
5257 error( RtError::WARNING );
5258 if ( info.outputChannels == 0 ) return info;
5259 goto probeParameters;
5262 // The device is open ... fill the parameter structure.
5263 result = snd_pcm_hw_params_any( phandle, params );
5265 snd_pcm_close( phandle );
5266 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5267 errorText_ = errorStream_.str();
5268 error( RtError::WARNING );
5269 if ( info.outputChannels == 0 ) return info;
5270 goto probeParameters;
5273 result = snd_pcm_hw_params_get_channels_max( params, &value );
5275 snd_pcm_close( phandle );
5276 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
5277 errorText_ = errorStream_.str();
5278 error( RtError::WARNING );
5279 if ( info.outputChannels == 0 ) return info;
5280 goto probeParameters;
5282 info.inputChannels = value;
5283 snd_pcm_close( phandle );
5285 // If device opens for both playback and capture, we determine the channels.
5286 if ( info.outputChannels > 0 && info.inputChannels > 0 )
5287 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
5289 // ALSA doesn't provide default devices so we'll use the first available one.
5290 if ( device == 0 && info.outputChannels > 0 )
5291 info.isDefaultOutput = true;
5292 if ( device == 0 && info.inputChannels > 0 )
5293 info.isDefaultInput = true;
5296 // At this point, we just need to figure out the supported data
5297 // formats and sample rates. We'll proceed by opening the device in
5298 // the direction with the maximum number of channels, or playback if
5299 // they are equal. This might limit our sample rate options, but so
5302 if ( info.outputChannels >= info.inputChannels )
5303 stream = SND_PCM_STREAM_PLAYBACK;
5305 stream = SND_PCM_STREAM_CAPTURE;
5306 snd_pcm_info_set_stream( pcminfo, stream );
5308 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5310 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5311 errorText_ = errorStream_.str();
5312 error( RtError::WARNING );
5316 // The device is open ... fill the parameter structure.
5317 result = snd_pcm_hw_params_any( phandle, params );
5319 snd_pcm_close( phandle );
5320 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5321 errorText_ = errorStream_.str();
5322 error( RtError::WARNING );
5326 // Test our discrete set of sample rate values.
5327 info.sampleRates.clear();
5328 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
5329 if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 )
5330 info.sampleRates.push_back( SAMPLE_RATES[i] );
5332 if ( info.sampleRates.size() == 0 ) {
5333 snd_pcm_close( phandle );
5334 errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
5335 errorText_ = errorStream_.str();
5336 error( RtError::WARNING );
5340 // Probe the supported data formats ... we don't care about endian-ness just yet
5341 snd_pcm_format_t format;
5342 info.nativeFormats = 0;
5343 format = SND_PCM_FORMAT_S8;
5344 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5345 info.nativeFormats |= RTAUDIO_SINT8;
5346 format = SND_PCM_FORMAT_S16;
5347 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5348 info.nativeFormats |= RTAUDIO_SINT16;
5349 format = SND_PCM_FORMAT_S24;
5350 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5351 info.nativeFormats |= RTAUDIO_SINT24;
5352 format = SND_PCM_FORMAT_S32;
5353 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5354 info.nativeFormats |= RTAUDIO_SINT32;
5355 format = SND_PCM_FORMAT_FLOAT;
5356 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5357 info.nativeFormats |= RTAUDIO_FLOAT32;
5358 format = SND_PCM_FORMAT_FLOAT64;
5359 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5360 info.nativeFormats |= RTAUDIO_FLOAT64;
5362 // Check that we have at least one supported format
5363 if ( info.nativeFormats == 0 ) {
5364 errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
5365 errorText_ = errorStream_.str();
5366 error( RtError::WARNING );
5370 // Get the device name
5372 result = snd_card_get_name( card, &cardname );
5374 sprintf( name, "hw:%s,%d", cardname, subdevice );
5377 // That's all ... close the device and return
5378 snd_pcm_close( phandle );
5383 void RtApiAlsa :: saveDeviceInfo( void )
5387 unsigned int nDevices = getDeviceCount();
5388 devices_.resize( nDevices );
5389 for ( unsigned int i=0; i<nDevices; i++ )
5390 devices_[i] = getDeviceInfo( i );
5393 bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
5394 unsigned int firstChannel, unsigned int sampleRate,
5395 RtAudioFormat format, unsigned int *bufferSize,
5396 RtAudio::StreamOptions *options )
5399 #if defined(__RTAUDIO_DEBUG__)
5401 snd_output_stdio_attach(&out, stderr, 0);
5404 // I'm not using the "plug" interface ... too much inconsistent behavior.
5406 unsigned nDevices = 0;
5407 int result, subdevice, card;
5411 // Count cards and devices
5413 snd_card_next( &card );
5414 while ( card >= 0 ) {
5415 sprintf( name, "hw:%d", card );
5416 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5418 errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5419 errorText_ = errorStream_.str();
5424 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5425 if ( result < 0 ) break;
5426 if ( subdevice < 0 ) break;
5427 if ( nDevices == device ) {
5428 sprintf( name, "hw:%d,%d", card, subdevice );
5429 snd_ctl_close( chandle );
5434 snd_ctl_close( chandle );
5435 snd_card_next( &card );
5438 if ( nDevices == 0 ) {
5439 // This should not happen because a check is made before this function is called.
5440 errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
5444 if ( device >= nDevices ) {
5445 // This should not happen because a check is made before this function is called.
5446 errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
5452 // The getDeviceInfo() function will not work for a device that is
5453 // already open. Thus, we'll probe the system before opening a
5454 // stream and save the results for use by getDeviceInfo().
5455 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) // only do once
5456 this->saveDeviceInfo();
5458 snd_pcm_stream_t stream;
5459 if ( mode == OUTPUT )
5460 stream = SND_PCM_STREAM_PLAYBACK;
5462 stream = SND_PCM_STREAM_CAPTURE;
5465 int openMode = SND_PCM_ASYNC;
5466 result = snd_pcm_open( &phandle, name, stream, openMode );
5468 if ( mode == OUTPUT )
5469 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
5471 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
5472 errorText_ = errorStream_.str();
5476 // Fill the parameter structure.
5477 snd_pcm_hw_params_t *hw_params;
5478 snd_pcm_hw_params_alloca( &hw_params );
5479 result = snd_pcm_hw_params_any( phandle, hw_params );
5481 snd_pcm_close( phandle );
5482 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
5483 errorText_ = errorStream_.str();
5487 #if defined(__RTAUDIO_DEBUG__)
5488 fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
5489 snd_pcm_hw_params_dump( hw_params, out );
5492 // Set access ... check user preference.
5493 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
5494 stream_.userInterleaved = false;
5495 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5497 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5498 stream_.deviceInterleaved[mode] = true;
5501 stream_.deviceInterleaved[mode] = false;
5504 stream_.userInterleaved = true;
5505 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5507 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5508 stream_.deviceInterleaved[mode] = false;
5511 stream_.deviceInterleaved[mode] = true;
5515 snd_pcm_close( phandle );
5516 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
5517 errorText_ = errorStream_.str();
5521 // Determine how to set the device format.
5522 stream_.userFormat = format;
5523 snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
5525 if ( format == RTAUDIO_SINT8 )
5526 deviceFormat = SND_PCM_FORMAT_S8;
5527 else if ( format == RTAUDIO_SINT16 )
5528 deviceFormat = SND_PCM_FORMAT_S16;
5529 else if ( format == RTAUDIO_SINT24 )
5530 deviceFormat = SND_PCM_FORMAT_S24;
5531 else if ( format == RTAUDIO_SINT32 )
5532 deviceFormat = SND_PCM_FORMAT_S32;
5533 else if ( format == RTAUDIO_FLOAT32 )
5534 deviceFormat = SND_PCM_FORMAT_FLOAT;
5535 else if ( format == RTAUDIO_FLOAT64 )
5536 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5538 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
5539 stream_.deviceFormat[mode] = format;
5543 // The user requested format is not natively supported by the device.
5544 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5545 if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
5546 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5550 deviceFormat = SND_PCM_FORMAT_FLOAT;
5551 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5552 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5556 deviceFormat = SND_PCM_FORMAT_S32;
5557 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5558 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5562 deviceFormat = SND_PCM_FORMAT_S24;
5563 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5564 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
5568 deviceFormat = SND_PCM_FORMAT_S16;
5569 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5570 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5574 deviceFormat = SND_PCM_FORMAT_S8;
5575 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5576 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
5580 // If we get here, no supported format was found.
5581 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
5582 errorText_ = errorStream_.str();
5586 result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
5588 snd_pcm_close( phandle );
5589 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
5590 errorText_ = errorStream_.str();
5594 // Determine whether byte-swaping is necessary.
5595 stream_.doByteSwap[mode] = false;
5596 if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
5597 result = snd_pcm_format_cpu_endian( deviceFormat );
5599 stream_.doByteSwap[mode] = true;
5600 else if (result < 0) {
5601 snd_pcm_close( phandle );
5602 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
5603 errorText_ = errorStream_.str();
5608 // Set the sample rate.
5609 result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
5611 snd_pcm_close( phandle );
5612 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
5613 errorText_ = errorStream_.str();
5617 // Determine the number of channels for this device. We support a possible
5618 // minimum device channel number > than the value requested by the user.
5619 stream_.nUserChannels[mode] = channels;
5621 result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
5622 unsigned int deviceChannels = value;
5623 if ( result < 0 || deviceChannels < channels + firstChannel ) {
5624 snd_pcm_close( phandle );
5625 errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
5626 errorText_ = errorStream_.str();
5630 result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
5632 snd_pcm_close( phandle );
5633 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
5634 errorText_ = errorStream_.str();
5637 deviceChannels = value;
5638 if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
5639 stream_.nDeviceChannels[mode] = deviceChannels;
5641 // Set the device channels.
5642 result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
5644 snd_pcm_close( phandle );
5645 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
5646 errorText_ = errorStream_.str();
5650 // Set the buffer number, which in ALSA is referred to as the "period".
5651 int totalSize, dir = 0;
5652 unsigned int periods = 0;
5653 if ( options ) periods = options->numberOfBuffers;
5654 totalSize = *bufferSize * periods;
5656 // Set the buffer (or period) size.
5657 snd_pcm_uframes_t periodSize = *bufferSize;
5658 result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
5660 snd_pcm_close( phandle );
5661 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
5662 errorText_ = errorStream_.str();
5665 *bufferSize = periodSize;
5667 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
5668 else periods = totalSize / *bufferSize;
5669 // Even though the hardware might allow 1 buffer, it won't work reliably.
5670 if ( periods < 2 ) periods = 2;
5671 result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
5673 snd_pcm_close( phandle );
5674 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
5675 errorText_ = errorStream_.str();
5679 // If attempting to setup a duplex stream, the bufferSize parameter
5680 // MUST be the same in both directions!
5681 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
5682 errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
5683 errorText_ = errorStream_.str();
5687 stream_.bufferSize = *bufferSize;
5689 // Install the hardware configuration
5690 result = snd_pcm_hw_params( phandle, hw_params );
5692 snd_pcm_close( phandle );
5693 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5694 errorText_ = errorStream_.str();
5698 #if defined(__RTAUDIO_DEBUG__)
5699 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
5700 snd_pcm_hw_params_dump( hw_params, out );
5703 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
5704 snd_pcm_sw_params_t *sw_params = NULL;
5705 snd_pcm_sw_params_alloca( &sw_params );
5706 snd_pcm_sw_params_current( phandle, sw_params );
5707 snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
5708 snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, ULONG_MAX );
5709 snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
5711 // The following two settings were suggested by Theo Veenker
5712 //snd_pcm_sw_params_set_avail_min( phandle, sw_params, *bufferSize );
5713 //snd_pcm_sw_params_set_xfer_align( phandle, sw_params, 1 );
5715 // here are two options for a fix
5716 //snd_pcm_sw_params_set_silence_size( phandle, sw_params, ULONG_MAX );
5717 snd_pcm_uframes_t val;
5718 snd_pcm_sw_params_get_boundary( sw_params, &val );
5719 snd_pcm_sw_params_set_silence_size( phandle, sw_params, val );
5721 result = snd_pcm_sw_params( phandle, sw_params );
5723 snd_pcm_close( phandle );
5724 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5725 errorText_ = errorStream_.str();
5729 #if defined(__RTAUDIO_DEBUG__)
5730 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
5731 snd_pcm_sw_params_dump( sw_params, out );
5734 // Set flags for buffer conversion
5735 stream_.doConvertBuffer[mode] = false;
5736 if ( stream_.userFormat != stream_.deviceFormat[mode] )
5737 stream_.doConvertBuffer[mode] = true;
5738 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
5739 stream_.doConvertBuffer[mode] = true;
5740 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
5741 stream_.nUserChannels[mode] > 1 )
5742 stream_.doConvertBuffer[mode] = true;
5744 // Allocate the ApiHandle if necessary and then save.
5745 AlsaHandle *apiInfo = 0;
5746 if ( stream_.apiHandle == 0 ) {
5748 apiInfo = (AlsaHandle *) new AlsaHandle;
5750 catch ( std::bad_alloc& ) {
5751 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
5755 if ( pthread_cond_init( &apiInfo->runnable, NULL ) ) {
5756 errorText_ = "RtApiAlsa::probeDeviceOpen: error initializing pthread condition variable.";
5760 stream_.apiHandle = (void *) apiInfo;
5761 apiInfo->handles[0] = 0;
5762 apiInfo->handles[1] = 0;
5765 apiInfo = (AlsaHandle *) stream_.apiHandle;
5767 apiInfo->handles[mode] = phandle;
5769 // Allocate necessary internal buffers.
5770 unsigned long bufferBytes;
5771 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
5772 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
5773 if ( stream_.userBuffer[mode] == NULL ) {
5774 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
5778 if ( stream_.doConvertBuffer[mode] ) {
5780 bool makeBuffer = true;
5781 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
5782 if ( mode == INPUT ) {
5783 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5784 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
5785 if ( bufferBytes <= bytesOut ) makeBuffer = false;
5790 bufferBytes *= *bufferSize;
5791 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
5792 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
5793 if ( stream_.deviceBuffer == NULL ) {
5794 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
5800 stream_.sampleRate = sampleRate;
5801 stream_.nBuffers = periods;
5802 stream_.device[mode] = device;
5803 stream_.state = STREAM_STOPPED;
5805 // Setup the buffer conversion information structure.
5806 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
5808 // Setup thread if necessary.
5809 if ( stream_.mode == OUTPUT && mode == INPUT ) {
5810 // We had already set up an output stream.
5811 stream_.mode = DUPLEX;
5812 // Link the streams if possible.
5813 apiInfo->synchronized = false;
5814 if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
5815 apiInfo->synchronized = true;
5817 errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
5818 error( RtError::WARNING );
5822 stream_.mode = mode;
5824 // Setup callback thread.
5825 stream_.callbackInfo.object = (void *) this;
5827 // Set the thread attributes for joinable and realtime scheduling
5828 // priority (optional). The higher priority will only take affect
5829 // if the program is run as root or suid. Note, under Linux
5830 // processes with CAP_SYS_NICE privilege, a user can change
5831 // scheduling policy and priority (thus need not be root). See
5832 // POSIX "capabilities".
5833 pthread_attr_t attr;
5834 pthread_attr_init( &attr );
5835 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
5836 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
5837 if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
5838 struct sched_param param;
5839 int priority = options->priority;
5840 int min = sched_get_priority_min( SCHED_RR );
5841 int max = sched_get_priority_max( SCHED_RR );
5842 if ( priority < min ) priority = min;
5843 else if ( priority > max ) priority = max;
5844 param.sched_priority = priority;
5845 pthread_attr_setschedparam( &attr, &param );
5846 pthread_attr_setschedpolicy( &attr, SCHED_RR );
5849 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5851 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5854 stream_.callbackInfo.isRunning = true;
5855 result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
5856 pthread_attr_destroy( &attr );
5858 stream_.callbackInfo.isRunning = false;
5859 errorText_ = "RtApiAlsa::error creating callback thread!";
5868 pthread_cond_destroy( &apiInfo->runnable );
5869 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5870 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5872 stream_.apiHandle = 0;
5875 for ( int i=0; i<2; i++ ) {
5876 if ( stream_.userBuffer[i] ) {
5877 free( stream_.userBuffer[i] );
5878 stream_.userBuffer[i] = 0;
5882 if ( stream_.deviceBuffer ) {
5883 free( stream_.deviceBuffer );
5884 stream_.deviceBuffer = 0;
5890 void RtApiAlsa :: closeStream()
5892 if ( stream_.state == STREAM_CLOSED ) {
5893 errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
5894 error( RtError::WARNING );
5898 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5899 stream_.callbackInfo.isRunning = false;
5900 MUTEX_LOCK( &stream_.mutex );
5901 if ( stream_.state == STREAM_STOPPED )
5902 pthread_cond_signal( &apiInfo->runnable );
5903 MUTEX_UNLOCK( &stream_.mutex );
5904 pthread_join( stream_.callbackInfo.thread, NULL );
5906 if ( stream_.state == STREAM_RUNNING ) {
5907 stream_.state = STREAM_STOPPED;
5908 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
5909 snd_pcm_drop( apiInfo->handles[0] );
5910 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
5911 snd_pcm_drop( apiInfo->handles[1] );
5915 pthread_cond_destroy( &apiInfo->runnable );
5916 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5917 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5919 stream_.apiHandle = 0;
5922 for ( int i=0; i<2; i++ ) {
5923 if ( stream_.userBuffer[i] ) {
5924 free( stream_.userBuffer[i] );
5925 stream_.userBuffer[i] = 0;
5929 if ( stream_.deviceBuffer ) {
5930 free( stream_.deviceBuffer );
5931 stream_.deviceBuffer = 0;
5934 stream_.mode = UNINITIALIZED;
5935 stream_.state = STREAM_CLOSED;
5938 void RtApiAlsa :: startStream()
5940 // This method calls snd_pcm_prepare if the device isn't already in that state.
5943 if ( stream_.state == STREAM_RUNNING ) {
5944 errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
5945 error( RtError::WARNING );
5949 MUTEX_LOCK( &stream_.mutex );
5952 snd_pcm_state_t state;
5953 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5954 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5955 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5956 state = snd_pcm_state( handle[0] );
5957 if ( state != SND_PCM_STATE_PREPARED ) {
5958 result = snd_pcm_prepare( handle[0] );
5960 errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
5961 errorText_ = errorStream_.str();
5967 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5968 state = snd_pcm_state( handle[1] );
5969 if ( state != SND_PCM_STATE_PREPARED ) {
5970 result = snd_pcm_prepare( handle[1] );
5972 errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
5973 errorText_ = errorStream_.str();
5979 stream_.state = STREAM_RUNNING;
5982 MUTEX_UNLOCK( &stream_.mutex );
5984 pthread_cond_signal( &apiInfo->runnable );
5986 if ( result >= 0 ) return;
5987 error( RtError::SYSTEM_ERROR );
5990 void RtApiAlsa :: stopStream()
5993 if ( stream_.state == STREAM_STOPPED ) {
5994 errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
5995 error( RtError::WARNING );
5999 MUTEX_LOCK( &stream_.mutex );
6001 if ( stream_.state == STREAM_STOPPED ) {
6002 MUTEX_UNLOCK( &stream_.mutex );
6007 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6008 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
6009 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6010 if ( apiInfo->synchronized )
6011 result = snd_pcm_drop( handle[0] );
6013 result = snd_pcm_drain( handle[0] );
6015 errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
6016 errorText_ = errorStream_.str();
6021 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
6022 result = snd_pcm_drop( handle[1] );
6024 errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
6025 errorText_ = errorStream_.str();
6031 stream_.state = STREAM_STOPPED;
6032 MUTEX_UNLOCK( &stream_.mutex );
6034 if ( result >= 0 ) return;
6035 error( RtError::SYSTEM_ERROR );
6038 void RtApiAlsa :: abortStream()
6041 if ( stream_.state == STREAM_STOPPED ) {
6042 errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
6043 error( RtError::WARNING );
6047 MUTEX_LOCK( &stream_.mutex );
6049 if ( stream_.state == STREAM_STOPPED ) {
6050 MUTEX_UNLOCK( &stream_.mutex );
6055 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6056 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
6057 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6058 result = snd_pcm_drop( handle[0] );
6060 errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
6061 errorText_ = errorStream_.str();
6066 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
6067 result = snd_pcm_drop( handle[1] );
6069 errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
6070 errorText_ = errorStream_.str();
6076 stream_.state = STREAM_STOPPED;
6077 MUTEX_UNLOCK( &stream_.mutex );
6079 if ( result >= 0 ) return;
6080 error( RtError::SYSTEM_ERROR );
6083 void RtApiAlsa :: callbackEvent()
6085 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6086 if ( stream_.state == STREAM_STOPPED ) {
6087 MUTEX_LOCK( &stream_.mutex );
6088 pthread_cond_wait( &apiInfo->runnable, &stream_.mutex );
6089 if ( stream_.state != STREAM_RUNNING ) {
6090 MUTEX_UNLOCK( &stream_.mutex );
6093 MUTEX_UNLOCK( &stream_.mutex );
6096 if ( stream_.state == STREAM_CLOSED ) {
6097 errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
6098 error( RtError::WARNING );
6102 int doStopStream = 0;
6103 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
6104 double streamTime = getStreamTime();
6105 RtAudioStreamStatus status = 0;
6106 if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
6107 status |= RTAUDIO_OUTPUT_UNDERFLOW;
6108 apiInfo->xrun[0] = false;
6110 if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
6111 status |= RTAUDIO_INPUT_OVERFLOW;
6112 apiInfo->xrun[1] = false;
6114 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
6115 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
6117 if ( doStopStream == 2 ) {
6122 MUTEX_LOCK( &stream_.mutex );
6124 // The state might change while waiting on a mutex.
6125 if ( stream_.state == STREAM_STOPPED ) goto unlock;
6131 snd_pcm_sframes_t frames;
6132 RtAudioFormat format;
6133 handle = (snd_pcm_t **) apiInfo->handles;
6135 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
6137 // Setup parameters.
6138 if ( stream_.doConvertBuffer[1] ) {
6139 buffer = stream_.deviceBuffer;
6140 channels = stream_.nDeviceChannels[1];
6141 format = stream_.deviceFormat[1];
6144 buffer = stream_.userBuffer[1];
6145 channels = stream_.nUserChannels[1];
6146 format = stream_.userFormat;
6149 // Read samples from device in interleaved/non-interleaved format.
6150 if ( stream_.deviceInterleaved[1] )
6151 result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
6153 void *bufs[channels];
6154 size_t offset = stream_.bufferSize * formatBytes( format );
6155 for ( int i=0; i<channels; i++ )
6156 bufs[i] = (void *) (buffer + (i * offset));
6157 result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
6160 if ( result < (int) stream_.bufferSize ) {
6161 // Either an error or overrun occured.
6162 if ( result == -EPIPE ) {
6163 snd_pcm_state_t state = snd_pcm_state( handle[1] );
6164 if ( state == SND_PCM_STATE_XRUN ) {
6165 apiInfo->xrun[1] = true;
6166 result = snd_pcm_prepare( handle[1] );
6168 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
6169 errorText_ = errorStream_.str();
6173 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
6174 errorText_ = errorStream_.str();
6178 errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
6179 errorText_ = errorStream_.str();
6181 error( RtError::WARNING );
6185 // Do byte swapping if necessary.
6186 if ( stream_.doByteSwap[1] )
6187 byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
6189 // Do buffer conversion if necessary.
6190 if ( stream_.doConvertBuffer[1] )
6191 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
6193 // Check stream latency
6194 result = snd_pcm_delay( handle[1], &frames );
6195 if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
6200 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6202 // Setup parameters and do buffer conversion if necessary.
6203 if ( stream_.doConvertBuffer[0] ) {
6204 buffer = stream_.deviceBuffer;
6205 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
6206 channels = stream_.nDeviceChannels[0];
6207 format = stream_.deviceFormat[0];
6210 buffer = stream_.userBuffer[0];
6211 channels = stream_.nUserChannels[0];
6212 format = stream_.userFormat;
6215 // Do byte swapping if necessary.
6216 if ( stream_.doByteSwap[0] )
6217 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
6219 // Write samples to device in interleaved/non-interleaved format.
6220 if ( stream_.deviceInterleaved[0] )
6221 result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
6223 void *bufs[channels];
6224 size_t offset = stream_.bufferSize * formatBytes( format );
6225 for ( int i=0; i<channels; i++ )
6226 bufs[i] = (void *) (buffer + (i * offset));
6227 result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
6230 if ( result < (int) stream_.bufferSize ) {
6231 // Either an error or underrun occured.
6232 if ( result == -EPIPE ) {
6233 snd_pcm_state_t state = snd_pcm_state( handle[0] );
6234 if ( state == SND_PCM_STATE_XRUN ) {
6235 apiInfo->xrun[0] = true;
6236 result = snd_pcm_prepare( handle[0] );
6238 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
6239 errorText_ = errorStream_.str();
6243 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
6244 errorText_ = errorStream_.str();
6248 errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
6249 errorText_ = errorStream_.str();
6251 error( RtError::WARNING );
6255 // Check stream latency
6256 result = snd_pcm_delay( handle[0], &frames );
6257 if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
6261 MUTEX_UNLOCK( &stream_.mutex );
6263 RtApi::tickStreamTime();
6264 if ( doStopStream == 1 ) this->stopStream();
6267 extern "C" void *alsaCallbackHandler( void *ptr )
6269 CallbackInfo *info = (CallbackInfo *) ptr;
6270 RtApiAlsa *object = (RtApiAlsa *) info->object;
6271 bool *isRunning = &info->isRunning;
6273 while ( *isRunning == true ) {
6274 pthread_testcancel();
6275 object->callbackEvent();
6278 pthread_exit( NULL );
6281 //******************** End of __LINUX_ALSA__ *********************//
6285 #if defined(__LINUX_OSS__)
6288 #include <sys/ioctl.h>
6291 #include "soundcard.h"
6295 extern "C" void *ossCallbackHandler(void * ptr);
// A structure to hold various information related to the OSS API
// implementation.
// NOTE(review): the struct header and the xrun/triggered members were
// restored from upstream RtAudio 4.0.6 (the visible constructor initializes
// them) — verify against the canonical source.
struct OssHandle {
  int id[2];             // device ids (output = [0], input = [1]; 0 = unused)
  bool xrun[2];          // over/underrun flags, reported on the next callback
  bool triggered;        // true once output has been triggered (see stopStream)
  pthread_cond_t runnable;  // signals the callback thread to start processing

  OssHandle()
    :triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
};
6309 RtApiOss :: RtApiOss()
6311 // Nothing to do here.
6314 RtApiOss :: ~RtApiOss()
6316 if ( stream_.state != STREAM_CLOSED ) closeStream();
6319 unsigned int RtApiOss :: getDeviceCount( void )
6321 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6322 if ( mixerfd == -1 ) {
6323 errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
6324 error( RtError::WARNING );
6328 oss_sysinfo sysinfo;
6329 if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
6331 errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
6332 error( RtError::WARNING );
6337 return sysinfo.numaudios;
6340 RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
6342 RtAudio::DeviceInfo info;
6343 info.probed = false;
6345 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6346 if ( mixerfd == -1 ) {
6347 errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
6348 error( RtError::WARNING );
6352 oss_sysinfo sysinfo;
6353 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6354 if ( result == -1 ) {
6356 errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
6357 error( RtError::WARNING );
6361 unsigned nDevices = sysinfo.numaudios;
6362 if ( nDevices == 0 ) {
6364 errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
6365 error( RtError::INVALID_USE );
6368 if ( device >= nDevices ) {
6370 errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
6371 error( RtError::INVALID_USE );
6374 oss_audioinfo ainfo;
6376 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6378 if ( result == -1 ) {
6379 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6380 errorText_ = errorStream_.str();
6381 error( RtError::WARNING );
6386 if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
6387 if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
6388 if ( ainfo.caps & PCM_CAP_DUPLEX ) {
6389 if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )
6390 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
6393 // Probe data formats ... do for input
6394 unsigned long mask = ainfo.iformats;
6395 if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
6396 info.nativeFormats |= RTAUDIO_SINT16;
6397 if ( mask & AFMT_S8 )
6398 info.nativeFormats |= RTAUDIO_SINT8;
6399 if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
6400 info.nativeFormats |= RTAUDIO_SINT32;
6401 if ( mask & AFMT_FLOAT )
6402 info.nativeFormats |= RTAUDIO_FLOAT32;
6403 if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
6404 info.nativeFormats |= RTAUDIO_SINT24;
6406 // Check that we have at least one supported format
6407 if ( info.nativeFormats == 0 ) {
6408 errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
6409 errorText_ = errorStream_.str();
6410 error( RtError::WARNING );
6414 // Probe the supported sample rates.
6415 info.sampleRates.clear();
6416 if ( ainfo.nrates ) {
6417 for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
6418 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6419 if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
6420 info.sampleRates.push_back( SAMPLE_RATES[k] );
6427 // Check min and max rate values;
6428 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6429 if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] )
6430 info.sampleRates.push_back( SAMPLE_RATES[k] );
6434 if ( info.sampleRates.size() == 0 ) {
6435 errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
6436 errorText_ = errorStream_.str();
6437 error( RtError::WARNING );
6441 info.name = ainfo.name;
6448 bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
6449 unsigned int firstChannel, unsigned int sampleRate,
6450 RtAudioFormat format, unsigned int *bufferSize,
6451 RtAudio::StreamOptions *options )
6453 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6454 if ( mixerfd == -1 ) {
6455 errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
6459 oss_sysinfo sysinfo;
6460 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6461 if ( result == -1 ) {
6463 errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
6467 unsigned nDevices = sysinfo.numaudios;
6468 if ( nDevices == 0 ) {
6469 // This should not happen because a check is made before this function is called.
6471 errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
6475 if ( device >= nDevices ) {
6476 // This should not happen because a check is made before this function is called.
6478 errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
6482 oss_audioinfo ainfo;
6484 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6486 if ( result == -1 ) {
6487 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6488 errorText_ = errorStream_.str();
6492 // Check if device supports input or output
6493 if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
6494 ( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
6495 if ( mode == OUTPUT )
6496 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
6498 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
6499 errorText_ = errorStream_.str();
6504 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6505 if ( mode == OUTPUT )
6507 else { // mode == INPUT
6508 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
6509 // We just set the same device for playback ... close and reopen for duplex (OSS only).
6510 close( handle->id[0] );
6512 if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
6513 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
6514 errorText_ = errorStream_.str();
6517 // Check that the number previously set channels is the same.
6518 if ( stream_.nUserChannels[0] != channels ) {
6519 errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
6520 errorText_ = errorStream_.str();
6529 // Set exclusive access if specified.
6530 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
6532 // Try to open the device.
6534 fd = open( ainfo.devnode, flags, 0 );
6536 if ( errno == EBUSY )
6537 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
6539 errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
6540 errorText_ = errorStream_.str();
6544 // For duplex operation, specifically set this mode (this doesn't seem to work).
6546 if ( flags | O_RDWR ) {
6547 result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
6548 if ( result == -1) {
6549 errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
6550 errorText_ = errorStream_.str();
6556 // Check the device channel support.
6557 stream_.nUserChannels[mode] = channels;
6558 if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
6560 errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
6561 errorText_ = errorStream_.str();
6565 // Set the number of channels.
6566 int deviceChannels = channels + firstChannel;
6567 result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
6568 if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
6570 errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
6571 errorText_ = errorStream_.str();
6574 stream_.nDeviceChannels[mode] = deviceChannels;
6576 // Get the data format mask
6578 result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
6579 if ( result == -1 ) {
6581 errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
6582 errorText_ = errorStream_.str();
6586 // Determine how to set the device format.
6587 stream_.userFormat = format;
6588 int deviceFormat = -1;
6589 stream_.doByteSwap[mode] = false;
6590 if ( format == RTAUDIO_SINT8 ) {
6591 if ( mask & AFMT_S8 ) {
6592 deviceFormat = AFMT_S8;
6593 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6596 else if ( format == RTAUDIO_SINT16 ) {
6597 if ( mask & AFMT_S16_NE ) {
6598 deviceFormat = AFMT_S16_NE;
6599 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6601 else if ( mask & AFMT_S16_OE ) {
6602 deviceFormat = AFMT_S16_OE;
6603 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6604 stream_.doByteSwap[mode] = true;
6607 else if ( format == RTAUDIO_SINT24 ) {
6608 if ( mask & AFMT_S24_NE ) {
6609 deviceFormat = AFMT_S24_NE;
6610 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6612 else if ( mask & AFMT_S24_OE ) {
6613 deviceFormat = AFMT_S24_OE;
6614 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6615 stream_.doByteSwap[mode] = true;
6618 else if ( format == RTAUDIO_SINT32 ) {
6619 if ( mask & AFMT_S32_NE ) {
6620 deviceFormat = AFMT_S32_NE;
6621 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6623 else if ( mask & AFMT_S32_OE ) {
6624 deviceFormat = AFMT_S32_OE;
6625 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6626 stream_.doByteSwap[mode] = true;
6630 if ( deviceFormat == -1 ) {
6631 // The user requested format is not natively supported by the device.
6632 if ( mask & AFMT_S16_NE ) {
6633 deviceFormat = AFMT_S16_NE;
6634 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6636 else if ( mask & AFMT_S32_NE ) {
6637 deviceFormat = AFMT_S32_NE;
6638 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6640 else if ( mask & AFMT_S24_NE ) {
6641 deviceFormat = AFMT_S24_NE;
6642 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6644 else if ( mask & AFMT_S16_OE ) {
6645 deviceFormat = AFMT_S16_OE;
6646 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6647 stream_.doByteSwap[mode] = true;
6649 else if ( mask & AFMT_S32_OE ) {
6650 deviceFormat = AFMT_S32_OE;
6651 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6652 stream_.doByteSwap[mode] = true;
6654 else if ( mask & AFMT_S24_OE ) {
6655 deviceFormat = AFMT_S24_OE;
6656 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6657 stream_.doByteSwap[mode] = true;
6659 else if ( mask & AFMT_S8) {
6660 deviceFormat = AFMT_S8;
6661 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6665 if ( stream_.deviceFormat[mode] == 0 ) {
6666 // This really shouldn't happen ...
6668 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
6669 errorText_ = errorStream_.str();
6673 // Set the data format.
6674 int temp = deviceFormat;
6675 result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
6676 if ( result == -1 || deviceFormat != temp ) {
6678 errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
6679 errorText_ = errorStream_.str();
6683 // Attempt to set the buffer size. According to OSS, the minimum
6684 // number of buffers is two. The supposed minimum buffer size is 16
6685 // bytes, so that will be our lower bound. The argument to this
6686 // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
6687 // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
6688 // We'll check the actual value used near the end of the setup
6690 int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
6691 if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
6693 if ( options ) buffers = options->numberOfBuffers;
6694 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
6695 if ( buffers < 2 ) buffers = 3;
6696 temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
6697 result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
6698 if ( result == -1 ) {
6700 errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
6701 errorText_ = errorStream_.str();
6704 stream_.nBuffers = buffers;
6706 // Save buffer size (in sample frames).
6707 *bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
6708 stream_.bufferSize = *bufferSize;
6710 // Set the sample rate.
6711 int srate = sampleRate;
6712 result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
6713 if ( result == -1 ) {
6715 errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
6716 errorText_ = errorStream_.str();
6720 // Verify the sample rate setup worked.
6721 if ( abs( srate - sampleRate ) > 100 ) {
6723 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
6724 errorText_ = errorStream_.str();
6727 stream_.sampleRate = sampleRate;
6729 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
6730 // We're doing duplex setup here.
6731 stream_.deviceFormat[0] = stream_.deviceFormat[1];
6732 stream_.nDeviceChannels[0] = deviceChannels;
6735 // Set interleaving parameters.
6736 stream_.userInterleaved = true;
6737 stream_.deviceInterleaved[mode] = true;
6738 if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
6739 stream_.userInterleaved = false;
6741 // Set flags for buffer conversion
6742 stream_.doConvertBuffer[mode] = false;
6743 if ( stream_.userFormat != stream_.deviceFormat[mode] )
6744 stream_.doConvertBuffer[mode] = true;
6745 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
6746 stream_.doConvertBuffer[mode] = true;
6747 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
6748 stream_.nUserChannels[mode] > 1 )
6749 stream_.doConvertBuffer[mode] = true;
6751 // Allocate the stream handles if necessary and then save.
6752 if ( stream_.apiHandle == 0 ) {
6754 handle = new OssHandle;
6756 catch ( std::bad_alloc& ) {
6757 errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
6761 if ( pthread_cond_init( &handle->runnable, NULL ) ) {
6762 errorText_ = "RtApiOss::probeDeviceOpen: error initializing pthread condition variable.";
6766 stream_.apiHandle = (void *) handle;
6769 handle = (OssHandle *) stream_.apiHandle;
6771 handle->id[mode] = fd;
6773 // Allocate necessary internal buffers.
6774 unsigned long bufferBytes;
6775 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
6776 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
6777 if ( stream_.userBuffer[mode] == NULL ) {
6778 errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
6782 if ( stream_.doConvertBuffer[mode] ) {
6784 bool makeBuffer = true;
6785 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
6786 if ( mode == INPUT ) {
6787 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
6788 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
6789 if ( bufferBytes <= bytesOut ) makeBuffer = false;
6794 bufferBytes *= *bufferSize;
6795 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
6796 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
6797 if ( stream_.deviceBuffer == NULL ) {
6798 errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
6804 stream_.device[mode] = device;
6805 stream_.state = STREAM_STOPPED;
6807 // Setup the buffer conversion information structure.
6808 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
6810 // Setup thread if necessary.
6811 if ( stream_.mode == OUTPUT && mode == INPUT ) {
6812 // We had already set up an output stream.
6813 stream_.mode = DUPLEX;
6814 if ( stream_.device[0] == device ) handle->id[0] = fd;
6817 stream_.mode = mode;
6819 // Setup callback thread.
6820 stream_.callbackInfo.object = (void *) this;
6822 // Set the thread attributes for joinable and realtime scheduling
6823 // priority. The higher priority will only take affect if the
6824 // program is run as root or suid.
6825 pthread_attr_t attr;
6826 pthread_attr_init( &attr );
6827 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
6828 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
6829 if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
6830 struct sched_param param;
6831 int priority = options->priority;
6832 int min = sched_get_priority_min( SCHED_RR );
6833 int max = sched_get_priority_max( SCHED_RR );
6834 if ( priority < min ) priority = min;
6835 else if ( priority > max ) priority = max;
6836 param.sched_priority = priority;
6837 pthread_attr_setschedparam( &attr, ¶m );
6838 pthread_attr_setschedpolicy( &attr, SCHED_RR );
6841 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6843 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6846 stream_.callbackInfo.isRunning = true;
6847 result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
6848 pthread_attr_destroy( &attr );
6850 stream_.callbackInfo.isRunning = false;
6851 errorText_ = "RtApiOss::error creating callback thread!";
6860 pthread_cond_destroy( &handle->runnable );
6861 if ( handle->id[0] ) close( handle->id[0] );
6862 if ( handle->id[1] ) close( handle->id[1] );
6864 stream_.apiHandle = 0;
6867 for ( int i=0; i<2; i++ ) {
6868 if ( stream_.userBuffer[i] ) {
6869 free( stream_.userBuffer[i] );
6870 stream_.userBuffer[i] = 0;
6874 if ( stream_.deviceBuffer ) {
6875 free( stream_.deviceBuffer );
6876 stream_.deviceBuffer = 0;
6882 void RtApiOss :: closeStream()
6884 if ( stream_.state == STREAM_CLOSED ) {
6885 errorText_ = "RtApiOss::closeStream(): no open stream to close!";
6886 error( RtError::WARNING );
6890 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6891 stream_.callbackInfo.isRunning = false;
6892 MUTEX_LOCK( &stream_.mutex );
6893 if ( stream_.state == STREAM_STOPPED )
6894 pthread_cond_signal( &handle->runnable );
6895 MUTEX_UNLOCK( &stream_.mutex );
6896 pthread_join( stream_.callbackInfo.thread, NULL );
6898 if ( stream_.state == STREAM_RUNNING ) {
6899 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
6900 ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6902 ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6903 stream_.state = STREAM_STOPPED;
6907 pthread_cond_destroy( &handle->runnable );
6908 if ( handle->id[0] ) close( handle->id[0] );
6909 if ( handle->id[1] ) close( handle->id[1] );
6911 stream_.apiHandle = 0;
6914 for ( int i=0; i<2; i++ ) {
6915 if ( stream_.userBuffer[i] ) {
6916 free( stream_.userBuffer[i] );
6917 stream_.userBuffer[i] = 0;
6921 if ( stream_.deviceBuffer ) {
6922 free( stream_.deviceBuffer );
6923 stream_.deviceBuffer = 0;
6926 stream_.mode = UNINITIALIZED;
6927 stream_.state = STREAM_CLOSED;
6930 void RtApiOss :: startStream()
6933 if ( stream_.state == STREAM_RUNNING ) {
6934 errorText_ = "RtApiOss::startStream(): the stream is already running!";
6935 error( RtError::WARNING );
6939 MUTEX_LOCK( &stream_.mutex );
6941 stream_.state = STREAM_RUNNING;
6943 // No need to do anything else here ... OSS automatically starts
6944 // when fed samples.
6946 MUTEX_UNLOCK( &stream_.mutex );
6948 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6949 pthread_cond_signal( &handle->runnable );
6952 void RtApiOss :: stopStream()
6955 if ( stream_.state == STREAM_STOPPED ) {
6956 errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
6957 error( RtError::WARNING );
6961 MUTEX_LOCK( &stream_.mutex );
6963 // The state might change while waiting on a mutex.
6964 if ( stream_.state == STREAM_STOPPED ) {
6965 MUTEX_UNLOCK( &stream_.mutex );
6970 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6971 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6973 // Flush the output with zeros a few times.
6976 RtAudioFormat format;
6978 if ( stream_.doConvertBuffer[0] ) {
6979 buffer = stream_.deviceBuffer;
6980 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
6981 format = stream_.deviceFormat[0];
6984 buffer = stream_.userBuffer[0];
6985 samples = stream_.bufferSize * stream_.nUserChannels[0];
6986 format = stream_.userFormat;
6989 memset( buffer, 0, samples * formatBytes(format) );
6990 for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
6991 result = write( handle->id[0], buffer, samples * formatBytes(format) );
6992 if ( result == -1 ) {
6993 errorText_ = "RtApiOss::stopStream: audio write error.";
6994 error( RtError::WARNING );
6998 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6999 if ( result == -1 ) {
7000 errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
7001 errorText_ = errorStream_.str();
7004 handle->triggered = false;
7007 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
7008 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
7009 if ( result == -1 ) {
7010 errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
7011 errorText_ = errorStream_.str();
7017 stream_.state = STREAM_STOPPED;
7018 MUTEX_UNLOCK( &stream_.mutex );
7020 if ( result != -1 ) return;
7021 error( RtError::SYSTEM_ERROR );
7024 void RtApiOss :: abortStream()
7027 if ( stream_.state == STREAM_STOPPED ) {
7028 errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
7029 error( RtError::WARNING );
7033 MUTEX_LOCK( &stream_.mutex );
7035 // The state might change while waiting on a mutex.
7036 if ( stream_.state == STREAM_STOPPED ) {
7037 MUTEX_UNLOCK( &stream_.mutex );
7042 OssHandle *handle = (OssHandle *) stream_.apiHandle;
7043 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
7044 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
7045 if ( result == -1 ) {
7046 errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
7047 errorText_ = errorStream_.str();
7050 handle->triggered = false;
7053 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
7054 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
7055 if ( result == -1 ) {
7056 errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
7057 errorText_ = errorStream_.str();
7063 stream_.state = STREAM_STOPPED;
7064 MUTEX_UNLOCK( &stream_.mutex );
7066 if ( result != -1 ) return;
7067 error( RtError::SYSTEM_ERROR );
void RtApiOss :: callbackEvent()
// Per-buffer workhorse of the OSS callback thread: waits while the
// stream is stopped, invokes the user callback with xrun status, then
// writes one buffer to and/or reads one buffer from the device,
// applying byte swapping and format conversion as configured.
OssHandle *handle = (OssHandle *) stream_.apiHandle;
if ( stream_.state == STREAM_STOPPED ) {
MUTEX_LOCK( &stream_.mutex );
// Block until startStream() signals handle->runnable.
pthread_cond_wait( &handle->runnable, &stream_.mutex );
if ( stream_.state != STREAM_RUNNING ) {
MUTEX_UNLOCK( &stream_.mutex );
MUTEX_UNLOCK( &stream_.mutex );
if ( stream_.state == STREAM_CLOSED ) {
errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
error( RtError::WARNING );
// Invoke user callback to get fresh output data.
int doStopStream = 0;
RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
double streamTime = getStreamTime();
RtAudioStreamStatus status = 0;
// Report and clear any output underflow recorded since the last callback.
if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
status |= RTAUDIO_OUTPUT_UNDERFLOW;
handle->xrun[0] = false;
// Report and clear any input overflow recorded since the last callback.
if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
status |= RTAUDIO_INPUT_OVERFLOW;
handle->xrun[1] = false;
doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
// Callback return value 2 requests an immediate abort (1 requests a
// drain-then-stop, handled at the bottom of this function).
if ( doStopStream == 2 ) {
this->abortStream();
MUTEX_LOCK( &stream_.mutex );
// The state might change while waiting on a mutex.
if ( stream_.state == STREAM_STOPPED ) goto unlock;
RtAudioFormat format;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Setup parameters and do buffer conversion if necessary.
if ( stream_.doConvertBuffer[0] ) {
buffer = stream_.deviceBuffer;
convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
samples = stream_.bufferSize * stream_.nDeviceChannels[0];
format = stream_.deviceFormat[0];
buffer = stream_.userBuffer[0];
samples = stream_.bufferSize * stream_.nUserChannels[0];
format = stream_.userFormat;
// Do byte swapping if necessary.
if ( stream_.doByteSwap[0] )
byteSwapBuffer( buffer, samples, format );
// First duplex pass only: prime the output while input is disabled,
// then enable both directions with one SETTRIGGER so they start in sync.
if ( stream_.mode == DUPLEX && handle->triggered == false ) {
ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
result = write( handle->id[0], buffer, samples * formatBytes(format) );
trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
handle->triggered = true;
// Write samples to device.
result = write( handle->id[0], buffer, samples * formatBytes(format) );
if ( result == -1 ) {
// We'll assume this is an underrun, though there isn't a
// specific means for determining that.
handle->xrun[0] = true;
errorText_ = "RtApiOss::callbackEvent: audio write error.";
error( RtError::WARNING );
// Continue on to input section.
if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// Setup parameters.
if ( stream_.doConvertBuffer[1] ) {
buffer = stream_.deviceBuffer;
samples = stream_.bufferSize * stream_.nDeviceChannels[1];
format = stream_.deviceFormat[1];
buffer = stream_.userBuffer[1];
samples = stream_.bufferSize * stream_.nUserChannels[1];
format = stream_.userFormat;
// Read samples from device.
result = read( handle->id[1], buffer, samples * formatBytes(format) );
if ( result == -1 ) {
// We'll assume this is an overrun, though there isn't a
// specific means for determining that.
handle->xrun[1] = true;
errorText_ = "RtApiOss::callbackEvent: audio read error.";
error( RtError::WARNING );
// Do byte swapping if necessary.
if ( stream_.doByteSwap[1] )
byteSwapBuffer( buffer, samples, format );
// Do buffer conversion if necessary.
if ( stream_.doConvertBuffer[1] )
convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
MUTEX_UNLOCK( &stream_.mutex );
RtApi::tickStreamTime();
// Callback return value 1: stop after this buffer has been processed.
if ( doStopStream == 1 ) this->stopStream();
7202 extern "C" void *ossCallbackHandler( void *ptr )
7204 CallbackInfo *info = (CallbackInfo *) ptr;
7205 RtApiOss *object = (RtApiOss *) info->object;
7206 bool *isRunning = &info->isRunning;
7208 while ( *isRunning == true ) {
7209 pthread_testcancel();
7210 object->callbackEvent();
7213 pthread_exit( NULL );
7216 //******************** End of __LINUX_OSS__ *********************//
7220 // *************************************************** //
7222 // Protected common (OS-independent) RtAudio methods.
7224 // *************************************************** //
7226 // This method can be modified to control the behavior of error
7227 // message printing.
7228 void RtApi :: error( RtError::Type type )
7230 errorStream_.str(""); // clear the ostringstream
7231 if ( type == RtError::WARNING && showWarnings_ == true )
7232 std::cerr << '\n' << errorText_ << "\n\n";
7234 throw( RtError( errorText_, type ) );
7237 void RtApi :: verifyStream()
7239 if ( stream_.state == STREAM_CLOSED ) {
7240 errorText_ = "RtApi:: a stream is not open!";
7241 error( RtError::INVALID_USE );
7245 void RtApi :: clearStreamInfo()
7247 stream_.mode = UNINITIALIZED;
7248 stream_.state = STREAM_CLOSED;
7249 stream_.sampleRate = 0;
7250 stream_.bufferSize = 0;
7251 stream_.nBuffers = 0;
7252 stream_.userFormat = 0;
7253 stream_.userInterleaved = true;
7254 stream_.streamTime = 0.0;
7255 stream_.apiHandle = 0;
7256 stream_.deviceBuffer = 0;
7257 stream_.callbackInfo.callback = 0;
7258 stream_.callbackInfo.userData = 0;
7259 stream_.callbackInfo.isRunning = false;
7260 for ( int i=0; i<2; i++ ) {
7261 stream_.device[i] = 11111;
7262 stream_.doConvertBuffer[i] = false;
7263 stream_.deviceInterleaved[i] = true;
7264 stream_.doByteSwap[i] = false;
7265 stream_.nUserChannels[i] = 0;
7266 stream_.nDeviceChannels[i] = 0;
7267 stream_.channelOffset[i] = 0;
7268 stream_.deviceFormat[i] = 0;
7269 stream_.latency[i] = 0;
7270 stream_.userBuffer[i] = 0;
7271 stream_.convertInfo[i].channels = 0;
7272 stream_.convertInfo[i].inJump = 0;
7273 stream_.convertInfo[i].outJump = 0;
7274 stream_.convertInfo[i].inFormat = 0;
7275 stream_.convertInfo[i].outFormat = 0;
7276 stream_.convertInfo[i].inOffset.clear();
7277 stream_.convertInfo[i].outOffset.clear();
7281 unsigned int RtApi :: formatBytes( RtAudioFormat format )
7283 if ( format == RTAUDIO_SINT16 )
7285 else if ( format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
7286 format == RTAUDIO_FLOAT32 )
7288 else if ( format == RTAUDIO_FLOAT64 )
7290 else if ( format == RTAUDIO_SINT8 )
7293 errorText_ = "RtApi::formatBytes: undefined format.";
7294 error( RtError::WARNING );
void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
// Precompute the per-channel offsets and jumps used by convertBuffer()
// for one direction.  mode selects the direction: INPUT converts the
// device buffer to the user buffer, otherwise user to device.
if ( mode == INPUT ) { // convert device to user buffer
stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
stream_.convertInfo[mode].outFormat = stream_.userFormat;
else { // convert user to device buffer
stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
stream_.convertInfo[mode].inFormat = stream_.userFormat;
stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
// Convert only as many channels as both sides have (the smaller jump).
if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
// Set up the interleave/deinterleave offsets.
if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
( mode == INPUT && stream_.userInterleaved ) ) {
// Non-interleaved source -> interleaved destination: source channels
// are bufferSize apart, destination channels are adjacent.
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].outOffset.push_back( k );
stream_.convertInfo[mode].inJump = 1;
// Interleaved source -> non-interleaved destination.
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k );
stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].outJump = 1;
else { // no (de)interleaving
if ( stream_.userInterleaved ) {
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k );
stream_.convertInfo[mode].outOffset.push_back( k );
// Both sides non-interleaved: channel-major layout on each side.
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].inJump = 1;
stream_.convertInfo[mode].outJump = 1;
// Add channel offset.
if ( firstChannel > 0 ) {
if ( stream_.deviceInterleaved[mode] ) {
// Interleaved device side: shift by whole channels.
if ( mode == OUTPUT ) {
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].outOffset[k] += firstChannel;
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].inOffset[k] += firstChannel;
// Non-interleaved device side: shift by whole channel buffers.
if ( mode == OUTPUT ) {
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
// This function does format conversion, input/output channel compensation, and
// data interleaving/deinterleaving. 24-bit integers are assumed to occupy
// the upper three bytes of a 32-bit integer.
// NOTE(review): the loop index `j' and the `scale' factor used below are
// declared elsewhere in this function — not visible here; confirm.
// Clear our device buffer when in/out duplex device channels are different
if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
// The body is a two-level dispatch: outer branch on the destination
// format, inner branch on the source format.  Integer samples are
// re-centered by +0.5 before scaling into [-1, 1] float ranges.
if (info.outFormat == RTAUDIO_FLOAT64) {
Float64 *out = (Float64 *)outBuffer;
if (info.inFormat == RTAUDIO_SINT8) {
signed char *in = (signed char *)inBuffer;
scale = 1.0 / 127.5;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
out[info.outOffset[j]] += 0.5;
out[info.outOffset[j]] *= scale;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT16) {
Int16 *in = (Int16 *)inBuffer;
scale = 1.0 / 32767.5;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
out[info.outOffset[j]] += 0.5;
out[info.outOffset[j]] *= scale;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT24) {
Int32 *in = (Int32 *)inBuffer;
scale = 1.0 / 8388607.5;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
// Mask keeps the low 24 bits of the 32-bit container.
out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0x00ffffff);
out[info.outOffset[j]] += 0.5;
out[info.outOffset[j]] *= scale;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT32) {
Int32 *in = (Int32 *)inBuffer;
scale = 1.0 / 2147483647.5;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
out[info.outOffset[j]] += 0.5;
out[info.outOffset[j]] *= scale;
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT32) {
Float32 *in = (Float32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT64) {
// Channel compensation and/or (de)interleaving only.
Float64 *in = (Float64 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = in[info.inOffset[j]];
out += info.outJump;
else if (info.outFormat == RTAUDIO_FLOAT32) {
Float32 *out = (Float32 *)outBuffer;
if (info.inFormat == RTAUDIO_SINT8) {
signed char *in = (signed char *)inBuffer;
scale = (Float32) ( 1.0 / 127.5 );
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
out[info.outOffset[j]] += 0.5;
out[info.outOffset[j]] *= scale;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT16) {
Int16 *in = (Int16 *)inBuffer;
scale = (Float32) ( 1.0 / 32767.5 );
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
out[info.outOffset[j]] += 0.5;
out[info.outOffset[j]] *= scale;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT24) {
Int32 *in = (Int32 *)inBuffer;
scale = (Float32) ( 1.0 / 8388607.5 );
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0x00ffffff);
out[info.outOffset[j]] += 0.5;
out[info.outOffset[j]] *= scale;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT32) {
Int32 *in = (Int32 *)inBuffer;
scale = (Float32) ( 1.0 / 2147483647.5 );
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
out[info.outOffset[j]] += 0.5;
out[info.outOffset[j]] *= scale;
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT32) {
// Channel compensation and/or (de)interleaving only.
Float32 *in = (Float32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = in[info.inOffset[j]];
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT64) {
Float64 *in = (Float64 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
out += info.outJump;
else if (info.outFormat == RTAUDIO_SINT32) {
Int32 *out = (Int32 *)outBuffer;
if (info.inFormat == RTAUDIO_SINT8) {
signed char *in = (signed char *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
// Left-shift promotes the narrower sample to full 32-bit scale.
out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
out[info.outOffset[j]] <<= 24;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT16) {
Int16 *in = (Int16 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
out[info.outOffset[j]] <<= 16;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT24) {
Int32 *in = (Int32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
out[info.outOffset[j]] <<= 8;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT32) {
// Channel compensation and/or (de)interleaving only.
Int32 *in = (Int32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = in[info.inOffset[j]];
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT32) {
Float32 *in = (Float32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT64) {
Float64 *in = (Float64 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
out += info.outJump;
else if (info.outFormat == RTAUDIO_SINT24) {
// 24-bit output also lives in a 32-bit container (upper three bytes).
Int32 *out = (Int32 *)outBuffer;
if (info.inFormat == RTAUDIO_SINT8) {
signed char *in = (signed char *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
out[info.outOffset[j]] <<= 16;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT16) {
Int16 *in = (Int16 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
out[info.outOffset[j]] <<= 8;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT24) {
// Channel compensation and/or (de)interleaving only.
Int32 *in = (Int32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = in[info.inOffset[j]];
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT32) {
Int32 *in = (Int32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
out[info.outOffset[j]] >>= 8;
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT32) {
Float32 *in = (Float32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT64) {
Float64 *in = (Float64 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
out += info.outJump;
else if (info.outFormat == RTAUDIO_SINT16) {
Int16 *out = (Int16 *)outBuffer;
if (info.inFormat == RTAUDIO_SINT8) {
signed char *in = (signed char *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
out[info.outOffset[j]] <<= 8;
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT16) {
// Channel compensation and/or (de)interleaving only.
Int16 *in = (Int16 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = in[info.inOffset[j]];
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT24) {
Int32 *in = (Int32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 8) & 0x0000ffff);
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT32) {
Int32 *in = (Int32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT32) {
Float32 *in = (Float32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT64) {
Float64 *in = (Float64 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
out += info.outJump;
else if (info.outFormat == RTAUDIO_SINT8) {
signed char *out = (signed char *)outBuffer;
if (info.inFormat == RTAUDIO_SINT8) {
// Channel compensation and/or (de)interleaving only.
signed char *in = (signed char *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = in[info.inOffset[j]];
out += info.outJump;
// NOTE(review): unlike the sibling branches this test lacks an `else';
// harmless here because the inFormat cases are mutually exclusive.
if (info.inFormat == RTAUDIO_SINT16) {
Int16 *in = (Int16 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT24) {
Int32 *in = (Int32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 16) & 0x000000ff);
out += info.outJump;
else if (info.inFormat == RTAUDIO_SINT32) {
Int32 *in = (Int32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT32) {
Float32 *in = (Float32 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
out += info.outJump;
else if (info.inFormat == RTAUDIO_FLOAT64) {
Float64 *in = (Float64 *)inBuffer;
for (unsigned int i=0; i<stream_.bufferSize; i++) {
for (j=0; j<info.channels; j++) {
out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
out += info.outJump;
7812 //static inline uint16_t bswap_16(uint16_t x) { return (x>>8) | (x<<8); }
7813 //static inline uint32_t bswap_32(uint32_t x) { return (bswap_16(x&0xffff)<<16) | (bswap_16(x>>16)); }
7814 //static inline uint64_t bswap_64(uint64_t x) { return (((unsigned long long)bswap_32(x&0xffffffffull))<<32) | (bswap_32(x>>32)); }
7816 void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
7822 if ( format == RTAUDIO_SINT16 ) {
7823 for ( unsigned int i=0; i<samples; i++ ) {
7824 // Swap 1st and 2nd bytes.
7829 // Increment 2 bytes.
7833 else if ( format == RTAUDIO_SINT24 ||
7834 format == RTAUDIO_SINT32 ||
7835 format == RTAUDIO_FLOAT32 ) {
7836 for ( unsigned int i=0; i<samples; i++ ) {
7837 // Swap 1st and 4th bytes.
7842 // Swap 2nd and 3rd bytes.
7848 // Increment 3 more bytes.
7852 else if ( format == RTAUDIO_FLOAT64 ) {
7853 for ( unsigned int i=0; i<samples; i++ ) {
7854 // Swap 1st and 8th bytes
7859 // Swap 2nd and 7th bytes
7865 // Swap 3rd and 6th bytes
7871 // Swap 4th and 5th bytes
7877 // Increment 5 more bytes.
7883 // Indentation settings for Vim and Emacs
7886 // c-basic-offset: 2
7887 // indent-tabs-mode: nil
7890 // vim: et sts=2 sw=2