1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), Macintosh OS X (CoreAudio and Jack), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2009 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 asked to send the modifications to the original developer so that
28 they can be incorporated into the canonical version. This is,
29 however, not a binding provision of this license.
31 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
34 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
35 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
36 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
39 /************************************************************************/
41 // RtAudio: Version 4.0.5
// Table of candidate sample rates (in Hz) that the API-specific probe
// code tests against each device; MAX_SAMPLE_RATES mirrors the number
// of entries in SAMPLE_RATES.
// NOTE(review): the closing "};" of the array initializer is elided in
// this listing -- confirm against the full source.
49 // Static variable definitions.
50 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
51 const unsigned int RtApi::SAMPLE_RATES[] = {
52 4000, 5512, 8000, 9600, 11025, 16000, 22050,
53 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform-dependent mutex wrappers used to guard stream_ state:
// Windows critical sections for the DS/ASIO backends, pthread mutexes
// for the Unix-style backends, and no-op dummies otherwise.
56 #if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
57 #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
58 #define MUTEX_DESTROY(A) DeleteCriticalSection(A)
59 #define MUTEX_LOCK(A) EnterCriticalSection(A)
60 #define MUTEX_UNLOCK(A) LeaveCriticalSection(A)
61 #elif defined(__LINUX_ALSA__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
63 #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
64 #define MUTEX_DESTROY(A) pthread_mutex_destroy(A)
65 #define MUTEX_LOCK(A) pthread_mutex_lock(A)
66 #define MUTEX_UNLOCK(A) pthread_mutex_unlock(A)
// NOTE(review): the "#else" and closing "#endif" (and presumably dummy
// LOCK/UNLOCK macros) are elided in this listing; the two "abs(*A)"
// definitions below belong to the no-thread-support fallback branch.
68 #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
69 #define MUTEX_DESTROY(A) abs(*A) // dummy definitions
72 // *************************************************** //
74 // RtAudio definitions.
76 // *************************************************** //
// Fill 'apis' with the RtAudio::Api identifiers compiled into this
// build.  The push order below defines the search order used by the
// RtAudio constructor when no API is explicitly requested.
// NOTE(review): the opening "{", the per-API "#endif" lines and the
// closing "}" are elided in this listing.
78 void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()
82 // The order here will control the order of RtAudio's API search in
84 #if defined(__UNIX_JACK__)
85 apis.push_back( UNIX_JACK );
87 #if defined(__LINUX_ALSA__)
88 apis.push_back( LINUX_ALSA );
90 #if defined(__LINUX_OSS__)
91 apis.push_back( LINUX_OSS );
93 #if defined(__WINDOWS_ASIO__)
94 apis.push_back( WINDOWS_ASIO );
96 #if defined(__WINDOWS_DS__)
97 apis.push_back( WINDOWS_DS );
99 #if defined(__MACOSX_CORE__)
100 apis.push_back( MACOSX_CORE );
102 #if defined(__RTAUDIO_DUMMY__)
103 apis.push_back( RTAUDIO_DUMMY );
// Instantiate the RtApi subclass matching 'api' and store it in
// rtapi_.  If the requested API was not compiled in, rtapi_ is left
// unchanged (no branch below matches).
// NOTE(review): the opening "{", per-API "#endif" lines and closing
// "}" are elided in this listing.
107 void RtAudio :: openRtApi( RtAudio::Api api )
109 #if defined(__UNIX_JACK__)
110 if ( api == UNIX_JACK )
111 rtapi_ = new RtApiJack();
113 #if defined(__LINUX_ALSA__)
114 if ( api == LINUX_ALSA )
115 rtapi_ = new RtApiAlsa();
117 #if defined(__LINUX_OSS__)
118 if ( api == LINUX_OSS )
119 rtapi_ = new RtApiOss();
121 #if defined(__WINDOWS_ASIO__)
122 if ( api == WINDOWS_ASIO )
123 rtapi_ = new RtApiAsio();
125 #if defined(__WINDOWS_DS__)
126 if ( api == WINDOWS_DS )
127 rtapi_ = new RtApiDs();
129 #if defined(__MACOSX_CORE__)
130 if ( api == MACOSX_CORE )
131 rtapi_ = new RtApiCore();
133 #if defined(__RTAUDIO_DUMMY__)
134 if ( api == RTAUDIO_DUMMY )
135 rtapi_ = new RtApiDummy();
// Constructor: first try the explicitly requested API (if any); if
// that yields no usable rtapi_, iterate over the compiled APIs and
// keep the first one that reports at least one device.
// NOTE(review): several lines are elided in this listing, including
// the actual "openRtApi( api );" call implied by the comment at line
// 144, the rtapi_ initialization, and the braces closing the if/for
// bodies -- confirm against the full source.
139 RtAudio :: RtAudio( RtAudio::Api api ) throw()
143 if ( api != UNSPECIFIED ) {
144 // Attempt to open the specified API.
146 if ( rtapi_ ) return;
148 // No compiled support for specified API value. Issue a debug
149 // warning and continue as if no API was specified.
150 std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
153 // Iterate through the compiled APIs and return as soon as we find
154 // one with at least one device or we reach the end of the list.
155 std::vector< RtAudio::Api > apis;
156 getCompiledApi( apis );
157 for ( unsigned int i=0; i<apis.size(); i++ ) {
158 openRtApi( apis[i] );
// Stop at the first compiled API that can enumerate a device.
159 if ( rtapi_->getDeviceCount() ) break;
162 if ( rtapi_ ) return;
164 // It should not be possible to get here because the preprocessor
165 // definition __RTAUDIO_DUMMY__ is automatically defined if no
166 // API-specific definitions are passed to the compiler. But just in
167 // case something weird happens, we'll print out an error message.
168 std::cerr << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
// Destructor.  NOTE(review): the body is elided in this listing;
// presumably it releases the rtapi_ instance -- confirm against the
// full source.
171 RtAudio :: ~RtAudio() throw()
// Thin forwarding wrapper: delegates stream creation to the selected
// API implementation (rtapi_).
// NOTE(review): the opening "{", the trailing arguments of the
// forwarded call (presumably "userData, options );") and the closing
// "}" are elided in this listing.
176 void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
177 RtAudio::StreamParameters *inputParameters,
178 RtAudioFormat format, unsigned int sampleRate,
179 unsigned int *bufferFrames,
180 RtAudioCallback callback, void *userData,
181 RtAudio::StreamOptions *options )
183 return rtapi_->openStream( outputParameters, inputParameters, format,
184 sampleRate, bufferFrames, callback,
188 // *************************************************** //
190 // Public RtApi definitions (see end of file for
191 // private or protected utility functions).
193 // *************************************************** //
// Interior of the RtApi base-class constructor (the "RtApi :: RtApi()"
// signature line is elided in this listing): reset all stream
// bookkeeping to a closed/uninitialized state and create the mutex
// guarding stream_.
197 stream_.state = STREAM_CLOSED;
198 stream_.mode = UNINITIALIZED;
199 stream_.apiHandle = 0;
// No user conversion buffers allocated yet (playback = [0], record = [1]).
200 stream_.userBuffer[0] = 0;
201 stream_.userBuffer[1] = 0;
202 MUTEX_INITIALIZE( &stream_.mutex );
// Warnings (RtError::WARNING) are printed by default.
203 showWarnings_ = true;
// Interior of the RtApi base-class destructor (signature elided in
// this listing): tear down the stream mutex created in the constructor.
208 MUTEX_DESTROY( &stream_.mutex );
// Validate the caller-supplied stream parameters, probe/open the
// requested output and/or input device via the subclass
// probeDeviceOpen(), and record the callback bookkeeping.
// NOTE(review): this listing elides the opening/closing braces, the
// "return;" statements after the error() calls, the "if ( oParams ) {"
// / "if ( iParams ) {" guards around the channel extraction, the
// "bool result;" declaration, and a "clearStreamInfo();"-style reset --
// confirm against the full source before editing.
211 void RtApi :: openStream( RtAudio::StreamParameters *oParams,
212 RtAudio::StreamParameters *iParams,
213 RtAudioFormat format, unsigned int sampleRate,
214 unsigned int *bufferFrames,
215 RtAudioCallback callback, void *userData,
216 RtAudio::StreamOptions *options )
// Only one stream per RtApi instance.
218 if ( stream_.state != STREAM_CLOSED ) {
219 errorText_ = "RtApi::openStream: a stream is already open!";
220 error( RtError::INVALID_USE );
// Non-NULL parameter structures must request at least one channel.
223 if ( oParams && oParams->nChannels < 1 ) {
224 errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
225 error( RtError::INVALID_USE );
228 if ( iParams && iParams->nChannels < 1 ) {
229 errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
230 error( RtError::INVALID_USE );
// At least one direction must be specified.
233 if ( oParams == NULL && iParams == NULL ) {
234 errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
235 error( RtError::INVALID_USE );
// formatBytes() returning zero marks an unknown RtAudioFormat value.
238 if ( formatBytes(format) == 0 ) {
239 errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
240 error( RtError::INVALID_USE );
// Validate device indices against the current device count.
243 unsigned int nDevices = getDeviceCount();
244 unsigned int oChannels = 0;
246 oChannels = oParams->nChannels;
247 if ( oParams->deviceId >= nDevices ) {
248 errorText_ = "RtApi::openStream: output device parameter value is invalid.";
249 error( RtError::INVALID_USE );
253 unsigned int iChannels = 0;
255 iChannels = iParams->nChannels;
256 if ( iParams->deviceId >= nDevices ) {
257 errorText_ = "RtApi::openStream: input device parameter value is invalid.";
258 error( RtError::INVALID_USE );
// Probe/open the output side first, then the input side.
265 if ( oChannels > 0 ) {
267 result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
268 sampleRate, format, bufferFrames, options );
269 if ( result == false ) error( RtError::SYSTEM_ERROR );
272 if ( iChannels > 0 ) {
274 result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
275 sampleRate, format, bufferFrames, options );
// If the input probe fails after the output side opened, close the
// half-open stream before reporting the error.
276 if ( result == false ) {
277 if ( oChannels > 0 ) closeStream();
278 error( RtError::SYSTEM_ERROR );
// Stash the user callback (stored as void* in CallbackInfo) and data.
282 stream_.callbackInfo.callback = (void *) callback;
283 stream_.callbackInfo.userData = userData;
// Report back the buffer count actually negotiated by the probe.
285 if ( options ) options->numberOfBuffers = stream_.nBuffers;
286 stream_.state = STREAM_STOPPED;
// Base-class fallback: default input device is index 0.  NOTE(review):
// the "{ return 0; }" body is elided in this listing.
289 unsigned int RtApi :: getDefaultInputDevice( void )
291 // Should be implemented in subclasses if possible.
// Base-class fallback: default output device is index 0.  NOTE(review):
// the "{ return 0; }" body is elided in this listing.
295 unsigned int RtApi :: getDefaultOutputDevice( void )
297 // Should be implemented in subclasses if possible.
// Base-class placeholder: every concrete API must override this to
// release its stream resources.  Body braces elided in this listing.
301 void RtApi :: closeStream( void )
303 // MUST be implemented in subclasses!
// Base-class placeholder: every concrete API must override this to
// open/configure a device for the given mode.  NOTE(review): the
// failure-return body is elided in this listing.
307 bool RtApi :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
308 unsigned int firstChannel, unsigned int sampleRate,
309 RtAudioFormat format, unsigned int *bufferSize,
310 RtAudio::StreamOptions *options )
312 // MUST be implemented in subclasses!
// Advance stream_.streamTime by one buffer's duration (in seconds);
// when gettimeofday() is available, also record a wall-clock timestamp
// so getStreamTime() can interpolate between ticks.
// NOTE(review): the opening "{", "#endif" and closing "}" are elided
// in this listing.
316 void RtApi :: tickStreamTime( void )
318 // Subclasses that do not provide their own implementation of
319 // getStreamTime should call this function once per buffer I/O to
320 // provide basic stream time support.
322 stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
324 #if defined( HAVE_GETTIMEOFDAY )
325 gettimeofday( &stream_.lastTickTimestamp, NULL );
// Total stream latency in frames: output latency ([0]) plus input
// latency ([1]), depending on the open stream's mode.
// NOTE(review): a verifyStream()-style check and the final
// "return totalLatency;" are elided in this listing.
329 long RtApi :: getStreamLatency( void )
333 long totalLatency = 0;
334 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
335 totalLatency = stream_.latency[0];
336 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
337 totalLatency += stream_.latency[1];
// Elapsed stream time in seconds.  With gettimeofday() support the
// time since the last tick recorded by tickStreamTime() is added to
// stream_.streamTime for a finer-grained estimate; otherwise (or when
// the stream is not running) the coarse tick count is returned as-is.
// NOTE(review): the "struct timeval then, now;" declarations, the
// "#else"/"#endif" pair and the enclosing braces are elided in this
// listing.
342 double RtApi :: getStreamTime( void )
346 #if defined( HAVE_GETTIMEOFDAY )
347 // Return a very accurate estimate of the stream time by
348 // adding in the elapsed time since the last tick.
352 if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
353 return stream_.streamTime;
355 gettimeofday( &now, NULL );
356 then = stream_.lastTickTimestamp;
357 return stream_.streamTime +
358 ((now.tv_sec + 0.000001 * now.tv_usec) -
359 (then.tv_sec + 0.000001 * then.tv_usec));
361 return stream_.streamTime;
// Sample rate of the currently open stream.  NOTE(review): a
// verifyStream() call and the enclosing braces are elided in this
// listing.
365 unsigned int RtApi :: getStreamSampleRate( void )
369 return stream_.sampleRate;
373 // *************************************************** //
375 // OS/API-specific methods.
377 // *************************************************** //
379 #if defined(__MACOSX_CORE__)
381 // The OS X CoreAudio API is designed to use a separate callback
382 // procedure for each of its audio devices. A single RtAudio duplex
383 // stream using two different devices is supported here, though it
384 // cannot be guaranteed to always behave correctly because we cannot
385 // synchronize these two callbacks.
387 // A property listener is installed for over/underrun information.
388 // However, no functionality is currently provided to allow property
389 // listeners to trigger user handlers because it is unclear what could
390 // be done if a critical stream parameter (buffer size, sample rate,
391 // device disconnect) notification arrived. The listeners entail
392 // quite a bit of extra code and most likely, a user program wouldn't
393 // be prepared for the result anyway. However, we do provide a flag
394 // to the client callback function to inform of an over/underrun.
396 // The mechanism for querying and setting system parameters was
397 // updated (and perhaps simplified) in OS-X version 10.4. However,
398 // since 10.4 support is not necessarily available to all users, I've
399 // decided not to update the respective code at this time. Perhaps
400 // this will happen when Apple makes 10.4 free for everyone. :-)
402 // A structure to hold various information related to the CoreAudio API
// NOTE(review): the "struct CoreHandle {" line, the xrun[2] and
// deviceBuffer member declarations, and the constructor's name line
// are elided in this listing (the initializer list below references
// deviceBuffer and xrun).
405 AudioDeviceID id[2]; // device ids
406 AudioDeviceIOProcID procId[2];
407 UInt32 iStream[2]; // device stream index (or first if using multiple)
408 UInt32 nStreams[2]; // number of streams to use
// Condition variable used to coordinate stop/drain with the callback.
411 pthread_cond_t condition;
412 int drainCounter; // Tracks callback counts when draining
413 bool internalDrain; // Indicates if stop is initiated from callback or not.
416 :deviceBuffer(0), drainCounter(0), internalDrain(false) { nStreams[0] = 1; nStreams[1] = 1; id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
// CoreAudio API constructor: all state setup happens in the RtApi
// base-class constructor.  Body braces elided in this listing.
419 RtApiCore :: RtApiCore()
421 // Nothing to do here.
// CoreAudio API destructor: close any open stream while the derived
// class (and its closeStream implementation) is still alive.
424 RtApiCore :: ~RtApiCore()
426 // The subclass destructor gets called before the base class
427 // destructor, so close an existing stream before deallocating
428 // apiDeviceId memory.
429 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count audio devices by asking CoreAudio for the size of the device
// ID array and dividing by sizeof(AudioDeviceID).
// NOTE(review): the "UInt32 dataSize;" declaration and the warning
// path's "return 0;" are elided in this listing.
432 unsigned int RtApiCore :: getDeviceCount( void )
434 // Find out how many audio devices there are, if any.
436 OSStatus result = AudioHardwareGetPropertyInfo( kAudioHardwarePropertyDevices, &dataSize, NULL );
437 if ( result != noErr ) {
438 errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
439 error( RtError::WARNING );
443 return dataSize / sizeof( AudioDeviceID );
// Map CoreAudio's default-input AudioDeviceID to RtAudio's device
// index by locating it in the system device list.
// NOTE(review): the "AudioDeviceID id;" declaration, the trailing
// arguments of the first AudioHardwareGetProperty call and the
// "return 0;" fallthroughs are elided in this listing.  Also note
// "AudioDeviceID deviceList[ nDevices ];" is a variable-length array
// -- a compiler extension, not standard C++.
446 unsigned int RtApiCore :: getDefaultInputDevice( void )
448 unsigned int nDevices = getDeviceCount();
// With zero or one device, index 0 is trivially the default.
449 if ( nDevices <= 1 ) return 0;
452 UInt32 dataSize = sizeof( AudioDeviceID );
453 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
456 if ( result != noErr ) {
457 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
458 error( RtError::WARNING );
462 dataSize *= nDevices;
463 AudioDeviceID deviceList[ nDevices ];
464 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
465 if ( result != noErr ) {
466 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
467 error( RtError::WARNING );
// Linear scan: the index of the default device's ID is the RtAudio index.
471 for ( unsigned int i=0; i<nDevices; i++ )
472 if ( id == deviceList[i] ) return i;
474 errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
475 error( RtError::WARNING );
// Map CoreAudio's default-output AudioDeviceID to RtAudio's device
// index; mirrors getDefaultInputDevice() above.
// NOTE(review): the "AudioDeviceID id;" declaration, the trailing
// arguments of the first AudioHardwareGetProperty call and the
// "return 0;" fallthroughs are elided in this listing; the device
// list is a VLA (compiler extension).
479 unsigned int RtApiCore :: getDefaultOutputDevice( void )
481 unsigned int nDevices = getDeviceCount();
482 if ( nDevices <= 1 ) return 0;
485 UInt32 dataSize = sizeof( AudioDeviceID );
486 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
489 if ( result != noErr ) {
490 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
491 error( RtError::WARNING );
495 dataSize *= nDevices;
496 AudioDeviceID deviceList[ nDevices ];
497 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
498 if ( result != noErr ) {
499 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
500 error( RtError::WARNING );
504 for ( unsigned int i=0; i<nDevices; i++ )
505 if ( id == deviceList[i] ) return i;
507 errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
508 error( RtError::WARNING );
// Build an RtAudio::DeviceInfo for device index 'device': name and
// manufacturer strings, output/input/duplex channel counts, the
// subset of SAMPLE_RATES within the device's nominal-rate range, and
// the native format (always float32 for CoreAudio virtual streams).
// NOTE(review): many lines are elided in this listing -- the
// "return info;" statements on each warning path, the "char name[...]"
// buffer and dataSize setup for the name queries, the trailing
// arguments of several Get/GetInfo calls, and (presumably) the
// "free( bufferList );" calls releasing the malloc'd buffer lists --
// confirm against the full source.  The VLAs (deviceList, rangeList)
// are a compiler extension, not standard C++.
512 RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
514 RtAudio::DeviceInfo info;
518 unsigned int nDevices = getDeviceCount();
519 if ( nDevices == 0 ) {
520 errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
521 error( RtError::INVALID_USE );
524 if ( device >= nDevices ) {
525 errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
526 error( RtError::INVALID_USE );
// Translate the RtAudio index into a CoreAudio AudioDeviceID.
529 AudioDeviceID deviceList[ nDevices ];
530 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
531 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
532 if ( result != noErr ) {
533 errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
534 error( RtError::WARNING );
538 AudioDeviceID id = deviceList[ device ];
540 // Get the device name.
// Device name is "<manufacturer>: <device name>".
544 result = AudioDeviceGetProperty( id, 0, false,
545 kAudioDevicePropertyDeviceManufacturer,
548 if ( result != noErr ) {
549 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
550 errorText_ = errorStream_.str();
551 error( RtError::WARNING );
554 info.name.append( (const char *)name, strlen(name) );
555 info.name.append( ": " );
558 result = AudioDeviceGetProperty( id, 0, false,
559 kAudioDevicePropertyDeviceName,
561 if ( result != noErr ) {
562 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
563 errorText_ = errorStream_.str();
564 error( RtError::WARNING );
567 info.name.append( (const char *)name, strlen(name) );
569 // Get the output stream "configuration".
570 AudioBufferList *bufferList = nil;
571 result = AudioDeviceGetPropertyInfo( id, 0, false,
572 kAudioDevicePropertyStreamConfiguration,
574 if (result != noErr || dataSize == 0) {
575 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
576 errorText_ = errorStream_.str();
577 error( RtError::WARNING );
581 // Allocate the AudioBufferList.
582 bufferList = (AudioBufferList *) malloc( dataSize );
583 if ( bufferList == NULL ) {
584 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
585 error( RtError::WARNING );
589 result = AudioDeviceGetProperty( id, 0, false,
590 kAudioDevicePropertyStreamConfiguration,
591 &dataSize, bufferList );
592 if ( result != noErr ) {
594 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
595 errorText_ = errorStream_.str();
596 error( RtError::WARNING );
600 // Get output channel information.
// Output channels = sum of channels over all output streams.
601 unsigned int i, nStreams = bufferList->mNumberBuffers;
602 for ( i=0; i<nStreams; i++ )
603 info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
606 // Get the input stream "configuration".
607 result = AudioDeviceGetPropertyInfo( id, 0, true,
608 kAudioDevicePropertyStreamConfiguration,
610 if (result != noErr || dataSize == 0) {
611 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
612 errorText_ = errorStream_.str();
613 error( RtError::WARNING );
617 // Allocate the AudioBufferList.
618 bufferList = (AudioBufferList *) malloc( dataSize );
619 if ( bufferList == NULL ) {
620 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
621 error( RtError::WARNING );
625 result = AudioDeviceGetProperty( id, 0, true,
626 kAudioDevicePropertyStreamConfiguration,
627 &dataSize, bufferList );
628 if ( result != noErr ) {
630 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
631 errorText_ = errorStream_.str();
632 error( RtError::WARNING );
636 // Get input channel information.
637 nStreams = bufferList->mNumberBuffers;
638 for ( i=0; i<nStreams; i++ )
639 info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
642 // If device opens for both playback and capture, we determine the channels.
643 if ( info.outputChannels > 0 && info.inputChannels > 0 )
644 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
646 // Probe the device sample rates.
// Query rates on the input side only for capture-only devices.
647 bool isInput = false;
648 if ( info.outputChannels == 0 ) isInput = true;
650 // Determine the supported sample rates.
651 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
652 kAudioDevicePropertyAvailableNominalSampleRates,
655 if ( result != kAudioHardwareNoError || dataSize == 0 ) {
656 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
657 errorText_ = errorStream_.str();
658 error( RtError::WARNING );
662 UInt32 nRanges = dataSize / sizeof( AudioValueRange );
663 AudioValueRange rangeList[ nRanges ];
664 result = AudioDeviceGetProperty( id, 0, isInput,
665 kAudioDevicePropertyAvailableNominalSampleRates,
666 &dataSize, &rangeList );
668 if ( result != kAudioHardwareNoError ) {
669 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
670 errorText_ = errorStream_.str();
671 error( RtError::WARNING );
// Collapse all reported ranges to a single [min, max] envelope, then
// keep the entries of the static SAMPLE_RATES table inside it.
675 Float64 minimumRate = 100000000.0, maximumRate = 0.0;
676 for ( UInt32 i=0; i<nRanges; i++ ) {
677 if ( rangeList[i].mMinimum < minimumRate ) minimumRate = rangeList[i].mMinimum;
678 if ( rangeList[i].mMaximum > maximumRate ) maximumRate = rangeList[i].mMaximum;
681 info.sampleRates.clear();
682 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
683 if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate )
684 info.sampleRates.push_back( SAMPLE_RATES[k] );
687 if ( info.sampleRates.size() == 0 ) {
688 errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";
689 errorText_ = errorStream_.str();
690 error( RtError::WARNING );
694 // CoreAudio always uses 32-bit floating point data for PCM streams.
695 // Thus, any other "physical" formats supported by the device are of
696 // no interest to the client.
697 info.nativeFormats = RTAUDIO_FLOAT32;
699 if ( getDefaultOutputDevice() == device )
700 info.isDefaultOutput = true;
701 if ( getDefaultInputDevice() == device )
702 info.isDefaultInput = true;
// CoreAudio IOProc: unpack the CallbackInfo passed at registration
// time and dispatch to RtApiCore::callbackEvent(); a false return is
// translated to a CoreAudio error code.
// NOTE(review): the final "void* infoPointer" parameter line and the
// enclosing braces are elided in this listing.
708 OSStatus callbackHandler( AudioDeviceID inDevice,
709 const AudioTimeStamp* inNow,
710 const AudioBufferList* inInputData,
711 const AudioTimeStamp* inInputTime,
712 AudioBufferList* outOutputData,
713 const AudioTimeStamp* inOutputTime,
716 CallbackInfo *info = (CallbackInfo *) infoPointer;
718 RtApiCore *object = (RtApiCore *) info->object;
719 if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
720 return kAudioHardwareUnspecifiedError;
722 return kAudioHardwareNoError;
// CoreAudio property listener: on a processor-overload notification,
// latch the corresponding xrun flag in the CoreHandle ([1] = input,
// [0] = output) for the next callbackEvent() to report.
// NOTE(review): the channel/isInput parameter lines and the
// "if ( isInput ) ... else ..." branch selecting xrun[1] vs xrun[0]
// are elided in this listing.
725 OSStatus deviceListener( AudioDeviceID inDevice,
728 AudioDevicePropertyID propertyID,
729 void* handlePointer )
731 CoreHandle *handle = (CoreHandle *) handlePointer;
732 if ( propertyID == kAudioDeviceProcessorOverload ) {
734 handle->xrun[1] = true;
736 handle->xrun[0] = true;
739 return kAudioHardwareNoError;
// True if the device exposes the given property: a successful
// GetPropertyInfo query (size/flags discarded) means it exists.
// NOTE(review): the return statement (presumably comparing 'result'
// to noErr) is elided in this listing.
742 static bool hasProperty( AudioDeviceID id, UInt32 channel, bool isInput, AudioDevicePropertyID property )
744 OSStatus result = AudioDeviceGetPropertyInfo( id, channel, isInput, property, NULL, NULL );
748 bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
749 unsigned int firstChannel, unsigned int sampleRate,
750 RtAudioFormat format, unsigned int *bufferSize,
751 RtAudio::StreamOptions *options )
754 unsigned int nDevices = getDeviceCount();
755 if ( nDevices == 0 ) {
756 // This should not happen because a check is made before this function is called.
757 errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
761 if ( device >= nDevices ) {
762 // This should not happen because a check is made before this function is called.
763 errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
767 AudioDeviceID deviceList[ nDevices ];
768 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
769 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
770 if ( result != noErr ) {
771 errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
775 AudioDeviceID id = deviceList[ device ];
777 // Setup for stream mode.
778 bool isInput = false;
779 if ( mode == INPUT ) isInput = true;
781 // Set or disable "hog" mode.
782 dataSize = sizeof( UInt32 );
784 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) doHog = 1;
785 result = AudioHardwareSetProperty( kAudioHardwarePropertyHogModeIsAllowed, dataSize, &doHog );
786 if ( result != noErr ) {
787 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
788 errorText_ = errorStream_.str();
792 // Get the stream "configuration".
793 AudioBufferList *bufferList;
794 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
795 kAudioDevicePropertyStreamConfiguration,
797 if (result != noErr || dataSize == 0) {
798 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
799 errorText_ = errorStream_.str();
803 // Allocate the AudioBufferList.
804 bufferList = (AudioBufferList *) malloc( dataSize );
805 if ( bufferList == NULL ) {
806 errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
810 result = AudioDeviceGetProperty( id, 0, isInput,
811 kAudioDevicePropertyStreamConfiguration,
812 &dataSize, bufferList );
813 if ( result != noErr ) {
815 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
816 errorText_ = errorStream_.str();
820 // Search for one or more streams that contain the desired number of
821 // channels. CoreAudio devices can have an arbitrary number of
822 // streams and each stream can have an arbitrary number of channels.
823 // For each stream, a single buffer of interleaved samples is
824 // provided. RtAudio prefers the use of one stream of interleaved
825 // data or multiple consecutive single-channel streams. However, we
826 // now support multiple consecutive multi-channel streams of
827 // interleaved data as well.
828 UInt32 iStream, offsetCounter = firstChannel;
829 UInt32 nStreams = bufferList->mNumberBuffers;
830 bool monoMode = false;
831 bool foundStream = false;
833 // First check that the device supports the requested number of
835 UInt32 deviceChannels = 0;
836 for ( iStream=0; iStream<nStreams; iStream++ )
837 deviceChannels += bufferList->mBuffers[iStream].mNumberChannels;
839 if ( deviceChannels < ( channels + firstChannel ) ) {
841 errorStream_ << "RtApiCore::probeDeviceOpen: the device (" << device << ") does not support the requested channel count.";
842 errorText_ = errorStream_.str();
846 // Look for a single stream meeting our needs.
847 UInt32 firstStream, streamCount = 1, streamChannels = 0, channelOffset = 0;
848 for ( iStream=0; iStream<nStreams; iStream++ ) {
849 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
850 if ( streamChannels >= channels + offsetCounter ) {
851 firstStream = iStream;
852 channelOffset = offsetCounter;
856 if ( streamChannels > offsetCounter ) break;
857 offsetCounter -= streamChannels;
860 // If we didn't find a single stream above, then we should be able
861 // to meet the channel specification with multiple streams.
862 if ( foundStream == false ) {
864 offsetCounter = firstChannel;
865 for ( iStream=0; iStream<nStreams; iStream++ ) {
866 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
867 if ( streamChannels > offsetCounter ) break;
868 offsetCounter -= streamChannels;
871 firstStream = iStream;
872 channelOffset = offsetCounter;
873 Int32 channelCounter = channels + offsetCounter - streamChannels;
875 if ( streamChannels > 1 ) monoMode = false;
876 while ( channelCounter > 0 ) {
877 streamChannels = bufferList->mBuffers[++iStream].mNumberChannels;
878 if ( streamChannels > 1 ) monoMode = false;
879 channelCounter -= streamChannels;
886 // Determine the buffer size.
887 AudioValueRange bufferRange;
888 dataSize = sizeof( AudioValueRange );
889 result = AudioDeviceGetProperty( id, 0, isInput,
890 kAudioDevicePropertyBufferFrameSizeRange,
891 &dataSize, &bufferRange );
892 if ( result != noErr ) {
893 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
894 errorText_ = errorStream_.str();
898 if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
899 else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
900 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
902 // Set the buffer size. For multiple streams, I'm assuming we only
903 // need to make this setting for the master channel.
904 UInt32 theSize = (UInt32) *bufferSize;
905 dataSize = sizeof( UInt32 );
906 result = AudioDeviceSetProperty( id, NULL, 0, isInput,
907 kAudioDevicePropertyBufferFrameSize,
908 dataSize, &theSize );
910 if ( result != noErr ) {
911 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
912 errorText_ = errorStream_.str();
916 // If attempting to setup a duplex stream, the bufferSize parameter
917 // MUST be the same in both directions!
918 *bufferSize = theSize;
919 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
920 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
921 errorText_ = errorStream_.str();
925 stream_.bufferSize = *bufferSize;
926 stream_.nBuffers = 1;
928 // Get the stream ID(s) so we can set the stream format. We'll have
929 // to do this for each stream.
930 AudioStreamID streamIDs[ nStreams ];
931 dataSize = nStreams * sizeof( AudioStreamID );
932 result = AudioDeviceGetProperty( id, 0, isInput,
933 kAudioDevicePropertyStreams,
934 &dataSize, &streamIDs );
935 if ( result != noErr ) {
936 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream ID(s) for device (" << device << ").";
937 errorText_ = errorStream_.str();
941 // Now set the stream format. Also, check the physical format of the
942 // device and change that if necessary.
943 AudioStreamBasicDescription description;
944 dataSize = sizeof( AudioStreamBasicDescription );
947 for ( UInt32 i=0; i<streamCount; i++ ) {
949 result = AudioStreamGetProperty( streamIDs[firstStream+i], 0,
950 kAudioStreamPropertyVirtualFormat,
951 &dataSize, &description );
953 if ( result != noErr ) {
954 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
955 errorText_ = errorStream_.str();
959 // Set the sample rate and data format id. However, only make the
960 // change if the sample rate is not within 1.0 of the desired
961 // rate and the format is not linear pcm.
962 updateFormat = false;
963 if ( fabs( description.mSampleRate - (double)sampleRate ) > 1.0 ) {
964 description.mSampleRate = (double) sampleRate;
968 if ( description.mFormatID != kAudioFormatLinearPCM ) {
969 description.mFormatID = kAudioFormatLinearPCM;
973 if ( updateFormat ) {
974 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0,
975 kAudioStreamPropertyVirtualFormat,
976 dataSize, &description );
977 if ( result != noErr ) {
978 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
979 errorText_ = errorStream_.str();
984 // Now check the physical format.
985 result = AudioStreamGetProperty( streamIDs[firstStream+i], 0,
986 kAudioStreamPropertyPhysicalFormat,
987 &dataSize, &description );
988 if ( result != noErr ) {
989 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
990 errorText_ = errorStream_.str();
994 if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 24 ) {
995 description.mFormatID = kAudioFormatLinearPCM;
996 AudioStreamBasicDescription testDescription = description;
997 unsigned long formatFlags;
999 // We'll try higher bit rates first and then work our way down.
1000 testDescription.mBitsPerChannel = 32;
1001 formatFlags = description.mFormatFlags | kLinearPCMFormatFlagIsFloat & ~kLinearPCMFormatFlagIsSignedInteger;
1002 testDescription.mFormatFlags = formatFlags;
1003 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1004 if ( result == noErr ) continue;
1006 testDescription = description;
1007 testDescription.mBitsPerChannel = 32;
1008 formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger) & ~kLinearPCMFormatFlagIsFloat;
1009 testDescription.mFormatFlags = formatFlags;
1010 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1011 if ( result == noErr ) continue;
1013 testDescription = description;
1014 testDescription.mBitsPerChannel = 24;
1015 testDescription.mFormatFlags = formatFlags;
1016 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1017 if ( result == noErr ) continue;
1019 testDescription = description;
1020 testDescription.mBitsPerChannel = 16;
1021 testDescription.mFormatFlags = formatFlags;
1022 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1023 if ( result == noErr ) continue;
1025 testDescription = description;
1026 testDescription.mBitsPerChannel = 8;
1027 testDescription.mFormatFlags = formatFlags;
1028 result = AudioStreamSetProperty( streamIDs[firstStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1029 if ( result != noErr ) {
1030 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
1031 errorText_ = errorStream_.str();
1037 // Get the stream latency. There can be latency in both the device
1038 // and the stream. First, attempt to get the device latency on the
1039 // master channel or the first open channel. Errors that might
1040 // occur here are not deemed critical.
1042 // ***** CHECK THIS ***** //
1043 UInt32 latency, channel = 0;
1044 dataSize = sizeof( UInt32 );
1045 AudioDevicePropertyID property = kAudioDevicePropertyLatency;
1046 if ( hasProperty( id, channel, isInput, property ) == true ) {
1047 result = AudioDeviceGetProperty( id, channel, isInput, property, &dataSize, &latency );
1048 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
1050 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
1051 errorText_ = errorStream_.str();
1052 error( RtError::WARNING );
1056 // Now try to get the stream latency. For multiple streams, I assume the
1057 // latency is equal for each.
1058 result = AudioStreamGetProperty( streamIDs[firstStream], 0, property, &dataSize, &latency );
1059 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] += latency;
1061 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream latency for device (" << device << ").";
1062 errorText_ = errorStream_.str();
1063 error( RtError::WARNING );
1066 // Byte-swapping: According to AudioHardware.h, the stream data will
1067 // always be presented in native-endian format, so we should never
1068 // need to byte swap.
1069 stream_.doByteSwap[mode] = false;
1071 // From the CoreAudio documentation, PCM data must be supplied as
1073 stream_.userFormat = format;
1074 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1076 if ( streamCount == 1 )
1077 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
1078 else // multiple streams
1079 stream_.nDeviceChannels[mode] = channels;
1080 stream_.nUserChannels[mode] = channels;
1081 stream_.channelOffset[mode] = channelOffset; // offset within a CoreAudio stream
1082 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1083 else stream_.userInterleaved = true;
1084 stream_.deviceInterleaved[mode] = true;
1085 if ( monoMode == true ) stream_.deviceInterleaved[mode] = false;
1087 // Set flags for buffer conversion.
1088 stream_.doConvertBuffer[mode] = false;
1089 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1090 stream_.doConvertBuffer[mode] = true;
1091 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
1092 stream_.doConvertBuffer[mode] = true;
1093 if ( streamCount == 1 ) {
1094 if ( stream_.nUserChannels[mode] > 1 &&
1095 stream_.userInterleaved != stream_.deviceInterleaved[mode] )
1096 stream_.doConvertBuffer[mode] = true;
1098 else if ( monoMode && stream_.userInterleaved )
1099 stream_.doConvertBuffer[mode] = true;
1101 // Allocate our CoreHandle structure for the stream.
1102 CoreHandle *handle = 0;
1103 if ( stream_.apiHandle == 0 ) {
1105 handle = new CoreHandle;
1107 catch ( std::bad_alloc& ) {
1108 errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
1112 if ( pthread_cond_init( &handle->condition, NULL ) ) {
1113 errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
1116 stream_.apiHandle = (void *) handle;
1119 handle = (CoreHandle *) stream_.apiHandle;
1120 handle->iStream[mode] = firstStream;
1121 handle->nStreams[mode] = streamCount;
1122 handle->id[mode] = id;
1124 // Allocate necessary internal buffers.
1125 unsigned long bufferBytes;
1126 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1127 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1128 if ( stream_.userBuffer[mode] == NULL ) {
1129 errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
1133 // If possible, we will make use of the CoreAudio stream buffers as
1134 // "device buffers". However, we can't do this if using multiple
1136 if ( stream_.doConvertBuffer[mode] && handle->nStreams[mode] > 1 ) {
1138 bool makeBuffer = true;
1139 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
1140 if ( mode == INPUT ) {
1141 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1142 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1143 if ( bufferBytes <= bytesOut ) makeBuffer = false;
1148 bufferBytes *= *bufferSize;
1149 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1150 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1151 if ( stream_.deviceBuffer == NULL ) {
1152 errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
1158 stream_.sampleRate = sampleRate;
1159 stream_.device[mode] = device;
1160 stream_.state = STREAM_STOPPED;
1161 stream_.callbackInfo.object = (void *) this;
1163 // Setup the buffer conversion information structure.
1164 if ( stream_.doConvertBuffer[mode] ) {
1165 if ( streamCount > 1 ) setConvertInfo( mode, 0 );
1166 else setConvertInfo( mode, channelOffset );
1169 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
1170 // Only one callback procedure per device.
1171 stream_.mode = DUPLEX;
1173 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1174 result = AudioDeviceCreateIOProcID( id, callbackHandler, (void *) &stream_.callbackInfo, &handle->procId[mode] );
1176 // deprecated in favor of AudioDeviceCreateIOProcID()
1177 result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
1179 if ( result != noErr ) {
1180 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
1181 errorText_ = errorStream_.str();
1184 if ( stream_.mode == OUTPUT && mode == INPUT )
1185 stream_.mode = DUPLEX;
1187 stream_.mode = mode;
1190 // Setup the device property listener for over/underload.
1191 result = AudioDeviceAddPropertyListener( id, 0, isInput,
1192 kAudioDeviceProcessorOverload,
1193 deviceListener, (void *) handle );
1199 pthread_cond_destroy( &handle->condition );
1201 stream_.apiHandle = 0;
1204 for ( int i=0; i<2; i++ ) {
1205 if ( stream_.userBuffer[i] ) {
1206 free( stream_.userBuffer[i] );
1207 stream_.userBuffer[i] = 0;
1211 if ( stream_.deviceBuffer ) {
1212 free( stream_.deviceBuffer );
1213 stream_.deviceBuffer = 0;
1219 void RtApiCore :: closeStream( void )
1221 if ( stream_.state == STREAM_CLOSED ) {
1222 errorText_ = "RtApiCore::closeStream(): no open stream to close!";
1223 error( RtError::WARNING );
1227 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1228 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1229 if ( stream_.state == STREAM_RUNNING )
1230 AudioDeviceStop( handle->id[0], callbackHandler );
1231 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1232 AudioDeviceDestroyIOProcID( handle->id[0], handle->procId[0] );
1234 // deprecated in favor of AudioDeviceDestroyIOProcID()
1235 AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
1239 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1240 if ( stream_.state == STREAM_RUNNING )
1241 AudioDeviceStop( handle->id[1], callbackHandler );
1242 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1243 AudioDeviceDestroyIOProcID( handle->id[1], handle->procId[1] );
1245 // deprecated in favor of AudioDeviceDestroyIOProcID()
1246 AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
1250 for ( int i=0; i<2; i++ ) {
1251 if ( stream_.userBuffer[i] ) {
1252 free( stream_.userBuffer[i] );
1253 stream_.userBuffer[i] = 0;
1257 if ( stream_.deviceBuffer ) {
1258 free( stream_.deviceBuffer );
1259 stream_.deviceBuffer = 0;
1262 // Destroy pthread condition variable.
1263 pthread_cond_destroy( &handle->condition );
1265 stream_.apiHandle = 0;
1267 stream_.mode = UNINITIALIZED;
1268 stream_.state = STREAM_CLOSED;
1271 void RtApiCore :: startStream( void )
1274 if ( stream_.state == STREAM_RUNNING ) {
1275 errorText_ = "RtApiCore::startStream(): the stream is already running!";
1276 error( RtError::WARNING );
1280 MUTEX_LOCK( &stream_.mutex );
1282 OSStatus result = noErr;
1283 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1284 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1286 result = AudioDeviceStart( handle->id[0], callbackHandler );
1287 if ( result != noErr ) {
1288 errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
1289 errorText_ = errorStream_.str();
1294 if ( stream_.mode == INPUT ||
1295 ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1297 result = AudioDeviceStart( handle->id[1], callbackHandler );
1298 if ( result != noErr ) {
1299 errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
1300 errorText_ = errorStream_.str();
1305 handle->drainCounter = 0;
1306 handle->internalDrain = false;
1307 stream_.state = STREAM_RUNNING;
1310 MUTEX_UNLOCK( &stream_.mutex );
1312 if ( result == noErr ) return;
1313 error( RtError::SYSTEM_ERROR );
1316 void RtApiCore :: stopStream( void )
1319 if ( stream_.state == STREAM_STOPPED ) {
1320 errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
1321 error( RtError::WARNING );
1325 MUTEX_LOCK( &stream_.mutex );
1327 if ( stream_.state == STREAM_STOPPED ) {
1328 MUTEX_UNLOCK( &stream_.mutex );
1332 OSStatus result = noErr;
1333 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1334 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1336 if ( handle->drainCounter == 0 ) {
1337 handle->drainCounter = 1;
1338 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
1341 result = AudioDeviceStop( handle->id[0], callbackHandler );
1342 if ( result != noErr ) {
1343 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
1344 errorText_ = errorStream_.str();
1349 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1351 result = AudioDeviceStop( handle->id[1], callbackHandler );
1352 if ( result != noErr ) {
1353 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
1354 errorText_ = errorStream_.str();
1359 stream_.state = STREAM_STOPPED;
1362 MUTEX_UNLOCK( &stream_.mutex );
1364 if ( result == noErr ) return;
1365 error( RtError::SYSTEM_ERROR );
1368 void RtApiCore :: abortStream( void )
1371 if ( stream_.state == STREAM_STOPPED ) {
1372 errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
1373 error( RtError::WARNING );
1377 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1378 handle->drainCounter = 1;
// Per-device IOProc dispatch for the open stream.  Called once per
// hardware buffer cycle for each device: invokes the user callback to
// obtain fresh data, fills the CoreAudio output AudioBufferList and/or
// copies from the input AudioBufferList, handling format conversion,
// multiple physical streams, channel offsets, and mono (non-interleaved)
// mode.  Also advances the drain handshake used by stopStream().
1383 bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
1384 const AudioBufferList *inBufferList,
1385 const AudioBufferList *outBufferList )
1387 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
1388 if ( stream_.state == STREAM_CLOSED ) {
1389 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
1390 error( RtError::WARNING );
1394 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1395 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1397 // Check if we were draining the stream and signal is finished.
// drainCounter > 3 means enough zero-filled cycles have elapsed; wake
// the thread blocked in stopStream() unless the drain was started from
// inside the callback (internalDrain).
1398 if ( handle->drainCounter > 3 ) {
1399 if ( handle->internalDrain == false )
1400 pthread_cond_signal( &handle->condition );
1406 MUTEX_LOCK( &stream_.mutex );
1408 // The state might change while waiting on a mutex.
1409 if ( stream_.state == STREAM_STOPPED ) {
1410 MUTEX_UNLOCK( &stream_.mutex );
1414 AudioDeviceID outputDevice = handle->id[0];
1416 // Invoke user callback to get fresh output data UNLESS we are
1417 // draining stream or duplex mode AND the input/output devices are
1418 // different AND this function is called for the input device.
1419 if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
1420 RtAudioCallback callback = (RtAudioCallback) info->callback;
1421 double streamTime = getStreamTime();
1422 RtAudioStreamStatus status = 0;
// Report (and clear) any xrun flags latched by the device listener.
1423 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
1424 status |= RTAUDIO_OUTPUT_UNDERFLOW;
1425 handle->xrun[0] = false;
1427 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
1428 status |= RTAUDIO_INPUT_OVERFLOW;
1429 handle->xrun[1] = false;
// The callback's return value drives the drain protocol:
// 1 = stop after draining, 2 = abort immediately.
1431 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
1432 stream_.bufferSize, streamTime, status, info->userData );
1433 if ( handle->drainCounter == 2 ) {
1434 MUTEX_UNLOCK( &stream_.mutex );
1438 else if ( handle->drainCounter == 1 )
1439 handle->internalDrain = true;
// ---- Output side: fill the CoreAudio output buffers. ----
1442 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
1444 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
1446 if ( handle->nStreams[0] == 1 ) {
1447 memset( outBufferList->mBuffers[handle->iStream[0]].mData,
1449 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1451 else { // fill multiple streams with zeros
1452 for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
1453 memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1455 outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
1459 else if ( handle->nStreams[0] == 1 ) {
1460 if ( stream_.doConvertBuffer[0] ) { // convert directly to CoreAudio stream buffer
1461 convertBuffer( (char *) outBufferList->mBuffers[handle->iStream[0]].mData,
1462 stream_.userBuffer[0], stream_.convertInfo[0] );
1464 else { // copy from user buffer
1465 memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
1466 stream_.userBuffer[0],
1467 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1470 else { // fill multiple streams
1471 Float32 *inBuffer = (Float32 *) stream_.userBuffer[0];
1472 if ( stream_.doConvertBuffer[0] ) {
1473 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
1474 inBuffer = (Float32 *) stream_.deviceBuffer;
1477 if ( stream_.deviceInterleaved[0] == false ) { // mono mode
1478 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1479 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
1480 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1481 (void *)&inBuffer[i*stream_.bufferSize], bufferBytes );
1484 else { // fill multiple multi-channel streams with interleaved data
1485 UInt32 streamChannels, channelsLeft, inJump, outJump, inOffset;
1488 bool inInterleaved = ( stream_.userInterleaved ) ? true : false;
1489 UInt32 inChannels = stream_.nUserChannels[0];
1490 if ( stream_.doConvertBuffer[0] ) {
1491 inInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
1492 inChannels = stream_.nDeviceChannels[0];
1495 if ( inInterleaved ) inOffset = 1;
1496 else inOffset = stream_.bufferSize;
1498 channelsLeft = inChannels;
1499 for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
1501 out = (Float32 *) outBufferList->mBuffers[handle->iStream[0]+i].mData;
1502 streamChannels = outBufferList->mBuffers[handle->iStream[0]+i].mNumberChannels;
1505 // Account for possible channel offset in first stream
1506 if ( i == 0 && stream_.channelOffset[0] > 0 ) {
1507 streamChannels -= stream_.channelOffset[0];
1508 outJump = stream_.channelOffset[0];
1512 // Account for possible unfilled channels at end of the last stream
1513 if ( streamChannels > channelsLeft ) {
1514 outJump = streamChannels - channelsLeft;
1515 streamChannels = channelsLeft;
1518 // Determine input buffer offsets and skips
1519 if ( inInterleaved ) {
1520 inJump = inChannels;
1521 in += inChannels - channelsLeft;
1525 in += (inChannels - channelsLeft) * inOffset;
1528 for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
1529 for ( unsigned int j=0; j<streamChannels; j++ ) {
1530 *out++ = in[j*inOffset];
1535 channelsLeft -= streamChannels;
// When draining, count this cycle toward the drain handshake checked
// at the top of the function.
1540 if ( handle->drainCounter ) {
1541 handle->drainCounter++;
// ---- Input side: copy the CoreAudio input buffers to the user (or
// internal device) buffer. ----
1546 AudioDeviceID inputDevice;
1547 inputDevice = handle->id[1];
1548 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
1550 if ( handle->nStreams[1] == 1 ) {
1551 if ( stream_.doConvertBuffer[1] ) { // convert directly from CoreAudio stream buffer
1552 convertBuffer( stream_.userBuffer[1],
1553 (char *) inBufferList->mBuffers[handle->iStream[1]].mData,
1554 stream_.convertInfo[1] );
1556 else { // copy to user buffer
1557 memcpy( stream_.userBuffer[1],
1558 inBufferList->mBuffers[handle->iStream[1]].mData,
1559 inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
1562 else { // read from multiple streams
1563 Float32 *outBuffer = (Float32 *) stream_.userBuffer[1];
1564 if ( stream_.doConvertBuffer[1] ) outBuffer = (Float32 *) stream_.deviceBuffer;
1566 if ( stream_.deviceInterleaved[1] == false ) { // mono mode
1567 UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
1568 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
1569 memcpy( (void *)&outBuffer[i*stream_.bufferSize],
1570 inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
1573 else { // read from multiple multi-channel streams
1574 UInt32 streamChannels, channelsLeft, inJump, outJump, outOffset;
1577 bool outInterleaved = ( stream_.userInterleaved ) ? true : false;
1578 UInt32 outChannels = stream_.nUserChannels[1];
1579 if ( stream_.doConvertBuffer[1] ) {
1580 outInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
1581 outChannels = stream_.nDeviceChannels[1];
1584 if ( outInterleaved ) outOffset = 1;
1585 else outOffset = stream_.bufferSize;
1587 channelsLeft = outChannels;
1588 for ( unsigned int i=0; i<handle->nStreams[1]; i++ ) {
1590 in = (Float32 *) inBufferList->mBuffers[handle->iStream[1]+i].mData;
1591 streamChannels = inBufferList->mBuffers[handle->iStream[1]+i].mNumberChannels;
1594 // Account for possible channel offset in first stream
1595 if ( i == 0 && stream_.channelOffset[1] > 0 ) {
1596 streamChannels -= stream_.channelOffset[1];
1597 inJump = stream_.channelOffset[1];
1601 // Account for possible unread channels at end of the last stream
1602 if ( streamChannels > channelsLeft ) {
1603 inJump = streamChannels - channelsLeft;
1604 streamChannels = channelsLeft;
1607 // Determine output buffer offsets and skips
1608 if ( outInterleaved ) {
1609 outJump = outChannels;
1610 out += outChannels - channelsLeft;
1614 out += (outChannels - channelsLeft) * outOffset;
1617 for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
1618 for ( unsigned int j=0; j<streamChannels; j++ ) {
1619 out[j*outOffset] = *in++;
1624 channelsLeft -= streamChannels;
1628 if ( stream_.doConvertBuffer[1] ) { // convert from our internal "device" buffer
1629 convertBuffer( stream_.userBuffer[1],
1630 stream_.deviceBuffer,
1631 stream_.convertInfo[1] );
// Release the mutex and advance the stream-time counter for this cycle.
1637 MUTEX_UNLOCK( &stream_.mutex );
1639 RtApi::tickStreamTime();
1643 const char* RtApiCore :: getErrorCode( OSStatus code )
1647 case kAudioHardwareNotRunningError:
1648 return "kAudioHardwareNotRunningError";
1650 case kAudioHardwareUnspecifiedError:
1651 return "kAudioHardwareUnspecifiedError";
1653 case kAudioHardwareUnknownPropertyError:
1654 return "kAudioHardwareUnknownPropertyError";
1656 case kAudioHardwareBadPropertySizeError:
1657 return "kAudioHardwareBadPropertySizeError";
1659 case kAudioHardwareIllegalOperationError:
1660 return "kAudioHardwareIllegalOperationError";
1662 case kAudioHardwareBadObjectError:
1663 return "kAudioHardwareBadObjectError";
1665 case kAudioHardwareBadDeviceError:
1666 return "kAudioHardwareBadDeviceError";
1668 case kAudioHardwareBadStreamError:
1669 return "kAudioHardwareBadStreamError";
1671 case kAudioHardwareUnsupportedOperationError:
1672 return "kAudioHardwareUnsupportedOperationError";
1674 case kAudioDeviceUnsupportedFormatError:
1675 return "kAudioDeviceUnsupportedFormatError";
1677 case kAudioDevicePermissionsError:
1678 return "kAudioDevicePermissionsError";
1681 return "CoreAudio unknown error";
1685 //******************** End of __MACOSX_CORE__ *********************//
1688 #if defined(__UNIX_JACK__)
1690 // JACK is a low-latency audio server, originally written for the
1691 // GNU/Linux operating system and now also ported to OS-X. It can
1692 // connect a number of different applications to an audio device, as
1693 // well as allowing them to share audio between themselves.
1695 // When using JACK with RtAudio, "devices" refer to JACK clients that
1696 // have ports connected to the server. The JACK server is typically
1697 // started in a terminal as follows:
1699 //    jackd -d alsa -d hw:0
1701 // or through an interface program such as qjackctl. Many of the
1702 // parameters normally set for a stream are fixed by the JACK server
1703 // and can be specified when the JACK server is started. In
1706 //    jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
1708 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
1709 // frames, and number of buffers = 4. Once the server is running, it
1710 // is not possible to override these values. If the values are not
1711 // specified in the command-line, the JACK server uses default values.
1713 // The JACK server does not have to be running when an instance of
1714 // RtApiJack is created, though the function getDeviceCount() will
1715 // report 0 devices found until JACK has been started. When no
1716 // devices are available (i.e., the JACK server is not running), a
1717 // stream cannot be opened.
1719 #include <jack/jack.h>
1722 // A structure to hold various information related to the Jack API
1725 jack_client_t *client;
1726 jack_port_t **ports[2];
1727 std::string deviceName[2];
1729 pthread_cond_t condition;
1730 int drainCounter; // Tracks callback counts when draining
1731 bool internalDrain; // Indicates if stop is initiated from callback or not.
1734 :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
// Error-callback stub handed to jack_set_error_function(); it simply
// discards JACK's internal error messages.
void jackSilentError( const char * ) {}
1739 RtApiJack :: RtApiJack()
1741 // Nothing to do here.
1742 #if !defined(__RTAUDIO_DEBUG__)
1743 // Turn off Jack's internal error reporting.
1744 jack_set_error_function( &jackSilentError );
1748 RtApiJack :: ~RtApiJack()
1750 if ( stream_.state != STREAM_CLOSED ) closeStream();
1753 unsigned int RtApiJack :: getDeviceCount( void )
1755 // See if we can become a jack client.
1756 jack_options_t options = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption;
1757 jack_status_t *status = NULL;
1758 jack_client_t *client = jack_client_open( "RtApiJackCount", options, status );
1759 if ( client == 0 ) return 0;
1762 std::string port, previousPort;
1763 unsigned int nChannels = 0, nDevices = 0;
1764 ports = jack_get_ports( client, NULL, NULL, 0 );
1766 // Parse the port names up to the first colon (:).
1769 port = (char *) ports[ nChannels ];
1770 iColon = port.find(":");
1771 if ( iColon != std::string::npos ) {
1772 port = port.substr( 0, iColon + 1 );
1773 if ( port != previousPort ) {
1775 previousPort = port;
1778 } while ( ports[++nChannels] );
1782 jack_client_close( client );
1786 RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
1788 RtAudio::DeviceInfo info;
1789 info.probed = false;
1791 jack_options_t options = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption
1792 jack_status_t *status = NULL;
1793 jack_client_t *client = jack_client_open( "RtApiJackInfo", options, status );
1794 if ( client == 0 ) {
1795 errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
1796 error( RtError::WARNING );
1801 std::string port, previousPort;
1802 unsigned int nPorts = 0, nDevices = 0;
1803 ports = jack_get_ports( client, NULL, NULL, 0 );
1805 // Parse the port names up to the first colon (:).
1808 port = (char *) ports[ nPorts ];
1809 iColon = port.find(":");
1810 if ( iColon != std::string::npos ) {
1811 port = port.substr( 0, iColon );
1812 if ( port != previousPort ) {
1813 if ( nDevices == device ) info.name = port;
1815 previousPort = port;
1818 } while ( ports[++nPorts] );
1822 if ( device >= nDevices ) {
1823 errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
1824 error( RtError::INVALID_USE );
1827 // Get the current jack server sample rate.
1828 info.sampleRates.clear();
1829 info.sampleRates.push_back( jack_get_sample_rate( client ) );
1831 // Count the available ports containing the client name as device
1832 // channels. Jack "input ports" equal RtAudio output channels.
1833 unsigned int nChannels = 0;
1834 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );
1836 while ( ports[ nChannels ] ) nChannels++;
1838 info.outputChannels = nChannels;
1841 // Jack "output ports" equal RtAudio input channels.
1843 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );
1845 while ( ports[ nChannels ] ) nChannels++;
1847 info.inputChannels = nChannels;
1850 if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
1851 jack_client_close(client);
1852 errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
1853 error( RtError::WARNING );
1857 // If device opens for both playback and capture, we determine the channels.
1858 if ( info.outputChannels > 0 && info.inputChannels > 0 )
1859 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
1861 // Jack always uses 32-bit floats.
1862 info.nativeFormats = RTAUDIO_FLOAT32;
1864 // Jack doesn't provide default devices so we'll use the first available one.
1865 if ( device == 0 && info.outputChannels > 0 )
1866 info.isDefaultOutput = true;
1867 if ( device == 0 && info.inputChannels > 0 )
1868 info.isDefaultInput = true;
1870 jack_client_close(client);
1875 int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
1877 CallbackInfo *info = (CallbackInfo *) infoPointer;
1879 RtApiJack *object = (RtApiJack *) info->object;
1880 if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;
1885 void jackShutdown( void *infoPointer )
1887 CallbackInfo *info = (CallbackInfo *) infoPointer;
1888 RtApiJack *object = (RtApiJack *) info->object;
1890 // Check current stream state. If stopped, then we'll assume this
1891 // was called as a result of a call to RtApiJack::stopStream (the
1892 // deactivation of a client handle causes this function to be called).
1893 // If not, we'll assume the Jack server is shutting down or some
1894 // other problem occurred and we should close the stream.
1895 if ( object->isStreamRunning() == false ) return;
1897 object->closeStream();
1898 std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
1901 int jackXrun( void *infoPointer )
1903 JackHandle *handle = (JackHandle *) infoPointer;
1905 if ( handle->ports[0] ) handle->xrun[0] = true;
1906 if ( handle->ports[1] ) handle->xrun[1] = true;
1911 bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
1912 unsigned int firstChannel, unsigned int sampleRate,
1913 RtAudioFormat format, unsigned int *bufferSize,
1914 RtAudio::StreamOptions *options )
1916 JackHandle *handle = (JackHandle *) stream_.apiHandle;
1918 // Look for jack server and try to become a client (only do once per stream).
1919 jack_client_t *client = 0;
1920 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
1921 jack_options_t jackoptions = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption;
1922 jack_status_t *status = NULL;
1923 if ( options && !options->streamName.empty() )
1924 client = jack_client_open( options->streamName.c_str(), jackoptions, status );
1926 client = jack_client_open( "RtApiJack", jackoptions, status );
1927 if ( client == 0 ) {
1928 errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
1929 error( RtError::WARNING );
1934 // The handle must have been created on an earlier pass.
1935 client = handle->client;
1939 std::string port, previousPort, deviceName;
1940 unsigned int nPorts = 0, nDevices = 0;
1941 ports = jack_get_ports( client, NULL, NULL, 0 );
1943 // Parse the port names up to the first colon (:).
1946 port = (char *) ports[ nPorts ];
1947 iColon = port.find(":");
1948 if ( iColon != std::string::npos ) {
1949 port = port.substr( 0, iColon );
1950 if ( port != previousPort ) {
1951 if ( nDevices == device ) deviceName = port;
1953 previousPort = port;
1956 } while ( ports[++nPorts] );
1960 if ( device >= nDevices ) {
1961 errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
1965 // Count the available ports containing the client name as device
1966 // channels. Jack "input ports" equal RtAudio output channels.
1967 unsigned int nChannels = 0;
1968 unsigned long flag = JackPortIsInput;
1969 if ( mode == INPUT ) flag = JackPortIsOutput;
1970 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1972 while ( ports[ nChannels ] ) nChannels++;
1976 // Compare the jack ports for specified client to the requested number of channels.
1977 if ( nChannels < (channels + firstChannel) ) {
1978 errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
1979 errorText_ = errorStream_.str();
1983 // Check the jack server sample rate.
1984 unsigned int jackRate = jack_get_sample_rate( client );
1985 if ( sampleRate != jackRate ) {
1986 jack_client_close( client );
1987 errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
1988 errorText_ = errorStream_.str();
1991 stream_.sampleRate = jackRate;
1993 // Get the latency of the JACK port.
1994 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1995 if ( ports[ firstChannel ] )
1996 stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
1999 // The jack server always uses 32-bit floating-point data.
2000 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2001 stream_.userFormat = format;
2003 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2004 else stream_.userInterleaved = true;
2006 // Jack always uses non-interleaved buffers.
2007 stream_.deviceInterleaved[mode] = false;
2009 // Jack always provides host byte-ordered data.
2010 stream_.doByteSwap[mode] = false;
2012 // Get the buffer size. The buffer size and number of buffers
2013 // (periods) is set when the jack server is started.
2014 stream_.bufferSize = (int) jack_get_buffer_size( client );
2015 *bufferSize = stream_.bufferSize;
2017 stream_.nDeviceChannels[mode] = channels;
2018 stream_.nUserChannels[mode] = channels;
2020 // Set flags for buffer conversion.
2021 stream_.doConvertBuffer[mode] = false;
2022 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2023 stream_.doConvertBuffer[mode] = true;
2024 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2025 stream_.nUserChannels[mode] > 1 )
2026 stream_.doConvertBuffer[mode] = true;
2028 // Allocate our JackHandle structure for the stream.
2029 if ( handle == 0 ) {
2031 handle = new JackHandle;
2033 catch ( std::bad_alloc& ) {
2034 errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
2038 if ( pthread_cond_init(&handle->condition, NULL) ) {
2039 errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
2042 stream_.apiHandle = (void *) handle;
2043 handle->client = client;
2045 handle->deviceName[mode] = deviceName;
2047 // Allocate necessary internal buffers.
2048 unsigned long bufferBytes;
2049 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2050 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2051 if ( stream_.userBuffer[mode] == NULL ) {
2052 errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
2056 if ( stream_.doConvertBuffer[mode] ) {
2058 bool makeBuffer = true;
2059 if ( mode == OUTPUT )
2060 bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2061 else { // mode == INPUT
2062 bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
2063 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2064 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2065 if ( bufferBytes < bytesOut ) makeBuffer = false;
2070 bufferBytes *= *bufferSize;
2071 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2072 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2073 if ( stream_.deviceBuffer == NULL ) {
2074 errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
2080 // Allocate memory for the Jack ports (channels) identifiers.
2081 handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
2082 if ( handle->ports[mode] == NULL ) {
2083 errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
2087 stream_.device[mode] = device;
2088 stream_.channelOffset[mode] = firstChannel;
2089 stream_.state = STREAM_STOPPED;
2090 stream_.callbackInfo.object = (void *) this;
2092 if ( stream_.mode == OUTPUT && mode == INPUT )
2093 // We had already set up the stream for output.
2094 stream_.mode = DUPLEX;
2096 stream_.mode = mode;
2097 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
2098 jack_set_xrun_callback( handle->client, jackXrun, (void *) &handle );
2099 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
2102 // Register our ports.
2104 if ( mode == OUTPUT ) {
2105 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2106 snprintf( label, 64, "outport %d", i );
2107 handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
2108 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
2112 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2113 snprintf( label, 64, "inport %d", i );
2114 handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
2115 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
2119 // Setup the buffer conversion information structure. We don't use
2120 // buffers to do channel offsets, so we override that parameter
2122 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
2128 pthread_cond_destroy( &handle->condition );
2129 jack_client_close( handle->client );
2131 if ( handle->ports[0] ) free( handle->ports[0] );
2132 if ( handle->ports[1] ) free( handle->ports[1] );
2135 stream_.apiHandle = 0;
2138 for ( int i=0; i<2; i++ ) {
2139 if ( stream_.userBuffer[i] ) {
2140 free( stream_.userBuffer[i] );
2141 stream_.userBuffer[i] = 0;
2145 if ( stream_.deviceBuffer ) {
2146 free( stream_.deviceBuffer );
2147 stream_.deviceBuffer = 0;
// Close an open JACK stream: deactivate the client if the stream is
// running, close the JACK client connection, then free every
// per-stream resource (port arrays, condition variable, user and
// device buffers) and reset the stream bookkeeping.
// Issues only a WARNING if there is no open stream to close.
2153 void RtApiJack :: closeStream( void )
2155 if ( stream_.state == STREAM_CLOSED ) {
2156 errorText_ = "RtApiJack::closeStream(): no open stream to close!";
2157 error( RtError::WARNING );
2161 JackHandle *handle = (JackHandle *) stream_.apiHandle;
// A running stream must be deactivated before the client is closed so
// the process callback stops firing.
2164 if ( stream_.state == STREAM_RUNNING )
2165 jack_deactivate( handle->client );
2167 jack_client_close( handle->client );
// Release the jack_port_t* arrays allocated in probeDeviceOpen()
// (index 0 = output mode, index 1 = input mode).
2171 if ( handle->ports[0] ) free( handle->ports[0] );
2172 if ( handle->ports[1] ) free( handle->ports[1] );
2173 pthread_cond_destroy( &handle->condition );
2175 stream_.apiHandle = 0;
// Free the per-direction user buffers and the shared device
// (conversion) buffer, if they were allocated.
2178 for ( int i=0; i<2; i++ ) {
2179 if ( stream_.userBuffer[i] ) {
2180 free( stream_.userBuffer[i] );
2181 stream_.userBuffer[i] = 0;
2185 if ( stream_.deviceBuffer ) {
2186 free( stream_.deviceBuffer );
2187 stream_.deviceBuffer = 0;
// Mark the stream object as fully closed.
2190 stream_.mode = UNINITIALIZED;
2191 stream_.state = STREAM_CLOSED;
// Start a stopped JACK stream: activate the client, then wire our
// registered ports to the device's physical ports.  Note the naming
// inversion: RtAudio OUTPUT channels connect to JACK *input* ports of
// the device client, and vice versa.
2194 void RtApiJack :: startStream( void )
2197 if ( stream_.state == STREAM_RUNNING ) {
2198 errorText_ = "RtApiJack::startStream(): the stream is already running!";
2199 error( RtError::WARNING );
2203 MUTEX_LOCK(&stream_.mutex);
2205 JackHandle *handle = (JackHandle *) stream_.apiHandle;
// Activation starts the JACK process callback for this client.
2206 int result = jack_activate( handle->client );
2208 errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
2214 // Get the list of available ports.
2215 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2217 ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
2218 if ( ports == NULL) {
2219 errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
2223 // Now make the port connections. Since RtAudio wasn't designed to
2224 // allow the user to select particular channels of a device, we'll
2225 // just open the first "nChannels" ports with offset.
2226 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2228 if ( ports[ stream_.channelOffset[0] + i ] )
2229 result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
2232 errorText_ = "RtApiJack::startStream(): error connecting output ports!";
// Input side: connect the device's output ports to our input ports.
2239 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2241 ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );
2242 if ( ports == NULL) {
2243 errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
2247 // Now make the port connections. See note above.
2248 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2250 if ( ports[ stream_.channelOffset[1] + i ] )
2251 result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
2254 errorText_ = "RtApiJack::startStream(): error connecting input ports!";
// Reset drain state and mark the stream running while still holding
// the mutex.
2261 handle->drainCounter = 0;
2262 handle->internalDrain = false;
2263 stream_.state = STREAM_RUNNING;
2266 MUTEX_UNLOCK(&stream_.mutex);
// Any non-zero 'result' left over from activation/connection attempts
// is reported as a system error after the lock is released.
2268 if ( result == 0 ) return;
2269 error( RtError::SYSTEM_ERROR );
// Stop a running JACK stream, allowing any pending output to drain:
// if a drain has not already been requested, set drainCounter and
// block on the condition variable until the process callback signals
// that draining is finished, then deactivate the client.
2272 void RtApiJack :: stopStream( void )
2275 if ( stream_.state == STREAM_STOPPED ) {
2276 errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
2277 error( RtError::WARNING );
2281 MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: another thread may have stopped the stream
// while we were waiting for the mutex.
2283 if ( stream_.state == STREAM_STOPPED ) {
2284 MUTEX_UNLOCK( &stream_.mutex );
2288 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2289 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter == 0 means no drain in progress yet; request one and
// wait for callbackEvent() to signal completion.  pthread_cond_wait
// releases stream_.mutex while blocked.
2291 if ( handle->drainCounter == 0 ) {
2292 handle->drainCounter = 1;
2293 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
2297 jack_deactivate( handle->client );
2298 stream_.state = STREAM_STOPPED;
2300 MUTEX_UNLOCK( &stream_.mutex );
// Abort a running JACK stream without draining pending output: setting
// drainCounter non-zero makes callbackEvent() write zeros to the
// output ports instead of user data (see the drainCounter > 0 branch
// there).  The actual stop presumably follows via stopStream() — the
// listing is elided here; confirm against the full source.
2303 void RtApiJack :: abortStream( void )
2306 if ( stream_.state == STREAM_STOPPED ) {
2307 errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
2308 error( RtError::WARNING );
2312 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2313 handle->drainCounter = 1;
2318 // This function will be called by a spawned thread when the user
2319 // callback function signals that the stream should be stopped or
2320 // aborted. It is necessary to handle it this way because the
2321 // callbackEvent() function must return before the jack_deactivate()
2322 // function will return.
2323 extern "C" void *jackStopStream( void *ptr )
2325 CallbackInfo *info = (CallbackInfo *) ptr;
2326 RtApiJack *object = (RtApiJack *) info->object;
2328 object->stopStream();
2330 pthread_exit( NULL );
// JACK process-cycle handler: runs once per JACK period.  Invokes the
// user callback to produce/consume data, honors drain/abort requests
// via handle->drainCounter, and moves audio between the user buffers
// and the per-channel JACK port buffers (converting format and
// de-interleaving as required).
2333 bool RtApiJack :: callbackEvent( unsigned long nframes )
2335 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
// NOTE(review): the two error strings below say "RtApiCore" but this
// is the JACK backend — copy/paste slip; the text should read
// "RtApiJack::callbackEvent()".
2336 if ( stream_.state == STREAM_CLOSED ) {
2337 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
2338 error( RtError::WARNING );
// RtAudio cannot adapt mid-stream to a JACK buffer-size change.
2341 if ( stream_.bufferSize != nframes ) {
2342 errorText_ = "RtApiCore::callbackEvent(): the JACK buffer size has changed ... cannot process!";
2343 error( RtError::WARNING );
2347 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2348 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2350 // Check if we were draining the stream and signal is finished.
// drainCounter is incremented once per cycle while draining (see
// below); after enough zero-filled cycles the drain is complete.
2351 if ( handle->drainCounter > 3 ) {
// internalDrain: the stop was requested from inside the callback, so
// spawn jackStopStream on its own thread; otherwise wake the thread
// blocked in stopStream() on the condition variable.
2352 if ( handle->internalDrain == true ) {
2354 pthread_create( &id, NULL, jackStopStream, info );
2357 pthread_cond_signal( &handle->condition );
2361 MUTEX_LOCK( &stream_.mutex );
2363 // The state might change while waiting on a mutex.
2364 if ( stream_.state == STREAM_STOPPED ) {
2365 MUTEX_UNLOCK( &stream_.mutex );
2369 // Invoke user callback first, to get fresh output data.
2370 if ( handle->drainCounter == 0 ) {
2371 RtAudioCallback callback = (RtAudioCallback) info->callback;
2372 double streamTime = getStreamTime();
// Report any over/underflow flagged by the xrun callback, then clear
// the flag (xrun[0] = output underflow, xrun[1] = input overflow).
2373 RtAudioStreamStatus status = 0;
2374 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
2375 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2376 handle->xrun[0] = false;
2378 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
2379 status |= RTAUDIO_INPUT_OVERFLOW;
2380 handle->xrun[1] = false;
// The user callback's return value becomes the drain request:
// 1 = stop after draining output, 2 = abort immediately.
2382 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2383 stream_.bufferSize, streamTime, status, info->userData );
2384 if ( handle->drainCounter == 2 ) {
2385 MUTEX_UNLOCK( &stream_.mutex );
2387 pthread_create( &id, NULL, jackStopStream, info );
2390 else if ( handle->drainCounter == 1 )
2391 handle->internalDrain = true;
2394 jack_default_audio_sample_t *jackbuffer;
// Bytes per channel per period; JACK buffers are non-interleaved
// 32-bit float, one buffer per port.
2395 unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
2396 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2398 if ( handle->drainCounter > 0 ) { // write zeros to the output stream
2400 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2401 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2402 memset( jackbuffer, 0, bufferBytes );
// Conversion path: convert user data into deviceBuffer, then scatter
// one channel-sized slice to each JACK port buffer.
2406 else if ( stream_.doConvertBuffer[0] ) {
2408 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
2410 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2411 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2412 memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
2415 else { // no buffer conversion
2416 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2417 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2418 memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
// Count another drained cycle (checked against > 3 at the top).
2422 if ( handle->drainCounter ) {
2423 handle->drainCounter++;
2428 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// Gather each JACK port buffer into deviceBuffer, then convert into
// the user's requested format/interleaving.
2430 if ( stream_.doConvertBuffer[1] ) {
2431 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2432 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2433 memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
2435 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
2437 else { // no buffer conversion
2438 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2439 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2440 memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
2446 MUTEX_UNLOCK(&stream_.mutex);
2448 RtApi::tickStreamTime();
2451 //******************** End of __UNIX_JACK__ *********************//
2454 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
2456 // The ASIO API is designed around a callback scheme, so this
2457 // implementation is similar to that used for OS-X CoreAudio and Linux
2458 // Jack. The primary constraint with ASIO is that it only allows
2459 // access to a single driver at a time. Thus, it is not possible to
2460 // have more than one simultaneous RtAudio stream.
2462 // This implementation also requires a number of external ASIO files
2463 // and a few global variables. The ASIO callback scheme does not
2464 // allow for the passing of user data, so we must create a global
2465 // pointer to our callbackInfo structure.
2467 // On unix systems, we make use of a pthread condition variable.
2468 // Since there is no equivalent in Windows, I hacked something based
2469 // on information found in
2470 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
2472 #include "asiosys.h"
2474 #include "iasiothiscallresolver.h"
2475 #include "asiodrivers.h"
2478 AsioDrivers drivers;
2479 ASIOCallbacks asioCallbacks;
2480 ASIODriverInfo driverInfo;
2481 CallbackInfo *asioCallbackInfo;
2485 int drainCounter; // Tracks callback counts when draining
2486 bool internalDrain; // Indicates if stop is initiated from callback or not.
2487 ASIOBufferInfo *bufferInfos;
2491 :drainCounter(0), internalDrain(false), bufferInfos(0) {}
2494 // Function declarations (definitions at end of section)
2495 static const char* getAsioErrorString( ASIOError result );
2496 void sampleRateChanged( ASIOSampleRate sRate );
2497 long asioMessages( long selector, long value, void* message, double* opt );
// Constructor: initialize COM (ASIO drivers are COM objects that
// require a single-threaded apartment), clear any leftover current
// driver, and pre-fill the ASIODriverInfo used by ASIOInit().
2499 RtApiAsio :: RtApiAsio()
2501 // ASIO cannot run on a multi-threaded apartment. You can call
2502 // CoInitialize beforehand, but it must be for apartment threading
2503 // (in which case, CoInitialize will return S_FALSE here).
2504 coInitialized_ = false;
2505 HRESULT hr = CoInitialize( NULL );
2507 errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
2508 error( RtError::WARNING );
// Remember that we own the COM initialization so the destructor can
// balance it with CoUninitialize().
2510 coInitialized_ = true;
2512 drivers.removeCurrentDriver();
2513 driverInfo.asioVersion = 2;
2515 // See note in DirectSound implementation about GetDesktopWindow().
2516 driverInfo.sysRef = GetForegroundWindow();
2519 RtApiAsio :: ~RtApiAsio()
2521 if ( stream_.state != STREAM_CLOSED ) closeStream();
2522 if ( coInitialized_ ) CoUninitialize();
2525 unsigned int RtApiAsio :: getDeviceCount( void )
2527 return (unsigned int) drivers.asioGetNumDev();
// Probe one ASIO device (driver): load it, query channel counts,
// supported sample rates, and native data format, then unload it.
// While a stream is open ASIO cannot load another driver, so cached
// results saved by saveDeviceInfo() are returned instead.
2530 RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
2532 RtAudio::DeviceInfo info;
2533 info.probed = false;
2536 unsigned int nDevices = getDeviceCount();
2537 if ( nDevices == 0 ) {
2538 errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
2539 error( RtError::INVALID_USE );
2542 if ( device >= nDevices ) {
2543 errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
2544 error( RtError::INVALID_USE );
2547 // If a stream is already open, we cannot probe other devices. Thus, use the saved results.
2548 if ( stream_.state != STREAM_CLOSED ) {
2549 if ( device >= devices_.size() ) {
2550 errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";
2551 error( RtError::WARNING );
2554 return devices_[ device ];
// From here on the stream is closed, so it is safe to load the driver.
2557 char driverName[32];
2558 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2559 if ( result != ASE_OK ) {
2560 errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
2561 errorText_ = errorStream_.str();
2562 error( RtError::WARNING );
2566 info.name = driverName;
2568 if ( !drivers.loadDriver( driverName ) ) {
2569 errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
2570 errorText_ = errorStream_.str();
2571 error( RtError::WARNING );
2575 result = ASIOInit( &driverInfo );
2576 if ( result != ASE_OK ) {
2577 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2578 errorText_ = errorStream_.str();
2579 error( RtError::WARNING );
2583 // Determine the device channel information.
2584 long inputChannels, outputChannels;
2585 result = ASIOGetChannels( &inputChannels, &outputChannels );
2586 if ( result != ASE_OK ) {
2587 drivers.removeCurrentDriver();
2588 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2589 errorText_ = errorStream_.str();
2590 error( RtError::WARNING );
2594 info.outputChannels = outputChannels;
2595 info.inputChannels = inputChannels;
// Duplex capability is limited by the smaller of the two directions.
2596 if ( info.outputChannels > 0 && info.inputChannels > 0 )
2597 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
2599 // Determine the supported sample rates.
2600 info.sampleRates.clear();
2601 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
2602 result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
2603 if ( result == ASE_OK )
2604 info.sampleRates.push_back( SAMPLE_RATES[i] );
2607 // Determine supported data types ... just check first channel and assume rest are the same.
2608 ASIOChannelInfo channelInfo;
2609 channelInfo.channel = 0;
2610 channelInfo.isInput = true;
2611 if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
2612 result = ASIOGetChannelInfo( &channelInfo );
2613 if ( result != ASE_OK ) {
2614 drivers.removeCurrentDriver();
2615 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
2616 errorText_ = errorStream_.str();
2617 error( RtError::WARNING );
// Map the ASIO sample type to the RtAudio format flags (both byte
// orders of each width map to the same RtAudio format).
2621 info.nativeFormats = 0;
2622 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
2623 info.nativeFormats |= RTAUDIO_SINT16;
2624 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
2625 info.nativeFormats |= RTAUDIO_SINT32;
2626 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
2627 info.nativeFormats |= RTAUDIO_FLOAT32;
2628 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
2629 info.nativeFormats |= RTAUDIO_FLOAT64;
2631 if ( getDefaultOutputDevice() == device )
2632 info.isDefaultOutput = true;
2633 if ( getDefaultInputDevice() == device )
2634 info.isDefaultInput = true;
// Probe complete — unload the driver before returning.
2637 drivers.removeCurrentDriver();
2641 void bufferSwitch( long index, ASIOBool processNow )
2643 RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
2644 object->callbackEvent( index );
2647 void RtApiAsio :: saveDeviceInfo( void )
2651 unsigned int nDevices = getDeviceCount();
2652 devices_.resize( nDevices );
2653 for ( unsigned int i=0; i<nDevices; i++ )
2654 devices_[i] = getDeviceInfo( i );
2657 bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
2658 unsigned int firstChannel, unsigned int sampleRate,
2659 RtAudioFormat format, unsigned int *bufferSize,
2660 RtAudio::StreamOptions *options )
2662 // For ASIO, a duplex stream MUST use the same driver.
2663 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
2664 errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
2668 char driverName[32];
2669 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2670 if ( result != ASE_OK ) {
2671 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
2672 errorText_ = errorStream_.str();
2676 // The getDeviceInfo() function will not work when a stream is open
2677 // because ASIO does not allow multiple devices to run at the same
2678 // time. Thus, we'll probe the system before opening a stream and
2679 // save the results for use by getDeviceInfo().
2680 this->saveDeviceInfo();
2682 // Only load the driver once for duplex stream.
2683 if ( mode != INPUT || stream_.mode != OUTPUT ) {
2684 if ( !drivers.loadDriver( driverName ) ) {
2685 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
2686 errorText_ = errorStream_.str();
2690 result = ASIOInit( &driverInfo );
2691 if ( result != ASE_OK ) {
2692 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2693 errorText_ = errorStream_.str();
2698 // Check the device channel count.
2699 long inputChannels, outputChannels;
2700 result = ASIOGetChannels( &inputChannels, &outputChannels );
2701 if ( result != ASE_OK ) {
2702 drivers.removeCurrentDriver();
2703 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2704 errorText_ = errorStream_.str();
2708 if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
2709 ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
2710 drivers.removeCurrentDriver();
2711 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
2712 errorText_ = errorStream_.str();
2715 stream_.nDeviceChannels[mode] = channels;
2716 stream_.nUserChannels[mode] = channels;
2717 stream_.channelOffset[mode] = firstChannel;
2719 // Verify the sample rate is supported.
2720 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
2721 if ( result != ASE_OK ) {
2722 drivers.removeCurrentDriver();
2723 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
2724 errorText_ = errorStream_.str();
2728 // Get the current sample rate
2729 ASIOSampleRate currentRate;
2730 result = ASIOGetSampleRate( ¤tRate );
2731 if ( result != ASE_OK ) {
2732 drivers.removeCurrentDriver();
2733 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
2734 errorText_ = errorStream_.str();
2738 // Set the sample rate only if necessary
2739 if ( currentRate != sampleRate ) {
2740 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
2741 if ( result != ASE_OK ) {
2742 drivers.removeCurrentDriver();
2743 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
2744 errorText_ = errorStream_.str();
2749 // Determine the driver data type.
2750 ASIOChannelInfo channelInfo;
2751 channelInfo.channel = 0;
2752 if ( mode == OUTPUT ) channelInfo.isInput = false;
2753 else channelInfo.isInput = true;
2754 result = ASIOGetChannelInfo( &channelInfo );
2755 if ( result != ASE_OK ) {
2756 drivers.removeCurrentDriver();
2757 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
2758 errorText_ = errorStream_.str();
2762 // Assuming WINDOWS host is always little-endian.
2763 stream_.doByteSwap[mode] = false;
2764 stream_.userFormat = format;
2765 stream_.deviceFormat[mode] = 0;
2766 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
2767 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
2768 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
2770 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
2771 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
2772 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
2774 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
2775 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2776 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
2778 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
2779 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
2780 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
2783 if ( stream_.deviceFormat[mode] == 0 ) {
2784 drivers.removeCurrentDriver();
2785 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
2786 errorText_ = errorStream_.str();
2790 // Set the buffer size. For a duplex stream, this will end up
2791 // setting the buffer size based on the input constraints, which
2793 long minSize, maxSize, preferSize, granularity;
2794 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
2795 if ( result != ASE_OK ) {
2796 drivers.removeCurrentDriver();
2797 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
2798 errorText_ = errorStream_.str();
2802 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2803 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2804 else if ( granularity == -1 ) {
2805 // Make sure bufferSize is a power of two.
2806 int log2_of_min_size = 0;
2807 int log2_of_max_size = 0;
2809 for ( unsigned int i = 0; i < sizeof(long) * 8; i++ ) {
2810 if ( minSize & ((long)1 << i) ) log2_of_min_size = i;
2811 if ( maxSize & ((long)1 << i) ) log2_of_max_size = i;
2814 long min_delta = std::abs( (long)*bufferSize - ((long)1 << log2_of_min_size) );
2815 int min_delta_num = log2_of_min_size;
2817 for (int i = log2_of_min_size + 1; i <= log2_of_max_size; i++) {
2818 long current_delta = std::abs( (long)*bufferSize - ((long)1 << i) );
2819 if (current_delta < min_delta) {
2820 min_delta = current_delta;
2825 *bufferSize = ( (unsigned int)1 << min_delta_num );
2826 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2827 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2829 else if ( granularity != 0 ) {
2830 // Set to an even multiple of granularity, rounding up.
2831 *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
2834 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) {
2835 drivers.removeCurrentDriver();
2836 errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
2840 stream_.bufferSize = *bufferSize;
2841 stream_.nBuffers = 2;
2843 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2844 else stream_.userInterleaved = true;
2846 // ASIO always uses non-interleaved buffers.
2847 stream_.deviceInterleaved[mode] = false;
2849 // Allocate, if necessary, our AsioHandle structure for the stream.
2850 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2851 if ( handle == 0 ) {
2853 handle = new AsioHandle;
2855 catch ( std::bad_alloc& ) {
2856 //if ( handle == NULL ) {
2857 drivers.removeCurrentDriver();
2858 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
2861 handle->bufferInfos = 0;
2863 // Create a manual-reset event.
2864 handle->condition = CreateEvent( NULL, // no security
2865 TRUE, // manual-reset
2866 FALSE, // non-signaled initially
2868 stream_.apiHandle = (void *) handle;
2871 // Create the ASIO internal buffers. Since RtAudio sets up input
2872 // and output separately, we'll have to dispose of previously
2873 // created output buffers for a duplex stream.
2874 long inputLatency, outputLatency;
2875 if ( mode == INPUT && stream_.mode == OUTPUT ) {
2876 ASIODisposeBuffers();
2877 if ( handle->bufferInfos ) free( handle->bufferInfos );
2880 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
2881 bool buffersAllocated = false;
2882 unsigned int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2883 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
2884 if ( handle->bufferInfos == NULL ) {
2885 errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
2886 errorText_ = errorStream_.str();
2890 ASIOBufferInfo *infos;
2891 infos = handle->bufferInfos;
2892 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
2893 infos->isInput = ASIOFalse;
2894 infos->channelNum = i + stream_.channelOffset[0];
2895 infos->buffers[0] = infos->buffers[1] = 0;
2897 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
2898 infos->isInput = ASIOTrue;
2899 infos->channelNum = i + stream_.channelOffset[1];
2900 infos->buffers[0] = infos->buffers[1] = 0;
2903 // Set up the ASIO callback structure and create the ASIO data buffers.
2904 asioCallbacks.bufferSwitch = &bufferSwitch;
2905 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
2906 asioCallbacks.asioMessage = &asioMessages;
2907 asioCallbacks.bufferSwitchTimeInfo = NULL;
2908 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
2909 if ( result != ASE_OK ) {
2910 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
2911 errorText_ = errorStream_.str();
2914 buffersAllocated = true;
2916 // Set flags for buffer conversion.
2917 stream_.doConvertBuffer[mode] = false;
2918 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2919 stream_.doConvertBuffer[mode] = true;
2920 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2921 stream_.nUserChannels[mode] > 1 )
2922 stream_.doConvertBuffer[mode] = true;
2924 // Allocate necessary internal buffers
2925 unsigned long bufferBytes;
2926 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2927 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2928 if ( stream_.userBuffer[mode] == NULL ) {
2929 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
2933 if ( stream_.doConvertBuffer[mode] ) {
2935 bool makeBuffer = true;
2936 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
2937 if ( mode == INPUT ) {
2938 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2939 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2940 if ( bufferBytes <= bytesOut ) makeBuffer = false;
2945 bufferBytes *= *bufferSize;
2946 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2947 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2948 if ( stream_.deviceBuffer == NULL ) {
2949 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
2955 stream_.sampleRate = sampleRate;
2956 stream_.device[mode] = device;
2957 stream_.state = STREAM_STOPPED;
2958 asioCallbackInfo = &stream_.callbackInfo;
2959 stream_.callbackInfo.object = (void *) this;
2960 if ( stream_.mode == OUTPUT && mode == INPUT )
2961 // We had already set up an output stream.
2962 stream_.mode = DUPLEX;
2964 stream_.mode = mode;
2966 // Determine device latencies
2967 result = ASIOGetLatencies( &inputLatency, &outputLatency );
2968 if ( result != ASE_OK ) {
2969 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
2970 errorText_ = errorStream_.str();
2971 error( RtError::WARNING); // warn but don't fail
2974 stream_.latency[0] = outputLatency;
2975 stream_.latency[1] = inputLatency;
2978 // Setup the buffer conversion information structure. We don't use
2979 // buffers to do channel offsets, so we override that parameter
2981 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
2986 if ( buffersAllocated )
2987 ASIODisposeBuffers();
2988 drivers.removeCurrentDriver();
2991 CloseHandle( handle->condition );
2992 if ( handle->bufferInfos )
2993 free( handle->bufferInfos );
2995 stream_.apiHandle = 0;
2998 for ( int i=0; i<2; i++ ) {
2999 if ( stream_.userBuffer[i] ) {
3000 free( stream_.userBuffer[i] );
3001 stream_.userBuffer[i] = 0;
3005 if ( stream_.deviceBuffer ) {
3006 free( stream_.deviceBuffer );
3007 stream_.deviceBuffer = 0;
// Close the ASIO stream: stop it if running, dispose driver buffers,
// release the driver, and free all handle/user/device buffer memory.
// NOTE(review): gaps in the original line numbering show that some lines
// (enclosing braces, the ASIOStop() call, handle deletion) were elided
// from this listing — confirm against the canonical source.
void RtApiAsio :: closeStream()
if ( stream_.state == STREAM_CLOSED ) {
  // Closing an unopened/already-closed stream is only a warning.
  errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
  error( RtError::WARNING );
if ( stream_.state == STREAM_RUNNING ) {
  // Mark stopped first so the callback bails out before teardown.
  stream_.state = STREAM_STOPPED;
ASIODisposeBuffers();
drivers.removeCurrentDriver();

// Release the per-stream ASIO handle resources.
AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
CloseHandle( handle->condition );
if ( handle->bufferInfos )
  free( handle->bufferInfos );
stream_.apiHandle = 0;

// Free the user buffers for both directions (0 = output, 1 = input).
for ( int i=0; i<2; i++ ) {
  if ( stream_.userBuffer[i] ) {
    free( stream_.userBuffer[i] );
    stream_.userBuffer[i] = 0;

// Free the (shared) device conversion buffer, if any.
if ( stream_.deviceBuffer ) {
  free( stream_.deviceBuffer );
  stream_.deviceBuffer = 0;

stream_.mode = UNINITIALIZED;
stream_.state = STREAM_CLOSED;
// Start the previously opened ASIO stream.  Warns if already running;
// raises SYSTEM_ERROR if the driver refuses to start.
// NOTE(review): this listing elides a few lines (gaps in the original
// numbering), e.g. the early return after the warning and the goto that
// skips the state update on failure.
void RtApiAsio :: startStream()
if ( stream_.state == STREAM_RUNNING ) {
  errorText_ = "RtApiAsio::startStream(): the stream is already running!";
  error( RtError::WARNING );

// Serialize state changes against the audio callback thread.
MUTEX_LOCK( &stream_.mutex );

AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
ASIOError result = ASIOStart();
if ( result != ASE_OK ) {
  errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
  errorText_ = errorStream_.str();

// Reset drain bookkeeping for the new run.
handle->drainCounter = 0;
handle->internalDrain = false;
stream_.state = STREAM_RUNNING;

MUTEX_UNLOCK( &stream_.mutex );

// Only report the failure after the mutex has been released.
if ( result == ASE_OK ) return;
error( RtError::SYSTEM_ERROR );
// Stop the ASIO stream, draining pending output first for OUTPUT/DUPLEX
// modes.  Warns if already stopped; raises SYSTEM_ERROR on driver failure.
// NOTE(review): braces/returns are elided from this listing (gaps in the
// original numbering).
void RtApiAsio :: stopStream()
if ( stream_.state == STREAM_STOPPED ) {
  errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
  error( RtError::WARNING );

MUTEX_LOCK( &stream_.mutex );

// The state may have changed while we waited for the mutex.
if ( stream_.state == STREAM_STOPPED ) {
  MUTEX_UNLOCK( &stream_.mutex );

AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
  if ( handle->drainCounter == 0 ) {
    // Ask the callback to drain (drainCounter == 1), then wait for it to
    // signal completion via handle->condition (set when drainCounter > 3
    // in callbackEvent).
    handle->drainCounter = 1;
    MUTEX_UNLOCK( &stream_.mutex );
    WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE );  // block until signaled
    ResetEvent( handle->condition );
    MUTEX_LOCK( &stream_.mutex );

ASIOError result = ASIOStop();
if ( result != ASE_OK ) {
  errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
  errorText_ = errorStream_.str();

stream_.state = STREAM_STOPPED;
MUTEX_UNLOCK( &stream_.mutex );

// Report the driver failure only after releasing the mutex.
if ( result == ASE_OK ) return;
error( RtError::SYSTEM_ERROR );
// Abort the ASIO stream.  For this API, abort is implemented as an
// ordinary stop (see the explanation below); warns if already stopped.
// NOTE(review): the actual stopStream() call appears to be elided from
// this listing (gap in the original numbering after the comments).
void RtApiAsio :: abortStream()
if ( stream_.state == STREAM_STOPPED ) {
  errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
  error( RtError::WARNING );

// The following lines were commented-out because some behavior was
// noted where the device buffers need to be zeroed to avoid
// continuing sound, even when the device buffers are completely
// disposed.  So now, calling abort is the same as calling stop.
// AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
// handle->drainCounter = 1;
// Per-buffer ASIO callback handler.  Invokes the user callback (unless
// draining), converts/byte-swaps data between the user format and the
// per-channel ASIO device buffers selected by bufferIndex, and manages
// the drain handshake with stopStream().
// NOTE(review): several lines (braces, return statements, the memcpy
// lengths in the input section, the unlock: label and ASIOOutputReady()
// call) are elided from this listing — gaps in the original numbering.
bool RtApiAsio :: callbackEvent( long bufferIndex )
if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
if ( stream_.state == STREAM_CLOSED ) {
  errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
  error( RtError::WARNING );

CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
AsioHandle *handle = (AsioHandle *) stream_.apiHandle;

// Check if we were draining the stream and signal is finished.
if ( handle->drainCounter > 3 ) {
  // An externally initiated drain (stopStream) is waiting on this event.
  if ( handle->internalDrain == false )
    SetEvent( handle->condition );

MUTEX_LOCK( &stream_.mutex );

// The state might change while waiting on a mutex.
if ( stream_.state == STREAM_STOPPED ) goto unlock;

// Invoke user callback to get fresh output data UNLESS we are
// draining the output buffers.
if ( handle->drainCounter == 0 ) {
  RtAudioCallback callback = (RtAudioCallback) info->callback;
  double streamTime = getStreamTime();
  RtAudioStreamStatus status = 0;
  // Report any xrun flagged by the asioMessages() handler.
  if ( stream_.mode != INPUT && asioXRun == true ) {
    status |= RTAUDIO_OUTPUT_UNDERFLOW;
  if ( stream_.mode != OUTPUT && asioXRun == true ) {
    status |= RTAUDIO_INPUT_OVERFLOW;
  // The callback's return value becomes the drain counter:
  // 0 = continue, 1 = drain then stop, 2 = abort immediately.
  handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
                                   stream_.bufferSize, streamTime, status, info->userData );
  if ( handle->drainCounter == 2 ) {
    MUTEX_UNLOCK( &stream_.mutex );
  else if ( handle->drainCounter == 1 )
    handle->internalDrain = true;

// handle->bufferInfos[] holds output entries first, then input entries
// (see buffer setup in probeDeviceOpen); nChannels spans both.
unsigned int nChannels, bufferBytes, i, j;
nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {

  bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );

  if ( handle->drainCounter > 1 ) { // write zeros to the output stream

    for ( i=0, j=0; i<nChannels; i++ ) {
      if ( handle->bufferInfos[i].isInput != ASIOTrue )
        memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );

  else if ( stream_.doConvertBuffer[0] ) {

    // Convert from the interleaved user buffer, then de-interleave into
    // the per-channel ASIO output buffers.
    convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
    if ( stream_.doByteSwap[0] )
      byteSwapBuffer( stream_.deviceBuffer,
                      stream_.bufferSize * stream_.nDeviceChannels[0],
                      stream_.deviceFormat[0] );

    for ( i=0, j=0; i<nChannels; i++ ) {
      if ( handle->bufferInfos[i].isInput != ASIOTrue )
        memcpy( handle->bufferInfos[i].buffers[bufferIndex],
                &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );

    // No conversion needed: copy channel-by-channel straight from the
    // user buffer.
    if ( stream_.doByteSwap[0] )
      byteSwapBuffer( stream_.userBuffer[0],
                      stream_.bufferSize * stream_.nUserChannels[0],
                      stream_.userFormat );

    for ( i=0, j=0; i<nChannels; i++ ) {
      if ( handle->bufferInfos[i].isInput != ASIOTrue )
        memcpy( handle->bufferInfos[i].buffers[bufferIndex],
                &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );

  // While draining, count callbacks until the output is flushed.
  if ( handle->drainCounter ) {
    handle->drainCounter++;

if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {

  bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);

  if (stream_.doConvertBuffer[1]) {

    // Always interleave ASIO input data.
    for ( i=0, j=0; i<nChannels; i++ ) {
      if ( handle->bufferInfos[i].isInput == ASIOTrue )
        memcpy( &stream_.deviceBuffer[j++*bufferBytes],
                handle->bufferInfos[i].buffers[bufferIndex],

    if ( stream_.doByteSwap[1] )
      byteSwapBuffer( stream_.deviceBuffer,
                      stream_.bufferSize * stream_.nDeviceChannels[1],
                      stream_.deviceFormat[1] );
    convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );

    // No conversion needed: copy each input channel directly into the
    // user buffer.
    for ( i=0, j=0; i<nChannels; i++ ) {
      if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
        memcpy( &stream_.userBuffer[1][bufferBytes*j++],
                handle->bufferInfos[i].buffers[bufferIndex],

    if ( stream_.doByteSwap[1] )
      byteSwapBuffer( stream_.userBuffer[1],
                      stream_.bufferSize * stream_.nUserChannels[1],
                      stream_.userFormat );

// The following call was suggested by Malte Clasen.  While the API
// documentation indicates it should not be required, some device
// drivers apparently do not function correctly without it.

MUTEX_UNLOCK( &stream_.mutex );

RtApi::tickStreamTime();
// ASIO driver callback: the driver reports a sample rate change.  RtAudio
// cannot follow a rate change mid-stream, so the stream is stopped via the
// global asioCallbackInfo object and the user is notified on stderr.
// NOTE(review): the `try {` that pairs with the catch below is elided from
// this listing (gap in the original numbering).
void sampleRateChanged( ASIOSampleRate sRate )
// The ASIO documentation says that this usually only happens during
// external sync.  Audio processing is not stopped by the driver,
// actual sample rate might not have even changed, maybe only the
// sample rate status of an AES/EBU or S/PDIF digital input at the
// audio device.

RtApi *object = (RtApi *) asioCallbackInfo->object;
  object->stopStream();
catch ( RtError &exception ) {
  std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;

std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
// ASIO driver-to-host message callback.  Dispatches on the selector and
// returns a long result (1L typically means "selector supported/handled").
// NOTE(review): the `ret` assignments, `break`s and the final return are
// elided from this listing (gaps in the original numbering).
long asioMessages( long selector, long value, void* message, double* opt )
switch( selector ) {
case kAsioSelectorSupported:
  // Advertise which of the selectors below this host implements.
  if ( value == kAsioResetRequest
       || value == kAsioEngineVersion
       || value == kAsioResyncRequest
       || value == kAsioLatenciesChanged
       // The following three were added for ASIO 2.0, you don't
       // necessarily have to support them.
       || value == kAsioSupportsTimeInfo
       || value == kAsioSupportsTimeCode
       || value == kAsioSupportsInputMonitor)
case kAsioResetRequest:
  // Defer the task and perform the reset of the driver during the
  // next "safe" situation.  You cannot reset the driver right now,
  // as this code is called from the driver.  Reset the driver is
  // done by completely destruct is. I.e. ASIOStop(),
  // ASIODisposeBuffers(), Destruction Afterwards you initialize the
  // driver again.
  std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
case kAsioResyncRequest:
  // This informs the application that the driver encountered some
  // non-fatal data loss.  It is used for synchronization purposes
  // of different media.  Added mainly to work around the Win16Mutex
  // problems in Windows 95/98 with the Windows Multimedia system,
  // which could lose data because the Mutex was held too long by
  // another thread.  However a driver can issue it in other
  // situations, too.
  // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
case kAsioLatenciesChanged:
  // This will inform the host application that the drivers were
  // latencies changed.  Beware, it this does not mean that the
  // buffer sizes have changed!  You might need to update internal
  // delay data.
  std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
case kAsioEngineVersion:
  // Return the supported ASIO version of the host application.  If
  // a host application does not implement this selector, ASIO 1.0
  // is assumed by the driver.
case kAsioSupportsTimeInfo:
  // Informs the driver whether the
  // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
  // For compatibility with ASIO 1.0 drivers the host application
  // should always support the "old" bufferSwitch method, too.
case kAsioSupportsTimeCode:
  // Informs the driver whether application is interested in time
  // code info.  If an application does not need to know about time
  // code, the driver has less work to do.
3382 static const char* getAsioErrorString( ASIOError result )
3390 static Messages m[] =
3392 { ASE_NotPresent, "Hardware input or output is not present or available." },
3393 { ASE_HWMalfunction, "Hardware is malfunctioning." },
3394 { ASE_InvalidParameter, "Invalid input parameter." },
3395 { ASE_InvalidMode, "Invalid mode." },
3396 { ASE_SPNotAdvancing, "Sample position not advancing." },
3397 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
3398 { ASE_NoMemory, "Not enough memory to complete the request." }
3401 for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
3402 if ( m[i].value == result ) return m[i].message;
3404 return "Unknown error.";
3406 //******************** End of __WINDOWS_ASIO__ *********************//
3410 #if defined(__WINDOWS_DS__) // Windows DirectSound API
3412 // Modified by Robin Davies, October 2005
3413 // - Improvements to DirectX pointer chasing.
3414 // - Backdoor RtDsStatistics hook provides DirectX performance information.
3415 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
3416 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
3417 // Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
3422 #if defined(__MINGW32__)
3423 // missing from latest mingw winapi
3424 #define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
3425 #define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
3426 #define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
3427 #define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
3430 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
3432 #ifdef _MSC_VER // if Microsoft Visual C++
3433 #pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
3436 static inline DWORD dsPointerDifference( DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3438 if ( laterPointer > earlierPointer )
3439 return laterPointer - earlierPointer;
3441 return laterPointer - earlierPointer + bufferSize;
3444 static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3446 if ( pointer > bufferSize ) pointer -= bufferSize;
3447 if ( laterPointer < earlierPointer ) laterPointer += bufferSize;
3448 if ( pointer < earlierPointer ) pointer += bufferSize;
3449 return pointer >= earlierPointer && pointer < laterPointer;
// A structure to hold various information related to the DirectSound
// API implementation.
// NOTE(review): the `struct DsHandle {` header and several members
// (presumably id[2], buffer[2] and xrun[2], judging from the constructor
// initializer below) are elided from this listing — confirm against the
// canonical source.
unsigned int drainCounter; // Tracks callback counts when draining
bool internalDrain;        // Indicates if stop is initiated from callback or not.
UINT bufferPointer[2];     // current read/write positions per direction
DWORD dsBufferSize[2];     // DirectSound buffer sizes per direction
DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.

// Default-construct with everything zeroed/false.
:drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
// Out-of-class definition of the static statistics instance used by the
// RtDsStatistics backdoor (see getDsStatistics()).
RtApiDs::RtDsStatistics RtApiDs::statistics;
3472 // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
3473 RtApiDs::RtDsStatistics RtApiDs::getDsStatistics()
3475 RtDsStatistics s = statistics;
3477 // update the calculated fields.
3478 if ( s.inputFrameSize != 0 )
3479 s.latency += s.readDeviceSafeLeadBytes * 1.0 / s.inputFrameSize / s.sampleRate;
3481 if ( s.outputFrameSize != 0 )
3482 s.latency += (s.writeDeviceSafeLeadBytes + s.writeDeviceBufferLeadBytes) * 1.0 / s.outputFrameSize / s.sampleRate;
// Declarations for utility functions, callbacks, and structures
// specific to the DirectSound implementation.
// NOTE(review): additional parameters of deviceQueryCallback and the
// `struct EnumInfo {` header plus its leading members (id, isInput,
// getDefault, findIndex, index — inferred from the initializer below)
// are elided from this listing.
static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
                                          LPCTSTR description,
static char* getErrorString( int code );

extern "C" unsigned __stdcall callbackHandler( void *ptr );

  unsigned int counter;  // running device count during enumeration

  // Default-construct with all flags cleared and counters zeroed.
  : isInput(false), getDefault(false), findIndex(false), counter(0), index(0) {}
3512 RtApiDs :: RtApiDs()
3514 // Dsound will run both-threaded. If CoInitialize fails, then just
3515 // accept whatever the mainline chose for a threading model.
3516 coInitialized_ = false;
3517 HRESULT hr = CoInitialize( NULL );
3518 if ( !FAILED( hr ) ) coInitialized_ = true;
3521 RtApiDs :: ~RtApiDs()
3523 if ( coInitialized_ ) CoUninitialize(); // balanced call.
3524 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Return the combined index of the default capture device.  Output
// devices are enumerated first so that input indices follow the output
// count; the getDefault flag makes deviceQueryCallback stop at the
// default (NULL-GUID) entry.
// NOTE(review): the `EnumInfo info;` declaration, early returns and final
// fallback return are elided from this listing (gaps in the numbering).
// The "getDefaultOutputDevice" text in the first message reflects that
// this pass is only counting output devices.
unsigned int RtApiDs :: getDefaultInputDevice( void )
// Count output devices.
HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") counting output devices!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

// Now enumerate input devices until we find the id = NULL.
info.isInput = true;
info.getDefault = true;
result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") enumerating input devices!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

// counter was advanced past the default device; back up one for its index.
if ( info.counter > 0 ) return info.counter - 1;
// Return the index of the default playback device.  The getDefault flag
// makes deviceQueryCallback stop counting at the default (NULL-GUID)
// entry.
// NOTE(review): the `EnumInfo info;` declaration, early return and final
// fallback return are elided from this listing (gaps in the numbering).
unsigned int RtApiDs :: getDefaultOutputDevice( void )
// Enumerate output devices until we find the id = NULL.
info.getDefault = true;
HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") enumerating output devices!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

// counter was advanced past the default device; back up one for its index.
if ( info.counter > 0 ) return info.counter - 1;
// Return the total number of DirectSound devices: playback devices are
// counted first, then capture devices are added to the same counter.
// NOTE(review): the `EnumInfo info;` declaration and intermediate returns
// are elided from this listing (gaps in the original numbering).
unsigned int RtApiDs :: getDeviceCount( void )
// Count DirectSound devices.
HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

// Count DirectSoundCapture devices.
info.isInput = true;
result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

return info.counter;
// Probe a device by combined index and fill an RtAudio::DeviceInfo with
// its channel counts, supported sample rates and native formats.  The
// output enumeration runs first (even for input devices) so that the
// combined index is resolved consistently; capture probing follows at
// the probeInput label.
// NOTE(review): several lines are elided from this listing (gaps in the
// original numbering): the `EnumInfo dsinfo;` and DSCAPS/DSCCAPS
// declarations, the output->Release()/input->Release() calls, the
// `probeInput:` label and the `return info;` statements.
RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )
// Because DirectSound always enumerates input and output devices
// separately (and because we don't attempt to combine devices
// internally), none of our "devices" will ever be duplex.

RtAudio::DeviceInfo info;
info.probed = false;

// Enumerate through devices to find the id (if it exists).  Note
// that we have to do the output enumeration first, even if this is
// an input device, in order for the device counter to be correct.
dsinfo.findIndex = true;
dsinfo.index = device;
HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating output devices!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

// No output entry matched the index: try the capture side.
if ( dsinfo.name.empty() ) goto probeInput;

LPDIRECTSOUND output;
result = DirectSoundCreate( dsinfo.id, &output, NULL );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

outCaps.dwSize = sizeof( outCaps );
result = output->GetCaps( &outCaps );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

// Get output channel information.
info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;

// Get sample rate information: accept every entry of the global
// SAMPLE_RATES table that falls inside the device's secondary range.
info.sampleRates.clear();
for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
  if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
       SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate )
    info.sampleRates.push_back( SAMPLE_RATES[k] );

// Get format information.
if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;

if ( getDefaultOutputDevice() == device )
  info.isDefaultOutput = true;

// Copy name and return.
info.name = dsinfo.name;

// --- capture-side probe (probeInput label elided above) ---
dsinfo.isInput = true;
result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating input devices!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

if ( dsinfo.name.empty() ) return info;

LPDIRECTSOUNDCAPTURE input;
result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

inCaps.dwSize = sizeof( inCaps );
result = input->GetCaps( &inCaps );
if ( FAILED( result ) ) {
  errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsinfo.name << ")!";
  errorText_ = errorStream_.str();
  error( RtError::WARNING );

// Get input channel information.
info.inputChannels = inCaps.dwChannels;

// Get sample rate and format information from the WAVE_FORMAT bits:
// 1/2/4/96 prefixes map to 11025/22050/44100/96000 Hz; S/M = stereo/mono;
// 16/08 = sample width.
if ( inCaps.dwChannels == 2 ) {
  if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
  if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
  if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
  if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
  if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
  if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
  if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
  if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;

  if ( info.nativeFormats & RTAUDIO_SINT16 ) {
    if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.sampleRates.push_back( 11025 );
    if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.sampleRates.push_back( 22050 );
    if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.sampleRates.push_back( 44100 );
    if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.sampleRates.push_back( 96000 );
  else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
    if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.sampleRates.push_back( 11025 );
    if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.sampleRates.push_back( 22050 );
    if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.sampleRates.push_back( 44100 );
    // BUG(review): WAVE_FORMAT_96S08 is the 96 kHz stereo 8-bit flag, so
    // this should push 96000, not 44100 (compare the mono branch below).
    if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.sampleRates.push_back( 44100 );

else if ( inCaps.dwChannels == 1 ) {
  if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
  if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
  if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
  if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
  if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
  if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
  if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
  if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;

  if ( info.nativeFormats & RTAUDIO_SINT16 ) {
    if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.sampleRates.push_back( 11025 );
    if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.sampleRates.push_back( 22050 );
    if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.sampleRates.push_back( 44100 );
    if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.sampleRates.push_back( 96000 );
  else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
    if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.sampleRates.push_back( 11025 );
    if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.sampleRates.push_back( 22050 );
    if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.sampleRates.push_back( 44100 );
    if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.sampleRates.push_back( 96000 );

else info.inputChannels = 0; // technically, this would be an error

if ( info.inputChannels == 0 ) return info;

if ( getDefaultInputDevice() == device )
  info.isDefaultInput = true;

// Copy name and return.
info.name = dsinfo.name;
3761 bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
3762 unsigned int firstChannel, unsigned int sampleRate,
3763 RtAudioFormat format, unsigned int *bufferSize,
3764 RtAudio::StreamOptions *options )
3766 if ( channels + firstChannel > 2 ) {
3767 errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
3771 // Enumerate through devices to find the id (if it exists). Note
3772 // that we have to do the output enumeration first, even if this is
3773 // an input device, in order for the device counter to be correct.
3775 dsinfo.findIndex = true;
3776 dsinfo.index = device;
3777 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3778 if ( FAILED( result ) ) {
3779 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating output devices!";
3780 errorText_ = errorStream_.str();
3784 if ( mode == OUTPUT ) {
3785 if ( dsinfo.name.empty() ) {
3786 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
3787 errorText_ = errorStream_.str();
3791 else { // mode == INPUT
3792 dsinfo.isInput = true;
3793 HRESULT result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3794 if ( FAILED( result ) ) {
3795 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating input devices!";
3796 errorText_ = errorStream_.str();
3799 if ( dsinfo.name.empty() ) {
3800 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
3801 errorText_ = errorStream_.str();
3806 // According to a note in PortAudio, using GetDesktopWindow()
3807 // instead of GetForegroundWindow() is supposed to avoid problems
3808 // that occur when the application's window is not the foreground
3809 // window. Also, if the application window closes before the
3810 // DirectSound buffer, DirectSound can crash. However, for console
3811 // applications, no sound was produced when using GetDesktopWindow().
3812 HWND hWnd = GetForegroundWindow();
3814 // Check the numberOfBuffers parameter and limit the lowest value to
3815 // two. This is a judgement call and a value of two is probably too
3816 // low for capture, but it should work for playback.
3818 if ( options ) nBuffers = options->numberOfBuffers;
3819 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
3820 if ( nBuffers < 2 ) nBuffers = 3;
3822 // Create the wave format structure. The data format setting will
3823 // be determined later.
3824 WAVEFORMATEX waveFormat;
3825 ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
3826 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
3827 waveFormat.nChannels = channels + firstChannel;
3828 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
3830 // Determine the device buffer size. By default, 32k, but we will
3831 // grow it to make allowances for very large software buffer sizes.
3832 DWORD dsBufferSize = 0;
3833 DWORD dsPointerLeadTime = 0;
3834 long bufferBytes = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this
3836 void *ohandle = 0, *bhandle = 0;
3837 if ( mode == OUTPUT ) {
3839 LPDIRECTSOUND output;
3840 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3841 if ( FAILED( result ) ) {
3842 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3843 errorText_ = errorStream_.str();
3848 outCaps.dwSize = sizeof( outCaps );
3849 result = output->GetCaps( &outCaps );
3850 if ( FAILED( result ) ) {
3852 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsinfo.name << ")!";
3853 errorText_ = errorStream_.str();
3857 // Check channel information.
3858 if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
3859 errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsinfo.name << ") does not support stereo playback.";
3860 errorText_ = errorStream_.str();
3864 // Check format information. Use 16-bit format unless not
3865 // supported or user requests 8-bit.
3866 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
3867 !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
3868 waveFormat.wBitsPerSample = 16;
3869 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3872 waveFormat.wBitsPerSample = 8;
3873 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3875 stream_.userFormat = format;
3877 // Update wave format structure and buffer information.
3878 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3879 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3880 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
3882 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
3883 while ( dsPointerLeadTime * 2U > (DWORD) bufferBytes )
3886 // Set cooperative level to DSSCL_EXCLUSIVE ... sound stops when window focus changes.
3887 // result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
3888 // Set cooperative level to DSSCL_PRIORITY ... sound remains when window focus changes.
3889 result = output->SetCooperativeLevel( hWnd, DSSCL_PRIORITY );
3890 if ( FAILED( result ) ) {
3892 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsinfo.name << ")!";
3893 errorText_ = errorStream_.str();
3897 // Even though we will write to the secondary buffer, we need to
3898 // access the primary buffer to set the correct output format
3899 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
3900 // buffer description.
3901 DSBUFFERDESC bufferDescription;
3902 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3903 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3904 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
3906 // Obtain the primary buffer
3907 LPDIRECTSOUNDBUFFER buffer;
3908 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3909 if ( FAILED( result ) ) {
3911 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsinfo.name << ")!";
3912 errorText_ = errorStream_.str();
3916 // Set the primary DS buffer sound format.
3917 result = buffer->SetFormat( &waveFormat );
3918 if ( FAILED( result ) ) {
3920 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsinfo.name << ")!";
3921 errorText_ = errorStream_.str();
3925 // Setup the secondary DS buffer description.
3926 dsBufferSize = (DWORD) bufferBytes;
3927 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3928 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3929 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3930 DSBCAPS_GLOBALFOCUS |
3931 DSBCAPS_GETCURRENTPOSITION2 |
3932 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
3933 bufferDescription.dwBufferBytes = bufferBytes;
3934 bufferDescription.lpwfxFormat = &waveFormat;
3936 // Try to create the secondary DS buffer. If that doesn't work,
3937 // try to use software mixing. Otherwise, there's a problem.
3938 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3939 if ( FAILED( result ) ) {
3940 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3941 DSBCAPS_GLOBALFOCUS |
3942 DSBCAPS_GETCURRENTPOSITION2 |
3943 DSBCAPS_LOCSOFTWARE ); // Force software mixing
3944 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3945 if ( FAILED( result ) ) {
3947 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsinfo.name << ")!";
3948 errorText_ = errorStream_.str();
3953 // Get the buffer size ... might be different from what we specified.
3955 dsbcaps.dwSize = sizeof( DSBCAPS );
3956 result = buffer->GetCaps( &dsbcaps );
3957 if ( FAILED( result ) ) {
3960 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsinfo.name << ")!";
3961 errorText_ = errorStream_.str();
3965 bufferBytes = dsbcaps.dwBufferBytes;
3967 // Lock the DS buffer
3970 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3971 if ( FAILED( result ) ) {
3974 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsinfo.name << ")!";
3975 errorText_ = errorStream_.str();
3979 // Zero the DS buffer
3980 ZeroMemory( audioPtr, dataLen );
3982 // Unlock the DS buffer
3983 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3984 if ( FAILED( result ) ) {
3987 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsinfo.name << ")!";
3988 errorText_ = errorStream_.str();
3992 dsBufferSize = bufferBytes;
3993 ohandle = (void *) output;
3994 bhandle = (void *) buffer;
3997 if ( mode == INPUT ) {
3999 LPDIRECTSOUNDCAPTURE input;
4000 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
4001 if ( FAILED( result ) ) {
4002 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
4003 errorText_ = errorStream_.str();
4008 inCaps.dwSize = sizeof( inCaps );
4009 result = input->GetCaps( &inCaps );
4010 if ( FAILED( result ) ) {
4012 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsinfo.name << ")!";
4013 errorText_ = errorStream_.str();
4017 // Check channel information.
4018 if ( inCaps.dwChannels < channels + firstChannel ) {
4019 errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";
4023 // Check format information. Use 16-bit format unless user
4025 DWORD deviceFormats;
4026 if ( channels + firstChannel == 2 ) {
4027 deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
4028 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
4029 waveFormat.wBitsPerSample = 8;
4030 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
4032 else { // assume 16-bit is supported
4033 waveFormat.wBitsPerSample = 16;
4034 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4037 else { // channel == 1
4038 deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
4039 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
4040 waveFormat.wBitsPerSample = 8;
4041 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
4043 else { // assume 16-bit is supported
4044 waveFormat.wBitsPerSample = 16;
4045 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4048 stream_.userFormat = format;
4050 // Update wave format structure and buffer information.
4051 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
4052 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
4053 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
4055 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
4056 while ( dsPointerLeadTime * 2U > (DWORD) bufferBytes )
4059 // Setup the secondary DS buffer description.
4060 dsBufferSize = bufferBytes;
4061 DSCBUFFERDESC bufferDescription;
4062 ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
4063 bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
4064 bufferDescription.dwFlags = 0;
4065 bufferDescription.dwReserved = 0;
4066 bufferDescription.dwBufferBytes = bufferBytes;
4067 bufferDescription.lpwfxFormat = &waveFormat;
4069 // Create the capture buffer.
4070 LPDIRECTSOUNDCAPTUREBUFFER buffer;
4071 result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
4072 if ( FAILED( result ) ) {
4074 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsinfo.name << ")!";
4075 errorText_ = errorStream_.str();
4079 // Get the buffer size ... might be different from what we specified.
4081 dscbcaps.dwSize = sizeof( DSCBCAPS );
4082 result = buffer->GetCaps( &dscbcaps );
4083 if ( FAILED( result ) ) {
4086 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsinfo.name << ")!";
4087 errorText_ = errorStream_.str();
4091 bufferBytes = dscbcaps.dwBufferBytes;
4093 // Lock the capture buffer
4096 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
4097 if ( FAILED( result ) ) {
4100 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsinfo.name << ")!";
4101 errorText_ = errorStream_.str();
4106 ZeroMemory( audioPtr, dataLen );
4108 // Unlock the buffer
4109 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4110 if ( FAILED( result ) ) {
4113 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsinfo.name << ")!";
4114 errorText_ = errorStream_.str();
4118 dsBufferSize = bufferBytes;
4119 ohandle = (void *) input;
4120 bhandle = (void *) buffer;
4123 // Set various stream parameters
4124 DsHandle *handle = 0;
4125 stream_.nDeviceChannels[mode] = channels + firstChannel;
4126 stream_.nUserChannels[mode] = channels;
4127 stream_.bufferSize = *bufferSize;
4128 stream_.channelOffset[mode] = firstChannel;
4129 stream_.deviceInterleaved[mode] = true;
4130 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
4131 else stream_.userInterleaved = true;
4133 // Set flag for buffer conversion
4134 stream_.doConvertBuffer[mode] = false;
4135 if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
4136 stream_.doConvertBuffer[mode] = true;
4137 if (stream_.userFormat != stream_.deviceFormat[mode])
4138 stream_.doConvertBuffer[mode] = true;
4139 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
4140 stream_.nUserChannels[mode] > 1 )
4141 stream_.doConvertBuffer[mode] = true;
4143 // Allocate necessary internal buffers
4144 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
4145 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
4146 if ( stream_.userBuffer[mode] == NULL ) {
4147 errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
4151 if ( stream_.doConvertBuffer[mode] ) {
4153 bool makeBuffer = true;
4154 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
4155 if ( mode == INPUT ) {
4156 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
4157 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
4158 if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
4163 bufferBytes *= *bufferSize;
4164 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
4165 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
4166 if ( stream_.deviceBuffer == NULL ) {
4167 errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
4173 // Allocate our DsHandle structures for the stream.
4174 if ( stream_.apiHandle == 0 ) {
4176 handle = new DsHandle;
4178 catch ( std::bad_alloc& ) {
4179 errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";
4183 // Create a manual-reset event.
4184 handle->condition = CreateEvent( NULL, // no security
4185 TRUE, // manual-reset
4186 FALSE, // non-signaled initially
4188 stream_.apiHandle = (void *) handle;
4191 handle = (DsHandle *) stream_.apiHandle;
4192 handle->id[mode] = ohandle;
4193 handle->buffer[mode] = bhandle;
4194 handle->dsBufferSize[mode] = dsBufferSize;
4195 handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
4197 stream_.device[mode] = device;
4198 stream_.state = STREAM_STOPPED;
4199 if ( stream_.mode == OUTPUT && mode == INPUT )
4200 // We had already set up an output stream.
4201 stream_.mode = DUPLEX;
4203 stream_.mode = mode;
4204 stream_.nBuffers = nBuffers;
4205 stream_.sampleRate = sampleRate;
4207 // Setup the buffer conversion information structure.
4208 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
4210 // Setup the callback thread.
4212 stream_.callbackInfo.object = (void *) this;
4213 stream_.callbackInfo.isRunning = true;
4214 stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
4215 &stream_.callbackInfo, 0, &threadId );
4216 if ( stream_.callbackInfo.thread == 0 ) {
4217 errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
4221 // Boost DS thread priority
4222 SetThreadPriority( (HANDLE) stream_.callbackInfo.thread, THREAD_PRIORITY_HIGHEST );
4227 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4228 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4229 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4230 if ( buffer ) buffer->Release();
4233 if ( handle->buffer[1] ) {
4234 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4235 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4236 if ( buffer ) buffer->Release();
4239 CloseHandle( handle->condition );
4241 stream_.apiHandle = 0;
4244 for ( int i=0; i<2; i++ ) {
4245 if ( stream_.userBuffer[i] ) {
4246 free( stream_.userBuffer[i] );
4247 stream_.userBuffer[i] = 0;
4251 if ( stream_.deviceBuffer ) {
4252 free( stream_.deviceBuffer );
4253 stream_.deviceBuffer = 0;
// Close the open stream: stop the callback thread, release the
// DirectSound playback/capture objects, and free all internal buffers.
// Safe no-op path: issues only a WARNING if no stream is open.
// NOTE(review): this numbered listing elides lines (gaps in the 42xx
// numbering) — e.g. the early return after the WARNING and the
// Stop()/Release() calls on the DS objects are not visible here.
4259 void RtApiDs :: closeStream()
4261 if ( stream_.state == STREAM_CLOSED ) {
4262 errorText_ = "RtApiDs::closeStream(): no open stream to close!";
4263 error( RtError::WARNING );
// Signal the callback loop to exit, then wait for the thread to finish
// before tearing anything down (avoids the callback touching freed state).
4267 // Stop the callback thread.
4268 stream_.callbackInfo.isRunning = false;
4269 WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );
4270 CloseHandle( (HANDLE) stream_.callbackInfo.thread );
4272 DsHandle *handle = (DsHandle *) stream_.apiHandle;
// Release output-side (index 0) DirectSound object and buffer.
// The Stop()/Release() calls are in elided lines — TODO confirm.
4274 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4275 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4276 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
// Release input-side (index 1) capture object and buffer.
4283 if ( handle->buffer[1] ) {
4284 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4285 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
// Destroy the manual-reset event created in probeDeviceOpen and drop the handle.
4292 CloseHandle( handle->condition );
4294 stream_.apiHandle = 0;
// Free the per-mode user buffers (0 = output, 1 = input).
4297 for ( int i=0; i<2; i++ ) {
4298 if ( stream_.userBuffer[i] ) {
4299 free( stream_.userBuffer[i] );
4300 stream_.userBuffer[i] = 0;
// Free the shared device-format conversion buffer, if one was allocated.
4304 if ( stream_.deviceBuffer ) {
4305 free( stream_.deviceBuffer );
4306 stream_.deviceBuffer = 0;
// Mark the stream object as fully torn down.
4309 stream_.mode = UNINITIALIZED;
4310 stream_.state = STREAM_CLOSED;
// Start a stopped stream: begin looping playback on the output buffer
// and/or capture on the input buffer, under the stream mutex.
// Issues only a WARNING (and presumably returns — line elided) if the
// stream is already running.
// NOTE(review): the numbered listing elides lines (gaps in the 43xx
// numbering), e.g. the early return, `HRESULT result` declaration, and
// the `unlock:` label the FAILED branches presumably jump to.
4313 void RtApiDs :: startStream()
4316 if ( stream_.state == STREAM_RUNNING ) {
4317 errorText_ = "RtApiDs::startStream(): the stream is already running!";
4318 error( RtError::WARNING );
4322 // Increase scheduler frequency on lesser windows (a side-effect of
4323 // increasing timer accuracy). On greater windows (Win2K or later),
4324 // this is already in effect.
4326 MUTEX_LOCK( &stream_.mutex );
4328 DsHandle *handle = (DsHandle *) stream_.apiHandle;
// 1 ms timer resolution; matched by timeEndPeriod( 1 ) in stopStream().
4330 timeBeginPeriod( 1 );
// Reset the (debug) statistics record for this run.
4333 memset( &statistics, 0, sizeof( statistics ) );
4334 statistics.sampleRate = stream_.sampleRate;
4335 statistics.writeDeviceBufferLeadBytes = handle->dsPointerLeadTime[0];
// buffersRolling / duplexPrerollBytes are consumed by callbackEvent():
// the devices need time to start moving their pointers in lockstep.
4338 buffersRolling = false;
4339 duplexPrerollBytes = 0;
4341 if ( stream_.mode == DUPLEX ) {
4342 // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
4343 duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );
// Kick off looping playback on the secondary output buffer.
4347 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4348 //statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0] ) * stream_.nDeviceChannels[0];
4350 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4351 result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
4352 if ( FAILED( result ) ) {
4353 errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
4354 errorText_ = errorStream_.str();
// Kick off looping capture on the input buffer.
4359 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4360 //statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1]) * stream_.nDeviceChannels[1];
4362 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4363 result = buffer->Start( DSCBSTART_LOOPING );
4364 if ( FAILED( result ) ) {
4365 errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
4366 errorText_ = errorStream_.str();
// Reset drain state and mark the stream running before unlocking.
4371 handle->drainCounter = 0;
4372 handle->internalDrain = false;
4373 stream_.state = STREAM_RUNNING;
4376 MUTEX_UNLOCK( &stream_.mutex );
// Report any DirectSound failure after releasing the mutex.
4378 if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Stop a running stream. For output, first lets the callback drain the
// remaining audio (drainCounter handshake via the manual-reset event),
// then stops the DS buffer and zeros it so a restart plays silence
// instead of stale data. Input is stopped and zeroed symmetrically.
// NOTE(review): the numbered listing elides lines (gaps in the 44xx
// numbering) — e.g. `HRESULT result`, `LPVOID audioPtr`, `DWORD dataLen`
// declarations and the goto/return paths of the FAILED branches are not
// visible here.
4381 void RtApiDs :: stopStream()
4384 if ( stream_.state == STREAM_STOPPED ) {
4385 errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
4386 error( RtError::WARNING );
4390 MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: another thread may have stopped the stream
// while we were waiting on the mutex.
4392 if ( stream_.state == STREAM_STOPPED ) {
4393 MUTEX_UNLOCK( &stream_.mutex );
4400 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4401 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter == 0 means a normal stop: ask the callback to drain and
// block (mutex released) until it signals the condition event.
// abortStream() pre-sets drainCounter = 1 to skip this wait.
4402 if ( handle->drainCounter == 0 ) {
4403 handle->drainCounter = 1;
4404 MUTEX_UNLOCK( &stream_.mutex );
4405 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
4406 ResetEvent( handle->condition );
4407 MUTEX_LOCK( &stream_.mutex );
4410 // Stop the buffer and clear memory
4411 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4412 result = buffer->Stop();
4413 if ( FAILED( result ) ) {
4414 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping output buffer!";
4415 errorText_ = errorStream_.str();
4419 // Lock the buffer and clear it so that if we start to play again,
4420 // we won't have old data playing.
4421 result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
4422 if ( FAILED( result ) ) {
4423 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking output buffer!";
4424 errorText_ = errorStream_.str();
4428 // Zero the DS buffer
4429 ZeroMemory( audioPtr, dataLen );
4431 // Unlock the DS buffer
4432 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4433 if ( FAILED( result ) ) {
4434 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
4435 errorText_ = errorStream_.str();
4439 // If we start playing again, we must begin at beginning of buffer.
4440 handle->bufferPointer[0] = 0;
// Input side: stop capture and zero the capture buffer the same way.
4443 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4444 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4448 result = buffer->Stop();
4449 if ( FAILED( result ) ) {
4450 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping input buffer!";
4451 errorText_ = errorStream_.str();
4455 // Lock the buffer and clear it so that if we start to play again,
4456 // we won't have old data playing.
4457 result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
4458 if ( FAILED( result ) ) {
4459 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking input buffer!";
4460 errorText_ = errorStream_.str();
4464 // Zero the DS buffer
4465 ZeroMemory( audioPtr, dataLen );
4467 // Unlock the DS buffer
4468 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4469 if ( FAILED( result ) ) {
4470 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
4471 errorText_ = errorStream_.str();
4475 // If we start recording again, we must begin at beginning of buffer.
4476 handle->bufferPointer[1] = 0;
// Undo the 1 ms timer resolution set in startStream().
4480 timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
4481 stream_.state = STREAM_STOPPED;
4482 MUTEX_UNLOCK( &stream_.mutex );
// Report any DirectSound failure after releasing the mutex.
4484 if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Abort (stop immediately, without draining queued output). Pre-setting
// drainCounter to a nonzero value makes stopStream()'s drain handshake
// skip its wait-for-callback step; presumably the elided tail of this
// function then calls stopStream() — TODO confirm against the full source.
4487 void RtApiDs :: abortStream()
4490 if ( stream_.state == STREAM_STOPPED ) {
4491 errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
4492 error( RtError::WARNING );
4496 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4497 handle->drainCounter = 1;
4502 void RtApiDs :: callbackEvent()
4504 if ( stream_.state == STREAM_STOPPED ) {
4505 Sleep(50); // sleep 50 milliseconds
4509 if ( stream_.state == STREAM_CLOSED ) {
4510 errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
4511 error( RtError::WARNING );
4515 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
4516 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4518 // Check if we were draining the stream and signal is finished.
4519 if ( handle->drainCounter > stream_.nBuffers + 2 ) {
4520 if ( handle->internalDrain == false )
4521 SetEvent( handle->condition );
4527 MUTEX_LOCK( &stream_.mutex );
4529 // The state might change while waiting on a mutex.
4530 if ( stream_.state == STREAM_STOPPED ) {
4531 MUTEX_UNLOCK( &stream_.mutex );
4535 // Invoke user callback to get fresh output data UNLESS we are
4537 if ( handle->drainCounter == 0 ) {
4538 RtAudioCallback callback = (RtAudioCallback) info->callback;
4539 double streamTime = getStreamTime();
4540 RtAudioStreamStatus status = 0;
4541 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
4542 status |= RTAUDIO_OUTPUT_UNDERFLOW;
4543 handle->xrun[0] = false;
4545 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
4546 status |= RTAUDIO_INPUT_OVERFLOW;
4547 handle->xrun[1] = false;
4549 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
4550 stream_.bufferSize, streamTime, status, info->userData );
4551 if ( handle->drainCounter == 2 ) {
4552 MUTEX_UNLOCK( &stream_.mutex );
4556 else if ( handle->drainCounter == 1 )
4557 handle->internalDrain = true;
4561 DWORD currentWritePos, safeWritePos;
4562 DWORD currentReadPos, safeReadPos;
4566 #ifdef GENERATE_DEBUG_LOG
4567 DWORD writeTime, readTime;
4570 LPVOID buffer1 = NULL;
4571 LPVOID buffer2 = NULL;
4572 DWORD bufferSize1 = 0;
4573 DWORD bufferSize2 = 0;
4578 if ( stream_.mode == DUPLEX && !buffersRolling ) {
4579 //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4581 // It takes a while for the devices to get rolling. As a result,
4582 // there's no guarantee that the capture and write device pointers
4583 // will move in lockstep. Wait here for both devices to start
4584 // rolling, and then set our buffer pointers accordingly.
4585 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
4586 // bytes later than the write buffer.
4588 // Stub: a serious risk of having a pre-emptive scheduling round
4589 // take place between the two GetCurrentPosition calls... but I'm
4590 // really not sure how to solve the problem. Temporarily boost to
4591 // Realtime priority, maybe; but I'm not sure what priority the
4592 // DirectSound service threads run at. We *should* be roughly
4593 // within a ms or so of correct.
4595 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4596 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4598 DWORD initialWritePos, initialSafeWritePos;
4599 DWORD initialReadPos, initialSafeReadPos;
4601 result = dsWriteBuffer->GetCurrentPosition( &initialWritePos, &initialSafeWritePos );
4602 if ( FAILED( result ) ) {
4603 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4604 errorText_ = errorStream_.str();
4605 error( RtError::SYSTEM_ERROR );
4607 result = dsCaptureBuffer->GetCurrentPosition( &initialReadPos, &initialSafeReadPos );
4608 if ( FAILED( result ) ) {
4609 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4610 errorText_ = errorStream_.str();
4611 error( RtError::SYSTEM_ERROR );
4614 result = dsWriteBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos );
4615 if ( FAILED( result ) ) {
4616 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4617 errorText_ = errorStream_.str();
4618 error( RtError::SYSTEM_ERROR );
4620 result = dsCaptureBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4621 if ( FAILED( result ) ) {
4622 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4623 errorText_ = errorStream_.str();
4624 error( RtError::SYSTEM_ERROR );
4626 if ( safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos ) break;
4630 //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4632 buffersRolling = true;
4633 handle->bufferPointer[0] = ( safeWritePos + handle->dsPointerLeadTime[0] );
4634 handle->bufferPointer[1] = safeReadPos;
4637 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4639 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4641 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
4642 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4643 bufferBytes *= formatBytes( stream_.userFormat );
4644 memset( stream_.userBuffer[0], 0, bufferBytes );
4647 // Setup parameters and do buffer conversion if necessary.
4648 if ( stream_.doConvertBuffer[0] ) {
4649 buffer = stream_.deviceBuffer;
4650 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
4651 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
4652 bufferBytes *= formatBytes( stream_.deviceFormat[0] );
4655 buffer = stream_.userBuffer[0];
4656 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4657 bufferBytes *= formatBytes( stream_.userFormat );
4660 // No byte swapping necessary in DirectSound implementation.
4662 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
4663 // unsigned. So, we need to convert our signed 8-bit data here to
4665 if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
4666 for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
4668 DWORD dsBufferSize = handle->dsBufferSize[0];
4669 nextWritePos = handle->bufferPointer[0];
4673 // Find out where the read and "safe write" pointers are.
4674 result = dsBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos );
4675 if ( FAILED( result ) ) {
4676 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4677 errorText_ = errorStream_.str();
4678 error( RtError::SYSTEM_ERROR );
4681 leadPos = safeWritePos + handle->dsPointerLeadTime[0];
4682 if ( leadPos > dsBufferSize ) leadPos -= dsBufferSize;
4683 if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
4684 endWrite = nextWritePos + bufferBytes;
4686 // Check whether the entire write region is behind the play pointer.
4687 if ( leadPos >= endWrite ) break;
4689 // If we are here, then we must wait until the play pointer gets
4690 // beyond the write region. The approach here is to use the
4691 // Sleep() function to suspend operation until safePos catches
4692 // up. Calculate number of milliseconds to wait as:
4693 // time = distance * (milliseconds/second) * fudgefactor /
4694 // ((bytes/sample) * (samples/second))
4695 // A "fudgefactor" less than 1 is used because it was found
4696 // that sleeping too long was MUCH worse than sleeping for
4697 // several shorter periods.
4698 double millis = ( endWrite - leadPos ) * 900.0;
4699 millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
4700 if ( millis < 1.0 ) millis = 1.0;
4701 if ( millis > 50.0 ) {
4702 static int nOverruns = 0;
4705 Sleep( (DWORD) millis );
4708 //if ( statistics.writeDeviceSafeLeadBytes < dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] ) ) {
4709 // statistics.writeDeviceSafeLeadBytes = dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] );
4712 if ( dsPointerBetween( nextWritePos, safeWritePos, currentWritePos, dsBufferSize )
4713 || dsPointerBetween( endWrite, safeWritePos, currentWritePos, dsBufferSize ) ) {
4714 // We've strayed into the forbidden zone ... resync the read pointer.
4715 //++statistics.numberOfWriteUnderruns;
4716 handle->xrun[0] = true;
4717 nextWritePos = safeWritePos + handle->dsPointerLeadTime[0] - bufferBytes + dsBufferSize;
4718 while ( nextWritePos >= dsBufferSize ) nextWritePos -= dsBufferSize;
4719 handle->bufferPointer[0] = nextWritePos;
4720 endWrite = nextWritePos + bufferBytes;
4723 // Lock free space in the buffer
4724 result = dsBuffer->Lock( nextWritePos, bufferBytes, &buffer1,
4725 &bufferSize1, &buffer2, &bufferSize2, 0 );
4726 if ( FAILED( result ) ) {
4727 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
4728 errorText_ = errorStream_.str();
4729 error( RtError::SYSTEM_ERROR );
4732 // Copy our buffer into the DS buffer
4733 CopyMemory( buffer1, buffer, bufferSize1 );
4734 if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
4736 // Update our buffer offset and unlock sound buffer
4737 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4738 if ( FAILED( result ) ) {
4739 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
4740 errorText_ = errorStream_.str();
4741 error( RtError::SYSTEM_ERROR );
4743 nextWritePos = ( nextWritePos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4744 handle->bufferPointer[0] = nextWritePos;
4746 if ( handle->drainCounter ) {
4747 handle->drainCounter++;
4752 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4754 // Setup parameters.
4755 if ( stream_.doConvertBuffer[1] ) {
4756 buffer = stream_.deviceBuffer;
4757 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
4758 bufferBytes *= formatBytes( stream_.deviceFormat[1] );
4761 buffer = stream_.userBuffer[1];
4762 bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
4763 bufferBytes *= formatBytes( stream_.userFormat );
4766 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4767 long nextReadPos = handle->bufferPointer[1];
4768 DWORD dsBufferSize = handle->dsBufferSize[1];
4770 // Find out where the write and "safe read" pointers are.
4771 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4772 if ( FAILED( result ) ) {
4773 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4774 errorText_ = errorStream_.str();
4775 error( RtError::SYSTEM_ERROR );
4778 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4779 DWORD endRead = nextReadPos + bufferBytes;
4781 // Handling depends on whether we are INPUT or DUPLEX.
4782 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
4783 // then a wait here will drag the write pointers into the forbidden zone.
4785 // In DUPLEX mode, rather than wait, we will back off the read pointer until
4786 // it's in a safe position. This causes dropouts, but it seems to be the only
4787 // practical way to sync up the read and write pointers reliably, given the
4788 // the very complex relationship between phase and increment of the read and write
4791 // In order to minimize audible dropouts in DUPLEX mode, we will
4792 // provide a pre-roll period of 0.5 seconds in which we return
4793 // zeros from the read buffer while the pointers sync up.
4795 if ( stream_.mode == DUPLEX ) {
4796 if ( safeReadPos < endRead ) {
4797 if ( duplexPrerollBytes <= 0 ) {
4798 // Pre-roll time over. Be more agressive.
4799 int adjustment = endRead-safeReadPos;
4801 handle->xrun[1] = true;
4802 //++statistics.numberOfReadOverruns;
4804 // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
4805 // and perform fine adjustments later.
4806 // - small adjustments: back off by twice as much.
4807 if ( adjustment >= 2*bufferBytes )
4808 nextReadPos = safeReadPos-2*bufferBytes;
4810 nextReadPos = safeReadPos-bufferBytes-adjustment;
4812 //statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos;
4813 //if ( statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize;
4814 if ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4818 // In pre=roll time. Just do it.
4819 nextReadPos = safeReadPos-bufferBytes;
4820 while ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4822 endRead = nextReadPos + bufferBytes;
4825 else { // mode == INPUT
4826 while ( safeReadPos < endRead ) {
4827 // See comments for playback.
4828 double millis = (endRead - safeReadPos) * 900.0;
4829 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
4830 if ( millis < 1.0 ) millis = 1.0;
4831 Sleep( (DWORD) millis );
4833 // Wake up, find out where we are now
4834 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4835 if ( FAILED( result ) ) {
4836 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4837 errorText_ = errorStream_.str();
4838 error( RtError::SYSTEM_ERROR );
4841 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4845 //if (statistics.readDeviceSafeLeadBytes < dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize ) )
4846 // statistics.readDeviceSafeLeadBytes = dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize );
4848 // Lock free space in the buffer
4849 result = dsBuffer->Lock( nextReadPos, bufferBytes, &buffer1,
4850 &bufferSize1, &buffer2, &bufferSize2, 0 );
4851 if ( FAILED( result ) ) {
4852 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
4853 errorText_ = errorStream_.str();
4854 error( RtError::SYSTEM_ERROR );
4857 if ( duplexPrerollBytes <= 0 ) {
4858 // Copy our buffer into the DS buffer
4859 CopyMemory( buffer, buffer1, bufferSize1 );
4860 if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
4863 memset( buffer, 0, bufferSize1 );
4864 if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
4865 duplexPrerollBytes -= bufferSize1 + bufferSize2;
4868 // Update our buffer offset and unlock sound buffer
4869 nextReadPos = ( nextReadPos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4870 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4871 if ( FAILED( result ) ) {
4872 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
4873 errorText_ = errorStream_.str();
4874 error( RtError::SYSTEM_ERROR );
4876 handle->bufferPointer[1] = nextReadPos;
4878 // No byte swapping necessary in DirectSound implementation.
4880 // If necessary, convert 8-bit data from unsigned to signed.
4881 if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
4882 for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
4884 // Do buffer conversion if necessary.
4885 if ( stream_.doConvertBuffer[1] )
4886 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
4888 #ifdef GENERATE_DEBUG_LOG
4889 if ( currentDebugLogEntry < debugLog.size() )
4891 TTickRecord &r = debugLog[currentDebugLogEntry++];
4892 r.currentReadPointer = currentReadPos;
4893 r.safeReadPointer = safeReadPos;
4894 r.currentWritePointer = currentWritePos;
4895 r.safeWritePointer = safeWritePos;
4896 r.readTime = readTime;
4897 r.writeTime = writeTime;
4898 r.nextReadPointer = handles[1].bufferPointer;
4899 r.nextWritePointer = handles[0].bufferPointer;
4904 MUTEX_UNLOCK( &stream_.mutex );
4906 RtApi::tickStreamTime();
4909 // Definitions for utility functions and callbacks
4910 // specific to the DirectSound implementation.
4912 extern "C" unsigned __stdcall callbackHandler( void *ptr )
4914 CallbackInfo *info = (CallbackInfo *) ptr;
4915 RtApiDs *object = (RtApiDs *) info->object;
4916 bool* isRunning = &info->isRunning;
4918 while ( *isRunning == true ) {
4919 object->callbackEvent();
4928 std::string convertTChar( LPCTSTR name )
4932 #if defined( UNICODE ) || defined( _UNICODE )
4933 // Yes, this conversion doesn't make sense for two-byte characters
4934 // but RtAudio is currently written to return an std::string of
4935 // one-byte chars for the device name.
4936 for ( unsigned int i=0; i<wcslen( name ); i++ )
4937 s.push_back( name[i] );
4939 s.append( std::string( name ) );
4945 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
4946 LPCTSTR description,
4950 EnumInfo *info = (EnumInfo *) lpContext;
4953 if ( info->isInput == true ) {
4955 LPDIRECTSOUNDCAPTURE object;
4957 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
4958 if ( hr != DS_OK ) return TRUE;
4960 caps.dwSize = sizeof(caps);
4961 hr = object->GetCaps( &caps );
4962 if ( hr == DS_OK ) {
4963 if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
4970 LPDIRECTSOUND object;
4971 hr = DirectSoundCreate( lpguid, &object, NULL );
4972 if ( hr != DS_OK ) return TRUE;
4974 caps.dwSize = sizeof(caps);
4975 hr = object->GetCaps( &caps );
4976 if ( hr == DS_OK ) {
4977 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
4983 if ( info->getDefault && lpguid == NULL ) return FALSE;
4985 if ( info->findIndex && info->counter > info->index ) {
4987 info->name = convertTChar( description );
4994 static char* getErrorString( int code )
4998 case DSERR_ALLOCATED:
4999 return "Already allocated";
5001 case DSERR_CONTROLUNAVAIL:
5002 return "Control unavailable";
5004 case DSERR_INVALIDPARAM:
5005 return "Invalid parameter";
5007 case DSERR_INVALIDCALL:
5008 return "Invalid call";
5011 return "Generic error";
5013 case DSERR_PRIOLEVELNEEDED:
5014 return "Priority level needed";
5016 case DSERR_OUTOFMEMORY:
5017 return "Out of memory";
5019 case DSERR_BADFORMAT:
5020 return "The sample rate or the channel format is not supported";
5022 case DSERR_UNSUPPORTED:
5023 return "Not supported";
5025 case DSERR_NODRIVER:
5028 case DSERR_ALREADYINITIALIZED:
5029 return "Already initialized";
5031 case DSERR_NOAGGREGATION:
5032 return "No aggregation";
5034 case DSERR_BUFFERLOST:
5035 return "Buffer lost";
5037 case DSERR_OTHERAPPHASPRIO:
5038 return "Another application already has priority";
5040 case DSERR_UNINITIALIZED:
5041 return "Uninitialized";
5044 return "DirectSound unknown error";
5047 //******************** End of __WINDOWS_DS__ *********************//
5051 #if defined(__LINUX_ALSA__)
5053 #include <alsa/asoundlib.h>
5056 // A structure to hold various information related to the ALSA API
5059 snd_pcm_t *handles[2];
5062 pthread_cond_t runnable;
5065 :synchronized(false) { xrun[0] = false; xrun[1] = false; }
5068 extern "C" void *alsaCallbackHandler( void * ptr );
5070 RtApiAlsa :: RtApiAlsa()
5072 // Nothing to do here.
5075 RtApiAlsa :: ~RtApiAlsa()
5077 if ( stream_.state != STREAM_CLOSED ) closeStream();
5080 unsigned int RtApiAlsa :: getDeviceCount( void )
5082 unsigned nDevices = 0;
5083 int result, subdevice, card;
5087 // Count cards and devices
5089 snd_card_next( &card );
5090 while ( card >= 0 ) {
5091 sprintf( name, "hw:%d", card );
5092 result = snd_ctl_open( &handle, name, 0 );
5094 errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5095 errorText_ = errorStream_.str();
5096 error( RtError::WARNING );
5101 result = snd_ctl_pcm_next_device( handle, &subdevice );
5103 errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
5104 errorText_ = errorStream_.str();
5105 error( RtError::WARNING );
5108 if ( subdevice < 0 )
5113 snd_ctl_close( handle );
5114 snd_card_next( &card );
5120 RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
5122 RtAudio::DeviceInfo info;
5123 info.probed = false;
5125 unsigned nDevices = 0;
5126 int result, subdevice, card;
5130 // Count cards and devices
5132 snd_card_next( &card );
5133 while ( card >= 0 ) {
5134 sprintf( name, "hw:%d", card );
5135 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5137 errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5138 errorText_ = errorStream_.str();
5139 error( RtError::WARNING );
5144 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5146 errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
5147 errorText_ = errorStream_.str();
5148 error( RtError::WARNING );
5151 if ( subdevice < 0 ) break;
5152 if ( nDevices == device ) {
5153 sprintf( name, "hw:%d,%d", card, subdevice );
5159 snd_ctl_close( chandle );
5160 snd_card_next( &card );
5163 if ( nDevices == 0 ) {
5164 errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
5165 error( RtError::INVALID_USE );
5168 if ( device >= nDevices ) {
5169 errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
5170 error( RtError::INVALID_USE );
5175 // If a stream is already open, we cannot probe the stream devices.
5176 // Thus, use the saved results.
5177 if ( stream_.state != STREAM_CLOSED &&
5178 ( stream_.device[0] == device || stream_.device[1] == device ) ) {
5179 if ( device >= devices_.size() ) {
5180 errorText_ = "RtApiAlsa::getDeviceInfo: device ID was not present before stream was opened.";
5181 error( RtError::WARNING );
5184 return devices_[ device ];
5187 int openMode = SND_PCM_ASYNC;
5188 snd_pcm_stream_t stream;
5189 snd_pcm_info_t *pcminfo;
5190 snd_pcm_info_alloca( &pcminfo );
5192 snd_pcm_hw_params_t *params;
5193 snd_pcm_hw_params_alloca( ¶ms );
5195 // First try for playback
5196 stream = SND_PCM_STREAM_PLAYBACK;
5197 snd_pcm_info_set_device( pcminfo, subdevice );
5198 snd_pcm_info_set_subdevice( pcminfo, 0 );
5199 snd_pcm_info_set_stream( pcminfo, stream );
5201 result = snd_ctl_pcm_info( chandle, pcminfo );
5203 // Device probably doesn't support playback.
5207 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
5209 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5210 errorText_ = errorStream_.str();
5211 error( RtError::WARNING );
5215 // The device is open ... fill the parameter structure.
5216 result = snd_pcm_hw_params_any( phandle, params );
5218 snd_pcm_close( phandle );
5219 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5220 errorText_ = errorStream_.str();
5221 error( RtError::WARNING );
5225 // Get output channel information.
5227 result = snd_pcm_hw_params_get_channels_max( params, &value );
5229 snd_pcm_close( phandle );
5230 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
5231 errorText_ = errorStream_.str();
5232 error( RtError::WARNING );
5235 info.outputChannels = value;
5236 snd_pcm_close( phandle );
5239 // Now try for capture
5240 stream = SND_PCM_STREAM_CAPTURE;
5241 snd_pcm_info_set_stream( pcminfo, stream );
5243 result = snd_ctl_pcm_info( chandle, pcminfo );
5244 snd_ctl_close( chandle );
5246 // Device probably doesn't support capture.
5247 if ( info.outputChannels == 0 ) return info;
5248 goto probeParameters;
5251 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5253 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5254 errorText_ = errorStream_.str();
5255 error( RtError::WARNING );
5256 if ( info.outputChannels == 0 ) return info;
5257 goto probeParameters;
5260 // The device is open ... fill the parameter structure.
5261 result = snd_pcm_hw_params_any( phandle, params );
5263 snd_pcm_close( phandle );
5264 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5265 errorText_ = errorStream_.str();
5266 error( RtError::WARNING );
5267 if ( info.outputChannels == 0 ) return info;
5268 goto probeParameters;
5271 result = snd_pcm_hw_params_get_channels_max( params, &value );
5273 snd_pcm_close( phandle );
5274 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
5275 errorText_ = errorStream_.str();
5276 error( RtError::WARNING );
5277 if ( info.outputChannels == 0 ) return info;
5278 goto probeParameters;
5280 info.inputChannels = value;
5281 snd_pcm_close( phandle );
5283 // If device opens for both playback and capture, we determine the channels.
5284 if ( info.outputChannels > 0 && info.inputChannels > 0 )
5285 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
5287 // ALSA doesn't provide default devices so we'll use the first available one.
5288 if ( device == 0 && info.outputChannels > 0 )
5289 info.isDefaultOutput = true;
5290 if ( device == 0 && info.inputChannels > 0 )
5291 info.isDefaultInput = true;
5294 // At this point, we just need to figure out the supported data
5295 // formats and sample rates. We'll proceed by opening the device in
5296 // the direction with the maximum number of channels, or playback if
5297 // they are equal. This might limit our sample rate options, but so
5300 if ( info.outputChannels >= info.inputChannels )
5301 stream = SND_PCM_STREAM_PLAYBACK;
5303 stream = SND_PCM_STREAM_CAPTURE;
5304 snd_pcm_info_set_stream( pcminfo, stream );
5306 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5308 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5309 errorText_ = errorStream_.str();
5310 error( RtError::WARNING );
5314 // The device is open ... fill the parameter structure.
5315 result = snd_pcm_hw_params_any( phandle, params );
5317 snd_pcm_close( phandle );
5318 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5319 errorText_ = errorStream_.str();
5320 error( RtError::WARNING );
5324 // Test our discrete set of sample rate values.
5325 info.sampleRates.clear();
5326 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
5327 if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 )
5328 info.sampleRates.push_back( SAMPLE_RATES[i] );
5330 if ( info.sampleRates.size() == 0 ) {
5331 snd_pcm_close( phandle );
5332 errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
5333 errorText_ = errorStream_.str();
5334 error( RtError::WARNING );
5338 // Probe the supported data formats ... we don't care about endian-ness just yet
5339 snd_pcm_format_t format;
5340 info.nativeFormats = 0;
5341 format = SND_PCM_FORMAT_S8;
5342 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5343 info.nativeFormats |= RTAUDIO_SINT8;
5344 format = SND_PCM_FORMAT_S16;
5345 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5346 info.nativeFormats |= RTAUDIO_SINT16;
5347 format = SND_PCM_FORMAT_S24;
5348 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5349 info.nativeFormats |= RTAUDIO_SINT24;
5350 format = SND_PCM_FORMAT_S32;
5351 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5352 info.nativeFormats |= RTAUDIO_SINT32;
5353 format = SND_PCM_FORMAT_FLOAT;
5354 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5355 info.nativeFormats |= RTAUDIO_FLOAT32;
5356 format = SND_PCM_FORMAT_FLOAT64;
5357 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5358 info.nativeFormats |= RTAUDIO_FLOAT64;
5360 // Check that we have at least one supported format
5361 if ( info.nativeFormats == 0 ) {
5362 errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
5363 errorText_ = errorStream_.str();
5364 error( RtError::WARNING );
5368 // Get the device name
5370 result = snd_card_get_name( card, &cardname );
5372 sprintf( name, "hw:%s,%d", cardname, subdevice );
5375 // That's all ... close the device and return
5376 snd_pcm_close( phandle );
5381 void RtApiAlsa :: saveDeviceInfo( void )
5385 unsigned int nDevices = getDeviceCount();
5386 devices_.resize( nDevices );
5387 for ( unsigned int i=0; i<nDevices; i++ )
5388 devices_[i] = getDeviceInfo( i );
5391 bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
5392 unsigned int firstChannel, unsigned int sampleRate,
5393 RtAudioFormat format, unsigned int *bufferSize,
5394 RtAudio::StreamOptions *options )
5397 #if defined(__RTAUDIO_DEBUG__)
5399 snd_output_stdio_attach(&out, stderr, 0);
5402 // I'm not using the "plug" interface ... too much inconsistent behavior.
5404 unsigned nDevices = 0;
5405 int result, subdevice, card;
5409 // Count cards and devices
5411 snd_card_next( &card );
5412 while ( card >= 0 ) {
5413 sprintf( name, "hw:%d", card );
5414 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5416 errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5417 errorText_ = errorStream_.str();
5422 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5423 if ( result < 0 ) break;
5424 if ( subdevice < 0 ) break;
5425 if ( nDevices == device ) {
5426 sprintf( name, "hw:%d,%d", card, subdevice );
5427 snd_ctl_close( chandle );
5432 snd_ctl_close( chandle );
5433 snd_card_next( &card );
5436 if ( nDevices == 0 ) {
5437 // This should not happen because a check is made before this function is called.
5438 errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
5442 if ( device >= nDevices ) {
5443 // This should not happen because a check is made before this function is called.
5444 errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
5450 // The getDeviceInfo() function will not work for a device that is
5451 // already open. Thus, we'll probe the system before opening a
5452 // stream and save the results for use by getDeviceInfo().
5453 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) // only do once
5454 this->saveDeviceInfo();
5456 snd_pcm_stream_t stream;
5457 if ( mode == OUTPUT )
5458 stream = SND_PCM_STREAM_PLAYBACK;
5460 stream = SND_PCM_STREAM_CAPTURE;
5463 int openMode = SND_PCM_ASYNC;
5464 result = snd_pcm_open( &phandle, name, stream, openMode );
5466 if ( mode == OUTPUT )
5467 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
5469 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
5470 errorText_ = errorStream_.str();
5474 // Fill the parameter structure.
5475 snd_pcm_hw_params_t *hw_params;
5476 snd_pcm_hw_params_alloca( &hw_params );
5477 result = snd_pcm_hw_params_any( phandle, hw_params );
5479 snd_pcm_close( phandle );
5480 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
5481 errorText_ = errorStream_.str();
5485 #if defined(__RTAUDIO_DEBUG__)
5486 fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
5487 snd_pcm_hw_params_dump( hw_params, out );
5490 // Set access ... check user preference.
5491 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
5492 stream_.userInterleaved = false;
5493 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5495 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5496 stream_.deviceInterleaved[mode] = true;
5499 stream_.deviceInterleaved[mode] = false;
5502 stream_.userInterleaved = true;
5503 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5505 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5506 stream_.deviceInterleaved[mode] = false;
5509 stream_.deviceInterleaved[mode] = true;
5513 snd_pcm_close( phandle );
5514 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
5515 errorText_ = errorStream_.str();
5519 // Determine how to set the device format.
5520 stream_.userFormat = format;
5521 snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
5523 if ( format == RTAUDIO_SINT8 )
5524 deviceFormat = SND_PCM_FORMAT_S8;
5525 else if ( format == RTAUDIO_SINT16 )
5526 deviceFormat = SND_PCM_FORMAT_S16;
5527 else if ( format == RTAUDIO_SINT24 )
5528 deviceFormat = SND_PCM_FORMAT_S24;
5529 else if ( format == RTAUDIO_SINT32 )
5530 deviceFormat = SND_PCM_FORMAT_S32;
5531 else if ( format == RTAUDIO_FLOAT32 )
5532 deviceFormat = SND_PCM_FORMAT_FLOAT;
5533 else if ( format == RTAUDIO_FLOAT64 )
5534 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5536 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
5537 stream_.deviceFormat[mode] = format;
5541 // The user requested format is not natively supported by the device.
5542 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5543 if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
5544 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5548 deviceFormat = SND_PCM_FORMAT_FLOAT;
5549 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5550 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5554 deviceFormat = SND_PCM_FORMAT_S32;
5555 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5556 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5560 deviceFormat = SND_PCM_FORMAT_S24;
5561 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5562 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
5566 deviceFormat = SND_PCM_FORMAT_S16;
5567 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5568 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5572 deviceFormat = SND_PCM_FORMAT_S8;
5573 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5574 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
5578 // If we get here, no supported format was found.
5579 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
5580 errorText_ = errorStream_.str();
5584 result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
5586 snd_pcm_close( phandle );
5587 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
5588 errorText_ = errorStream_.str();
5592 // Determine whether byte-swaping is necessary.
5593 stream_.doByteSwap[mode] = false;
5594 if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
5595 result = snd_pcm_format_cpu_endian( deviceFormat );
5597 stream_.doByteSwap[mode] = true;
5598 else if (result < 0) {
5599 snd_pcm_close( phandle );
5600 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
5601 errorText_ = errorStream_.str();
5606 // Set the sample rate.
5607 result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
5609 snd_pcm_close( phandle );
5610 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
5611 errorText_ = errorStream_.str();
5615 // Determine the number of channels for this device. We support a possible
5616 // minimum device channel number > than the value requested by the user.
5617 stream_.nUserChannels[mode] = channels;
5619 result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
5620 unsigned int deviceChannels = value;
5621 if ( result < 0 || deviceChannels < channels + firstChannel ) {
5622 snd_pcm_close( phandle );
5623 errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
5624 errorText_ = errorStream_.str();
5628 result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
5630 snd_pcm_close( phandle );
5631 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
5632 errorText_ = errorStream_.str();
5635 deviceChannels = value;
5636 if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
5637 stream_.nDeviceChannels[mode] = deviceChannels;
5639 // Set the device channels.
5640 result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
5642 snd_pcm_close( phandle );
5643 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
5644 errorText_ = errorStream_.str();
5648 // Set the buffer number, which in ALSA is referred to as the "period".
5650 unsigned int periods = 0;
5651 if ( options ) periods = options->numberOfBuffers;
5652 totalSize = *bufferSize * periods;
5654 // Set the buffer (or period) size.
5655 snd_pcm_uframes_t periodSize = *bufferSize;
5656 result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
5658 snd_pcm_close( phandle );
5659 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
5660 errorText_ = errorStream_.str();
5663 *bufferSize = periodSize;
5665 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
5666 else periods = totalSize / *bufferSize;
5667 // Even though the hardware might allow 1 buffer, it won't work reliably.
5668 if ( periods < 2 ) periods = 2;
5669 result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
5671 snd_pcm_close( phandle );
5672 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
5673 errorText_ = errorStream_.str();
5677 // If attempting to setup a duplex stream, the bufferSize parameter
5678 // MUST be the same in both directions!
5679 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
5680 errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
5681 errorText_ = errorStream_.str();
5685 stream_.bufferSize = *bufferSize;
5687 // Install the hardware configuration
5688 result = snd_pcm_hw_params( phandle, hw_params );
5690 snd_pcm_close( phandle );
5691 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5692 errorText_ = errorStream_.str();
5696 #if defined(__RTAUDIO_DEBUG__)
5697 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
5698 snd_pcm_hw_params_dump( hw_params, out );
5701 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
5702 snd_pcm_sw_params_t *sw_params = NULL;
5703 snd_pcm_sw_params_alloca( &sw_params );
5704 snd_pcm_sw_params_current( phandle, sw_params );
5705 snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
5706 snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, ULONG_MAX );
5707 snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
5709 // The following two settings were suggested by Theo Veenker
5710 //snd_pcm_sw_params_set_avail_min( phandle, sw_params, *bufferSize );
5711 //snd_pcm_sw_params_set_xfer_align( phandle, sw_params, 1 );
5713 // here are two options for a fix
5714 //snd_pcm_sw_params_set_silence_size( phandle, sw_params, ULONG_MAX );
5715 snd_pcm_uframes_t val;
5716 snd_pcm_sw_params_get_boundary( sw_params, &val );
5717 snd_pcm_sw_params_set_silence_size( phandle, sw_params, val );
5719 result = snd_pcm_sw_params( phandle, sw_params );
5721 snd_pcm_close( phandle );
5722 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5723 errorText_ = errorStream_.str();
5727 #if defined(__RTAUDIO_DEBUG__)
5728 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
5729 snd_pcm_sw_params_dump( sw_params, out );
5732 // Set flags for buffer conversion
5733 stream_.doConvertBuffer[mode] = false;
5734 if ( stream_.userFormat != stream_.deviceFormat[mode] )
5735 stream_.doConvertBuffer[mode] = true;
5736 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
5737 stream_.doConvertBuffer[mode] = true;
5738 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
5739 stream_.nUserChannels[mode] > 1 )
5740 stream_.doConvertBuffer[mode] = true;
5742 // Allocate the ApiHandle if necessary and then save.
5743 AlsaHandle *apiInfo = 0;
5744 if ( stream_.apiHandle == 0 ) {
5746 apiInfo = (AlsaHandle *) new AlsaHandle;
5748 catch ( std::bad_alloc& ) {
5749 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
5753 if ( pthread_cond_init( &apiInfo->runnable, NULL ) ) {
5754 errorText_ = "RtApiAlsa::probeDeviceOpen: error initializing pthread condition variable.";
5758 stream_.apiHandle = (void *) apiInfo;
5759 apiInfo->handles[0] = 0;
5760 apiInfo->handles[1] = 0;
5763 apiInfo = (AlsaHandle *) stream_.apiHandle;
5765 apiInfo->handles[mode] = phandle;
5767 // Allocate necessary internal buffers.
5768 unsigned long bufferBytes;
5769 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
5770 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
5771 if ( stream_.userBuffer[mode] == NULL ) {
5772 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
5776 if ( stream_.doConvertBuffer[mode] ) {
5778 bool makeBuffer = true;
5779 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
5780 if ( mode == INPUT ) {
5781 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5782 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
5783 if ( bufferBytes <= bytesOut ) makeBuffer = false;
5788 bufferBytes *= *bufferSize;
5789 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
5790 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
5791 if ( stream_.deviceBuffer == NULL ) {
5792 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
5798 stream_.sampleRate = sampleRate;
5799 stream_.nBuffers = periods;
5800 stream_.device[mode] = device;
5801 stream_.state = STREAM_STOPPED;
5803 // Setup the buffer conversion information structure.
5804 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
5806 // Setup thread if necessary.
5807 if ( stream_.mode == OUTPUT && mode == INPUT ) {
5808 // We had already set up an output stream.
5809 stream_.mode = DUPLEX;
5810 // Link the streams if possible.
5811 apiInfo->synchronized = false;
5812 if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
5813 apiInfo->synchronized = true;
5815 errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
5816 error( RtError::WARNING );
5820 stream_.mode = mode;
5822 // Setup callback thread.
5823 stream_.callbackInfo.object = (void *) this;
5825 // Set the thread attributes for joinable and realtime scheduling
5826 // priority (optional). The higher priority will only take affect
5827 // if the program is run as root or suid. Note, under Linux
5828 // processes with CAP_SYS_NICE privilege, a user can change
5829 // scheduling policy and priority (thus need not be root). See
5830 // POSIX "capabilities".
5831 pthread_attr_t attr;
5832 pthread_attr_init( &attr );
5833 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
5834 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
5835 if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
5836 struct sched_param param;
5837 int priority = options->priority;
5838 int min = sched_get_priority_min( SCHED_RR );
5839 int max = sched_get_priority_max( SCHED_RR );
5840 if ( priority < min ) priority = min;
5841 else if ( priority > max ) priority = max;
5842 param.sched_priority = priority;
5843 pthread_attr_setschedparam( &attr, ¶m );
5844 pthread_attr_setschedpolicy( &attr, SCHED_RR );
5847 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5849 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5852 stream_.callbackInfo.isRunning = true;
5853 result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
5854 pthread_attr_destroy( &attr );
5856 stream_.callbackInfo.isRunning = false;
5857 errorText_ = "RtApiAlsa::error creating callback thread!";
5866 pthread_cond_destroy( &apiInfo->runnable );
5867 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5868 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5870 stream_.apiHandle = 0;
5873 for ( int i=0; i<2; i++ ) {
5874 if ( stream_.userBuffer[i] ) {
5875 free( stream_.userBuffer[i] );
5876 stream_.userBuffer[i] = 0;
5880 if ( stream_.deviceBuffer ) {
5881 free( stream_.deviceBuffer );
5882 stream_.deviceBuffer = 0;
// Close an open ALSA stream: stop the callback thread, drop any still-running
// PCM handles, close both PCM devices, and free the per-API handle plus all
// user/device conversion buffers.  Leaves the stream UNINITIALIZED/CLOSED.
// NOTE(review): this chunk is a lossy extract (embedded line numbers jump),
// so some braces/guards of this function are not visible here.
5888 void RtApiAlsa :: closeStream()
5890 if ( stream_.state == STREAM_CLOSED ) {
5891 errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
5892 error( RtError::WARNING );
5896 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
// Tell the callback loop to exit, then wake it if it is parked on the
// condition variable (it waits there while the stream is stopped).
5897 stream_.callbackInfo.isRunning = false;
5898 MUTEX_LOCK( &stream_.mutex );
5899 if ( stream_.state == STREAM_STOPPED )
5900 pthread_cond_signal( &apiInfo->runnable );
5901 MUTEX_UNLOCK( &stream_.mutex );
// Wait for the callback thread to terminate before tearing anything down.
5902 pthread_join( stream_.callbackInfo.thread, NULL );
5904 if ( stream_.state == STREAM_RUNNING ) {
5905 stream_.state = STREAM_STOPPED;
// handles[0] = playback PCM, handles[1] = capture PCM (per usage below).
5906 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
5907 snd_pcm_drop( apiInfo->handles[0] );
5908 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
5909 snd_pcm_drop( apiInfo->handles[1] );
5913 pthread_cond_destroy( &apiInfo->runnable );
5914 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5915 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5917 stream_.apiHandle = 0;
// Release the user buffers (index 0 = output, 1 = input) and the shared
// device conversion buffer.
5920 for ( int i=0; i<2; i++ ) {
5921 if ( stream_.userBuffer[i] ) {
5922 free( stream_.userBuffer[i] );
5923 stream_.userBuffer[i] = 0;
5927 if ( stream_.deviceBuffer ) {
5928 free( stream_.deviceBuffer );
5929 stream_.deviceBuffer = 0;
5932 stream_.mode = UNINITIALIZED;
5933 stream_.state = STREAM_CLOSED;
// Start a stopped ALSA stream: (re)prepare the playback and/or capture PCM
// devices if needed, mark the stream RUNNING, and signal the parked callback
// thread to resume.  Emits SYSTEM_ERROR if a prepare call failed.
5936 void RtApiAlsa :: startStream()
5938 // This method calls snd_pcm_prepare if the device isn't already in that state.
5941 if ( stream_.state == STREAM_RUNNING ) {
5942 errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
5943 error( RtError::WARNING );
5947 MUTEX_LOCK( &stream_.mutex );
5950 snd_pcm_state_t state;
5951 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5952 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
// Prepare the playback device (handle[0]) unless already prepared.
5953 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5954 state = snd_pcm_state( handle[0] );
5955 if ( state != SND_PCM_STATE_PREPARED ) {
5956 result = snd_pcm_prepare( handle[0] );
// (error branch; the guarding `if ( result < 0 )` line is not visible in
// this extract)
5958 errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
5959 errorText_ = errorStream_.str();
// Prepare the capture device (handle[1]) unless it is slaved to the
// playback device via snd_pcm_link (apiInfo->synchronized).
5965 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5966 state = snd_pcm_state( handle[1] );
5967 if ( state != SND_PCM_STATE_PREPARED ) {
5968 result = snd_pcm_prepare( handle[1] );
5970 errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
5971 errorText_ = errorStream_.str();
5977 stream_.state = STREAM_RUNNING;
5980 MUTEX_UNLOCK( &stream_.mutex );
// Wake the callback thread, which waits on `runnable` while stopped.
5982 pthread_cond_signal( &apiInfo->runnable );
5984 if ( result >= 0 ) return;
5985 error( RtError::SYSTEM_ERROR );
// Stop a running ALSA stream gracefully: drain (or drop, when the devices
// are linked) the playback PCM so queued samples play out, drop the capture
// PCM, and mark the stream STOPPED.  Emits SYSTEM_ERROR on failure.
5988 void RtApiAlsa :: stopStream()
5991 if ( stream_.state == STREAM_STOPPED ) {
5992 errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
5993 error( RtError::WARNING );
5997 MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: the state can change while waiting on the mutex.
5999 if ( stream_.state == STREAM_STOPPED ) {
6000 MUTEX_UNLOCK( &stream_.mutex );
6005 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6006 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
6007 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// For linked (synchronized) duplex handles, drain would deadlock/stall, so
// drop immediately; otherwise drain to let pending output play out.
6008 if ( apiInfo->synchronized )
6009 result = snd_pcm_drop( handle[0] );
6011 result = snd_pcm_drain( handle[0] );
6013 errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
6014 errorText_ = errorStream_.str();
// Capture side: discard pending input (nothing worth draining on stop).
6019 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
6020 result = snd_pcm_drop( handle[1] );
6022 errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
6023 errorText_ = errorStream_.str();
6029 stream_.state = STREAM_STOPPED;
6030 MUTEX_UNLOCK( &stream_.mutex );
6032 if ( result >= 0 ) return;
6033 error( RtError::SYSTEM_ERROR );
// Abort a running ALSA stream immediately: unlike stopStream(), the playback
// PCM is dropped (queued samples discarded) rather than drained.  Marks the
// stream STOPPED and emits SYSTEM_ERROR on failure.
6036 void RtApiAlsa :: abortStream()
6039 if ( stream_.state == STREAM_STOPPED ) {
6040 errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
6041 error( RtError::WARNING );
6045 MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: the state can change while waiting on the mutex.
6047 if ( stream_.state == STREAM_STOPPED ) {
6048 MUTEX_UNLOCK( &stream_.mutex );
6053 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6054 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
6055 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6056 result = snd_pcm_drop( handle[0] );
6058 errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
6059 errorText_ = errorStream_.str();
// Capture side is dropped separately unless the handles are linked.
6064 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
6065 result = snd_pcm_drop( handle[1] );
6067 errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
6068 errorText_ = errorStream_.str();
6074 stream_.state = STREAM_STOPPED;
6075 MUTEX_UNLOCK( &stream_.mutex );
6077 if ( result >= 0 ) return;
6078 error( RtError::SYSTEM_ERROR );
// One iteration of the ALSA callback loop (driven by alsaCallbackHandler):
// park on the condition variable while the stream is stopped, invoke the
// user callback, then read from the capture PCM and/or write to the playback
// PCM, handling xruns (-EPIPE) by re-preparing the device.
6081 void RtApiAlsa :: callbackEvent()
6083 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
// While stopped, block until startStream()/closeStream() signals `runnable`.
6084 if ( stream_.state == STREAM_STOPPED ) {
6085 MUTEX_LOCK( &stream_.mutex );
6086 pthread_cond_wait( &apiInfo->runnable, &stream_.mutex );
6087 if ( stream_.state != STREAM_RUNNING ) {
6088 MUTEX_UNLOCK( &stream_.mutex );
6091 MUTEX_UNLOCK( &stream_.mutex );
6094 if ( stream_.state == STREAM_CLOSED ) {
6095 errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
6096 error( RtError::WARNING );
// Invoke the user callback with over/underflow status flags gathered from
// the xrun markers set by previous read/write failures.
6100 int doStopStream = 0;
6101 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
6102 double streamTime = getStreamTime();
6103 RtAudioStreamStatus status = 0;
6104 if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
6105 status |= RTAUDIO_OUTPUT_UNDERFLOW;
6106 apiInfo->xrun[0] = false;
6108 if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
6109 status |= RTAUDIO_INPUT_OVERFLOW;
6110 apiInfo->xrun[1] = false;
6112 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
6113 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
// Callback return of 2 requests an immediate abort (1 = drain-stop below).
6115 if ( doStopStream == 2 ) {
6120 MUTEX_LOCK( &stream_.mutex );
6122 // The state might change while waiting on a mutex.
6123 if ( stream_.state == STREAM_STOPPED ) goto unlock;
6129 snd_pcm_sframes_t frames;
6130 RtAudioFormat format;
6131 handle = (snd_pcm_t **) apiInfo->handles;
// ---- Capture side: read one buffer from handle[1]. ----
6133 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
6135 // Setup parameters.
// Read into the device buffer when format/channel conversion is required,
// otherwise straight into the user buffer.
6136 if ( stream_.doConvertBuffer[1] ) {
6137 buffer = stream_.deviceBuffer;
6138 channels = stream_.nDeviceChannels[1];
6139 format = stream_.deviceFormat[1];
6142 buffer = stream_.userBuffer[1];
6143 channels = stream_.nUserChannels[1];
6144 format = stream_.userFormat;
6147 // Read samples from device in interleaved/non-interleaved format.
6148 if ( stream_.deviceInterleaved[1] )
6149 result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
// Non-interleaved: build one pointer per channel into the planar buffer.
// NOTE(review): `void *bufs[channels]` is a VLA — a GCC extension, not
// standard C++.
6151 void *bufs[channels];
6152 size_t offset = stream_.bufferSize * formatBytes( format );
6153 for ( int i=0; i<channels; i++ )
6154 bufs[i] = (void *) (buffer + (i * offset));
6155 result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
6158 if ( result < (int) stream_.bufferSize ) {
6159 // Either an error or overrun occured.
// -EPIPE signals an xrun: record it and re-prepare the device so capture
// can continue on the next pass.
6160 if ( result == -EPIPE ) {
6161 snd_pcm_state_t state = snd_pcm_state( handle[1] );
6162 if ( state == SND_PCM_STATE_XRUN ) {
6163 apiInfo->xrun[1] = true;
6164 result = snd_pcm_prepare( handle[1] );
6166 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
6167 errorText_ = errorStream_.str();
6171 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
6172 errorText_ = errorStream_.str();
6176 errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
6177 errorText_ = errorStream_.str();
6179 error( RtError::WARNING );
6183 // Do byte swapping if necessary.
6184 if ( stream_.doByteSwap[1] )
6185 byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
6187 // Do buffer conversion if necessary.
6188 if ( stream_.doConvertBuffer[1] )
6189 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
6191 // Check stream latency
6192 result = snd_pcm_delay( handle[1], &frames );
6193 if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
// ---- Playback side: write one buffer to handle[0]. ----
6198 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6200 // Setup parameters and do buffer conversion if necessary.
6201 if ( stream_.doConvertBuffer[0] ) {
6202 buffer = stream_.deviceBuffer;
6203 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
6204 channels = stream_.nDeviceChannels[0];
6205 format = stream_.deviceFormat[0];
6208 buffer = stream_.userBuffer[0];
6209 channels = stream_.nUserChannels[0];
6210 format = stream_.userFormat;
6213 // Do byte swapping if necessary.
6214 if ( stream_.doByteSwap[0] )
6215 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
6217 // Write samples to device in interleaved/non-interleaved format.
6218 if ( stream_.deviceInterleaved[0] )
6219 result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
6221 void *bufs[channels];
6222 size_t offset = stream_.bufferSize * formatBytes( format );
6223 for ( int i=0; i<channels; i++ )
6224 bufs[i] = (void *) (buffer + (i * offset));
6225 result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
6228 if ( result < (int) stream_.bufferSize ) {
6229 // Either an error or underrun occured.
// Same -EPIPE/xrun recovery as the capture side, for underruns.
6230 if ( result == -EPIPE ) {
6231 snd_pcm_state_t state = snd_pcm_state( handle[0] );
6232 if ( state == SND_PCM_STATE_XRUN ) {
6233 apiInfo->xrun[0] = true;
6234 result = snd_pcm_prepare( handle[0] );
6236 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
6237 errorText_ = errorStream_.str();
6241 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
6242 errorText_ = errorStream_.str();
6246 errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
6247 errorText_ = errorStream_.str();
6249 error( RtError::WARNING );
6253 // Check stream latency
6254 result = snd_pcm_delay( handle[0], &frames );
6255 if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
6259 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream clock; a callback return of 1 requests a drain-stop.
6261 RtApi::tickStreamTime();
6262 if ( doStopStream == 1 ) this->stopStream();
// Thread entry point for the ALSA callback thread (created in
// probeDeviceOpen).  Spins calling RtApiAlsa::callbackEvent() until
// closeStream() clears CallbackInfo::isRunning, then exits.
6265 extern "C" void *alsaCallbackHandler( void *ptr )
6267 CallbackInfo *info = (CallbackInfo *) ptr;
6268 RtApiAlsa *object = (RtApiAlsa *) info->object;
// Observe the flag through a pointer so closeStream()'s write is seen here.
6269 bool *isRunning = &info->isRunning;
6271 while ( *isRunning == true ) {
// Allow pthread_cancel requests to take effect at this well-defined point.
6272 pthread_testcancel();
6273 object->callbackEvent();
6276 pthread_exit( NULL );
6279 //******************** End of __LINUX_ALSA__ *********************//
6283 #if defined(__LINUX_OSS__)
6286 #include <sys/ioctl.h>
6289 #include "soundcard.h"
6293 extern "C" void *ossCallbackHandler(void * ptr);
6295 // A structure to hold various information related to the OSS API
6298 int id[2]; // device ids
6301 pthread_cond_t runnable;
6304 :triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
// Default constructor: no OSS-specific setup is needed until a stream
// is opened.
6307 RtApiOss :: RtApiOss()
6309 // Nothing to do here.
// Destructor: make sure any open stream is shut down and its resources
// released before the object goes away.
6312 RtApiOss :: ~RtApiOss()
6314 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Return the number of OSS audio devices, queried via the SNDCTL_SYSINFO
// ioctl on /dev/mixer (requires OSS >= 4.0).  Emits a WARNING (and,
// presumably, returns 0 — the return lines are not visible in this extract)
// when the mixer cannot be opened or queried.
6317 unsigned int RtApiOss :: getDeviceCount( void )
6319 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6320 if ( mixerfd == -1 ) {
6321 errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
6322 error( RtError::WARNING );
6326 oss_sysinfo sysinfo;
6327 if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
6329 errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
6330 error( RtError::WARNING );
6335 return sysinfo.numaudios;
// Probe one OSS device (by index) via SNDCTL_SYSINFO / SNDCTL_AUDIOINFO on
// /dev/mixer and fill an RtAudio::DeviceInfo: channel counts, native sample
// formats, supported sample rates, and name.  info.probed stays false on
// any failure path.
6338 RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
6340 RtAudio::DeviceInfo info;
6341 info.probed = false;
6343 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6344 if ( mixerfd == -1 ) {
6345 errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
6346 error( RtError::WARNING );
6350 oss_sysinfo sysinfo;
6351 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6352 if ( result == -1 ) {
6354 errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
6355 error( RtError::WARNING );
6359 unsigned nDevices = sysinfo.numaudios;
6360 if ( nDevices == 0 ) {
6362 errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
6363 error( RtError::INVALID_USE );
6366 if ( device >= nDevices ) {
6368 errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
6369 error( RtError::INVALID_USE );
6372 oss_audioinfo ainfo;
6374 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6376 if ( result == -1 ) {
6377 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6378 errorText_ = errorStream_.str();
6379 error( RtError::WARNING );
// Capability bits -> channel counts; duplex channels are the min of the two.
6384 if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
6385 if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
6386 if ( ainfo.caps & PCM_CAP_DUPLEX ) {
// NOTE(review): PCM_CAP_DUPLEX is re-tested here although the enclosing
// `if` already established it — the second test is redundant.
6387 if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )
6388 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
6391 // Probe data formats ... do for input
6392 unsigned long mask = ainfo.iformats;
6393 if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
6394 info.nativeFormats |= RTAUDIO_SINT16;
6395 if ( mask & AFMT_S8 )
6396 info.nativeFormats |= RTAUDIO_SINT8;
6397 if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
6398 info.nativeFormats |= RTAUDIO_SINT32;
6399 if ( mask & AFMT_FLOAT )
6400 info.nativeFormats |= RTAUDIO_FLOAT32;
6401 if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
6402 info.nativeFormats |= RTAUDIO_SINT24;
6404 // Check that we have at least one supported format
6405 if ( info.nativeFormats == 0 ) {
6406 errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
6407 errorText_ = errorStream_.str();
6408 error( RtError::WARNING );
6412 // Probe the supported sample rates.
6413 info.sampleRates.clear();
// If the driver reports an explicit rate list, keep only the rates RtAudio
// knows about (SAMPLE_RATES); otherwise fall back to the min/max range.
6414 if ( ainfo.nrates ) {
6415 for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
6416 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6417 if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
6418 info.sampleRates.push_back( SAMPLE_RATES[k] );
6425 // Check min and max rate values;
6426 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6427 if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] )
6428 info.sampleRates.push_back( SAMPLE_RATES[k] );
6432 if ( info.sampleRates.size() == 0 ) {
6433 errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
6434 errorText_ = errorStream_.str();
6435 error( RtError::WARNING );
6439 info.name = ainfo.name;
// Open and configure one direction (OUTPUT or INPUT) of an OSS stream:
// validate the device, open its /dev node, negotiate channels / sample
// format / fragment size / sample rate, allocate the OssHandle and the
// user/device buffers, and finally spawn the callback thread.  Returns
// true on success (return statements and some guards fall in lines not
// visible in this extract — original numbering jumps).
6446 bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
6447 unsigned int firstChannel, unsigned int sampleRate,
6448 RtAudioFormat format, unsigned int *bufferSize,
6449 RtAudio::StreamOptions *options )
// --- Re-validate the device index against the mixer's sysinfo. ---
6451 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6452 if ( mixerfd == -1 ) {
6453 errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
6457 oss_sysinfo sysinfo;
6458 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6459 if ( result == -1 ) {
6461 errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
6465 unsigned nDevices = sysinfo.numaudios;
6466 if ( nDevices == 0 ) {
6467 // This should not happen because a check is made before this function is called.
6469 errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
6473 if ( device >= nDevices ) {
6474 // This should not happen because a check is made before this function is called.
6476 errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
6480 oss_audioinfo ainfo;
6482 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6484 if ( result == -1 ) {
6485 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6486 errorText_ = errorStream_.str();
6490 // Check if device supports input or output
6491 if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
6492 ( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
6493 if ( mode == OUTPUT )
6494 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
6496 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
6497 errorText_ = errorStream_.str();
// --- Duplex handling: reopening the same device for INPUT after OUTPUT. ---
6502 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6503 if ( mode == OUTPUT )
6505 else { // mode == INPUT
6506 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
6507 // We just set the same device for playback ... close and reopen for duplex (OSS only).
6508 close( handle->id[0] );
6510 if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
6511 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
6512 errorText_ = errorStream_.str();
6515 // Check that the number previously set channels is the same.
6516 if ( stream_.nUserChannels[0] != channels ) {
6517 errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
6518 errorText_ = errorStream_.str();
6527 // Set exclusive access if specified.
6528 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
6530 // Try to open the device.
6532 fd = open( ainfo.devnode, flags, 0 );
6534 if ( errno == EBUSY )
6535 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
6537 errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
6538 errorText_ = errorStream_.str();
6542 // For duplex operation, specifically set this mode (this doesn't seem to work).
// NOTE(review): `flags | O_RDWR` is always true (bitwise OR, not a test);
// `flags & O_RDWR` or `flags == O_RDWR` was presumably intended — confirm
// against the canonical RtAudio source before changing.
6544 if ( flags | O_RDWR ) {
6545 result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
6546 if ( result == -1) {
6547 errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
6548 errorText_ = errorStream_.str();
6554 // Check the device channel support.
6555 stream_.nUserChannels[mode] = channels;
6556 if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
6558 errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
6559 errorText_ = errorStream_.str();
6563 // Set the number of channels.
// The device is opened with channels + firstChannel so the requested
// channel offset fits inside the device's channel count.
6564 int deviceChannels = channels + firstChannel;
6565 result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
6566 if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
6568 errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
6569 errorText_ = errorStream_.str();
6572 stream_.nDeviceChannels[mode] = deviceChannels;
6574 // Get the data format mask
6576 result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
6577 if ( result == -1 ) {
6579 errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
6580 errorText_ = errorStream_.str();
6584 // Determine how to set the device format.
// Prefer the user's requested format natively (_NE = native endian); accept
// the opposite-endian variant (_OE) with byte swapping enabled.
6585 stream_.userFormat = format;
6586 int deviceFormat = -1;
6587 stream_.doByteSwap[mode] = false;
6588 if ( format == RTAUDIO_SINT8 ) {
6589 if ( mask & AFMT_S8 ) {
6590 deviceFormat = AFMT_S8;
6591 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6594 else if ( format == RTAUDIO_SINT16 ) {
6595 if ( mask & AFMT_S16_NE ) {
6596 deviceFormat = AFMT_S16_NE;
6597 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6599 else if ( mask & AFMT_S16_OE ) {
6600 deviceFormat = AFMT_S16_OE;
6601 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6602 stream_.doByteSwap[mode] = true;
6605 else if ( format == RTAUDIO_SINT24 ) {
6606 if ( mask & AFMT_S24_NE ) {
6607 deviceFormat = AFMT_S24_NE;
6608 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6610 else if ( mask & AFMT_S24_OE ) {
6611 deviceFormat = AFMT_S24_OE;
6612 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6613 stream_.doByteSwap[mode] = true;
6616 else if ( format == RTAUDIO_SINT32 ) {
6617 if ( mask & AFMT_S32_NE ) {
6618 deviceFormat = AFMT_S32_NE;
6619 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6621 else if ( mask & AFMT_S32_OE ) {
6622 deviceFormat = AFMT_S32_OE;
6623 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6624 stream_.doByteSwap[mode] = true;
// The requested format isn't available: fall back to the "best" format the
// device does offer (16 -> 32 -> 24 bit, native endian first, then 8 bit);
// buffer conversion to the user format is enabled later.
6628 if ( deviceFormat == -1 ) {
6629 // The user requested format is not natively supported by the device.
6630 if ( mask & AFMT_S16_NE ) {
6631 deviceFormat = AFMT_S16_NE;
6632 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6634 else if ( mask & AFMT_S32_NE ) {
6635 deviceFormat = AFMT_S32_NE;
6636 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6638 else if ( mask & AFMT_S24_NE ) {
6639 deviceFormat = AFMT_S24_NE;
6640 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6642 else if ( mask & AFMT_S16_OE ) {
6643 deviceFormat = AFMT_S16_OE;
6644 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6645 stream_.doByteSwap[mode] = true;
6647 else if ( mask & AFMT_S32_OE ) {
6648 deviceFormat = AFMT_S32_OE;
6649 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6650 stream_.doByteSwap[mode] = true;
6652 else if ( mask & AFMT_S24_OE ) {
6653 deviceFormat = AFMT_S24_OE;
6654 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6655 stream_.doByteSwap[mode] = true;
6657 else if ( mask & AFMT_S8) {
6658 deviceFormat = AFMT_S8;
6659 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6663 if ( stream_.deviceFormat[mode] == 0 ) {
6664 // This really shouldn't happen ...
6666 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
6667 errorText_ = errorStream_.str();
6671 // Set the data format.
// The ioctl may silently substitute another format; treat that as failure.
6672 int temp = deviceFormat;
6673 result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
6674 if ( result == -1 || deviceFormat != temp ) {
6676 errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
6677 errorText_ = errorStream_.str();
6681 // Attempt to set the buffer size. According to OSS, the minimum
6682 // number of buffers is two. The supposed minimum buffer size is 16
6683 // bytes, so that will be our lower bound. The argument to this
6684 // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
6685 // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
6686 // We'll check the actual value used near the end of the setup
6688 int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
6689 if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
6691 if ( options ) buffers = options->numberOfBuffers;
6692 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
6693 if ( buffers < 2 ) buffers = 3;
// log10(x)/log10(2) == log2(ossBufferBytes): the SSSS (power-of-two size)
// field of the SETFRAGMENT argument.
6694 temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
6695 result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
6696 if ( result == -1 ) {
6698 errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
6699 errorText_ = errorStream_.str();
6702 stream_.nBuffers = buffers;
6704 // Save buffer size (in sample frames).
6705 *bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
6706 stream_.bufferSize = *bufferSize;
6708 // Set the sample rate.
6709 int srate = sampleRate;
6710 result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
6711 if ( result == -1 ) {
6713 errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
6714 errorText_ = errorStream_.str();
6718 // Verify the sample rate setup worked.
// A small deviation (<= 100 Hz) from the requested rate is tolerated.
// NOTE(review): `srate - sampleRate` mixes int and unsigned int, so the
// subtraction is done in unsigned arithmetic before abs() — confirm this
// behaves as intended when srate < sampleRate.
6719 if ( abs( srate - sampleRate ) > 100 ) {
6721 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
6722 errorText_ = errorStream_.str();
6725 stream_.sampleRate = sampleRate;
6727 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
6728 // We're doing duplex setup here.
// The single reopened fd serves both directions, so the output side must
// mirror the input side's negotiated format and channel count.
6729 stream_.deviceFormat[0] = stream_.deviceFormat[1];
6730 stream_.nDeviceChannels[0] = deviceChannels;
6733 // Set interleaving parameters.
6734 stream_.userInterleaved = true;
6735 stream_.deviceInterleaved[mode] = true;
6736 if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
6737 stream_.userInterleaved = false;
6739 // Set flags for buffer conversion
// Conversion is needed for format, channel-count, or interleaving mismatch.
6740 stream_.doConvertBuffer[mode] = false;
6741 if ( stream_.userFormat != stream_.deviceFormat[mode] )
6742 stream_.doConvertBuffer[mode] = true;
6743 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
6744 stream_.doConvertBuffer[mode] = true;
6745 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
6746 stream_.nUserChannels[mode] > 1 )
6747 stream_.doConvertBuffer[mode] = true;
6749 // Allocate the stream handles if necessary and then save.
6750 if ( stream_.apiHandle == 0 ) {
6752 handle = new OssHandle;
6754 catch ( std::bad_alloc& ) {
6755 errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
6759 if ( pthread_cond_init( &handle->runnable, NULL ) ) {
6760 errorText_ = "RtApiOss::probeDeviceOpen: error initializing pthread condition variable.";
6764 stream_.apiHandle = (void *) handle;
6767 handle = (OssHandle *) stream_.apiHandle;
6769 handle->id[mode] = fd;
6771 // Allocate necessary internal buffers.
6772 unsigned long bufferBytes;
6773 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
6774 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
6775 if ( stream_.userBuffer[mode] == NULL ) {
6776 errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
6780 if ( stream_.doConvertBuffer[mode] ) {
6782 bool makeBuffer = true;
6783 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
// The device buffer is shared between directions; only reallocate when the
// input side needs more space than the output side already reserved.
6784 if ( mode == INPUT ) {
6785 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
6786 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
6787 if ( bufferBytes <= bytesOut ) makeBuffer = false;
6792 bufferBytes *= *bufferSize;
6793 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
6794 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
6795 if ( stream_.deviceBuffer == NULL ) {
6796 errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
6802 stream_.device[mode] = device;
6803 stream_.state = STREAM_STOPPED;
6805 // Setup the buffer conversion information structure.
6806 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
6808 // Setup thread if necessary.
6809 if ( stream_.mode == OUTPUT && mode == INPUT ) {
6810 // We had already set up an output stream.
6811 stream_.mode = DUPLEX;
// Same-device duplex: both directions share the one reopened descriptor.
6812 if ( stream_.device[0] == device ) handle->id[0] = fd;
6815 stream_.mode = mode;
6817 // Setup callback thread.
6818 stream_.callbackInfo.object = (void *) this;
6820 // Set the thread attributes for joinable and realtime scheduling
6821 // priority. The higher priority will only take affect if the
6822 // program is run as root or suid.
6823 pthread_attr_t attr;
6824 pthread_attr_init( &attr );
6825 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
6826 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
6827 if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
6828 struct sched_param param;
// Clamp the requested priority into the valid SCHED_RR range.
6829 int priority = options->priority;
6830 int min = sched_get_priority_min( SCHED_RR );
6831 int max = sched_get_priority_max( SCHED_RR );
6832 if ( priority < min ) priority = min;
6833 else if ( priority > max ) priority = max;
6834 param.sched_priority = priority;
6835 pthread_attr_setschedparam( &attr, &param );
6836 pthread_attr_setschedpolicy( &attr, SCHED_RR );
6839 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6841 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6844 stream_.callbackInfo.isRunning = true;
6845 result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
6846 pthread_attr_destroy( &attr );
6848 stream_.callbackInfo.isRunning = false;
6849 errorText_ = "RtApiOss::error creating callback thread!";
// --- Error-cleanup path: release everything allocated above. ---
6858 pthread_cond_destroy( &handle->runnable );
6859 if ( handle->id[0] ) close( handle->id[0] );
6860 if ( handle->id[1] ) close( handle->id[1] );
6862 stream_.apiHandle = 0;
6865 for ( int i=0; i<2; i++ ) {
6866 if ( stream_.userBuffer[i] ) {
6867 free( stream_.userBuffer[i] );
6868 stream_.userBuffer[i] = 0;
6872 if ( stream_.deviceBuffer ) {
6873 free( stream_.deviceBuffer );
6874 stream_.deviceBuffer = 0;
// Close an open OSS stream: stop the callback thread, halt any running DSP
// devices (SNDCTL_DSP_HALT), close the descriptors, and free the OssHandle
// plus all user/device buffers.  Leaves the stream UNINITIALIZED/CLOSED.
6880 void RtApiOss :: closeStream()
6882 if ( stream_.state == STREAM_CLOSED ) {
6883 errorText_ = "RtApiOss::closeStream(): no open stream to close!";
6884 error( RtError::WARNING );
6888 OssHandle *handle = (OssHandle *) stream_.apiHandle;
// Tell the callback loop to exit, wake it if it is parked on `runnable`,
// then join the thread before tearing anything down.
6889 stream_.callbackInfo.isRunning = false;
6890 MUTEX_LOCK( &stream_.mutex );
6891 if ( stream_.state == STREAM_STOPPED )
6892 pthread_cond_signal( &handle->runnable );
6893 MUTEX_UNLOCK( &stream_.mutex );
6894 pthread_join( stream_.callbackInfo.thread, NULL );
6896 if ( stream_.state == STREAM_RUNNING ) {
// id[0] = playback fd, id[1] = capture fd (per usage below).
6897 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
6898 ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6900 ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6901 stream_.state = STREAM_STOPPED;
6905 pthread_cond_destroy( &handle->runnable );
6906 if ( handle->id[0] ) close( handle->id[0] );
6907 if ( handle->id[1] ) close( handle->id[1] );
6909 stream_.apiHandle = 0;
6912 for ( int i=0; i<2; i++ ) {
6913 if ( stream_.userBuffer[i] ) {
6914 free( stream_.userBuffer[i] );
6915 stream_.userBuffer[i] = 0;
6919 if ( stream_.deviceBuffer ) {
6920 free( stream_.deviceBuffer );
6921 stream_.deviceBuffer = 0;
6924 stream_.mode = UNINITIALIZED;
6925 stream_.state = STREAM_CLOSED;
// Start a stopped OSS stream.  OSS devices begin playing/recording as soon
// as samples are written/read, so this only flips the state to RUNNING and
// wakes the parked callback thread.
6928 void RtApiOss :: startStream()
6931 if ( stream_.state == STREAM_RUNNING ) {
6932 errorText_ = "RtApiOss::startStream(): the stream is already running!";
6933 error( RtError::WARNING );
6937 MUTEX_LOCK( &stream_.mutex );
6939 stream_.state = STREAM_RUNNING;
6941 // No need to do anything else here ... OSS automatically starts
6942 // when fed samples.
6944 MUTEX_UNLOCK( &stream_.mutex );
// Wake the callback thread, which waits on `runnable` while stopped.
6946 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6947 pthread_cond_signal( &handle->runnable );
// Stop a running OSS stream: flush the output by writing nBuffers+1 buffers
// of silence (so queued audio plays out without a click), halt both DSP
// descriptors with SNDCTL_DSP_HALT, and mark the stream STOPPED.  Emits
// SYSTEM_ERROR if any ioctl/write failed.
6950 void RtApiOss :: stopStream()
6953 if ( stream_.state == STREAM_STOPPED ) {
6954 errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
6955 error( RtError::WARNING );
6959 MUTEX_LOCK( &stream_.mutex );
6961 // The state might change while waiting on a mutex.
6962 if ( stream_.state == STREAM_STOPPED ) {
6963 MUTEX_UNLOCK( &stream_.mutex );
6968 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6969 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6971 // Flush the output with zeros a few times.
// Zero whichever buffer actually feeds the device (device buffer when
// conversion is active, user buffer otherwise).
6974 RtAudioFormat format;
6976 if ( stream_.doConvertBuffer[0] ) {
6977 buffer = stream_.deviceBuffer;
6978 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
6979 format = stream_.deviceFormat[0];
6982 buffer = stream_.userBuffer[0];
6983 samples = stream_.bufferSize * stream_.nUserChannels[0];
6984 format = stream_.userFormat;
6987 memset( buffer, 0, samples * formatBytes(format) );
6988 for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
6989 result = write( handle->id[0], buffer, samples * formatBytes(format) );
6990 if ( result == -1 ) {
6991 errorText_ = "RtApiOss::stopStream: audio write error.";
6992 error( RtError::WARNING );
6996 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6997 if ( result == -1 ) {
6998 errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
6999 errorText_ = errorStream_.str();
7002 handle->triggered = false;
// Halt the input fd too, unless duplex shares a single descriptor
// (id[0] == id[1]) that was already halted above.
7005 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
7006 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
7007 if ( result == -1 ) {
7008 errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
7009 errorText_ = errorStream_.str();
7015 stream_.state = STREAM_STOPPED;
7016 MUTEX_UNLOCK( &stream_.mutex );
7018 if ( result != -1 ) return;
7019 error( RtError::SYSTEM_ERROR );
7022 void RtApiOss :: abortStream()
7025 if ( stream_.state == STREAM_STOPPED ) {
7026 errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
7027 error( RtError::WARNING );
7031 MUTEX_LOCK( &stream_.mutex );
7033 // The state might change while waiting on a mutex.
7034 if ( stream_.state == STREAM_STOPPED ) {
7035 MUTEX_UNLOCK( &stream_.mutex );
7040 OssHandle *handle = (OssHandle *) stream_.apiHandle;
7041 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
7042 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
7043 if ( result == -1 ) {
7044 errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
7045 errorText_ = errorStream_.str();
7048 handle->triggered = false;
7051 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
7052 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
7053 if ( result == -1 ) {
7054 errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
7055 errorText_ = errorStream_.str();
7061 stream_.state = STREAM_STOPPED;
7062 MUTEX_UNLOCK( &stream_.mutex );
7064 if ( result != -1 ) return;
7065 error( RtError::SYSTEM_ERROR );
7068 void RtApiOss :: callbackEvent()
7070 OssHandle *handle = (OssHandle *) stream_.apiHandle;
7071 if ( stream_.state == STREAM_STOPPED ) {
7072 MUTEX_LOCK( &stream_.mutex );
7073 pthread_cond_wait( &handle->runnable, &stream_.mutex );
7074 if ( stream_.state != STREAM_RUNNING ) {
7075 MUTEX_UNLOCK( &stream_.mutex );
7078 MUTEX_UNLOCK( &stream_.mutex );
7081 if ( stream_.state == STREAM_CLOSED ) {
7082 errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
7083 error( RtError::WARNING );
7087 // Invoke user callback to get fresh output data.
7088 int doStopStream = 0;
7089 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
7090 double streamTime = getStreamTime();
7091 RtAudioStreamStatus status = 0;
7092 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
7093 status |= RTAUDIO_OUTPUT_UNDERFLOW;
7094 handle->xrun[0] = false;
7096 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
7097 status |= RTAUDIO_INPUT_OVERFLOW;
7098 handle->xrun[1] = false;
7100 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
7101 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
7102 if ( doStopStream == 2 ) {
7103 this->abortStream();
7107 MUTEX_LOCK( &stream_.mutex );
7109 // The state might change while waiting on a mutex.
7110 if ( stream_.state == STREAM_STOPPED ) goto unlock;
7115 RtAudioFormat format;
7117 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
7119 // Setup parameters and do buffer conversion if necessary.
7120 if ( stream_.doConvertBuffer[0] ) {
7121 buffer = stream_.deviceBuffer;
7122 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
7123 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
7124 format = stream_.deviceFormat[0];
7127 buffer = stream_.userBuffer[0];
7128 samples = stream_.bufferSize * stream_.nUserChannels[0];
7129 format = stream_.userFormat;
7132 // Do byte swapping if necessary.
7133 if ( stream_.doByteSwap[0] )
7134 byteSwapBuffer( buffer, samples, format );
7136 if ( stream_.mode == DUPLEX && handle->triggered == false ) {
7138 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
7139 result = write( handle->id[0], buffer, samples * formatBytes(format) );
7140 trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
7141 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
7142 handle->triggered = true;
7145 // Write samples to device.
7146 result = write( handle->id[0], buffer, samples * formatBytes(format) );
7148 if ( result == -1 ) {
7149 // We'll assume this is an underrun, though there isn't a
7150 // specific means for determining that.
7151 handle->xrun[0] = true;
7152 errorText_ = "RtApiOss::callbackEvent: audio write error.";
7153 error( RtError::WARNING );
7154 // Continue on to input section.
7158 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
7160 // Setup parameters.
7161 if ( stream_.doConvertBuffer[1] ) {
7162 buffer = stream_.deviceBuffer;
7163 samples = stream_.bufferSize * stream_.nDeviceChannels[1];
7164 format = stream_.deviceFormat[1];
7167 buffer = stream_.userBuffer[1];
7168 samples = stream_.bufferSize * stream_.nUserChannels[1];
7169 format = stream_.userFormat;
7172 // Read samples from device.
7173 result = read( handle->id[1], buffer, samples * formatBytes(format) );
7175 if ( result == -1 ) {
7176 // We'll assume this is an overrun, though there isn't a
7177 // specific means for determining that.
7178 handle->xrun[1] = true;
7179 errorText_ = "RtApiOss::callbackEvent: audio read error.";
7180 error( RtError::WARNING );
7184 // Do byte swapping if necessary.
7185 if ( stream_.doByteSwap[1] )
7186 byteSwapBuffer( buffer, samples, format );
7188 // Do buffer conversion if necessary.
7189 if ( stream_.doConvertBuffer[1] )
7190 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
7194 MUTEX_UNLOCK( &stream_.mutex );
7196 RtApi::tickStreamTime();
7197 if ( doStopStream == 1 ) this->stopStream();
7200 extern "C" void *ossCallbackHandler( void *ptr )
7202 CallbackInfo *info = (CallbackInfo *) ptr;
7203 RtApiOss *object = (RtApiOss *) info->object;
7204 bool *isRunning = &info->isRunning;
7206 while ( *isRunning == true ) {
7207 pthread_testcancel();
7208 object->callbackEvent();
7211 pthread_exit( NULL );
//******************** End of __LINUX_OSS__ *********************//

// *************************************************** //
//
// Protected common (OS-independent) RtAudio methods.
//
// *************************************************** //
7224 // This method can be modified to control the behavior of error
7225 // message printing.
7226 void RtApi :: error( RtError::Type type )
7228 errorStream_.str(""); // clear the ostringstream
7229 if ( type == RtError::WARNING && showWarnings_ == true )
7230 std::cerr << '\n' << errorText_ << "\n\n";
7232 throw( RtError( errorText_, type ) );
7235 void RtApi :: verifyStream()
7237 if ( stream_.state == STREAM_CLOSED ) {
7238 errorText_ = "RtApi:: a stream is not open!";
7239 error( RtError::INVALID_USE );
7243 void RtApi :: clearStreamInfo()
7245 stream_.mode = UNINITIALIZED;
7246 stream_.state = STREAM_CLOSED;
7247 stream_.sampleRate = 0;
7248 stream_.bufferSize = 0;
7249 stream_.nBuffers = 0;
7250 stream_.userFormat = 0;
7251 stream_.userInterleaved = true;
7252 stream_.streamTime = 0.0;
7253 stream_.apiHandle = 0;
7254 stream_.deviceBuffer = 0;
7255 stream_.callbackInfo.callback = 0;
7256 stream_.callbackInfo.userData = 0;
7257 stream_.callbackInfo.isRunning = false;
7258 for ( int i=0; i<2; i++ ) {
7259 stream_.device[i] = 11111;
7260 stream_.doConvertBuffer[i] = false;
7261 stream_.deviceInterleaved[i] = true;
7262 stream_.doByteSwap[i] = false;
7263 stream_.nUserChannels[i] = 0;
7264 stream_.nDeviceChannels[i] = 0;
7265 stream_.channelOffset[i] = 0;
7266 stream_.deviceFormat[i] = 0;
7267 stream_.latency[i] = 0;
7268 stream_.userBuffer[i] = 0;
7269 stream_.convertInfo[i].channels = 0;
7270 stream_.convertInfo[i].inJump = 0;
7271 stream_.convertInfo[i].outJump = 0;
7272 stream_.convertInfo[i].inFormat = 0;
7273 stream_.convertInfo[i].outFormat = 0;
7274 stream_.convertInfo[i].inOffset.clear();
7275 stream_.convertInfo[i].outOffset.clear();
7279 unsigned int RtApi :: formatBytes( RtAudioFormat format )
7281 if ( format == RTAUDIO_SINT16 )
7283 else if ( format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
7284 format == RTAUDIO_FLOAT32 )
7286 else if ( format == RTAUDIO_FLOAT64 )
7288 else if ( format == RTAUDIO_SINT8 )
7291 errorText_ = "RtApi::formatBytes: undefined format.";
7292 error( RtError::WARNING );
7297 void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
7299 if ( mode == INPUT ) { // convert device to user buffer
7300 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
7301 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
7302 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
7303 stream_.convertInfo[mode].outFormat = stream_.userFormat;
7305 else { // convert user to device buffer
7306 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
7307 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
7308 stream_.convertInfo[mode].inFormat = stream_.userFormat;
7309 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
7312 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
7313 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
7315 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
7317 // Set up the interleave/deinterleave offsets.
7318 if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
7319 if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
7320 ( mode == INPUT && stream_.userInterleaved ) ) {
7321 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7322 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
7323 stream_.convertInfo[mode].outOffset.push_back( k );
7324 stream_.convertInfo[mode].inJump = 1;
7328 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7329 stream_.convertInfo[mode].inOffset.push_back( k );
7330 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
7331 stream_.convertInfo[mode].outJump = 1;
7335 else { // no (de)interleaving
7336 if ( stream_.userInterleaved ) {
7337 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7338 stream_.convertInfo[mode].inOffset.push_back( k );
7339 stream_.convertInfo[mode].outOffset.push_back( k );
7343 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7344 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
7345 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
7346 stream_.convertInfo[mode].inJump = 1;
7347 stream_.convertInfo[mode].outJump = 1;
7352 // Add channel offset.
7353 if ( firstChannel > 0 ) {
7354 if ( stream_.deviceInterleaved[mode] ) {
7355 if ( mode == OUTPUT ) {
7356 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7357 stream_.convertInfo[mode].outOffset[k] += firstChannel;
7360 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7361 stream_.convertInfo[mode].inOffset[k] += firstChannel;
7365 if ( mode == OUTPUT ) {
7366 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7367 stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
7370 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7371 stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
7377 void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
7379 // This function does format conversion, input/output channel compensation, and
7380 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
7381 // the upper three bytes of a 32-bit integer.
7383 // Clear our device buffer when in/out duplex device channels are different
7384 if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
7385 ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
7386 memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
7389 if (info.outFormat == RTAUDIO_FLOAT64) {
7391 Float64 *out = (Float64 *)outBuffer;
7393 if (info.inFormat == RTAUDIO_SINT8) {
7394 signed char *in = (signed char *)inBuffer;
7395 scale = 1.0 / 127.5;
7396 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7397 for (j=0; j<info.channels; j++) {
7398 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7399 out[info.outOffset[j]] += 0.5;
7400 out[info.outOffset[j]] *= scale;
7403 out += info.outJump;
7406 else if (info.inFormat == RTAUDIO_SINT16) {
7407 Int16 *in = (Int16 *)inBuffer;
7408 scale = 1.0 / 32767.5;
7409 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7410 for (j=0; j<info.channels; j++) {
7411 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7412 out[info.outOffset[j]] += 0.5;
7413 out[info.outOffset[j]] *= scale;
7416 out += info.outJump;
7419 else if (info.inFormat == RTAUDIO_SINT24) {
7420 Int32 *in = (Int32 *)inBuffer;
7421 scale = 1.0 / 8388607.5;
7422 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7423 for (j=0; j<info.channels; j++) {
7424 out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0x00ffffff);
7425 out[info.outOffset[j]] += 0.5;
7426 out[info.outOffset[j]] *= scale;
7429 out += info.outJump;
7432 else if (info.inFormat == RTAUDIO_SINT32) {
7433 Int32 *in = (Int32 *)inBuffer;
7434 scale = 1.0 / 2147483647.5;
7435 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7436 for (j=0; j<info.channels; j++) {
7437 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7438 out[info.outOffset[j]] += 0.5;
7439 out[info.outOffset[j]] *= scale;
7442 out += info.outJump;
7445 else if (info.inFormat == RTAUDIO_FLOAT32) {
7446 Float32 *in = (Float32 *)inBuffer;
7447 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7448 for (j=0; j<info.channels; j++) {
7449 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7452 out += info.outJump;
7455 else if (info.inFormat == RTAUDIO_FLOAT64) {
7456 // Channel compensation and/or (de)interleaving only.
7457 Float64 *in = (Float64 *)inBuffer;
7458 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7459 for (j=0; j<info.channels; j++) {
7460 out[info.outOffset[j]] = in[info.inOffset[j]];
7463 out += info.outJump;
7467 else if (info.outFormat == RTAUDIO_FLOAT32) {
7469 Float32 *out = (Float32 *)outBuffer;
7471 if (info.inFormat == RTAUDIO_SINT8) {
7472 signed char *in = (signed char *)inBuffer;
7473 scale = (Float32) ( 1.0 / 127.5 );
7474 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7475 for (j=0; j<info.channels; j++) {
7476 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7477 out[info.outOffset[j]] += 0.5;
7478 out[info.outOffset[j]] *= scale;
7481 out += info.outJump;
7484 else if (info.inFormat == RTAUDIO_SINT16) {
7485 Int16 *in = (Int16 *)inBuffer;
7486 scale = (Float32) ( 1.0 / 32767.5 );
7487 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7488 for (j=0; j<info.channels; j++) {
7489 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7490 out[info.outOffset[j]] += 0.5;
7491 out[info.outOffset[j]] *= scale;
7494 out += info.outJump;
7497 else if (info.inFormat == RTAUDIO_SINT24) {
7498 Int32 *in = (Int32 *)inBuffer;
7499 scale = (Float32) ( 1.0 / 8388607.5 );
7500 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7501 for (j=0; j<info.channels; j++) {
7502 out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0x00ffffff);
7503 out[info.outOffset[j]] += 0.5;
7504 out[info.outOffset[j]] *= scale;
7507 out += info.outJump;
7510 else if (info.inFormat == RTAUDIO_SINT32) {
7511 Int32 *in = (Int32 *)inBuffer;
7512 scale = (Float32) ( 1.0 / 2147483647.5 );
7513 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7514 for (j=0; j<info.channels; j++) {
7515 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7516 out[info.outOffset[j]] += 0.5;
7517 out[info.outOffset[j]] *= scale;
7520 out += info.outJump;
7523 else if (info.inFormat == RTAUDIO_FLOAT32) {
7524 // Channel compensation and/or (de)interleaving only.
7525 Float32 *in = (Float32 *)inBuffer;
7526 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7527 for (j=0; j<info.channels; j++) {
7528 out[info.outOffset[j]] = in[info.inOffset[j]];
7531 out += info.outJump;
7534 else if (info.inFormat == RTAUDIO_FLOAT64) {
7535 Float64 *in = (Float64 *)inBuffer;
7536 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7537 for (j=0; j<info.channels; j++) {
7538 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7541 out += info.outJump;
7545 else if (info.outFormat == RTAUDIO_SINT32) {
7546 Int32 *out = (Int32 *)outBuffer;
7547 if (info.inFormat == RTAUDIO_SINT8) {
7548 signed char *in = (signed char *)inBuffer;
7549 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7550 for (j=0; j<info.channels; j++) {
7551 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7552 out[info.outOffset[j]] <<= 24;
7555 out += info.outJump;
7558 else if (info.inFormat == RTAUDIO_SINT16) {
7559 Int16 *in = (Int16 *)inBuffer;
7560 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7561 for (j=0; j<info.channels; j++) {
7562 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7563 out[info.outOffset[j]] <<= 16;
7566 out += info.outJump;
7569 else if (info.inFormat == RTAUDIO_SINT24) {
7570 Int32 *in = (Int32 *)inBuffer;
7571 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7572 for (j=0; j<info.channels; j++) {
7573 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7574 out[info.outOffset[j]] <<= 8;
7577 out += info.outJump;
7580 else if (info.inFormat == RTAUDIO_SINT32) {
7581 // Channel compensation and/or (de)interleaving only.
7582 Int32 *in = (Int32 *)inBuffer;
7583 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7584 for (j=0; j<info.channels; j++) {
7585 out[info.outOffset[j]] = in[info.inOffset[j]];
7588 out += info.outJump;
7591 else if (info.inFormat == RTAUDIO_FLOAT32) {
7592 Float32 *in = (Float32 *)inBuffer;
7593 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7594 for (j=0; j<info.channels; j++) {
7595 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
7598 out += info.outJump;
7601 else if (info.inFormat == RTAUDIO_FLOAT64) {
7602 Float64 *in = (Float64 *)inBuffer;
7603 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7604 for (j=0; j<info.channels; j++) {
7605 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
7608 out += info.outJump;
7612 else if (info.outFormat == RTAUDIO_SINT24) {
7613 Int32 *out = (Int32 *)outBuffer;
7614 if (info.inFormat == RTAUDIO_SINT8) {
7615 signed char *in = (signed char *)inBuffer;
7616 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7617 for (j=0; j<info.channels; j++) {
7618 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7619 out[info.outOffset[j]] <<= 16;
7622 out += info.outJump;
7625 else if (info.inFormat == RTAUDIO_SINT16) {
7626 Int16 *in = (Int16 *)inBuffer;
7627 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7628 for (j=0; j<info.channels; j++) {
7629 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7630 out[info.outOffset[j]] <<= 8;
7633 out += info.outJump;
7636 else if (info.inFormat == RTAUDIO_SINT24) {
7637 // Channel compensation and/or (de)interleaving only.
7638 Int32 *in = (Int32 *)inBuffer;
7639 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7640 for (j=0; j<info.channels; j++) {
7641 out[info.outOffset[j]] = in[info.inOffset[j]];
7644 out += info.outJump;
7647 else if (info.inFormat == RTAUDIO_SINT32) {
7648 Int32 *in = (Int32 *)inBuffer;
7649 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7650 for (j=0; j<info.channels; j++) {
7651 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7652 out[info.outOffset[j]] >>= 8;
7655 out += info.outJump;
7658 else if (info.inFormat == RTAUDIO_FLOAT32) {
7659 Float32 *in = (Float32 *)inBuffer;
7660 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7661 for (j=0; j<info.channels; j++) {
7662 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
7665 out += info.outJump;
7668 else if (info.inFormat == RTAUDIO_FLOAT64) {
7669 Float64 *in = (Float64 *)inBuffer;
7670 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7671 for (j=0; j<info.channels; j++) {
7672 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
7675 out += info.outJump;
7679 else if (info.outFormat == RTAUDIO_SINT16) {
7680 Int16 *out = (Int16 *)outBuffer;
7681 if (info.inFormat == RTAUDIO_SINT8) {
7682 signed char *in = (signed char *)inBuffer;
7683 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7684 for (j=0; j<info.channels; j++) {
7685 out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
7686 out[info.outOffset[j]] <<= 8;
7689 out += info.outJump;
7692 else if (info.inFormat == RTAUDIO_SINT16) {
7693 // Channel compensation and/or (de)interleaving only.
7694 Int16 *in = (Int16 *)inBuffer;
7695 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7696 for (j=0; j<info.channels; j++) {
7697 out[info.outOffset[j]] = in[info.inOffset[j]];
7700 out += info.outJump;
7703 else if (info.inFormat == RTAUDIO_SINT24) {
7704 Int32 *in = (Int32 *)inBuffer;
7705 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7706 for (j=0; j<info.channels; j++) {
7707 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 8) & 0x0000ffff);
7710 out += info.outJump;
7713 else if (info.inFormat == RTAUDIO_SINT32) {
7714 Int32 *in = (Int32 *)inBuffer;
7715 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7716 for (j=0; j<info.channels; j++) {
7717 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
7720 out += info.outJump;
7723 else if (info.inFormat == RTAUDIO_FLOAT32) {
7724 Float32 *in = (Float32 *)inBuffer;
7725 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7726 for (j=0; j<info.channels; j++) {
7727 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
7730 out += info.outJump;
7733 else if (info.inFormat == RTAUDIO_FLOAT64) {
7734 Float64 *in = (Float64 *)inBuffer;
7735 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7736 for (j=0; j<info.channels; j++) {
7737 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
7740 out += info.outJump;
7744 else if (info.outFormat == RTAUDIO_SINT8) {
7745 signed char *out = (signed char *)outBuffer;
7746 if (info.inFormat == RTAUDIO_SINT8) {
7747 // Channel compensation and/or (de)interleaving only.
7748 signed char *in = (signed char *)inBuffer;
7749 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7750 for (j=0; j<info.channels; j++) {
7751 out[info.outOffset[j]] = in[info.inOffset[j]];
7754 out += info.outJump;
7757 if (info.inFormat == RTAUDIO_SINT16) {
7758 Int16 *in = (Int16 *)inBuffer;
7759 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7760 for (j=0; j<info.channels; j++) {
7761 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
7764 out += info.outJump;
7767 else if (info.inFormat == RTAUDIO_SINT24) {
7768 Int32 *in = (Int32 *)inBuffer;
7769 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7770 for (j=0; j<info.channels; j++) {
7771 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 16) & 0x000000ff);
7774 out += info.outJump;
7777 else if (info.inFormat == RTAUDIO_SINT32) {
7778 Int32 *in = (Int32 *)inBuffer;
7779 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7780 for (j=0; j<info.channels; j++) {
7781 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
7784 out += info.outJump;
7787 else if (info.inFormat == RTAUDIO_FLOAT32) {
7788 Float32 *in = (Float32 *)inBuffer;
7789 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7790 for (j=0; j<info.channels; j++) {
7791 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
7794 out += info.outJump;
7797 else if (info.inFormat == RTAUDIO_FLOAT64) {
7798 Float64 *in = (Float64 *)inBuffer;
7799 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7800 for (j=0; j<info.channels; j++) {
7801 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
7804 out += info.outJump;
7810 //static inline uint16_t bswap_16(uint16_t x) { return (x>>8) | (x<<8); }
7811 //static inline uint32_t bswap_32(uint32_t x) { return (bswap_16(x&0xffff)<<16) | (bswap_16(x>>16)); }
7812 //static inline uint64_t bswap_64(uint64_t x) { return (((unsigned long long)bswap_32(x&0xffffffffull))<<32) | (bswap_32(x>>32)); }
7814 void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
7820 if ( format == RTAUDIO_SINT16 ) {
7821 for ( unsigned int i=0; i<samples; i++ ) {
7822 // Swap 1st and 2nd bytes.
7827 // Increment 2 bytes.
7831 else if ( format == RTAUDIO_SINT24 ||
7832 format == RTAUDIO_SINT32 ||
7833 format == RTAUDIO_FLOAT32 ) {
7834 for ( unsigned int i=0; i<samples; i++ ) {
7835 // Swap 1st and 4th bytes.
7840 // Swap 2nd and 3rd bytes.
7846 // Increment 3 more bytes.
7850 else if ( format == RTAUDIO_FLOAT64 ) {
7851 for ( unsigned int i=0; i<samples; i++ ) {
7852 // Swap 1st and 8th bytes
7857 // Swap 2nd and 7th bytes
7863 // Swap 3rd and 6th bytes
7869 // Swap 4th and 5th bytes
7875 // Increment 5 more bytes.
// Indentation settings for Vim and Emacs
//
// Local variables:
// c-basic-offset: 2
// indent-tabs-mode: nil
// End:
//
// vim: et sts=2 sw=2