1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), Macintosh OS X (CoreAudio and Jack), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2010 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 asked to send the modifications to the original developer so that
28 they can be incorporated into the canonical version. This is,
29 however, not a binding provision of this license.
31 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
34 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
35 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
36 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
39 /************************************************************************/
41 // RtAudio: Version 4.0.7
49 // Static variable definitions.
50 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
51 const unsigned int RtApi::SAMPLE_RATES[] = {
52 4000, 5512, 8000, 9600, 11025, 16000, 22050,
53 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform mutex abstraction: map the MUTEX_* helpers onto the native
// threading primitive of the compiled backend.
#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
  // Windows backends use Win32 critical sections.
  #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
  #define MUTEX_DESTROY(A)    DeleteCriticalSection(A)
  #define MUTEX_LOCK(A)       EnterCriticalSection(A)
  #define MUTEX_UNLOCK(A)     LeaveCriticalSection(A)
#elif defined(__LINUX_ALSA__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
  // pthread API for the Unix-like backends.
  #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
  #define MUTEX_DESTROY(A)    pthread_mutex_destroy(A)
  #define MUTEX_LOCK(A)       pthread_mutex_lock(A)
  #define MUTEX_UNLOCK(A)     pthread_mutex_unlock(A)
#else
  // No real API compiled in: expand to harmless no-ops so the code
  // still builds.  LOCK/UNLOCK are defined too so any caller compiles.
  #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
  #define MUTEX_DESTROY(A)    abs(*A) // dummy definitions
  #define MUTEX_LOCK(A)       abs(*A) // dummy definitions
  #define MUTEX_UNLOCK(A)     abs(*A) // dummy definitions
#endif
72 // *************************************************** //
74 // RtAudio definitions.
76 // *************************************************** //
78 void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()
82 // The order here will control the order of RtAudio's API search in
84 #if defined(__UNIX_JACK__)
85 apis.push_back( UNIX_JACK );
87 #if defined(__LINUX_ALSA__)
88 apis.push_back( LINUX_ALSA );
90 #if defined(__LINUX_OSS__)
91 apis.push_back( LINUX_OSS );
93 #if defined(__WINDOWS_ASIO__)
94 apis.push_back( WINDOWS_ASIO );
96 #if defined(__WINDOWS_DS__)
97 apis.push_back( WINDOWS_DS );
99 #if defined(__MACOSX_CORE__)
100 apis.push_back( MACOSX_CORE );
102 #if defined(__RTAUDIO_DUMMY__)
103 apis.push_back( RTAUDIO_DUMMY );
107 void RtAudio :: openRtApi( RtAudio::Api api )
109 #if defined(__UNIX_JACK__)
110 if ( api == UNIX_JACK )
111 rtapi_ = new RtApiJack();
113 #if defined(__LINUX_ALSA__)
114 if ( api == LINUX_ALSA )
115 rtapi_ = new RtApiAlsa();
117 #if defined(__LINUX_OSS__)
118 if ( api == LINUX_OSS )
119 rtapi_ = new RtApiOss();
121 #if defined(__WINDOWS_ASIO__)
122 if ( api == WINDOWS_ASIO )
123 rtapi_ = new RtApiAsio();
125 #if defined(__WINDOWS_DS__)
126 if ( api == WINDOWS_DS )
127 rtapi_ = new RtApiDs();
129 #if defined(__MACOSX_CORE__)
130 if ( api == MACOSX_CORE )
131 rtapi_ = new RtApiCore();
133 #if defined(__RTAUDIO_DUMMY__)
134 if ( api == RTAUDIO_DUMMY )
135 rtapi_ = new RtApiDummy();
139 RtAudio :: RtAudio( RtAudio::Api api ) throw()
143 if ( api != UNSPECIFIED ) {
144 // Attempt to open the specified API.
146 if ( rtapi_ ) return;
148 // No compiled support for specified API value. Issue a debug
149 // warning and continue as if no API was specified.
150 std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
153 // Iterate through the compiled APIs and return as soon as we find
154 // one with at least one device or we reach the end of the list.
155 std::vector< RtAudio::Api > apis;
156 getCompiledApi( apis );
157 for ( unsigned int i=0; i<apis.size(); i++ ) {
158 openRtApi( apis[i] );
159 if ( rtapi_->getDeviceCount() ) break;
162 if ( rtapi_ ) return;
164 // It should not be possible to get here because the preprocessor
165 // definition __RTAUDIO_DUMMY__ is automatically defined if no
166 // API-specific definitions are passed to the compiler. But just in
167 // case something weird happens, we'll print out an error message.
168 std::cerr << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
171 RtAudio :: ~RtAudio() throw()
176 void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
177 RtAudio::StreamParameters *inputParameters,
178 RtAudioFormat format, unsigned int sampleRate,
179 unsigned int *bufferFrames,
180 RtAudioCallback callback, void *userData,
181 RtAudio::StreamOptions *options )
183 return rtapi_->openStream( outputParameters, inputParameters, format,
184 sampleRate, bufferFrames, callback,
188 // *************************************************** //
190 // Public RtApi definitions (see end of file for
191 // private or protected utility functions).
193 // *************************************************** //
197 stream_.state = STREAM_CLOSED;
198 stream_.mode = UNINITIALIZED;
199 stream_.apiHandle = 0;
200 stream_.userBuffer[0] = 0;
201 stream_.userBuffer[1] = 0;
202 MUTEX_INITIALIZE( &stream_.mutex );
203 showWarnings_ = true;
208 MUTEX_DESTROY( &stream_.mutex );
211 void RtApi :: openStream( RtAudio::StreamParameters *oParams,
212 RtAudio::StreamParameters *iParams,
213 RtAudioFormat format, unsigned int sampleRate,
214 unsigned int *bufferFrames,
215 RtAudioCallback callback, void *userData,
216 RtAudio::StreamOptions *options )
218 if ( stream_.state != STREAM_CLOSED ) {
219 errorText_ = "RtApi::openStream: a stream is already open!";
220 error( RtError::INVALID_USE );
223 if ( oParams && oParams->nChannels < 1 ) {
224 errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
225 error( RtError::INVALID_USE );
228 if ( iParams && iParams->nChannels < 1 ) {
229 errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
230 error( RtError::INVALID_USE );
233 if ( oParams == NULL && iParams == NULL ) {
234 errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
235 error( RtError::INVALID_USE );
238 if ( formatBytes(format) == 0 ) {
239 errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
240 error( RtError::INVALID_USE );
243 unsigned int nDevices = getDeviceCount();
244 unsigned int oChannels = 0;
246 oChannels = oParams->nChannels;
247 if ( oParams->deviceId >= nDevices ) {
248 errorText_ = "RtApi::openStream: output device parameter value is invalid.";
249 error( RtError::INVALID_USE );
253 unsigned int iChannels = 0;
255 iChannels = iParams->nChannels;
256 if ( iParams->deviceId >= nDevices ) {
257 errorText_ = "RtApi::openStream: input device parameter value is invalid.";
258 error( RtError::INVALID_USE );
265 if ( oChannels > 0 ) {
267 result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
268 sampleRate, format, bufferFrames, options );
269 if ( result == false ) error( RtError::SYSTEM_ERROR );
272 if ( iChannels > 0 ) {
274 result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
275 sampleRate, format, bufferFrames, options );
276 if ( result == false ) {
277 if ( oChannels > 0 ) closeStream();
278 error( RtError::SYSTEM_ERROR );
282 stream_.callbackInfo.callback = (void *) callback;
283 stream_.callbackInfo.userData = userData;
285 if ( options ) options->numberOfBuffers = stream_.nBuffers;
286 stream_.state = STREAM_STOPPED;
289 unsigned int RtApi :: getDefaultInputDevice( void )
291 // Should be implemented in subclasses if possible.
295 unsigned int RtApi :: getDefaultOutputDevice( void )
297 // Should be implemented in subclasses if possible.
301 void RtApi :: closeStream( void )
303 // MUST be implemented in subclasses!
307 bool RtApi :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
308 unsigned int firstChannel, unsigned int sampleRate,
309 RtAudioFormat format, unsigned int *bufferSize,
310 RtAudio::StreamOptions *options )
312 // MUST be implemented in subclasses!
316 void RtApi :: tickStreamTime( void )
318 // Subclasses that do not provide their own implementation of
319 // getStreamTime should call this function once per buffer I/O to
320 // provide basic stream time support.
322 stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
324 #if defined( HAVE_GETTIMEOFDAY )
325 gettimeofday( &stream_.lastTickTimestamp, NULL );
329 long RtApi :: getStreamLatency( void )
333 long totalLatency = 0;
334 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
335 totalLatency = stream_.latency[0];
336 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
337 totalLatency += stream_.latency[1];
342 double RtApi :: getStreamTime( void )
346 #if defined( HAVE_GETTIMEOFDAY )
347 // Return a very accurate estimate of the stream time by
348 // adding in the elapsed time since the last tick.
352 if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
353 return stream_.streamTime;
355 gettimeofday( &now, NULL );
356 then = stream_.lastTickTimestamp;
357 return stream_.streamTime +
358 ((now.tv_sec + 0.000001 * now.tv_usec) -
359 (then.tv_sec + 0.000001 * then.tv_usec));
361 return stream_.streamTime;
365 unsigned int RtApi :: getStreamSampleRate( void )
369 return stream_.sampleRate;
373 // *************************************************** //
375 // OS/API-specific methods.
377 // *************************************************** //
379 #if defined(__MACOSX_CORE__)
381 // The OS X CoreAudio API is designed to use a separate callback
382 // procedure for each of its audio devices. A single RtAudio duplex
383 // stream using two different devices is supported here, though it
384 // cannot be guaranteed to always behave correctly because we cannot
385 // synchronize these two callbacks.
387 // A property listener is installed for over/underrun information.
388 // However, no functionality is currently provided to allow property
389 // listeners to trigger user handlers because it is unclear what could
390 // be done if a critical stream parameter (buffer size, sample rate,
391 // device disconnect) notification arrived. The listeners entail
392 // quite a bit of extra code and most likely, a user program wouldn't
393 // be prepared for the result anyway. However, we do provide a flag
394 // to the client callback function to inform of an over/underrun.
396 // The mechanism for querying and setting system parameters was
397 // updated (and perhaps simplified) in OS-X version 10.4. However,
398 // since 10.4 support is not necessarily available to all users, I've
399 // decided not to update the respective code at this time. Perhaps
400 // this will happen when Apple makes 10.4 free for everyone. :-)
402 // A structure to hold various information related to the CoreAudio API
405 AudioDeviceID id[2]; // device ids
406 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
407 AudioDeviceIOProcID procId[2];
409 UInt32 iStream[2]; // device stream index (or first if using multiple)
410 UInt32 nStreams[2]; // number of streams to use
413 pthread_cond_t condition;
414 int drainCounter; // Tracks callback counts when draining
415 bool internalDrain; // Indicates if stop is initiated from callback or not.
418 :deviceBuffer(0), drainCounter(0), internalDrain(false) { nStreams[0] = 1; nStreams[1] = 1; id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
421 RtApiCore :: RtApiCore()
423 // Nothing to do here.
426 RtApiCore :: ~RtApiCore()
428 // The subclass destructor gets called before the base class
429 // destructor, so close an existing stream before deallocating
430 // apiDeviceId memory.
431 if ( stream_.state != STREAM_CLOSED ) closeStream();
434 unsigned int RtApiCore :: getDeviceCount( void )
436 // Find out how many audio devices there are, if any.
438 AudioObjectPropertyAddress propertyAddress = { kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
439 OSStatus result = AudioObjectGetPropertyDataSize( kAudioObjectSystemObject, &propertyAddress, 0, NULL, &dataSize );
440 if ( result != noErr ) {
441 errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
442 error( RtError::WARNING );
446 return dataSize / sizeof( AudioDeviceID );
449 unsigned int RtApiCore :: getDefaultInputDevice( void )
451 unsigned int nDevices = getDeviceCount();
452 if ( nDevices <= 1 ) return 0;
455 UInt32 dataSize = sizeof( AudioDeviceID );
456 AudioObjectPropertyAddress property = { kAudioHardwarePropertyDefaultInputDevice, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
457 OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, &id );
458 if ( result != noErr ) {
459 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
460 error( RtError::WARNING );
464 dataSize *= nDevices;
465 AudioDeviceID deviceList[ nDevices ];
466 property.mSelector = kAudioHardwarePropertyDevices;
467 result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, (void *) &deviceList );
468 if ( result != noErr ) {
469 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
470 error( RtError::WARNING );
474 for ( unsigned int i=0; i<nDevices; i++ )
475 if ( id == deviceList[i] ) return i;
477 errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
478 error( RtError::WARNING );
482 unsigned int RtApiCore :: getDefaultOutputDevice( void )
484 unsigned int nDevices = getDeviceCount();
485 if ( nDevices <= 1 ) return 0;
488 UInt32 dataSize = sizeof( AudioDeviceID );
489 AudioObjectPropertyAddress property = { kAudioHardwarePropertyDefaultOutputDevice, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
490 OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, &id );
491 if ( result != noErr ) {
492 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
493 error( RtError::WARNING );
497 dataSize = sizeof( AudioDeviceID ) * nDevices;
498 AudioDeviceID deviceList[ nDevices ];
499 property.mSelector = kAudioHardwarePropertyDevices;
500 result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, (void *) &deviceList );
501 if ( result != noErr ) {
502 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
503 error( RtError::WARNING );
507 for ( unsigned int i=0; i<nDevices; i++ )
508 if ( id == deviceList[i] ) return i;
510 errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
511 error( RtError::WARNING );
// Probe one CoreAudio device and fill an RtAudio::DeviceInfo with its
// name, channel counts, supported sample rates and native format.
// Emits RtError::INVALID_USE for a bad device index and WARNING-level
// errors (leaving info partially filled) for CoreAudio query failures.
//
// NOTE(review): this listing has lines elided throughout (visible in
// the jumps of the embedded line numbers) — e.g. the early-return
// 'return info;' statements, 'free( bufferList );' calls and closing
// braces are missing and must be restored from the canonical source.
515 RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
517 RtAudio::DeviceInfo info;
// Validate the device index against the current device count.
521 unsigned int nDevices = getDeviceCount();
522 if ( nDevices == 0 ) {
523 errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
524 error( RtError::INVALID_USE );
527 if ( device >= nDevices ) {
528 errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
529 error( RtError::INVALID_USE );
// Fetch the full AudioDeviceID list and pick out the requested device.
532 AudioDeviceID deviceList[ nDevices ];
533 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
534 AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,
535 kAudioObjectPropertyScopeGlobal,
536 kAudioObjectPropertyElementMaster };
537 OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property,
538 0, NULL, &dataSize, (void *) &deviceList );
539 if ( result != noErr ) {
540 errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
541 error( RtError::WARNING );
545 AudioDeviceID id = deviceList[ device ];
// Build the reported name as "<manufacturer>: <device name>".
547 // Get the device name.
550 dataSize = sizeof( CFStringRef );
551 property.mSelector = kAudioObjectPropertyManufacturer;
552 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &cfname );
553 if ( result != noErr ) {
554 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
555 errorText_ = errorStream_.str();
556 error( RtError::WARNING );
// NOTE(review): CFStringGetCStringPtr may return NULL for some
// encodings — strlen(mname) would then crash; confirm upstream fix.
560 const char *mname = CFStringGetCStringPtr( cfname, CFStringGetSystemEncoding() );
561 info.name.append( (const char *)mname, strlen(mname) );
562 info.name.append( ": " );
565 property.mSelector = kAudioObjectPropertyName;
566 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &cfname );
567 if ( result != noErr ) {
568 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
569 errorText_ = errorStream_.str();
570 error( RtError::WARNING );
574 const char *name = CFStringGetCStringPtr( cfname, CFStringGetSystemEncoding() );
575 info.name.append( (const char *)name, strlen(name) );
// Query the output-side stream configuration to count output channels.
578 // Get the output stream "configuration".
579 AudioBufferList *bufferList = nil;
580 property.mSelector = kAudioDevicePropertyStreamConfiguration;
581 property.mScope = kAudioDevicePropertyScopeOutput;
582 // property.mElement = kAudioObjectPropertyElementWildcard;
584 result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
585 if ( result != noErr || dataSize == 0 ) {
586 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
587 errorText_ = errorStream_.str();
588 error( RtError::WARNING );
592 // Allocate the AudioBufferList.
593 bufferList = (AudioBufferList *) malloc( dataSize );
594 if ( bufferList == NULL ) {
595 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
596 error( RtError::WARNING );
600 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );
601 if ( result != noErr || dataSize == 0 ) {
603 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
604 errorText_ = errorStream_.str();
605 error( RtError::WARNING );
// Output channels = sum of channels over all output streams.
609 // Get output channel information.
610 unsigned int i, nStreams = bufferList->mNumberBuffers;
611 for ( i=0; i<nStreams; i++ )
612 info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
// Repeat the same query on the input scope for input channels.
615 // Get the input stream "configuration".
616 property.mScope = kAudioDevicePropertyScopeInput;
617 result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
618 if ( result != noErr || dataSize == 0 ) {
619 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
620 errorText_ = errorStream_.str();
621 error( RtError::WARNING );
625 // Allocate the AudioBufferList.
626 bufferList = (AudioBufferList *) malloc( dataSize );
627 if ( bufferList == NULL ) {
628 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
629 error( RtError::WARNING );
633 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );
634 if (result != noErr || dataSize == 0) {
636 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
637 errorText_ = errorStream_.str();
638 error( RtError::WARNING );
642 // Get input channel information.
643 nStreams = bufferList->mNumberBuffers;
644 for ( i=0; i<nStreams; i++ )
645 info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
// Duplex capacity is the smaller of the two directional counts.
648 // If device opens for both playback and capture, we determine the channels.
649 if ( info.outputChannels > 0 && info.inputChannels > 0 )
650 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
// Sample-rate probing uses the input scope only for capture-only devices.
652 // Probe the device sample rates.
653 bool isInput = false;
654 if ( info.outputChannels == 0 ) isInput = true;
656 // Determine the supported sample rates.
657 property.mSelector = kAudioDevicePropertyAvailableNominalSampleRates;
658 if ( isInput == false ) property.mScope = kAudioDevicePropertyScopeOutput;
659 result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
660 if ( result != kAudioHardwareNoError || dataSize == 0 ) {
661 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
662 errorText_ = errorStream_.str();
663 error( RtError::WARNING );
667 UInt32 nRanges = dataSize / sizeof( AudioValueRange );
668 AudioValueRange rangeList[ nRanges ];
669 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &rangeList );
670 if ( result != kAudioHardwareNoError ) {
671 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
672 errorText_ = errorStream_.str();
673 error( RtError::WARNING );
// Collapse the reported ranges into one [min, max] envelope, then keep
// each entry of the static SAMPLE_RATES table that falls inside it.
677 Float64 minimumRate = 100000000.0, maximumRate = 0.0;
678 for ( UInt32 i=0; i<nRanges; i++ ) {
679 if ( rangeList[i].mMinimum < minimumRate ) minimumRate = rangeList[i].mMinimum;
680 if ( rangeList[i].mMaximum > maximumRate ) maximumRate = rangeList[i].mMaximum;
683 info.sampleRates.clear();
684 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
685 if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate )
686 info.sampleRates.push_back( SAMPLE_RATES[k] );
689 if ( info.sampleRates.size() == 0 ) {
690 errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";
691 errorText_ = errorStream_.str();
692 error( RtError::WARNING );
696 // CoreAudio always uses 32-bit floating point data for PCM streams.
697 // Thus, any other "physical" formats supported by the device are of
698 // no interest to the client.
699 info.nativeFormats = RTAUDIO_FLOAT32;
// Flag whether this device is the system default for either direction.
701 if ( info.outputChannels > 0 )
702 if ( getDefaultOutputDevice() == device ) info.isDefaultOutput = true;
703 if ( info.inputChannels > 0 )
704 if ( getDefaultInputDevice() == device ) info.isDefaultInput = true;
710 OSStatus callbackHandler( AudioDeviceID inDevice,
711 const AudioTimeStamp* inNow,
712 const AudioBufferList* inInputData,
713 const AudioTimeStamp* inInputTime,
714 AudioBufferList* outOutputData,
715 const AudioTimeStamp* inOutputTime,
718 CallbackInfo *info = (CallbackInfo *) infoPointer;
720 RtApiCore *object = (RtApiCore *) info->object;
721 if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
722 return kAudioHardwareUnspecifiedError;
724 return kAudioHardwareNoError;
727 OSStatus deviceListener( AudioObjectID inDevice,
729 const AudioObjectPropertyAddress properties[],
730 void* handlePointer )
732 CoreHandle *handle = (CoreHandle *) handlePointer;
733 for ( UInt32 i=0; i<nAddresses; i++ ) {
734 if ( properties[i].mSelector == kAudioDeviceProcessorOverload ) {
735 if ( properties[i].mScope == kAudioDevicePropertyScopeInput )
736 handle->xrun[1] = true;
738 handle->xrun[0] = true;
742 return kAudioHardwareNoError;
745 bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
746 unsigned int firstChannel, unsigned int sampleRate,
747 RtAudioFormat format, unsigned int *bufferSize,
748 RtAudio::StreamOptions *options )
751 unsigned int nDevices = getDeviceCount();
752 if ( nDevices == 0 ) {
753 // This should not happen because a check is made before this function is called.
754 errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
758 if ( device >= nDevices ) {
759 // This should not happen because a check is made before this function is called.
760 errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
764 AudioDeviceID deviceList[ nDevices ];
765 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
766 AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,
767 kAudioObjectPropertyScopeGlobal,
768 kAudioObjectPropertyElementMaster };
769 OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property,
770 0, NULL, &dataSize, (void *) &deviceList );
771 if ( result != noErr ) {
772 errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
776 AudioDeviceID id = deviceList[ device ];
778 // Setup for stream mode.
779 bool isInput = false;
780 if ( mode == INPUT ) {
782 property.mScope = kAudioDevicePropertyScopeInput;
785 property.mScope = kAudioDevicePropertyScopeOutput;
787 // Get the stream "configuration".
788 AudioBufferList *bufferList = nil;
790 property.mSelector = kAudioDevicePropertyStreamConfiguration;
791 result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
792 if ( result != noErr || dataSize == 0 ) {
793 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
794 errorText_ = errorStream_.str();
798 // Allocate the AudioBufferList.
799 bufferList = (AudioBufferList *) malloc( dataSize );
800 if ( bufferList == NULL ) {
801 errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
805 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );
806 if (result != noErr || dataSize == 0) {
807 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
808 errorText_ = errorStream_.str();
812 // Search for one or more streams that contain the desired number of
813 // channels. CoreAudio devices can have an arbitrary number of
814 // streams and each stream can have an arbitrary number of channels.
815 // For each stream, a single buffer of interleaved samples is
816 // provided. RtAudio prefers the use of one stream of interleaved
817 // data or multiple consecutive single-channel streams. However, we
818 // now support multiple consecutive multi-channel streams of
819 // interleaved data as well.
820 UInt32 iStream, offsetCounter = firstChannel;
821 UInt32 nStreams = bufferList->mNumberBuffers;
822 bool monoMode = false;
823 bool foundStream = false;
825 // First check that the device supports the requested number of
827 UInt32 deviceChannels = 0;
828 for ( iStream=0; iStream<nStreams; iStream++ )
829 deviceChannels += bufferList->mBuffers[iStream].mNumberChannels;
831 if ( deviceChannels < ( channels + firstChannel ) ) {
833 errorStream_ << "RtApiCore::probeDeviceOpen: the device (" << device << ") does not support the requested channel count.";
834 errorText_ = errorStream_.str();
838 // Look for a single stream meeting our needs.
839 UInt32 firstStream, streamCount = 1, streamChannels = 0, channelOffset = 0;
840 for ( iStream=0; iStream<nStreams; iStream++ ) {
841 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
842 if ( streamChannels >= channels + offsetCounter ) {
843 firstStream = iStream;
844 channelOffset = offsetCounter;
848 if ( streamChannels > offsetCounter ) break;
849 offsetCounter -= streamChannels;
852 // If we didn't find a single stream above, then we should be able
853 // to meet the channel specification with multiple streams.
854 if ( foundStream == false ) {
856 offsetCounter = firstChannel;
857 for ( iStream=0; iStream<nStreams; iStream++ ) {
858 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
859 if ( streamChannels > offsetCounter ) break;
860 offsetCounter -= streamChannels;
863 firstStream = iStream;
864 channelOffset = offsetCounter;
865 Int32 channelCounter = channels + offsetCounter - streamChannels;
867 if ( streamChannels > 1 ) monoMode = false;
868 while ( channelCounter > 0 ) {
869 streamChannels = bufferList->mBuffers[++iStream].mNumberChannels;
870 if ( streamChannels > 1 ) monoMode = false;
871 channelCounter -= streamChannels;
878 // Determine the buffer size.
879 AudioValueRange bufferRange;
880 dataSize = sizeof( AudioValueRange );
881 property.mSelector = kAudioDevicePropertyBufferFrameSizeRange;
882 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &bufferRange );
884 if ( result != noErr ) {
885 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
886 errorText_ = errorStream_.str();
890 if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
891 else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
892 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
894 // Set the buffer size. For multiple streams, I'm assuming we only
895 // need to make this setting for the master channel.
896 UInt32 theSize = (UInt32) *bufferSize;
897 dataSize = sizeof( UInt32 );
898 property.mSelector = kAudioDevicePropertyBufferFrameSize;
899 result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &theSize );
901 if ( result != noErr ) {
902 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
903 errorText_ = errorStream_.str();
907 // If attempting to setup a duplex stream, the bufferSize parameter
908 // MUST be the same in both directions!
909 *bufferSize = theSize;
910 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
911 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
912 errorText_ = errorStream_.str();
916 stream_.bufferSize = *bufferSize;
917 stream_.nBuffers = 1;
919 // Check and if necessary, change the sample rate for the device.
921 dataSize = sizeof( Float64 );
922 property.mSelector = kAudioDevicePropertyNominalSampleRate;
923 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &nominalRate );
925 if ( result != noErr ) {
926 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting current sample rate.";
927 errorText_ = errorStream_.str();
931 // Only change the sample rate if off by more than 1 Hz.
932 if ( fabs( nominalRate - (double)sampleRate ) > 1.0 ) {
933 nominalRate = (Float64) sampleRate;
934 result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &nominalRate );
936 if ( result != noErr ) {
937 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate for device (" << device << ").";
938 errorText_ = errorStream_.str();
943 // Try to set "hog" mode ... it's not clear to me this is working.
944 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) {
946 dataSize = sizeof( hog_pid );
947 property.mSelector = kAudioDevicePropertyHogMode;
948 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &hog_pid );
949 if ( result != noErr ) {
950 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting 'hog' state!";
951 errorText_ = errorStream_.str();
955 if ( hog_pid != getpid() ) {
957 result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &hog_pid );
958 if ( result != noErr ) {
959 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
960 errorText_ = errorStream_.str();
966 // Get the stream ID(s) so we can set the stream format.
967 AudioStreamID streamIDs[ nStreams ];
968 dataSize = nStreams * sizeof( AudioStreamID );
969 property.mSelector = kAudioDevicePropertyStreams;
970 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &streamIDs );
972 if ( result != noErr ) {
973 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream ID(s) for device (" << device << ").";
974 errorText_ = errorStream_.str();
978 // Now set the stream format for each stream. Also, check the
979 // physical format of the device and change that if necessary.
980 AudioStreamBasicDescription description;
981 dataSize = sizeof( AudioStreamBasicDescription );
984 for ( UInt32 i=0; i<streamCount; i++ ) {
986 property.mSelector = kAudioStreamPropertyVirtualFormat;
987 result = AudioObjectGetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, &dataSize, &description );
989 if ( result != noErr ) {
990 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
991 errorText_ = errorStream_.str();
995 // Set the sample rate and data format id. However, only make the
996 // change if the sample rate is not within 1.0 of the desired
997 // rate and the format is not linear pcm.
998 updateFormat = false;
999 if ( fabs( description.mSampleRate - (double)sampleRate ) > 1.0 ) {
1000 description.mSampleRate = (double) sampleRate;
1001 updateFormat = true;
1004 if ( description.mFormatID != kAudioFormatLinearPCM ) {
1005 description.mFormatID = kAudioFormatLinearPCM;
1006 updateFormat = true;
1009 if ( updateFormat ) {
1010 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &description );
1011 if ( result != noErr ) {
1012 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
1013 errorText_ = errorStream_.str();
1018 // Now check the physical format.
1019 property.mSelector = kAudioStreamPropertyPhysicalFormat;
1020 result = AudioObjectGetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, &dataSize, &description );
1021 if ( result != noErr ) {
1022 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
1023 errorText_ = errorStream_.str();
1027 if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 24 ) {
1028 description.mFormatID = kAudioFormatLinearPCM;
1029 AudioStreamBasicDescription testDescription = description;
1030 unsigned long formatFlags;
1032 // We'll try higher bit rates first and then work our way down.
1033 testDescription.mBitsPerChannel = 32;
1034 testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
1035 testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
1036 formatFlags = description.mFormatFlags | kLinearPCMFormatFlagIsFloat & ~kLinearPCMFormatFlagIsSignedInteger;
1037 testDescription.mFormatFlags = formatFlags;
1038 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &testDescription );
1039 if ( result == noErr ) continue;
1041 testDescription = description;
1042 testDescription.mBitsPerChannel = 32;
1043 testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
1044 testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
1045 formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger) & ~kLinearPCMFormatFlagIsFloat;
1046 testDescription.mFormatFlags = formatFlags;
1047 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &testDescription );
1048 if ( result == noErr ) continue;
1050 testDescription = description;
1051 testDescription.mBitsPerChannel = 24;
1052 testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
1053 testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
1054 testDescription.mFormatFlags = formatFlags;
1055 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &testDescription );
1056 if ( result == noErr ) continue;
1058 testDescription = description;
1059 testDescription.mBitsPerChannel = 16;
1060 testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
1061 testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
1062 testDescription.mFormatFlags = formatFlags;
1063 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &testDescription );
1064 if ( result == noErr ) continue;
1066 testDescription = description;
1067 testDescription.mBitsPerChannel = 8;
1068 testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
1069 testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
1070 testDescription.mFormatFlags = formatFlags;
1071 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &testDescription );
1072 if ( result != noErr ) {
1073 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
1074 errorText_ = errorStream_.str();
1081 // Get the stream latency. There can be latency in both the device
1082 // and the stream. First, attempt to get the device latency on the
1083 // master channel or the first open channel. Errors that might
1084 // occur here are not deemed critical.
1087 dataSize = sizeof( UInt32 );
1088 property.mSelector = kAudioDevicePropertyLatency;
1089 if ( AudioObjectHasProperty( id, &property ) == true ) {
1090 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &latency );
1091 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
1093 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
1094 errorText_ = errorStream_.str();
1095 error( RtError::WARNING );
1099 // Now try to get the stream latency. For multiple streams, I assume the
1100 // latency is equal for each.
1101 result = AudioObjectGetPropertyData( streamIDs[firstStream], &property, 0, NULL, &dataSize, &latency );
1102 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] += latency;
1104 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream latency for device (" << device << ").";
1105 errorText_ = errorStream_.str();
1106 error( RtError::WARNING );
1109 // Byte-swapping: According to AudioHardware.h, the stream data will
1110 // always be presented in native-endian format, so we should never
1111 // need to byte swap.
1112 stream_.doByteSwap[mode] = false;
1114 // From the CoreAudio documentation, PCM data must be supplied as
1116 stream_.userFormat = format;
1117 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1119 if ( streamCount == 1 )
1120 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
1121 else // multiple streams
1122 stream_.nDeviceChannels[mode] = channels;
1123 stream_.nUserChannels[mode] = channels;
1124 stream_.channelOffset[mode] = channelOffset; // offset within a CoreAudio stream
1125 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1126 else stream_.userInterleaved = true;
1127 stream_.deviceInterleaved[mode] = true;
1128 if ( monoMode == true ) stream_.deviceInterleaved[mode] = false;
1130 // Set flags for buffer conversion.
1131 stream_.doConvertBuffer[mode] = false;
1132 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1133 stream_.doConvertBuffer[mode] = true;
1134 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
1135 stream_.doConvertBuffer[mode] = true;
1136 if ( streamCount == 1 ) {
1137 if ( stream_.nUserChannels[mode] > 1 &&
1138 stream_.userInterleaved != stream_.deviceInterleaved[mode] )
1139 stream_.doConvertBuffer[mode] = true;
1141 else if ( monoMode && stream_.userInterleaved )
1142 stream_.doConvertBuffer[mode] = true;
1144 // Allocate our CoreHandle structure for the stream.
1145 CoreHandle *handle = 0;
1146 if ( stream_.apiHandle == 0 ) {
1148 handle = new CoreHandle;
1150 catch ( std::bad_alloc& ) {
1151 errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
1155 if ( pthread_cond_init( &handle->condition, NULL ) ) {
1156 errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
1159 stream_.apiHandle = (void *) handle;
1162 handle = (CoreHandle *) stream_.apiHandle;
1163 handle->iStream[mode] = firstStream;
1164 handle->nStreams[mode] = streamCount;
1165 handle->id[mode] = id;
1167 // Allocate necessary internal buffers.
1168 unsigned long bufferBytes;
1169 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1170 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1171 if ( stream_.userBuffer[mode] == NULL ) {
1172 errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
1176 // If possible, we will make use of the CoreAudio stream buffers as
1177 // "device buffers". However, we can't do this if using multiple
1179 if ( stream_.doConvertBuffer[mode] && handle->nStreams[mode] > 1 ) {
1181 bool makeBuffer = true;
1182 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
1183 if ( mode == INPUT ) {
1184 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1185 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1186 if ( bufferBytes <= bytesOut ) makeBuffer = false;
1191 bufferBytes *= *bufferSize;
1192 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1193 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1194 if ( stream_.deviceBuffer == NULL ) {
1195 errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
1201 stream_.sampleRate = sampleRate;
1202 stream_.device[mode] = device;
1203 stream_.state = STREAM_STOPPED;
1204 stream_.callbackInfo.object = (void *) this;
1206 // Setup the buffer conversion information structure.
1207 if ( stream_.doConvertBuffer[mode] ) {
1208 if ( streamCount > 1 ) setConvertInfo( mode, 0 );
1209 else setConvertInfo( mode, channelOffset );
1212 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
1213 // Only one callback procedure per device.
1214 stream_.mode = DUPLEX;
1216 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1217 result = AudioDeviceCreateIOProcID( id, callbackHandler, (void *) &stream_.callbackInfo, &handle->procId[mode] );
1219 // deprecated in favor of AudioDeviceCreateIOProcID()
1220 result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
1222 if ( result != noErr ) {
1223 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
1224 errorText_ = errorStream_.str();
1227 if ( stream_.mode == OUTPUT && mode == INPUT )
1228 stream_.mode = DUPLEX;
1230 stream_.mode = mode;
1233 // Setup the device property listener for over/underload.
1234 property.mSelector = kAudioDeviceProcessorOverload;
1235 result = AudioObjectAddPropertyListener( id, &property, deviceListener, (void *) handle );
1241 pthread_cond_destroy( &handle->condition );
1243 stream_.apiHandle = 0;
1246 for ( int i=0; i<2; i++ ) {
1247 if ( stream_.userBuffer[i] ) {
1248 free( stream_.userBuffer[i] );
1249 stream_.userBuffer[i] = 0;
1253 if ( stream_.deviceBuffer ) {
1254 free( stream_.deviceBuffer );
1255 stream_.deviceBuffer = 0;
// Close the open CoreAudio stream: stop any running IOProc, detach it
// from the output and/or input device, free the user and device
// buffers, and tear down the CoreHandle.  Warns (does not throw) if
// no stream is open.
1261 void RtApiCore :: closeStream( void )
1263 if ( stream_.state == STREAM_CLOSED ) {
1264 errorText_ = "RtApiCore::closeStream(): no open stream to close!";
1265 error( RtError::WARNING );
1269 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
// Output (or duplex) side: stop and remove the IOProc on device 0.
1270 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1271 if ( stream_.state == STREAM_RUNNING )
1272 AudioDeviceStop( handle->id[0], callbackHandler );
// On 10.5+ the IOProc was registered via AudioDeviceCreateIOProcID,
// so it must be destroyed via the matching procId.
1273 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1274 AudioDeviceDestroyIOProcID( handle->id[0], handle->procId[0] );
1276 // deprecated in favor of AudioDeviceDestroyIOProcID()
1277 AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
// Input side: only a separate teardown when the input device differs
// from the output device (duplex on one device shares the IOProc).
1281 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1282 if ( stream_.state == STREAM_RUNNING )
1283 AudioDeviceStop( handle->id[1], callbackHandler );
1284 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1285 AudioDeviceDestroyIOProcID( handle->id[1], handle->procId[1] );
1287 // deprecated in favor of AudioDeviceDestroyIOProcID()
1288 AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
// Release the per-direction user buffers (index 0 = output, 1 = input).
1292 for ( int i=0; i<2; i++ ) {
1293 if ( stream_.userBuffer[i] ) {
1294 free( stream_.userBuffer[i] );
1295 stream_.userBuffer[i] = 0;
// Release the shared internal "device" conversion buffer, if any.
1299 if ( stream_.deviceBuffer ) {
1300 free( stream_.deviceBuffer );
1301 stream_.deviceBuffer = 0;
1304 // Destroy pthread condition variable.
1305 pthread_cond_destroy( &handle->condition );
1307 stream_.apiHandle = 0;
// Reset the stream bookkeeping to its pristine state.
1309 stream_.mode = UNINITIALIZED;
1310 stream_.state = STREAM_CLOSED;
// Start the CoreAudio IOProc(s) for the open stream and mark it
// RUNNING.  Warns if already running; raises SYSTEM_ERROR (after
// unlocking) if a device fails to start.
1313 void RtApiCore :: startStream( void )
1316 if ( stream_.state == STREAM_RUNNING ) {
1317 errorText_ = "RtApiCore::startStream(): the stream is already running!";
1318 error( RtError::WARNING );
// Serialize start/stop against the audio callback.
1322 MUTEX_LOCK( &stream_.mutex );
1324 OSStatus result = noErr;
1325 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
// Start the callback procedure on the output device first.
1326 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1328 result = AudioDeviceStart( handle->id[0], callbackHandler );
1329 if ( result != noErr ) {
1330 errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
1331 errorText_ = errorStream_.str();
// Separately start the input device when it is distinct from the
// output device (single-device duplex reuses the same IOProc).
1336 if ( stream_.mode == INPUT ||
1337 ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1339 result = AudioDeviceStart( handle->id[1], callbackHandler );
1340 if ( result != noErr ) {
// NOTE(review): unlike the output-side message above, this one omits
// getErrorCode( result ) — presumably an oversight; confirm upstream.
1341 errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
1342 errorText_ = errorStream_.str();
// Reset the drain bookkeeping used by stopStream()/callbackEvent().
1347 handle->drainCounter = 0;
1348 handle->internalDrain = false;
1349 stream_.state = STREAM_RUNNING;
1352 MUTEX_UNLOCK( &stream_.mutex );
// Report any accumulated failure only after releasing the mutex.
1354 if ( result == noErr ) return;
1355 error( RtError::SYSTEM_ERROR );
// Stop the stream after letting queued output drain.  For an output
// (or duplex) stream this blocks on the handle's condition variable
// until the callback signals that draining is complete, then stops
// the device IOProc(s).  Warns if already stopped.
1358 void RtApiCore :: stopStream( void )
1361 if ( stream_.state == STREAM_STOPPED ) {
1362 errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
1363 error( RtError::WARNING );
1367 MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: the callback may have stopped the stream
// while we were waiting to acquire the mutex.
1369 if ( stream_.state == STREAM_STOPPED ) {
1370 MUTEX_UNLOCK( &stream_.mutex );
1374 OSStatus result = noErr;
1375 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1376 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Request a drain (drainCounter = 1) and wait for callbackEvent() to
// signal once the output has been flushed with silence.
1378 if ( handle->drainCounter == 0 ) {
1379 handle->drainCounter = 1;
1380 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
// Drop the lock around AudioDeviceStop so the callback can complete.
1383 MUTEX_UNLOCK( &stream_.mutex );
1384 result = AudioDeviceStop( handle->id[0], callbackHandler );
1385 MUTEX_LOCK( &stream_.mutex );
1386 if ( result != noErr ) {
1387 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
1388 errorText_ = errorStream_.str();
// Stop the separate input device, if there is one.
1393 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1395 result = AudioDeviceStop( handle->id[1], callbackHandler );
1396 if ( result != noErr ) {
1397 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
1398 errorText_ = errorStream_.str();
1403 stream_.state = STREAM_STOPPED;
1406 MUTEX_UNLOCK( &stream_.mutex );
// Report any accumulated failure only after releasing the mutex.
1408 if ( result == noErr ) return;
1409 error( RtError::SYSTEM_ERROR );
// Stop the stream immediately, without draining queued output.
// Setting drainCounter to 1 makes the subsequent stop path skip the
// drain wait.  Warns if the stream is already stopped.
1412 void RtApiCore :: abortStream( void )
1415 if ( stream_.state == STREAM_STOPPED ) {
1416 errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
1417 error( RtError::WARNING );
1421 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
// Mark the drain as already in progress so stopStream() won't block.
1422 handle->drainCounter = 1;
// Per-buffer CoreAudio IOProc handler.  Invokes the user callback to
// obtain fresh output data, shuffles samples between the user buffers
// and the CoreAudio AudioBufferLists (handling format conversion,
// multiple hardware streams, channel offsets and non-interleaved
// "mono mode"), and manages the drain handshake with stopStream().
// deviceId tells us whether this invocation is for the output or the
// input device of a two-device duplex stream.
1427 bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
1428 const AudioBufferList *inBufferList,
1429 const AudioBufferList *outBufferList )
1431 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
1432 if ( stream_.state == STREAM_CLOSED ) {
1433 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
1434 error( RtError::WARNING );
1438 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1439 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1441 // Check if we were draining the stream and signal is finished.
1442 if ( handle->drainCounter > 3 ) {
// Only an externally requested stop (stopStream) is blocked waiting
// on the condition variable; an internal drain is not.
1443 if ( handle->internalDrain == false )
1444 pthread_cond_signal( &handle->condition );
1450 MUTEX_LOCK( &stream_.mutex );
1452 // The state might change while waiting on a mutex.
1453 if ( stream_.state == STREAM_STOPPED ) {
1454 MUTEX_UNLOCK( &stream_.mutex );
1458 AudioDeviceID outputDevice = handle->id[0];
1460 // Invoke user callback to get fresh output data UNLESS we are
1461 // draining stream or duplex mode AND the input/output devices are
1462 // different AND this function is called for the input device.
1463 if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
1464 RtAudioCallback callback = (RtAudioCallback) info->callback;
1465 double streamTime = getStreamTime();
1466 RtAudioStreamStatus status = 0;
// Report and clear any under/overflow flags set by deviceListener.
1467 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
1468 status |= RTAUDIO_OUTPUT_UNDERFLOW;
1469 handle->xrun[0] = false;
1471 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
1472 status |= RTAUDIO_INPUT_OVERFLOW;
1473 handle->xrun[1] = false;
// The callback's return value drives the drain state machine:
// 0 = keep running, 1 = drain then stop, 2 = abort immediately.
1475 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
1476 stream_.bufferSize, streamTime, status, info->userData );
1477 if ( handle->drainCounter == 2 ) {
1478 MUTEX_UNLOCK( &stream_.mutex );
1482 else if ( handle->drainCounter == 1 )
1483 handle->internalDrain = true;
// ---- Output side: move user data into the CoreAudio buffers. ----
1486 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
1488 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
1490 if ( handle->nStreams[0] == 1 ) {
1491 memset( outBufferList->mBuffers[handle->iStream[0]].mData,
1493 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1495 else { // fill multiple streams with zeros
1496 for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
1497 memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1499 outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
1503 else if ( handle->nStreams[0] == 1 ) {
1504 if ( stream_.doConvertBuffer[0] ) { // convert directly to CoreAudio stream buffer
1505 convertBuffer( (char *) outBufferList->mBuffers[handle->iStream[0]].mData,
1506 stream_.userBuffer[0], stream_.convertInfo[0] );
1508 else { // copy from user buffer
1509 memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
1510 stream_.userBuffer[0],
1511 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1514 else { // fill multiple streams
1515 Float32 *inBuffer = (Float32 *) stream_.userBuffer[0];
1516 if ( stream_.doConvertBuffer[0] ) {
1517 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
1518 inBuffer = (Float32 *) stream_.deviceBuffer;
// In mono mode each (non-interleaved) channel maps to its own stream.
1521 if ( stream_.deviceInterleaved[0] == false ) { // mono mode
1522 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1523 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
1524 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1525 (void *)&inBuffer[i*stream_.bufferSize], bufferBytes );
1528 else { // fill multiple multi-channel streams with interleaved data
1529 UInt32 streamChannels, channelsLeft, inJump, outJump, inOffset;
1532 bool inInterleaved = ( stream_.userInterleaved ) ? true : false;
1533 UInt32 inChannels = stream_.nUserChannels[0];
1534 if ( stream_.doConvertBuffer[0] ) {
1535 inInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
1536 inChannels = stream_.nDeviceChannels[0];
// inOffset is the per-frame stride within a channel of the source.
1539 if ( inInterleaved ) inOffset = 1;
1540 else inOffset = stream_.bufferSize;
1542 channelsLeft = inChannels;
1543 for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
1545 out = (Float32 *) outBufferList->mBuffers[handle->iStream[0]+i].mData;
1546 streamChannels = outBufferList->mBuffers[handle->iStream[0]+i].mNumberChannels;
1549 // Account for possible channel offset in first stream
1550 if ( i == 0 && stream_.channelOffset[0] > 0 ) {
1551 streamChannels -= stream_.channelOffset[0];
1552 outJump = stream_.channelOffset[0];
1556 // Account for possible unfilled channels at end of the last stream
1557 if ( streamChannels > channelsLeft ) {
1558 outJump = streamChannels - channelsLeft;
1559 streamChannels = channelsLeft;
1562 // Determine input buffer offsets and skips
1563 if ( inInterleaved ) {
1564 inJump = inChannels;
1565 in += inChannels - channelsLeft;
1569 in += (inChannels - channelsLeft) * inOffset;
1572 for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
1573 for ( unsigned int j=0; j<streamChannels; j++ ) {
1574 *out++ = in[j*inOffset];
1579 channelsLeft -= streamChannels;
// While draining, count buffers until the >3 threshold above fires.
1584 if ( handle->drainCounter ) {
1585 handle->drainCounter++;
// ---- Input side: move CoreAudio capture data into user buffers. ----
1590 AudioDeviceID inputDevice;
1591 inputDevice = handle->id[1];
1592 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
1594 if ( handle->nStreams[1] == 1 ) {
1595 if ( stream_.doConvertBuffer[1] ) { // convert directly from CoreAudio stream buffer
1596 convertBuffer( stream_.userBuffer[1],
1597 (char *) inBufferList->mBuffers[handle->iStream[1]].mData,
1598 stream_.convertInfo[1] );
1600 else { // copy to user buffer
1601 memcpy( stream_.userBuffer[1],
1602 inBufferList->mBuffers[handle->iStream[1]].mData,
1603 inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
1606 else { // read from multiple streams
1607 Float32 *outBuffer = (Float32 *) stream_.userBuffer[1];
1608 if ( stream_.doConvertBuffer[1] ) outBuffer = (Float32 *) stream_.deviceBuffer;
1610 if ( stream_.deviceInterleaved[1] == false ) { // mono mode
1611 UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
1612 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
1613 memcpy( (void *)&outBuffer[i*stream_.bufferSize],
1614 inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
1617 else { // read from multiple multi-channel streams
1618 UInt32 streamChannels, channelsLeft, inJump, outJump, outOffset;
1621 bool outInterleaved = ( stream_.userInterleaved ) ? true : false;
1622 UInt32 outChannels = stream_.nUserChannels[1];
1623 if ( stream_.doConvertBuffer[1] ) {
1624 outInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
1625 outChannels = stream_.nDeviceChannels[1];
// outOffset is the per-frame stride within a channel of the target.
1628 if ( outInterleaved ) outOffset = 1;
1629 else outOffset = stream_.bufferSize;
1631 channelsLeft = outChannels;
1632 for ( unsigned int i=0; i<handle->nStreams[1]; i++ ) {
1634 in = (Float32 *) inBufferList->mBuffers[handle->iStream[1]+i].mData;
1635 streamChannels = inBufferList->mBuffers[handle->iStream[1]+i].mNumberChannels;
1638 // Account for possible channel offset in first stream
1639 if ( i == 0 && stream_.channelOffset[1] > 0 ) {
1640 streamChannels -= stream_.channelOffset[1];
1641 inJump = stream_.channelOffset[1];
1645 // Account for possible unread channels at end of the last stream
1646 if ( streamChannels > channelsLeft ) {
1647 inJump = streamChannels - channelsLeft;
1648 streamChannels = channelsLeft;
1651 // Determine output buffer offsets and skips
1652 if ( outInterleaved ) {
1653 outJump = outChannels;
1654 out += outChannels - channelsLeft;
1658 out += (outChannels - channelsLeft) * outOffset;
1661 for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
1662 for ( unsigned int j=0; j<streamChannels; j++ ) {
1663 out[j*outOffset] = *in++;
1668 channelsLeft -= streamChannels;
1672 if ( stream_.doConvertBuffer[1] ) { // convert from our internal "device" buffer
1673 convertBuffer( stream_.userBuffer[1],
1674 stream_.deviceBuffer,
1675 stream_.convertInfo[1] );
1681 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream-time clock by one buffer.
1683 RtApi::tickStreamTime();
1687 const char* RtApiCore :: getErrorCode( OSStatus code )
1691 case kAudioHardwareNotRunningError:
1692 return "kAudioHardwareNotRunningError";
1694 case kAudioHardwareUnspecifiedError:
1695 return "kAudioHardwareUnspecifiedError";
1697 case kAudioHardwareUnknownPropertyError:
1698 return "kAudioHardwareUnknownPropertyError";
1700 case kAudioHardwareBadPropertySizeError:
1701 return "kAudioHardwareBadPropertySizeError";
1703 case kAudioHardwareIllegalOperationError:
1704 return "kAudioHardwareIllegalOperationError";
1706 case kAudioHardwareBadObjectError:
1707 return "kAudioHardwareBadObjectError";
1709 case kAudioHardwareBadDeviceError:
1710 return "kAudioHardwareBadDeviceError";
1712 case kAudioHardwareBadStreamError:
1713 return "kAudioHardwareBadStreamError";
1715 case kAudioHardwareUnsupportedOperationError:
1716 return "kAudioHardwareUnsupportedOperationError";
1718 case kAudioDeviceUnsupportedFormatError:
1719 return "kAudioDeviceUnsupportedFormatError";
1721 case kAudioDevicePermissionsError:
1722 return "kAudioDevicePermissionsError";
1725 return "CoreAudio unknown error";
1729 //******************** End of __MACOSX_CORE__ *********************//
1732 #if defined(__UNIX_JACK__)
1734 // JACK is a low-latency audio server, originally written for the
1735 // GNU/Linux operating system and now also ported to OS-X. It can
1736 // connect a number of different applications to an audio device, as
1737 // well as allowing them to share audio between themselves.
1739 // When using JACK with RtAudio, "devices" refer to JACK clients that
1740 // have ports connected to the server. The JACK server is typically
1741 // started in a terminal as follows:
1743 //    jackd -d alsa -d hw:0
1745 // or through an interface program such as qjackctl. Many of the
1746 // parameters normally set for a stream are fixed by the JACK server
1747 // and can be specified when the JACK server is started. For example:
1750 //    jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
1752 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
1753 // frames, and number of buffers = 4. Once the server is running, it
1754 // is not possible to override these values. If the values are not
1755 // specified in the command-line, the JACK server uses default values.
1757 // The JACK server does not have to be running when an instance of
1758 // RtApiJack is created, though the function getDeviceCount() will
1759 // report 0 devices found until JACK has been started. When no
1760 // devices are available (i.e., the JACK server is not running), a
1761 // stream cannot be opened.
1763 #include <jack/jack.h>
1767 // A structure to hold various information related to the Jack API
// implementation (per-stream state kept behind stream_.apiHandle).
// NOTE(review): the struct's opening declaration is not visible in
// this chunk; members below belong to the Jack handle structure.
// Our connection to the Jack server.
1770 jack_client_t *client;
// Registered ports, one array per direction (0 = output, 1 = input).
1771 jack_port_t **ports[2];
// Client/device name for each direction.
1772 std::string deviceName[2];
// Used by stopStream() to wait for the drain to finish.
1774 pthread_cond_t condition;
1775 int drainCounter; // Tracks callback counts when draining
1776 bool internalDrain; // Indicates if stop is initiated from callback or not.
// Default-construct with no client, no ports, and xrun flags cleared.
1780 :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
1782 ThreadHandle threadId;
1783 void jackSilentError( const char * ) {};
// Construct the Jack API object.  No per-instance setup is needed;
// in non-debug builds we suppress Jack's stderr error chatter.
1785 RtApiJack :: RtApiJack()
1787 // Nothing to do here.
1788 #if !defined(__RTAUDIO_DEBUG__)
1789 // Turn off Jack's internal error reporting.
1790 jack_set_error_function( &jackSilentError );
1794 RtApiJack :: ~RtApiJack()
1796 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Return the number of Jack "devices" (distinct client-name prefixes
// among all registered ports).  Returns 0 when no Jack server is
// reachable.
1799 unsigned int RtApiJack :: getDeviceCount( void )
1801 // See if we can become a jack client.
1802 jack_options_t options = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption;
1803 jack_status_t *status = NULL;
1804 jack_client_t *client = jack_client_open( "RtApiJackCount", options, status );
1805 if ( client == 0 ) return 0;
1808 std::string port, previousPort;
1809 unsigned int nChannels = 0, nDevices = 0;
// Enumerate every port known to the server (no name/type filter).
1810 ports = jack_get_ports( client, NULL, NULL, 0 );
1812 // Parse the port names up to the first colon (:).
1815 port = (char *) ports[ nChannels ];
1816 iColon = port.find(":");
1817 if ( iColon != std::string::npos ) {
1818 port = port.substr( 0, iColon + 1 );
// A new client-name prefix means a new "device".
1819 if ( port != previousPort ) {
1821 previousPort = port;
1824 } while ( ports[++nChannels] );
// Done counting; release our temporary client connection.
1828 jack_client_close( client );
// Probe Jack "device" number `device`: resolve its client-name, the
// server sample rate, and its input/output channel counts.  Warns and
// returns an unprobed DeviceInfo when the server is unreachable;
// raises INVALID_USE for an out-of-range device index.
1832 RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
1834 RtAudio::DeviceInfo info;
1835 info.probed = false;
1837 jack_options_t options = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption
1838 jack_status_t *status = NULL;
1839 jack_client_t *client = jack_client_open( "RtApiJackInfo", options, status );
1840 if ( client == 0 ) {
1841 errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
1842 error( RtError::WARNING );
1847 std::string port, previousPort;
1848 unsigned int nPorts = 0, nDevices = 0;
// Enumerate every port; device N is the Nth distinct name prefix.
1849 ports = jack_get_ports( client, NULL, NULL, 0 );
1851 // Parse the port names up to the first colon (:).
1854 port = (char *) ports[ nPorts ];
1855 iColon = port.find(":");
1856 if ( iColon != std::string::npos ) {
1857 port = port.substr( 0, iColon );
1858 if ( port != previousPort ) {
1859 if ( nDevices == device ) info.name = port;
1861 previousPort = port;
1864 } while ( ports[++nPorts] );
1868 if ( device >= nDevices ) {
1869 errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
1870 error( RtError::INVALID_USE );
1873 // Get the current jack server sample rate.
1874 info.sampleRates.clear();
1875 info.sampleRates.push_back( jack_get_sample_rate( client ) );
1877 // Count the available ports containing the client name as device
1878 // channels. Jack "input ports" equal RtAudio output channels.
1879 unsigned int nChannels = 0;
1880 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );
1882 while ( ports[ nChannels ] ) nChannels++;
1884 info.outputChannels = nChannels;
1887 // Jack "output ports" equal RtAudio input channels.
1889 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );
1891 while ( ports[ nChannels ] ) nChannels++;
1893 info.inputChannels = nChannels;
// A device with no ports in either direction is unusable.
1896 if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
1897 jack_client_close(client);
1898 errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
1899 error( RtError::WARNING );
1903 // If device opens for both playback and capture, we determine the channels.
1904 if ( info.outputChannels > 0 && info.inputChannels > 0 )
1905 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
1907 // Jack always uses 32-bit floats.
1908 info.nativeFormats = RTAUDIO_FLOAT32;
1910 // Jack doesn't provide default devices so we'll use the first available one.
1911 if ( device == 0 && info.outputChannels > 0 )
1912 info.isDefaultOutput = true;
1913 if ( device == 0 && info.inputChannels > 0 )
1914 info.isDefaultInput = true;
1916 jack_client_close(client);
1921 int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
1923 CallbackInfo *info = (CallbackInfo *) infoPointer;
1925 RtApiJack *object = (RtApiJack *) info->object;
1926 if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;
1931 // This function will be called by a spawned thread when the Jack
1932 // server signals that it is shutting down. It is necessary to handle
1933 // it this way because the jackShutdown() function must return before
1934 // the jack_deactivate() function (in closeStream()) will return.
1935 extern "C" void *jackCloseStream( void *ptr )
1937 CallbackInfo *info = (CallbackInfo *) ptr;
1938 RtApiJack *object = (RtApiJack *) info->object;
1940 object->closeStream();
1942 pthread_exit( NULL );
1944 void jackShutdown( void *infoPointer )
1946 CallbackInfo *info = (CallbackInfo *) infoPointer;
1947 RtApiJack *object = (RtApiJack *) info->object;
1949 // Check current stream state. If stopped, then we'll assume this
1950 // was called as a result of a call to RtApiJack::stopStream (the
1951 // deactivation of a client handle causes this function to be called).
1952 // If not, we'll assume the Jack server is shutting down or some
1953 // other problem occurred and we should close the stream.
1954 if ( object->isStreamRunning() == false ) return;
1956 pthread_create( &threadId, NULL, jackCloseStream, info );
1957 std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
1960 int jackXrun( void *infoPointer )
1962 JackHandle *handle = (JackHandle *) infoPointer;
1964 if ( handle->ports[0] ) handle->xrun[0] = true;
1965 if ( handle->ports[1] ) handle->xrun[1] = true;
1970 bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
1971 unsigned int firstChannel, unsigned int sampleRate,
1972 RtAudioFormat format, unsigned int *bufferSize,
1973 RtAudio::StreamOptions *options )
1975 JackHandle *handle = (JackHandle *) stream_.apiHandle;
1977 // Look for jack server and try to become a client (only do once per stream).
1978 jack_client_t *client = 0;
1979 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
1980 jack_options_t jackoptions = (jack_options_t) ( JackNoStartServer | JackUseExactName ); //JackNullOption;
1981 jack_status_t *status = NULL;
1982 if ( options && !options->streamName.empty() )
1983 client = jack_client_open( options->streamName.c_str(), jackoptions, status );
1985 client = jack_client_open( "RtApiJack", jackoptions, status );
1986 if ( client == 0 ) {
1987 errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
1988 error( RtError::WARNING );
1993 // The handle must have been created on an earlier pass.
1994 client = handle->client;
1998 std::string port, previousPort, deviceName;
1999 unsigned int nPorts = 0, nDevices = 0;
2000 ports = jack_get_ports( client, NULL, NULL, 0 );
2002 // Parse the port names up to the first colon (:).
2005 port = (char *) ports[ nPorts ];
2006 iColon = port.find(":");
2007 if ( iColon != std::string::npos ) {
2008 port = port.substr( 0, iColon );
2009 if ( port != previousPort ) {
2010 if ( nDevices == device ) deviceName = port;
2012 previousPort = port;
2015 } while ( ports[++nPorts] );
2019 if ( device >= nDevices ) {
2020 errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
2024 // Count the available ports containing the client name as device
2025 // channels. Jack "input ports" equal RtAudio output channels.
2026 unsigned int nChannels = 0;
2027 unsigned long flag = JackPortIsInput;
2028 if ( mode == INPUT ) flag = JackPortIsOutput;
2029 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
2031 while ( ports[ nChannels ] ) nChannels++;
2035 // Compare the jack ports for specified client to the requested number of channels.
2036 if ( nChannels < (channels + firstChannel) ) {
2037 errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
2038 errorText_ = errorStream_.str();
2042 // Check the jack server sample rate.
2043 unsigned int jackRate = jack_get_sample_rate( client );
2044 if ( sampleRate != jackRate ) {
2045 jack_client_close( client );
2046 errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
2047 errorText_ = errorStream_.str();
2050 stream_.sampleRate = jackRate;
2052 // Get the latency of the JACK port.
2053 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
2054 if ( ports[ firstChannel ] )
2055 stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
2058 // The jack server always uses 32-bit floating-point data.
2059 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2060 stream_.userFormat = format;
2062 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2063 else stream_.userInterleaved = true;
2065 // Jack always uses non-interleaved buffers.
2066 stream_.deviceInterleaved[mode] = false;
2068 // Jack always provides host byte-ordered data.
2069 stream_.doByteSwap[mode] = false;
2071 // Get the buffer size. The buffer size and number of buffers
2072 // (periods) is set when the jack server is started.
2073 stream_.bufferSize = (int) jack_get_buffer_size( client );
2074 *bufferSize = stream_.bufferSize;
2076 stream_.nDeviceChannels[mode] = channels;
2077 stream_.nUserChannels[mode] = channels;
2079 // Set flags for buffer conversion.
2080 stream_.doConvertBuffer[mode] = false;
2081 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2082 stream_.doConvertBuffer[mode] = true;
2083 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2084 stream_.nUserChannels[mode] > 1 )
2085 stream_.doConvertBuffer[mode] = true;
2087 // Allocate our JackHandle structure for the stream.
2088 if ( handle == 0 ) {
2090 handle = new JackHandle;
2092 catch ( std::bad_alloc& ) {
2093 errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
2097 if ( pthread_cond_init(&handle->condition, NULL) ) {
2098 errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
2101 stream_.apiHandle = (void *) handle;
2102 handle->client = client;
2104 handle->deviceName[mode] = deviceName;
2106 // Allocate necessary internal buffers.
2107 unsigned long bufferBytes;
2108 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2109 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2110 if ( stream_.userBuffer[mode] == NULL ) {
2111 errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
2115 if ( stream_.doConvertBuffer[mode] ) {
2117 bool makeBuffer = true;
2118 if ( mode == OUTPUT )
2119 bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2120 else { // mode == INPUT
2121 bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
2122 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2123 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2124 if ( bufferBytes < bytesOut ) makeBuffer = false;
2129 bufferBytes *= *bufferSize;
2130 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2131 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2132 if ( stream_.deviceBuffer == NULL ) {
2133 errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
2139 // Allocate memory for the Jack ports (channels) identifiers.
2140 handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
2141 if ( handle->ports[mode] == NULL ) {
2142 errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
2146 stream_.device[mode] = device;
2147 stream_.channelOffset[mode] = firstChannel;
2148 stream_.state = STREAM_STOPPED;
2149 stream_.callbackInfo.object = (void *) this;
2151 if ( stream_.mode == OUTPUT && mode == INPUT )
2152 // We had already set up the stream for output.
2153 stream_.mode = DUPLEX;
2155 stream_.mode = mode;
2156 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
2157 jack_set_xrun_callback( handle->client, jackXrun, (void *) &handle );
2158 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
2161 // Register our ports.
2163 if ( mode == OUTPUT ) {
2164 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2165 snprintf( label, 64, "outport %d", i );
2166 handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
2167 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
2171 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2172 snprintf( label, 64, "inport %d", i );
2173 handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
2174 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
2178 // Setup the buffer conversion information structure. We don't use
2179 // buffers to do channel offsets, so we override that parameter
2181 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
2187 pthread_cond_destroy( &handle->condition );
2188 jack_client_close( handle->client );
2190 if ( handle->ports[0] ) free( handle->ports[0] );
2191 if ( handle->ports[1] ) free( handle->ports[1] );
2194 stream_.apiHandle = 0;
2197 for ( int i=0; i<2; i++ ) {
2198 if ( stream_.userBuffer[i] ) {
2199 free( stream_.userBuffer[i] );
2200 stream_.userBuffer[i] = 0;
2204 if ( stream_.deviceBuffer ) {
2205 free( stream_.deviceBuffer );
2206 stream_.deviceBuffer = 0;
2212 void RtApiJack :: closeStream( void )
2214 if ( stream_.state == STREAM_CLOSED ) {
2215 errorText_ = "RtApiJack::closeStream(): no open stream to close!";
2216 error( RtError::WARNING );
2220 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2223 if ( stream_.state == STREAM_RUNNING )
2224 jack_deactivate( handle->client );
2226 jack_client_close( handle->client );
2230 if ( handle->ports[0] ) free( handle->ports[0] );
2231 if ( handle->ports[1] ) free( handle->ports[1] );
2232 pthread_cond_destroy( &handle->condition );
2234 stream_.apiHandle = 0;
2237 for ( int i=0; i<2; i++ ) {
2238 if ( stream_.userBuffer[i] ) {
2239 free( stream_.userBuffer[i] );
2240 stream_.userBuffer[i] = 0;
2244 if ( stream_.deviceBuffer ) {
2245 free( stream_.deviceBuffer );
2246 stream_.deviceBuffer = 0;
2249 stream_.mode = UNINITIALIZED;
2250 stream_.state = STREAM_CLOSED;
2253 void RtApiJack :: startStream( void )
2256 if ( stream_.state == STREAM_RUNNING ) {
2257 errorText_ = "RtApiJack::startStream(): the stream is already running!";
2258 error( RtError::WARNING );
2262 MUTEX_LOCK(&stream_.mutex);
2264 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2265 int result = jack_activate( handle->client );
2267 errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
2273 // Get the list of available ports.
2274 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2276 ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
2277 if ( ports == NULL) {
2278 errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
2282 // Now make the port connections. Since RtAudio wasn't designed to
2283 // allow the user to select particular channels of a device, we'll
2284 // just open the first "nChannels" ports with offset.
2285 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2287 if ( ports[ stream_.channelOffset[0] + i ] )
2288 result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
2291 errorText_ = "RtApiJack::startStream(): error connecting output ports!";
2298 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2300 ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );
2301 if ( ports == NULL) {
2302 errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
2306 // Now make the port connections. See note above.
2307 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2309 if ( ports[ stream_.channelOffset[1] + i ] )
2310 result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
2313 errorText_ = "RtApiJack::startStream(): error connecting input ports!";
2320 handle->drainCounter = 0;
2321 handle->internalDrain = false;
2322 stream_.state = STREAM_RUNNING;
2325 MUTEX_UNLOCK(&stream_.mutex);
2327 if ( result == 0 ) return;
2328 error( RtError::SYSTEM_ERROR );
2331 void RtApiJack :: stopStream( void )
2334 if ( stream_.state == STREAM_STOPPED ) {
2335 errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
2336 error( RtError::WARNING );
2340 MUTEX_LOCK( &stream_.mutex );
2342 if ( stream_.state == STREAM_STOPPED ) {
2343 MUTEX_UNLOCK( &stream_.mutex );
2347 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2348 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2350 if ( handle->drainCounter == 0 ) {
2351 handle->drainCounter = 1;
2352 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
2356 jack_deactivate( handle->client );
2357 stream_.state = STREAM_STOPPED;
2359 MUTEX_UNLOCK( &stream_.mutex );
2362 void RtApiJack :: abortStream( void )
2365 if ( stream_.state == STREAM_STOPPED ) {
2366 errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
2367 error( RtError::WARNING );
2371 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2372 handle->drainCounter = 1;
2377 // This function will be called by a spawned thread when the user
2378 // callback function signals that the stream should be stopped or
2379 // aborted. It is necessary to handle it this way because the
2380 // callbackEvent() function must return before the jack_deactivate()
2381 // function will return.
2382 extern "C" void *jackStopStream( void *ptr )
2384 CallbackInfo *info = (CallbackInfo *) ptr;
2385 RtApiJack *object = (RtApiJack *) info->object;
2387 object->stopStream();
2389 pthread_exit( NULL );
2392 bool RtApiJack :: callbackEvent( unsigned long nframes )
2394 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2395 if ( stream_.state == STREAM_CLOSED ) {
2396 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
2397 error( RtError::WARNING );
2400 if ( stream_.bufferSize != nframes ) {
2401 errorText_ = "RtApiCore::callbackEvent(): the JACK buffer size has changed ... cannot process!";
2402 error( RtError::WARNING );
2406 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2407 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2409 // Check if we were draining the stream and signal is finished.
2410 if ( handle->drainCounter > 3 ) {
2411 if ( handle->internalDrain == true ) {
2412 pthread_create( &threadId, NULL, jackStopStream, info );
2415 pthread_cond_signal( &handle->condition );
2419 MUTEX_LOCK( &stream_.mutex );
2421 // The state might change while waiting on a mutex.
2422 if ( stream_.state == STREAM_STOPPED ) {
2423 MUTEX_UNLOCK( &stream_.mutex );
2427 // Invoke user callback first, to get fresh output data.
2428 if ( handle->drainCounter == 0 ) {
2429 RtAudioCallback callback = (RtAudioCallback) info->callback;
2430 double streamTime = getStreamTime();
2431 RtAudioStreamStatus status = 0;
2432 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
2433 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2434 handle->xrun[0] = false;
2436 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
2437 status |= RTAUDIO_INPUT_OVERFLOW;
2438 handle->xrun[1] = false;
2440 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2441 stream_.bufferSize, streamTime, status, info->userData );
2442 if ( handle->drainCounter == 2 ) {
2443 MUTEX_UNLOCK( &stream_.mutex );
2445 pthread_create( &id, NULL, jackStopStream, info );
2448 else if ( handle->drainCounter == 1 )
2449 handle->internalDrain = true;
2452 jack_default_audio_sample_t *jackbuffer;
2453 unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
2454 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2456 if ( handle->drainCounter > 0 ) { // write zeros to the output stream
2458 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2459 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2460 memset( jackbuffer, 0, bufferBytes );
2464 else if ( stream_.doConvertBuffer[0] ) {
2466 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
2468 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2469 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2470 memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
2473 else { // no buffer conversion
2474 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2475 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2476 memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
2480 if ( handle->drainCounter ) {
2481 handle->drainCounter++;
2486 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2488 if ( stream_.doConvertBuffer[1] ) {
2489 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2490 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2491 memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
2493 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
2495 else { // no buffer conversion
2496 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2497 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2498 memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
2504 MUTEX_UNLOCK(&stream_.mutex);
2506 RtApi::tickStreamTime();
2509 //******************** End of __UNIX_JACK__ *********************//
2512 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
2514 // The ASIO API is designed around a callback scheme, so this
2515 // implementation is similar to that used for OS-X CoreAudio and Linux
2516 // Jack. The primary constraint with ASIO is that it only allows
2517 // access to a single driver at a time. Thus, it is not possible to
2518 // have more than one simultaneous RtAudio stream.
2520 // This implementation also requires a number of external ASIO files
2521 // and a few global variables. The ASIO callback scheme does not
2522 // allow for the passing of user data, so we must create a global
2523 // pointer to our callbackInfo structure.
2525 // On unix systems, we make use of a pthread condition variable.
2526 // Since there is no equivalent in Windows, I hacked something based
2527 // on information found in
2528 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
2530 #include "asiosys.h"
2532 #include "iasiothiscallresolver.h"
2533 #include "asiodrivers.h"
2536 AsioDrivers drivers;
2537 ASIOCallbacks asioCallbacks;
2538 ASIODriverInfo driverInfo;
2539 CallbackInfo *asioCallbackInfo;
2543 int drainCounter; // Tracks callback counts when draining
2544 bool internalDrain; // Indicates if stop is initiated from callback or not.
2545 ASIOBufferInfo *bufferInfos;
2549 :drainCounter(0), internalDrain(false), bufferInfos(0) {}
2552 // Function declarations (definitions at end of section)
2553 static const char* getAsioErrorString( ASIOError result );
2554 void sampleRateChanged( ASIOSampleRate sRate );
2555 long asioMessages( long selector, long value, void* message, double* opt );
2557 RtApiAsio :: RtApiAsio()
2559 // ASIO cannot run on a multi-threaded appartment. You can call
2560 // CoInitialize beforehand, but it must be for appartment threading
2561 // (in which case, CoInitilialize will return S_FALSE here).
2562 coInitialized_ = false;
2563 HRESULT hr = CoInitialize( NULL );
2565 errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
2566 error( RtError::WARNING );
2568 coInitialized_ = true;
2570 drivers.removeCurrentDriver();
2571 driverInfo.asioVersion = 2;
2573 // See note in DirectSound implementation about GetDesktopWindow().
2574 driverInfo.sysRef = GetForegroundWindow();
2577 RtApiAsio :: ~RtApiAsio()
2579 if ( stream_.state != STREAM_CLOSED ) closeStream();
2580 if ( coInitialized_ ) CoUninitialize();
2583 unsigned int RtApiAsio :: getDeviceCount( void )
2585 return (unsigned int) drivers.asioGetNumDev();
2588 RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
2590 RtAudio::DeviceInfo info;
2591 info.probed = false;
2594 unsigned int nDevices = getDeviceCount();
2595 if ( nDevices == 0 ) {
2596 errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
2597 error( RtError::INVALID_USE );
2600 if ( device >= nDevices ) {
2601 errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
2602 error( RtError::INVALID_USE );
2605 // If a stream is already open, we cannot probe other devices. Thus, use the saved results.
2606 if ( stream_.state != STREAM_CLOSED ) {
2607 if ( device >= devices_.size() ) {
2608 errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";
2609 error( RtError::WARNING );
2612 return devices_[ device ];
2615 char driverName[32];
2616 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2617 if ( result != ASE_OK ) {
2618 errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
2619 errorText_ = errorStream_.str();
2620 error( RtError::WARNING );
2624 info.name = driverName;
2626 if ( !drivers.loadDriver( driverName ) ) {
2627 errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
2628 errorText_ = errorStream_.str();
2629 error( RtError::WARNING );
2633 result = ASIOInit( &driverInfo );
2634 if ( result != ASE_OK ) {
2635 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2636 errorText_ = errorStream_.str();
2637 error( RtError::WARNING );
2641 // Determine the device channel information.
2642 long inputChannels, outputChannels;
2643 result = ASIOGetChannels( &inputChannels, &outputChannels );
2644 if ( result != ASE_OK ) {
2645 drivers.removeCurrentDriver();
2646 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2647 errorText_ = errorStream_.str();
2648 error( RtError::WARNING );
2652 info.outputChannels = outputChannels;
2653 info.inputChannels = inputChannels;
2654 if ( info.outputChannels > 0 && info.inputChannels > 0 )
2655 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
2657 // Determine the supported sample rates.
2658 info.sampleRates.clear();
2659 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
2660 result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
2661 if ( result == ASE_OK )
2662 info.sampleRates.push_back( SAMPLE_RATES[i] );
2665 // Determine supported data types ... just check first channel and assume rest are the same.
2666 ASIOChannelInfo channelInfo;
2667 channelInfo.channel = 0;
2668 channelInfo.isInput = true;
2669 if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
2670 result = ASIOGetChannelInfo( &channelInfo );
2671 if ( result != ASE_OK ) {
2672 drivers.removeCurrentDriver();
2673 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
2674 errorText_ = errorStream_.str();
2675 error( RtError::WARNING );
2679 info.nativeFormats = 0;
2680 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
2681 info.nativeFormats |= RTAUDIO_SINT16;
2682 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
2683 info.nativeFormats |= RTAUDIO_SINT32;
2684 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
2685 info.nativeFormats |= RTAUDIO_FLOAT32;
2686 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
2687 info.nativeFormats |= RTAUDIO_FLOAT64;
2689 if ( info.outputChannels > 0 )
2690 if ( getDefaultOutputDevice() == device ) info.isDefaultOutput = true;
2691 if ( info.inputChannels > 0 )
2692 if ( getDefaultInputDevice() == device ) info.isDefaultInput = true;
2695 drivers.removeCurrentDriver();
2699 void bufferSwitch( long index, ASIOBool processNow )
2701 RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
2702 object->callbackEvent( index );
2705 void RtApiAsio :: saveDeviceInfo( void )
2709 unsigned int nDevices = getDeviceCount();
2710 devices_.resize( nDevices );
2711 for ( unsigned int i=0; i<nDevices; i++ )
2712 devices_[i] = getDeviceInfo( i );
2715 bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
2716 unsigned int firstChannel, unsigned int sampleRate,
2717 RtAudioFormat format, unsigned int *bufferSize,
2718 RtAudio::StreamOptions *options )
2720 // For ASIO, a duplex stream MUST use the same driver.
2721 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
2722 errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
2726 char driverName[32];
2727 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2728 if ( result != ASE_OK ) {
2729 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
2730 errorText_ = errorStream_.str();
2734 // The getDeviceInfo() function will not work when a stream is open
2735 // because ASIO does not allow multiple devices to run at the same
2736 // time. Thus, we'll probe the system before opening a stream and
2737 // save the results for use by getDeviceInfo().
2738 this->saveDeviceInfo();
2740 // Only load the driver once for duplex stream.
2741 if ( mode != INPUT || stream_.mode != OUTPUT ) {
2742 if ( !drivers.loadDriver( driverName ) ) {
2743 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
2744 errorText_ = errorStream_.str();
2748 result = ASIOInit( &driverInfo );
2749 if ( result != ASE_OK ) {
2750 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2751 errorText_ = errorStream_.str();
2756 // Check the device channel count.
2757 long inputChannels, outputChannels;
2758 result = ASIOGetChannels( &inputChannels, &outputChannels );
2759 if ( result != ASE_OK ) {
2760 drivers.removeCurrentDriver();
2761 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2762 errorText_ = errorStream_.str();
2766 if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
2767 ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
2768 drivers.removeCurrentDriver();
2769 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
2770 errorText_ = errorStream_.str();
2773 stream_.nDeviceChannels[mode] = channels;
2774 stream_.nUserChannels[mode] = channels;
2775 stream_.channelOffset[mode] = firstChannel;
2777 // Verify the sample rate is supported.
2778 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
2779 if ( result != ASE_OK ) {
2780 drivers.removeCurrentDriver();
2781 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
2782 errorText_ = errorStream_.str();
2786 // Get the current sample rate
2787 ASIOSampleRate currentRate;
2788 result = ASIOGetSampleRate( &currentRate );
2789 if ( result != ASE_OK ) {
2790 drivers.removeCurrentDriver();
2791 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
2792 errorText_ = errorStream_.str();
2796 // Set the sample rate only if necessary
2797 if ( currentRate != sampleRate ) {
2798 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
2799 if ( result != ASE_OK ) {
2800 drivers.removeCurrentDriver();
2801 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
2802 errorText_ = errorStream_.str();
2807 // Determine the driver data type.
2808 ASIOChannelInfo channelInfo;
2809 channelInfo.channel = 0;
2810 if ( mode == OUTPUT ) channelInfo.isInput = false;
2811 else channelInfo.isInput = true;
2812 result = ASIOGetChannelInfo( &channelInfo );
2813 if ( result != ASE_OK ) {
2814 drivers.removeCurrentDriver();
2815 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
2816 errorText_ = errorStream_.str();
2820 // Assuming WINDOWS host is always little-endian.
2821 stream_.doByteSwap[mode] = false;
2822 stream_.userFormat = format;
2823 stream_.deviceFormat[mode] = 0;
2824 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
2825 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
2826 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
2828 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
2829 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
2830 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
2832 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
2833 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2834 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
2836 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
2837 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
2838 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
2841 if ( stream_.deviceFormat[mode] == 0 ) {
2842 drivers.removeCurrentDriver();
2843 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
2844 errorText_ = errorStream_.str();
2848 // Set the buffer size. For a duplex stream, this will end up
2849 // setting the buffer size based on the input constraints, which
2851 long minSize, maxSize, preferSize, granularity;
2852 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
2853 if ( result != ASE_OK ) {
2854 drivers.removeCurrentDriver();
2855 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
2856 errorText_ = errorStream_.str();
2860 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2861 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2862 else if ( granularity == -1 ) {
2863 // Make sure bufferSize is a power of two.
2864 int log2_of_min_size = 0;
2865 int log2_of_max_size = 0;
2867 for ( unsigned int i = 0; i < sizeof(long) * 8; i++ ) {
2868 if ( minSize & ((long)1 << i) ) log2_of_min_size = i;
2869 if ( maxSize & ((long)1 << i) ) log2_of_max_size = i;
2872 long min_delta = std::abs( (long)*bufferSize - ((long)1 << log2_of_min_size) );
2873 int min_delta_num = log2_of_min_size;
2875 for (int i = log2_of_min_size + 1; i <= log2_of_max_size; i++) {
2876 long current_delta = std::abs( (long)*bufferSize - ((long)1 << i) );
2877 if (current_delta < min_delta) {
2878 min_delta = current_delta;
2883 *bufferSize = ( (unsigned int)1 << min_delta_num );
2884 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2885 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2887 else if ( granularity != 0 ) {
2888 // Set to an even multiple of granularity, rounding up.
2889 *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
2892 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) {
2893 drivers.removeCurrentDriver();
2894 errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
2898 stream_.bufferSize = *bufferSize;
2899 stream_.nBuffers = 2;
2901 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2902 else stream_.userInterleaved = true;
2904 // ASIO always uses non-interleaved buffers.
2905 stream_.deviceInterleaved[mode] = false;
2907 // Allocate, if necessary, our AsioHandle structure for the stream.
2908 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2909 if ( handle == 0 ) {
2911 handle = new AsioHandle;
2913 catch ( std::bad_alloc& ) {
2914 //if ( handle == NULL ) {
2915 drivers.removeCurrentDriver();
2916 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
2919 handle->bufferInfos = 0;
2921 // Create a manual-reset event.
2922 handle->condition = CreateEvent( NULL, // no security
2923 TRUE, // manual-reset
2924 FALSE, // non-signaled initially
2926 stream_.apiHandle = (void *) handle;
2929 // Create the ASIO internal buffers. Since RtAudio sets up input
2930 // and output separately, we'll have to dispose of previously
2931 // created output buffers for a duplex stream.
2932 long inputLatency, outputLatency;
2933 if ( mode == INPUT && stream_.mode == OUTPUT ) {
2934 ASIODisposeBuffers();
2935 if ( handle->bufferInfos ) free( handle->bufferInfos );
2938 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
2939 bool buffersAllocated = false;
2940 unsigned int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2941 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
2942 if ( handle->bufferInfos == NULL ) {
2943 errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
2944 errorText_ = errorStream_.str();
2948 ASIOBufferInfo *infos;
2949 infos = handle->bufferInfos;
2950 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
2951 infos->isInput = ASIOFalse;
2952 infos->channelNum = i + stream_.channelOffset[0];
2953 infos->buffers[0] = infos->buffers[1] = 0;
2955 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
2956 infos->isInput = ASIOTrue;
2957 infos->channelNum = i + stream_.channelOffset[1];
2958 infos->buffers[0] = infos->buffers[1] = 0;
2961 // Set up the ASIO callback structure and create the ASIO data buffers.
2962 asioCallbacks.bufferSwitch = &bufferSwitch;
2963 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
2964 asioCallbacks.asioMessage = &asioMessages;
2965 asioCallbacks.bufferSwitchTimeInfo = NULL;
2966 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
2967 if ( result != ASE_OK ) {
2968 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
2969 errorText_ = errorStream_.str();
2972 buffersAllocated = true;
2974 // Set flags for buffer conversion.
2975 stream_.doConvertBuffer[mode] = false;
2976 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2977 stream_.doConvertBuffer[mode] = true;
2978 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2979 stream_.nUserChannels[mode] > 1 )
2980 stream_.doConvertBuffer[mode] = true;
2982 // Allocate necessary internal buffers
2983 unsigned long bufferBytes;
2984 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2985 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2986 if ( stream_.userBuffer[mode] == NULL ) {
2987 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
2991 if ( stream_.doConvertBuffer[mode] ) {
2993 bool makeBuffer = true;
2994 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
2995 if ( mode == INPUT ) {
2996 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2997 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2998 if ( bufferBytes <= bytesOut ) makeBuffer = false;
3003 bufferBytes *= *bufferSize;
3004 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
3005 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
3006 if ( stream_.deviceBuffer == NULL ) {
3007 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
3013 stream_.sampleRate = sampleRate;
3014 stream_.device[mode] = device;
3015 stream_.state = STREAM_STOPPED;
3016 asioCallbackInfo = &stream_.callbackInfo;
3017 stream_.callbackInfo.object = (void *) this;
3018 if ( stream_.mode == OUTPUT && mode == INPUT )
3019 // We had already set up an output stream.
3020 stream_.mode = DUPLEX;
3022 stream_.mode = mode;
3024 // Determine device latencies
3025 result = ASIOGetLatencies( &inputLatency, &outputLatency );
3026 if ( result != ASE_OK ) {
3027 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
3028 errorText_ = errorStream_.str();
3029 error( RtError::WARNING); // warn but don't fail
3032 stream_.latency[0] = outputLatency;
3033 stream_.latency[1] = inputLatency;
3036 // Setup the buffer conversion information structure. We don't use
3037 // buffers to do channel offsets, so we override that parameter
3039 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
3044 if ( buffersAllocated )
3045 ASIODisposeBuffers();
3046 drivers.removeCurrentDriver();
3049 CloseHandle( handle->condition );
3050 if ( handle->bufferInfos )
3051 free( handle->bufferInfos );
3053 stream_.apiHandle = 0;
3056 for ( int i=0; i<2; i++ ) {
3057 if ( stream_.userBuffer[i] ) {
3058 free( stream_.userBuffer[i] );
3059 stream_.userBuffer[i] = 0;
3063 if ( stream_.deviceBuffer ) {
3064 free( stream_.deviceBuffer );
3065 stream_.deviceBuffer = 0;
// Close the ASIO stream: stop it if running, dispose of the driver's
// internal buffers, release the AsioHandle and all RtAudio-allocated
// buffers, and mark the stream CLOSED.  Calling this with no open
// stream only emits a WARNING.
3071 void RtApiAsio :: closeStream()
3073 if ( stream_.state == STREAM_CLOSED ) {
3074 errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
3075 error( RtError::WARNING );
3079 if ( stream_.state == STREAM_RUNNING ) {
// Mark stopped before tearing down so the callback stops processing.
3080 stream_.state = STREAM_STOPPED;
3083 ASIODisposeBuffers();
3084 drivers.removeCurrentDriver();
3086 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
// Release the Win32 event used for drain signaling and the
// ASIOBufferInfo array allocated in probeDeviceOpen().
3088 CloseHandle( handle->condition );
3089 if ( handle->bufferInfos )
3090 free( handle->bufferInfos );
3092 stream_.apiHandle = 0;
// Free the user-side buffers for both directions (index 0 = output,
// index 1 = input, consistent with stream_.latency[] usage above).
3095 for ( int i=0; i<2; i++ ) {
3096 if ( stream_.userBuffer[i] ) {
3097 free( stream_.userBuffer[i] );
3098 stream_.userBuffer[i] = 0;
3102 if ( stream_.deviceBuffer ) {
3103 free( stream_.deviceBuffer );
3104 stream_.deviceBuffer = 0;
3107 stream_.mode = UNINITIALIZED;
3108 stream_.state = STREAM_CLOSED;
// Start the ASIO device and put the stream into the RUNNING state.
// Warns (and returns) if the stream is already running; raises
// SYSTEM_ERROR if ASIOStart() fails.
3111 void RtApiAsio :: startStream()
3114 if ( stream_.state == STREAM_RUNNING ) {
3115 errorText_ = "RtApiAsio::startStream(): the stream is already running!";
3116 error( RtError::WARNING );
3120 MUTEX_LOCK( &stream_.mutex );
3122 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3123 ASIOError result = ASIOStart();
3124 if ( result != ASE_OK ) {
3125 errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
3126 errorText_ = errorStream_.str();
// Reset the drain bookkeeping so a later stopStream() drains cleanly.
3130 handle->drainCounter = 0;
3131 handle->internalDrain = false;
3132 stream_.state = STREAM_RUNNING;
3136 MUTEX_UNLOCK( &stream_.mutex );
// Report any failure only after the mutex has been released.
3138 if ( result == ASE_OK ) return;
3139 error( RtError::SYSTEM_ERROR );
// Stop the ASIO device.  For output/duplex streams, first request a
// drain (drainCounter = 1) and block on the handle's event until the
// audio callback signals that pending output has been flushed.
3142 void RtApiAsio :: stopStream()
3145 if ( stream_.state == STREAM_STOPPED ) {
3146 errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
3147 error( RtError::WARNING );
3151 MUTEX_LOCK( &stream_.mutex );
// The state may have changed while we were waiting for the mutex.
3153 if ( stream_.state == STREAM_STOPPED ) {
3154 MUTEX_UNLOCK( &stream_.mutex );
3158 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3159 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3160 if ( handle->drainCounter == 0 ) {
3161 handle->drainCounter = 1;
// Drop the lock while waiting so the audio callback can run and
// set the condition event once draining completes.
3162 MUTEX_UNLOCK( &stream_.mutex );
3163 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
3164 ResetEvent( handle->condition );
3165 MUTEX_LOCK( &stream_.mutex );
3169 ASIOError result = ASIOStop();
3170 if ( result != ASE_OK ) {
3171 errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
3172 errorText_ = errorStream_.str();
3175 stream_.state = STREAM_STOPPED;
3176 MUTEX_UNLOCK( &stream_.mutex );
// Report any failure only after the mutex has been released.
3178 if ( result == ASE_OK ) return;
3179 error( RtError::SYSTEM_ERROR );
// Abort the stream.  For ASIO this behaves like stopStream(); the
// fast-abort shortcut is deliberately disabled (see note below).
3182 void RtApiAsio :: abortStream()
3185 if ( stream_.state == STREAM_STOPPED ) {
3186 errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
3187 error( RtError::WARNING );
3191 // The following lines were commented-out because some behavior was
3192 // noted where the device buffers need to be zeroed to avoid
3193 // continuing sound, even when the device buffers are completely
3194 // disposed. So now, calling abort is the same as calling stop.
3195 // AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3196 // handle->drainCounter = 1;
// ASIO bufferSwitch handler.  Invokes the user callback, then moves
// audio between the user buffers and the driver's per-channel ASIO
// buffers (selected by bufferIndex), applying format conversion and
// byte swapping as configured in probeDeviceOpen().  Also implements
// the drain handshake used by stopStream().
3200 bool RtApiAsio :: callbackEvent( long bufferIndex )
3202 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
3203 if ( stream_.state == STREAM_CLOSED ) {
3204 errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
3205 error( RtError::WARNING );
3209 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
3210 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3212 // Check if we were draining the stream and signal is finished.
3213 if ( handle->drainCounter > 3 ) {
// Wake the thread blocked in stopStream() unless the drain was
// initiated from inside the callback itself.
3214 if ( handle->internalDrain == false )
3215 SetEvent( handle->condition );
3221 MUTEX_LOCK( &stream_.mutex );
3223 // The state might change while waiting on a mutex.
3224 if ( stream_.state == STREAM_STOPPED ) goto unlock;
3226 // Invoke user callback to get fresh output data UNLESS we are
3228 if ( handle->drainCounter == 0 ) {
3229 RtAudioCallback callback = (RtAudioCallback) info->callback;
3230 double streamTime = getStreamTime();
3231 RtAudioStreamStatus status = 0;
3232 if ( stream_.mode != INPUT && asioXRun == true ) {
3233 status |= RTAUDIO_OUTPUT_UNDERFLOW;
3236 if ( stream_.mode != OUTPUT && asioXRun == true ) {
3237 status |= RTAUDIO_INPUT_OVERFLOW;
// The callback's return value drives the drain protocol:
// 0 = continue, 1 = drain then stop, 2 = abort immediately.
3240 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
3241 stream_.bufferSize, streamTime, status, info->userData );
3242 if ( handle->drainCounter == 2 ) {
3243 MUTEX_UNLOCK( &stream_.mutex );
3247 else if ( handle->drainCounter == 1 )
3248 handle->internalDrain = true;
3251 unsigned int nChannels, bufferBytes, i, j;
3252 nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
3253 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// bufferBytes here is the size of ONE device channel's buffer, since
// ASIO buffers are always non-interleaved.
3255 bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );
3257 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
3259 for ( i=0, j=0; i<nChannels; i++ ) {
3260 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3261 memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );
3265 else if ( stream_.doConvertBuffer[0] ) {
// Convert (and possibly byte-swap) into deviceBuffer, then scatter
// one channel at a time into the driver's output buffers.
3267 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
3268 if ( stream_.doByteSwap[0] )
3269 byteSwapBuffer( stream_.deviceBuffer,
3270 stream_.bufferSize * stream_.nDeviceChannels[0],
3271 stream_.deviceFormat[0] );
3273 for ( i=0, j=0; i<nChannels; i++ ) {
3274 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3275 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3276 &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
// No conversion needed: copy straight from the user buffer.
3282 if ( stream_.doByteSwap[0] )
3283 byteSwapBuffer( stream_.userBuffer[0],
3284 stream_.bufferSize * stream_.nUserChannels[0],
3285 stream_.userFormat );
3287 for ( i=0, j=0; i<nChannels; i++ ) {
3288 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3289 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3290 &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );
// While draining, count callbacks until the ">3" check above fires.
3295 if ( handle->drainCounter ) {
3296 handle->drainCounter++;
3301 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3303 bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
3305 if (stream_.doConvertBuffer[1]) {
3307 // Always interleave ASIO input data.
3308 for ( i=0, j=0; i<nChannels; i++ ) {
3309 if ( handle->bufferInfos[i].isInput == ASIOTrue )
3310 memcpy( &stream_.deviceBuffer[j++*bufferBytes],
3311 handle->bufferInfos[i].buffers[bufferIndex],
// Swap bytes (if needed) before converting into the user format.
3315 if ( stream_.doByteSwap[1] )
3316 byteSwapBuffer( stream_.deviceBuffer,
3317 stream_.bufferSize * stream_.nDeviceChannels[1],
3318 stream_.deviceFormat[1] );
3319 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
// No conversion needed: gather channels directly into userBuffer[1].
3323 for ( i=0, j=0; i<nChannels; i++ ) {
3324 if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
3325 memcpy( &stream_.userBuffer[1][bufferBytes*j++],
3326 handle->bufferInfos[i].buffers[bufferIndex],
3331 if ( stream_.doByteSwap[1] )
3332 byteSwapBuffer( stream_.userBuffer[1],
3333 stream_.bufferSize * stream_.nUserChannels[1],
3334 stream_.userFormat );
3339 // The following call was suggested by Malte Clasen. While the API
3340 // documentation indicates it should not be required, some device
3341 // drivers apparently do not function correctly without it.
3344 MUTEX_UNLOCK( &stream_.mutex );
3346 RtApi::tickStreamTime();
// ASIO driver callback: the device sample rate changed.  RtAudio
// cannot continue at a new rate, so the stream is stopped and a
// diagnostic is written to stderr.
3350 void sampleRateChanged( ASIOSampleRate sRate )
3352 // The ASIO documentation says that this usually only happens during
3353 // external sync. Audio processing is not stopped by the driver,
3354 // actual sample rate might not have even changed, maybe only the
3355 // sample rate status of an AES/EBU or S/PDIF digital input at the
// Recover the RtApi instance stashed in the global callback info.
3358 RtApi *object = (RtApi *) asioCallbackInfo->object;
3360 object->stopStream();
3362 catch ( RtError &exception ) {
3363 std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
3367 std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
// ASIO driver-to-host message callback.  Answers capability queries
// (kAsioSelectorSupported, kAsioEngineVersion, kAsioSupportsTimeInfo,
// ...) and logs reset/resync/latency notifications from the driver.
// The `message` and `opt` parameters are part of the ASIO callback
// signature and are unused by the selectors handled here.
3370 long asioMessages( long selector, long value, void* message, double* opt )
3374 switch( selector ) {
3375 case kAsioSelectorSupported:
3376 if ( value == kAsioResetRequest
3377 || value == kAsioEngineVersion
3378 || value == kAsioResyncRequest
3379 || value == kAsioLatenciesChanged
3380 // The following three were added for ASIO 2.0, you don't
3381 // necessarily have to support them.
3382 || value == kAsioSupportsTimeInfo
3383 || value == kAsioSupportsTimeCode
3384 || value == kAsioSupportsInputMonitor)
3387 case kAsioResetRequest:
3388 // Defer the task and perform the reset of the driver during the
3389 // next "safe" situation. You cannot reset the driver right now,
3390 // as this code is called from the driver. Reset the driver is
3391 // done by completely destruct is. I.e. ASIOStop(),
3392 // ASIODisposeBuffers(), Destruction Afterwards you initialize the
3394 std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
3397 case kAsioResyncRequest:
3398 // This informs the application that the driver encountered some
3399 // non-fatal data loss. It is used for synchronization purposes
3400 // of different media. Added mainly to work around the Win16Mutex
3401 // problems in Windows 95/98 with the Windows Multimedia system,
3402 // which could lose data because the Mutex was held too long by
3403 // another thread. However a driver can issue it in other
3405 // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
3409 case kAsioLatenciesChanged:
3410 // This will inform the host application that the drivers were
3411 // latencies changed. Beware, it this does not mean that the
3412 // buffer sizes have changed! You might need to update internal
3414 std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
3417 case kAsioEngineVersion:
3418 // Return the supported ASIO version of the host application. If
3419 // a host application does not implement this selector, ASIO 1.0
3420 // is assumed by the driver.
3423 case kAsioSupportsTimeInfo:
3424 // Informs the driver whether the
3425 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
3426 // For compatibility with ASIO 1.0 drivers the host application
3427 // should always support the "old" bufferSwitch method, too.
3430 case kAsioSupportsTimeCode:
3431 // Informs the driver whether application is interested in time
3432 // code info. If an application does not need to know about time
3433 // code, the driver has less work to do.
// Map an ASIOError code to a human-readable message via a small static
// lookup table; unrecognized codes yield "Unknown error.".
3440 static const char* getAsioErrorString( ASIOError result )
3448 static Messages m[] =
3450 { ASE_NotPresent, "Hardware input or output is not present or available." },
3451 { ASE_HWMalfunction, "Hardware is malfunctioning." },
3452 { ASE_InvalidParameter, "Invalid input parameter." },
3453 { ASE_InvalidMode, "Invalid mode." },
3454 { ASE_SPNotAdvancing, "Sample position not advancing." },
3455 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
3456 { ASE_NoMemory, "Not enough memory to complete the request." }
// Linear scan is fine: the table has only seven entries.
3459 for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
3460 if ( m[i].value == result ) return m[i].message;
3462 return "Unknown error.";
3464 //******************** End of __WINDOWS_ASIO__ *********************//
3468 #if defined(__WINDOWS_DS__) // Windows DirectSound API
3470 // Modified by Robin Davies, October 2005
3471 // - Improvements to DirectX pointer chasing.
3472 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
3473 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
3474 // Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
3475 // Changed device query structure for RtAudio 4.0.7, January 2010
3479 #include <algorithm>
3481 #if defined(__MINGW32__)
3482 // missing from latest mingw winapi
3483 #define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
3484 #define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
3485 #define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
3486 #define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
3489 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
3491 #ifdef _MSC_VER // if Microsoft Visual C++
3492 #pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
3495 static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3497 if ( pointer > bufferSize ) pointer -= bufferSize;
3498 if ( laterPointer < earlierPointer ) laterPointer += bufferSize;
3499 if ( pointer < earlierPointer ) pointer += bufferSize;
3500 return pointer >= earlierPointer && pointer < laterPointer;
3503 // A structure to hold various information related to the DirectSound
3504 // API implementation.
// drainCounter / internalDrain implement the same output-drain
// handshake used by the ASIO implementation above.
3506 unsigned int drainCounter; // Tracks callback counts when draining
3507 bool internalDrain; // Indicates if stop is initiated from callback or not.
// Per-direction ring-buffer bookkeeping; presumably index 0 = playback
// and 1 = capture, matching validId[]/id[] usage below — TODO confirm.
3511 UINT bufferPointer[2];
3512 DWORD dsBufferSize[2];
3513 DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.
// NOTE(review): the constructor also clears id[], buffer[] and xrun[]
// members declared on lines not shown here.
3517 :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
3520 // Declarations for utility functions, callbacks, and structures
3521 // specific to the DirectSound implementation.
3522 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
3523 LPCTSTR description,
3527 static const char* getErrorString( int code );
3529 extern "C" unsigned __stdcall callbackHandler( void *ptr );
3538 : found(false) { validId[0] = false; validId[1] = false; }
3541 std::vector< DsDevice > dsDevices;
3543 RtApiDs :: RtApiDs()
3545 // Dsound will run both-threaded. If CoInitialize fails, then just
3546 // accept whatever the mainline chose for a threading model.
3547 coInitialized_ = false;
3548 HRESULT hr = CoInitialize( NULL );
3549 if ( !FAILED( hr ) ) coInitialized_ = true;
// Destructor: balance the constructor's CoInitialize call and make
// sure any open stream is shut down.
3552 RtApiDs :: ~RtApiDs()
3554 if ( coInitialized_ ) CoUninitialize(); // balanced call.
3555 if ( stream_.state != STREAM_CLOSED ) closeStream();
3558 // The DirectSound default output is always the first device.
// (Body on lines not shown here; presumably returns index 0 —
// consistent with the `device == 0` default-input check below.)
3559 unsigned int RtApiDs :: getDefaultOutputDevice( void )
3564 // The DirectSound default input is always the first input device,
3565 // which is the first capture device enumerated.
// (Body on lines not shown here; presumably returns index 0 —
// consistent with the `device == 0` default-input check below.)
3566 unsigned int RtApiDs :: getDefaultInputDevice( void )
3571 unsigned int RtApiDs :: getDeviceCount( void )
3573 // Set query flag for previously found devices to false, so that we
3574 // can check for any devices that have disappeared.
3575 for ( unsigned int i=0; i<dsDevices.size(); i++ )
3576 dsDevices[i].found = false;
3578 // Query DirectSound devices.
3579 bool isInput = false;
3580 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &isInput );
3581 if ( FAILED( result ) ) {
3582 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
3583 errorText_ = errorStream_.str();
3584 error( RtError::WARNING );
3587 // Query DirectSoundCapture devices.
3589 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &isInput );
3590 if ( FAILED( result ) ) {
3591 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
3592 errorText_ = errorStream_.str();
3593 error( RtError::WARNING );
3596 // Clean out any devices that may have disappeared.
3597 std::vector< DsDevice > :: iterator it;
3598 for ( it=dsDevices.begin(); it < dsDevices.end(); it++ )
3599 if ( it->found == false ) dsDevices.erase( it );
3601 return dsDevices.size();
// Probe DirectSound device `device` and fill an RtAudio::DeviceInfo
// with its channel counts, supported sample rates, and native formats.
// Playback capabilities are probed first; capture-only devices jump
// straight to the input probe (the `probeInput` label is defined on a
// line not shown here).  Probe failures are reported as WARNINGs and
// leave info.probed == false.
3604 RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )
3606 RtAudio::DeviceInfo info;
3607 info.probed = false;
3609 if ( dsDevices.size() == 0 ) {
3610 // Force a query of all devices
3612 if ( dsDevices.size() == 0 ) {
3613 errorText_ = "RtApiDs::getDeviceInfo: no devices found!";
3614 error( RtError::INVALID_USE );
3618 if ( device >= dsDevices.size() ) {
3619 errorText_ = "RtApiDs::getDeviceInfo: device ID is invalid!";
3620 error( RtError::INVALID_USE );
// Skip the playback probe entirely for capture-only devices.
3624 if ( dsDevices[ device ].validId[0] == false ) goto probeInput;
3626 LPDIRECTSOUND output;
3628 result = DirectSoundCreate( dsDevices[ device ].id[0], &output, NULL );
3629 if ( FAILED( result ) ) {
3630 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsDevices[ device ].name << ")!";
3631 errorText_ = errorStream_.str();
3632 error( RtError::WARNING );
3636 outCaps.dwSize = sizeof( outCaps );
3637 result = output->GetCaps( &outCaps );
3638 if ( FAILED( result ) ) {
3640 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
3641 errorText_ = errorStream_.str();
3642 error( RtError::WARNING );
3646 // Get output channel information.
3647 info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
3649 // Get sample rate information.
// Keep only the standard RtAudio rates that fall inside the device's
// secondary-buffer rate range.
3650 info.sampleRates.clear();
3651 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
3652 if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
3653 SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate )
3654 info.sampleRates.push_back( SAMPLE_RATES[k] );
3657 // Get format information.
3658 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
3659 if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;
3663 if ( getDefaultOutputDevice() == device )
3664 info.isDefaultOutput = true;
// Output-only device: we are done once the name is copied.
3666 if ( dsDevices[ device ].validId[1] == false ) {
3667 info.name = dsDevices[ device ].name;
3674 LPDIRECTSOUNDCAPTURE input;
3675 result = DirectSoundCaptureCreate( dsDevices[ device ].id[1], &input, NULL );
3676 if ( FAILED( result ) ) {
3677 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsDevices[ device ].name << ")!";
3678 errorText_ = errorStream_.str();
3679 error( RtError::WARNING );
3684 inCaps.dwSize = sizeof( inCaps );
3685 result = input->GetCaps( &inCaps );
3686 if ( FAILED( result ) ) {
3688 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsDevices[ device ].name << ")!";
3689 errorText_ = errorStream_.str();
3690 error( RtError::WARNING );
3694 // Get input channel information.
3695 info.inputChannels = inCaps.dwChannels;
3697 // Get sample rate and format information.
// The WAVE_FORMAT_* capability bits encode (rate, channels, width)
// combinations, so rates must be collected per channel count/format.
3698 std::vector<unsigned int> rates;
3699 if ( inCaps.dwChannels == 2 ) {
3700 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3701 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3702 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3703 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3704 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3705 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3706 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3707 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3709 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3710 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) rates.push_back( 11025 );
3711 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) rates.push_back( 22050 );
3712 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) rates.push_back( 44100 );
3713 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) rates.push_back( 96000 );
3715 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3716 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) rates.push_back( 11025 );
3717 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) rates.push_back( 22050 );
3718 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) rates.push_back( 44100 );
3719 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) rates.push_back( 96000 );
3722 else if ( inCaps.dwChannels == 1 ) {
3723 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3724 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3725 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3726 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3727 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3728 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3729 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3730 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3732 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3733 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) rates.push_back( 11025 );
3734 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) rates.push_back( 22050 );
3735 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) rates.push_back( 44100 );
3736 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) rates.push_back( 96000 );
3738 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3739 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) rates.push_back( 11025 );
3740 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) rates.push_back( 22050 );
3741 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) rates.push_back( 44100 );
3742 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) rates.push_back( 96000 );
3745 else info.inputChannels = 0; // technically, this would be an error
3749 if ( info.inputChannels == 0 ) return info;
3751 // Copy the supported rates to the info structure but avoid duplication.
3753 for ( unsigned int i=0; i<rates.size(); i++ ) {
3755 for ( unsigned int j=0; j<info.sampleRates.size(); j++ ) {
3756 if ( rates[i] == info.sampleRates[j] ) {
3761 if ( found == false ) info.sampleRates.push_back( rates[i] );
3763 sort( info.sampleRates.begin(), info.sampleRates.end() );
3765 // If device opens for both playback and capture, we determine the channels.
3766 if ( info.outputChannels > 0 && info.inputChannels > 0 )
3767 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
3769 if ( device == 0 ) info.isDefaultInput = true;
3771 // Copy name and return.
3772 info.name = dsDevices[ device ].name;
3777 bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
3778 unsigned int firstChannel, unsigned int sampleRate,
3779 RtAudioFormat format, unsigned int *bufferSize,
3780 RtAudio::StreamOptions *options )
3782 if ( channels + firstChannel > 2 ) {
3783 errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
3787 unsigned int nDevices = dsDevices.size();
3788 if ( nDevices == 0 ) {
3789 // This should not happen because a check is made before this function is called.
3790 errorText_ = "RtApiDs::probeDeviceOpen: no devices found!";
3794 if ( device >= nDevices ) {
3795 // This should not happen because a check is made before this function is called.
3796 errorText_ = "RtApiDs::probeDeviceOpen: device ID is invalid!";
3800 if ( mode == OUTPUT ) {
3801 if ( dsDevices[ device ].validId[0] == false ) {
3802 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
3803 errorText_ = errorStream_.str();
3807 else { // mode == INPUT
3808 if ( dsDevices[ device ].validId[1] == false ) {
3809 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
3810 errorText_ = errorStream_.str();
3815 // According to a note in PortAudio, using GetDesktopWindow()
3816 // instead of GetForegroundWindow() is supposed to avoid problems
3817 // that occur when the application's window is not the foreground
3818 // window. Also, if the application window closes before the
3819 // DirectSound buffer, DirectSound can crash. In the past, I had
3820 // problems when using GetDesktopWindow() but it seems fine now
3821 // (January 2010). I'll leave it commented here.
3822 // HWND hWnd = GetForegroundWindow();
3823 HWND hWnd = GetDesktopWindow();
3825 // Check the numberOfBuffers parameter and limit the lowest value to
3826 // two. This is a judgement call and a value of two is probably too
3827 // low for capture, but it should work for playback.
3829 if ( options ) nBuffers = options->numberOfBuffers;
3830 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
3831 if ( nBuffers < 2 ) nBuffers = 3;
3833 // Check the lower range of the user-specified buffer size and set
3834 // (arbitrarily) to a lower bound of 32.
3835 if ( *bufferSize < 32 ) *bufferSize = 32;
3837 // Create the wave format structure. The data format setting will
3838 // be determined later.
3839 WAVEFORMATEX waveFormat;
3840 ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
3841 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
3842 waveFormat.nChannels = channels + firstChannel;
3843 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
3845 // Determine the device buffer size. By default, we'll use the value
3846 // defined above (32K), but we will grow it to make allowances for
3847 // very large software buffer sizes.
3848 DWORD dsBufferSize = MINIMUM_DEVICE_BUFFER_SIZE;;
3849 DWORD dsPointerLeadTime = 0;
3851 void *ohandle = 0, *bhandle = 0;
3853 if ( mode == OUTPUT ) {
3855 LPDIRECTSOUND output;
3856 result = DirectSoundCreate( dsDevices[ device ].id[0], &output, NULL );
3857 if ( FAILED( result ) ) {
3858 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsDevices[ device ].name << ")!";
3859 errorText_ = errorStream_.str();
3864 outCaps.dwSize = sizeof( outCaps );
3865 result = output->GetCaps( &outCaps );
3866 if ( FAILED( result ) ) {
3868 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsDevices[ device ].name << ")!";
3869 errorText_ = errorStream_.str();
3873 // Check channel information.
3874 if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
3875 errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsDevices[ device ].name << ") does not support stereo playback.";
3876 errorText_ = errorStream_.str();
3880 // Check format information. Use 16-bit format unless not
3881 // supported or user requests 8-bit.
3882 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
3883 !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
3884 waveFormat.wBitsPerSample = 16;
3885 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3888 waveFormat.wBitsPerSample = 8;
3889 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3891 stream_.userFormat = format;
3893 // Update wave format structure and buffer information.
3894 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3895 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3896 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
3898 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
3899 while ( dsPointerLeadTime * 2U > dsBufferSize )
3902 // Set cooperative level to DSSCL_EXCLUSIVE ... sound stops when window focus changes.
3903 // result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
3904 // Set cooperative level to DSSCL_PRIORITY ... sound remains when window focus changes.
3905 result = output->SetCooperativeLevel( hWnd, DSSCL_PRIORITY );
3906 if ( FAILED( result ) ) {
3908 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsDevices[ device ].name << ")!";
3909 errorText_ = errorStream_.str();
3913 // Even though we will write to the secondary buffer, we need to
3914 // access the primary buffer to set the correct output format
3915 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
3916 // buffer description.
3917 DSBUFFERDESC bufferDescription;
3918 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3919 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3920 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
3922 // Obtain the primary buffer
3923 LPDIRECTSOUNDBUFFER buffer;
3924 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3925 if ( FAILED( result ) ) {
3927 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsDevices[ device ].name << ")!";
3928 errorText_ = errorStream_.str();
3932 // Set the primary DS buffer sound format.
3933 result = buffer->SetFormat( &waveFormat );
3934 if ( FAILED( result ) ) {
3936 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsDevices[ device ].name << ")!";
3937 errorText_ = errorStream_.str();
3941 // Setup the secondary DS buffer description.
3942 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3943 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3944 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3945 DSBCAPS_GLOBALFOCUS |
3946 DSBCAPS_GETCURRENTPOSITION2 |
3947 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
3948 bufferDescription.dwBufferBytes = dsBufferSize;
3949 bufferDescription.lpwfxFormat = &waveFormat;
3951 // Try to create the secondary DS buffer. If that doesn't work,
3952 // try to use software mixing. Otherwise, there's a problem.
3953 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3954 if ( FAILED( result ) ) {
3955 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3956 DSBCAPS_GLOBALFOCUS |
3957 DSBCAPS_GETCURRENTPOSITION2 |
3958 DSBCAPS_LOCSOFTWARE ); // Force software mixing
3959 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3960 if ( FAILED( result ) ) {
3962 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsDevices[ device ].name << ")!";
3963 errorText_ = errorStream_.str();
3968 // Get the buffer size ... might be different from what we specified.
3970 dsbcaps.dwSize = sizeof( DSBCAPS );
3971 result = buffer->GetCaps( &dsbcaps );
3972 if ( FAILED( result ) ) {
3975 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsDevices[ device ].name << ")!";
3976 errorText_ = errorStream_.str();
3980 dsBufferSize = dsbcaps.dwBufferBytes;
3982 // Lock the DS buffer
3985 result = buffer->Lock( 0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0 );
3986 if ( FAILED( result ) ) {
3989 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsDevices[ device ].name << ")!";
3990 errorText_ = errorStream_.str();
3994 // Zero the DS buffer
3995 ZeroMemory( audioPtr, dataLen );
3997 // Unlock the DS buffer
3998 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3999 if ( FAILED( result ) ) {
4002 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsDevices[ device ].name << ")!";
4003 errorText_ = errorStream_.str();
4007 ohandle = (void *) output;
4008 bhandle = (void *) buffer;
4011 if ( mode == INPUT ) {
4013 LPDIRECTSOUNDCAPTURE input;
4014 result = DirectSoundCaptureCreate( dsDevices[ device ].id[1], &input, NULL );
4015 if ( FAILED( result ) ) {
4016 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsDevices[ device ].name << ")!";
4017 errorText_ = errorStream_.str();
4022 inCaps.dwSize = sizeof( inCaps );
4023 result = input->GetCaps( &inCaps );
4024 if ( FAILED( result ) ) {
4026 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsDevices[ device ].name << ")!";
4027 errorText_ = errorStream_.str();
4031 // Check channel information.
4032 if ( inCaps.dwChannels < channels + firstChannel ) {
4033 errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";
4037 // Check format information. Use 16-bit format unless user
4039 DWORD deviceFormats;
4040 if ( channels + firstChannel == 2 ) {
4041 deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
4042 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
4043 waveFormat.wBitsPerSample = 8;
4044 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
4046 else { // assume 16-bit is supported
4047 waveFormat.wBitsPerSample = 16;
4048 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4051 else { // channel == 1
4052 deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
4053 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
4054 waveFormat.wBitsPerSample = 8;
4055 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
4057 else { // assume 16-bit is supported
4058 waveFormat.wBitsPerSample = 16;
4059 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4062 stream_.userFormat = format;
4064 // Update wave format structure and buffer information.
4065 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
4066 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
4067 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
4069 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
4070 while ( dsPointerLeadTime * 2U > dsBufferSize )
4073 // Setup the secondary DS buffer description.
4074 DSCBUFFERDESC bufferDescription;
4075 ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
4076 bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
4077 bufferDescription.dwFlags = 0;
4078 bufferDescription.dwReserved = 0;
4079 bufferDescription.dwBufferBytes = dsBufferSize;
4080 bufferDescription.lpwfxFormat = &waveFormat;
4082 // Create the capture buffer.
4083 LPDIRECTSOUNDCAPTUREBUFFER buffer;
4084 result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
4085 if ( FAILED( result ) ) {
4087 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsDevices[ device ].name << ")!";
4088 errorText_ = errorStream_.str();
4092 // Get the buffer size ... might be different from what we specified.
4094 dscbcaps.dwSize = sizeof( DSCBCAPS );
4095 result = buffer->GetCaps( &dscbcaps );
4096 if ( FAILED( result ) ) {
4099 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsDevices[ device ].name << ")!";
4100 errorText_ = errorStream_.str();
4104 dsBufferSize = dscbcaps.dwBufferBytes;
4106 // NOTE: We could have a problem here if this is a duplex stream
4107 // and the play and capture hardware buffer sizes are different
4108 // (I'm actually not sure if that is a problem or not).
4109 // Currently, we are not verifying that.
4111 // Lock the capture buffer
4114 result = buffer->Lock( 0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0 );
4115 if ( FAILED( result ) ) {
4118 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsDevices[ device ].name << ")!";
4119 errorText_ = errorStream_.str();
4124 ZeroMemory( audioPtr, dataLen );
4126 // Unlock the buffer
4127 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4128 if ( FAILED( result ) ) {
4131 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsDevices[ device ].name << ")!";
4132 errorText_ = errorStream_.str();
4136 ohandle = (void *) input;
4137 bhandle = (void *) buffer;
4140 // Set various stream parameters
4141 DsHandle *handle = 0;
4142 stream_.nDeviceChannels[mode] = channels + firstChannel;
4143 stream_.nUserChannels[mode] = channels;
4144 stream_.bufferSize = *bufferSize;
4145 stream_.channelOffset[mode] = firstChannel;
4146 stream_.deviceInterleaved[mode] = true;
4147 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
4148 else stream_.userInterleaved = true;
4150 // Set flag for buffer conversion
4151 stream_.doConvertBuffer[mode] = false;
4152 if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
4153 stream_.doConvertBuffer[mode] = true;
4154 if (stream_.userFormat != stream_.deviceFormat[mode])
4155 stream_.doConvertBuffer[mode] = true;
4156 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
4157 stream_.nUserChannels[mode] > 1 )
4158 stream_.doConvertBuffer[mode] = true;
4160 // Allocate necessary internal buffers
4161 long bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
4162 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
4163 if ( stream_.userBuffer[mode] == NULL ) {
4164 errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
4168 if ( stream_.doConvertBuffer[mode] ) {
4170 bool makeBuffer = true;
4171 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
4172 if ( mode == INPUT ) {
4173 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
4174 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
4175 if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
4180 bufferBytes *= *bufferSize;
4181 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
4182 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
4183 if ( stream_.deviceBuffer == NULL ) {
4184 errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
4190 // Allocate our DsHandle structures for the stream.
4191 if ( stream_.apiHandle == 0 ) {
4193 handle = new DsHandle;
4195 catch ( std::bad_alloc& ) {
4196 errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";
4200 // Create a manual-reset event.
4201 handle->condition = CreateEvent( NULL, // no security
4202 TRUE, // manual-reset
4203 FALSE, // non-signaled initially
4205 stream_.apiHandle = (void *) handle;
4208 handle = (DsHandle *) stream_.apiHandle;
4209 handle->id[mode] = ohandle;
4210 handle->buffer[mode] = bhandle;
4211 handle->dsBufferSize[mode] = dsBufferSize;
4212 handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
4214 stream_.device[mode] = device;
4215 stream_.state = STREAM_STOPPED;
4216 if ( stream_.mode == OUTPUT && mode == INPUT )
4217 // We had already set up an output stream.
4218 stream_.mode = DUPLEX;
4220 stream_.mode = mode;
4221 stream_.nBuffers = nBuffers;
4222 stream_.sampleRate = sampleRate;
4224 // Setup the buffer conversion information structure.
4225 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
4227 // Setup the callback thread.
4229 stream_.callbackInfo.object = (void *) this;
4230 stream_.callbackInfo.isRunning = true;
4231 stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
4232 &stream_.callbackInfo, 0, &threadId );
4233 if ( stream_.callbackInfo.thread == 0 ) {
4234 errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
4238 // Boost DS thread priority
4239 SetThreadPriority( (HANDLE) stream_.callbackInfo.thread, THREAD_PRIORITY_HIGHEST );
4244 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4245 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4246 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4247 if ( buffer ) buffer->Release();
4250 if ( handle->buffer[1] ) {
4251 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4252 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4253 if ( buffer ) buffer->Release();
4256 CloseHandle( handle->condition );
4258 stream_.apiHandle = 0;
4261 for ( int i=0; i<2; i++ ) {
4262 if ( stream_.userBuffer[i] ) {
4263 free( stream_.userBuffer[i] );
4264 stream_.userBuffer[i] = 0;
4268 if ( stream_.deviceBuffer ) {
4269 free( stream_.deviceBuffer );
4270 stream_.deviceBuffer = 0;
// Close an open DirectSound stream and release every associated resource:
// stop and join the callback thread, release the playback/capture buffer
// and device objects, destroy the drain-signal event, and free the user
// and device conversion buffers. Leaves the stream in the CLOSED state.
// NOTE(review): some lines (braces, Release() calls, an early return after
// the warning) are elided from this excerpt -- comments below describe only
// the visible statements.
4276 void RtApiDs :: closeStream()
4278   if ( stream_.state == STREAM_CLOSED ) {
4279     errorText_ = "RtApiDs::closeStream(): no open stream to close!";
4280     error( RtError::WARNING );
4284   // Stop the callback thread.
4285   stream_.callbackInfo.isRunning = false;  // signal the thread loop to exit
4286   WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );  // join it
4287   CloseHandle( (HANDLE) stream_.callbackInfo.thread );
4289   DsHandle *handle = (DsHandle *) stream_.apiHandle;
     // Release the DirectSound playback objects (index 0 = output).
4291     if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4292       LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4293       LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
     // Release the DirectSound capture objects (index 1 = input).
4300     if ( handle->buffer[1] ) {
4301       LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4302       LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4309     CloseHandle( handle->condition );  // destroy the drain-signal event
4311     stream_.apiHandle = 0;
   // Free the per-mode user buffers allocated in probeDeviceOpen().
4314   for ( int i=0; i<2; i++ ) {
4315     if ( stream_.userBuffer[i] ) {
4316       free( stream_.userBuffer[i] );
4317       stream_.userBuffer[i] = 0;
   // Free the shared device (format-conversion) buffer, if one was allocated.
4321   if ( stream_.deviceBuffer ) {
4322     free( stream_.deviceBuffer );
4323     stream_.deviceBuffer = 0;
4326   stream_.mode = UNINITIALIZED;
4327   stream_.state = STREAM_CLOSED;
// Start a previously opened stream: raise the Windows timer resolution,
// reset the buffer-rolling/preroll bookkeeping, start the DS playback
// buffer (Play) and/or capture buffer (Start) depending on the stream
// mode, then mark the stream RUNNING. Issues a WARNING (and, per the
// elided lines, presumably returns early) if already running; raises
// SYSTEM_ERROR if a DirectSound call failed.
// NOTE(review): braces/returns between some statements are elided from
// this excerpt.
4330 void RtApiDs :: startStream()
4333   if ( stream_.state == STREAM_RUNNING ) {
4334     errorText_ = "RtApiDs::startStream(): the stream is already running!";
4335     error( RtError::WARNING );
4339   MUTEX_LOCK( &stream_.mutex );
4341   DsHandle *handle = (DsHandle *) stream_.apiHandle;
4343   // Increase scheduler frequency on lesser windows (a side-effect of
4344   // increasing timer accuracy). On greater windows (Win2K or later),
4345   // this is already in effect.
4346   timeBeginPeriod( 1 );
4348   buffersRolling = false;  // callbackEvent will re-sync pointers on first pass
4349   duplexPrerollBytes = 0;
4351   if ( stream_.mode == DUPLEX ) {
4352     // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
4353     duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );
   // Start the playback buffer looping for OUTPUT or DUPLEX streams.
4357   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4359     LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4360     result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
4361     if ( FAILED( result ) ) {
4362       errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
4363       errorText_ = errorStream_.str();
   // Start the capture buffer looping for INPUT or DUPLEX streams.
4368   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4370     LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4371     result = buffer->Start( DSCBSTART_LOOPING );
4372     if ( FAILED( result ) ) {
4373       errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
4374       errorText_ = errorStream_.str();
4379   handle->drainCounter = 0;     // no drain in progress
4380   handle->internalDrain = false;
4381   stream_.state = STREAM_RUNNING;
4384   MUTEX_UNLOCK( &stream_.mutex );
4386   if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Stop a running stream. For output/duplex streams this first drains the
// playback buffer (blocking on handle->condition until the callback thread
// signals the drain is complete), then stops each DS buffer, locks it,
// zeros its contents so a later restart plays silence rather than stale
// data, and resets the internal buffer pointers to 0. Finally reverts the
// timer resolution raised in startStream() and marks the stream STOPPED.
// NOTE(review): braces/returns between some statements are elided from
// this excerpt.
4389 void RtApiDs :: stopStream()
4392   if ( stream_.state == STREAM_STOPPED ) {
4393     errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
4394     error( RtError::WARNING );
4398   MUTEX_LOCK( &stream_.mutex );
   // Re-check under the lock: another thread may have stopped us while we
   // were waiting for the mutex.
4400   if ( stream_.state == STREAM_STOPPED ) {
4401     MUTEX_UNLOCK( &stream_.mutex );
4408   DsHandle *handle = (DsHandle *) stream_.apiHandle;
4409   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
     // drainCounter == 0 means no drain was requested yet (e.g. via
     // abortStream); request one and block until the callback thread
     // signals that the output buffer has played out.
4410     if ( handle->drainCounter == 0 ) {
4411       handle->drainCounter = 1;
4412       MUTEX_UNLOCK( &stream_.mutex );
4413       WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
4414       ResetEvent( handle->condition );  // manual-reset event: clear for next use
4415       MUTEX_LOCK( &stream_.mutex );
4418     // Stop the buffer and clear memory
4419     LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4420     result = buffer->Stop();
4421     if ( FAILED( result ) ) {
4422       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping output buffer!";
4423       errorText_ = errorStream_.str();
4427     // Lock the buffer and clear it so that if we start to play again,
4428     // we won't have old data playing.
4429     result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
4430     if ( FAILED( result ) ) {
4431       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking output buffer!";
4432       errorText_ = errorStream_.str();
4436     // Zero the DS buffer
4437     ZeroMemory( audioPtr, dataLen );
4439     // Unlock the DS buffer
4440     result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4441     if ( FAILED( result ) ) {
4442       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
4443       errorText_ = errorStream_.str();
4447     // If we start playing again, we must begin at beginning of buffer.
4448     handle->bufferPointer[0] = 0;
4451   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4452     LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4456     result = buffer->Stop();
4457     if ( FAILED( result ) ) {
4458       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping input buffer!";
4459       errorText_ = errorStream_.str();
4463     // Lock the buffer and clear it so that if we start to play again,
4464     // we won't have old data playing.
4465     result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
4466     if ( FAILED( result ) ) {
4467       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking input buffer!";
4468       errorText_ = errorStream_.str();
4472     // Zero the DS buffer
4473     ZeroMemory( audioPtr, dataLen );
4475     // Unlock the DS buffer
4476     result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4477     if ( FAILED( result ) ) {
4478       errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
4479       errorText_ = errorStream_.str();
4483     // If we start recording again, we must begin at beginning of buffer.
4484     handle->bufferPointer[1] = 0;
4488   timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
4489   stream_.state = STREAM_STOPPED;
4490   MUTEX_UNLOCK( &stream_.mutex );
4492   if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Abort a running stream without draining: pre-set drainCounter to 1 so
// that a subsequent stopStream() skips its blocking wait on the drain
// condition (see the drainCounter == 0 test in stopStream). Issues a
// WARNING if the stream is already stopped.
// NOTE(review): the tail of this function (presumably a call into
// stopStream()) is elided from this excerpt.
4495 void RtApiDs :: abortStream()
4498   if ( stream_.state == STREAM_STOPPED ) {
4499     errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
4500     error( RtError::WARNING );
4504   DsHandle *handle = (DsHandle *) stream_.apiHandle;
4505   handle->drainCounter = 1;  // mark drain as already requested
4510 void RtApiDs :: callbackEvent()
4512 if ( stream_.state == STREAM_STOPPED ) {
4513 Sleep( 50 ); // sleep 50 milliseconds
4517 if ( stream_.state == STREAM_CLOSED ) {
4518 errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
4519 error( RtError::WARNING );
4523 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
4524 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4526 // Check if we were draining the stream and signal is finished.
4527 if ( handle->drainCounter > stream_.nBuffers + 2 ) {
4528 if ( handle->internalDrain == false )
4529 SetEvent( handle->condition );
4535 MUTEX_LOCK( &stream_.mutex );
4537 // The state might change while waiting on a mutex.
4538 if ( stream_.state == STREAM_STOPPED ) {
4539 MUTEX_UNLOCK( &stream_.mutex );
4543 // Invoke user callback to get fresh output data UNLESS we are
4545 if ( handle->drainCounter == 0 ) {
4546 RtAudioCallback callback = (RtAudioCallback) info->callback;
4547 double streamTime = getStreamTime();
4548 RtAudioStreamStatus status = 0;
4549 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
4550 status |= RTAUDIO_OUTPUT_UNDERFLOW;
4551 handle->xrun[0] = false;
4553 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
4554 status |= RTAUDIO_INPUT_OVERFLOW;
4555 handle->xrun[1] = false;
4557 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
4558 stream_.bufferSize, streamTime, status, info->userData );
4559 if ( handle->drainCounter == 2 ) {
4560 MUTEX_UNLOCK( &stream_.mutex );
4564 else if ( handle->drainCounter == 1 )
4565 handle->internalDrain = true;
4569 DWORD currentWritePointer, safeWritePointer;
4570 DWORD currentReadPointer, safeReadPointer;
4571 UINT nextWritePointer;
4573 LPVOID buffer1 = NULL;
4574 LPVOID buffer2 = NULL;
4575 DWORD bufferSize1 = 0;
4576 DWORD bufferSize2 = 0;
4581 if ( buffersRolling == false ) {
4582 if ( stream_.mode == DUPLEX ) {
4583 //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4585 // It takes a while for the devices to get rolling. As a result,
4586 // there's no guarantee that the capture and write device pointers
4587 // will move in lockstep. Wait here for both devices to start
4588 // rolling, and then set our buffer pointers accordingly.
4589 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
4590 // bytes later than the write buffer.
4592 // Stub: a serious risk of having a pre-emptive scheduling round
4593 // take place between the two GetCurrentPosition calls... but I'm
4594 // really not sure how to solve the problem. Temporarily boost to
4595 // Realtime priority, maybe; but I'm not sure what priority the
4596 // DirectSound service threads run at. We *should* be roughly
4597 // within a ms or so of correct.
4599 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4600 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4602 DWORD startSafeWritePointer, startSafeReadPointer;
4604 result = dsWriteBuffer->GetCurrentPosition( NULL, &startSafeWritePointer );
4605 if ( FAILED( result ) ) {
4606 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4607 errorText_ = errorStream_.str();
4608 error( RtError::SYSTEM_ERROR );
4610 result = dsCaptureBuffer->GetCurrentPosition( NULL, &startSafeReadPointer );
4611 if ( FAILED( result ) ) {
4612 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4613 errorText_ = errorStream_.str();
4614 error( RtError::SYSTEM_ERROR );
4617 result = dsWriteBuffer->GetCurrentPosition( NULL, &safeWritePointer );
4618 if ( FAILED( result ) ) {
4619 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4620 errorText_ = errorStream_.str();
4621 error( RtError::SYSTEM_ERROR );
4623 result = dsCaptureBuffer->GetCurrentPosition( NULL, &safeReadPointer );
4624 if ( FAILED( result ) ) {
4625 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4626 errorText_ = errorStream_.str();
4627 error( RtError::SYSTEM_ERROR );
4629 if ( safeWritePointer != startSafeWritePointer && safeReadPointer != startSafeReadPointer ) break;
4633 //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4635 handle->bufferPointer[0] = safeWritePointer + handle->dsPointerLeadTime[0];
4636 if ( handle->bufferPointer[0] >= handle->dsBufferSize[0] ) handle->bufferPointer[0] -= handle->dsBufferSize[0];
4637 handle->bufferPointer[1] = safeReadPointer;
4639 else if ( stream_.mode == OUTPUT ) {
4641 // Set the proper nextWritePosition after initial startup.
4642 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4643 result = dsWriteBuffer->GetCurrentPosition( ¤tWritePointer, &safeWritePointer );
4644 if ( FAILED( result ) ) {
4645 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4646 errorText_ = errorStream_.str();
4647 error( RtError::SYSTEM_ERROR );
4649 handle->bufferPointer[0] = safeWritePointer + handle->dsPointerLeadTime[0];
4650 if ( handle->bufferPointer[0] >= handle->dsBufferSize[0] ) handle->bufferPointer[0] -= handle->dsBufferSize[0];
4653 buffersRolling = true;
4656 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4658 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4660 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
4661 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4662 bufferBytes *= formatBytes( stream_.userFormat );
4663 memset( stream_.userBuffer[0], 0, bufferBytes );
4666 // Setup parameters and do buffer conversion if necessary.
4667 if ( stream_.doConvertBuffer[0] ) {
4668 buffer = stream_.deviceBuffer;
4669 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
4670 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
4671 bufferBytes *= formatBytes( stream_.deviceFormat[0] );
4674 buffer = stream_.userBuffer[0];
4675 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4676 bufferBytes *= formatBytes( stream_.userFormat );
4679 // No byte swapping necessary in DirectSound implementation.
4681 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
4682 // unsigned. So, we need to convert our signed 8-bit data here to
4684 if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
4685 for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
4687 DWORD dsBufferSize = handle->dsBufferSize[0];
4688 nextWritePointer = handle->bufferPointer[0];
4690 DWORD endWrite, leadPointer;
4692 // Find out where the read and "safe write" pointers are.
4693 result = dsBuffer->GetCurrentPosition( ¤tWritePointer, &safeWritePointer );
4694 if ( FAILED( result ) ) {
4695 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4696 errorText_ = errorStream_.str();
4697 error( RtError::SYSTEM_ERROR );
4700 // We will copy our output buffer into the region between
4701 // safeWritePointer and leadPointer. If leadPointer is not
4702 // beyond the next endWrite position, wait until it is.
4703 leadPointer = safeWritePointer + handle->dsPointerLeadTime[0];
4704 //std::cout << "safeWritePointer = " << safeWritePointer << ", leadPointer = " << leadPointer << ", nextWritePointer = " << nextWritePointer << std::endl;
4705 if ( leadPointer > dsBufferSize ) leadPointer -= dsBufferSize;
4706 if ( leadPointer < nextWritePointer ) leadPointer += dsBufferSize; // unwrap offset
4707 endWrite = nextWritePointer + bufferBytes;
4709 // Check whether the entire write region is behind the play pointer.
4710 if ( leadPointer >= endWrite ) break;
4712 // If we are here, then we must wait until the leadPointer advances
4713 // beyond the end of our next write region. We use the
4714 // Sleep() function to suspend operation until that happens.
4715 double millis = ( endWrite - leadPointer ) * 1000.0;
4716 millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
4717 if ( millis < 1.0 ) millis = 1.0;
4718 Sleep( (DWORD) millis );
4721 if ( dsPointerBetween( nextWritePointer, safeWritePointer, currentWritePointer, dsBufferSize )
4722 || dsPointerBetween( endWrite, safeWritePointer, currentWritePointer, dsBufferSize ) ) {
4723 // We've strayed into the forbidden zone ... resync the read pointer.
4724 handle->xrun[0] = true;
4725 nextWritePointer = safeWritePointer + handle->dsPointerLeadTime[0] - bufferBytes;
4726 if ( nextWritePointer >= dsBufferSize ) nextWritePointer -= dsBufferSize;
4727 handle->bufferPointer[0] = nextWritePointer;
4728 endWrite = nextWritePointer + bufferBytes;
4731 // Lock free space in the buffer
4732 result = dsBuffer->Lock( nextWritePointer, bufferBytes, &buffer1,
4733 &bufferSize1, &buffer2, &bufferSize2, 0 );
4734 if ( FAILED( result ) ) {
4735 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
4736 errorText_ = errorStream_.str();
4737 error( RtError::SYSTEM_ERROR );
4740 // Copy our buffer into the DS buffer
4741 CopyMemory( buffer1, buffer, bufferSize1 );
4742 if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
4744 // Update our buffer offset and unlock sound buffer
4745 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4746 if ( FAILED( result ) ) {
4747 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
4748 errorText_ = errorStream_.str();
4749 error( RtError::SYSTEM_ERROR );
4751 nextWritePointer = ( nextWritePointer + bufferSize1 + bufferSize2 ) % dsBufferSize;
4752 handle->bufferPointer[0] = nextWritePointer;
4754 if ( handle->drainCounter ) {
4755 handle->drainCounter++;
4760 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4762 // Setup parameters.
4763 if ( stream_.doConvertBuffer[1] ) {
4764 buffer = stream_.deviceBuffer;
4765 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
4766 bufferBytes *= formatBytes( stream_.deviceFormat[1] );
4769 buffer = stream_.userBuffer[1];
4770 bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
4771 bufferBytes *= formatBytes( stream_.userFormat );
4774 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4775 long nextReadPointer = handle->bufferPointer[1];
4776 DWORD dsBufferSize = handle->dsBufferSize[1];
4778 // Find out where the write and "safe read" pointers are.
4779 result = dsBuffer->GetCurrentPosition( ¤tReadPointer, &safeReadPointer );
4780 if ( FAILED( result ) ) {
4781 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4782 errorText_ = errorStream_.str();
4783 error( RtError::SYSTEM_ERROR );
4786 if ( safeReadPointer < (DWORD)nextReadPointer ) safeReadPointer += dsBufferSize; // unwrap offset
4787 DWORD endRead = nextReadPointer + bufferBytes;
4789 // Handling depends on whether we are INPUT or DUPLEX.
4790 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
4791 // then a wait here will drag the write pointers into the forbidden zone.
4793 // In DUPLEX mode, rather than wait, we will back off the read pointer until
4794 // it's in a safe position. This causes dropouts, but it seems to be the only
4795 // practical way to sync up the read and write pointers reliably, given the
4796 // the very complex relationship between phase and increment of the read and write
4799 // In order to minimize audible dropouts in DUPLEX mode, we will
4800 // provide a pre-roll period of 0.5 seconds in which we return
4801 // zeros from the read buffer while the pointers sync up.
4803 if ( stream_.mode == DUPLEX ) {
4804 if ( safeReadPointer < endRead ) {
4805 if ( duplexPrerollBytes <= 0 ) {
4806 // Pre-roll time over. Be more agressive.
4807 int adjustment = endRead-safeReadPointer;
4809 handle->xrun[1] = true;
4811 // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
4812 // and perform fine adjustments later.
4813 // - small adjustments: back off by twice as much.
4814 if ( adjustment >= 2*bufferBytes )
4815 nextReadPointer = safeReadPointer-2*bufferBytes;
4817 nextReadPointer = safeReadPointer-bufferBytes-adjustment;
4819 if ( nextReadPointer < 0 ) nextReadPointer += dsBufferSize;
4823 // In pre=roll time. Just do it.
4824 nextReadPointer = safeReadPointer - bufferBytes;
4825 while ( nextReadPointer < 0 ) nextReadPointer += dsBufferSize;
4827 endRead = nextReadPointer + bufferBytes;
4830 else { // mode == INPUT
4831 while ( safeReadPointer < endRead ) {
4832 // See comments for playback.
4833 double millis = (endRead - safeReadPointer) * 1000.0;
4834 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
4835 if ( millis < 1.0 ) millis = 1.0;
4836 Sleep( (DWORD) millis );
4838 // Wake up and find out where we are now.
4839 result = dsBuffer->GetCurrentPosition( ¤tReadPointer, &safeReadPointer );
4840 if ( FAILED( result ) ) {
4841 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4842 errorText_ = errorStream_.str();
4843 error( RtError::SYSTEM_ERROR );
4846 if ( safeReadPointer < (DWORD)nextReadPointer ) safeReadPointer += dsBufferSize; // unwrap offset
4850 // Lock free space in the buffer
4851 result = dsBuffer->Lock( nextReadPointer, bufferBytes, &buffer1,
4852 &bufferSize1, &buffer2, &bufferSize2, 0 );
4853 if ( FAILED( result ) ) {
4854 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
4855 errorText_ = errorStream_.str();
4856 error( RtError::SYSTEM_ERROR );
4859 if ( duplexPrerollBytes <= 0 ) {
4860 // Copy our buffer into the DS buffer
4861 CopyMemory( buffer, buffer1, bufferSize1 );
4862 if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
4865 memset( buffer, 0, bufferSize1 );
4866 if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
4867 duplexPrerollBytes -= bufferSize1 + bufferSize2;
4870 // Update our buffer offset and unlock sound buffer
4871 nextReadPointer = ( nextReadPointer + bufferSize1 + bufferSize2 ) % dsBufferSize;
4872 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4873 if ( FAILED( result ) ) {
4874 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
4875 errorText_ = errorStream_.str();
4876 error( RtError::SYSTEM_ERROR );
4878 handle->bufferPointer[1] = nextReadPointer;
4880 // No byte swapping necessary in DirectSound implementation.
4882 // If necessary, convert 8-bit data from unsigned to signed.
4883 if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
4884 for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
4886 // Do buffer conversion if necessary.
4887 if ( stream_.doConvertBuffer[1] )
4888 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
4892 MUTEX_UNLOCK( &stream_.mutex );
4894 RtApi::tickStreamTime();
4897 // Definitions for utility functions and callbacks
4898 // specific to the DirectSound implementation.
4900 extern "C" unsigned __stdcall callbackHandler( void *ptr )
4902 CallbackInfo *info = (CallbackInfo *) ptr;
4903 RtApiDs *object = (RtApiDs *) info->object;
4904 bool* isRunning = &info->isRunning;
4906 while ( *isRunning == true ) {
4907 object->callbackEvent();
4916 std::string convertTChar( LPCTSTR name )
4920 #if defined( UNICODE ) || defined( _UNICODE )
4921 // Yes, this conversion doesn't make sense for two-byte characters
4922 // but RtAudio is currently written to return an std::string of
4923 // one-byte chars for the device name.
4924 for ( unsigned int i=0; i<wcslen( name ); i++ )
4925 s.push_back( name[i] );
4927 s.append( std::string( name ) );
4933 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
4934 LPCTSTR description,
4938 bool *isInput = (bool *) lpContext;
4941 bool validDevice = false;
4942 if ( *isInput == true ) {
4944 LPDIRECTSOUNDCAPTURE object;
4946 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
4947 if ( hr != DS_OK ) return TRUE;
4949 caps.dwSize = sizeof(caps);
4950 hr = object->GetCaps( &caps );
4951 if ( hr == DS_OK ) {
4952 if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
4959 LPDIRECTSOUND object;
4960 hr = DirectSoundCreate( lpguid, &object, NULL );
4961 if ( hr != DS_OK ) return TRUE;
4963 caps.dwSize = sizeof(caps);
4964 hr = object->GetCaps( &caps );
4965 if ( hr == DS_OK ) {
4966 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
4972 // If good device, then save its name and guid.
4973 std::string name = convertTChar( description );
4974 if ( name == "Primary Sound Driver" || name == "Primary Sound Capture Driver" )
4975 name = "Default Device";
4976 if ( validDevice ) {
4977 for ( unsigned int i=0; i<dsDevices.size(); i++ ) {
4978 if ( dsDevices[i].name == name ) {
4979 dsDevices[i].found = true;
4981 dsDevices[i].id[1] = lpguid;
4982 dsDevices[i].validId[1] = true;
4985 dsDevices[i].id[0] = lpguid;
4986 dsDevices[i].validId[0] = true;
4994 device.found = true;
4996 device.id[1] = lpguid;
4997 device.validId[1] = true;
5000 device.id[0] = lpguid;
5001 device.validId[0] = true;
5003 dsDevices.push_back( device );
5009 static const char* getErrorString( int code )
5013 case DSERR_ALLOCATED:
5014 return "Already allocated";
5016 case DSERR_CONTROLUNAVAIL:
5017 return "Control unavailable";
5019 case DSERR_INVALIDPARAM:
5020 return "Invalid parameter";
5022 case DSERR_INVALIDCALL:
5023 return "Invalid call";
5026 return "Generic error";
5028 case DSERR_PRIOLEVELNEEDED:
5029 return "Priority level needed";
5031 case DSERR_OUTOFMEMORY:
5032 return "Out of memory";
5034 case DSERR_BADFORMAT:
5035 return "The sample rate or the channel format is not supported";
5037 case DSERR_UNSUPPORTED:
5038 return "Not supported";
5040 case DSERR_NODRIVER:
5043 case DSERR_ALREADYINITIALIZED:
5044 return "Already initialized";
5046 case DSERR_NOAGGREGATION:
5047 return "No aggregation";
5049 case DSERR_BUFFERLOST:
5050 return "Buffer lost";
5052 case DSERR_OTHERAPPHASPRIO:
5053 return "Another application already has priority";
5055 case DSERR_UNINITIALIZED:
5056 return "Uninitialized";
5059 return "DirectSound unknown error";
5062 //******************** End of __WINDOWS_DS__ *********************//
5066 #if defined(__LINUX_ALSA__)
5068 #include <alsa/asoundlib.h>
5071 // A structure to hold various information related to the ALSA API
5074 snd_pcm_t *handles[2];
5077 pthread_cond_t runnable;
5080 :synchronized(false) { xrun[0] = false; xrun[1] = false; }
5083 extern "C" void *alsaCallbackHandler( void * ptr );
5085 RtApiAlsa :: RtApiAlsa()
5087 // Nothing to do here.
5090 RtApiAlsa :: ~RtApiAlsa()
5092 if ( stream_.state != STREAM_CLOSED ) closeStream();
5095 unsigned int RtApiAlsa :: getDeviceCount( void )
5097 unsigned nDevices = 0;
5098 int result, subdevice, card;
5102 // Count cards and devices
5104 snd_card_next( &card );
5105 while ( card >= 0 ) {
5106 sprintf( name, "hw:%d", card );
5107 result = snd_ctl_open( &handle, name, 0 );
5109 errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5110 errorText_ = errorStream_.str();
5111 error( RtError::WARNING );
5116 result = snd_ctl_pcm_next_device( handle, &subdevice );
5118 errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
5119 errorText_ = errorStream_.str();
5120 error( RtError::WARNING );
5123 if ( subdevice < 0 )
5128 snd_ctl_close( handle );
5129 snd_card_next( &card );
5135 RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
5137 RtAudio::DeviceInfo info;
5138 info.probed = false;
5140 unsigned nDevices = 0;
5141 int result, subdevice, card;
5145 // Count cards and devices
5147 snd_card_next( &card );
5148 while ( card >= 0 ) {
5149 sprintf( name, "hw:%d", card );
5150 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5152 errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5153 errorText_ = errorStream_.str();
5154 error( RtError::WARNING );
5159 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5161 errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
5162 errorText_ = errorStream_.str();
5163 error( RtError::WARNING );
5166 if ( subdevice < 0 ) break;
5167 if ( nDevices == device ) {
5168 sprintf( name, "hw:%d,%d", card, subdevice );
5174 snd_ctl_close( chandle );
5175 snd_card_next( &card );
5178 if ( nDevices == 0 ) {
5179 errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
5180 error( RtError::INVALID_USE );
5183 if ( device >= nDevices ) {
5184 errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
5185 error( RtError::INVALID_USE );
5190 // If a stream is already open, we cannot probe the stream devices.
5191 // Thus, use the saved results.
5192 if ( stream_.state != STREAM_CLOSED &&
5193 ( stream_.device[0] == device || stream_.device[1] == device ) ) {
5194 if ( device >= devices_.size() ) {
5195 errorText_ = "RtApiAlsa::getDeviceInfo: device ID was not present before stream was opened.";
5196 error( RtError::WARNING );
5199 return devices_[ device ];
5202 int openMode = SND_PCM_ASYNC;
5203 snd_pcm_stream_t stream;
5204 snd_pcm_info_t *pcminfo;
5205 snd_pcm_info_alloca( &pcminfo );
5207 snd_pcm_hw_params_t *params;
5208 snd_pcm_hw_params_alloca( ¶ms );
5210 // First try for playback
5211 stream = SND_PCM_STREAM_PLAYBACK;
5212 snd_pcm_info_set_device( pcminfo, subdevice );
5213 snd_pcm_info_set_subdevice( pcminfo, 0 );
5214 snd_pcm_info_set_stream( pcminfo, stream );
5216 result = snd_ctl_pcm_info( chandle, pcminfo );
5218 // Device probably doesn't support playback.
5222 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
5224 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5225 errorText_ = errorStream_.str();
5226 error( RtError::WARNING );
5230 // The device is open ... fill the parameter structure.
5231 result = snd_pcm_hw_params_any( phandle, params );
5233 snd_pcm_close( phandle );
5234 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5235 errorText_ = errorStream_.str();
5236 error( RtError::WARNING );
5240 // Get output channel information.
5242 result = snd_pcm_hw_params_get_channels_max( params, &value );
5244 snd_pcm_close( phandle );
5245 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
5246 errorText_ = errorStream_.str();
5247 error( RtError::WARNING );
5250 info.outputChannels = value;
5251 snd_pcm_close( phandle );
5254 // Now try for capture
5255 stream = SND_PCM_STREAM_CAPTURE;
5256 snd_pcm_info_set_stream( pcminfo, stream );
5258 result = snd_ctl_pcm_info( chandle, pcminfo );
5259 snd_ctl_close( chandle );
5261 // Device probably doesn't support capture.
5262 if ( info.outputChannels == 0 ) return info;
5263 goto probeParameters;
5266 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5268 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5269 errorText_ = errorStream_.str();
5270 error( RtError::WARNING );
5271 if ( info.outputChannels == 0 ) return info;
5272 goto probeParameters;
5275 // The device is open ... fill the parameter structure.
5276 result = snd_pcm_hw_params_any( phandle, params );
5278 snd_pcm_close( phandle );
5279 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5280 errorText_ = errorStream_.str();
5281 error( RtError::WARNING );
5282 if ( info.outputChannels == 0 ) return info;
5283 goto probeParameters;
5286 result = snd_pcm_hw_params_get_channels_max( params, &value );
5288 snd_pcm_close( phandle );
5289 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
5290 errorText_ = errorStream_.str();
5291 error( RtError::WARNING );
5292 if ( info.outputChannels == 0 ) return info;
5293 goto probeParameters;
5295 info.inputChannels = value;
5296 snd_pcm_close( phandle );
5298 // If device opens for both playback and capture, we determine the channels.
5299 if ( info.outputChannels > 0 && info.inputChannels > 0 )
5300 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
5302 // ALSA doesn't provide default devices so we'll use the first available one.
5303 if ( device == 0 && info.outputChannels > 0 )
5304 info.isDefaultOutput = true;
5305 if ( device == 0 && info.inputChannels > 0 )
5306 info.isDefaultInput = true;
5309 // At this point, we just need to figure out the supported data
5310 // formats and sample rates. We'll proceed by opening the device in
5311 // the direction with the maximum number of channels, or playback if
5312 // they are equal. This might limit our sample rate options, but so
5315 if ( info.outputChannels >= info.inputChannels )
5316 stream = SND_PCM_STREAM_PLAYBACK;
5318 stream = SND_PCM_STREAM_CAPTURE;
5319 snd_pcm_info_set_stream( pcminfo, stream );
5321 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5323 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5324 errorText_ = errorStream_.str();
5325 error( RtError::WARNING );
5329 // The device is open ... fill the parameter structure.
5330 result = snd_pcm_hw_params_any( phandle, params );
5332 snd_pcm_close( phandle );
5333 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5334 errorText_ = errorStream_.str();
5335 error( RtError::WARNING );
5339 // Test our discrete set of sample rate values.
5340 info.sampleRates.clear();
5341 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
5342 if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 )
5343 info.sampleRates.push_back( SAMPLE_RATES[i] );
5345 if ( info.sampleRates.size() == 0 ) {
5346 snd_pcm_close( phandle );
5347 errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
5348 errorText_ = errorStream_.str();
5349 error( RtError::WARNING );
5353 // Probe the supported data formats ... we don't care about endian-ness just yet
5354 snd_pcm_format_t format;
5355 info.nativeFormats = 0;
5356 format = SND_PCM_FORMAT_S8;
5357 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5358 info.nativeFormats |= RTAUDIO_SINT8;
5359 format = SND_PCM_FORMAT_S16;
5360 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5361 info.nativeFormats |= RTAUDIO_SINT16;
5362 format = SND_PCM_FORMAT_S24;
5363 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5364 info.nativeFormats |= RTAUDIO_SINT24;
5365 format = SND_PCM_FORMAT_S32;
5366 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5367 info.nativeFormats |= RTAUDIO_SINT32;
5368 format = SND_PCM_FORMAT_FLOAT;
5369 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5370 info.nativeFormats |= RTAUDIO_FLOAT32;
5371 format = SND_PCM_FORMAT_FLOAT64;
5372 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5373 info.nativeFormats |= RTAUDIO_FLOAT64;
5375 // Check that we have at least one supported format
5376 if ( info.nativeFormats == 0 ) {
5377 errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
5378 errorText_ = errorStream_.str();
5379 error( RtError::WARNING );
5383 // Get the device name
5385 result = snd_card_get_name( card, &cardname );
5387 sprintf( name, "hw:%s,%d", cardname, subdevice );
5390 // That's all ... close the device and return
5391 snd_pcm_close( phandle );
5396 void RtApiAlsa :: saveDeviceInfo( void )
5400 unsigned int nDevices = getDeviceCount();
5401 devices_.resize( nDevices );
5402 for ( unsigned int i=0; i<nDevices; i++ )
5403 devices_[i] = getDeviceInfo( i );
5406 bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
5407 unsigned int firstChannel, unsigned int sampleRate,
5408 RtAudioFormat format, unsigned int *bufferSize,
5409 RtAudio::StreamOptions *options )
5412 #if defined(__RTAUDIO_DEBUG__)
5414 snd_output_stdio_attach(&out, stderr, 0);
5417 // I'm not using the "plug" interface ... too much inconsistent behavior.
5419 unsigned nDevices = 0;
5420 int result, subdevice, card;
5424 // Count cards and devices
5426 snd_card_next( &card );
5427 while ( card >= 0 ) {
5428 sprintf( name, "hw:%d", card );
5429 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5431 errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5432 errorText_ = errorStream_.str();
5437 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5438 if ( result < 0 ) break;
5439 if ( subdevice < 0 ) break;
5440 if ( nDevices == device ) {
5441 sprintf( name, "hw:%d,%d", card, subdevice );
5442 snd_ctl_close( chandle );
5447 snd_ctl_close( chandle );
5448 snd_card_next( &card );
5451 if ( nDevices == 0 ) {
5452 // This should not happen because a check is made before this function is called.
5453 errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
5457 if ( device >= nDevices ) {
5458 // This should not happen because a check is made before this function is called.
5459 errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
5465 // The getDeviceInfo() function will not work for a device that is
5466 // already open. Thus, we'll probe the system before opening a
5467 // stream and save the results for use by getDeviceInfo().
5468 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) // only do once
5469 this->saveDeviceInfo();
5471 snd_pcm_stream_t stream;
5472 if ( mode == OUTPUT )
5473 stream = SND_PCM_STREAM_PLAYBACK;
5475 stream = SND_PCM_STREAM_CAPTURE;
5478 int openMode = SND_PCM_ASYNC;
5479 result = snd_pcm_open( &phandle, name, stream, openMode );
5481 if ( mode == OUTPUT )
5482 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
5484 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
5485 errorText_ = errorStream_.str();
5489 // Fill the parameter structure.
5490 snd_pcm_hw_params_t *hw_params;
5491 snd_pcm_hw_params_alloca( &hw_params );
5492 result = snd_pcm_hw_params_any( phandle, hw_params );
5494 snd_pcm_close( phandle );
5495 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
5496 errorText_ = errorStream_.str();
5500 #if defined(__RTAUDIO_DEBUG__)
5501 fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
5502 snd_pcm_hw_params_dump( hw_params, out );
5505 // Set access ... check user preference.
5506 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
5507 stream_.userInterleaved = false;
5508 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5510 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5511 stream_.deviceInterleaved[mode] = true;
5514 stream_.deviceInterleaved[mode] = false;
5517 stream_.userInterleaved = true;
5518 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5520 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5521 stream_.deviceInterleaved[mode] = false;
5524 stream_.deviceInterleaved[mode] = true;
5528 snd_pcm_close( phandle );
5529 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
5530 errorText_ = errorStream_.str();
5534 // Determine how to set the device format.
5535 stream_.userFormat = format;
5536 snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
5538 if ( format == RTAUDIO_SINT8 )
5539 deviceFormat = SND_PCM_FORMAT_S8;
5540 else if ( format == RTAUDIO_SINT16 )
5541 deviceFormat = SND_PCM_FORMAT_S16;
5542 else if ( format == RTAUDIO_SINT24 )
5543 deviceFormat = SND_PCM_FORMAT_S24;
5544 else if ( format == RTAUDIO_SINT32 )
5545 deviceFormat = SND_PCM_FORMAT_S32;
5546 else if ( format == RTAUDIO_FLOAT32 )
5547 deviceFormat = SND_PCM_FORMAT_FLOAT;
5548 else if ( format == RTAUDIO_FLOAT64 )
5549 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5551 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
5552 stream_.deviceFormat[mode] = format;
5556 // The user requested format is not natively supported by the device.
5557 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5558 if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
5559 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5563 deviceFormat = SND_PCM_FORMAT_FLOAT;
5564 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5565 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5569 deviceFormat = SND_PCM_FORMAT_S32;
5570 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5571 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5575 deviceFormat = SND_PCM_FORMAT_S24;
5576 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5577 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
5581 deviceFormat = SND_PCM_FORMAT_S16;
5582 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5583 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5587 deviceFormat = SND_PCM_FORMAT_S8;
5588 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5589 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
5593 // If we get here, no supported format was found.
5594 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
5595 errorText_ = errorStream_.str();
5599 result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
5601 snd_pcm_close( phandle );
5602 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
5603 errorText_ = errorStream_.str();
5607 // Determine whether byte-swaping is necessary.
5608 stream_.doByteSwap[mode] = false;
5609 if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
5610 result = snd_pcm_format_cpu_endian( deviceFormat );
5612 stream_.doByteSwap[mode] = true;
5613 else if (result < 0) {
5614 snd_pcm_close( phandle );
5615 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
5616 errorText_ = errorStream_.str();
5621 // Set the sample rate.
5622 result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
5624 snd_pcm_close( phandle );
5625 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
5626 errorText_ = errorStream_.str();
5630 // Determine the number of channels for this device. We support a possible
5631 // minimum device channel number > than the value requested by the user.
5632 stream_.nUserChannels[mode] = channels;
5634 result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
5635 unsigned int deviceChannels = value;
5636 if ( result < 0 || deviceChannels < channels + firstChannel ) {
5637 snd_pcm_close( phandle );
5638 errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
5639 errorText_ = errorStream_.str();
5643 result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
5645 snd_pcm_close( phandle );
5646 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
5647 errorText_ = errorStream_.str();
5650 deviceChannels = value;
5651 if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
5652 stream_.nDeviceChannels[mode] = deviceChannels;
5654 // Set the device channels.
5655 result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
5657 snd_pcm_close( phandle );
5658 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
5659 errorText_ = errorStream_.str();
5663 // Set the buffer number, which in ALSA is referred to as the "period".
5664 int totalSize, dir = 0;
5665 unsigned int periods = 0;
5666 if ( options ) periods = options->numberOfBuffers;
5667 totalSize = *bufferSize * periods;
5669 // Set the buffer (or period) size.
5670 snd_pcm_uframes_t periodSize = *bufferSize;
5671 result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
5673 snd_pcm_close( phandle );
5674 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
5675 errorText_ = errorStream_.str();
5678 *bufferSize = periodSize;
5680 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
5681 else periods = totalSize / *bufferSize;
5682 // Even though the hardware might allow 1 buffer, it won't work reliably.
5683 if ( periods < 2 ) periods = 2;
5684 result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
5686 snd_pcm_close( phandle );
5687 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
5688 errorText_ = errorStream_.str();
5692 // If attempting to setup a duplex stream, the bufferSize parameter
5693 // MUST be the same in both directions!
5694 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
5695 errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
5696 errorText_ = errorStream_.str();
5700 stream_.bufferSize = *bufferSize;
5702 // Install the hardware configuration
5703 result = snd_pcm_hw_params( phandle, hw_params );
5705 snd_pcm_close( phandle );
5706 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5707 errorText_ = errorStream_.str();
5711 #if defined(__RTAUDIO_DEBUG__)
5712 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
5713 snd_pcm_hw_params_dump( hw_params, out );
5716 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
5717 snd_pcm_sw_params_t *sw_params = NULL;
5718 snd_pcm_sw_params_alloca( &sw_params );
5719 snd_pcm_sw_params_current( phandle, sw_params );
5720 snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
5721 snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, ULONG_MAX );
5722 snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
5724 // The following two settings were suggested by Theo Veenker
5725 //snd_pcm_sw_params_set_avail_min( phandle, sw_params, *bufferSize );
5726 //snd_pcm_sw_params_set_xfer_align( phandle, sw_params, 1 );
5728 // here are two options for a fix
5729 //snd_pcm_sw_params_set_silence_size( phandle, sw_params, ULONG_MAX );
5730 snd_pcm_uframes_t val;
5731 snd_pcm_sw_params_get_boundary( sw_params, &val );
5732 snd_pcm_sw_params_set_silence_size( phandle, sw_params, val );
5734 result = snd_pcm_sw_params( phandle, sw_params );
5736 snd_pcm_close( phandle );
5737 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5738 errorText_ = errorStream_.str();
5742 #if defined(__RTAUDIO_DEBUG__)
5743 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
5744 snd_pcm_sw_params_dump( sw_params, out );
5747 // Set flags for buffer conversion
5748 stream_.doConvertBuffer[mode] = false;
5749 if ( stream_.userFormat != stream_.deviceFormat[mode] )
5750 stream_.doConvertBuffer[mode] = true;
5751 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
5752 stream_.doConvertBuffer[mode] = true;
5753 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
5754 stream_.nUserChannels[mode] > 1 )
5755 stream_.doConvertBuffer[mode] = true;
5757 // Allocate the ApiHandle if necessary and then save.
5758 AlsaHandle *apiInfo = 0;
5759 if ( stream_.apiHandle == 0 ) {
5761 apiInfo = (AlsaHandle *) new AlsaHandle;
5763 catch ( std::bad_alloc& ) {
5764 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
5768 if ( pthread_cond_init( &apiInfo->runnable, NULL ) ) {
5769 errorText_ = "RtApiAlsa::probeDeviceOpen: error initializing pthread condition variable.";
5773 stream_.apiHandle = (void *) apiInfo;
5774 apiInfo->handles[0] = 0;
5775 apiInfo->handles[1] = 0;
5778 apiInfo = (AlsaHandle *) stream_.apiHandle;
5780 apiInfo->handles[mode] = phandle;
5782 // Allocate necessary internal buffers.
5783 unsigned long bufferBytes;
5784 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
5785 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
5786 if ( stream_.userBuffer[mode] == NULL ) {
5787 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
5791 if ( stream_.doConvertBuffer[mode] ) {
5793 bool makeBuffer = true;
5794 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
5795 if ( mode == INPUT ) {
5796 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5797 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
5798 if ( bufferBytes <= bytesOut ) makeBuffer = false;
5803 bufferBytes *= *bufferSize;
5804 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
5805 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
5806 if ( stream_.deviceBuffer == NULL ) {
5807 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
5813 stream_.sampleRate = sampleRate;
5814 stream_.nBuffers = periods;
5815 stream_.device[mode] = device;
5816 stream_.state = STREAM_STOPPED;
5818 // Setup the buffer conversion information structure.
5819 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
5821 // Setup thread if necessary.
5822 if ( stream_.mode == OUTPUT && mode == INPUT ) {
5823 // We had already set up an output stream.
5824 stream_.mode = DUPLEX;
5825 // Link the streams if possible.
5826 apiInfo->synchronized = false;
5827 if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
5828 apiInfo->synchronized = true;
5830 errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
5831 error( RtError::WARNING );
5835 stream_.mode = mode;
5837 // Setup callback thread.
5838 stream_.callbackInfo.object = (void *) this;
5840 // Set the thread attributes for joinable and realtime scheduling
5841 // priority (optional). The higher priority will only take affect
5842 // if the program is run as root or suid. Note, under Linux
5843 // processes with CAP_SYS_NICE privilege, a user can change
5844 // scheduling policy and priority (thus need not be root). See
5845 // POSIX "capabilities".
5846 pthread_attr_t attr;
5847 pthread_attr_init( &attr );
5848 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
5849 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
5850 if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
5851 struct sched_param param;
5852 int priority = options->priority;
5853 int min = sched_get_priority_min( SCHED_RR );
5854 int max = sched_get_priority_max( SCHED_RR );
5855 if ( priority < min ) priority = min;
5856 else if ( priority > max ) priority = max;
5857 param.sched_priority = priority;
5858 pthread_attr_setschedparam( &attr, ¶m );
5859 pthread_attr_setschedpolicy( &attr, SCHED_RR );
5862 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5864 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5867 stream_.callbackInfo.isRunning = true;
5868 result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
5869 pthread_attr_destroy( &attr );
5871 stream_.callbackInfo.isRunning = false;
5872 errorText_ = "RtApiAlsa::error creating callback thread!";
5881 pthread_cond_destroy( &apiInfo->runnable );
5882 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5883 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5885 stream_.apiHandle = 0;
5888 for ( int i=0; i<2; i++ ) {
5889 if ( stream_.userBuffer[i] ) {
5890 free( stream_.userBuffer[i] );
5891 stream_.userBuffer[i] = 0;
5895 if ( stream_.deviceBuffer ) {
5896 free( stream_.deviceBuffer );
5897 stream_.deviceBuffer = 0;
// Close the ALSA stream: wake and join the callback thread, drop any PCM
// still running, destroy the condition variable, close both PCM handles,
// release the AlsaHandle, and free the user/device buffers.
// (Brace-only / early-return lines are not shown in this listing.)
5903 void RtApiAlsa :: closeStream()
// Nothing to do if no stream is open; warn and bail out.
5905 if ( stream_.state == STREAM_CLOSED ) {
5906 errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
5907 error( RtError::WARNING );
// Tell the callback thread to exit its loop, wake it if it is blocked on
// `runnable` (which it is while the stream is stopped), then join it.
5911 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5912 stream_.callbackInfo.isRunning = false;
5913 MUTEX_LOCK( &stream_.mutex );
5914 if ( stream_.state == STREAM_STOPPED )
5915 pthread_cond_signal( &apiInfo->runnable );
5916 MUTEX_UNLOCK( &stream_.mutex );
5917 pthread_join( stream_.callbackInfo.thread, NULL );
// If still running, abort immediately: snd_pcm_drop discards pending frames.
5919 if ( stream_.state == STREAM_RUNNING ) {
5920 stream_.state = STREAM_STOPPED;
5921 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
5922 snd_pcm_drop( apiInfo->handles[0] );
5923 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
5924 snd_pcm_drop( apiInfo->handles[1] );
// Tear down the API-specific state: condvar, PCM handles, then the struct.
5928 pthread_cond_destroy( &apiInfo->runnable );
5929 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5930 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5932 stream_.apiHandle = 0;
// Free the per-direction user buffers ...
5935 for ( int i=0; i<2; i++ ) {
5936 if ( stream_.userBuffer[i] ) {
5937 free( stream_.userBuffer[i] );
5938 stream_.userBuffer[i] = 0;
// ... and the shared device (conversion) buffer.
5942 if ( stream_.deviceBuffer ) {
5943 free( stream_.deviceBuffer );
5944 stream_.deviceBuffer = 0;
// Mark the stream object fully closed.
5947 stream_.mode = UNINITIALIZED;
5948 stream_.state = STREAM_CLOSED;
// Start a stopped ALSA stream: prepare the PCM device(s) if needed, set the
// state to RUNNING, and wake the callback thread blocked on `runnable`.
5951 void RtApiAlsa :: startStream()
5953 // This method calls snd_pcm_prepare if the device isn't already in that state.
// Starting an already-running stream is only a warning, not a hard error.
5956 if ( stream_.state == STREAM_RUNNING ) {
5957 errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
5958 error( RtError::WARNING );
5962 MUTEX_LOCK( &stream_.mutex );
5965 snd_pcm_state_t state;
5966 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5967 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
// Prepare the playback device (handles[0]) unless it is already PREPARED.
5968 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5969 state = snd_pcm_state( handle[0] );
5970 if ( state != SND_PCM_STATE_PREPARED ) {
5971 result = snd_pcm_prepare( handle[0] );
// Error branch (the `result < 0` test itself is not shown in this listing).
5973 errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
5974 errorText_ = errorStream_.str();
// Prepare the capture device (handles[1]) only when the two PCMs are NOT
// linked; a linked (synchronized) pair is driven through handles[0].
5980 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5981 state = snd_pcm_state( handle[1] );
5982 if ( state != SND_PCM_STATE_PREPARED ) {
5983 result = snd_pcm_prepare( handle[1] );
5985 errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
5986 errorText_ = errorStream_.str();
5992 stream_.state = STREAM_RUNNING;
5995 MUTEX_UNLOCK( &stream_.mutex );
// Release the callback thread, which waits on `runnable` while stopped.
5997 pthread_cond_signal( &apiInfo->runnable );
// Success path returns; otherwise escalate the last ALSA failure.
5999 if ( result >= 0 ) return;
6000 error( RtError::SYSTEM_ERROR );
// Stop the stream gracefully: drain (play out) remaining output frames,
// discard pending input, and set the state to STOPPED.
6003 void RtApiAlsa :: stopStream()
6006 if ( stream_.state == STREAM_STOPPED ) {
6007 errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
6008 error( RtError::WARNING );
6012 MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: another thread may have stopped us in between.
6014 if ( stream_.state == STREAM_STOPPED ) {
6015 MUTEX_UNLOCK( &stream_.mutex );
6020 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6021 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
6022 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Linked PCMs are stopped together, so drop; otherwise drain playback.
6023 if ( apiInfo->synchronized )
6024 result = snd_pcm_drop( handle[0] );
6026 result = snd_pcm_drain( handle[0] );
6028 errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
6029 errorText_ = errorStream_.str();
// Pending capture data is not worth keeping; drop it.
6034 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
6035 result = snd_pcm_drop( handle[1] );
6037 errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
6038 errorText_ = errorStream_.str();
6044 stream_.state = STREAM_STOPPED;
6045 MUTEX_UNLOCK( &stream_.mutex );
6047 if ( result >= 0 ) return;
6048 error( RtError::SYSTEM_ERROR );
// Abort the stream immediately: unlike stopStream(), output is dropped
// (snd_pcm_drop) rather than drained, so queued frames are discarded.
6051 void RtApiAlsa :: abortStream()
6054 if ( stream_.state == STREAM_STOPPED ) {
6055 errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
6056 error( RtError::WARNING );
6060 MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: the state may have changed while we waited.
6062 if ( stream_.state == STREAM_STOPPED ) {
6063 MUTEX_UNLOCK( &stream_.mutex );
6068 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6069 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
6070 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6071 result = snd_pcm_drop( handle[0] );
6073 errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
6074 errorText_ = errorStream_.str();
// Input side is dropped only when the PCMs are not linked (a linked pair
// is stopped via handles[0] above).
6079 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
6080 result = snd_pcm_drop( handle[1] );
6082 errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
6083 errorText_ = errorStream_.str();
6089 stream_.state = STREAM_STOPPED;
6090 MUTEX_UNLOCK( &stream_.mutex );
6092 if ( result >= 0 ) return;
6093 error( RtError::SYSTEM_ERROR );
// One iteration of the audio servicing loop, run on the callback thread:
// wait while stopped, invoke the user callback with xrun status, then read
// capture data from and/or write playback data to the ALSA PCM device(s),
// handling -EPIPE (xrun) by re-preparing the device.
6096 void RtApiAlsa :: callbackEvent()
6098 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
// While the stream is stopped, block on `runnable` until startStream() or
// closeStream() signals; if we were woken for anything but RUNNING, return.
6099 if ( stream_.state == STREAM_STOPPED ) {
6100 MUTEX_LOCK( &stream_.mutex );
6101 pthread_cond_wait( &apiInfo->runnable, &stream_.mutex );
6102 if ( stream_.state != STREAM_RUNNING ) {
6103 MUTEX_UNLOCK( &stream_.mutex );
6106 MUTEX_UNLOCK( &stream_.mutex );
6109 if ( stream_.state == STREAM_CLOSED ) {
6110 errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
6111 error( RtError::WARNING );
// Invoke the user callback, reporting any under/overflow recorded by the
// previous pass (xrun[0] = output underflow, xrun[1] = input overflow).
6115 int doStopStream = 0;
6116 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
6117 double streamTime = getStreamTime();
6118 RtAudioStreamStatus status = 0;
6119 if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
6120 status |= RTAUDIO_OUTPUT_UNDERFLOW;
6121 apiInfo->xrun[0] = false;
6123 if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
6124 status |= RTAUDIO_INPUT_OVERFLOW;
6125 apiInfo->xrun[1] = false;
// Callback return: 1 => stop after this buffer, 2 => abort (branch at 6130).
6127 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
6128 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
6130 if ( doStopStream == 2 ) {
6135 MUTEX_LOCK( &stream_.mutex );
6137 // The state might change while waiting on a mutex.
6138 if ( stream_.state == STREAM_STOPPED ) goto unlock;
6144 snd_pcm_sframes_t frames;
6145 RtAudioFormat format;
6146 handle = (snd_pcm_t **) apiInfo->handles;
// ---------------- Capture (input) side ----------------
6148 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
6150 // Setup parameters.
// When conversion is needed, read into the device buffer at the device's
// channel count/format; otherwise read straight into the user buffer.
6151 if ( stream_.doConvertBuffer[1] ) {
6152 buffer = stream_.deviceBuffer;
6153 channels = stream_.nDeviceChannels[1];
6154 format = stream_.deviceFormat[1];
6157 buffer = stream_.userBuffer[1];
6158 channels = stream_.nUserChannels[1];
6159 format = stream_.userFormat;
6162 // Read samples from device in interleaved/non-interleaved format.
6163 if ( stream_.deviceInterleaved[1] )
6164 result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
// Non-interleaved: build per-channel pointers into the single buffer.
6166 void *bufs[channels];
6167 size_t offset = stream_.bufferSize * formatBytes( format );
6168 for ( int i=0; i<channels; i++ )
6169 bufs[i] = (void *) (buffer + (i * offset));
6170 result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
// A short read means error or overrun; -EPIPE + XRUN state => re-prepare.
6173 if ( result < (int) stream_.bufferSize ) {
6174 // Either an error or overrun occured.
6175 if ( result == -EPIPE ) {
6176 snd_pcm_state_t state = snd_pcm_state( handle[1] );
6177 if ( state == SND_PCM_STATE_XRUN ) {
6178 apiInfo->xrun[1] = true;
6179 result = snd_pcm_prepare( handle[1] );
6181 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
6182 errorText_ = errorStream_.str();
6186 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
6187 errorText_ = errorStream_.str();
6191 errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
6192 errorText_ = errorStream_.str();
6194 error( RtError::WARNING );
6198 // Do byte swapping if necessary.
6199 if ( stream_.doByteSwap[1] )
6200 byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
6202 // Do buffer conversion if necessary.
6203 if ( stream_.doConvertBuffer[1] )
6204 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
6206 // Check stream latency
6207 result = snd_pcm_delay( handle[1], &frames );
6208 if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
// ---------------- Playback (output) side ----------------
6213 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6215 // Setup parameters and do buffer conversion if necessary.
6216 if ( stream_.doConvertBuffer[0] ) {
6217 buffer = stream_.deviceBuffer;
6218 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
6219 channels = stream_.nDeviceChannels[0];
6220 format = stream_.deviceFormat[0];
6223 buffer = stream_.userBuffer[0];
6224 channels = stream_.nUserChannels[0];
6225 format = stream_.userFormat;
6228 // Do byte swapping if necessary.
6229 if ( stream_.doByteSwap[0] )
6230 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
6232 // Write samples to device in interleaved/non-interleaved format.
6233 if ( stream_.deviceInterleaved[0] )
6234 result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
// Non-interleaved: per-channel pointers, mirroring the read path above.
6236 void *bufs[channels];
6237 size_t offset = stream_.bufferSize * formatBytes( format );
6238 for ( int i=0; i<channels; i++ )
6239 bufs[i] = (void *) (buffer + (i * offset));
6240 result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
// A short write means error or underrun; -EPIPE + XRUN => re-prepare.
6243 if ( result < (int) stream_.bufferSize ) {
6244 // Either an error or underrun occured.
6245 if ( result == -EPIPE ) {
6246 snd_pcm_state_t state = snd_pcm_state( handle[0] );
6247 if ( state == SND_PCM_STATE_XRUN ) {
6248 apiInfo->xrun[0] = true;
6249 result = snd_pcm_prepare( handle[0] );
6251 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
6252 errorText_ = errorStream_.str();
6256 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
6257 errorText_ = errorStream_.str();
6261 errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
6262 errorText_ = errorStream_.str();
6264 error( RtError::WARNING );
6268 // Check stream latency
6269 result = snd_pcm_delay( handle[0], &frames );
6270 if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
// (The `unlock:` label targeted by the goto at 6138 falls here; the label
// line itself is not shown in this listing.)
6274 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream clock; honor a callback request to stop (return 1).
6276 RtApi::tickStreamTime();
6277 if ( doStopStream == 1 ) this->stopStream();
// Thread entry point for the ALSA callback thread. Services the stream via
// callbackEvent() in a loop until closeStream() clears isRunning.
6280 extern "C" void *alsaCallbackHandler( void *ptr )
6282 CallbackInfo *info = (CallbackInfo *) ptr;
6283 RtApiAlsa *object = (RtApiAlsa *) info->object;
6284 bool *isRunning = &info->isRunning;
6286 while ( *isRunning == true ) {
// Allow a pending pthread_cancel() to take effect between iterations.
6287 pthread_testcancel();
6288 object->callbackEvent();
6291 pthread_exit( NULL );
6294 //******************** End of __LINUX_ALSA__ *********************//
6298 #if defined(__LINUX_OSS__)
6301 #include <sys/ioctl.h>
6304 #include "soundcard.h"
6308 extern "C" void *ossCallbackHandler(void * ptr);
6310 // A structure to hold various information related to the OSS API
// implementation. (The struct header line and some members — presumably
// the xrun[2] and triggered flags referenced by the constructor — are not
// shown in this listing.)
6313 int id[2]; // device ids (file descriptors): [0] = output, [1] = input
// Condition variable the callback thread waits on while the stream is
// stopped; signaled by startStream()/closeStream().
6316 pthread_cond_t runnable;
// Default constructor: not yet triggered, null device ids, no xruns.
6319 :triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
// Default constructor: no OSS-specific initialization is required.
6322 RtApiOss :: RtApiOss()
6324 // Nothing to do here.
// Destructor: ensure any open stream is shut down before destruction.
6327 RtApiOss :: ~RtApiOss()
6329 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count OSS audio devices by querying the mixer's SNDCTL_SYSINFO ioctl.
// Returns sysinfo.numaudios; failure paths warn (early-return/close lines
// are not shown in this listing).
6332 unsigned int RtApiOss :: getDeviceCount( void )
6334 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6335 if ( mixerfd == -1 ) {
6336 errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
6337 error( RtError::WARNING );
// SNDCTL_SYSINFO is only available in OSS >= 4.0.
6341 oss_sysinfo sysinfo;
6342 if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
6344 errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
6345 error( RtError::WARNING );
6350 return sysinfo.numaudios;
// Probe an OSS device's capabilities (channels, data formats, sample
// rates) via the mixer's SNDCTL_SYSINFO / SNDCTL_AUDIOINFO ioctls and
// return them in an RtAudio::DeviceInfo.
6353 RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
6355 RtAudio::DeviceInfo info;
// probed stays false until the probe completes successfully.
6356 info.probed = false;
6358 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6359 if ( mixerfd == -1 ) {
6360 errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
6361 error( RtError::WARNING );
6365 oss_sysinfo sysinfo;
6366 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6367 if ( result == -1 ) {
6369 errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
6370 error( RtError::WARNING );
// Validate the requested device index against the system device count.
6374 unsigned nDevices = sysinfo.numaudios;
6375 if ( nDevices == 0 ) {
6377 errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
6378 error( RtError::INVALID_USE );
6381 if ( device >= nDevices ) {
6383 errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
6384 error( RtError::INVALID_USE );
6387 oss_audioinfo ainfo;
6389 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6391 if ( result == -1 ) {
6392 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6393 errorText_ = errorStream_.str();
6394 error( RtError::WARNING );
// Channel counts come from the capability flags and max_channels.
6399 if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
6400 if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
6401 if ( ainfo.caps & PCM_CAP_DUPLEX ) {
// NOTE(review): the PCM_CAP_DUPLEX test below is redundant — it is already
// established by the enclosing if at 6401.
6402 if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )
6403 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
6406 // Probe data formats ... do for input
6407 unsigned long mask = ainfo.iformats;
6408 if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
6409 info.nativeFormats |= RTAUDIO_SINT16;
6410 if ( mask & AFMT_S8 )
6411 info.nativeFormats |= RTAUDIO_SINT8;
6412 if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
6413 info.nativeFormats |= RTAUDIO_SINT32;
6414 if ( mask & AFMT_FLOAT )
6415 info.nativeFormats |= RTAUDIO_FLOAT32;
6416 if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
6417 info.nativeFormats |= RTAUDIO_SINT24;
6419 // Check that we have at least one supported format
6420 if ( info.nativeFormats == 0 ) {
6421 errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
6422 errorText_ = errorStream_.str();
6423 error( RtError::WARNING );
6427 // Probe the supported sample rates.
6428 info.sampleRates.clear();
// If the driver reports an explicit rate list, intersect it with the
// RtAudio SAMPLE_RATES table; otherwise fall back to min/max range checks.
6429 if ( ainfo.nrates ) {
6430 for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
6431 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6432 if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
6433 info.sampleRates.push_back( SAMPLE_RATES[k] );
6440 // Check min and max rate values;
6441 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6442 if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] )
6443 info.sampleRates.push_back( SAMPLE_RATES[k] );
6447 if ( info.sampleRates.size() == 0 ) {
6448 errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
6449 errorText_ = errorStream_.str();
6450 error( RtError::WARNING );
6454 info.name = ainfo.name;
// Open and configure an OSS device for the given mode/format/rate, allocate
// buffers and the OssHandle, and spawn the callback thread. Returns true on
// success (FAILURE/early-return lines are not shown in this listing).
6461 bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
6462 unsigned int firstChannel, unsigned int sampleRate,
6463 RtAudioFormat format, unsigned int *bufferSize,
6464 RtAudio::StreamOptions *options )
// ---- Re-validate the device via the mixer, as in getDeviceInfo(). ----
6466 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6467 if ( mixerfd == -1 ) {
6468 errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
6472 oss_sysinfo sysinfo;
6473 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6474 if ( result == -1 ) {
6476 errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
6480 unsigned nDevices = sysinfo.numaudios;
6481 if ( nDevices == 0 ) {
6482 // This should not happen because a check is made before this function is called.
6484 errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
6488 if ( device >= nDevices ) {
6489 // This should not happen because a check is made before this function is called.
6491 errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
6495 oss_audioinfo ainfo;
6497 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6499 if ( result == -1 ) {
6500 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6501 errorText_ = errorStream_.str();
6505 // Check if device supports input or output
6506 if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
6507 ( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
6508 if ( mode == OUTPUT )
6509 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
6511 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
6512 errorText_ = errorStream_.str();
// ---- Choose open flags; OSS duplex on one device needs close + reopen. ----
6517 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6518 if ( mode == OUTPUT )
6520 else { // mode == INPUT
6521 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
6522 // We just set the same device for playback ... close and reopen for duplex (OSS only).
6523 close( handle->id[0] );
6525 if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
6526 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
6527 errorText_ = errorStream_.str();
6530 // Check that the number previously set channels is the same.
6531 if ( stream_.nUserChannels[0] != channels ) {
6532 errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
6533 errorText_ = errorStream_.str();
6542 // Set exclusive access if specified.
6543 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
6545 // Try to open the device.
6547 fd = open( ainfo.devnode, flags, 0 );
6549 if ( errno == EBUSY )
6550 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
6552 errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
6553 errorText_ = errorStream_.str();
6557 // For duplex operation, specifically set this mode (this doesn't seem to work).
// NOTE(review): `flags | O_RDWR` is a bitwise OR with a nonzero constant
// and is therefore always true, so SETDUPLEX is attempted unconditionally.
// Presumably `flags == O_RDWR` (or `flags & O_RDWR`) was intended — confirm
// against upstream before changing, since a fix alters which devices get
// the ioctl.
6559 if ( flags | O_RDWR ) {
6560 result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
6561 if ( result == -1) {
6562 errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
6563 errorText_ = errorStream_.str();
6569 // Check the device channel support.
6570 stream_.nUserChannels[mode] = channels;
6571 if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
6573 errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
6574 errorText_ = errorStream_.str();
6578 // Set the number of channels.
6579 int deviceChannels = channels + firstChannel;
6580 result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
6581 if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
6583 errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
6584 errorText_ = errorStream_.str();
6587 stream_.nDeviceChannels[mode] = deviceChannels;
6589 // Get the data format mask
6591 result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
6592 if ( result == -1 ) {
6594 errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
6595 errorText_ = errorStream_.str();
6599 // Determine how to set the device format.
// Prefer the user's requested format natively; *_OE (opposite-endian)
// variants are accepted with byte swapping enabled.
6600 stream_.userFormat = format;
6601 int deviceFormat = -1;
6602 stream_.doByteSwap[mode] = false;
6603 if ( format == RTAUDIO_SINT8 ) {
6604 if ( mask & AFMT_S8 ) {
6605 deviceFormat = AFMT_S8;
6606 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6609 else if ( format == RTAUDIO_SINT16 ) {
6610 if ( mask & AFMT_S16_NE ) {
6611 deviceFormat = AFMT_S16_NE;
6612 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6614 else if ( mask & AFMT_S16_OE ) {
6615 deviceFormat = AFMT_S16_OE;
6616 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6617 stream_.doByteSwap[mode] = true;
6620 else if ( format == RTAUDIO_SINT24 ) {
6621 if ( mask & AFMT_S24_NE ) {
6622 deviceFormat = AFMT_S24_NE;
6623 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6625 else if ( mask & AFMT_S24_OE ) {
6626 deviceFormat = AFMT_S24_OE;
6627 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6628 stream_.doByteSwap[mode] = true;
6631 else if ( format == RTAUDIO_SINT32 ) {
6632 if ( mask & AFMT_S32_NE ) {
6633 deviceFormat = AFMT_S32_NE;
6634 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6636 else if ( mask & AFMT_S32_OE ) {
6637 deviceFormat = AFMT_S32_OE;
6638 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6639 stream_.doByteSwap[mode] = true;
// Requested format unavailable: fall back through native-endian then
// opposite-endian 16/32/24-bit, finally 8-bit.
6643 if ( deviceFormat == -1 ) {
6644 // The user requested format is not natively supported by the device.
6645 if ( mask & AFMT_S16_NE ) {
6646 deviceFormat = AFMT_S16_NE;
6647 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6649 else if ( mask & AFMT_S32_NE ) {
6650 deviceFormat = AFMT_S32_NE;
6651 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6653 else if ( mask & AFMT_S24_NE ) {
6654 deviceFormat = AFMT_S24_NE;
6655 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6657 else if ( mask & AFMT_S16_OE ) {
6658 deviceFormat = AFMT_S16_OE;
6659 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6660 stream_.doByteSwap[mode] = true;
6662 else if ( mask & AFMT_S32_OE ) {
6663 deviceFormat = AFMT_S32_OE;
6664 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6665 stream_.doByteSwap[mode] = true;
6667 else if ( mask & AFMT_S24_OE ) {
6668 deviceFormat = AFMT_S24_OE;
6669 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6670 stream_.doByteSwap[mode] = true;
6672 else if ( mask & AFMT_S8) {
6673 deviceFormat = AFMT_S8;
6674 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6678 if ( stream_.deviceFormat[mode] == 0 ) {
6679 // This really shouldn't happen ...
6681 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
6682 errorText_ = errorStream_.str();
6686 // Set the data format.
// The ioctl may negotiate a different format; treat that as failure.
6687 int temp = deviceFormat;
6688 result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
6689 if ( result == -1 || deviceFormat != temp ) {
6691 errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
6692 errorText_ = errorStream_.str();
6696 // Attempt to set the buffer size. According to OSS, the minimum
6697 // number of buffers is two. The supposed minimum buffer size is 16
6698 // bytes, so that will be our lower bound. The argument to this
6699 // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
6700 // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
6701 // We'll check the actual value used near the end of the setup
6703 int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
6704 if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
6706 if ( options ) buffers = options->numberOfBuffers;
6707 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
6708 if ( buffers < 2 ) buffers = 3;
// log10(x)/log10(2) computes log2(ossBufferBytes) for the SSSS field.
6709 temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
6710 result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
6711 if ( result == -1 ) {
6713 errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
6714 errorText_ = errorStream_.str();
6717 stream_.nBuffers = buffers;
6719 // Save buffer size (in sample frames).
6720 *bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
6721 stream_.bufferSize = *bufferSize;
6723 // Set the sample rate.
6724 int srate = sampleRate;
6725 result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
6726 if ( result == -1 ) {
6728 errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
6729 errorText_ = errorStream_.str();
6733 // Verify the sample rate setup worked.
// NOTE(review): `srate - sampleRate` mixes int with unsigned int, so the
// subtraction is done in unsigned arithmetic before abs(); if the device
// negotiated a rate below the request the intermediate wraps to a huge
// value. The `> 100` tolerance check likely still rejects as intended on
// common platforms, but the expression deserves a cast — confirm.
6734 if ( abs( srate - sampleRate ) > 100 ) {
6736 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
6737 errorText_ = errorStream_.str();
6740 stream_.sampleRate = sampleRate;
6742 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
6743 // We're doing duplex setup here.
6744 stream_.deviceFormat[0] = stream_.deviceFormat[1];
6745 stream_.nDeviceChannels[0] = deviceChannels;
6748 // Set interleaving parameters.
6749 stream_.userInterleaved = true;
6750 stream_.deviceInterleaved[mode] = true;
6751 if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
6752 stream_.userInterleaved = false;
6754 // Set flags for buffer conversion
6755 stream_.doConvertBuffer[mode] = false;
6756 if ( stream_.userFormat != stream_.deviceFormat[mode] )
6757 stream_.doConvertBuffer[mode] = true;
6758 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
6759 stream_.doConvertBuffer[mode] = true;
6760 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
6761 stream_.nUserChannels[mode] > 1 )
6762 stream_.doConvertBuffer[mode] = true;
6764 // Allocate the stream handles if necessary and then save.
6765 if ( stream_.apiHandle == 0 ) {
6767 handle = new OssHandle;
6769 catch ( std::bad_alloc& ) {
6770 errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
6774 if ( pthread_cond_init( &handle->runnable, NULL ) ) {
6775 errorText_ = "RtApiOss::probeDeviceOpen: error initializing pthread condition variable.";
6779 stream_.apiHandle = (void *) handle;
6782 handle = (OssHandle *) stream_.apiHandle;
6784 handle->id[mode] = fd;
6786 // Allocate necessary internal buffers.
6787 unsigned long bufferBytes;
6788 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
6789 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
6790 if ( stream_.userBuffer[mode] == NULL ) {
6791 errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
// A device (conversion) buffer is only needed when conversion is enabled;
// for duplex, reuse the output-side buffer when it is already big enough.
6795 if ( stream_.doConvertBuffer[mode] ) {
6797 bool makeBuffer = true;
6798 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
6799 if ( mode == INPUT ) {
6800 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
6801 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
6802 if ( bufferBytes <= bytesOut ) makeBuffer = false;
6807 bufferBytes *= *bufferSize;
6808 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
6809 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
6810 if ( stream_.deviceBuffer == NULL ) {
6811 errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
6817 stream_.device[mode] = device;
6818 stream_.state = STREAM_STOPPED;
6820 // Setup the buffer conversion information structure.
6821 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
6823 // Setup thread if necessary.
6824 if ( stream_.mode == OUTPUT && mode == INPUT ) {
6825 // We had already set up an output stream.
6826 stream_.mode = DUPLEX;
6827 if ( stream_.device[0] == device ) handle->id[0] = fd;
6830 stream_.mode = mode;
6832 // Setup callback thread.
6833 stream_.callbackInfo.object = (void *) this;
6835 // Set the thread attributes for joinable and realtime scheduling
6836 // priority. The higher priority will only take affect if the
6837 // program is run as root or suid.
6838 pthread_attr_t attr;
6839 pthread_attr_init( &attr );
6840 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
6841 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
6842 if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
// Clamp the requested priority into the valid SCHED_RR range.
6843 struct sched_param param;
6844 int priority = options->priority;
6845 int min = sched_get_priority_min( SCHED_RR );
6846 int max = sched_get_priority_max( SCHED_RR );
6847 if ( priority < min ) priority = min;
6848 else if ( priority > max ) priority = max;
6849 param.sched_priority = priority;
6850 pthread_attr_setschedparam( &attr, &param );
6851 pthread_attr_setschedpolicy( &attr, SCHED_RR );
6854 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6856 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6859 stream_.callbackInfo.isRunning = true;
6860 result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
6861 pthread_attr_destroy( &attr );
6863 stream_.callbackInfo.isRunning = false;
6864 errorText_ = "RtApiOss::error creating callback thread!";
// ---- Error cleanup: release handle, close fds, free buffers. ----
6873 pthread_cond_destroy( &handle->runnable );
6874 if ( handle->id[0] ) close( handle->id[0] );
6875 if ( handle->id[1] ) close( handle->id[1] );
6877 stream_.apiHandle = 0;
6880 for ( int i=0; i<2; i++ ) {
6881 if ( stream_.userBuffer[i] ) {
6882 free( stream_.userBuffer[i] );
6883 stream_.userBuffer[i] = 0;
6887 if ( stream_.deviceBuffer ) {
6888 free( stream_.deviceBuffer );
6889 stream_.deviceBuffer = 0;
// Close the OSS stream: stop the callback thread, halt and close the
// device file descriptor(s), and release all stream buffers/state.
// NOTE(review): this excerpt is elided (gaps in the embedded line
// numbers) — closing braces and some branches are missing from view.
6895 void RtApiOss :: closeStream()
// Warn (don't throw) when there is nothing to close.
6897 if ( stream_.state == STREAM_CLOSED ) {
6898 errorText_ = "RtApiOss::closeStream(): no open stream to close!";
6899 error( RtError::WARNING );
6903 OssHandle *handle = (OssHandle *) stream_.apiHandle;
// Tell the callback thread to leave its loop; if it is parked on the
// runnable condition (stopped stream), wake it first, then join.
6904 stream_.callbackInfo.isRunning = false;
6905 MUTEX_LOCK( &stream_.mutex );
6906 if ( stream_.state == STREAM_STOPPED )
6907 pthread_cond_signal( &handle->runnable );
6908 MUTEX_UNLOCK( &stream_.mutex );
6909 pthread_join( stream_.callbackInfo.thread, NULL );
// If still running, halt output and/or input before closing the fds.
6911 if ( stream_.state == STREAM_RUNNING ) {
6912 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
6913 ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
// presumably inside an elided `else` / input-mode branch — confirm
6915 ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6916 stream_.state = STREAM_STOPPED;
// Destroy the condition variable and close any open device fds
// (id[0] = output, id[1] = input — see stopStream()/callbackEvent()).
6920 pthread_cond_destroy( &handle->runnable );
6921 if ( handle->id[0] ) close( handle->id[0] );
6922 if ( handle->id[1] ) close( handle->id[1] );
6924 stream_.apiHandle = 0;
// Free the per-direction user buffers and the shared device buffer.
6927 for ( int i=0; i<2; i++ ) {
6928 if ( stream_.userBuffer[i] ) {
6929 free( stream_.userBuffer[i] );
6930 stream_.userBuffer[i] = 0;
6934 if ( stream_.deviceBuffer ) {
6935 free( stream_.deviceBuffer );
6936 stream_.deviceBuffer = 0;
// Mark the stream fully closed.
6939 stream_.mode = UNINITIALIZED;
6940 stream_.state = STREAM_CLOSED;
// Mark the stream running and wake the callback thread waiting on the
// runnable condition. OSS itself starts as soon as samples are fed,
// so no device-level start command is needed.
6943 void RtApiOss :: startStream()
6946 if ( stream_.state == STREAM_RUNNING ) {
6947 errorText_ = "RtApiOss::startStream(): the stream is already running!";
6948 error( RtError::WARNING );
6952 MUTEX_LOCK( &stream_.mutex );
6954 stream_.state = STREAM_RUNNING;
6956 // No need to do anything else here ... OSS automatically starts
6957 // when fed samples.
6959 MUTEX_UNLOCK( &stream_.mutex );
// Wake the callback thread parked in callbackEvent()'s cond-wait.
6961 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6962 pthread_cond_signal( &handle->runnable );
// Stop the stream gracefully: drain the output by writing several
// buffers of silence, then halt the device(s) with SNDCTL_DSP_HALT.
// NOTE(review): excerpt is elided — some braces/`else` lines and the
// declarations of `result`, `buffer`, `samples` are missing from view.
6965 void RtApiOss :: stopStream()
6968 if ( stream_.state == STREAM_STOPPED ) {
6969 errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
6970 error( RtError::WARNING );
6974 MUTEX_LOCK( &stream_.mutex );
6976 // The state might change while waiting on a mutex.
6977 if ( stream_.state == STREAM_STOPPED ) {
6978 MUTEX_UNLOCK( &stream_.mutex );
6983 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6984 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6986 // Flush the output with zeros a few times.
6989 RtAudioFormat format;
// Pick whichever buffer actually feeds the device: the converted
// device buffer when conversion is active, else the user buffer.
6991 if ( stream_.doConvertBuffer[0] ) {
6992 buffer = stream_.deviceBuffer;
6993 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
6994 format = stream_.deviceFormat[0];
6997 buffer = stream_.userBuffer[0];
6998 samples = stream_.bufferSize * stream_.nUserChannels[0];
6999 format = stream_.userFormat;
// Write nBuffers+1 buffers of silence so already-queued audio plays
// out before the halt (avoids clipping the tail of the output).
7002 memset( buffer, 0, samples * formatBytes(format) );
7003 for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
7004 result = write( handle->id[0], buffer, samples * formatBytes(format) );
7005 if ( result == -1 ) {
7006 errorText_ = "RtApiOss::stopStream: audio write error.";
7007 error( RtError::WARNING );
7011 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
7012 if ( result == -1 ) {
7013 errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
7014 errorText_ = errorStream_.str();
// Reset so a duplex restart re-primes the trigger (see callbackEvent).
7017 handle->triggered = false;
// Halt the input side too, unless duplex on a single shared fd
// (already halted above).
7020 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
7021 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
7022 if ( result == -1 ) {
// NOTE(review): this input-side error reports stream_.device[0];
// stream_.device[1] looks intended — confirm against upstream.
7023 errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
7024 errorText_ = errorStream_.str();
7030 stream_.state = STREAM_STOPPED;
7031 MUTEX_UNLOCK( &stream_.mutex );
// Raise SYSTEM_ERROR only after the mutex has been released.
7033 if ( result != -1 ) return;
7034 error( RtError::SYSTEM_ERROR );
// Stop the stream immediately: same shape as stopStream() but without
// first draining the output with silence.
// NOTE(review): excerpt is elided — some braces/`else` lines and the
// declaration of `result` are missing from view.
7037 void RtApiOss :: abortStream()
7040 if ( stream_.state == STREAM_STOPPED ) {
7041 errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
7042 error( RtError::WARNING );
7046 MUTEX_LOCK( &stream_.mutex );
7048 // The state might change while waiting on a mutex.
7049 if ( stream_.state == STREAM_STOPPED ) {
7050 MUTEX_UNLOCK( &stream_.mutex );
// Halt output (id[0]) and, when on a separate fd, input (id[1]).
7055 OssHandle *handle = (OssHandle *) stream_.apiHandle;
7056 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
7057 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
7058 if ( result == -1 ) {
7059 errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
7060 errorText_ = errorStream_.str();
// Reset so a duplex restart re-primes the trigger (see callbackEvent).
7063 handle->triggered = false;
7066 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
7067 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
7068 if ( result == -1 ) {
// NOTE(review): input-side error reports stream_.device[0];
// stream_.device[1] looks intended — confirm against upstream.
7069 errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
7070 errorText_ = errorStream_.str();
7076 stream_.state = STREAM_STOPPED;
7077 MUTEX_UNLOCK( &stream_.mutex );
// Raise SYSTEM_ERROR only after the mutex has been released.
7079 if ( result != -1 ) return;
7080 error( RtError::SYSTEM_ERROR );
// One period of stream processing, driven by ossCallbackHandler():
// park while the stream is stopped, invoke the user callback, then
// write output to and/or read input from the device, with optional
// format conversion and byte swapping in each direction.
// NOTE(review): excerpt is elided — braces, `return`s, the `unlock:`
// label, and declarations of `result`/`buffer`/`samples`/`trig` are
// missing from view.
7083 void RtApiOss :: callbackEvent()
7085 OssHandle *handle = (OssHandle *) stream_.apiHandle;
// While stopped, block on the runnable condition until startStream()
// (or closeStream()) signals; bail out if not transitioned to RUNNING.
7086 if ( stream_.state == STREAM_STOPPED ) {
7087 MUTEX_LOCK( &stream_.mutex );
7088 pthread_cond_wait( &handle->runnable, &stream_.mutex );
7089 if ( stream_.state != STREAM_RUNNING ) {
7090 MUTEX_UNLOCK( &stream_.mutex );
7093 MUTEX_UNLOCK( &stream_.mutex );
7096 if ( stream_.state == STREAM_CLOSED ) {
7097 errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
7098 error( RtError::WARNING );
7102 // Invoke user callback to get fresh output data.
7103 int doStopStream = 0;
7104 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
7105 double streamTime = getStreamTime();
// Report any xrun flagged since the last callback, then clear the
// flags (xrun[0] = output underflow, xrun[1] = input overflow).
7106 RtAudioStreamStatus status = 0;
7107 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
7108 status |= RTAUDIO_OUTPUT_UNDERFLOW;
7109 handle->xrun[0] = false;
7111 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
7112 status |= RTAUDIO_INPUT_OVERFLOW;
7113 handle->xrun[1] = false;
7115 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
7116 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
// Callback return value 2 requests an immediate abort.
7117 if ( doStopStream == 2 ) {
7118 this->abortStream();
7122 MUTEX_LOCK( &stream_.mutex );
7124 // The state might change while waiting on a mutex.
7125 if ( stream_.state == STREAM_STOPPED ) goto unlock;
7130 RtAudioFormat format;
// ---- Output (playback) side ----
7132 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
7134 // Setup parameters and do buffer conversion if necessary.
7135 if ( stream_.doConvertBuffer[0] ) {
7136 buffer = stream_.deviceBuffer;
7137 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
7138 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
7139 format = stream_.deviceFormat[0];
7142 buffer = stream_.userBuffer[0];
7143 samples = stream_.bufferSize * stream_.nUserChannels[0];
7144 format = stream_.userFormat;
7147 // Do byte swapping if necessary.
7148 if ( stream_.doByteSwap[0] )
7149 byteSwapBuffer( buffer, samples, format );
// First duplex write: prime the device with output while triggers are
// off, then enable input and output together via SNDCTL_DSP_SETTRIGGER
// so both directions start in sync.
7151 if ( stream_.mode == DUPLEX && handle->triggered == false ) {
7153 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
7154 result = write( handle->id[0], buffer, samples * formatBytes(format) );
7155 trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
7156 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
7157 handle->triggered = true;
7160 // Write samples to device.
7161 result = write( handle->id[0], buffer, samples * formatBytes(format) );
7163 if ( result == -1 ) {
7164 // We'll assume this is an underrun, though there isn't a
7165 // specific means for determining that.
7166 handle->xrun[0] = true;
7167 errorText_ = "RtApiOss::callbackEvent: audio write error.";
7168 error( RtError::WARNING );
7169 // Continue on to input section.
// ---- Input (capture) side ----
7173 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
7175 // Setup parameters.
7176 if ( stream_.doConvertBuffer[1] ) {
7177 buffer = stream_.deviceBuffer;
7178 samples = stream_.bufferSize * stream_.nDeviceChannels[1];
7179 format = stream_.deviceFormat[1];
7182 buffer = stream_.userBuffer[1];
7183 samples = stream_.bufferSize * stream_.nUserChannels[1];
7184 format = stream_.userFormat;
7187 // Read samples from device.
7188 result = read( handle->id[1], buffer, samples * formatBytes(format) );
7190 if ( result == -1 ) {
7191 // We'll assume this is an overrun, though there isn't a
7192 // specific means for determining that.
7193 handle->xrun[1] = true;
7194 errorText_ = "RtApiOss::callbackEvent: audio read error.";
7195 error( RtError::WARNING );
7199 // Do byte swapping if necessary.
7200 if ( stream_.doByteSwap[1] )
7201 byteSwapBuffer( buffer, samples, format );
7203 // Do buffer conversion if necessary.
7204 if ( stream_.doConvertBuffer[1] )
7205 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
// (elided `unlock:` label — the goto above lands here)
7209 MUTEX_UNLOCK( &stream_.mutex );
7211 RtApi::tickStreamTime();
// Callback return value 1 requests a graceful (draining) stop.
7212 if ( doStopStream == 1 ) this->stopStream();
7215 extern "C" void *ossCallbackHandler( void *ptr )
7217 CallbackInfo *info = (CallbackInfo *) ptr;
7218 RtApiOss *object = (RtApiOss *) info->object;
7219 bool *isRunning = &info->isRunning;
7221 while ( *isRunning == true ) {
7222 pthread_testcancel();
7223 object->callbackEvent();
7226 pthread_exit( NULL );
7229 //******************** End of __LINUX_OSS__ *********************//
7233 // *************************************************** //
7235 // Protected common (OS-independent) RtAudio methods.
7237 // *************************************************** //
7239 // This method can be modified to control the behavior of error
7240 // message printing.
// Central error reporter: warnings go to stderr (when showWarnings_
// is enabled); other error types are thrown as RtError exceptions.
// NOTE(review): an elided line (presumably `else if ( type !=
// RtError::WARNING )`) sits between the print and the throw — confirm.
7241 void RtApi :: error( RtError::Type type )
7243 errorStream_.str(""); // clear the ostringstream
7244 if ( type == RtError::WARNING && showWarnings_ == true )
7245 std::cerr << '\n' << errorText_ << "\n\n";
7247 throw( RtError( errorText_, type ) );
// Guard used by public stream methods: throws INVALID_USE (via
// error()) when no stream is currently open.
7250 void RtApi :: verifyStream()
7252 if ( stream_.state == STREAM_CLOSED ) {
7253 errorText_ = "RtApi:: a stream is not open!";
7254 error( RtError::INVALID_USE );
// Reset every field of stream_ to its idle/closed default, including
// the per-direction entries (index 0 = output, 1 = input) and the
// buffer-conversion records used by convertBuffer().
7258 void RtApi :: clearStreamInfo()
7260 stream_.mode = UNINITIALIZED;
7261 stream_.state = STREAM_CLOSED;
7262 stream_.sampleRate = 0;
7263 stream_.bufferSize = 0;
7264 stream_.nBuffers = 0;
7265 stream_.userFormat = 0;
7266 stream_.userInterleaved = true;
7267 stream_.streamTime = 0.0;
7268 stream_.apiHandle = 0;
7269 stream_.deviceBuffer = 0;
7270 stream_.callbackInfo.callback = 0;
7271 stream_.callbackInfo.userData = 0;
7272 stream_.callbackInfo.isRunning = false;
7273 for ( int i=0; i<2; i++ ) {
// 11111 appears to be a "no device" sentinel id — TODO confirm
// against the device-probing code.
7274 stream_.device[i] = 11111;
7275 stream_.doConvertBuffer[i] = false;
7276 stream_.deviceInterleaved[i] = true;
7277 stream_.doByteSwap[i] = false;
7278 stream_.nUserChannels[i] = 0;
7279 stream_.nDeviceChannels[i] = 0;
7280 stream_.channelOffset[i] = 0;
7281 stream_.deviceFormat[i] = 0;
7282 stream_.latency[i] = 0;
7283 stream_.userBuffer[i] = 0;
7284 stream_.convertInfo[i].channels = 0;
7285 stream_.convertInfo[i].inJump = 0;
7286 stream_.convertInfo[i].outJump = 0;
7287 stream_.convertInfo[i].inFormat = 0;
7288 stream_.convertInfo[i].outFormat = 0;
7289 stream_.convertInfo[i].inOffset.clear();
7290 stream_.convertInfo[i].outOffset.clear();
// Return the size in bytes of one sample of `format`.
// NOTE(review): the `return` statements are elided from this excerpt;
// an unrecognized format falls through to the warning below.
// SINT24 shares a branch with SINT32/FLOAT32, consistent with
// convertBuffer() treating 24-bit samples as Int32 words.
7294 unsigned int RtApi :: formatBytes( RtAudioFormat format )
7296 if ( format == RTAUDIO_SINT16 )
7298 else if ( format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
7299 format == RTAUDIO_FLOAT32 )
7301 else if ( format == RTAUDIO_FLOAT64 )
7303 else if ( format == RTAUDIO_SINT8 )
7306 errorText_ = "RtApi::formatBytes: undefined format.";
7307 error( RtError::WARNING );
// Populate stream_.convertInfo[mode] with the jump sizes (frame
// strides), channel count, and per-channel offsets convertBuffer()
// uses to move samples between user and device buffers:
// OUTPUT converts user -> device, INPUT converts device -> user.
// firstChannel shifts the device-side offsets for channel-offset
// streams.
7312 void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
7314 if ( mode == INPUT ) { // convert device to user buffer
7315 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
7316 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
7317 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
7318 stream_.convertInfo[mode].outFormat = stream_.userFormat;
7320 else { // convert user to device buffer
7321 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
7322 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
7323 stream_.convertInfo[mode].inFormat = stream_.userFormat;
7324 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
// Convert only as many channels as both sides actually have.
7327 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
7328 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
7330 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
7332 // Set up the interleave/deinterleave offsets.
// Interleaved data strides by the channel count per frame (jump);
// non-interleaved data is planar: each channel is a contiguous run of
// bufferSize samples, hence the `k * bufferSize` offsets and jump=1.
7333 if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
7334 if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
7335 ( mode == INPUT && stream_.userInterleaved ) ) {
7336 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7337 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
7338 stream_.convertInfo[mode].outOffset.push_back( k );
7339 stream_.convertInfo[mode].inJump = 1;
7343 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7344 stream_.convertInfo[mode].inOffset.push_back( k );
7345 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
7346 stream_.convertInfo[mode].outJump = 1;
7350 else { // no (de)interleaving
7351 if ( stream_.userInterleaved ) {
7352 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7353 stream_.convertInfo[mode].inOffset.push_back( k );
7354 stream_.convertInfo[mode].outOffset.push_back( k );
7358 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7359 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
7360 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
7361 stream_.convertInfo[mode].inJump = 1;
7362 stream_.convertInfo[mode].outJump = 1;
7367 // Add channel offset.
// Shift the device-side offsets (out for OUTPUT, in for INPUT) by
// firstChannel — sample-wise when interleaved, plane-wise otherwise.
7368 if ( firstChannel > 0 ) {
7369 if ( stream_.deviceInterleaved[mode] ) {
7370 if ( mode == OUTPUT ) {
7371 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7372 stream_.convertInfo[mode].outOffset[k] += firstChannel;
7375 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7376 stream_.convertInfo[mode].inOffset[k] += firstChannel;
7380 if ( mode == OUTPUT ) {
7381 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7382 stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
7385 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7386 stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
// Convert one buffer of audio between formats per the ConvertInfo
// built by setConvertInfo(): outer branch on the OUTPUT format, inner
// branch on the INPUT format; each inner loop walks bufferSize frames
// and `channels` channels using the precomputed offsets/jumps.
// Integer->float conversions use a +0.5 offset and a half-step scale
// so the full integer range maps symmetrically onto [-1, +1].
// NOTE(review): excerpt is elided — closing braces, `return`s, and
// the declarations of the channel index `j` and `scale` are missing
// from view.
7392 void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
7394 // This function does format conversion, input/output channel compensation, and
7395 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
7396 // the upper three bytes of a 32-bit integer.
7398 // Clear our device buffer when in/out duplex device channels are different
7399 if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
7400 ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
7401 memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
// ---- Output format: 64-bit float ----
7404 if (info.outFormat == RTAUDIO_FLOAT64) {
7406 Float64 *out = (Float64 *)outBuffer;
7408 if (info.inFormat == RTAUDIO_SINT8) {
7409 signed char *in = (signed char *)inBuffer;
7410 scale = 1.0 / 127.5;
7411 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7412 for (j=0; j<info.channels; j++) {
7413 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7414 out[info.outOffset[j]] += 0.5;
7415 out[info.outOffset[j]] *= scale;
7418 out += info.outJump;
7421 else if (info.inFormat == RTAUDIO_SINT16) {
7422 Int16 *in = (Int16 *)inBuffer;
7423 scale = 1.0 / 32767.5;
7424 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7425 for (j=0; j<info.channels; j++) {
7426 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7427 out[info.outOffset[j]] += 0.5;
7428 out[info.outOffset[j]] *= scale;
7431 out += info.outJump;
7434 else if (info.inFormat == RTAUDIO_SINT24) {
7435 Int32 *in = (Int32 *)inBuffer;
7436 scale = 1.0 / 8388607.5;
7437 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7438 for (j=0; j<info.channels; j++) {
// NOTE(review): masking with 0x00ffffff keeps the LOWER three bytes
// and discards the sign of negative 24-bit samples — this contradicts
// the "upper three bytes" comment above; verify against upstream.
7439 out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0x00ffffff);
7440 out[info.outOffset[j]] += 0.5;
7441 out[info.outOffset[j]] *= scale;
7444 out += info.outJump;
7447 else if (info.inFormat == RTAUDIO_SINT32) {
7448 Int32 *in = (Int32 *)inBuffer;
7449 scale = 1.0 / 2147483647.5;
7450 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7451 for (j=0; j<info.channels; j++) {
7452 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7453 out[info.outOffset[j]] += 0.5;
7454 out[info.outOffset[j]] *= scale;
7457 out += info.outJump;
7460 else if (info.inFormat == RTAUDIO_FLOAT32) {
7461 Float32 *in = (Float32 *)inBuffer;
7462 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7463 for (j=0; j<info.channels; j++) {
7464 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7467 out += info.outJump;
7470 else if (info.inFormat == RTAUDIO_FLOAT64) {
7471 // Channel compensation and/or (de)interleaving only.
7472 Float64 *in = (Float64 *)inBuffer;
7473 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7474 for (j=0; j<info.channels; j++) {
7475 out[info.outOffset[j]] = in[info.inOffset[j]];
7478 out += info.outJump;
// ---- Output format: 32-bit float ----
7482 else if (info.outFormat == RTAUDIO_FLOAT32) {
7484 Float32 *out = (Float32 *)outBuffer;
7486 if (info.inFormat == RTAUDIO_SINT8) {
7487 signed char *in = (signed char *)inBuffer;
7488 scale = (Float32) ( 1.0 / 127.5 );
7489 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7490 for (j=0; j<info.channels; j++) {
7491 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7492 out[info.outOffset[j]] += 0.5;
7493 out[info.outOffset[j]] *= scale;
7496 out += info.outJump;
7499 else if (info.inFormat == RTAUDIO_SINT16) {
7500 Int16 *in = (Int16 *)inBuffer;
7501 scale = (Float32) ( 1.0 / 32767.5 );
7502 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7503 for (j=0; j<info.channels; j++) {
7504 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7505 out[info.outOffset[j]] += 0.5;
7506 out[info.outOffset[j]] *= scale;
7509 out += info.outJump;
7512 else if (info.inFormat == RTAUDIO_SINT24) {
7513 Int32 *in = (Int32 *)inBuffer;
7514 scale = (Float32) ( 1.0 / 8388607.5 );
7515 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7516 for (j=0; j<info.channels; j++) {
// NOTE(review): same lower-three-bytes/sign-loss concern as the
// FLOAT64 branch above — verify against upstream.
7517 out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0x00ffffff);
7518 out[info.outOffset[j]] += 0.5;
7519 out[info.outOffset[j]] *= scale;
7522 out += info.outJump;
7525 else if (info.inFormat == RTAUDIO_SINT32) {
7526 Int32 *in = (Int32 *)inBuffer;
7527 scale = (Float32) ( 1.0 / 2147483647.5 );
7528 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7529 for (j=0; j<info.channels; j++) {
7530 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7531 out[info.outOffset[j]] += 0.5;
7532 out[info.outOffset[j]] *= scale;
7535 out += info.outJump;
7538 else if (info.inFormat == RTAUDIO_FLOAT32) {
7539 // Channel compensation and/or (de)interleaving only.
7540 Float32 *in = (Float32 *)inBuffer;
7541 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7542 for (j=0; j<info.channels; j++) {
7543 out[info.outOffset[j]] = in[info.inOffset[j]];
7546 out += info.outJump;
7549 else if (info.inFormat == RTAUDIO_FLOAT64) {
7550 Float64 *in = (Float64 *)inBuffer;
7551 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7552 for (j=0; j<info.channels; j++) {
7553 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7556 out += info.outJump;
// ---- Output format: 32-bit signed int (shift inputs up) ----
7560 else if (info.outFormat == RTAUDIO_SINT32) {
7561 Int32 *out = (Int32 *)outBuffer;
7562 if (info.inFormat == RTAUDIO_SINT8) {
7563 signed char *in = (signed char *)inBuffer;
7564 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7565 for (j=0; j<info.channels; j++) {
7566 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7567 out[info.outOffset[j]] <<= 24;
7570 out += info.outJump;
7573 else if (info.inFormat == RTAUDIO_SINT16) {
7574 Int16 *in = (Int16 *)inBuffer;
7575 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7576 for (j=0; j<info.channels; j++) {
7577 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7578 out[info.outOffset[j]] <<= 16;
7581 out += info.outJump;
7584 else if (info.inFormat == RTAUDIO_SINT24) {
7585 Int32 *in = (Int32 *)inBuffer;
7586 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7587 for (j=0; j<info.channels; j++) {
7588 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7589 out[info.outOffset[j]] <<= 8;
7592 out += info.outJump;
7595 else if (info.inFormat == RTAUDIO_SINT32) {
7596 // Channel compensation and/or (de)interleaving only.
7597 Int32 *in = (Int32 *)inBuffer;
7598 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7599 for (j=0; j<info.channels; j++) {
7600 out[info.outOffset[j]] = in[info.inOffset[j]];
7603 out += info.outJump;
7606 else if (info.inFormat == RTAUDIO_FLOAT32) {
7607 Float32 *in = (Float32 *)inBuffer;
7608 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7609 for (j=0; j<info.channels; j++) {
7610 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
7613 out += info.outJump;
7616 else if (info.inFormat == RTAUDIO_FLOAT64) {
7617 Float64 *in = (Float64 *)inBuffer;
7618 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7619 for (j=0; j<info.channels; j++) {
7620 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
7623 out += info.outJump;
// ---- Output format: 24-bit signed int (in Int32 words) ----
7627 else if (info.outFormat == RTAUDIO_SINT24) {
7628 Int32 *out = (Int32 *)outBuffer;
7629 if (info.inFormat == RTAUDIO_SINT8) {
7630 signed char *in = (signed char *)inBuffer;
7631 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7632 for (j=0; j<info.channels; j++) {
7633 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7634 out[info.outOffset[j]] <<= 16;
7637 out += info.outJump;
7640 else if (info.inFormat == RTAUDIO_SINT16) {
7641 Int16 *in = (Int16 *)inBuffer;
7642 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7643 for (j=0; j<info.channels; j++) {
7644 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7645 out[info.outOffset[j]] <<= 8;
7648 out += info.outJump;
7651 else if (info.inFormat == RTAUDIO_SINT24) {
7652 // Channel compensation and/or (de)interleaving only.
7653 Int32 *in = (Int32 *)inBuffer;
7654 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7655 for (j=0; j<info.channels; j++) {
7656 out[info.outOffset[j]] = in[info.inOffset[j]];
7659 out += info.outJump;
7662 else if (info.inFormat == RTAUDIO_SINT32) {
7663 Int32 *in = (Int32 *)inBuffer;
7664 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7665 for (j=0; j<info.channels; j++) {
7666 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7667 out[info.outOffset[j]] >>= 8;
7670 out += info.outJump;
7673 else if (info.inFormat == RTAUDIO_FLOAT32) {
7674 Float32 *in = (Float32 *)inBuffer;
7675 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7676 for (j=0; j<info.channels; j++) {
7677 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
7680 out += info.outJump;
7683 else if (info.inFormat == RTAUDIO_FLOAT64) {
7684 Float64 *in = (Float64 *)inBuffer;
7685 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7686 for (j=0; j<info.channels; j++) {
7687 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
7690 out += info.outJump;
// ---- Output format: 16-bit signed int ----
7694 else if (info.outFormat == RTAUDIO_SINT16) {
7695 Int16 *out = (Int16 *)outBuffer;
7696 if (info.inFormat == RTAUDIO_SINT8) {
7697 signed char *in = (signed char *)inBuffer;
7698 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7699 for (j=0; j<info.channels; j++) {
7700 out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
7701 out[info.outOffset[j]] <<= 8;
7704 out += info.outJump;
7707 else if (info.inFormat == RTAUDIO_SINT16) {
7708 // Channel compensation and/or (de)interleaving only.
7709 Int16 *in = (Int16 *)inBuffer;
7710 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7711 for (j=0; j<info.channels; j++) {
7712 out[info.outOffset[j]] = in[info.inOffset[j]];
7715 out += info.outJump;
7718 else if (info.inFormat == RTAUDIO_SINT24) {
7719 Int32 *in = (Int32 *)inBuffer;
7720 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7721 for (j=0; j<info.channels; j++) {
7722 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 8) & 0x0000ffff);
7725 out += info.outJump;
7728 else if (info.inFormat == RTAUDIO_SINT32) {
7729 Int32 *in = (Int32 *)inBuffer;
7730 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7731 for (j=0; j<info.channels; j++) {
7732 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
7735 out += info.outJump;
7738 else if (info.inFormat == RTAUDIO_FLOAT32) {
7739 Float32 *in = (Float32 *)inBuffer;
7740 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7741 for (j=0; j<info.channels; j++) {
7742 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
7745 out += info.outJump;
7748 else if (info.inFormat == RTAUDIO_FLOAT64) {
7749 Float64 *in = (Float64 *)inBuffer;
7750 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7751 for (j=0; j<info.channels; j++) {
7752 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
7755 out += info.outJump;
// ---- Output format: 8-bit signed int ----
7759 else if (info.outFormat == RTAUDIO_SINT8) {
7760 signed char *out = (signed char *)outBuffer;
7761 if (info.inFormat == RTAUDIO_SINT8) {
7762 // Channel compensation and/or (de)interleaving only.
7763 signed char *in = (signed char *)inBuffer;
7764 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7765 for (j=0; j<info.channels; j++) {
7766 out[info.outOffset[j]] = in[info.inOffset[j]];
7769 out += info.outJump;
// NOTE(review): plain `if` breaks the `else if` chain used everywhere
// else in this function; harmless (formats are mutually exclusive)
// but inconsistent — confirm against upstream.
7772 if (info.inFormat == RTAUDIO_SINT16) {
7773 Int16 *in = (Int16 *)inBuffer;
7774 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7775 for (j=0; j<info.channels; j++) {
7776 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
7779 out += info.outJump;
7782 else if (info.inFormat == RTAUDIO_SINT24) {
7783 Int32 *in = (Int32 *)inBuffer;
7784 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7785 for (j=0; j<info.channels; j++) {
7786 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 16) & 0x000000ff);
7789 out += info.outJump;
7792 else if (info.inFormat == RTAUDIO_SINT32) {
7793 Int32 *in = (Int32 *)inBuffer;
7794 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7795 for (j=0; j<info.channels; j++) {
7796 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
7799 out += info.outJump;
7802 else if (info.inFormat == RTAUDIO_FLOAT32) {
7803 Float32 *in = (Float32 *)inBuffer;
7804 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7805 for (j=0; j<info.channels; j++) {
7806 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
7809 out += info.outJump;
7812 else if (info.inFormat == RTAUDIO_FLOAT64) {
7813 Float64 *in = (Float64 *)inBuffer;
7814 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7815 for (j=0; j<info.channels; j++) {
7816 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
7819 out += info.outJump;
7825 //static inline uint16_t bswap_16(uint16_t x) { return (x>>8) | (x<<8); }
7826 //static inline uint32_t bswap_32(uint32_t x) { return (bswap_16(x&0xffff)<<16) | (bswap_16(x>>16)); }
7827 //static inline uint64_t bswap_64(uint64_t x) { return (((unsigned long long)bswap_32(x&0xffffffffull))<<32) | (bswap_32(x>>32)); }
// Reverse the byte order of `samples` samples of `buffer` in place to
// flip endianness: 2-byte swaps for SINT16, 4-byte for SINT24/SINT32/
// FLOAT32 (24-bit samples live in 32-bit words), 8-byte for FLOAT64.
// NOTE(review): the actual swap statements and pointer arithmetic are
// elided from this excerpt; only the skeleton and comments remain.
7829 void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
7835 if ( format == RTAUDIO_SINT16 ) {
7836 for ( unsigned int i=0; i<samples; i++ ) {
7837 // Swap 1st and 2nd bytes.
7842 // Increment 2 bytes.
7846 else if ( format == RTAUDIO_SINT24 ||
7847 format == RTAUDIO_SINT32 ||
7848 format == RTAUDIO_FLOAT32 ) {
7849 for ( unsigned int i=0; i<samples; i++ ) {
7850 // Swap 1st and 4th bytes.
7855 // Swap 2nd and 3rd bytes.
7861 // Increment 3 more bytes.
7865 else if ( format == RTAUDIO_FLOAT64 ) {
7866 for ( unsigned int i=0; i<samples; i++ ) {
7867 // Swap 1st and 8th bytes
7872 // Swap 2nd and 7th bytes
7878 // Swap 3rd and 6th bytes
7884 // Swap 4th and 5th bytes
7890 // Increment 5 more bytes.
7896 // Indentation settings for Vim and Emacs
7899 // c-basic-offset: 2
7900 // indent-tabs-mode: nil
7903 // vim: et sts=2 sw=2