1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), Macintosh OS X (CoreAudio and Jack), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2011 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 asked to send the modifications to the original developer so that
28 they can be incorporated into the canonical version. This is,
29 however, not a binding provision of this license.
31 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
34 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
35 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
36 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
39 /************************************************************************/
41 // RtAudio: Version 4.0.8
49 // Static variable definitions.
// Table of candidate sample rates that RtAudio probes when querying a
// device's supported rates. MAX_SAMPLE_RATES must equal the element
// count of SAMPLE_RATES (14 entries below).
50 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
51 const unsigned int RtApi::SAMPLE_RATES[] = {
52 4000, 5512, 8000, 9600, 11025, 16000, 22050,
53 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform abstraction for the stream mutex: Windows builds use
// critical sections, unix-like builds use pthread mutexes, and other
// builds get harmless dummy definitions.
56 #if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
57 #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
58 #define MUTEX_DESTROY(A) DeleteCriticalSection(A)
59 #define MUTEX_LOCK(A) EnterCriticalSection(A)
60 #define MUTEX_UNLOCK(A) LeaveCriticalSection(A)
61 #elif defined(__LINUX_ALSA__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
63 #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
64 #define MUTEX_DESTROY(A) pthread_mutex_destroy(A)
65 #define MUTEX_LOCK(A) pthread_mutex_lock(A)
66 #define MUTEX_UNLOCK(A) pthread_mutex_unlock(A)
// NOTE(review): the '#else' introducing these dummy definitions and the
// closing '#endif' are not visible in this chunk — confirm against the
// full file.
68 #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
69 #define MUTEX_DESTROY(A) abs(*A) // dummy definitions
72 // *************************************************** //
74 // RtAudio definitions.
76 // *************************************************** //
78 void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()
// Appends one enum value to 'apis' for each audio API compiled into
// this build. The push order below is significant: it determines the
// order in which the RtAudio constructor tries APIs.
82 // The order here will control the order of RtAudio's API search in
84 #if defined(__UNIX_JACK__)
85 apis.push_back( UNIX_JACK );
87 #if defined(__LINUX_ALSA__)
88 apis.push_back( LINUX_ALSA );
90 #if defined(__LINUX_OSS__)
91 apis.push_back( LINUX_OSS );
93 #if defined(__WINDOWS_ASIO__)
94 apis.push_back( WINDOWS_ASIO );
96 #if defined(__WINDOWS_DS__)
97 apis.push_back( WINDOWS_DS );
99 #if defined(__MACOSX_CORE__)
100 apis.push_back( MACOSX_CORE );
102 #if defined(__RTAUDIO_DUMMY__)
103 apis.push_back( RTAUDIO_DUMMY );
// NOTE(review): the matching '#endif' after each conditional is not
// visible in this gap-sampled chunk.
107 void RtAudio :: openRtApi( RtAudio::Api api )
// Instantiates the concrete RtApi subclass matching 'api' into the
// rtapi_ member. Each branch exists only when the corresponding API
// support was compiled in; an unmatched 'api' leaves rtapi_ unchanged.
109 #if defined(__UNIX_JACK__)
110 if ( api == UNIX_JACK )
111 rtapi_ = new RtApiJack();
113 #if defined(__LINUX_ALSA__)
114 if ( api == LINUX_ALSA )
115 rtapi_ = new RtApiAlsa();
117 #if defined(__LINUX_OSS__)
118 if ( api == LINUX_OSS )
119 rtapi_ = new RtApiOss();
121 #if defined(__WINDOWS_ASIO__)
122 if ( api == WINDOWS_ASIO )
123 rtapi_ = new RtApiAsio();
125 #if defined(__WINDOWS_DS__)
126 if ( api == WINDOWS_DS )
127 rtapi_ = new RtApiDs();
129 #if defined(__MACOSX_CORE__)
130 if ( api == MACOSX_CORE )
131 rtapi_ = new RtApiCore();
133 #if defined(__RTAUDIO_DUMMY__)
134 if ( api == RTAUDIO_DUMMY )
135 rtapi_ = new RtApiDummy();
139 RtAudio :: RtAudio( RtAudio::Api api ) throw()
// Constructor: if a specific API was requested, try opening it first;
// otherwise (or if that fails) iterate the compiled APIs and keep the
// first one that reports at least one device.
143 if ( api != UNSPECIFIED ) {
144 // Attempt to open the specified API.
// NOTE(review): the openRtApi( api ) call between these lines is not
// visible in this gap-sampled chunk.
146 if ( rtapi_ ) return;
148 // No compiled support for specified API value. Issue a debug
149 // warning and continue as if no API was specified.
150 std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
153 // Iterate through the compiled APIs and return as soon as we find
154 // one with at least one device or we reach the end of the list.
155 std::vector< RtAudio::Api > apis;
156 getCompiledApi( apis );
157 for ( unsigned int i=0; i<apis.size(); i++ ) {
158 openRtApi( apis[i] );
159 if ( rtapi_->getDeviceCount() ) break;
162 if ( rtapi_ ) return;
164 // It should not be possible to get here because the preprocessor
165 // definition __RTAUDIO_DUMMY__ is automatically defined if no
166 // API-specific definitions are passed to the compiler. But just in
167 // case something weird happens, we'll print out an error message.
168 std::cerr << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
// Destructor; its body (presumably deleting rtapi_) is not visible in
// this gap-sampled chunk — confirm against the full file.
171 RtAudio :: ~RtAudio() throw()
// Thin facade: forwards all stream-open arguments to the selected
// API implementation held in rtapi_.
176 void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
177 RtAudio::StreamParameters *inputParameters,
178 RtAudioFormat format, unsigned int sampleRate,
179 unsigned int *bufferFrames,
180 RtAudioCallback callback, void *userData,
181 RtAudio::StreamOptions *options )
183 return rtapi_->openStream( outputParameters, inputParameters, format,
184 sampleRate, bufferFrames, callback,
188 // *************************************************** //
190 // Public RtApi definitions (see end of file for
191 // private or protected utility functions).
193 // *************************************************** //
// RtApi constructor body (the 'RtApi :: RtApi()' signature line is not
// visible in this chunk): initialize the stream structure to a known
// closed/uninitialized state and create the per-stream mutex.
197 stream_.state = STREAM_CLOSED;
198 stream_.mode = UNINITIALIZED;
199 stream_.apiHandle = 0;
200 stream_.userBuffer[0] = 0;
201 stream_.userBuffer[1] = 0;
202 MUTEX_INITIALIZE( &stream_.mutex );
203 showWarnings_ = true;
// RtApi destructor body fragment: tear down the mutex created above.
208 MUTEX_DESTROY( &stream_.mutex );
// Validates the caller-supplied parameters, probes the requested
// device(s) via probeDeviceOpen(), stores the callback info, and moves
// the stream to STREAM_STOPPED. Invalid arguments report
// RtError::INVALID_USE through error(); probe failures report
// RtError::SYSTEM_ERROR.
211 void RtApi :: openStream( RtAudio::StreamParameters *oParams,
212 RtAudio::StreamParameters *iParams,
213 RtAudioFormat format, unsigned int sampleRate,
214 unsigned int *bufferFrames,
215 RtAudioCallback callback, void *userData,
216 RtAudio::StreamOptions *options )
218 if ( stream_.state != STREAM_CLOSED ) {
219 errorText_ = "RtApi::openStream: a stream is already open!";
220 error( RtError::INVALID_USE );
223 if ( oParams && oParams->nChannels < 1 ) {
224 errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
225 error( RtError::INVALID_USE );
228 if ( iParams && iParams->nChannels < 1 ) {
229 errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
230 error( RtError::INVALID_USE );
233 if ( oParams == NULL && iParams == NULL ) {
234 errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
235 error( RtError::INVALID_USE );
238 if ( formatBytes(format) == 0 ) {
239 errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
240 error( RtError::INVALID_USE );
// Validate device indices against the number of available devices.
243 unsigned int nDevices = getDeviceCount();
244 unsigned int oChannels = 0;
246 oChannels = oParams->nChannels;
247 if ( oParams->deviceId >= nDevices ) {
248 errorText_ = "RtApi::openStream: output device parameter value is invalid.";
249 error( RtError::INVALID_USE );
253 unsigned int iChannels = 0;
255 iChannels = iParams->nChannels;
256 if ( iParams->deviceId >= nDevices ) {
257 errorText_ = "RtApi::openStream: input device parameter value is invalid.";
258 error( RtError::INVALID_USE );
// Probe output first, then input; if the input probe fails after the
// output side was opened, close the half-open stream before erroring.
265 if ( oChannels > 0 ) {
267 result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
268 sampleRate, format, bufferFrames, options );
269 if ( result == false ) error( RtError::SYSTEM_ERROR );
272 if ( iChannels > 0 ) {
274 result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
275 sampleRate, format, bufferFrames, options );
276 if ( result == false ) {
277 if ( oChannels > 0 ) closeStream();
278 error( RtError::SYSTEM_ERROR );
282 stream_.callbackInfo.callback = (void *) callback;
283 stream_.callbackInfo.userData = userData;
// Report back the actual buffer count chosen by the probe.
285 if ( options ) options->numberOfBuffers = stream_.nBuffers;
286 stream_.state = STREAM_STOPPED;
// Base-class fallbacks for the default device queries; API subclasses
// override these where the platform exposes a real default device.
// (The return statements are not visible in this chunk; presumably
// they return device index 0 — confirm against the full file.)
289 unsigned int RtApi :: getDefaultInputDevice( void )
291 // Should be implemented in subclasses if possible.
295 unsigned int RtApi :: getDefaultOutputDevice( void )
297 // Should be implemented in subclasses if possible.
// Placeholder base implementations: every concrete API subclass must
// provide its own closeStream() and probeDeviceOpen().
301 void RtApi :: closeStream( void )
303 // MUST be implemented in subclasses!
307 bool RtApi :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
308 unsigned int firstChannel, unsigned int sampleRate,
309 RtAudioFormat format, unsigned int *bufferSize,
310 RtAudio::StreamOptions *options )
312 // MUST be implemented in subclasses!
316 void RtApi :: tickStreamTime( void )
318 // Subclasses that do not provide their own implementation of
319 // getStreamTime should call this function once per buffer I/O to
320 // provide basic stream time support.
// Advance the stream clock by one buffer's duration (seconds).
322 stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
// Record the wall-clock time of this tick so getStreamTime() can
// interpolate between ticks.
324 #if defined( HAVE_GETTIMEOFDAY )
325 gettimeofday( &stream_.lastTickTimestamp, NULL );
// Returns the total stream latency in frames: the output latency
// (latency[0]) plus, for duplex streams, the input latency
// (latency[1]).
329 long RtApi :: getStreamLatency( void )
333 long totalLatency = 0;
334 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
335 totalLatency = stream_.latency[0];
336 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
337 totalLatency += stream_.latency[1];
// Returns the elapsed stream time in seconds. When gettimeofday is
// available, the coarse per-buffer stream clock is refined by adding
// the wall-clock time elapsed since the last tickStreamTime() call.
342 double RtApi :: getStreamTime( void )
346 #if defined( HAVE_GETTIMEOFDAY )
347 // Return a very accurate estimate of the stream time by
348 // adding in the elapsed time since the last tick.
// No interpolation before the stream has started ticking.
352 if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
353 return stream_.streamTime;
355 gettimeofday( &now, NULL );
356 then = stream_.lastTickTimestamp;
357 return stream_.streamTime +
358 ((now.tv_sec + 0.000001 * now.tv_usec) -
359 (then.tv_sec + 0.000001 * then.tv_usec));
// Fallback (no gettimeofday): buffer-granularity stream time only.
361 return stream_.streamTime;
// Accessor for the sample rate the open stream is actually running at.
365 unsigned int RtApi :: getStreamSampleRate( void )
369 return stream_.sampleRate;
373 // *************************************************** //
375 // OS/API-specific methods.
377 // *************************************************** //
379 #if defined(__MACOSX_CORE__)
381 // The OS X CoreAudio API is designed to use a separate callback
382 // procedure for each of its audio devices. A single RtAudio duplex
383 // stream using two different devices is supported here, though it
384 // cannot be guaranteed to always behave correctly because we cannot
385 // synchronize these two callbacks.
387 // A property listener is installed for over/underrun information.
388 // However, no functionality is currently provided to allow property
389 // listeners to trigger user handlers because it is unclear what could
390 // be done if a critical stream parameter (buffer size, sample rate,
391 // device disconnect) notification arrived. The listeners entail
392 // quite a bit of extra code and most likely, a user program wouldn't
393 // be prepared for the result anyway. However, we do provide a flag
394 // to the client callback function to inform of an over/underrun.
396 // The mechanism for querying and setting system parameters was
397 // updated (and perhaps simplified) in OS-X version 10.4. However,
398 // since 10.4 support is not necessarily available to all users, I've
399 // decided not to update the respective code at this time. Perhaps
400 // this will happen when Apple makes 10.4 free for everyone. :-)
402 // A structure to hold various information related to the CoreAudio API
// implementation. Index 0 of the two-element arrays refers to the
// output device, index 1 to the input device (the 'struct CoreHandle {'
// opening line is not visible in this chunk).
405 AudioDeviceID id[2]; // device ids
406 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
407 AudioDeviceIOProcID procId[2];
409 UInt32 iStream[2]; // device stream index (or first if using multiple)
410 UInt32 nStreams[2]; // number of streams to use
413 pthread_cond_t condition;
414 int drainCounter; // Tracks callback counts when draining
415 bool internalDrain; // Indicates if stop is initiated from callback or not.
// Default constructor: zero/clear all per-device state.
418 :deviceBuffer(0), drainCounter(0), internalDrain(false) { nStreams[0] = 1; nStreams[1] = 1; id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
// Default constructor: all setup happens in the base RtApi constructor.
421 RtApiCore :: RtApiCore()
423 // Nothing to do here.
426 RtApiCore :: ~RtApiCore()
428 // The subclass destructor gets called before the base class
429 // destructor, so close an existing stream before deallocating
430 // apiDeviceId memory.
431 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Counts audio devices by asking CoreAudio for the size of the
// kAudioHardwarePropertyDevices array and dividing by the element
// size. Returns a warning (not an exception) on system error.
434 unsigned int RtApiCore :: getDeviceCount( void )
436 // Find out how many audio devices there are, if any.
438 AudioObjectPropertyAddress propertyAddress = { kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
439 OSStatus result = AudioObjectGetPropertyDataSize( kAudioObjectSystemObject, &propertyAddress, 0, NULL, &dataSize );
440 if ( result != noErr ) {
441 errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
442 error( RtError::WARNING );
446 return dataSize / sizeof( AudioDeviceID );
// Maps CoreAudio's default input AudioDeviceID to RtAudio's device
// index by locating it in the full device list. Errors are reported
// as warnings.
449 unsigned int RtApiCore :: getDefaultInputDevice( void )
451 unsigned int nDevices = getDeviceCount();
// With zero or one device, index 0 is trivially the default.
452 if ( nDevices <= 1 ) return 0;
455 UInt32 dataSize = sizeof( AudioDeviceID );
456 AudioObjectPropertyAddress property = { kAudioHardwarePropertyDefaultInputDevice, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
457 OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, &id );
458 if ( result != noErr ) {
459 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
460 error( RtError::WARNING );
// Fetch the full device list and search it for the default id.
464 dataSize *= nDevices;
465 AudioDeviceID deviceList[ nDevices ];
466 property.mSelector = kAudioHardwarePropertyDevices;
467 result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, (void *) &deviceList );
468 if ( result != noErr ) {
469 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
470 error( RtError::WARNING );
474 for ( unsigned int i=0; i<nDevices; i++ )
475 if ( id == deviceList[i] ) return i;
477 errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
478 error( RtError::WARNING );
// Output-side twin of getDefaultInputDevice(): resolves CoreAudio's
// default output AudioDeviceID to an RtAudio device index.
482 unsigned int RtApiCore :: getDefaultOutputDevice( void )
484 unsigned int nDevices = getDeviceCount();
// With zero or one device, index 0 is trivially the default.
485 if ( nDevices <= 1 ) return 0;
488 UInt32 dataSize = sizeof( AudioDeviceID );
489 AudioObjectPropertyAddress property = { kAudioHardwarePropertyDefaultOutputDevice, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };
490 OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, &id );
491 if ( result != noErr ) {
492 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
493 error( RtError::WARNING );
// Fetch the full device list and search it for the default id.
497 dataSize = sizeof( AudioDeviceID ) * nDevices;
498 AudioDeviceID deviceList[ nDevices ];
499 property.mSelector = kAudioHardwarePropertyDevices;
500 result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property, 0, NULL, &dataSize, (void *) &deviceList );
501 if ( result != noErr ) {
502 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
503 error( RtError::WARNING );
507 for ( unsigned int i=0; i<nDevices; i++ )
508 if ( id == deviceList[i] ) return i;
510 errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
511 error( RtError::WARNING );
// Builds an RtAudio::DeviceInfo for device index 'device': name
// ("manufacturer: product"), output/input/duplex channel counts,
// supported sample rates (intersection of the device's nominal rate
// range with the static SAMPLE_RATES table), native format, and
// default-device flags. Most CoreAudio failures are reported as
// warnings; an invalid index is INVALID_USE.
515 RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
517 RtAudio::DeviceInfo info;
521 unsigned int nDevices = getDeviceCount();
522 if ( nDevices == 0 ) {
523 errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
524 error( RtError::INVALID_USE );
527 if ( device >= nDevices ) {
528 errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
529 error( RtError::INVALID_USE );
// Translate the RtAudio index into a CoreAudio AudioDeviceID.
532 AudioDeviceID deviceList[ nDevices ];
533 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
534 AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,
535 kAudioObjectPropertyScopeGlobal,
536 kAudioObjectPropertyElementMaster };
537 OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property,
538 0, NULL, &dataSize, (void *) &deviceList );
539 if ( result != noErr ) {
540 errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
541 error( RtError::WARNING );
545 AudioDeviceID id = deviceList[ device ];
547 // Get the device name.
// NOTE(review): error text below says "probeDeviceInfo" although this
// function is getDeviceInfo — likely a stale message from a rename.
550 dataSize = sizeof( CFStringRef );
551 property.mSelector = kAudioObjectPropertyManufacturer;
552 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &cfname );
553 if ( result != noErr ) {
554 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
555 errorText_ = errorStream_.str();
556 error( RtError::WARNING );
// Convert the CFString manufacturer name to a C string; the
// 'length * 3 + 1' buffer allows up to 3 bytes per character plus NUL.
// NOTE(review): no matching free(mname) is visible in this chunk —
// confirm the buffer is released in the full file.
560 //const char *mname = CFStringGetCStringPtr( cfname, CFStringGetSystemEncoding() );
561 int length = CFStringGetLength(cfname);
562 char *mname = (char *)malloc(length * 3 + 1);
563 CFStringGetCString(cfname, mname, length * 3 + 1, CFStringGetSystemEncoding());
564 info.name.append( (const char *)mname, strlen(mname) );
565 info.name.append( ": " );
// Now append the product name after the "manufacturer: " prefix.
569 property.mSelector = kAudioObjectPropertyName;
570 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &cfname );
571 if ( result != noErr ) {
572 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
573 errorText_ = errorStream_.str();
574 error( RtError::WARNING );
// NOTE(review): as with mname above, no free(name) is visible here.
578 //const char *name = CFStringGetCStringPtr( cfname, CFStringGetSystemEncoding() );
579 length = CFStringGetLength(cfname);
580 char *name = (char *)malloc(length * 3 + 1);
581 CFStringGetCString(cfname, name, length * 3 + 1, CFStringGetSystemEncoding());
582 info.name.append( (const char *)name, strlen(name) );
586 // Get the output stream "configuration".
587 AudioBufferList *bufferList = nil;
588 property.mSelector = kAudioDevicePropertyStreamConfiguration;
589 property.mScope = kAudioDevicePropertyScopeOutput;
590 // property.mElement = kAudioObjectPropertyElementWildcard;
592 result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
593 if ( result != noErr || dataSize == 0 ) {
594 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
595 errorText_ = errorStream_.str();
596 error( RtError::WARNING );
600 // Allocate the AudioBufferList.
601 bufferList = (AudioBufferList *) malloc( dataSize );
602 if ( bufferList == NULL ) {
603 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
604 error( RtError::WARNING );
608 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );
609 if ( result != noErr || dataSize == 0 ) {
611 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
612 errorText_ = errorStream_.str();
613 error( RtError::WARNING );
// Sum the channels of every output stream buffer.
617 // Get output channel information.
618 unsigned int i, nStreams = bufferList->mNumberBuffers;
619 for ( i=0; i<nStreams; i++ )
620 info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
// NOTE(review): bufferList is reassigned below for the input scope;
// the free() of the output-scope allocation is not visible here.
623 // Get the input stream "configuration".
624 property.mScope = kAudioDevicePropertyScopeInput;
625 result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
626 if ( result != noErr || dataSize == 0 ) {
627 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
628 errorText_ = errorStream_.str();
629 error( RtError::WARNING );
633 // Allocate the AudioBufferList.
634 bufferList = (AudioBufferList *) malloc( dataSize );
635 if ( bufferList == NULL ) {
636 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
637 error( RtError::WARNING );
641 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );
642 if (result != noErr || dataSize == 0) {
644 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
645 errorText_ = errorStream_.str();
646 error( RtError::WARNING );
// Sum the channels of every input stream buffer.
650 // Get input channel information.
651 nStreams = bufferList->mNumberBuffers;
652 for ( i=0; i<nStreams; i++ )
653 info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
// Duplex channel count is the smaller of the two directions.
656 // If device opens for both playback and capture, we determine the channels.
657 if ( info.outputChannels > 0 && info.inputChannels > 0 )
658 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
660 // Probe the device sample rates.
661 bool isInput = false;
662 if ( info.outputChannels == 0 ) isInput = true;
664 // Determine the supported sample rates.
665 property.mSelector = kAudioDevicePropertyAvailableNominalSampleRates;
666 if ( isInput == false ) property.mScope = kAudioDevicePropertyScopeOutput;
667 result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
668 if ( result != kAudioHardwareNoError || dataSize == 0 ) {
669 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
670 errorText_ = errorStream_.str();
671 error( RtError::WARNING );
675 UInt32 nRanges = dataSize / sizeof( AudioValueRange );
676 AudioValueRange rangeList[ nRanges ];
677 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &rangeList );
678 if ( result != kAudioHardwareNoError ) {
679 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
680 errorText_ = errorStream_.str();
681 error( RtError::WARNING );
// Collapse the reported ranges into one overall [min, max] interval.
685 Float64 minimumRate = 100000000.0, maximumRate = 0.0;
686 for ( UInt32 i=0; i<nRanges; i++ ) {
687 if ( rangeList[i].mMinimum < minimumRate ) minimumRate = rangeList[i].mMinimum;
688 if ( rangeList[i].mMaximum > maximumRate ) maximumRate = rangeList[i].mMaximum;
// Keep only the standard probe rates that fall inside that interval.
691 info.sampleRates.clear();
692 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
693 if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate )
694 info.sampleRates.push_back( SAMPLE_RATES[k] );
697 if ( info.sampleRates.size() == 0 ) {
698 errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";
699 errorText_ = errorStream_.str();
700 error( RtError::WARNING );
704 // CoreAudio always uses 32-bit floating point data for PCM streams.
705 // Thus, any other "physical" formats supported by the device are of
706 // no interest to the client.
707 info.nativeFormats = RTAUDIO_FLOAT32;
709 if ( info.outputChannels > 0 )
710 if ( getDefaultOutputDevice() == device ) info.isDefaultOutput = true;
711 if ( info.inputChannels > 0 )
712 if ( getDefaultInputDevice() == device ) info.isDefaultInput = true;
// CoreAudio IOProc trampoline: recovers the RtApiCore object from the
// CallbackInfo user pointer and forwards to its callbackEvent().
// Returning kAudioHardwareUnspecifiedError signals failure back to
// CoreAudio.
718 OSStatus callbackHandler( AudioDeviceID inDevice,
719 const AudioTimeStamp* inNow,
720 const AudioBufferList* inInputData,
721 const AudioTimeStamp* inInputTime,
722 AudioBufferList* outOutputData,
723 const AudioTimeStamp* inOutputTime,
726 CallbackInfo *info = (CallbackInfo *) infoPointer;
728 RtApiCore *object = (RtApiCore *) info->object;
729 if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
730 return kAudioHardwareUnspecifiedError;
732 return kAudioHardwareNoError;
// Property-listener callback: records processor-overload (xrun)
// notifications in the CoreHandle, using xrun[1] for the input scope
// and xrun[0] otherwise (output).
735 OSStatus deviceListener( AudioObjectID inDevice,
737 const AudioObjectPropertyAddress properties[],
738 void* handlePointer )
740 CoreHandle *handle = (CoreHandle *) handlePointer;
741 for ( UInt32 i=0; i<nAddresses; i++ ) {
742 if ( properties[i].mSelector == kAudioDeviceProcessorOverload ) {
743 if ( properties[i].mScope == kAudioDevicePropertyScopeInput )
744 handle->xrun[1] = true;
746 handle->xrun[0] = true;
750 return kAudioHardwareNoError;
// Opens one direction (OUTPUT or INPUT) of a CoreAudio stream on
// 'device': selects the device stream(s) covering the requested
// channels, negotiates buffer size and sample rate, optionally grabs
// exclusive ("hog") access, and begins configuring the stream format.
// This function continues beyond the end of this chunk; only the
// visible portion is documented here.
753 bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
754 unsigned int firstChannel, unsigned int sampleRate,
755 RtAudioFormat format, unsigned int *bufferSize,
756 RtAudio::StreamOptions *options )
759 unsigned int nDevices = getDeviceCount();
760 if ( nDevices == 0 ) {
761 // This should not happen because a check is made before this function is called.
762 errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
766 if ( device >= nDevices ) {
767 // This should not happen because a check is made before this function is called.
768 errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
// Translate the RtAudio index into a CoreAudio AudioDeviceID.
// NOTE(review): variable-length arrays like this are a compiler
// extension, not standard C++.
772 AudioDeviceID deviceList[ nDevices ];
773 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
774 AudioObjectPropertyAddress property = { kAudioHardwarePropertyDevices,
775 kAudioObjectPropertyScopeGlobal,
776 kAudioObjectPropertyElementMaster };
777 OSStatus result = AudioObjectGetPropertyData( kAudioObjectSystemObject, &property,
778 0, NULL, &dataSize, (void *) &deviceList );
779 if ( result != noErr ) {
780 errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
784 AudioDeviceID id = deviceList[ device ];
786 // Setup for stream mode.
787 bool isInput = false;
788 if ( mode == INPUT ) {
790 property.mScope = kAudioDevicePropertyScopeInput;
793 property.mScope = kAudioDevicePropertyScopeOutput;
795 // Get the stream "configuration".
796 AudioBufferList *bufferList = nil;
798 property.mSelector = kAudioDevicePropertyStreamConfiguration;
799 result = AudioObjectGetPropertyDataSize( id, &property, 0, NULL, &dataSize );
800 if ( result != noErr || dataSize == 0 ) {
801 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
802 errorText_ = errorStream_.str();
806 // Allocate the AudioBufferList.
807 bufferList = (AudioBufferList *) malloc( dataSize );
808 if ( bufferList == NULL ) {
809 errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
813 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, bufferList );
814 if (result != noErr || dataSize == 0) {
815 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
816 errorText_ = errorStream_.str();
820 // Search for one or more streams that contain the desired number of
821 // channels. CoreAudio devices can have an arbitrary number of
822 // streams and each stream can have an arbitrary number of channels.
823 // For each stream, a single buffer of interleaved samples is
824 // provided. RtAudio prefers the use of one stream of interleaved
825 // data or multiple consecutive single-channel streams. However, we
826 // now support multiple consecutive multi-channel streams of
827 // interleaved data as well.
828 UInt32 iStream, offsetCounter = firstChannel;
829 UInt32 nStreams = bufferList->mNumberBuffers;
830 bool monoMode = false;
831 bool foundStream = false;
833 // First check that the device supports the requested number of
835 UInt32 deviceChannels = 0;
836 for ( iStream=0; iStream<nStreams; iStream++ )
837 deviceChannels += bufferList->mBuffers[iStream].mNumberChannels;
839 if ( deviceChannels < ( channels + firstChannel ) ) {
841 errorStream_ << "RtApiCore::probeDeviceOpen: the device (" << device << ") does not support the requested channel count.";
842 errorText_ = errorStream_.str();
846 // Look for a single stream meeting our needs.
847 UInt32 firstStream, streamCount = 1, streamChannels = 0, channelOffset = 0;
848 for ( iStream=0; iStream<nStreams; iStream++ ) {
849 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
850 if ( streamChannels >= channels + offsetCounter ) {
851 firstStream = iStream;
852 channelOffset = offsetCounter;
// Walk past streams wholly consumed by the channel offset.
856 if ( streamChannels > offsetCounter ) break;
857 offsetCounter -= streamChannels;
860 // If we didn't find a single stream above, then we should be able
861 // to meet the channel specification with multiple streams.
862 if ( foundStream == false ) {
864 offsetCounter = firstChannel;
865 for ( iStream=0; iStream<nStreams; iStream++ ) {
866 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
867 if ( streamChannels > offsetCounter ) break;
868 offsetCounter -= streamChannels;
871 firstStream = iStream;
872 channelOffset = offsetCounter;
// Count how many additional streams are needed to cover 'channels';
// monoMode is cleared as soon as any multi-channel stream is involved.
873 Int32 channelCounter = channels + offsetCounter - streamChannels;
875 if ( streamChannels > 1 ) monoMode = false;
876 while ( channelCounter > 0 ) {
877 streamChannels = bufferList->mBuffers[++iStream].mNumberChannels;
878 if ( streamChannels > 1 ) monoMode = false;
879 channelCounter -= streamChannels;
// Clamp the requested buffer size to the device's supported range;
// RTAUDIO_MINIMIZE_LATENCY forces the minimum.
886 // Determine the buffer size.
887 AudioValueRange bufferRange;
888 dataSize = sizeof( AudioValueRange );
889 property.mSelector = kAudioDevicePropertyBufferFrameSizeRange;
890 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &bufferRange );
892 if ( result != noErr ) {
893 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
894 errorText_ = errorStream_.str();
898 if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
899 else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
900 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
902 // Set the buffer size. For multiple streams, I'm assuming we only
903 // need to make this setting for the master channel.
904 UInt32 theSize = (UInt32) *bufferSize;
905 dataSize = sizeof( UInt32 );
906 property.mSelector = kAudioDevicePropertyBufferFrameSize;
907 result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &theSize );
909 if ( result != noErr ) {
910 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
911 errorText_ = errorStream_.str();
915 // If attempting to setup a duplex stream, the bufferSize parameter
916 // MUST be the same in both directions!
917 *bufferSize = theSize;
918 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
919 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
920 errorText_ = errorStream_.str();
924 stream_.bufferSize = *bufferSize;
925 stream_.nBuffers = 1;
927 // Check and if necessary, change the sample rate for the device.
929 dataSize = sizeof( Float64 );
930 property.mSelector = kAudioDevicePropertyNominalSampleRate;
931 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &nominalRate );
933 if ( result != noErr ) {
934 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting current sample rate.";
935 errorText_ = errorStream_.str();
939 // Only change the sample rate if off by more than 1 Hz.
940 if ( fabs( nominalRate - (double)sampleRate ) > 1.0 ) {
941 nominalRate = (Float64) sampleRate;
942 result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &nominalRate );
944 if ( result != noErr ) {
945 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate for device (" << device << ").";
946 errorText_ = errorStream_.str();
951 // Try to set "hog" mode ... it's not clear to me this is working.
952 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) {
954 dataSize = sizeof( hog_pid );
955 property.mSelector = kAudioDevicePropertyHogMode;
956 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &hog_pid );
957 if ( result != noErr ) {
958 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting 'hog' state!";
959 errorText_ = errorStream_.str();
// Only claim hog mode if another process currently owns the device.
963 if ( hog_pid != getpid() ) {
965 result = AudioObjectSetPropertyData( id, &property, 0, NULL, dataSize, &hog_pid );
966 if ( result != noErr ) {
967 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
968 errorText_ = errorStream_.str();
974 // Get the stream ID(s) so we can set the stream format.
975 AudioStreamID streamIDs[ nStreams ];
976 dataSize = nStreams * sizeof( AudioStreamID );
977 property.mSelector = kAudioDevicePropertyStreams;
978 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &streamIDs );
980 if ( result != noErr ) {
981 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream ID(s) for device (" << device << ").";
982 errorText_ = errorStream_.str();
986 // Now set the stream format for each stream. Also, check the
987 // physical format of the device and change that if necessary.
988 AudioStreamBasicDescription description;
989 dataSize = sizeof( AudioStreamBasicDescription );
992 for ( UInt32 i=0; i<streamCount; i++ ) {
994 property.mSelector = kAudioStreamPropertyVirtualFormat;
995 result = AudioObjectGetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, &dataSize, &description );
997 if ( result != noErr ) {
998 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
999 errorText_ = errorStream_.str();
1003 // Set the sample rate and data format id. However, only make the
1004 // change if the sample rate is not within 1.0 of the desired
1005 // rate and the format is not linear pcm.
1006 updateFormat = false;
1007 if ( fabs( description.mSampleRate - (double)sampleRate ) > 1.0 ) {
1008 description.mSampleRate = (double) sampleRate;
1009 updateFormat = true;
1012 if ( description.mFormatID != kAudioFormatLinearPCM ) {
1013 description.mFormatID = kAudioFormatLinearPCM;
1014 updateFormat = true;
1017 if ( updateFormat ) {
1018 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &description );
1019 if ( result != noErr ) {
1020 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
1021 errorText_ = errorStream_.str();
1026 // Now check the physical format.
1027 property.mSelector = kAudioStreamPropertyPhysicalFormat;
1028 result = AudioObjectGetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, &dataSize, &description );
1029 if ( result != noErr ) {
1030 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
1031 errorText_ = errorStream_.str();
1035 if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 24 ) {
1036 description.mFormatID = kAudioFormatLinearPCM;
1037 AudioStreamBasicDescription testDescription = description;
1038 unsigned long formatFlags;
1040 // We'll try higher bit rates first and then work our way down.
1041 testDescription.mBitsPerChannel = 32;
1042 testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
1043 testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
1044 formatFlags = description.mFormatFlags | kLinearPCMFormatFlagIsFloat & ~kLinearPCMFormatFlagIsSignedInteger;
1045 testDescription.mFormatFlags = formatFlags;
1046 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &testDescription );
1047 if ( result == noErr ) continue;
1049 testDescription = description;
1050 testDescription.mBitsPerChannel = 32;
1051 testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
1052 testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
1053 formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger) & ~kLinearPCMFormatFlagIsFloat;
1054 testDescription.mFormatFlags = formatFlags;
1055 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &testDescription );
1056 if ( result == noErr ) continue;
1058 testDescription = description;
1059 testDescription.mBitsPerChannel = 24;
1060 testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
1061 testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
1062 testDescription.mFormatFlags = formatFlags;
1063 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &testDescription );
1064 if ( result == noErr ) continue;
1066 testDescription = description;
1067 testDescription.mBitsPerChannel = 16;
1068 testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
1069 testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
1070 testDescription.mFormatFlags = formatFlags;
1071 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &testDescription );
1072 if ( result == noErr ) continue;
1074 testDescription = description;
1075 testDescription.mBitsPerChannel = 8;
1076 testDescription.mBytesPerFrame = testDescription.mBitsPerChannel/8 * testDescription.mChannelsPerFrame;
1077 testDescription.mBytesPerPacket = testDescription.mBytesPerFrame * testDescription.mFramesPerPacket;
1078 testDescription.mFormatFlags = formatFlags;
1079 result = AudioObjectSetPropertyData( streamIDs[firstStream+i], &property, 0, NULL, dataSize, &testDescription );
1080 if ( result != noErr ) {
1081 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
1082 errorText_ = errorStream_.str();
1089 // Get the stream latency. There can be latency in both the device
1090 // and the stream. First, attempt to get the device latency on the
1091 // master channel or the first open channel. Errors that might
1092 // occur here are not deemed critical.
1095 dataSize = sizeof( UInt32 );
1096 property.mSelector = kAudioDevicePropertyLatency;
1097 if ( AudioObjectHasProperty( id, &property ) == true ) {
1098 result = AudioObjectGetPropertyData( id, &property, 0, NULL, &dataSize, &latency );
1099 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
1101 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
1102 errorText_ = errorStream_.str();
1103 error( RtError::WARNING );
1107 // Now try to get the stream latency. For multiple streams, I assume the
1108 // latency is equal for each.
1109 result = AudioObjectGetPropertyData( streamIDs[firstStream], &property, 0, NULL, &dataSize, &latency );
1110 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] += latency;
1112 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream latency for device (" << device << ").";
1113 errorText_ = errorStream_.str();
1114 error( RtError::WARNING );
1117 // Byte-swapping: According to AudioHardware.h, the stream data will
1118 // always be presented in native-endian format, so we should never
1119 // need to byte swap.
1120 stream_.doByteSwap[mode] = false;
1122 // From the CoreAudio documentation, PCM data must be supplied as
1124 stream_.userFormat = format;
1125 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1127 if ( streamCount == 1 )
1128 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
1129 else // multiple streams
1130 stream_.nDeviceChannels[mode] = channels;
1131 stream_.nUserChannels[mode] = channels;
1132 stream_.channelOffset[mode] = channelOffset; // offset within a CoreAudio stream
1133 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1134 else stream_.userInterleaved = true;
1135 stream_.deviceInterleaved[mode] = true;
1136 if ( monoMode == true ) stream_.deviceInterleaved[mode] = false;
1138 // Set flags for buffer conversion.
1139 stream_.doConvertBuffer[mode] = false;
1140 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1141 stream_.doConvertBuffer[mode] = true;
1142 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
1143 stream_.doConvertBuffer[mode] = true;
1144 if ( streamCount == 1 ) {
1145 if ( stream_.nUserChannels[mode] > 1 &&
1146 stream_.userInterleaved != stream_.deviceInterleaved[mode] )
1147 stream_.doConvertBuffer[mode] = true;
1149 else if ( monoMode && stream_.userInterleaved )
1150 stream_.doConvertBuffer[mode] = true;
1152 // Allocate our CoreHandle structure for the stream.
1153 CoreHandle *handle = 0;
1154 if ( stream_.apiHandle == 0 ) {
1156 handle = new CoreHandle;
1158 catch ( std::bad_alloc& ) {
1159 errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
1163 if ( pthread_cond_init( &handle->condition, NULL ) ) {
1164 errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
1167 stream_.apiHandle = (void *) handle;
1170 handle = (CoreHandle *) stream_.apiHandle;
1171 handle->iStream[mode] = firstStream;
1172 handle->nStreams[mode] = streamCount;
1173 handle->id[mode] = id;
1175 // Allocate necessary internal buffers.
1176 unsigned long bufferBytes;
1177 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1178 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1179 if ( stream_.userBuffer[mode] == NULL ) {
1180 errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
1184 // If possible, we will make use of the CoreAudio stream buffers as
1185 // "device buffers". However, we can't do this if using multiple
1187 if ( stream_.doConvertBuffer[mode] && handle->nStreams[mode] > 1 ) {
1189 bool makeBuffer = true;
1190 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
1191 if ( mode == INPUT ) {
1192 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1193 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1194 if ( bufferBytes <= bytesOut ) makeBuffer = false;
1199 bufferBytes *= *bufferSize;
1200 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1201 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1202 if ( stream_.deviceBuffer == NULL ) {
1203 errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
1209 stream_.sampleRate = sampleRate;
1210 stream_.device[mode] = device;
1211 stream_.state = STREAM_STOPPED;
1212 stream_.callbackInfo.object = (void *) this;
1214 // Setup the buffer conversion information structure.
1215 if ( stream_.doConvertBuffer[mode] ) {
1216 if ( streamCount > 1 ) setConvertInfo( mode, 0 );
1217 else setConvertInfo( mode, channelOffset );
1220 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
1221 // Only one callback procedure per device.
1222 stream_.mode = DUPLEX;
1224 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1225 result = AudioDeviceCreateIOProcID( id, callbackHandler, (void *) &stream_.callbackInfo, &handle->procId[mode] );
1227 // deprecated in favor of AudioDeviceCreateIOProcID()
1228 result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
1230 if ( result != noErr ) {
1231 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
1232 errorText_ = errorStream_.str();
1235 if ( stream_.mode == OUTPUT && mode == INPUT )
1236 stream_.mode = DUPLEX;
1238 stream_.mode = mode;
1241 // Setup the device property listener for over/underload.
1242 property.mSelector = kAudioDeviceProcessorOverload;
1243 result = AudioObjectAddPropertyListener( id, &property, deviceListener, (void *) handle );
1249 pthread_cond_destroy( &handle->condition );
1251 stream_.apiHandle = 0;
1254 for ( int i=0; i<2; i++ ) {
1255 if ( stream_.userBuffer[i] ) {
1256 free( stream_.userBuffer[i] );
1257 stream_.userBuffer[i] = 0;
1261 if ( stream_.deviceBuffer ) {
1262 free( stream_.deviceBuffer );
1263 stream_.deviceBuffer = 0;
// Close the open stream: stop the device(s) if running, remove/destroy the
// I/O procs, free user and device buffers, and reset stream state.
// NOTE(review): gapped listing — closing braces, a `return;` after the
// warning, and #else/#endif lines are not shown here.
1269 void RtApiCore :: closeStream( void )
1271 if ( stream_.state == STREAM_CLOSED ) {
1272 errorText_ = "RtApiCore::closeStream(): no open stream to close!";
1273 error( RtError::WARNING );
1277 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1278 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1279 if ( stream_.state == STREAM_RUNNING )
1280 AudioDeviceStop( handle->id[0], callbackHandler );
1281 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1282 AudioDeviceDestroyIOProcID( handle->id[0], handle->procId[0] );
1284 // deprecated in favor of AudioDeviceDestroyIOProcID()
1285 AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
// The input side only needs its own teardown when it is a distinct device
// (DUPLEX on a single device shares one callback — see probeDeviceOpen).
1289 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1290 if ( stream_.state == STREAM_RUNNING )
1291 AudioDeviceStop( handle->id[1], callbackHandler );
1292 #if defined( MAC_OS_X_VERSION_10_5 ) && ( MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_5 )
1293 AudioDeviceDestroyIOProcID( handle->id[1], handle->procId[1] );
1295 // deprecated in favor of AudioDeviceDestroyIOProcID()
1296 AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
// Free the per-direction user buffers (index 0 = output, 1 = input).
1300 for ( int i=0; i<2; i++ ) {
1301 if ( stream_.userBuffer[i] ) {
1302 free( stream_.userBuffer[i] );
1303 stream_.userBuffer[i] = 0;
1307 if ( stream_.deviceBuffer ) {
1308 free( stream_.deviceBuffer );
1309 stream_.deviceBuffer = 0;
1312 // Destroy pthread condition variable.
1313 pthread_cond_destroy( &handle->condition );
1315 stream_.apiHandle = 0;
1317 stream_.mode = UNINITIALIZED;
1318 stream_.state = STREAM_CLOSED;
// Start the stream: under the stream mutex, start the output and (if a
// separate device) input AudioDevice callbacks, reset drain bookkeeping,
// and mark the stream RUNNING. On any CoreAudio error, falls through to
// error( SYSTEM_ERROR ) after unlocking.
// NOTE(review): gapped listing — braces, `return;` statements and an
// `unlock:`-style exit path may exist in the lines not shown.
1321 void RtApiCore :: startStream( void )
1324 if ( stream_.state == STREAM_RUNNING ) {
1325 errorText_ = "RtApiCore::startStream(): the stream is already running!";
1326 error( RtError::WARNING );
1330 MUTEX_LOCK( &stream_.mutex );
1332 OSStatus result = noErr;
1333 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1334 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1336 result = AudioDeviceStart( handle->id[0], callbackHandler );
1337 if ( result != noErr ) {
1338 errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
1339 errorText_ = errorStream_.str();
// Start the input device separately only when it differs from the output
// device (single-device duplex shares one callback).
1344 if ( stream_.mode == INPUT ||
1345 ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1347 result = AudioDeviceStart( handle->id[1], callbackHandler );
1348 if ( result != noErr ) {
1349 errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
1350 errorText_ = errorStream_.str();
// Reset drain state so a previous stop/drain does not affect this run.
1355 handle->drainCounter = 0;
1356 handle->internalDrain = false;
1357 stream_.state = STREAM_RUNNING;
1360 MUTEX_UNLOCK( &stream_.mutex );
1362 if ( result == noErr ) return;
1363 error( RtError::SYSTEM_ERROR );
// Stop the stream gracefully: for output, wait (on the handle's condition
// variable) until the callback has drained pending output, then stop the
// device(s). The mutex is released around each blocking AudioDeviceStop
// call so the audio callback (which also takes the mutex) cannot deadlock
// against us, and re-acquired afterwards.
// NOTE(review): gapped listing — braces/returns between the numbered lines
// are not shown.
1366 void RtApiCore :: stopStream( void )
1369 if ( stream_.state == STREAM_STOPPED ) {
1370 errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
1371 error( RtError::WARNING );
1375 MUTEX_LOCK( &stream_.mutex );
1377 if ( stream_.state == STREAM_STOPPED ) {
// Re-check under the lock: state may have changed while we waited.
1378 MUTEX_UNLOCK( &stream_.mutex );
1382 OSStatus result = noErr;
1383 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1384 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1386 if ( handle->drainCounter == 0 ) {
1387 handle->drainCounter = 1;
1388 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
1391 MUTEX_UNLOCK( &stream_.mutex );
1392 result = AudioDeviceStop( handle->id[0], callbackHandler );
1393 MUTEX_LOCK( &stream_.mutex );
1394 if ( result != noErr ) {
1395 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
1396 errorText_ = errorStream_.str();
1401 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1403 MUTEX_UNLOCK( &stream_.mutex );
1404 result = AudioDeviceStop( handle->id[1], callbackHandler );
1405 MUTEX_LOCK( &stream_.mutex );
1406 if ( result != noErr ) {
1407 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
1408 errorText_ = errorStream_.str();
1413 stream_.state = STREAM_STOPPED;
1416 MUTEX_UNLOCK( &stream_.mutex );
1418 if ( result == noErr ) return;
1419 error( RtError::SYSTEM_ERROR );
// Abort the stream immediately. Setting drainCounter = 1 before the stop
// tells the callback machinery not to wait for a full drain.
// NOTE(review): the function's tail (listing lines after 1432 — presumably
// the stopStream() call and closing brace) is outside this excerpt.
1422 void RtApiCore :: abortStream( void )
1425 if ( stream_.state == STREAM_STOPPED ) {
1426 errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
1427 error( RtError::WARNING );
1431 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1432 handle->drainCounter = 1;
// Real-time CoreAudio I/O callback dispatcher. Invoked (via callbackHandler)
// for each device involved in the stream; runs the user callback, then moves
// audio between the user/device buffers and the CoreAudio AudioBufferList(s),
// handling single-stream, multi-stream, interleaved and mono (non-interleaved)
// layouts in both directions.
// drainCounter protocol (as visible here): 0 = normal operation; 1 = drain
// requested (by stopStream/abortStream or callback return 1); 2 = abort
// requested by the callback; >3 = drain complete, signal stopStream().
// NOTE(review): gapped numbered listing — closing braces and `return`
// statements between the numbered lines are not shown.
1437 bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
1438 const AudioBufferList *inBufferList,
1439 const AudioBufferList *outBufferList )
1441 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
1442 if ( stream_.state == STREAM_CLOSED ) {
1443 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
1444 error( RtError::WARNING );
1448 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1449 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1451 // Check if we were draining the stream and signal is finished.
1452 if ( handle->drainCounter > 3 ) {
1453 if ( handle->internalDrain == false )
1454 pthread_cond_signal( &handle->condition );
1460 MUTEX_LOCK( &stream_.mutex );
1462 // The state might change while waiting on a mutex.
1463 if ( stream_.state == STREAM_STOPPED ) {
1464 MUTEX_UNLOCK( &stream_.mutex );
1468 AudioDeviceID outputDevice = handle->id[0];
1470 // Invoke user callback to get fresh output data UNLESS we are
1471 // draining stream or duplex mode AND the input/output devices are
1472 // different AND this function is called for the input device.
1473 if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
1474 RtAudioCallback callback = (RtAudioCallback) info->callback;
1475 double streamTime = getStreamTime();
1476 RtAudioStreamStatus status = 0;
// Report (and clear) any over/underflow flags set by deviceListener.
1477 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
1478 status |= RTAUDIO_OUTPUT_UNDERFLOW;
1479 handle->xrun[0] = false;
1481 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
1482 status |= RTAUDIO_INPUT_OVERFLOW;
1483 handle->xrun[1] = false;
// The user callback's return value drives the drain state machine.
1486 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
1487 stream_.bufferSize, streamTime, status, info->userData );
1488 if ( handle->drainCounter == 2 ) {
1489 MUTEX_UNLOCK( &stream_.mutex );
1493 else if ( handle->drainCounter == 1 )
1494 handle->internalDrain = true;
// ---------------- Output side ----------------
1497 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
1499 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
1501 if ( handle->nStreams[0] == 1 ) {
1502 memset( outBufferList->mBuffers[handle->iStream[0]].mData,
1504 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1506 else { // fill multiple streams with zeros
1507 for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
1508 memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1510 outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
1514 else if ( handle->nStreams[0] == 1 ) {
1515 if ( stream_.doConvertBuffer[0] ) { // convert directly to CoreAudio stream buffer
1516 convertBuffer( (char *) outBufferList->mBuffers[handle->iStream[0]].mData,
1517 stream_.userBuffer[0], stream_.convertInfo[0] );
1519 else { // copy from user buffer
1520 memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
1521 stream_.userBuffer[0],
1522 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1525 else { // fill multiple streams
1526 Float32 *inBuffer = (Float32 *) stream_.userBuffer[0];
1527 if ( stream_.doConvertBuffer[0] ) {
1528 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
1529 inBuffer = (Float32 *) stream_.deviceBuffer;
1532 if ( stream_.deviceInterleaved[0] == false ) { // mono mode
1533 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1534 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
1535 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1536 (void *)&inBuffer[i*stream_.bufferSize], bufferBytes );
1539 else { // fill multiple multi-channel streams with interleaved data
1540 UInt32 streamChannels, channelsLeft, inJump, outJump, inOffset;
1543 bool inInterleaved = ( stream_.userInterleaved ) ? true : false;
1544 UInt32 inChannels = stream_.nUserChannels[0];
1545 if ( stream_.doConvertBuffer[0] ) {
1546 inInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
1547 inChannels = stream_.nDeviceChannels[0];
// inOffset: distance between successive channels in the source buffer
// (1 for interleaved, bufferSize for planar).
1550 if ( inInterleaved ) inOffset = 1;
1551 else inOffset = stream_.bufferSize;
1553 channelsLeft = inChannels;
1554 for ( unsigned int i=0; i<handle->nStreams[0]; i++ ) {
1556 out = (Float32 *) outBufferList->mBuffers[handle->iStream[0]+i].mData;
1557 streamChannels = outBufferList->mBuffers[handle->iStream[0]+i].mNumberChannels;
1560 // Account for possible channel offset in first stream
1561 if ( i == 0 && stream_.channelOffset[0] > 0 ) {
1562 streamChannels -= stream_.channelOffset[0];
1563 outJump = stream_.channelOffset[0];
1567 // Account for possible unfilled channels at end of the last stream
1568 if ( streamChannels > channelsLeft ) {
1569 outJump = streamChannels - channelsLeft;
1570 streamChannels = channelsLeft;
1573 // Determine input buffer offsets and skips
1574 if ( inInterleaved ) {
1575 inJump = inChannels;
1576 in += inChannels - channelsLeft;
1580 in += (inChannels - channelsLeft) * inOffset;
1583 for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
1584 for ( unsigned int j=0; j<streamChannels; j++ ) {
1585 *out++ = in[j*inOffset];
1590 channelsLeft -= streamChannels;
// Once draining, advance the counter so the >3 check above eventually
// signals the blocked stopStream().
1595 if ( handle->drainCounter ) {
1596 handle->drainCounter++;
// ---------------- Input side ----------------
1601 AudioDeviceID inputDevice;
1602 inputDevice = handle->id[1];
1603 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
1605 if ( handle->nStreams[1] == 1 ) {
1606 if ( stream_.doConvertBuffer[1] ) { // convert directly from CoreAudio stream buffer
1607 convertBuffer( stream_.userBuffer[1],
1608 (char *) inBufferList->mBuffers[handle->iStream[1]].mData,
1609 stream_.convertInfo[1] );
1611 else { // copy to user buffer
1612 memcpy( stream_.userBuffer[1],
1613 inBufferList->mBuffers[handle->iStream[1]].mData,
1614 inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
1617 else { // read from multiple streams
1618 Float32 *outBuffer = (Float32 *) stream_.userBuffer[1];
1619 if ( stream_.doConvertBuffer[1] ) outBuffer = (Float32 *) stream_.deviceBuffer;
1621 if ( stream_.deviceInterleaved[1] == false ) { // mono mode
1622 UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
1623 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
1624 memcpy( (void *)&outBuffer[i*stream_.bufferSize],
1625 inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
1628 else { // read from multiple multi-channel streams
1629 UInt32 streamChannels, channelsLeft, inJump, outJump, outOffset;
1632 bool outInterleaved = ( stream_.userInterleaved ) ? true : false;
1633 UInt32 outChannels = stream_.nUserChannels[1];
1634 if ( stream_.doConvertBuffer[1] ) {
1635 outInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
1636 outChannels = stream_.nDeviceChannels[1];
1639 if ( outInterleaved ) outOffset = 1;
1640 else outOffset = stream_.bufferSize;
1642 channelsLeft = outChannels;
1643 for ( unsigned int i=0; i<handle->nStreams[1]; i++ ) {
1645 in = (Float32 *) inBufferList->mBuffers[handle->iStream[1]+i].mData;
1646 streamChannels = inBufferList->mBuffers[handle->iStream[1]+i].mNumberChannels;
1649 // Account for possible channel offset in first stream
1650 if ( i == 0 && stream_.channelOffset[1] > 0 ) {
1651 streamChannels -= stream_.channelOffset[1];
1652 inJump = stream_.channelOffset[1];
1656 // Account for possible unread channels at end of the last stream
1657 if ( streamChannels > channelsLeft ) {
1658 inJump = streamChannels - channelsLeft;
1659 streamChannels = channelsLeft;
1662 // Determine output buffer offsets and skips
1663 if ( outInterleaved ) {
1664 outJump = outChannels;
1665 out += outChannels - channelsLeft;
1669 out += (outChannels - channelsLeft) * outOffset;
1672 for ( unsigned int i=0; i<stream_.bufferSize; i++ ) {
1673 for ( unsigned int j=0; j<streamChannels; j++ ) {
1674 out[j*outOffset] = *in++;
1679 channelsLeft -= streamChannels;
1683 if ( stream_.doConvertBuffer[1] ) { // convert from our internal "device" buffer
1684 convertBuffer( stream_.userBuffer[1],
1685 stream_.deviceBuffer,
1686 stream_.convertInfo[1] );
1692 MUTEX_UNLOCK( &stream_.mutex );
1694 RtApi::tickStreamTime();
// Map a CoreAudio OSStatus error code to its symbolic constant name for
// human-readable error messages; unknown codes yield a generic string.
// NOTE(review): the `switch ( code ) {` header line sits in a listing gap
// between the function signature and the first `case`.
1698 const char* RtApiCore :: getErrorCode( OSStatus code )
1702 case kAudioHardwareNotRunningError:
1703 return "kAudioHardwareNotRunningError";
1705 case kAudioHardwareUnspecifiedError:
1706 return "kAudioHardwareUnspecifiedError";
1708 case kAudioHardwareUnknownPropertyError:
1709 return "kAudioHardwareUnknownPropertyError";
1711 case kAudioHardwareBadPropertySizeError:
1712 return "kAudioHardwareBadPropertySizeError";
1714 case kAudioHardwareIllegalOperationError:
1715 return "kAudioHardwareIllegalOperationError";
1717 case kAudioHardwareBadObjectError:
1718 return "kAudioHardwareBadObjectError";
1720 case kAudioHardwareBadDeviceError:
1721 return "kAudioHardwareBadDeviceError";
1723 case kAudioHardwareBadStreamError:
1724 return "kAudioHardwareBadStreamError";
1726 case kAudioHardwareUnsupportedOperationError:
1727 return "kAudioHardwareUnsupportedOperationError";
1729 case kAudioDeviceUnsupportedFormatError:
1730 return "kAudioDeviceUnsupportedFormatError";
1732 case kAudioDevicePermissionsError:
1733 return "kAudioDevicePermissionsError";
1736 return "CoreAudio unknown error";
1740 //******************** End of __MACOSX_CORE__ *********************//
1743 #if defined(__UNIX_JACK__)
1745 // JACK is a low-latency audio server, originally written for the
1746 // GNU/Linux operating system and now also ported to OS-X. It can
1747 // connect a number of different applications to an audio device, as
1748 // well as allowing them to share audio between themselves.
1750 // When using JACK with RtAudio, "devices" refer to JACK clients that
1751 // have ports connected to the server. The JACK server is typically
1752 // started in a terminal as follows:
1754 // .jackd -d alsa -d hw:0
1756 // or through an interface program such as qjackctl. Many of the
1757 // parameters normally set for a stream are fixed by the JACK server
1758 // and can be specified when the JACK server is started. In
1761 // .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
1763 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
1764 // frames, and number of buffers = 4. Once the server is running, it
1765 // is not possible to override these values. If the values are not
1766 // specified in the command-line, the JACK server uses default values.
1768 // The JACK server does not have to be running when an instance of
1769 // RtApiJack is created, though the function getDeviceCount() will
1770 // report 0 devices found until JACK has been started. When no
1771 // devices are available (i.e., the JACK server is not running), a
1772 // stream cannot be opened.
1774 #include <jack/jack.h>
1778 // A structure to hold various information related to the Jack API
1778b// implementation — presumably `struct JackHandle` (its opening line sits
1778c// in a listing gap just below this comment).
// client: the JACK client connection for this stream.
1781 jack_client_t *client;
// ports[0]/ports[1]: arrays of output/input JACK ports for the stream.
1782 jack_port_t **ports[2];
1783 std::string deviceName[2];
// condition + drainCounter/internalDrain implement the same drain/stop
// handshake used by CoreHandle above.
1785 pthread_cond_t condition;
1786 int drainCounter; // Tracks callback counts when draining
1787 bool internalDrain; // Indicates if stop is initiated from callback or not.
// Default constructor: zero the pointers and clear the xrun flags.
1790 :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
// File-scope thread id used when spawning the shutdown-handling thread.
1793 ThreadHandle threadId;
// No-op JACK error handler used to silence JACK's internal error output.
1794 void jackSilentError( const char * ) {};
// Constructor: no state to initialize; in non-debug builds, install the
// silent error handler so JACK does not print to stderr.
1796 RtApiJack :: RtApiJack()
1798 // Nothing to do here.
1799 #if !defined(__RTAUDIO_DEBUG__)
1800 // Turn off Jack's internal error reporting.
1801 jack_set_error_function( &jackSilentError );
// Destructor: make sure any open stream is closed before teardown.
1805 RtApiJack :: ~RtApiJack()
1807 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count JACK "devices" by connecting as a throwaway client and grouping the
// registered port names by their prefix up to (and including) the first ':'.
// Returns 0 if the JACK server is not running.
// NOTE(review): gapped listing — the declarations of `ports`/`iColon`, the
// `do {` opener, the nDevices increment, and the final return are in gaps.
1810 unsigned int RtApiJack :: getDeviceCount( void )
1812 // See if we can become a jack client.
1813 jack_options_t options = (jack_options_t) ( JackNoStartServer ); //JackNullOption;
1814 jack_status_t *status = NULL;
1815 jack_client_t *client = jack_client_open( "RtApiJackCount", options, status );
1816 if ( client == 0 ) return 0;
1819 std::string port, previousPort;
1820 unsigned int nChannels = 0, nDevices = 0;
1821 ports = jack_get_ports( client, NULL, NULL, 0 );
1823 // Parse the port names up to the first colon (:).
1826 port = (char *) ports[ nChannels ];
1827 iColon = port.find(":");
1828 if ( iColon != std::string::npos ) {
1829 port = port.substr( 0, iColon + 1 );
// A new prefix means a new device (ports are assumed grouped by client).
1830 if ( port != previousPort ) {
1832 previousPort = port;
1835 } while ( ports[++nChannels] );
1839 jack_client_close( client );
// Probe device `device`: connect as a temporary JACK client, find the
// device's name (nth distinct port-name prefix), then report the server
// sample rate and the input/output channel counts. JACK "input ports" are
// RtAudio output channels and vice versa, and samples are always FLOAT32.
// NOTE(review): gapped listing — declarations (`ports`, `iColon`), `do {`
// openers, `if (ports)` guards, and return statements sit in the gaps.
1843 RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
1845 RtAudio::DeviceInfo info;
1846 info.probed = false;
1848 jack_options_t options = (jack_options_t) ( JackNoStartServer ); //JackNullOption
1849 jack_status_t *status = NULL;
1850 jack_client_t *client = jack_client_open( "RtApiJackInfo", options, status );
1851 if ( client == 0 ) {
1852 errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
1853 error( RtError::WARNING );
1858 std::string port, previousPort;
1859 unsigned int nPorts = 0, nDevices = 0;
1860 ports = jack_get_ports( client, NULL, NULL, 0 );
1862 // Parse the port names up to the first colon (:).
1865 port = (char *) ports[ nPorts ];
1866 iColon = port.find(":");
1867 if ( iColon != std::string::npos ) {
1868 port = port.substr( 0, iColon );
1869 if ( port != previousPort ) {
1870 if ( nDevices == device ) info.name = port;
1872 previousPort = port;
1875 } while ( ports[++nPorts] );
1879 if ( device >= nDevices ) {
1880 errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
1881 error( RtError::INVALID_USE );
1884 // Get the current jack server sample rate.
1885 info.sampleRates.clear();
1886 info.sampleRates.push_back( jack_get_sample_rate( client ) );
1888 // Count the available ports containing the client name as device
1889 // channels. Jack "input ports" equal RtAudio output channels.
1890 unsigned int nChannels = 0;
1891 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );
1893 while ( ports[ nChannels ] ) nChannels++;
1895 info.outputChannels = nChannels;
1898 // Jack "output ports" equal RtAudio input channels.
// NOTE(review): nChannels must be reset to 0 before this second count —
// the reset (original line 1899) presumably sits in the listing gap here;
// confirm against the full source.
1900 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );
1902 while ( ports[ nChannels ] ) nChannels++;
1904 info.inputChannels = nChannels;
1907 if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
1908 jack_client_close(client);
1909 errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
1910 error( RtError::WARNING );
1914 // If device opens for both playback and capture, we determine the channels.
1915 if ( info.outputChannels > 0 && info.inputChannels > 0 )
1916 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
1918 // Jack always uses 32-bit floats.
1919 info.nativeFormats = RTAUDIO_FLOAT32;
1921 // Jack doesn't provide default devices so we'll use the first available one.
1922 if ( device == 0 && info.outputChannels > 0 )
1923 info.isDefaultOutput = true;
1924 if ( device == 0 && info.inputChannels > 0 )
1925 info.isDefaultInput = true;
1927 jack_client_close(client);
// Jack process callback: forwards each audio period to
// RtApiJack::callbackEvent(). A non-zero return tells the Jack server the
// client failed to process (used here when callbackEvent() reports failure).
1932 int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
1934 CallbackInfo *info = (CallbackInfo *) infoPointer;
1936 RtApiJack *object = (RtApiJack *) info->object;
1937 if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;
// NOTE(review): the success-path "return 0;" is on a dropped line per the
// embedded numbering gap after 1937 -- verify against the full source.
1942 // This function will be called by a spawned thread when the Jack
1943 // server signals that it is shutting down. It is necessary to handle
1944 // it this way because the jackShutdown() function must return before
1945 // the jack_deactivate() function (in closeStream()) will return.
// Thread entry point: unpacks the CallbackInfo passed by pthread_create in
// jackShutdown() and closes the stream from this (separate) thread.
1946 extern "C" void *jackCloseStream( void *ptr )
1948 CallbackInfo *info = (CallbackInfo *) ptr;
1949 RtApiJack *object = (RtApiJack *) info->object;
1951 object->closeStream();
// Terminate this helper thread; no result value is needed.
1953 pthread_exit( NULL );
// Jack shutdown callback (registered via jack_on_shutdown in
// probeDeviceOpen). Spawns a thread to close the stream, since closing must
// not happen on the thread invoking this callback (see comment block above
// jackCloseStream).
1955 void jackShutdown( void *infoPointer )
1957 CallbackInfo *info = (CallbackInfo *) infoPointer;
1958 RtApiJack *object = (RtApiJack *) info->object;
1960 // Check current stream state. If stopped, then we'll assume this
1961 // was called as a result of a call to RtApiJack::stopStream (the
1962 // deactivation of a client handle causes this function to be called).
1963 // If not, we'll assume the Jack server is shutting down or some
1964 // other problem occurred and we should close the stream.
1965 if ( object->isStreamRunning() == false ) return;
// NOTE(review): threadId has no visible declaration in this chunk (numbering
// jumps 1965 -> 1967) -- presumably a pthread_t declared on the missing
// line; verify against the full source.
1967 pthread_create( &threadId, NULL, jackCloseStream, info );
1968 std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
// Jack xrun callback: latches an over/underflow flag per direction
// (index 0 = playback, 1 = capture) so callbackEvent() can report it to the
// user callback via the stream-status bits.
1971 int jackXrun( void *infoPointer )
// Assumes infoPointer is the stream's JackHandle -- TODO confirm this
// matches the argument actually passed at the jack_set_xrun_callback
// registration site.
1973 JackHandle *handle = (JackHandle *) infoPointer;
1975 if ( handle->ports[0] ) handle->xrun[0] = true;
1976 if ( handle->ports[1] ) handle->xrun[1] = true;
// Open (or extend to duplex) a Jack stream on the given device: connect to
// the server, validate device/channel/sample-rate constraints, allocate the
// JackHandle and user/device buffers, register the stream's ports, and set
// the process/xrun/shutdown callbacks. Returns true on success; on failure
// sets errorText_ and falls through to the cleanup code at the end.
// FIX(review): jack_set_xrun_callback previously passed (void *) &handle --
// the address of a *local pointer variable* (a JackHandle**, dangling once
// this function returns) -- while jackXrun casts its argument directly to
// JackHandle*. Pass the handle itself so the cast in jackXrun is valid.
1981 bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
1982 unsigned int firstChannel, unsigned int sampleRate,
1983 RtAudioFormat format, unsigned int *bufferSize,
1984 RtAudio::StreamOptions *options )
1986 JackHandle *handle = (JackHandle *) stream_.apiHandle;
1988 // Look for jack server and try to become a client (only do once per stream).
1989 jack_client_t *client = 0;
1990 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
1991 jack_options_t jackoptions = (jack_options_t) ( JackNoStartServer ); //JackNullOption;
1992 jack_status_t *status = NULL;
// Honor a caller-supplied client name if one was given in the options.
1993 if ( options && !options->streamName.empty() )
1994 client = jack_client_open( options->streamName.c_str(), jackoptions, status );
1996 client = jack_client_open( "RtApiJack", jackoptions, status );
1997 if ( client == 0 ) {
1998 errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
1999 error( RtError::WARNING );
2004 // The handle must have been created on an earlier pass.
2005 client = handle->client;
// Map the requested device index to a Jack client-name prefix, as in
// getDeviceInfo() (loop opening "do {" on a dropped line).
2009 std::string port, previousPort, deviceName;
2010 unsigned int nPorts = 0, nDevices = 0;
2011 ports = jack_get_ports( client, NULL, NULL, 0 );
2013 // Parse the port names up to the first colon (:).
2016 port = (char *) ports[ nPorts ];
2017 iColon = port.find(":");
2018 if ( iColon != std::string::npos ) {
2019 port = port.substr( 0, iColon );
2020 if ( port != previousPort ) {
2021 if ( nDevices == device ) deviceName = port;
2023 previousPort = port;
2026 } while ( ports[++nPorts] );
2030 if ( device >= nDevices ) {
2031 errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
2035 // Count the available ports containing the client name as device
2036 // channels. Jack "input ports" equal RtAudio output channels.
2037 unsigned int nChannels = 0;
2038 unsigned long flag = JackPortIsInput;
2039 if ( mode == INPUT ) flag = JackPortIsOutput;
2040 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
2042 while ( ports[ nChannels ] ) nChannels++;
2046 // Compare the jack ports for specified client to the requested number of channels.
2047 if ( nChannels < (channels + firstChannel) ) {
2048 errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
2049 errorText_ = errorStream_.str();
2053 // Check the jack server sample rate.
// Jack fixes the rate server-wide; the stream must match it exactly.
2054 unsigned int jackRate = jack_get_sample_rate( client );
2055 if ( sampleRate != jackRate ) {
2056 jack_client_close( client );
2057 errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
2058 errorText_ = errorStream_.str();
2061 stream_.sampleRate = jackRate;
2063 // Get the latency of the JACK port.
2064 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
2065 if ( ports[ firstChannel ] )
2066 stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
2069 // The jack server always uses 32-bit floating-point data.
2070 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2071 stream_.userFormat = format;
2073 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2074 else stream_.userInterleaved = true;
2076 // Jack always uses non-interleaved buffers.
2077 stream_.deviceInterleaved[mode] = false;
2079 // Jack always provides host byte-ordered data.
2080 stream_.doByteSwap[mode] = false;
2082 // Get the buffer size. The buffer size and number of buffers
2083 // (periods) is set when the jack server is started.
2084 stream_.bufferSize = (int) jack_get_buffer_size( client );
2085 *bufferSize = stream_.bufferSize;
2087 stream_.nDeviceChannels[mode] = channels;
2088 stream_.nUserChannels[mode] = channels;
2090 // Set flags for buffer conversion.
// Conversion is needed when the user format differs from Jack's float32, or
// when the user wants interleaved data (Jack is always non-interleaved).
2091 stream_.doConvertBuffer[mode] = false;
2092 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2093 stream_.doConvertBuffer[mode] = true;
2094 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2095 stream_.nUserChannels[mode] > 1 )
2096 stream_.doConvertBuffer[mode] = true;
2098 // Allocate our JackHandle structure for the stream.
2099 if ( handle == 0 ) {
2101 handle = new JackHandle;
2103 catch ( std::bad_alloc& ) {
2104 errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
2108 if ( pthread_cond_init(&handle->condition, NULL) ) {
2109 errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
2112 stream_.apiHandle = (void *) handle;
2113 handle->client = client;
2115 handle->deviceName[mode] = deviceName;
2117 // Allocate necessary internal buffers.
2118 unsigned long bufferBytes;
2119 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2120 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2121 if ( stream_.userBuffer[mode] == NULL ) {
2122 errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
2126 if ( stream_.doConvertBuffer[mode] ) {
// When adding input to an existing output stream, reuse the existing device
// buffer if it is already large enough.
2128 bool makeBuffer = true;
2129 if ( mode == OUTPUT )
2130 bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2131 else { // mode == INPUT
2132 bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
2133 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2134 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2135 if ( bufferBytes < bytesOut ) makeBuffer = false;
2140 bufferBytes *= *bufferSize;
2141 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2142 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2143 if ( stream_.deviceBuffer == NULL ) {
2144 errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
2150 // Allocate memory for the Jack ports (channels) identifiers.
2151 handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
2152 if ( handle->ports[mode] == NULL ) {
2153 errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
2157 stream_.device[mode] = device;
2158 stream_.channelOffset[mode] = firstChannel;
2159 stream_.state = STREAM_STOPPED;
2160 stream_.callbackInfo.object = (void *) this;
2162 if ( stream_.mode == OUTPUT && mode == INPUT )
2163 // We had already set up the stream for output.
2164 stream_.mode = DUPLEX;
2166 stream_.mode = mode;
2167 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
// Pass the JackHandle itself (NOT &handle): jackXrun casts its argument
// directly to JackHandle*, and &handle would be a dangling stack address.
2168 jack_set_xrun_callback( handle->client, jackXrun, (void *) handle );
2169 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
2172 // Register our ports.
// NOTE(review): the "label" buffer declaration is on a dropped line (2173)
// per the numbering gap -- verify against the full source.
2174 if ( mode == OUTPUT ) {
2175 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2176 snprintf( label, 64, "outport %d", i );
2177 handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
2178 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
2182 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2183 snprintf( label, 64, "inport %d", i );
2184 handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
2185 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
2189 // Setup the buffer conversion information structure. We don't use
2190 // buffers to do channel offsets, so we override that parameter
2192 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
// Error/cleanup path: release the condition variable, the Jack client, port
// arrays, and any allocated buffers, then reset the api handle.
2198 pthread_cond_destroy( &handle->condition );
2199 jack_client_close( handle->client );
2201 if ( handle->ports[0] ) free( handle->ports[0] );
2202 if ( handle->ports[1] ) free( handle->ports[1] );
2205 stream_.apiHandle = 0;
2208 for ( int i=0; i<2; i++ ) {
2209 if ( stream_.userBuffer[i] ) {
2210 free( stream_.userBuffer[i] );
2211 stream_.userBuffer[i] = 0;
2215 if ( stream_.deviceBuffer ) {
2216 free( stream_.deviceBuffer );
2217 stream_.deviceBuffer = 0;
// Close an open Jack stream: deactivate the client if running, close the
// client connection, free the per-stream handle/ports and the user/device
// buffers, and reset the stream bookkeeping to UNINITIALIZED/CLOSED.
// Emits a WARNING if no stream is open.
2223 void RtApiJack :: closeStream( void )
2225 if ( stream_.state == STREAM_CLOSED ) {
2226 errorText_ = "RtApiJack::closeStream(): no open stream to close!";
2227 error( RtError::WARNING );
2231 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2234 if ( stream_.state == STREAM_RUNNING )
2235 jack_deactivate( handle->client );
2237 jack_client_close( handle->client );
2241 if ( handle->ports[0] ) free( handle->ports[0] );
2242 if ( handle->ports[1] ) free( handle->ports[1] );
2243 pthread_cond_destroy( &handle->condition );
// NOTE(review): "delete handle;" is presumably on a dropped line (2244 per
// the numbering gap) -- verify against the full source.
2245 stream_.apiHandle = 0;
2248 for ( int i=0; i<2; i++ ) {
2249 if ( stream_.userBuffer[i] ) {
2250 free( stream_.userBuffer[i] );
2251 stream_.userBuffer[i] = 0;
2255 if ( stream_.deviceBuffer ) {
2256 free( stream_.deviceBuffer );
2257 stream_.deviceBuffer = 0;
2260 stream_.mode = UNINITIALIZED;
2261 stream_.state = STREAM_CLOSED;
// Start the stream: activate the Jack client and wire our registered ports
// to the device's physical ports (first nUserChannels ports, starting at the
// configured channel offset). Raises SYSTEM_ERROR via error() if activation
// or any connection fails; WARNING if already running.
2264 void RtApiJack :: startStream( void )
2267 if ( stream_.state == STREAM_RUNNING ) {
2268 errorText_ = "RtApiJack::startStream(): the stream is already running!";
2269 error( RtError::WARNING );
2273 MUTEX_LOCK(&stream_.mutex);
2275 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2276 int result = jack_activate( handle->client );
// Non-zero jack_activate result falls through to the unified error exit.
2278 errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
2284 // Get the list of available ports.
2285 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2287 ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
2288 if ( ports == NULL) {
2289 errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
2293 // Now make the port connections. Since RtAudio wasn't designed to
2294 // allow the user to select particular channels of a device, we'll
2295 // just open the first "nChannels" ports with offset.
2296 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2298 if ( ports[ stream_.channelOffset[0] + i ] )
2299 result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
2302 errorText_ = "RtApiJack::startStream(): error connecting output ports!";
2309 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2311 ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );
2312 if ( ports == NULL) {
2313 errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
2317 // Now make the port connections. See note above.
2318 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
// For capture, the connection direction is reversed: device port -> ours.
2320 if ( ports[ stream_.channelOffset[1] + i ] )
2321 result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
2324 errorText_ = "RtApiJack::startStream(): error connecting input ports!";
// Reset drain bookkeeping so a previous stop/drain cannot leak into the new
// run, then mark the stream running.
2331 handle->drainCounter = 0;
2332 handle->internalDrain = false;
2333 stream_.state = STREAM_RUNNING;
2336 MUTEX_UNLOCK(&stream_.mutex);
2338 if ( result == 0 ) return;
2339 error( RtError::SYSTEM_ERROR );
// Stop the stream gracefully. For output/duplex streams, initiates a drain
// (drainCounter = 1) and blocks on the handle's condition variable until the
// process callback signals the drain is complete, then deactivates the
// client. Emits a WARNING if already stopped.
2342 void RtApiJack :: stopStream( void )
2345 if ( stream_.state == STREAM_STOPPED ) {
2346 errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
2347 error( RtError::WARNING );
2351 MUTEX_LOCK( &stream_.mutex );
// Re-check under the lock: the callback thread may have stopped the stream
// while we were waiting to acquire the mutex.
2353 if ( stream_.state == STREAM_STOPPED ) {
2354 MUTEX_UNLOCK( &stream_.mutex );
2358 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2359 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2361 if ( handle->drainCounter == 0 ) {
2362 handle->drainCounter = 1;
// pthread_cond_wait releases stream_.mutex while blocked; callbackEvent()
// signals this condition once the drain count passes its threshold.
2363 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
2367 jack_deactivate( handle->client );
2368 stream_.state = STREAM_STOPPED;
2370 MUTEX_UNLOCK( &stream_.mutex );
// Abort the stream immediately: setting drainCounter non-zero before the
// stop makes the process callback write silence instead of waiting for user
// data to drain. Emits a WARNING if already stopped.
2373 void RtApiJack :: abortStream( void )
2376 if ( stream_.state == STREAM_STOPPED ) {
2377 errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
2378 error( RtError::WARNING );
2382 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2383 handle->drainCounter = 1;
// NOTE(review): the follow-up stopStream() call is presumably on a dropped
// line per the numbering gap after 2383 -- verify against the full source.
2388 // This function will be called by a spawned thread when the user
2389 // callback function signals that the stream should be stopped or
2390 // aborted. It is necessary to handle it this way because the
2391 // callbackEvent() function must return before the jack_deactivate()
2392 // function will return.
// Thread entry point: unpacks the CallbackInfo passed by pthread_create in
// callbackEvent() and stops the stream from this (separate) thread.
2393 extern "C" void *jackStopStream( void *ptr )
2395 CallbackInfo *info = (CallbackInfo *) ptr;
2396 RtApiJack *object = (RtApiJack *) info->object;
2398 object->stopStream();
2400 pthread_exit( NULL );
// Per-period processing for the Jack backend, driven by jackCallbackHandler:
// invokes the user callback (unless draining), then moves audio between the
// Jack port buffers and the user/device buffers, converting format and
// interleaving as configured. Returns SUCCESS normally; signals the stop
// condition or spawns jackStopStream when a drain completes.
// FIX(review): both warning messages below wrongly said
// "RtApiCore::callbackEvent" -- a copy-paste from the CoreAudio backend
// inside this Jack method; corrected to "RtApiJack::callbackEvent".
2403 bool RtApiJack :: callbackEvent( unsigned long nframes )
2405 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2406 if ( stream_.state == STREAM_CLOSED ) {
2407 errorText_ = "RtApiJack::callbackEvent(): the stream is closed ... this shouldn't happen!";
2408 error( RtError::WARNING );
2411 if ( stream_.bufferSize != nframes ) {
2412 errorText_ = "RtApiJack::callbackEvent(): the JACK buffer size has changed ... cannot process!";
2413 error( RtError::WARNING );
2417 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2418 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2420 // Check if we were draining the stream and signal is finished.
2421 if ( handle->drainCounter > 3 ) {
// internalDrain means the stop request originated in the user callback, so
// the stop must run on a spawned thread (see jackStopStream comment block).
2422 if ( handle->internalDrain == true ) {
// NOTE(review): threadId's declaration is on a dropped line per the
// numbering gap -- verify against the full source.
2423 pthread_create( &threadId, NULL, jackStopStream, info );
2426 pthread_cond_signal( &handle->condition );
2430 MUTEX_LOCK( &stream_.mutex );
2432 // The state might change while waiting on a mutex.
2433 if ( stream_.state == STREAM_STOPPED ) {
2434 MUTEX_UNLOCK( &stream_.mutex );
2438 // Invoke user callback first, to get fresh output data.
2439 if ( handle->drainCounter == 0 ) {
2440 RtAudioCallback callback = (RtAudioCallback) info->callback;
2441 double streamTime = getStreamTime();
2442 RtAudioStreamStatus status = 0;
// Report (and clear) any xrun flags latched by jackXrun since last period.
2443 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
2444 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2445 handle->xrun[0] = false;
2447 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
2448 status |= RTAUDIO_INPUT_OVERFLOW;
2449 handle->xrun[1] = false;
// The user callback's return value drives draining: 1 = stop after drain,
// 2 = abort immediately.
2451 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2452 stream_.bufferSize, streamTime, status, info->userData );
2453 if ( handle->drainCounter == 2 ) {
2454 MUTEX_UNLOCK( &stream_.mutex );
2456 pthread_create( &id, NULL, jackStopStream, info );
2459 else if ( handle->drainCounter == 1 )
2460 handle->internalDrain = true;
2463 jack_default_audio_sample_t *jackbuffer;
2464 unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
2465 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2467 if ( handle->drainCounter > 0 ) { // write zeros to the output stream
2469 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2470 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2471 memset( jackbuffer, 0, bufferBytes );
2475 else if ( stream_.doConvertBuffer[0] ) {
2477 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
2479 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2480 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2481 memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
2484 else { // no buffer conversion
2485 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2486 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2487 memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
// While draining, count periods of silence so the >3 check above fires.
2491 if ( handle->drainCounter ) {
2492 handle->drainCounter++;
2497 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2499 if ( stream_.doConvertBuffer[1] ) {
2500 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2501 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2502 memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
2504 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
2506 else { // no buffer conversion
2507 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2508 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2509 memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
2515 MUTEX_UNLOCK(&stream_.mutex);
2517 RtApi::tickStreamTime();
2520 //******************** End of __UNIX_JACK__ *********************//
2523 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
2525 // The ASIO API is designed around a callback scheme, so this
2526 // implementation is similar to that used for OS-X CoreAudio and Linux
2527 // Jack. The primary constraint with ASIO is that it only allows
2528 // access to a single driver at a time. Thus, it is not possible to
2529 // have more than one simultaneous RtAudio stream.
2531 // This implementation also requires a number of external ASIO files
2532 // and a few global variables. The ASIO callback scheme does not
2533 // allow for the passing of user data, so we must create a global
2534 // pointer to our callbackInfo structure.
2536 // On unix systems, we make use of a pthread condition variable.
2537 // Since there is no equivalent in Windows, I hacked something based
2538 // on information found in
2539 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
2541 #include "asiosys.h"
2543 #include "iasiothiscallresolver.h"
2544 #include "asiodrivers.h"
// File-scope ASIO state shared by all RtApiAsio methods (single-driver API,
// see the comment block above for why globals are unavoidable here).
2547 AsioDrivers drivers;
2548 ASIOCallbacks asioCallbacks;
2549 ASIODriverInfo driverInfo;
2550 CallbackInfo *asioCallbackInfo;
// NOTE(review): the fields below belong to a per-stream handle struct whose
// declaration line (presumably "struct AsioHandle {") was dropped by the
// extraction -- verify against the full source.
2554 int drainCounter; // Tracks callback counts when draining
2555 bool internalDrain; // Indicates if stop is initiated from callback or not.
2556 ASIOBufferInfo *bufferInfos;
// Default constructor initializer list for the handle struct.
2560 :drainCounter(0), internalDrain(false), bufferInfos(0) {}
2563 // Function declarations (definitions at end of section)
2564 static const char* getAsioErrorString( ASIOError result );
2565 void sampleRateChanged( ASIOSampleRate sRate );
2566 long asioMessages( long selector, long value, void* message, double* opt );
// Constructor: initialize COM (ASIO drivers are COM objects requiring a
// single-threaded apartment), reset any current driver, and prime the
// driverInfo struct for later ASIOInit() calls.
2568 RtApiAsio :: RtApiAsio()
2570 // ASIO cannot run on a multi-threaded appartment. You can call
2571 // CoInitialize beforehand, but it must be for appartment threading
2572 // (in which case, CoInitilialize will return S_FALSE here).
2573 coInitialized_ = false;
2574 HRESULT hr = CoInitialize( NULL );
// On failure (numbering gap suggests the FAILED(hr) test is on a dropped
// line) a WARNING is emitted; coInitialized_ tracks whether the destructor
// must call CoUninitialize().
2576 errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
2577 error( RtError::WARNING );
2579 coInitialized_ = true;
2581 drivers.removeCurrentDriver();
2582 driverInfo.asioVersion = 2;
2584 // See note in DirectSound implementation about GetDesktopWindow().
2585 driverInfo.sysRef = GetForegroundWindow();
// Destructor: close any open stream, then balance the constructor's
// CoInitialize (only if it succeeded there).
2588 RtApiAsio :: ~RtApiAsio()
2590 if ( stream_.state != STREAM_CLOSED ) closeStream();
2591 if ( coInitialized_ ) CoUninitialize();
// Return the number of installed ASIO drivers (each driver is one "device").
2594 unsigned int RtApiAsio :: getDeviceCount( void )
2596 return (unsigned int) drivers.asioGetNumDev();
// Probe one ASIO driver and fill an RtAudio::DeviceInfo: name, channel
// counts, supported sample rates (tested against the SAMPLE_RATES table) and
// the native data format of channel 0. Because ASIO permits only one active
// driver, results cached by saveDeviceInfo() are returned while a stream is
// open. Emits WARNING/INVALID_USE via error() on failure.
2599 RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
2601 RtAudio::DeviceInfo info;
2602 info.probed = false;
2605 unsigned int nDevices = getDeviceCount();
2606 if ( nDevices == 0 ) {
2607 errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
2608 error( RtError::INVALID_USE );
2611 if ( device >= nDevices ) {
2612 errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
2613 error( RtError::INVALID_USE );
2616 // If a stream is already open, we cannot probe other devices. Thus, use the saved results.
2617 if ( stream_.state != STREAM_CLOSED ) {
2618 if ( device >= devices_.size() ) {
2619 errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";
2620 error( RtError::WARNING );
2623 return devices_[ device ];
2626 char driverName[32];
2627 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2628 if ( result != ASE_OK ) {
2629 errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
2630 errorText_ = errorStream_.str();
2631 error( RtError::WARNING );
2635 info.name = driverName;
// Load and initialize the driver for the duration of this probe; every
// early-error path below must removeCurrentDriver() before returning.
2637 if ( !drivers.loadDriver( driverName ) ) {
2638 errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
2639 errorText_ = errorStream_.str();
2640 error( RtError::WARNING );
2644 result = ASIOInit( &driverInfo );
2645 if ( result != ASE_OK ) {
2646 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2647 errorText_ = errorStream_.str();
2648 error( RtError::WARNING );
2652 // Determine the device channel information.
2653 long inputChannels, outputChannels;
2654 result = ASIOGetChannels( &inputChannels, &outputChannels );
2655 if ( result != ASE_OK ) {
2656 drivers.removeCurrentDriver();
2657 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2658 errorText_ = errorStream_.str();
2659 error( RtError::WARNING );
2663 info.outputChannels = outputChannels;
2664 info.inputChannels = inputChannels;
2665 if ( info.outputChannels > 0 && info.inputChannels > 0 )
2666 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
2668 // Determine the supported sample rates.
2669 info.sampleRates.clear();
2670 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
2671 result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
2672 if ( result == ASE_OK )
2673 info.sampleRates.push_back( SAMPLE_RATES[i] );
2676 // Determine supported data types ... just check first channel and assume rest are the same.
2677 ASIOChannelInfo channelInfo;
2678 channelInfo.channel = 0;
// Prefer probing an input channel; fall back to output when input-only
// probing is impossible (no input channels).
2679 channelInfo.isInput = true;
2680 if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
2681 result = ASIOGetChannelInfo( &channelInfo );
2682 if ( result != ASE_OK ) {
2683 drivers.removeCurrentDriver();
2684 errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
2685 errorText_ = errorStream_.str();
2686 error( RtError::WARNING );
// Map the ASIO sample type to the RtAudio format flags; both byte orders of
// each width map to the same RtAudio format (byte swapping handled later).
2690 info.nativeFormats = 0;
2691 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
2692 info.nativeFormats |= RTAUDIO_SINT16;
2693 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
2694 info.nativeFormats |= RTAUDIO_SINT32;
2695 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
2696 info.nativeFormats |= RTAUDIO_FLOAT32;
2697 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
2698 info.nativeFormats |= RTAUDIO_FLOAT64;
2700 if ( info.outputChannels > 0 )
2701 if ( getDefaultOutputDevice() == device ) info.isDefaultOutput = true;
2702 if ( info.inputChannels > 0 )
2703 if ( getDefaultInputDevice() == device ) info.isDefaultInput = true;
2706 drivers.removeCurrentDriver();
// ASIO buffer-switch callback. ASIO passes no user data, so the stream
// object is recovered through the global asioCallbackInfo pointer (see the
// section comment above) and the work is delegated to callbackEvent().
2710 void bufferSwitch( long index, ASIOBool processNow )
2712 RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
2713 object->callbackEvent( index );
// Snapshot DeviceInfo for every driver before a stream opens. Needed because
// ASIO allows only one loaded driver at a time, so getDeviceInfo() cannot
// probe other devices while a stream is open and serves these cached
// results instead.
2716 void RtApiAsio :: saveDeviceInfo( void )
2720 unsigned int nDevices = getDeviceCount();
2721 devices_.resize( nDevices );
2722 for ( unsigned int i=0; i<nDevices; i++ )
2723 devices_[i] = getDeviceInfo( i );
// Open (or extend to duplex) an ASIO stream: load/init the driver, validate
// channel counts and sample rate, set the rate if it differs, determine the
// driver's native data type, and negotiate a buffer size within the driver's
// min/max/granularity constraints. (The function continues beyond this
// chunk; visible lines are edited strictly in place.)
// FIX(review): the ASIOGetSampleRate call below read
// "ASIOGetSampleRate( ¤tRate )" -- encoding corruption where
// "&curr" of "&currentRate" was mangled into the '¤' character
// (an HTML-entity artifact). Restored the address-of operator, matching the
// "ASIOSampleRate currentRate;" declaration directly above and the ASIO SDK
// signature ASIOGetSampleRate(ASIOSampleRate*).
2726 bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
2727 unsigned int firstChannel, unsigned int sampleRate,
2728 RtAudioFormat format, unsigned int *bufferSize,
2729 RtAudio::StreamOptions *options )
2731 // For ASIO, a duplex stream MUST use the same driver.
2732 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
2733 errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
2737 char driverName[32];
2738 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2739 if ( result != ASE_OK ) {
2740 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
2741 errorText_ = errorStream_.str();
2745 // The getDeviceInfo() function will not work when a stream is open
2746 // because ASIO does not allow multiple devices to run at the same
2747 // time. Thus, we'll probe the system before opening a stream and
2748 // save the results for use by getDeviceInfo().
2749 this->saveDeviceInfo();
2751 // Only load the driver once for duplex stream.
2752 if ( mode != INPUT || stream_.mode != OUTPUT ) {
2753 if ( !drivers.loadDriver( driverName ) ) {
2754 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
2755 errorText_ = errorStream_.str();
2759 result = ASIOInit( &driverInfo );
2760 if ( result != ASE_OK ) {
2761 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2762 errorText_ = errorStream_.str();
2767 // Check the device channel count.
2768 long inputChannels, outputChannels;
2769 result = ASIOGetChannels( &inputChannels, &outputChannels );
2770 if ( result != ASE_OK ) {
2771 drivers.removeCurrentDriver();
2772 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2773 errorText_ = errorStream_.str();
2777 if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
2778 ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
2779 drivers.removeCurrentDriver();
2780 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
2781 errorText_ = errorStream_.str();
2784 stream_.nDeviceChannels[mode] = channels;
2785 stream_.nUserChannels[mode] = channels;
2786 stream_.channelOffset[mode] = firstChannel;
2788 // Verify the sample rate is supported.
2789 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
2790 if ( result != ASE_OK ) {
2791 drivers.removeCurrentDriver();
2792 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
2793 errorText_ = errorStream_.str();
2797 // Get the current sample rate
2798 ASIOSampleRate currentRate;
2799 result = ASIOGetSampleRate( &currentRate );
2800 if ( result != ASE_OK ) {
2801 drivers.removeCurrentDriver();
2802 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
2803 errorText_ = errorStream_.str();
2807 // Set the sample rate only if necessary
2808 if ( currentRate != sampleRate ) {
2809 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
2810 if ( result != ASE_OK ) {
2811 drivers.removeCurrentDriver();
2812 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
2813 errorText_ = errorStream_.str();
2818 // Determine the driver data type.
2819 ASIOChannelInfo channelInfo;
2820 channelInfo.channel = 0;
2821 if ( mode == OUTPUT ) channelInfo.isInput = false;
2822 else channelInfo.isInput = true;
2823 result = ASIOGetChannelInfo( &channelInfo );
2824 if ( result != ASE_OK ) {
2825 drivers.removeCurrentDriver();
2826 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
2827 errorText_ = errorStream_.str();
2831 // Assuming WINDOWS host is always little-endian.
// Big-endian (MSB) sample types therefore require a byte swap.
2832 stream_.doByteSwap[mode] = false;
2833 stream_.userFormat = format;
2834 stream_.deviceFormat[mode] = 0;
2835 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
2836 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
2837 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
2839 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
2840 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
2841 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
2843 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
2844 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2845 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
2847 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
2848 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
2849 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
2852 if ( stream_.deviceFormat[mode] == 0 ) {
2853 drivers.removeCurrentDriver();
2854 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
2855 errorText_ = errorStream_.str();
2859 // Set the buffer size. For a duplex stream, this will end up
2860 // setting the buffer size based on the input constraints, which
2862 long minSize, maxSize, preferSize, granularity;
2863 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
2864 if ( result != ASE_OK ) {
2865 drivers.removeCurrentDriver();
2866 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
2867 errorText_ = errorStream_.str();
// Clamp the requested size into [minSize, maxSize]; granularity == -1 means
// the driver only supports power-of-two sizes, so snap to the nearest one.
2871 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2872 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2873 else if ( granularity == -1 ) {
2874 // Make sure bufferSize is a power of two.
2875 int log2_of_min_size = 0;
2876 int log2_of_max_size = 0;
2878 for ( unsigned int i = 0; i < sizeof(long) * 8; i++ ) {
2879 if ( minSize & ((long)1 << i) ) log2_of_min_size = i;
2880 if ( maxSize & ((long)1 << i) ) log2_of_max_size = i;
// Choose the power of two in [2^log2_min, 2^log2_max] closest to the
// requested size.
2883 long min_delta = std::abs( (long)*bufferSize - ((long)1 << log2_of_min_size) );
2884 int min_delta_num = log2_of_min_size;
2886 for (int i = log2_of_min_size + 1; i <= log2_of_max_size; i++) {
2887 long current_delta = std::abs( (long)*bufferSize - ((long)1 << i) );
2888 if (current_delta < min_delta) {
2889 min_delta = current_delta;
2894 *bufferSize = ( (unsigned int)1 << min_delta_num );
2895 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2896 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2898 else if ( granularity != 0 ) {
2899 // Set to an even multiple of granularity, rounding up.
2900 *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
// ASIO uses a single buffer size for both directions; an input request on
// an already-open output stream must match the existing size exactly.
2903 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) {
2904 drivers.removeCurrentDriver();
2905 errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
2909 stream_.bufferSize = *bufferSize;
2910 stream_.nBuffers = 2;
2912 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2913 else stream_.userInterleaved = true;
2915 // ASIO always uses non-interleaved buffers.
2916 stream_.deviceInterleaved[mode] = false;
2918 // Allocate, if necessary, our AsioHandle structure for the stream.
2919 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2920 if ( handle == 0 ) {
2922 handle = new AsioHandle;
2924 catch ( std::bad_alloc& ) {
2925 //if ( handle == NULL ) {
2926 drivers.removeCurrentDriver();
2927 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
2930 handle->bufferInfos = 0;
2932 // Create a manual-reset event.
2933 handle->condition = CreateEvent( NULL, // no security
2934 TRUE, // manual-reset
2935 FALSE, // non-signaled initially
2937 stream_.apiHandle = (void *) handle;
2940 // Create the ASIO internal buffers. Since RtAudio sets up input
2941 // and output separately, we'll have to dispose of previously
2942 // created output buffers for a duplex stream.
2943 long inputLatency, outputLatency;
2944 if ( mode == INPUT && stream_.mode == OUTPUT ) {
2945 ASIODisposeBuffers();
2946 if ( handle->bufferInfos ) free( handle->bufferInfos );
2949 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
2950 bool buffersAllocated = false;
2951 unsigned int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2952 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
2953 if ( handle->bufferInfos == NULL ) {
2954 errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
2955 errorText_ = errorStream_.str();
2959 ASIOBufferInfo *infos;
2960 infos = handle->bufferInfos;
2961 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
2962 infos->isInput = ASIOFalse;
2963 infos->channelNum = i + stream_.channelOffset[0];
2964 infos->buffers[0] = infos->buffers[1] = 0;
2966 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
2967 infos->isInput = ASIOTrue;
2968 infos->channelNum = i + stream_.channelOffset[1];
2969 infos->buffers[0] = infos->buffers[1] = 0;
2972 // Set up the ASIO callback structure and create the ASIO data buffers.
2973 asioCallbacks.bufferSwitch = &bufferSwitch;
2974 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
2975 asioCallbacks.asioMessage = &asioMessages;
2976 asioCallbacks.bufferSwitchTimeInfo = NULL;
2977 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
2978 if ( result != ASE_OK ) {
2979 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
2980 errorText_ = errorStream_.str();
2983 buffersAllocated = true;
2985 // Set flags for buffer conversion.
2986 stream_.doConvertBuffer[mode] = false;
2987 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2988 stream_.doConvertBuffer[mode] = true;
2989 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2990 stream_.nUserChannels[mode] > 1 )
2991 stream_.doConvertBuffer[mode] = true;
2993 // Allocate necessary internal buffers
2994 unsigned long bufferBytes;
2995 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2996 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2997 if ( stream_.userBuffer[mode] == NULL ) {
2998 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
3002 if ( stream_.doConvertBuffer[mode] ) {
3004 bool makeBuffer = true;
3005 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
3006 if ( mode == INPUT ) {
3007 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
3008 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
3009 if ( bufferBytes <= bytesOut ) makeBuffer = false;
3014 bufferBytes *= *bufferSize;
3015 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
3016 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
3017 if ( stream_.deviceBuffer == NULL ) {
3018 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
3024 stream_.sampleRate = sampleRate;
3025 stream_.device[mode] = device;
3026 stream_.state = STREAM_STOPPED;
3027 asioCallbackInfo = &stream_.callbackInfo;
3028 stream_.callbackInfo.object = (void *) this;
3029 if ( stream_.mode == OUTPUT && mode == INPUT )
3030 // We had already set up an output stream.
3031 stream_.mode = DUPLEX;
3033 stream_.mode = mode;
3035 // Determine device latencies
3036 result = ASIOGetLatencies( &inputLatency, &outputLatency );
3037 if ( result != ASE_OK ) {
3038 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
3039 errorText_ = errorStream_.str();
3040 error( RtError::WARNING); // warn but don't fail
3043 stream_.latency[0] = outputLatency;
3044 stream_.latency[1] = inputLatency;
3047 // Setup the buffer conversion information structure. We don't use
3048 // buffers to do channel offsets, so we override that parameter
3050 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
3055 if ( buffersAllocated )
3056 ASIODisposeBuffers();
3057 drivers.removeCurrentDriver();
3060 CloseHandle( handle->condition );
3061 if ( handle->bufferInfos )
3062 free( handle->bufferInfos );
3064 stream_.apiHandle = 0;
3067 for ( int i=0; i<2; i++ ) {
3068 if ( stream_.userBuffer[i] ) {
3069 free( stream_.userBuffer[i] );
3070 stream_.userBuffer[i] = 0;
3074 if ( stream_.deviceBuffer ) {
3075 free( stream_.deviceBuffer );
3076 stream_.deviceBuffer = 0;
3082 void RtApiAsio :: closeStream()
3084 if ( stream_.state == STREAM_CLOSED ) {
3085 errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
3086 error( RtError::WARNING );
3090 if ( stream_.state == STREAM_RUNNING ) {
3091 stream_.state = STREAM_STOPPED;
3094 ASIODisposeBuffers();
3095 drivers.removeCurrentDriver();
3097 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3099 CloseHandle( handle->condition );
3100 if ( handle->bufferInfos )
3101 free( handle->bufferInfos );
3103 stream_.apiHandle = 0;
3106 for ( int i=0; i<2; i++ ) {
3107 if ( stream_.userBuffer[i] ) {
3108 free( stream_.userBuffer[i] );
3109 stream_.userBuffer[i] = 0;
3113 if ( stream_.deviceBuffer ) {
3114 free( stream_.deviceBuffer );
3115 stream_.deviceBuffer = 0;
3118 stream_.mode = UNINITIALIZED;
3119 stream_.state = STREAM_CLOSED;
3122 void RtApiAsio :: startStream()
3125 if ( stream_.state == STREAM_RUNNING ) {
3126 errorText_ = "RtApiAsio::startStream(): the stream is already running!";
3127 error( RtError::WARNING );
3131 MUTEX_LOCK( &stream_.mutex );
3133 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3134 ASIOError result = ASIOStart();
3135 if ( result != ASE_OK ) {
3136 errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
3137 errorText_ = errorStream_.str();
3141 handle->drainCounter = 0;
3142 handle->internalDrain = false;
3143 stream_.state = STREAM_RUNNING;
3147 MUTEX_UNLOCK( &stream_.mutex );
3149 if ( result == ASE_OK ) return;
3150 error( RtError::SYSTEM_ERROR );
3153 void RtApiAsio :: stopStream()
3156 if ( stream_.state == STREAM_STOPPED ) {
3157 errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
3158 error( RtError::WARNING );
3162 MUTEX_LOCK( &stream_.mutex );
3164 if ( stream_.state == STREAM_STOPPED ) {
3165 MUTEX_UNLOCK( &stream_.mutex );
3169 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3170 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3171 if ( handle->drainCounter == 0 ) {
3172 handle->drainCounter = 1;
3173 MUTEX_UNLOCK( &stream_.mutex );
3174 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
3175 ResetEvent( handle->condition );
3176 MUTEX_LOCK( &stream_.mutex );
3180 ASIOError result = ASIOStop();
3181 if ( result != ASE_OK ) {
3182 errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
3183 errorText_ = errorStream_.str();
3186 stream_.state = STREAM_STOPPED;
3187 MUTEX_UNLOCK( &stream_.mutex );
3189 if ( result == ASE_OK ) return;
3190 error( RtError::SYSTEM_ERROR );
3193 void RtApiAsio :: abortStream()
3196 if ( stream_.state == STREAM_STOPPED ) {
3197 errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
3198 error( RtError::WARNING );
3202 // The following lines were commented-out because some behavior was
3203 // noted where the device buffers need to be zeroed to avoid
3204 // continuing sound, even when the device buffers are completely
3205 // disposed. So now, calling abort is the same as calling stop.
3206 // AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3207 // handle->drainCounter = 1;
3211 bool RtApiAsio :: callbackEvent( long bufferIndex )
3213 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
3214 if ( stream_.state == STREAM_CLOSED ) {
3215 errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
3216 error( RtError::WARNING );
3220 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
3221 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
3223 // Check if we were draining the stream and signal is finished.
3224 if ( handle->drainCounter > 3 ) {
3225 if ( handle->internalDrain == false )
3226 SetEvent( handle->condition );
3232 MUTEX_LOCK( &stream_.mutex );
3234 // The state might change while waiting on a mutex.
3235 if ( stream_.state == STREAM_STOPPED ) goto unlock;
3237 // Invoke user callback to get fresh output data UNLESS we are
3239 if ( handle->drainCounter == 0 ) {
3240 RtAudioCallback callback = (RtAudioCallback) info->callback;
3241 double streamTime = getStreamTime();
3242 RtAudioStreamStatus status = 0;
3243 if ( stream_.mode != INPUT && asioXRun == true ) {
3244 status |= RTAUDIO_OUTPUT_UNDERFLOW;
3247 if ( stream_.mode != OUTPUT && asioXRun == true ) {
3248 status |= RTAUDIO_INPUT_OVERFLOW;
3251 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
3252 stream_.bufferSize, streamTime, status, info->userData );
3253 if ( handle->drainCounter == 2 ) {
3254 MUTEX_UNLOCK( &stream_.mutex );
3258 else if ( handle->drainCounter == 1 )
3259 handle->internalDrain = true;
3262 unsigned int nChannels, bufferBytes, i, j;
3263 nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
3264 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3266 bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );
3268 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
3270 for ( i=0, j=0; i<nChannels; i++ ) {
3271 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3272 memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );
3276 else if ( stream_.doConvertBuffer[0] ) {
3278 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
3279 if ( stream_.doByteSwap[0] )
3280 byteSwapBuffer( stream_.deviceBuffer,
3281 stream_.bufferSize * stream_.nDeviceChannels[0],
3282 stream_.deviceFormat[0] );
3284 for ( i=0, j=0; i<nChannels; i++ ) {
3285 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3286 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3287 &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
3293 if ( stream_.doByteSwap[0] )
3294 byteSwapBuffer( stream_.userBuffer[0],
3295 stream_.bufferSize * stream_.nUserChannels[0],
3296 stream_.userFormat );
3298 for ( i=0, j=0; i<nChannels; i++ ) {
3299 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3300 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3301 &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );
3306 if ( handle->drainCounter ) {
3307 handle->drainCounter++;
3312 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3314 bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
3316 if (stream_.doConvertBuffer[1]) {
3318 // Always interleave ASIO input data.
3319 for ( i=0, j=0; i<nChannels; i++ ) {
3320 if ( handle->bufferInfos[i].isInput == ASIOTrue )
3321 memcpy( &stream_.deviceBuffer[j++*bufferBytes],
3322 handle->bufferInfos[i].buffers[bufferIndex],
3326 if ( stream_.doByteSwap[1] )
3327 byteSwapBuffer( stream_.deviceBuffer,
3328 stream_.bufferSize * stream_.nDeviceChannels[1],
3329 stream_.deviceFormat[1] );
3330 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
3334 for ( i=0, j=0; i<nChannels; i++ ) {
3335 if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
3336 memcpy( &stream_.userBuffer[1][bufferBytes*j++],
3337 handle->bufferInfos[i].buffers[bufferIndex],
3342 if ( stream_.doByteSwap[1] )
3343 byteSwapBuffer( stream_.userBuffer[1],
3344 stream_.bufferSize * stream_.nUserChannels[1],
3345 stream_.userFormat );
3350 // The following call was suggested by Malte Clasen. While the API
3351 // documentation indicates it should not be required, some device
3352 // drivers apparently do not function correctly without it.
3355 MUTEX_UNLOCK( &stream_.mutex );
3357 RtApi::tickStreamTime();
3361 void sampleRateChanged( ASIOSampleRate sRate )
3363 // The ASIO documentation says that this usually only happens during
3364 // external sync. Audio processing is not stopped by the driver,
3365 // actual sample rate might not have even changed, maybe only the
3366 // sample rate status of an AES/EBU or S/PDIF digital input at the
3369 RtApi *object = (RtApi *) asioCallbackInfo->object;
3371 object->stopStream();
3373 catch ( RtError &exception ) {
3374 std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
3378 std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
3381 long asioMessages( long selector, long value, void* message, double* opt )
3385 switch( selector ) {
3386 case kAsioSelectorSupported:
3387 if ( value == kAsioResetRequest
3388 || value == kAsioEngineVersion
3389 || value == kAsioResyncRequest
3390 || value == kAsioLatenciesChanged
3391 // The following three were added for ASIO 2.0, you don't
3392 // necessarily have to support them.
3393 || value == kAsioSupportsTimeInfo
3394 || value == kAsioSupportsTimeCode
3395 || value == kAsioSupportsInputMonitor)
3398 case kAsioResetRequest:
3399 // Defer the task and perform the reset of the driver during the
3400 // next "safe" situation. You cannot reset the driver right now,
3401 // as this code is called from the driver. Reset the driver is
3402 // done by completely destruct is. I.e. ASIOStop(),
3403 // ASIODisposeBuffers(), Destruction Afterwards you initialize the
3405 std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
3408 case kAsioResyncRequest:
3409 // This informs the application that the driver encountered some
3410 // non-fatal data loss. It is used for synchronization purposes
3411 // of different media. Added mainly to work around the Win16Mutex
3412 // problems in Windows 95/98 with the Windows Multimedia system,
3413 // which could lose data because the Mutex was held too long by
3414 // another thread. However a driver can issue it in other
3416 // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
3420 case kAsioLatenciesChanged:
3421 // This will inform the host application that the drivers were
3422 // latencies changed. Beware, it this does not mean that the
3423 // buffer sizes have changed! You might need to update internal
3425 std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
3428 case kAsioEngineVersion:
3429 // Return the supported ASIO version of the host application. If
3430 // a host application does not implement this selector, ASIO 1.0
3431 // is assumed by the driver.
3434 case kAsioSupportsTimeInfo:
3435 // Informs the driver whether the
3436 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
3437 // For compatibility with ASIO 1.0 drivers the host application
3438 // should always support the "old" bufferSwitch method, too.
3441 case kAsioSupportsTimeCode:
3442 // Informs the driver whether application is interested in time
3443 // code info. If an application does not need to know about time
3444 // code, the driver has less work to do.
3451 static const char* getAsioErrorString( ASIOError result )
3459 static Messages m[] =
3461 { ASE_NotPresent, "Hardware input or output is not present or available." },
3462 { ASE_HWMalfunction, "Hardware is malfunctioning." },
3463 { ASE_InvalidParameter, "Invalid input parameter." },
3464 { ASE_InvalidMode, "Invalid mode." },
3465 { ASE_SPNotAdvancing, "Sample position not advancing." },
3466 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
3467 { ASE_NoMemory, "Not enough memory to complete the request." }
3470 for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
3471 if ( m[i].value == result ) return m[i].message;
3473 return "Unknown error.";
3475 //******************** End of __WINDOWS_ASIO__ *********************//
3479 #if defined(__WINDOWS_DS__) // Windows DirectSound API
3481 // Modified by Robin Davies, October 2005
3482 // - Improvements to DirectX pointer chasing.
3483 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
3484 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
3485 // Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
3486 // Changed device query structure for RtAudio 4.0.7, January 2010
3490 #include <algorithm>
3492 #if defined(__MINGW32__)
3493 // missing from latest mingw winapi
3494 #define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
3495 #define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
3496 #define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
3497 #define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
3500 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
3502 #ifdef _MSC_VER // if Microsoft Visual C++
3503 #pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
3506 static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3508 if ( pointer > bufferSize ) pointer -= bufferSize;
3509 if ( laterPointer < earlierPointer ) laterPointer += bufferSize;
3510 if ( pointer < earlierPointer ) pointer += bufferSize;
3511 return pointer >= earlierPointer && pointer < laterPointer;
3514 // A structure to hold various information related to the DirectSound
3515 // API implementation.
3517 unsigned int drainCounter; // Tracks callback counts when draining
3518 bool internalDrain; // Indicates if stop is initiated from callback or not.
3522 UINT bufferPointer[2];
3523 DWORD dsBufferSize[2];
3524 DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.
3528 :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
3531 // Declarations for utility functions, callbacks, and structures
3532 // specific to the DirectSound implementation.
3533 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
3534 LPCTSTR description,
3538 static const char* getErrorString( int code );
3540 extern "C" unsigned __stdcall callbackHandler( void *ptr );
3549 : found(false) { validId[0] = false; validId[1] = false; }
3552 std::vector< DsDevice > dsDevices;
3554 RtApiDs :: RtApiDs()
3556 // Dsound will run both-threaded. If CoInitialize fails, then just
3557 // accept whatever the mainline chose for a threading model.
3558 coInitialized_ = false;
3559 HRESULT hr = CoInitialize( NULL );
3560 if ( !FAILED( hr ) ) coInitialized_ = true;
3563 RtApiDs :: ~RtApiDs()
3565 if ( coInitialized_ ) CoUninitialize(); // balanced call.
3566 if ( stream_.state != STREAM_CLOSED ) closeStream();
3569 // The DirectSound default output is always the first device.
3570 unsigned int RtApiDs :: getDefaultOutputDevice( void )
3575 // The DirectSound default input is always the first input device,
3576 // which is the first capture device enumerated.
3577 unsigned int RtApiDs :: getDefaultInputDevice( void )
3582 unsigned int RtApiDs :: getDeviceCount( void )
3584 // Set query flag for previously found devices to false, so that we
3585 // can check for any devices that have disappeared.
3586 for ( unsigned int i=0; i<dsDevices.size(); i++ )
3587 dsDevices[i].found = false;
3589 // Query DirectSound devices.
3590 bool isInput = false;
3591 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &isInput );
3592 if ( FAILED( result ) ) {
3593 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
3594 errorText_ = errorStream_.str();
3595 error( RtError::WARNING );
3598 // Query DirectSoundCapture devices.
3600 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &isInput );
3601 if ( FAILED( result ) ) {
3602 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
3603 errorText_ = errorStream_.str();
3604 error( RtError::WARNING );
3607 // Clean out any devices that may have disappeared.
3608 std::vector< DsDevice > :: iterator it;
3609 for ( it=dsDevices.begin(); it < dsDevices.end(); it++ )
3610 if ( it->found == false ) dsDevices.erase( it );
3612 return dsDevices.size();
3615 RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )
3617 RtAudio::DeviceInfo info;
3618 info.probed = false;
3620 if ( dsDevices.size() == 0 ) {
3621 // Force a query of all devices
3623 if ( dsDevices.size() == 0 ) {
3624 errorText_ = "RtApiDs::getDeviceInfo: no devices found!";
3625 error( RtError::INVALID_USE );
3629 if ( device >= dsDevices.size() ) {
3630 errorText_ = "RtApiDs::getDeviceInfo: device ID is invalid!";
3631 error( RtError::INVALID_USE );
3635 if ( dsDevices[ device ].validId[0] == false ) goto probeInput;
3637 LPDIRECTSOUND output;
3639 result = DirectSoundCreate( dsDevices[ device ].id[0], &output, NULL );
3640 if ( FAILED( result ) ) {
3641 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsDevices[ device ].name << ")!";
3642 errorText_ = errorStream_.str();
3643 error( RtError::WARNING );
3647 outCaps.dwSize = sizeof( outCaps );
3648 result = output->GetCaps( &outCaps );
3649 if ( FAILED( result ) ) {
3651 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
3652 errorText_ = errorStream_.str();
3653 error( RtError::WARNING );
3657 // Get output channel information.
3658 info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
3660 // Get sample rate information.
3661 info.sampleRates.clear();
3662 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
3663 if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
3664 SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate )
3665 info.sampleRates.push_back( SAMPLE_RATES[k] );
3668 // Get format information.
3669 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
3670 if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;
3674 if ( getDefaultOutputDevice() == device )
3675 info.isDefaultOutput = true;
3677 if ( dsDevices[ device ].validId[1] == false ) {
3678 info.name = dsDevices[ device ].name;
3685 LPDIRECTSOUNDCAPTURE input;
3686 result = DirectSoundCaptureCreate( dsDevices[ device ].id[1], &input, NULL );
3687 if ( FAILED( result ) ) {
3688 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsDevices[ device ].name << ")!";
3689 errorText_ = errorStream_.str();
3690 error( RtError::WARNING );
3695 inCaps.dwSize = sizeof( inCaps );
3696 result = input->GetCaps( &inCaps );
3697 if ( FAILED( result ) ) {
3699 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsDevices[ device ].name << ")!";
3700 errorText_ = errorStream_.str();
3701 error( RtError::WARNING );
3705 // Get input channel information.
3706 info.inputChannels = inCaps.dwChannels;
3708 // Get sample rate and format information.
3709 std::vector<unsigned int> rates;
3710 if ( inCaps.dwChannels == 2 ) {
3711 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3712 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3713 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3714 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3715 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3716 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3717 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3718 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3720 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3721 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) rates.push_back( 11025 );
3722 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) rates.push_back( 22050 );
3723 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) rates.push_back( 44100 );
3724 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) rates.push_back( 96000 );
3726 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3727 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) rates.push_back( 11025 );
3728 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) rates.push_back( 22050 );
3729 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) rates.push_back( 44100 );
3730 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) rates.push_back( 96000 );
3733 else if ( inCaps.dwChannels == 1 ) {
3734 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3735 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3736 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3737 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3738 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3739 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3740 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3741 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3743 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3744 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) rates.push_back( 11025 );
3745 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) rates.push_back( 22050 );
3746 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) rates.push_back( 44100 );
3747 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) rates.push_back( 96000 );
3749 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3750 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) rates.push_back( 11025 );
3751 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) rates.push_back( 22050 );
3752 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) rates.push_back( 44100 );
3753 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) rates.push_back( 96000 );
3756 else info.inputChannels = 0; // technically, this would be an error
3760 if ( info.inputChannels == 0 ) return info;
3762 // Copy the supported rates to the info structure but avoid duplication.
3764 for ( unsigned int i=0; i<rates.size(); i++ ) {
3766 for ( unsigned int j=0; j<info.sampleRates.size(); j++ ) {
3767 if ( rates[i] == info.sampleRates[j] ) {
3772 if ( found == false ) info.sampleRates.push_back( rates[i] );
3774 sort( info.sampleRates.begin(), info.sampleRates.end() );
3776 // If device opens for both playback and capture, we determine the channels.
3777 if ( info.outputChannels > 0 && info.inputChannels > 0 )
3778 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
3780 if ( device == 0 ) info.isDefaultInput = true;
3782 // Copy name and return.
3783 info.name = dsDevices[ device ].name;
3788 bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
3789 unsigned int firstChannel, unsigned int sampleRate,
3790 RtAudioFormat format, unsigned int *bufferSize,
3791 RtAudio::StreamOptions *options )
3793 if ( channels + firstChannel > 2 ) {
3794 errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
3798 unsigned int nDevices = dsDevices.size();
3799 if ( nDevices == 0 ) {
3800 // This should not happen because a check is made before this function is called.
3801 errorText_ = "RtApiDs::probeDeviceOpen: no devices found!";
3805 if ( device >= nDevices ) {
3806 // This should not happen because a check is made before this function is called.
3807 errorText_ = "RtApiDs::probeDeviceOpen: device ID is invalid!";
3811 if ( mode == OUTPUT ) {
3812 if ( dsDevices[ device ].validId[0] == false ) {
3813 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
3814 errorText_ = errorStream_.str();
3818 else { // mode == INPUT
3819 if ( dsDevices[ device ].validId[1] == false ) {
3820 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
3821 errorText_ = errorStream_.str();
3826 // According to a note in PortAudio, using GetDesktopWindow()
3827 // instead of GetForegroundWindow() is supposed to avoid problems
3828 // that occur when the application's window is not the foreground
3829 // window. Also, if the application window closes before the
3830 // DirectSound buffer, DirectSound can crash. In the past, I had
3831 // problems when using GetDesktopWindow() but it seems fine now
3832 // (January 2010). I'll leave it commented here.
3833 // HWND hWnd = GetForegroundWindow();
3834 HWND hWnd = GetDesktopWindow();
3836 // Check the numberOfBuffers parameter and limit the lowest value to
3837 // two. This is a judgement call and a value of two is probably too
3838 // low for capture, but it should work for playback.
3840 if ( options ) nBuffers = options->numberOfBuffers;
3841 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
3842 if ( nBuffers < 2 ) nBuffers = 3;
3844 // Check the lower range of the user-specified buffer size and set
3845 // (arbitrarily) to a lower bound of 32.
3846 if ( *bufferSize < 32 ) *bufferSize = 32;
3848 // Create the wave format structure. The data format setting will
3849 // be determined later.
3850 WAVEFORMATEX waveFormat;
3851 ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
3852 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
3853 waveFormat.nChannels = channels + firstChannel;
3854 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
3856 // Determine the device buffer size. By default, we'll use the value
3857 // defined above (32K), but we will grow it to make allowances for
3858 // very large software buffer sizes.
3859 DWORD dsBufferSize = MINIMUM_DEVICE_BUFFER_SIZE;;
3860 DWORD dsPointerLeadTime = 0;
3862 void *ohandle = 0, *bhandle = 0;
3864 if ( mode == OUTPUT ) {
3866 LPDIRECTSOUND output;
3867 result = DirectSoundCreate( dsDevices[ device ].id[0], &output, NULL );
3868 if ( FAILED( result ) ) {
3869 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsDevices[ device ].name << ")!";
3870 errorText_ = errorStream_.str();
3875 outCaps.dwSize = sizeof( outCaps );
3876 result = output->GetCaps( &outCaps );
3877 if ( FAILED( result ) ) {
3879 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsDevices[ device ].name << ")!";
3880 errorText_ = errorStream_.str();
3884 // Check channel information.
3885 if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
3886 errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsDevices[ device ].name << ") does not support stereo playback.";
3887 errorText_ = errorStream_.str();
3891 // Check format information. Use 16-bit format unless not
3892 // supported or user requests 8-bit.
3893 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
3894 !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
3895 waveFormat.wBitsPerSample = 16;
3896 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3899 waveFormat.wBitsPerSample = 8;
3900 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3902 stream_.userFormat = format;
3904 // Update wave format structure and buffer information.
3905 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3906 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3907 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
3909 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
3910 while ( dsPointerLeadTime * 2U > dsBufferSize )
3913 // Set cooperative level to DSSCL_EXCLUSIVE ... sound stops when window focus changes.
3914 // result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
3915 // Set cooperative level to DSSCL_PRIORITY ... sound remains when window focus changes.
3916 result = output->SetCooperativeLevel( hWnd, DSSCL_PRIORITY );
3917 if ( FAILED( result ) ) {
3919 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsDevices[ device ].name << ")!";
3920 errorText_ = errorStream_.str();
3924 // Even though we will write to the secondary buffer, we need to
3925 // access the primary buffer to set the correct output format
3926 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
3927 // buffer description.
3928 DSBUFFERDESC bufferDescription;
3929 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3930 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3931 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
3933 // Obtain the primary buffer
3934 LPDIRECTSOUNDBUFFER buffer;
3935 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3936 if ( FAILED( result ) ) {
3938 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsDevices[ device ].name << ")!";
3939 errorText_ = errorStream_.str();
3943 // Set the primary DS buffer sound format.
3944 result = buffer->SetFormat( &waveFormat );
3945 if ( FAILED( result ) ) {
3947 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsDevices[ device ].name << ")!";
3948 errorText_ = errorStream_.str();
3952 // Setup the secondary DS buffer description.
3953 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3954 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3955 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3956 DSBCAPS_GLOBALFOCUS |
3957 DSBCAPS_GETCURRENTPOSITION2 |
3958 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
3959 bufferDescription.dwBufferBytes = dsBufferSize;
3960 bufferDescription.lpwfxFormat = &waveFormat;
3962 // Try to create the secondary DS buffer. If that doesn't work,
3963 // try to use software mixing. Otherwise, there's a problem.
3964 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3965 if ( FAILED( result ) ) {
3966 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3967 DSBCAPS_GLOBALFOCUS |
3968 DSBCAPS_GETCURRENTPOSITION2 |
3969 DSBCAPS_LOCSOFTWARE ); // Force software mixing
3970 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3971 if ( FAILED( result ) ) {
3973 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsDevices[ device ].name << ")!";
3974 errorText_ = errorStream_.str();
3979 // Get the buffer size ... might be different from what we specified.
3981 dsbcaps.dwSize = sizeof( DSBCAPS );
3982 result = buffer->GetCaps( &dsbcaps );
3983 if ( FAILED( result ) ) {
3986 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsDevices[ device ].name << ")!";
3987 errorText_ = errorStream_.str();
3991 dsBufferSize = dsbcaps.dwBufferBytes;
3993 // Lock the DS buffer
3996 result = buffer->Lock( 0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0 );
3997 if ( FAILED( result ) ) {
4000 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsDevices[ device ].name << ")!";
4001 errorText_ = errorStream_.str();
4005 // Zero the DS buffer
4006 ZeroMemory( audioPtr, dataLen );
4008 // Unlock the DS buffer
4009 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4010 if ( FAILED( result ) ) {
4013 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsDevices[ device ].name << ")!";
4014 errorText_ = errorStream_.str();
4018 ohandle = (void *) output;
4019 bhandle = (void *) buffer;
4022 if ( mode == INPUT ) {
4024 LPDIRECTSOUNDCAPTURE input;
4025 result = DirectSoundCaptureCreate( dsDevices[ device ].id[1], &input, NULL );
4026 if ( FAILED( result ) ) {
4027 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsDevices[ device ].name << ")!";
4028 errorText_ = errorStream_.str();
4033 inCaps.dwSize = sizeof( inCaps );
4034 result = input->GetCaps( &inCaps );
4035 if ( FAILED( result ) ) {
4037 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsDevices[ device ].name << ")!";
4038 errorText_ = errorStream_.str();
4042 // Check channel information.
4043 if ( inCaps.dwChannels < channels + firstChannel ) {
4044 errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";
4048 // Check format information. Use 16-bit format unless user
4050 DWORD deviceFormats;
4051 if ( channels + firstChannel == 2 ) {
4052 deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
4053 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
4054 waveFormat.wBitsPerSample = 8;
4055 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
4057 else { // assume 16-bit is supported
4058 waveFormat.wBitsPerSample = 16;
4059 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4062 else { // channel == 1
4063 deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
4064 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
4065 waveFormat.wBitsPerSample = 8;
4066 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
4068 else { // assume 16-bit is supported
4069 waveFormat.wBitsPerSample = 16;
4070 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
4073 stream_.userFormat = format;
4075 // Update wave format structure and buffer information.
4076 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
4077 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
4078 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
4080 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
4081 while ( dsPointerLeadTime * 2U > dsBufferSize )
4084 // Setup the secondary DS buffer description.
4085 DSCBUFFERDESC bufferDescription;
4086 ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
4087 bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
4088 bufferDescription.dwFlags = 0;
4089 bufferDescription.dwReserved = 0;
4090 bufferDescription.dwBufferBytes = dsBufferSize;
4091 bufferDescription.lpwfxFormat = &waveFormat;
4093 // Create the capture buffer.
4094 LPDIRECTSOUNDCAPTUREBUFFER buffer;
4095 result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
4096 if ( FAILED( result ) ) {
4098 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsDevices[ device ].name << ")!";
4099 errorText_ = errorStream_.str();
4103 // Get the buffer size ... might be different from what we specified.
4105 dscbcaps.dwSize = sizeof( DSCBCAPS );
4106 result = buffer->GetCaps( &dscbcaps );
4107 if ( FAILED( result ) ) {
4110 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsDevices[ device ].name << ")!";
4111 errorText_ = errorStream_.str();
4115 dsBufferSize = dscbcaps.dwBufferBytes;
4117 // NOTE: We could have a problem here if this is a duplex stream
4118 // and the play and capture hardware buffer sizes are different
4119 // (I'm actually not sure if that is a problem or not).
4120 // Currently, we are not verifying that.
4122 // Lock the capture buffer
4125 result = buffer->Lock( 0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0 );
4126 if ( FAILED( result ) ) {
4129 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsDevices[ device ].name << ")!";
4130 errorText_ = errorStream_.str();
4135 ZeroMemory( audioPtr, dataLen );
4137 // Unlock the buffer
4138 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4139 if ( FAILED( result ) ) {
4142 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsDevices[ device ].name << ")!";
4143 errorText_ = errorStream_.str();
4147 ohandle = (void *) input;
4148 bhandle = (void *) buffer;
4151 // Set various stream parameters
4152 DsHandle *handle = 0;
4153 stream_.nDeviceChannels[mode] = channels + firstChannel;
4154 stream_.nUserChannels[mode] = channels;
4155 stream_.bufferSize = *bufferSize;
4156 stream_.channelOffset[mode] = firstChannel;
4157 stream_.deviceInterleaved[mode] = true;
4158 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
4159 else stream_.userInterleaved = true;
4161 // Set flag for buffer conversion
4162 stream_.doConvertBuffer[mode] = false;
4163 if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
4164 stream_.doConvertBuffer[mode] = true;
4165 if (stream_.userFormat != stream_.deviceFormat[mode])
4166 stream_.doConvertBuffer[mode] = true;
4167 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
4168 stream_.nUserChannels[mode] > 1 )
4169 stream_.doConvertBuffer[mode] = true;
4171 // Allocate necessary internal buffers
4172 long bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
4173 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
4174 if ( stream_.userBuffer[mode] == NULL ) {
4175 errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
4179 if ( stream_.doConvertBuffer[mode] ) {
4181 bool makeBuffer = true;
4182 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
4183 if ( mode == INPUT ) {
4184 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
4185 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
4186 if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
4191 bufferBytes *= *bufferSize;
4192 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
4193 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
4194 if ( stream_.deviceBuffer == NULL ) {
4195 errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
4201 // Allocate our DsHandle structures for the stream.
4202 if ( stream_.apiHandle == 0 ) {
4204 handle = new DsHandle;
4206 catch ( std::bad_alloc& ) {
4207 errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";
4211 // Create a manual-reset event.
4212 handle->condition = CreateEvent( NULL, // no security
4213 TRUE, // manual-reset
4214 FALSE, // non-signaled initially
4216 stream_.apiHandle = (void *) handle;
4219 handle = (DsHandle *) stream_.apiHandle;
4220 handle->id[mode] = ohandle;
4221 handle->buffer[mode] = bhandle;
4222 handle->dsBufferSize[mode] = dsBufferSize;
4223 handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
4225 stream_.device[mode] = device;
4226 stream_.state = STREAM_STOPPED;
4227 if ( stream_.mode == OUTPUT && mode == INPUT )
4228 // We had already set up an output stream.
4229 stream_.mode = DUPLEX;
4231 stream_.mode = mode;
4232 stream_.nBuffers = nBuffers;
4233 stream_.sampleRate = sampleRate;
4235 // Setup the buffer conversion information structure.
4236 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
4238 // Setup the callback thread.
4240 stream_.callbackInfo.object = (void *) this;
4241 stream_.callbackInfo.isRunning = true;
4242 stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
4243 &stream_.callbackInfo, 0, &threadId );
4244 if ( stream_.callbackInfo.thread == 0 ) {
4245 errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
4249 // Boost DS thread priority
4250 SetThreadPriority( (HANDLE) stream_.callbackInfo.thread, THREAD_PRIORITY_HIGHEST );
4255 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4256 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4257 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4258 if ( buffer ) buffer->Release();
4261 if ( handle->buffer[1] ) {
4262 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4263 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4264 if ( buffer ) buffer->Release();
4267 CloseHandle( handle->condition );
4269 stream_.apiHandle = 0;
4272 for ( int i=0; i<2; i++ ) {
4273 if ( stream_.userBuffer[i] ) {
4274 free( stream_.userBuffer[i] );
4275 stream_.userBuffer[i] = 0;
4279 if ( stream_.deviceBuffer ) {
4280 free( stream_.deviceBuffer );
4281 stream_.deviceBuffer = 0;
//! Close the open DirectSound stream: shut down the callback thread,
//! release the playback/capture objects, and free all internal buffers.
//! Issues a WARNING (not an error) if no stream is open.
void RtApiDs :: closeStream()
  if ( stream_.state == STREAM_CLOSED ) {
    errorText_ = "RtApiDs::closeStream(): no open stream to close!";
    error( RtError::WARNING );

  // Stop the callback thread.
  stream_.callbackInfo.isRunning = false;
  // Wait for the thread to exit before closing its handle.
  WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );
  CloseHandle( (HANDLE) stream_.callbackInfo.thread );

  DsHandle *handle = (DsHandle *) stream_.apiHandle;
  // Release the output-side DirectSound objects, if any.
  if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
    LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
    LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];

  // Release the input-side DirectSound capture objects, if any.
  if ( handle->buffer[1] ) {
    LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
    LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];

  // Close the manual-reset event created in probeDeviceOpen().
  CloseHandle( handle->condition );

  stream_.apiHandle = 0;

  // Free the user buffers for both directions.
  for ( int i=0; i<2; i++ ) {
    if ( stream_.userBuffer[i] ) {
      free( stream_.userBuffer[i] );
      stream_.userBuffer[i] = 0;

  if ( stream_.deviceBuffer ) {
    free( stream_.deviceBuffer );
    stream_.deviceBuffer = 0;

  // Mark the stream fully closed.
  stream_.mode = UNINITIALIZED;
  stream_.state = STREAM_CLOSED;
//! Start the open stream: begin looping DirectSound playback and/or
//! capture and mark the stream running.  Issues a WARNING if the
//! stream is already running.
void RtApiDs :: startStream()
  if ( stream_.state == STREAM_RUNNING ) {
    errorText_ = "RtApiDs::startStream(): the stream is already running!";
    error( RtError::WARNING );

  MUTEX_LOCK( &stream_.mutex );

  DsHandle *handle = (DsHandle *) stream_.apiHandle;

  // Increase scheduler frequency on lesser windows (a side-effect of
  // increasing timer accuracy).  On greater windows (Win2K or later),
  // this is already in effect.
  timeBeginPeriod( 1 );

  // Reset the device-synchronization state used by callbackEvent().
  buffersRolling = false;
  duplexPrerollBytes = 0;

  if ( stream_.mode == DUPLEX ) {
    // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
    duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );

  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
    // Begin looping playback of the output buffer.
    LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
    result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
    if ( FAILED( result ) ) {
      errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
      errorText_ = errorStream_.str();

  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
    // Begin looping capture into the input buffer.
    LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
    result = buffer->Start( DSCBSTART_LOOPING );
    if ( FAILED( result ) ) {
      errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
      errorText_ = errorStream_.str();

  // Clear any pending drain request and mark the stream running.
  handle->drainCounter = 0;
  handle->internalDrain = false;
  stream_.state = STREAM_RUNNING;

  MUTEX_UNLOCK( &stream_.mutex );

  if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
//! Stop the stream.  For output, waits for the queued data to drain
//! (signaled by the callback via the condition event), then stops and
//! zeroes the DirectSound buffers so a subsequent start begins with
//! silence from the buffer start.  Issues a WARNING if already stopped.
void RtApiDs :: stopStream()
  if ( stream_.state == STREAM_STOPPED ) {
    errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
    error( RtError::WARNING );

  MUTEX_LOCK( &stream_.mutex );

  // Re-check: the state may have changed while waiting on the mutex.
  if ( stream_.state == STREAM_STOPPED ) {
    MUTEX_UNLOCK( &stream_.mutex );

  DsHandle *handle = (DsHandle *) stream_.apiHandle;
  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
    // Request a drain and wait (with the mutex released) for the
    // callback thread to signal completion via the condition event.
    if ( handle->drainCounter == 0 ) {
      handle->drainCounter = 1;
      MUTEX_UNLOCK( &stream_.mutex );
      WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
      ResetEvent( handle->condition );
      MUTEX_LOCK( &stream_.mutex );

    // Stop the buffer and clear memory
    LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
    result = buffer->Stop();
    if ( FAILED( result ) ) {
      errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping output buffer!";
      errorText_ = errorStream_.str();

    // Lock the buffer and clear it so that if we start to play again,
    // we won't have old data playing.
    result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
    if ( FAILED( result ) ) {
      errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking output buffer!";
      errorText_ = errorStream_.str();

    // Zero the DS buffer
    ZeroMemory( audioPtr, dataLen );

    // Unlock the DS buffer
    result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
    if ( FAILED( result ) ) {
      errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
      errorText_ = errorStream_.str();

    // If we start playing again, we must begin at beginning of buffer.
    handle->bufferPointer[0] = 0;

  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
    LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
    result = buffer->Stop();
    if ( FAILED( result ) ) {
      errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping input buffer!";
      errorText_ = errorStream_.str();

    // Lock the buffer and clear it so that if we start to play again,
    // we won't have old data playing.
    result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
    if ( FAILED( result ) ) {
      errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking input buffer!";
      errorText_ = errorStream_.str();

    // Zero the DS buffer
    ZeroMemory( audioPtr, dataLen );

    // Unlock the DS buffer
    result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
    if ( FAILED( result ) ) {
      errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
      errorText_ = errorStream_.str();

    // If we start recording again, we must begin at beginning of buffer.
    handle->bufferPointer[1] = 0;

  timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
  stream_.state = STREAM_STOPPED;
  MUTEX_UNLOCK( &stream_.mutex );

  if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
//! Abort the stream: request an immediate stop without waiting for
//! queued output to drain.  Issues a WARNING if already stopped.
void RtApiDs :: abortStream()
  if ( stream_.state == STREAM_STOPPED ) {
    errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
    error( RtError::WARNING );

  DsHandle *handle = (DsHandle *) stream_.apiHandle;
  // A non-zero drainCounter tells callbackEvent() to stop requesting
  // fresh data from the user callback (see callbackEvent).
  handle->drainCounter = 1;
4521 void RtApiDs :: callbackEvent()
4523 if ( stream_.state == STREAM_STOPPED ) {
4524 Sleep( 50 ); // sleep 50 milliseconds
4528 if ( stream_.state == STREAM_CLOSED ) {
4529 errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
4530 error( RtError::WARNING );
4534 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
4535 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4537 // Check if we were draining the stream and signal is finished.
4538 if ( handle->drainCounter > stream_.nBuffers + 2 ) {
4539 if ( handle->internalDrain == false )
4540 SetEvent( handle->condition );
4546 MUTEX_LOCK( &stream_.mutex );
4548 // The state might change while waiting on a mutex.
4549 if ( stream_.state == STREAM_STOPPED ) {
4550 MUTEX_UNLOCK( &stream_.mutex );
4554 // Invoke user callback to get fresh output data UNLESS we are
4556 if ( handle->drainCounter == 0 ) {
4557 RtAudioCallback callback = (RtAudioCallback) info->callback;
4558 double streamTime = getStreamTime();
4559 RtAudioStreamStatus status = 0;
4560 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
4561 status |= RTAUDIO_OUTPUT_UNDERFLOW;
4562 handle->xrun[0] = false;
4564 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
4565 status |= RTAUDIO_INPUT_OVERFLOW;
4566 handle->xrun[1] = false;
4568 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
4569 stream_.bufferSize, streamTime, status, info->userData );
4570 if ( handle->drainCounter == 2 ) {
4571 MUTEX_UNLOCK( &stream_.mutex );
4575 else if ( handle->drainCounter == 1 )
4576 handle->internalDrain = true;
4580 DWORD currentWritePointer, safeWritePointer;
4581 DWORD currentReadPointer, safeReadPointer;
4582 UINT nextWritePointer;
4584 LPVOID buffer1 = NULL;
4585 LPVOID buffer2 = NULL;
4586 DWORD bufferSize1 = 0;
4587 DWORD bufferSize2 = 0;
4592 if ( buffersRolling == false ) {
4593 if ( stream_.mode == DUPLEX ) {
4594 //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4596 // It takes a while for the devices to get rolling. As a result,
4597 // there's no guarantee that the capture and write device pointers
4598 // will move in lockstep. Wait here for both devices to start
4599 // rolling, and then set our buffer pointers accordingly.
4600 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
4601 // bytes later than the write buffer.
4603 // Stub: a serious risk of having a pre-emptive scheduling round
4604 // take place between the two GetCurrentPosition calls... but I'm
4605 // really not sure how to solve the problem. Temporarily boost to
4606 // Realtime priority, maybe; but I'm not sure what priority the
4607 // DirectSound service threads run at. We *should* be roughly
4608 // within a ms or so of correct.
4610 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4611 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4613 DWORD startSafeWritePointer, startSafeReadPointer;
4615 result = dsWriteBuffer->GetCurrentPosition( NULL, &startSafeWritePointer );
4616 if ( FAILED( result ) ) {
4617 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4618 errorText_ = errorStream_.str();
4619 error( RtError::SYSTEM_ERROR );
4621 result = dsCaptureBuffer->GetCurrentPosition( NULL, &startSafeReadPointer );
4622 if ( FAILED( result ) ) {
4623 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4624 errorText_ = errorStream_.str();
4625 error( RtError::SYSTEM_ERROR );
4628 result = dsWriteBuffer->GetCurrentPosition( NULL, &safeWritePointer );
4629 if ( FAILED( result ) ) {
4630 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4631 errorText_ = errorStream_.str();
4632 error( RtError::SYSTEM_ERROR );
4634 result = dsCaptureBuffer->GetCurrentPosition( NULL, &safeReadPointer );
4635 if ( FAILED( result ) ) {
4636 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4637 errorText_ = errorStream_.str();
4638 error( RtError::SYSTEM_ERROR );
4640 if ( safeWritePointer != startSafeWritePointer && safeReadPointer != startSafeReadPointer ) break;
4644 //assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4646 handle->bufferPointer[0] = safeWritePointer + handle->dsPointerLeadTime[0];
4647 if ( handle->bufferPointer[0] >= handle->dsBufferSize[0] ) handle->bufferPointer[0] -= handle->dsBufferSize[0];
4648 handle->bufferPointer[1] = safeReadPointer;
4650 else if ( stream_.mode == OUTPUT ) {
4652 // Set the proper nextWritePosition after initial startup.
4653 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4654 result = dsWriteBuffer->GetCurrentPosition( ¤tWritePointer, &safeWritePointer );
4655 if ( FAILED( result ) ) {
4656 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4657 errorText_ = errorStream_.str();
4658 error( RtError::SYSTEM_ERROR );
4660 handle->bufferPointer[0] = safeWritePointer + handle->dsPointerLeadTime[0];
4661 if ( handle->bufferPointer[0] >= handle->dsBufferSize[0] ) handle->bufferPointer[0] -= handle->dsBufferSize[0];
4664 buffersRolling = true;
4667 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4669 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4671 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
4672 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4673 bufferBytes *= formatBytes( stream_.userFormat );
4674 memset( stream_.userBuffer[0], 0, bufferBytes );
4677 // Setup parameters and do buffer conversion if necessary.
4678 if ( stream_.doConvertBuffer[0] ) {
4679 buffer = stream_.deviceBuffer;
4680 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
4681 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
4682 bufferBytes *= formatBytes( stream_.deviceFormat[0] );
4685 buffer = stream_.userBuffer[0];
4686 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4687 bufferBytes *= formatBytes( stream_.userFormat );
4690 // No byte swapping necessary in DirectSound implementation.
4692 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
4693 // unsigned. So, we need to convert our signed 8-bit data here to
4695 if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
4696 for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
4698 DWORD dsBufferSize = handle->dsBufferSize[0];
4699 nextWritePointer = handle->bufferPointer[0];
4701 DWORD endWrite, leadPointer;
4703 // Find out where the read and "safe write" pointers are.
4704 result = dsBuffer->GetCurrentPosition( ¤tWritePointer, &safeWritePointer );
4705 if ( FAILED( result ) ) {
4706 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4707 errorText_ = errorStream_.str();
4708 error( RtError::SYSTEM_ERROR );
4711 // We will copy our output buffer into the region between
4712 // safeWritePointer and leadPointer. If leadPointer is not
4713 // beyond the next endWrite position, wait until it is.
4714 leadPointer = safeWritePointer + handle->dsPointerLeadTime[0];
4715 //std::cout << "safeWritePointer = " << safeWritePointer << ", leadPointer = " << leadPointer << ", nextWritePointer = " << nextWritePointer << std::endl;
4716 if ( leadPointer > dsBufferSize ) leadPointer -= dsBufferSize;
4717 if ( leadPointer < nextWritePointer ) leadPointer += dsBufferSize; // unwrap offset
4718 endWrite = nextWritePointer + bufferBytes;
4720 // Check whether the entire write region is behind the play pointer.
4721 if ( leadPointer >= endWrite ) break;
4723 // If we are here, then we must wait until the leadPointer advances
4724 // beyond the end of our next write region. We use the
4725 // Sleep() function to suspend operation until that happens.
4726 double millis = ( endWrite - leadPointer ) * 1000.0;
4727 millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
4728 if ( millis < 1.0 ) millis = 1.0;
4729 Sleep( (DWORD) millis );
4732 if ( dsPointerBetween( nextWritePointer, safeWritePointer, currentWritePointer, dsBufferSize )
4733 || dsPointerBetween( endWrite, safeWritePointer, currentWritePointer, dsBufferSize ) ) {
4734 // We've strayed into the forbidden zone ... resync the read pointer.
4735 handle->xrun[0] = true;
4736 nextWritePointer = safeWritePointer + handle->dsPointerLeadTime[0] - bufferBytes;
4737 if ( nextWritePointer >= dsBufferSize ) nextWritePointer -= dsBufferSize;
4738 handle->bufferPointer[0] = nextWritePointer;
4739 endWrite = nextWritePointer + bufferBytes;
4742 // Lock free space in the buffer
4743 result = dsBuffer->Lock( nextWritePointer, bufferBytes, &buffer1,
4744 &bufferSize1, &buffer2, &bufferSize2, 0 );
4745 if ( FAILED( result ) ) {
4746 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
4747 errorText_ = errorStream_.str();
4748 error( RtError::SYSTEM_ERROR );
4751 // Copy our buffer into the DS buffer
4752 CopyMemory( buffer1, buffer, bufferSize1 );
4753 if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
4755 // Update our buffer offset and unlock sound buffer
4756 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4757 if ( FAILED( result ) ) {
4758 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
4759 errorText_ = errorStream_.str();
4760 error( RtError::SYSTEM_ERROR );
4762 nextWritePointer = ( nextWritePointer + bufferSize1 + bufferSize2 ) % dsBufferSize;
4763 handle->bufferPointer[0] = nextWritePointer;
4765 if ( handle->drainCounter ) {
4766 handle->drainCounter++;
4771 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4773 // Setup parameters.
4774 if ( stream_.doConvertBuffer[1] ) {
4775 buffer = stream_.deviceBuffer;
4776 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
4777 bufferBytes *= formatBytes( stream_.deviceFormat[1] );
4780 buffer = stream_.userBuffer[1];
4781 bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
4782 bufferBytes *= formatBytes( stream_.userFormat );
4785 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4786 long nextReadPointer = handle->bufferPointer[1];
4787 DWORD dsBufferSize = handle->dsBufferSize[1];
4789 // Find out where the write and "safe read" pointers are.
4790 result = dsBuffer->GetCurrentPosition( &currentReadPointer, &safeReadPointer );
4791 if ( FAILED( result ) ) {
4792 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4793 errorText_ = errorStream_.str();
4794 error( RtError::SYSTEM_ERROR );
4797 if ( safeReadPointer < (DWORD)nextReadPointer ) safeReadPointer += dsBufferSize; // unwrap offset
4798 DWORD endRead = nextReadPointer + bufferBytes;
4800 // Handling depends on whether we are INPUT or DUPLEX.
4801 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
4802 // then a wait here will drag the write pointers into the forbidden zone.
4804 // In DUPLEX mode, rather than wait, we will back off the read pointer until
4805 // it's in a safe position. This causes dropouts, but it seems to be the only
4806 // practical way to sync up the read and write pointers reliably, given the
4807 // the very complex relationship between phase and increment of the read and write
4810 // In order to minimize audible dropouts in DUPLEX mode, we will
4811 // provide a pre-roll period of 0.5 seconds in which we return
4812 // zeros from the read buffer while the pointers sync up.
4814 if ( stream_.mode == DUPLEX ) {
4815 if ( safeReadPointer < endRead ) {
4816 if ( duplexPrerollBytes <= 0 ) {
4817 // Pre-roll time over. Be more agressive.
4818 int adjustment = endRead-safeReadPointer;
4820 handle->xrun[1] = true;
4822 // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
4823 // and perform fine adjustments later.
4824 // - small adjustments: back off by twice as much.
4825 if ( adjustment >= 2*bufferBytes )
4826 nextReadPointer = safeReadPointer-2*bufferBytes;
4828 nextReadPointer = safeReadPointer-bufferBytes-adjustment;
4830 if ( nextReadPointer < 0 ) nextReadPointer += dsBufferSize;
4834 // In pre=roll time. Just do it.
4835 nextReadPointer = safeReadPointer - bufferBytes;
4836 while ( nextReadPointer < 0 ) nextReadPointer += dsBufferSize;
4838 endRead = nextReadPointer + bufferBytes;
4841 else { // mode == INPUT
4842 while ( safeReadPointer < endRead ) {
4843 // See comments for playback.
4844 double millis = (endRead - safeReadPointer) * 1000.0;
4845 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
4846 if ( millis < 1.0 ) millis = 1.0;
4847 Sleep( (DWORD) millis );
4849 // Wake up and find out where we are now.
4850 result = dsBuffer->GetCurrentPosition( &currentReadPointer, &safeReadPointer );
4851 if ( FAILED( result ) ) {
4852 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4853 errorText_ = errorStream_.str();
4854 error( RtError::SYSTEM_ERROR );
4857 if ( safeReadPointer < (DWORD)nextReadPointer ) safeReadPointer += dsBufferSize; // unwrap offset
4861 // Lock free space in the buffer
4862 result = dsBuffer->Lock( nextReadPointer, bufferBytes, &buffer1,
4863 &bufferSize1, &buffer2, &bufferSize2, 0 );
4864 if ( FAILED( result ) ) {
4865 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
4866 errorText_ = errorStream_.str();
4867 error( RtError::SYSTEM_ERROR );
4870 if ( duplexPrerollBytes <= 0 ) {
4871 // Copy our buffer into the DS buffer
4872 CopyMemory( buffer, buffer1, bufferSize1 );
4873 if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
4876 memset( buffer, 0, bufferSize1 );
4877 if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
4878 duplexPrerollBytes -= bufferSize1 + bufferSize2;
4881 // Update our buffer offset and unlock sound buffer
4882 nextReadPointer = ( nextReadPointer + bufferSize1 + bufferSize2 ) % dsBufferSize;
4883 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4884 if ( FAILED( result ) ) {
4885 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
4886 errorText_ = errorStream_.str();
4887 error( RtError::SYSTEM_ERROR );
4889 handle->bufferPointer[1] = nextReadPointer;
4891 // No byte swapping necessary in DirectSound implementation.
4893 // If necessary, convert 8-bit data from unsigned to signed.
4894 if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
4895 for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
4897 // Do buffer conversion if necessary.
4898 if ( stream_.doConvertBuffer[1] )
4899 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
4903 MUTEX_UNLOCK( &stream_.mutex );
4905 RtApi::tickStreamTime();
4908 // Definitions for utility functions and callbacks
4909 // specific to the DirectSound implementation.
// Windows thread entry point for the DirectSound callback loop.  ptr is
// the stream's CallbackInfo; callbackEvent() is invoked repeatedly until
// another thread clears info->isRunning.
// NOTE(review): the loop's closing brace and the thread-exit code are
// elided in this excerpt.
4911 extern "C" unsigned __stdcall callbackHandler( void *ptr )
4913 CallbackInfo *info = (CallbackInfo *) ptr;
4914 RtApiDs *object = (RtApiDs *) info->object;
// Cache the address of the run flag so each iteration re-reads the
// current value set by the controlling thread.
4915 bool* isRunning = &info->isRunning;
4917 while ( *isRunning == true ) {
4918 object->callbackEvent();
// Convert a Windows TCHAR device-name string (narrow or wide, depending
// on the UNICODE build setting) into a std::string.
// NOTE(review): the opening brace, the declaration of the local string
// (presumably "std::string s;"), the #else/#endif lines and the final
// return are elided in this excerpt.
4927 std::string convertTChar( LPCTSTR name )
4931 #if defined( UNICODE ) || defined( _UNICODE )
// Wide build: narrow each wchar_t by appending it one character at a
// time (lossy for non-ASCII names, as the original comment notes).
4932 // Yes, this conversion doesn't make sense for two-byte characters
4933 // but RtAudio is currently written to return an std::string of
4934 // one-byte chars for the device name.
4935 for ( unsigned int i=0; i<wcslen( name ); i++ )
4936 s.push_back( name[i] );
// Narrow build: LPCTSTR is already a char string; append it directly.
4938 s.append( std::string( name ) );
// DirectSound(Capture)Enumerate callback.  For each enumerated device,
// verify that it can actually be opened and reports usable capabilities,
// then record (or update) its name and GUID in the global dsDevices
// list.  lpContext points to a bool: true while enumerating capture
// devices, false while enumerating render devices.
// NOTE(review): several braces/else branches and local declarations
// (hr, caps, the new "device" entry) are elided in this excerpt.
4944 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
4945 LPCTSTR description,
// Context flag distinguishes the capture pass from the render pass.
4949 bool *isInput = (bool *) lpContext;
4952 bool validDevice = false;
4953 if ( *isInput == true ) {
// Capture pass: valid only if the device opens and reports at least one
// channel and one supported format.
4955 LPDIRECTSOUNDCAPTURE object;
4957 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
4958 if ( hr != DS_OK ) return TRUE;
4960 caps.dwSize = sizeof(caps);
4961 hr = object->GetCaps( &caps );
4962 if ( hr == DS_OK ) {
4963 if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
// Render pass: valid if the device opens and supports a primary mono or
// stereo buffer.
4970 LPDIRECTSOUND object;
4971 hr = DirectSoundCreate( lpguid, &object, NULL );
4972 if ( hr != DS_OK ) return TRUE;
4974 caps.dwSize = sizeof(caps);
4975 hr = object->GetCaps( &caps );
4976 if ( hr == DS_OK ) {
4977 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
4983 // If good device, then save its name and guid.
4984 std::string name = convertTChar( description );
// Both primary devices are renamed so the input and output defaults
// share a single list entry.
4985 if ( name == "Primary Sound Driver" || name == "Primary Sound Capture Driver" )
4986 name = "Default Device";
4987 if ( validDevice ) {
// Existing entry with the same name: update the GUID for this
// direction.  id[1]/validId[1] appear to be the capture slot and
// id[0]/validId[0] the render slot -- TODO confirm against the elided
// if/else on *isInput.
4988 for ( unsigned int i=0; i<dsDevices.size(); i++ ) {
4989 if ( dsDevices[i].name == name ) {
4990 dsDevices[i].found = true;
4992 dsDevices[i].id[1] = lpguid;
4993 dsDevices[i].validId[1] = true;
4996 dsDevices[i].id[0] = lpguid;
4997 dsDevices[i].validId[0] = true;
// No existing entry: populate a fresh record and append it.
5005 device.found = true;
5007 device.id[1] = lpguid;
5008 device.validId[1] = true;
5011 device.id[0] = lpguid;
5012 device.validId[0] = true;
5014 dsDevices.push_back( device );
// Map a DirectSound HRESULT error code to a short, static human-readable
// description; used when composing errorStream_ messages in RtApiDs.
// NOTE(review): the switch statement's braces, some case labels and a
// few return lines (e.g. the string for DSERR_NODRIVER) are elided in
// this excerpt.
5020 static const char* getErrorString( int code )
5024 case DSERR_ALLOCATED:
5025 return "Already allocated";
5027 case DSERR_CONTROLUNAVAIL:
5028 return "Control unavailable";
5030 case DSERR_INVALIDPARAM:
5031 return "Invalid parameter";
5033 case DSERR_INVALIDCALL:
5034 return "Invalid call";
5037 return "Generic error";
5039 case DSERR_PRIOLEVELNEEDED:
5040 return "Priority level needed";
5042 case DSERR_OUTOFMEMORY:
5043 return "Out of memory";
5045 case DSERR_BADFORMAT:
5046 return "The sample rate or the channel format is not supported";
5048 case DSERR_UNSUPPORTED:
5049 return "Not supported";
5051 case DSERR_NODRIVER:
5054 case DSERR_ALREADYINITIALIZED:
5055 return "Already initialized";
5057 case DSERR_NOAGGREGATION:
5058 return "No aggregation";
5060 case DSERR_BUFFERLOST:
5061 return "Buffer lost";
5063 case DSERR_OTHERAPPHASPRIO:
5064 return "Another application already has priority";
5066 case DSERR_UNINITIALIZED:
5067 return "Uninitialized";
// Fallback for any unrecognized code.
5070 return "DirectSound unknown error";
5073 //******************** End of __WINDOWS_DS__ *********************//
5077 #if defined(__LINUX_ALSA__)
5079 #include <alsa/asoundlib.h>
5082 // A structure to hold various information related to the ALSA API
// implementation; one instance per stream, stored in stream_.apiHandle.
// NOTE(review): the struct keyword, several members (e.g. synchronized,
// xrun[2]) and the closing brace are elided in this excerpt.
// PCM handles -- presumably index 0 = playback and 1 = capture, matching
// the handles[0]/handles[1] order passed to snd_pcm_link() in
// probeDeviceOpen; TODO confirm.
5085 snd_pcm_t *handles[2];
// Condition variable used to signal the callback thread.
5088 pthread_cond_t runnable;
// Constructor: streams start unlinked and with no xrun recorded for
// either direction.
5091 :synchronized(false) { xrun[0] = false; xrun[1] = false; }
5094 extern "C" void *alsaCallbackHandler( void * ptr );
// Default constructor -- all stream state is initialized by the RtApi
// base class, so no ALSA-specific setup is required.
5096 RtApiAlsa :: RtApiAlsa()
5098 // Nothing to do here.
// Destructor: close any stream that is still open so the ALSA handles
// and internal buffers are released.
5101 RtApiAlsa :: ~RtApiAlsa()
5103 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count the available ALSA PCM devices by walking every sound card
// ("hw:N") and enumerating each card's PCM devices through the control
// interface.
// NOTE(review): the handle/name declarations, loop braces, the
// per-device counter increment and the final "return nDevices;" are
// elided in this excerpt; card is presumably initialized to -1 before
// the first snd_card_next() call.
5106 unsigned int RtApiAlsa :: getDeviceCount( void )
5108 unsigned nDevices = 0;
5109 int result, subdevice, card;
5113 // Count cards and devices
5115 snd_card_next( &card );
5116 while ( card >= 0 ) {
5117 sprintf( name, "hw:%d", card );
// Open this card's control interface; on failure, warn and (in the
// elided code) move on to the next card.
5118 result = snd_ctl_open( &handle, name, 0 );
5120 errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5121 errorText_ = errorStream_.str();
5122 error( RtError::WARNING );
// Enumerate this card's PCM devices until ALSA reports no more
// (subdevice < 0).
5127 result = snd_ctl_pcm_next_device( handle, &subdevice );
5129 errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
5130 errorText_ = errorStream_.str();
5131 error( RtError::WARNING );
5134 if ( subdevice < 0 )
// Done with this card: release its control handle and advance.
5139 snd_ctl_close( handle );
5140 snd_card_next( &card );
// Probe ALSA device "device" and return a filled-in DeviceInfo: channel
// counts (output/input/duplex), default-device flags, supported sample
// rates and native data formats, plus the card name.  The device is
// located by enumerating cards/subdevices in the same order as
// getDeviceCount(); invalid IDs raise INVALID_USE, probe failures raise
// WARNING and return early.
// FIX(review): the snd_pcm_hw_params_alloca() argument was mis-encoded
// mojibake ("&para" + "ms" had collapsed into a pilcrow); restored to
// the intended "&params".  No other code changes.
// NOTE(review): a number of lines (braces, else branches, early
// returns, some declarations) are elided in this excerpt.
5146 RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
5148 RtAudio::DeviceInfo info;
5149 info.probed = false;
5151 unsigned nDevices = 0;
5152 int result, subdevice, card;
5156 // Count cards and devices
5158 snd_card_next( &card );
5159 while ( card >= 0 ) {
5160 sprintf( name, "hw:%d", card );
5161 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5163 errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5164 errorText_ = errorStream_.str();
5165 error( RtError::WARNING );
5170 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5172 errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
5173 errorText_ = errorStream_.str();
5174 error( RtError::WARNING );
5177 if ( subdevice < 0 ) break;
// Found the requested device: remember its "hw:card,subdevice" name.
5178 if ( nDevices == device ) {
5179 sprintf( name, "hw:%d,%d", card, subdevice );
5185 snd_ctl_close( chandle );
5186 snd_card_next( &card );
5189 if ( nDevices == 0 ) {
5190 errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
5191 error( RtError::INVALID_USE );
5194 if ( device >= nDevices ) {
5195 errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
5196 error( RtError::INVALID_USE );
5201 // If a stream is already open, we cannot probe the stream devices.
5202 // Thus, use the saved results.
5203 if ( stream_.state != STREAM_CLOSED &&
5204 ( stream_.device[0] == device || stream_.device[1] == device ) ) {
5205 if ( device >= devices_.size() ) {
5206 errorText_ = "RtApiAlsa::getDeviceInfo: device ID was not present before stream was opened.";
5207 error( RtError::WARNING );
5210 return devices_[ device ];
5213 int openMode = SND_PCM_ASYNC;
5214 snd_pcm_stream_t stream;
5215 snd_pcm_info_t *pcminfo;
5216 snd_pcm_info_alloca( &pcminfo );
5218 snd_pcm_hw_params_t *params;
5219 snd_pcm_hw_params_alloca( &params );
5221 // First try for playback
5222 stream = SND_PCM_STREAM_PLAYBACK;
5223 snd_pcm_info_set_device( pcminfo, subdevice );
5224 snd_pcm_info_set_subdevice( pcminfo, 0 );
5225 snd_pcm_info_set_stream( pcminfo, stream );
5227 result = snd_ctl_pcm_info( chandle, pcminfo );
5229 // Device probably doesn't support playback.
5233 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
5235 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5236 errorText_ = errorStream_.str();
5237 error( RtError::WARNING );
5241 // The device is open ... fill the parameter structure.
5242 result = snd_pcm_hw_params_any( phandle, params );
5244 snd_pcm_close( phandle );
5245 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5246 errorText_ = errorStream_.str();
5247 error( RtError::WARNING );
5251 // Get output channel information.
5253 result = snd_pcm_hw_params_get_channels_max( params, &value );
5255 snd_pcm_close( phandle );
5256 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
5257 errorText_ = errorStream_.str();
5258 error( RtError::WARNING );
5261 info.outputChannels = value;
5262 snd_pcm_close( phandle );
5265 // Now try for capture
5266 stream = SND_PCM_STREAM_CAPTURE;
5267 snd_pcm_info_set_stream( pcminfo, stream );
5269 result = snd_ctl_pcm_info( chandle, pcminfo );
5270 snd_ctl_close( chandle );
5272 // Device probably doesn't support capture.
5273 if ( info.outputChannels == 0 ) return info;
5274 goto probeParameters;
5277 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5279 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5280 errorText_ = errorStream_.str();
5281 error( RtError::WARNING );
5282 if ( info.outputChannels == 0 ) return info;
5283 goto probeParameters;
5286 // The device is open ... fill the parameter structure.
5287 result = snd_pcm_hw_params_any( phandle, params );
5289 snd_pcm_close( phandle );
5290 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5291 errorText_ = errorStream_.str();
5292 error( RtError::WARNING );
5293 if ( info.outputChannels == 0 ) return info;
5294 goto probeParameters;
5297 result = snd_pcm_hw_params_get_channels_max( params, &value );
5299 snd_pcm_close( phandle );
5300 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
5301 errorText_ = errorStream_.str();
5302 error( RtError::WARNING );
5303 if ( info.outputChannels == 0 ) return info;
5304 goto probeParameters;
5306 info.inputChannels = value;
5307 snd_pcm_close( phandle );
5309 // If device opens for both playback and capture, we determine the channels.
5310 if ( info.outputChannels > 0 && info.inputChannels > 0 )
5311 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
5313 // ALSA doesn't provide default devices so we'll use the first available one.
5314 if ( device == 0 && info.outputChannels > 0 )
5315 info.isDefaultOutput = true;
5316 if ( device == 0 && info.inputChannels > 0 )
5317 info.isDefaultInput = true;
5320 // At this point, we just need to figure out the supported data
5321 // formats and sample rates. We'll proceed by opening the device in
5322 // the direction with the maximum number of channels, or playback if
5323 // they are equal. This might limit our sample rate options, but so
5326 if ( info.outputChannels >= info.inputChannels )
5327 stream = SND_PCM_STREAM_PLAYBACK;
5329 stream = SND_PCM_STREAM_CAPTURE;
5330 snd_pcm_info_set_stream( pcminfo, stream );
5332 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5334 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5335 errorText_ = errorStream_.str();
5336 error( RtError::WARNING );
5340 // The device is open ... fill the parameter structure.
5341 result = snd_pcm_hw_params_any( phandle, params );
5343 snd_pcm_close( phandle );
5344 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5345 errorText_ = errorStream_.str();
5346 error( RtError::WARNING );
5350 // Test our discrete set of sample rate values.
5351 info.sampleRates.clear();
5352 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
5353 if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 )
5354 info.sampleRates.push_back( SAMPLE_RATES[i] );
5356 if ( info.sampleRates.size() == 0 ) {
5357 snd_pcm_close( phandle );
5358 errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
5359 errorText_ = errorStream_.str();
5360 error( RtError::WARNING );
5364 // Probe the supported data formats ... we don't care about endian-ness just yet
5365 snd_pcm_format_t format;
5366 info.nativeFormats = 0;
5367 format = SND_PCM_FORMAT_S8;
5368 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5369 info.nativeFormats |= RTAUDIO_SINT8;
5370 format = SND_PCM_FORMAT_S16;
5371 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5372 info.nativeFormats |= RTAUDIO_SINT16;
5373 format = SND_PCM_FORMAT_S24;
5374 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5375 info.nativeFormats |= RTAUDIO_SINT24;
5376 format = SND_PCM_FORMAT_S32;
5377 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5378 info.nativeFormats |= RTAUDIO_SINT32;
5379 format = SND_PCM_FORMAT_FLOAT;
5380 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5381 info.nativeFormats |= RTAUDIO_FLOAT32;
5382 format = SND_PCM_FORMAT_FLOAT64;
5383 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5384 info.nativeFormats |= RTAUDIO_FLOAT64;
5386 // Check that we have at least one supported format
5387 if ( info.nativeFormats == 0 ) {
5388 errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
5389 errorText_ = errorStream_.str();
5390 error( RtError::WARNING );
5394 // Get the device name
5396 result = snd_card_get_name( card, &cardname );
5398 sprintf( name, "hw:%s,%d", cardname, subdevice );
5401 // That's all ... close the device and return
5402 snd_pcm_close( phandle );
// Snapshot DeviceInfo for every ALSA device into the devices_ cache.
// Called from probeDeviceOpen() before a stream opens, so that
// getDeviceInfo() can serve queries for devices that become busy once
// the stream is running.
5407 void RtApiAlsa :: saveDeviceInfo( void )
5411 unsigned int nDevices = getDeviceCount();
5412 devices_.resize( nDevices );
5413 for ( unsigned int i=0; i<nDevices; i++ )
5414 devices_[i] = getDeviceInfo( i );
5417 bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
5418 unsigned int firstChannel, unsigned int sampleRate,
5419 RtAudioFormat format, unsigned int *bufferSize,
5420 RtAudio::StreamOptions *options )
5423 #if defined(__RTAUDIO_DEBUG__)
5425 snd_output_stdio_attach(&out, stderr, 0);
5428 // I'm not using the "plug" interface ... too much inconsistent behavior.
5430 unsigned nDevices = 0;
5431 int result, subdevice, card;
5435 // Count cards and devices
5437 snd_card_next( &card );
5438 while ( card >= 0 ) {
5439 sprintf( name, "hw:%d", card );
5440 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5442 errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5443 errorText_ = errorStream_.str();
5448 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5449 if ( result < 0 ) break;
5450 if ( subdevice < 0 ) break;
5451 if ( nDevices == device ) {
5452 sprintf( name, "hw:%d,%d", card, subdevice );
5453 snd_ctl_close( chandle );
5458 snd_ctl_close( chandle );
5459 snd_card_next( &card );
5462 if ( nDevices == 0 ) {
5463 // This should not happen because a check is made before this function is called.
5464 errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
5468 if ( device >= nDevices ) {
5469 // This should not happen because a check is made before this function is called.
5470 errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
5476 // The getDeviceInfo() function will not work for a device that is
5477 // already open. Thus, we'll probe the system before opening a
5478 // stream and save the results for use by getDeviceInfo().
5479 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) // only do once
5480 this->saveDeviceInfo();
5482 snd_pcm_stream_t stream;
5483 if ( mode == OUTPUT )
5484 stream = SND_PCM_STREAM_PLAYBACK;
5486 stream = SND_PCM_STREAM_CAPTURE;
5489 int openMode = SND_PCM_ASYNC;
5490 result = snd_pcm_open( &phandle, name, stream, openMode );
5492 if ( mode == OUTPUT )
5493 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
5495 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
5496 errorText_ = errorStream_.str();
5500 // Fill the parameter structure.
5501 snd_pcm_hw_params_t *hw_params;
5502 snd_pcm_hw_params_alloca( &hw_params );
5503 result = snd_pcm_hw_params_any( phandle, hw_params );
5505 snd_pcm_close( phandle );
5506 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
5507 errorText_ = errorStream_.str();
5511 #if defined(__RTAUDIO_DEBUG__)
5512 fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
5513 snd_pcm_hw_params_dump( hw_params, out );
5516 // Set access ... check user preference.
5517 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
5518 stream_.userInterleaved = false;
5519 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5521 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5522 stream_.deviceInterleaved[mode] = true;
5525 stream_.deviceInterleaved[mode] = false;
5528 stream_.userInterleaved = true;
5529 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5531 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5532 stream_.deviceInterleaved[mode] = false;
5535 stream_.deviceInterleaved[mode] = true;
5539 snd_pcm_close( phandle );
5540 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
5541 errorText_ = errorStream_.str();
5545 // Determine how to set the device format.
5546 stream_.userFormat = format;
5547 snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
5549 if ( format == RTAUDIO_SINT8 )
5550 deviceFormat = SND_PCM_FORMAT_S8;
5551 else if ( format == RTAUDIO_SINT16 )
5552 deviceFormat = SND_PCM_FORMAT_S16;
5553 else if ( format == RTAUDIO_SINT24 )
5554 deviceFormat = SND_PCM_FORMAT_S24;
5555 else if ( format == RTAUDIO_SINT32 )
5556 deviceFormat = SND_PCM_FORMAT_S32;
5557 else if ( format == RTAUDIO_FLOAT32 )
5558 deviceFormat = SND_PCM_FORMAT_FLOAT;
5559 else if ( format == RTAUDIO_FLOAT64 )
5560 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5562 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
5563 stream_.deviceFormat[mode] = format;
5567 // The user requested format is not natively supported by the device.
5568 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5569 if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
5570 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5574 deviceFormat = SND_PCM_FORMAT_FLOAT;
5575 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5576 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5580 deviceFormat = SND_PCM_FORMAT_S32;
5581 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5582 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5586 deviceFormat = SND_PCM_FORMAT_S24;
5587 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5588 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
5592 deviceFormat = SND_PCM_FORMAT_S16;
5593 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5594 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5598 deviceFormat = SND_PCM_FORMAT_S8;
5599 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5600 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
5604 // If we get here, no supported format was found.
5605 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
5606 errorText_ = errorStream_.str();
5610 result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
5612 snd_pcm_close( phandle );
5613 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
5614 errorText_ = errorStream_.str();
5618 // Determine whether byte-swaping is necessary.
5619 stream_.doByteSwap[mode] = false;
5620 if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
5621 result = snd_pcm_format_cpu_endian( deviceFormat );
5623 stream_.doByteSwap[mode] = true;
5624 else if (result < 0) {
5625 snd_pcm_close( phandle );
5626 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
5627 errorText_ = errorStream_.str();
5632 // Set the sample rate.
5633 result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
5635 snd_pcm_close( phandle );
5636 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
5637 errorText_ = errorStream_.str();
5641 // Determine the number of channels for this device. We support a possible
5642 // minimum device channel number > than the value requested by the user.
5643 stream_.nUserChannels[mode] = channels;
5645 result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
5646 unsigned int deviceChannels = value;
5647 if ( result < 0 || deviceChannels < channels + firstChannel ) {
5648 snd_pcm_close( phandle );
5649 errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
5650 errorText_ = errorStream_.str();
5654 result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
5656 snd_pcm_close( phandle );
5657 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
5658 errorText_ = errorStream_.str();
5661 deviceChannels = value;
5662 if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
5663 stream_.nDeviceChannels[mode] = deviceChannels;
5665 // Set the device channels.
5666 result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
5668 snd_pcm_close( phandle );
5669 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
5670 errorText_ = errorStream_.str();
5674 // Set the buffer number, which in ALSA is referred to as the "period".
5675 int totalSize, dir = 0;
5676 unsigned int periods = 0;
5677 if ( options ) periods = options->numberOfBuffers;
5678 totalSize = *bufferSize * periods;
5680 // Set the buffer (or period) size.
5681 snd_pcm_uframes_t periodSize = *bufferSize;
5682 result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
5684 snd_pcm_close( phandle );
5685 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
5686 errorText_ = errorStream_.str();
5689 *bufferSize = periodSize;
5691 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
5692 else periods = totalSize / *bufferSize;
5693 // Even though the hardware might allow 1 buffer, it won't work reliably.
5694 if ( periods < 2 ) periods = 2;
5695 result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
5697 snd_pcm_close( phandle );
5698 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
5699 errorText_ = errorStream_.str();
5703 // If attempting to setup a duplex stream, the bufferSize parameter
5704 // MUST be the same in both directions!
5705 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
5706 errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
5707 errorText_ = errorStream_.str();
5711 stream_.bufferSize = *bufferSize;
5713 // Install the hardware configuration
5714 result = snd_pcm_hw_params( phandle, hw_params );
5716 snd_pcm_close( phandle );
5717 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5718 errorText_ = errorStream_.str();
5722 #if defined(__RTAUDIO_DEBUG__)
5723 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
5724 snd_pcm_hw_params_dump( hw_params, out );
5727 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
5728 snd_pcm_sw_params_t *sw_params = NULL;
5729 snd_pcm_sw_params_alloca( &sw_params );
5730 snd_pcm_sw_params_current( phandle, sw_params );
5731 snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
5732 snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, ULONG_MAX );
5733 snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
5735 // The following two settings were suggested by Theo Veenker
5736 //snd_pcm_sw_params_set_avail_min( phandle, sw_params, *bufferSize );
5737 //snd_pcm_sw_params_set_xfer_align( phandle, sw_params, 1 );
5739 // here are two options for a fix
5740 //snd_pcm_sw_params_set_silence_size( phandle, sw_params, ULONG_MAX );
5741 snd_pcm_uframes_t val;
5742 snd_pcm_sw_params_get_boundary( sw_params, &val );
5743 snd_pcm_sw_params_set_silence_size( phandle, sw_params, val );
5745 result = snd_pcm_sw_params( phandle, sw_params );
5747 snd_pcm_close( phandle );
5748 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5749 errorText_ = errorStream_.str();
5753 #if defined(__RTAUDIO_DEBUG__)
5754 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
5755 snd_pcm_sw_params_dump( sw_params, out );
5758 // Set flags for buffer conversion
5759 stream_.doConvertBuffer[mode] = false;
5760 if ( stream_.userFormat != stream_.deviceFormat[mode] )
5761 stream_.doConvertBuffer[mode] = true;
5762 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
5763 stream_.doConvertBuffer[mode] = true;
5764 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
5765 stream_.nUserChannels[mode] > 1 )
5766 stream_.doConvertBuffer[mode] = true;
5768 // Allocate the ApiHandle if necessary and then save.
5769 AlsaHandle *apiInfo = 0;
5770 if ( stream_.apiHandle == 0 ) {
5772 apiInfo = (AlsaHandle *) new AlsaHandle;
5774 catch ( std::bad_alloc& ) {
5775 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
5779 if ( pthread_cond_init( &apiInfo->runnable, NULL ) ) {
5780 errorText_ = "RtApiAlsa::probeDeviceOpen: error initializing pthread condition variable.";
5784 stream_.apiHandle = (void *) apiInfo;
5785 apiInfo->handles[0] = 0;
5786 apiInfo->handles[1] = 0;
5789 apiInfo = (AlsaHandle *) stream_.apiHandle;
5791 apiInfo->handles[mode] = phandle;
5793 // Allocate necessary internal buffers.
5794 unsigned long bufferBytes;
5795 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
5796 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
5797 if ( stream_.userBuffer[mode] == NULL ) {
5798 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
5802 if ( stream_.doConvertBuffer[mode] ) {
5804 bool makeBuffer = true;
5805 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
5806 if ( mode == INPUT ) {
5807 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5808 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
5809 if ( bufferBytes <= bytesOut ) makeBuffer = false;
5814 bufferBytes *= *bufferSize;
5815 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
5816 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
5817 if ( stream_.deviceBuffer == NULL ) {
5818 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
5824 stream_.sampleRate = sampleRate;
5825 stream_.nBuffers = periods;
5826 stream_.device[mode] = device;
5827 stream_.state = STREAM_STOPPED;
5829 // Setup the buffer conversion information structure.
5830 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
5832 // Setup thread if necessary.
5833 if ( stream_.mode == OUTPUT && mode == INPUT ) {
5834 // We had already set up an output stream.
5835 stream_.mode = DUPLEX;
5836 // Link the streams if possible.
5837 apiInfo->synchronized = false;
5838 if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
5839 apiInfo->synchronized = true;
5841 errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
5842 error( RtError::WARNING );
5846 stream_.mode = mode;
5848 // Setup callback thread.
5849 stream_.callbackInfo.object = (void *) this;
5851 // Set the thread attributes for joinable and realtime scheduling
5852 // priority (optional). The higher priority will only take affect
5853 // if the program is run as root or suid. Note, under Linux
5854 // processes with CAP_SYS_NICE privilege, a user can change
5855 // scheduling policy and priority (thus need not be root). See
5856 // POSIX "capabilities".
5857 pthread_attr_t attr;
5858 pthread_attr_init( &attr );
5859 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
5860 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
5861 if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
5862 struct sched_param param;
5863 int priority = options->priority;
5864 int min = sched_get_priority_min( SCHED_RR );
5865 int max = sched_get_priority_max( SCHED_RR );
5866 if ( priority < min ) priority = min;
5867 else if ( priority > max ) priority = max;
5868 param.sched_priority = priority;
5869 pthread_attr_setschedparam( &attr, ¶m );
5870 pthread_attr_setschedpolicy( &attr, SCHED_RR );
5873 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5875 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5878 stream_.callbackInfo.isRunning = true;
5879 result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
5880 pthread_attr_destroy( &attr );
5882 stream_.callbackInfo.isRunning = false;
5883 errorText_ = "RtApiAlsa::error creating callback thread!";
5892 pthread_cond_destroy( &apiInfo->runnable );
5893 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5894 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5896 stream_.apiHandle = 0;
5899 for ( int i=0; i<2; i++ ) {
5900 if ( stream_.userBuffer[i] ) {
5901 free( stream_.userBuffer[i] );
5902 stream_.userBuffer[i] = 0;
5906 if ( stream_.deviceBuffer ) {
5907 free( stream_.deviceBuffer );
5908 stream_.deviceBuffer = 0;
5914 void RtApiAlsa :: closeStream()
5916 if ( stream_.state == STREAM_CLOSED ) {
5917 errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
5918 error( RtError::WARNING );
5922 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5923 stream_.callbackInfo.isRunning = false;
5924 MUTEX_LOCK( &stream_.mutex );
5925 if ( stream_.state == STREAM_STOPPED )
5926 pthread_cond_signal( &apiInfo->runnable );
5927 MUTEX_UNLOCK( &stream_.mutex );
5928 pthread_join( stream_.callbackInfo.thread, NULL );
5930 if ( stream_.state == STREAM_RUNNING ) {
5931 stream_.state = STREAM_STOPPED;
5932 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
5933 snd_pcm_drop( apiInfo->handles[0] );
5934 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
5935 snd_pcm_drop( apiInfo->handles[1] );
5939 pthread_cond_destroy( &apiInfo->runnable );
5940 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5941 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5943 stream_.apiHandle = 0;
5946 for ( int i=0; i<2; i++ ) {
5947 if ( stream_.userBuffer[i] ) {
5948 free( stream_.userBuffer[i] );
5949 stream_.userBuffer[i] = 0;
5953 if ( stream_.deviceBuffer ) {
5954 free( stream_.deviceBuffer );
5955 stream_.deviceBuffer = 0;
5958 stream_.mode = UNINITIALIZED;
5959 stream_.state = STREAM_CLOSED;
5962 void RtApiAlsa :: startStream()
5964 // This method calls snd_pcm_prepare if the device isn't already in that state.
5967 if ( stream_.state == STREAM_RUNNING ) {
5968 errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
5969 error( RtError::WARNING );
5973 MUTEX_LOCK( &stream_.mutex );
5976 snd_pcm_state_t state;
5977 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5978 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5979 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5980 state = snd_pcm_state( handle[0] );
5981 if ( state != SND_PCM_STATE_PREPARED ) {
5982 result = snd_pcm_prepare( handle[0] );
5984 errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
5985 errorText_ = errorStream_.str();
5991 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5992 state = snd_pcm_state( handle[1] );
5993 if ( state != SND_PCM_STATE_PREPARED ) {
5994 result = snd_pcm_prepare( handle[1] );
5996 errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
5997 errorText_ = errorStream_.str();
6003 stream_.state = STREAM_RUNNING;
6006 MUTEX_UNLOCK( &stream_.mutex );
6008 pthread_cond_signal( &apiInfo->runnable );
6010 if ( result >= 0 ) return;
6011 error( RtError::SYSTEM_ERROR );
6014 void RtApiAlsa :: stopStream()
6017 if ( stream_.state == STREAM_STOPPED ) {
6018 errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
6019 error( RtError::WARNING );
6023 MUTEX_LOCK( &stream_.mutex );
6025 if ( stream_.state == STREAM_STOPPED ) {
6026 MUTEX_UNLOCK( &stream_.mutex );
6031 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6032 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
6033 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6034 if ( apiInfo->synchronized )
6035 result = snd_pcm_drop( handle[0] );
6037 result = snd_pcm_drain( handle[0] );
6039 errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
6040 errorText_ = errorStream_.str();
6045 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
6046 result = snd_pcm_drop( handle[1] );
6048 errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
6049 errorText_ = errorStream_.str();
6055 stream_.state = STREAM_STOPPED;
6056 MUTEX_UNLOCK( &stream_.mutex );
6058 if ( result >= 0 ) return;
6059 error( RtError::SYSTEM_ERROR );
6062 void RtApiAlsa :: abortStream()
6065 if ( stream_.state == STREAM_STOPPED ) {
6066 errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
6067 error( RtError::WARNING );
6071 MUTEX_LOCK( &stream_.mutex );
6073 if ( stream_.state == STREAM_STOPPED ) {
6074 MUTEX_UNLOCK( &stream_.mutex );
6079 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6080 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
6081 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6082 result = snd_pcm_drop( handle[0] );
6084 errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
6085 errorText_ = errorStream_.str();
6090 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
6091 result = snd_pcm_drop( handle[1] );
6093 errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
6094 errorText_ = errorStream_.str();
6100 stream_.state = STREAM_STOPPED;
6101 MUTEX_UNLOCK( &stream_.mutex );
6103 if ( result >= 0 ) return;
6104 error( RtError::SYSTEM_ERROR );
6107 void RtApiAlsa :: callbackEvent()
6109 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
6110 if ( stream_.state == STREAM_STOPPED ) {
6111 MUTEX_LOCK( &stream_.mutex );
6112 pthread_cond_wait( &apiInfo->runnable, &stream_.mutex );
6113 if ( stream_.state != STREAM_RUNNING ) {
6114 MUTEX_UNLOCK( &stream_.mutex );
6117 MUTEX_UNLOCK( &stream_.mutex );
6120 if ( stream_.state == STREAM_CLOSED ) {
6121 errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
6122 error( RtError::WARNING );
6126 int doStopStream = 0;
6127 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
6128 double streamTime = getStreamTime();
6129 RtAudioStreamStatus status = 0;
6130 if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
6131 status |= RTAUDIO_OUTPUT_UNDERFLOW;
6132 apiInfo->xrun[0] = false;
6134 if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
6135 status |= RTAUDIO_INPUT_OVERFLOW;
6136 apiInfo->xrun[1] = false;
6138 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
6139 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
6141 if ( doStopStream == 2 ) {
6146 MUTEX_LOCK( &stream_.mutex );
6148 // The state might change while waiting on a mutex.
6149 if ( stream_.state == STREAM_STOPPED ) goto unlock;
6155 snd_pcm_sframes_t frames;
6156 RtAudioFormat format;
6157 handle = (snd_pcm_t **) apiInfo->handles;
6159 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
6161 // Setup parameters.
6162 if ( stream_.doConvertBuffer[1] ) {
6163 buffer = stream_.deviceBuffer;
6164 channels = stream_.nDeviceChannels[1];
6165 format = stream_.deviceFormat[1];
6168 buffer = stream_.userBuffer[1];
6169 channels = stream_.nUserChannels[1];
6170 format = stream_.userFormat;
6173 // Read samples from device in interleaved/non-interleaved format.
6174 if ( stream_.deviceInterleaved[1] )
6175 result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
6177 void *bufs[channels];
6178 size_t offset = stream_.bufferSize * formatBytes( format );
6179 for ( int i=0; i<channels; i++ )
6180 bufs[i] = (void *) (buffer + (i * offset));
6181 result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
6184 if ( result < (int) stream_.bufferSize ) {
6185 // Either an error or overrun occured.
6186 if ( result == -EPIPE ) {
6187 snd_pcm_state_t state = snd_pcm_state( handle[1] );
6188 if ( state == SND_PCM_STATE_XRUN ) {
6189 apiInfo->xrun[1] = true;
6190 result = snd_pcm_prepare( handle[1] );
6192 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
6193 errorText_ = errorStream_.str();
6197 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
6198 errorText_ = errorStream_.str();
6202 errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
6203 errorText_ = errorStream_.str();
6205 error( RtError::WARNING );
6209 // Do byte swapping if necessary.
6210 if ( stream_.doByteSwap[1] )
6211 byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
6213 // Do buffer conversion if necessary.
6214 if ( stream_.doConvertBuffer[1] )
6215 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
6217 // Check stream latency
6218 result = snd_pcm_delay( handle[1], &frames );
6219 if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
6224 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6226 // Setup parameters and do buffer conversion if necessary.
6227 if ( stream_.doConvertBuffer[0] ) {
6228 buffer = stream_.deviceBuffer;
6229 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
6230 channels = stream_.nDeviceChannels[0];
6231 format = stream_.deviceFormat[0];
6234 buffer = stream_.userBuffer[0];
6235 channels = stream_.nUserChannels[0];
6236 format = stream_.userFormat;
6239 // Do byte swapping if necessary.
6240 if ( stream_.doByteSwap[0] )
6241 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
6243 // Write samples to device in interleaved/non-interleaved format.
6244 if ( stream_.deviceInterleaved[0] )
6245 result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
6247 void *bufs[channels];
6248 size_t offset = stream_.bufferSize * formatBytes( format );
6249 for ( int i=0; i<channels; i++ )
6250 bufs[i] = (void *) (buffer + (i * offset));
6251 result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
6254 if ( result < (int) stream_.bufferSize ) {
6255 // Either an error or underrun occured.
6256 if ( result == -EPIPE ) {
6257 snd_pcm_state_t state = snd_pcm_state( handle[0] );
6258 if ( state == SND_PCM_STATE_XRUN ) {
6259 apiInfo->xrun[0] = true;
6260 result = snd_pcm_prepare( handle[0] );
6262 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
6263 errorText_ = errorStream_.str();
6267 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
6268 errorText_ = errorStream_.str();
6272 errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
6273 errorText_ = errorStream_.str();
6275 error( RtError::WARNING );
6279 // Check stream latency
6280 result = snd_pcm_delay( handle[0], &frames );
6281 if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
6285 MUTEX_UNLOCK( &stream_.mutex );
6287 RtApi::tickStreamTime();
6288 if ( doStopStream == 1 ) this->stopStream();
6291 extern "C" void *alsaCallbackHandler( void *ptr )
6293 CallbackInfo *info = (CallbackInfo *) ptr;
6294 RtApiAlsa *object = (RtApiAlsa *) info->object;
6295 bool *isRunning = &info->isRunning;
6297 while ( *isRunning == true ) {
6298 pthread_testcancel();
6299 object->callbackEvent();
6302 pthread_exit( NULL );
6305 //******************** End of __LINUX_ALSA__ *********************//
6309 #if defined(__LINUX_OSS__)
6312 #include <sys/ioctl.h>
6315 #include "soundcard.h"
6319 extern "C" void *ossCallbackHandler(void * ptr);
6321 // A structure to hold various information related to the OSS API
6324 int id[2]; // device ids
6327 pthread_cond_t runnable;
6330 :triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
6333 RtApiOss :: RtApiOss()
6335 // Nothing to do here.
6338 RtApiOss :: ~RtApiOss()
6340 if ( stream_.state != STREAM_CLOSED ) closeStream();
6343 unsigned int RtApiOss :: getDeviceCount( void )
6345 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6346 if ( mixerfd == -1 ) {
6347 errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
6348 error( RtError::WARNING );
6352 oss_sysinfo sysinfo;
6353 if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
6355 errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
6356 error( RtError::WARNING );
6361 return sysinfo.numaudios;
6364 RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
6366 RtAudio::DeviceInfo info;
6367 info.probed = false;
6369 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6370 if ( mixerfd == -1 ) {
6371 errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
6372 error( RtError::WARNING );
6376 oss_sysinfo sysinfo;
6377 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6378 if ( result == -1 ) {
6380 errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
6381 error( RtError::WARNING );
6385 unsigned nDevices = sysinfo.numaudios;
6386 if ( nDevices == 0 ) {
6388 errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
6389 error( RtError::INVALID_USE );
6392 if ( device >= nDevices ) {
6394 errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
6395 error( RtError::INVALID_USE );
6398 oss_audioinfo ainfo;
6400 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6402 if ( result == -1 ) {
6403 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6404 errorText_ = errorStream_.str();
6405 error( RtError::WARNING );
6410 if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
6411 if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
6412 if ( ainfo.caps & PCM_CAP_DUPLEX ) {
6413 if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )
6414 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
6417 // Probe data formats ... do for input
6418 unsigned long mask = ainfo.iformats;
6419 if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
6420 info.nativeFormats |= RTAUDIO_SINT16;
6421 if ( mask & AFMT_S8 )
6422 info.nativeFormats |= RTAUDIO_SINT8;
6423 if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
6424 info.nativeFormats |= RTAUDIO_SINT32;
6425 if ( mask & AFMT_FLOAT )
6426 info.nativeFormats |= RTAUDIO_FLOAT32;
6427 if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
6428 info.nativeFormats |= RTAUDIO_SINT24;
6430 // Check that we have at least one supported format
6431 if ( info.nativeFormats == 0 ) {
6432 errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
6433 errorText_ = errorStream_.str();
6434 error( RtError::WARNING );
6438 // Probe the supported sample rates.
6439 info.sampleRates.clear();
6440 if ( ainfo.nrates ) {
6441 for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
6442 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6443 if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
6444 info.sampleRates.push_back( SAMPLE_RATES[k] );
6451 // Check min and max rate values;
6452 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6453 if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] )
6454 info.sampleRates.push_back( SAMPLE_RATES[k] );
6458 if ( info.sampleRates.size() == 0 ) {
6459 errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
6460 errorText_ = errorStream_.str();
6461 error( RtError::WARNING );
6465 info.name = ainfo.name;
6472 bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
6473 unsigned int firstChannel, unsigned int sampleRate,
6474 RtAudioFormat format, unsigned int *bufferSize,
6475 RtAudio::StreamOptions *options )
6477 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6478 if ( mixerfd == -1 ) {
6479 errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
6483 oss_sysinfo sysinfo;
6484 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6485 if ( result == -1 ) {
6487 errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
6491 unsigned nDevices = sysinfo.numaudios;
6492 if ( nDevices == 0 ) {
6493 // This should not happen because a check is made before this function is called.
6495 errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
6499 if ( device >= nDevices ) {
6500 // This should not happen because a check is made before this function is called.
6502 errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
6506 oss_audioinfo ainfo;
6508 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6510 if ( result == -1 ) {
6511 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6512 errorText_ = errorStream_.str();
6516 // Check if device supports input or output
6517 if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
6518 ( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
6519 if ( mode == OUTPUT )
6520 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
6522 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
6523 errorText_ = errorStream_.str();
6528 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6529 if ( mode == OUTPUT )
6531 else { // mode == INPUT
6532 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
6533 // We just set the same device for playback ... close and reopen for duplex (OSS only).
6534 close( handle->id[0] );
6536 if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
6537 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
6538 errorText_ = errorStream_.str();
6541 // Check that the number previously set channels is the same.
6542 if ( stream_.nUserChannels[0] != channels ) {
6543 errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
6544 errorText_ = errorStream_.str();
6553 // Set exclusive access if specified.
6554 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
6556 // Try to open the device.
6558 fd = open( ainfo.devnode, flags, 0 );
6560 if ( errno == EBUSY )
6561 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
6563 errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
6564 errorText_ = errorStream_.str();
6568 // For duplex operation, specifically set this mode (this doesn't seem to work).
6570 if ( flags | O_RDWR ) {
6571 result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
6572 if ( result == -1) {
6573 errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
6574 errorText_ = errorStream_.str();
6580 // Check the device channel support.
6581 stream_.nUserChannels[mode] = channels;
6582 if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
6584 errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
6585 errorText_ = errorStream_.str();
6589 // Set the number of channels.
6590 int deviceChannels = channels + firstChannel;
6591 result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
6592 if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
6594 errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
6595 errorText_ = errorStream_.str();
6598 stream_.nDeviceChannels[mode] = deviceChannels;
6600 // Get the data format mask
6602 result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
6603 if ( result == -1 ) {
6605 errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
6606 errorText_ = errorStream_.str();
6610 // Determine how to set the device format.
6611 stream_.userFormat = format;
6612 int deviceFormat = -1;
6613 stream_.doByteSwap[mode] = false;
6614 if ( format == RTAUDIO_SINT8 ) {
6615 if ( mask & AFMT_S8 ) {
6616 deviceFormat = AFMT_S8;
6617 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6620 else if ( format == RTAUDIO_SINT16 ) {
6621 if ( mask & AFMT_S16_NE ) {
6622 deviceFormat = AFMT_S16_NE;
6623 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6625 else if ( mask & AFMT_S16_OE ) {
6626 deviceFormat = AFMT_S16_OE;
6627 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6628 stream_.doByteSwap[mode] = true;
6631 else if ( format == RTAUDIO_SINT24 ) {
6632 if ( mask & AFMT_S24_NE ) {
6633 deviceFormat = AFMT_S24_NE;
6634 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6636 else if ( mask & AFMT_S24_OE ) {
6637 deviceFormat = AFMT_S24_OE;
6638 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6639 stream_.doByteSwap[mode] = true;
6642 else if ( format == RTAUDIO_SINT32 ) {
6643 if ( mask & AFMT_S32_NE ) {
6644 deviceFormat = AFMT_S32_NE;
6645 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6647 else if ( mask & AFMT_S32_OE ) {
6648 deviceFormat = AFMT_S32_OE;
6649 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6650 stream_.doByteSwap[mode] = true;
6654 if ( deviceFormat == -1 ) {
6655 // The user requested format is not natively supported by the device.
6656 if ( mask & AFMT_S16_NE ) {
6657 deviceFormat = AFMT_S16_NE;
6658 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6660 else if ( mask & AFMT_S32_NE ) {
6661 deviceFormat = AFMT_S32_NE;
6662 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6664 else if ( mask & AFMT_S24_NE ) {
6665 deviceFormat = AFMT_S24_NE;
6666 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6668 else if ( mask & AFMT_S16_OE ) {
6669 deviceFormat = AFMT_S16_OE;
6670 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6671 stream_.doByteSwap[mode] = true;
6673 else if ( mask & AFMT_S32_OE ) {
6674 deviceFormat = AFMT_S32_OE;
6675 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6676 stream_.doByteSwap[mode] = true;
6678 else if ( mask & AFMT_S24_OE ) {
6679 deviceFormat = AFMT_S24_OE;
6680 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6681 stream_.doByteSwap[mode] = true;
6683 else if ( mask & AFMT_S8) {
6684 deviceFormat = AFMT_S8;
6685 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6689 if ( stream_.deviceFormat[mode] == 0 ) {
6690 // This really shouldn't happen ...
6692 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
6693 errorText_ = errorStream_.str();
6697 // Set the data format.
6698 int temp = deviceFormat;
6699 result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
6700 if ( result == -1 || deviceFormat != temp ) {
6702 errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
6703 errorText_ = errorStream_.str();
6707 // Attempt to set the buffer size. According to OSS, the minimum
6708 // number of buffers is two. The supposed minimum buffer size is 16
6709 // bytes, so that will be our lower bound. The argument to this
6710 // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
6711 // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
6712 // We'll check the actual value used near the end of the setup
6714 int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
6715 if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
6717 if ( options ) buffers = options->numberOfBuffers;
6718 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
6719 if ( buffers < 2 ) buffers = 3;
6720 temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
6721 result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
6722 if ( result == -1 ) {
6724 errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
6725 errorText_ = errorStream_.str();
6728 stream_.nBuffers = buffers;
6730 // Save buffer size (in sample frames).
6731 *bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
6732 stream_.bufferSize = *bufferSize;
6734 // Set the sample rate.
6735 int srate = sampleRate;
6736 result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
6737 if ( result == -1 ) {
6739 errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
6740 errorText_ = errorStream_.str();
6744 // Verify the sample rate setup worked.
6745 if ( abs( srate - sampleRate ) > 100 ) {
6747 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
6748 errorText_ = errorStream_.str();
6751 stream_.sampleRate = sampleRate;
6753 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
6754 // We're doing duplex setup here.
6755 stream_.deviceFormat[0] = stream_.deviceFormat[1];
6756 stream_.nDeviceChannels[0] = deviceChannels;
6759 // Set interleaving parameters.
6760 stream_.userInterleaved = true;
6761 stream_.deviceInterleaved[mode] = true;
6762 if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
6763 stream_.userInterleaved = false;
6765 // Set flags for buffer conversion
6766 stream_.doConvertBuffer[mode] = false;
6767 if ( stream_.userFormat != stream_.deviceFormat[mode] )
6768 stream_.doConvertBuffer[mode] = true;
6769 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
6770 stream_.doConvertBuffer[mode] = true;
6771 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
6772 stream_.nUserChannels[mode] > 1 )
6773 stream_.doConvertBuffer[mode] = true;
6775 // Allocate the stream handles if necessary and then save.
6776 if ( stream_.apiHandle == 0 ) {
6778 handle = new OssHandle;
6780 catch ( std::bad_alloc& ) {
6781 errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
6785 if ( pthread_cond_init( &handle->runnable, NULL ) ) {
6786 errorText_ = "RtApiOss::probeDeviceOpen: error initializing pthread condition variable.";
6790 stream_.apiHandle = (void *) handle;
6793 handle = (OssHandle *) stream_.apiHandle;
6795 handle->id[mode] = fd;
6797 // Allocate necessary internal buffers.
6798 unsigned long bufferBytes;
6799 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
6800 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
6801 if ( stream_.userBuffer[mode] == NULL ) {
6802 errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
6806 if ( stream_.doConvertBuffer[mode] ) {
6808 bool makeBuffer = true;
6809 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
6810 if ( mode == INPUT ) {
6811 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
6812 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
6813 if ( bufferBytes <= bytesOut ) makeBuffer = false;
6818 bufferBytes *= *bufferSize;
6819 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
6820 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
6821 if ( stream_.deviceBuffer == NULL ) {
6822 errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
6828 stream_.device[mode] = device;
6829 stream_.state = STREAM_STOPPED;
6831 // Setup the buffer conversion information structure.
6832 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
6834 // Setup thread if necessary.
6835 if ( stream_.mode == OUTPUT && mode == INPUT ) {
6836 // We had already set up an output stream.
6837 stream_.mode = DUPLEX;
6838 if ( stream_.device[0] == device ) handle->id[0] = fd;
6841 stream_.mode = mode;
6843 // Setup callback thread.
6844 stream_.callbackInfo.object = (void *) this;
6846 // Set the thread attributes for joinable and realtime scheduling
6847 // priority. The higher priority will only take affect if the
6848 // program is run as root or suid.
6849 pthread_attr_t attr;
6850 pthread_attr_init( &attr );
6851 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
6852 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
6853 if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
6854 struct sched_param param;
6855 int priority = options->priority;
6856 int min = sched_get_priority_min( SCHED_RR );
6857 int max = sched_get_priority_max( SCHED_RR );
6858 if ( priority < min ) priority = min;
6859 else if ( priority > max ) priority = max;
6860 param.sched_priority = priority;
6861 pthread_attr_setschedparam( &attr, ¶m );
6862 pthread_attr_setschedpolicy( &attr, SCHED_RR );
6865 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6867 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6870 stream_.callbackInfo.isRunning = true;
6871 result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
6872 pthread_attr_destroy( &attr );
6874 stream_.callbackInfo.isRunning = false;
6875 errorText_ = "RtApiOss::error creating callback thread!";
6884 pthread_cond_destroy( &handle->runnable );
6885 if ( handle->id[0] ) close( handle->id[0] );
6886 if ( handle->id[1] ) close( handle->id[1] );
6888 stream_.apiHandle = 0;
6891 for ( int i=0; i<2; i++ ) {
6892 if ( stream_.userBuffer[i] ) {
6893 free( stream_.userBuffer[i] );
6894 stream_.userBuffer[i] = 0;
6898 if ( stream_.deviceBuffer ) {
6899 free( stream_.deviceBuffer );
6900 stream_.deviceBuffer = 0;
6906 void RtApiOss :: closeStream()
6908 if ( stream_.state == STREAM_CLOSED ) {
6909 errorText_ = "RtApiOss::closeStream(): no open stream to close!";
6910 error( RtError::WARNING );
6914 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6915 stream_.callbackInfo.isRunning = false;
6916 MUTEX_LOCK( &stream_.mutex );
6917 if ( stream_.state == STREAM_STOPPED )
6918 pthread_cond_signal( &handle->runnable );
6919 MUTEX_UNLOCK( &stream_.mutex );
6920 pthread_join( stream_.callbackInfo.thread, NULL );
6922 if ( stream_.state == STREAM_RUNNING ) {
6923 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
6924 ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6926 ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6927 stream_.state = STREAM_STOPPED;
6931 pthread_cond_destroy( &handle->runnable );
6932 if ( handle->id[0] ) close( handle->id[0] );
6933 if ( handle->id[1] ) close( handle->id[1] );
6935 stream_.apiHandle = 0;
6938 for ( int i=0; i<2; i++ ) {
6939 if ( stream_.userBuffer[i] ) {
6940 free( stream_.userBuffer[i] );
6941 stream_.userBuffer[i] = 0;
6945 if ( stream_.deviceBuffer ) {
6946 free( stream_.deviceBuffer );
6947 stream_.deviceBuffer = 0;
6950 stream_.mode = UNINITIALIZED;
6951 stream_.state = STREAM_CLOSED;
6954 void RtApiOss :: startStream()
6957 if ( stream_.state == STREAM_RUNNING ) {
6958 errorText_ = "RtApiOss::startStream(): the stream is already running!";
6959 error( RtError::WARNING );
6963 MUTEX_LOCK( &stream_.mutex );
6965 stream_.state = STREAM_RUNNING;
6967 // No need to do anything else here ... OSS automatically starts
6968 // when fed samples.
6970 MUTEX_UNLOCK( &stream_.mutex );
6972 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6973 pthread_cond_signal( &handle->runnable );
6976 void RtApiOss :: stopStream()
6979 if ( stream_.state == STREAM_STOPPED ) {
6980 errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
6981 error( RtError::WARNING );
6985 MUTEX_LOCK( &stream_.mutex );
6987 // The state might change while waiting on a mutex.
6988 if ( stream_.state == STREAM_STOPPED ) {
6989 MUTEX_UNLOCK( &stream_.mutex );
6994 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6995 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6997 // Flush the output with zeros a few times.
7000 RtAudioFormat format;
7002 if ( stream_.doConvertBuffer[0] ) {
7003 buffer = stream_.deviceBuffer;
7004 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
7005 format = stream_.deviceFormat[0];
7008 buffer = stream_.userBuffer[0];
7009 samples = stream_.bufferSize * stream_.nUserChannels[0];
7010 format = stream_.userFormat;
7013 memset( buffer, 0, samples * formatBytes(format) );
7014 for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
7015 result = write( handle->id[0], buffer, samples * formatBytes(format) );
7016 if ( result == -1 ) {
7017 errorText_ = "RtApiOss::stopStream: audio write error.";
7018 error( RtError::WARNING );
7022 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
7023 if ( result == -1 ) {
7024 errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
7025 errorText_ = errorStream_.str();
7028 handle->triggered = false;
7031 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
7032 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
7033 if ( result == -1 ) {
7034 errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
7035 errorText_ = errorStream_.str();
7041 stream_.state = STREAM_STOPPED;
7042 MUTEX_UNLOCK( &stream_.mutex );
7044 if ( result != -1 ) return;
7045 error( RtError::SYSTEM_ERROR );
7048 void RtApiOss :: abortStream()
7051 if ( stream_.state == STREAM_STOPPED ) {
7052 errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
7053 error( RtError::WARNING );
7057 MUTEX_LOCK( &stream_.mutex );
7059 // The state might change while waiting on a mutex.
7060 if ( stream_.state == STREAM_STOPPED ) {
7061 MUTEX_UNLOCK( &stream_.mutex );
7066 OssHandle *handle = (OssHandle *) stream_.apiHandle;
7067 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
7068 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
7069 if ( result == -1 ) {
7070 errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
7071 errorText_ = errorStream_.str();
7074 handle->triggered = false;
7077 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
7078 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
7079 if ( result == -1 ) {
7080 errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
7081 errorText_ = errorStream_.str();
7087 stream_.state = STREAM_STOPPED;
7088 MUTEX_UNLOCK( &stream_.mutex );
7090 if ( result != -1 ) return;
7091 error( RtError::SYSTEM_ERROR );
7094 void RtApiOss :: callbackEvent()
7096 OssHandle *handle = (OssHandle *) stream_.apiHandle;
7097 if ( stream_.state == STREAM_STOPPED ) {
7098 MUTEX_LOCK( &stream_.mutex );
7099 pthread_cond_wait( &handle->runnable, &stream_.mutex );
7100 if ( stream_.state != STREAM_RUNNING ) {
7101 MUTEX_UNLOCK( &stream_.mutex );
7104 MUTEX_UNLOCK( &stream_.mutex );
7107 if ( stream_.state == STREAM_CLOSED ) {
7108 errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
7109 error( RtError::WARNING );
7113 // Invoke user callback to get fresh output data.
7114 int doStopStream = 0;
7115 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
7116 double streamTime = getStreamTime();
7117 RtAudioStreamStatus status = 0;
7118 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
7119 status |= RTAUDIO_OUTPUT_UNDERFLOW;
7120 handle->xrun[0] = false;
7122 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
7123 status |= RTAUDIO_INPUT_OVERFLOW;
7124 handle->xrun[1] = false;
7126 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
7127 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
7128 if ( doStopStream == 2 ) {
7129 this->abortStream();
7133 MUTEX_LOCK( &stream_.mutex );
7135 // The state might change while waiting on a mutex.
7136 if ( stream_.state == STREAM_STOPPED ) goto unlock;
7141 RtAudioFormat format;
7143 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
7145 // Setup parameters and do buffer conversion if necessary.
7146 if ( stream_.doConvertBuffer[0] ) {
7147 buffer = stream_.deviceBuffer;
7148 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
7149 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
7150 format = stream_.deviceFormat[0];
7153 buffer = stream_.userBuffer[0];
7154 samples = stream_.bufferSize * stream_.nUserChannels[0];
7155 format = stream_.userFormat;
7158 // Do byte swapping if necessary.
7159 if ( stream_.doByteSwap[0] )
7160 byteSwapBuffer( buffer, samples, format );
7162 if ( stream_.mode == DUPLEX && handle->triggered == false ) {
7164 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
7165 result = write( handle->id[0], buffer, samples * formatBytes(format) );
7166 trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
7167 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
7168 handle->triggered = true;
7171 // Write samples to device.
7172 result = write( handle->id[0], buffer, samples * formatBytes(format) );
7174 if ( result == -1 ) {
7175 // We'll assume this is an underrun, though there isn't a
7176 // specific means for determining that.
7177 handle->xrun[0] = true;
7178 errorText_ = "RtApiOss::callbackEvent: audio write error.";
7179 error( RtError::WARNING );
7180 // Continue on to input section.
7184 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
7186 // Setup parameters.
7187 if ( stream_.doConvertBuffer[1] ) {
7188 buffer = stream_.deviceBuffer;
7189 samples = stream_.bufferSize * stream_.nDeviceChannels[1];
7190 format = stream_.deviceFormat[1];
7193 buffer = stream_.userBuffer[1];
7194 samples = stream_.bufferSize * stream_.nUserChannels[1];
7195 format = stream_.userFormat;
7198 // Read samples from device.
7199 result = read( handle->id[1], buffer, samples * formatBytes(format) );
7201 if ( result == -1 ) {
7202 // We'll assume this is an overrun, though there isn't a
7203 // specific means for determining that.
7204 handle->xrun[1] = true;
7205 errorText_ = "RtApiOss::callbackEvent: audio read error.";
7206 error( RtError::WARNING );
7210 // Do byte swapping if necessary.
7211 if ( stream_.doByteSwap[1] )
7212 byteSwapBuffer( buffer, samples, format );
7214 // Do buffer conversion if necessary.
7215 if ( stream_.doConvertBuffer[1] )
7216 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
7220 MUTEX_UNLOCK( &stream_.mutex );
7222 RtApi::tickStreamTime();
7223 if ( doStopStream == 1 ) this->stopStream();
7226 extern "C" void *ossCallbackHandler( void *ptr )
7228 CallbackInfo *info = (CallbackInfo *) ptr;
7229 RtApiOss *object = (RtApiOss *) info->object;
7230 bool *isRunning = &info->isRunning;
7232 while ( *isRunning == true ) {
7233 pthread_testcancel();
7234 object->callbackEvent();
7237 pthread_exit( NULL );
7240 //******************** End of __LINUX_OSS__ *********************//
7244 // *************************************************** //
7246 // Protected common (OS-independent) RtAudio methods.
7248 // *************************************************** //
7250 // This method can be modified to control the behavior of error
7251 // message printing.
7252 void RtApi :: error( RtError::Type type )
7254 errorStream_.str(""); // clear the ostringstream
7255 if ( type == RtError::WARNING && showWarnings_ == true )
7256 std::cerr << '\n' << errorText_ << "\n\n";
7258 throw( RtError( errorText_, type ) );
7261 void RtApi :: verifyStream()
7263 if ( stream_.state == STREAM_CLOSED ) {
7264 errorText_ = "RtApi:: a stream is not open!";
7265 error( RtError::INVALID_USE );
7269 void RtApi :: clearStreamInfo()
7271 stream_.mode = UNINITIALIZED;
7272 stream_.state = STREAM_CLOSED;
7273 stream_.sampleRate = 0;
7274 stream_.bufferSize = 0;
7275 stream_.nBuffers = 0;
7276 stream_.userFormat = 0;
7277 stream_.userInterleaved = true;
7278 stream_.streamTime = 0.0;
7279 stream_.apiHandle = 0;
7280 stream_.deviceBuffer = 0;
7281 stream_.callbackInfo.callback = 0;
7282 stream_.callbackInfo.userData = 0;
7283 stream_.callbackInfo.isRunning = false;
7284 for ( int i=0; i<2; i++ ) {
7285 stream_.device[i] = 11111;
7286 stream_.doConvertBuffer[i] = false;
7287 stream_.deviceInterleaved[i] = true;
7288 stream_.doByteSwap[i] = false;
7289 stream_.nUserChannels[i] = 0;
7290 stream_.nDeviceChannels[i] = 0;
7291 stream_.channelOffset[i] = 0;
7292 stream_.deviceFormat[i] = 0;
7293 stream_.latency[i] = 0;
7294 stream_.userBuffer[i] = 0;
7295 stream_.convertInfo[i].channels = 0;
7296 stream_.convertInfo[i].inJump = 0;
7297 stream_.convertInfo[i].outJump = 0;
7298 stream_.convertInfo[i].inFormat = 0;
7299 stream_.convertInfo[i].outFormat = 0;
7300 stream_.convertInfo[i].inOffset.clear();
7301 stream_.convertInfo[i].outOffset.clear();
7305 unsigned int RtApi :: formatBytes( RtAudioFormat format )
7307 if ( format == RTAUDIO_SINT16 )
7309 else if ( format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
7310 format == RTAUDIO_FLOAT32 )
7312 else if ( format == RTAUDIO_FLOAT64 )
7314 else if ( format == RTAUDIO_SINT8 )
7317 errorText_ = "RtApi::formatBytes: undefined format.";
7318 error( RtError::WARNING );
7323 void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
7325 if ( mode == INPUT ) { // convert device to user buffer
7326 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
7327 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
7328 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
7329 stream_.convertInfo[mode].outFormat = stream_.userFormat;
7331 else { // convert user to device buffer
7332 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
7333 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
7334 stream_.convertInfo[mode].inFormat = stream_.userFormat;
7335 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
7338 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
7339 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
7341 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
7343 // Set up the interleave/deinterleave offsets.
7344 if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
7345 if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
7346 ( mode == INPUT && stream_.userInterleaved ) ) {
7347 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7348 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
7349 stream_.convertInfo[mode].outOffset.push_back( k );
7350 stream_.convertInfo[mode].inJump = 1;
7354 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7355 stream_.convertInfo[mode].inOffset.push_back( k );
7356 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
7357 stream_.convertInfo[mode].outJump = 1;
7361 else { // no (de)interleaving
7362 if ( stream_.userInterleaved ) {
7363 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7364 stream_.convertInfo[mode].inOffset.push_back( k );
7365 stream_.convertInfo[mode].outOffset.push_back( k );
7369 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
7370 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
7371 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
7372 stream_.convertInfo[mode].inJump = 1;
7373 stream_.convertInfo[mode].outJump = 1;
7378 // Add channel offset.
7379 if ( firstChannel > 0 ) {
7380 if ( stream_.deviceInterleaved[mode] ) {
7381 if ( mode == OUTPUT ) {
7382 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7383 stream_.convertInfo[mode].outOffset[k] += firstChannel;
7386 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7387 stream_.convertInfo[mode].inOffset[k] += firstChannel;
7391 if ( mode == OUTPUT ) {
7392 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7393 stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
7396 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7397 stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
7403 void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
7405 // This function does format conversion, input/output channel compensation, and
7406 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
7407 // the lower three bytes of a 32-bit integer.
7409 // Clear our device buffer when in/out duplex device channels are different
7410 if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
7411 ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
7412 memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
7415 if (info.outFormat == RTAUDIO_FLOAT64) {
7417 Float64 *out = (Float64 *)outBuffer;
7419 if (info.inFormat == RTAUDIO_SINT8) {
7420 signed char *in = (signed char *)inBuffer;
7421 scale = 1.0 / 127.5;
7422 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7423 for (j=0; j<info.channels; j++) {
7424 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7425 out[info.outOffset[j]] += 0.5;
7426 out[info.outOffset[j]] *= scale;
7429 out += info.outJump;
7432 else if (info.inFormat == RTAUDIO_SINT16) {
7433 Int16 *in = (Int16 *)inBuffer;
7434 scale = 1.0 / 32767.5;
7435 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7436 for (j=0; j<info.channels; j++) {
7437 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7438 out[info.outOffset[j]] += 0.5;
7439 out[info.outOffset[j]] *= scale;
7442 out += info.outJump;
7445 else if (info.inFormat == RTAUDIO_SINT24) {
7446 Int32 *in = (Int32 *)inBuffer;
7447 scale = 1.0 / 8388607.5;
7448 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7449 for (j=0; j<info.channels; j++) {
7450 out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0x00ffffff);
7451 out[info.outOffset[j]] += 0.5;
7452 out[info.outOffset[j]] *= scale;
7455 out += info.outJump;
7458 else if (info.inFormat == RTAUDIO_SINT32) {
7459 Int32 *in = (Int32 *)inBuffer;
7460 scale = 1.0 / 2147483647.5;
7461 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7462 for (j=0; j<info.channels; j++) {
7463 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7464 out[info.outOffset[j]] += 0.5;
7465 out[info.outOffset[j]] *= scale;
7468 out += info.outJump;
7471 else if (info.inFormat == RTAUDIO_FLOAT32) {
7472 Float32 *in = (Float32 *)inBuffer;
7473 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7474 for (j=0; j<info.channels; j++) {
7475 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7478 out += info.outJump;
7481 else if (info.inFormat == RTAUDIO_FLOAT64) {
7482 // Channel compensation and/or (de)interleaving only.
7483 Float64 *in = (Float64 *)inBuffer;
7484 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7485 for (j=0; j<info.channels; j++) {
7486 out[info.outOffset[j]] = in[info.inOffset[j]];
7489 out += info.outJump;
7493 else if (info.outFormat == RTAUDIO_FLOAT32) {
7495 Float32 *out = (Float32 *)outBuffer;
7497 if (info.inFormat == RTAUDIO_SINT8) {
7498 signed char *in = (signed char *)inBuffer;
7499 scale = (Float32) ( 1.0 / 127.5 );
7500 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7501 for (j=0; j<info.channels; j++) {
7502 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7503 out[info.outOffset[j]] += 0.5;
7504 out[info.outOffset[j]] *= scale;
7507 out += info.outJump;
7510 else if (info.inFormat == RTAUDIO_SINT16) {
7511 Int16 *in = (Int16 *)inBuffer;
7512 scale = (Float32) ( 1.0 / 32767.5 );
7513 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7514 for (j=0; j<info.channels; j++) {
7515 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7516 out[info.outOffset[j]] += 0.5;
7517 out[info.outOffset[j]] *= scale;
7520 out += info.outJump;
7523 else if (info.inFormat == RTAUDIO_SINT24) {
7524 Int32 *in = (Int32 *)inBuffer;
7525 scale = (Float32) ( 1.0 / 8388607.5 );
7526 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7527 for (j=0; j<info.channels; j++) {
7528 out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0x00ffffff);
7529 out[info.outOffset[j]] += 0.5;
7530 out[info.outOffset[j]] *= scale;
7533 out += info.outJump;
7536 else if (info.inFormat == RTAUDIO_SINT32) {
7537 Int32 *in = (Int32 *)inBuffer;
7538 scale = (Float32) ( 1.0 / 2147483647.5 );
7539 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7540 for (j=0; j<info.channels; j++) {
7541 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7542 out[info.outOffset[j]] += 0.5;
7543 out[info.outOffset[j]] *= scale;
7546 out += info.outJump;
7549 else if (info.inFormat == RTAUDIO_FLOAT32) {
7550 // Channel compensation and/or (de)interleaving only.
7551 Float32 *in = (Float32 *)inBuffer;
7552 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7553 for (j=0; j<info.channels; j++) {
7554 out[info.outOffset[j]] = in[info.inOffset[j]];
7557 out += info.outJump;
7560 else if (info.inFormat == RTAUDIO_FLOAT64) {
7561 Float64 *in = (Float64 *)inBuffer;
7562 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7563 for (j=0; j<info.channels; j++) {
7564 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7567 out += info.outJump;
7571 else if (info.outFormat == RTAUDIO_SINT32) {
7572 Int32 *out = (Int32 *)outBuffer;
7573 if (info.inFormat == RTAUDIO_SINT8) {
7574 signed char *in = (signed char *)inBuffer;
7575 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7576 for (j=0; j<info.channels; j++) {
7577 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7578 out[info.outOffset[j]] <<= 24;
7581 out += info.outJump;
7584 else if (info.inFormat == RTAUDIO_SINT16) {
7585 Int16 *in = (Int16 *)inBuffer;
7586 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7587 for (j=0; j<info.channels; j++) {
7588 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7589 out[info.outOffset[j]] <<= 16;
7592 out += info.outJump;
7595 else if (info.inFormat == RTAUDIO_SINT24) { // Hmmm ... we could just leave it in the lower 3 bytes
7596 Int32 *in = (Int32 *)inBuffer;
7597 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7598 for (j=0; j<info.channels; j++) {
7599 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7600 out[info.outOffset[j]] <<= 8;
7603 out += info.outJump;
7606 else if (info.inFormat == RTAUDIO_SINT32) {
7607 // Channel compensation and/or (de)interleaving only.
7608 Int32 *in = (Int32 *)inBuffer;
7609 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7610 for (j=0; j<info.channels; j++) {
7611 out[info.outOffset[j]] = in[info.inOffset[j]];
7614 out += info.outJump;
7617 else if (info.inFormat == RTAUDIO_FLOAT32) {
7618 Float32 *in = (Float32 *)inBuffer;
7619 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7620 for (j=0; j<info.channels; j++) {
7621 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
7624 out += info.outJump;
7627 else if (info.inFormat == RTAUDIO_FLOAT64) {
7628 Float64 *in = (Float64 *)inBuffer;
7629 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7630 for (j=0; j<info.channels; j++) {
7631 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.5 - 0.5);
7634 out += info.outJump;
7638 else if (info.outFormat == RTAUDIO_SINT24) {
7639 Int32 *out = (Int32 *)outBuffer;
7640 if (info.inFormat == RTAUDIO_SINT8) {
7641 signed char *in = (signed char *)inBuffer;
7642 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7643 for (j=0; j<info.channels; j++) {
7644 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7645 out[info.outOffset[j]] <<= 16;
7648 out += info.outJump;
7651 else if (info.inFormat == RTAUDIO_SINT16) {
7652 Int16 *in = (Int16 *)inBuffer;
7653 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7654 for (j=0; j<info.channels; j++) {
7655 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7656 out[info.outOffset[j]] <<= 8;
7659 out += info.outJump;
7662 else if (info.inFormat == RTAUDIO_SINT24) {
7663 // Channel compensation and/or (de)interleaving only.
7664 Int32 *in = (Int32 *)inBuffer;
7665 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7666 for (j=0; j<info.channels; j++) {
7667 out[info.outOffset[j]] = in[info.inOffset[j]];
7670 out += info.outJump;
7673 else if (info.inFormat == RTAUDIO_SINT32) {
7674 Int32 *in = (Int32 *)inBuffer;
7675 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7676 for (j=0; j<info.channels; j++) {
7677 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7678 out[info.outOffset[j]] >>= 8;
7681 out += info.outJump;
7684 else if (info.inFormat == RTAUDIO_FLOAT32) {
7685 Float32 *in = (Float32 *)inBuffer;
7686 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7687 for (j=0; j<info.channels; j++) {
7688 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
7691 out += info.outJump;
7694 else if (info.inFormat == RTAUDIO_FLOAT64) {
7695 Float64 *in = (Float64 *)inBuffer;
7696 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7697 for (j=0; j<info.channels; j++) {
7698 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388607.5 - 0.5);
7701 out += info.outJump;
7705 else if (info.outFormat == RTAUDIO_SINT16) {
7706 Int16 *out = (Int16 *)outBuffer;
7707 if (info.inFormat == RTAUDIO_SINT8) {
7708 signed char *in = (signed char *)inBuffer;
7709 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7710 for (j=0; j<info.channels; j++) {
7711 out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
7712 out[info.outOffset[j]] <<= 8;
7715 out += info.outJump;
7718 else if (info.inFormat == RTAUDIO_SINT16) {
7719 // Channel compensation and/or (de)interleaving only.
7720 Int16 *in = (Int16 *)inBuffer;
7721 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7722 for (j=0; j<info.channels; j++) {
7723 out[info.outOffset[j]] = in[info.inOffset[j]];
7726 out += info.outJump;
7729 else if (info.inFormat == RTAUDIO_SINT24) {
7730 Int32 *in = (Int32 *)inBuffer;
7731 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7732 for (j=0; j<info.channels; j++) {
7733 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 8) & 0x0000ffff);
7736 out += info.outJump;
7739 else if (info.inFormat == RTAUDIO_SINT32) {
7740 Int32 *in = (Int32 *)inBuffer;
7741 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7742 for (j=0; j<info.channels; j++) {
7743 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
7746 out += info.outJump;
7749 else if (info.inFormat == RTAUDIO_FLOAT32) {
7750 Float32 *in = (Float32 *)inBuffer;
7751 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7752 for (j=0; j<info.channels; j++) {
7753 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
7756 out += info.outJump;
7759 else if (info.inFormat == RTAUDIO_FLOAT64) {
7760 Float64 *in = (Float64 *)inBuffer;
7761 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7762 for (j=0; j<info.channels; j++) {
7763 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.5 - 0.5);
7766 out += info.outJump;
7770 else if (info.outFormat == RTAUDIO_SINT8) {
7771 signed char *out = (signed char *)outBuffer;
7772 if (info.inFormat == RTAUDIO_SINT8) {
7773 // Channel compensation and/or (de)interleaving only.
7774 signed char *in = (signed char *)inBuffer;
7775 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7776 for (j=0; j<info.channels; j++) {
7777 out[info.outOffset[j]] = in[info.inOffset[j]];
7780 out += info.outJump;
7783 if (info.inFormat == RTAUDIO_SINT16) {
7784 Int16 *in = (Int16 *)inBuffer;
7785 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7786 for (j=0; j<info.channels; j++) {
7787 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
7790 out += info.outJump;
7793 else if (info.inFormat == RTAUDIO_SINT24) {
7794 Int32 *in = (Int32 *)inBuffer;
7795 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7796 for (j=0; j<info.channels; j++) {
7797 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 16) & 0x000000ff);
7800 out += info.outJump;
7803 else if (info.inFormat == RTAUDIO_SINT32) {
7804 Int32 *in = (Int32 *)inBuffer;
7805 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7806 for (j=0; j<info.channels; j++) {
7807 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
7810 out += info.outJump;
7813 else if (info.inFormat == RTAUDIO_FLOAT32) {
7814 Float32 *in = (Float32 *)inBuffer;
7815 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7816 for (j=0; j<info.channels; j++) {
7817 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
7820 out += info.outJump;
7823 else if (info.inFormat == RTAUDIO_FLOAT64) {
7824 Float64 *in = (Float64 *)inBuffer;
7825 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7826 for (j=0; j<info.channels; j++) {
7827 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.5 - 0.5);
7830 out += info.outJump;
7836 //static inline uint16_t bswap_16(uint16_t x) { return (x>>8) | (x<<8); }
7837 //static inline uint32_t bswap_32(uint32_t x) { return (bswap_16(x&0xffff)<<16) | (bswap_16(x>>16)); }
7838 //static inline uint64_t bswap_64(uint64_t x) { return (((unsigned long long)bswap_32(x&0xffffffffull))<<32) | (bswap_32(x>>32)); }
7840 void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
7846 if ( format == RTAUDIO_SINT16 ) {
7847 for ( unsigned int i=0; i<samples; i++ ) {
7848 // Swap 1st and 2nd bytes.
7853 // Increment 2 bytes.
7857 else if ( format == RTAUDIO_SINT24 ||
7858 format == RTAUDIO_SINT32 ||
7859 format == RTAUDIO_FLOAT32 ) {
7860 for ( unsigned int i=0; i<samples; i++ ) {
7861 // Swap 1st and 4th bytes.
7866 // Swap 2nd and 3rd bytes.
7872 // Increment 3 more bytes.
7876 else if ( format == RTAUDIO_FLOAT64 ) {
7877 for ( unsigned int i=0; i<samples; i++ ) {
7878 // Swap 1st and 8th bytes
7883 // Swap 2nd and 7th bytes
7889 // Swap 3rd and 6th bytes
7895 // Swap 4th and 5th bytes
7901 // Increment 5 more bytes.
7907 // Indentation settings for Vim and Emacs
7910 // c-basic-offset: 2
7911 // indent-tabs-mode: nil
7914 // vim: et sts=2 sw=2