1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), SGI, Macintosh OS X (CoreAudio and Jack), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2007 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 asked to send the modifications to the original developer so that
28 they can be incorporated into the canonical version. This is,
29 however, not a binding provision of this license.
31 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
34 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
35 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
36 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
39 /************************************************************************/
41 // RtAudio: Version 4.0.3
46 // Static variable definitions.
47 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
48 const unsigned int RtApi::SAMPLE_RATES[] = {
49 4000, 5512, 8000, 9600, 11025, 16000, 22050,
50 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform abstraction for the stream mutex used by RtApi.
#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
// Windows: a CRITICAL_SECTION backs the stream mutex.
#define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
#define MUTEX_DESTROY(A)    DeleteCriticalSection(A)
#define MUTEX_LOCK(A)       EnterCriticalSection(A)
#define MUTEX_UNLOCK(A)     LeaveCriticalSection(A)
#elif defined(__LINUX_ALSA__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
// pthread API
#define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
#define MUTEX_DESTROY(A)    pthread_mutex_destroy(A)
#define MUTEX_LOCK(A)       pthread_mutex_lock(A)
#define MUTEX_UNLOCK(A)     pthread_mutex_unlock(A)
#else
// No realtime API compiled in (dummy build): keep the code compiling
// with no-op-ish definitions.  LOCK/UNLOCK are provided as well so any
// common code that locks the stream mutex still builds.
#define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
#define MUTEX_DESTROY(A)    abs(*A) // dummy definitions
#define MUTEX_LOCK(A)       abs(*A) // dummy definitions
#define MUTEX_UNLOCK(A)     abs(*A) // dummy definitions
#endif
69 // *************************************************** //
71 // RtAudio definitions.
73 // *************************************************** //
75 void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()
79 // The order here will control the order of RtAudio's API search in
81 #if defined(__UNIX_JACK__)
82 apis.push_back( UNIX_JACK );
84 #if defined(__LINUX_ALSA__)
85 apis.push_back( LINUX_ALSA );
87 #if defined(__LINUX_OSS__)
88 apis.push_back( LINUX_OSS );
90 #if defined(__WINDOWS_ASIO__)
91 apis.push_back( WINDOWS_ASIO );
93 #if defined(__WINDOWS_DS__)
94 apis.push_back( WINDOWS_DS );
96 #if defined(__MACOSX_CORE__)
97 apis.push_back( MACOSX_CORE );
99 #if defined(__RTAUDIO_DUMMY__)
100 apis.push_back( RTAUDIO_DUMMY );
104 void RtAudio :: openRtApi( RtAudio::Api api )
106 #if defined(__UNIX_JACK__)
107 if ( api == UNIX_JACK )
108 rtapi_ = new RtApiJack();
110 #if defined(__LINUX_ALSA__)
111 if ( api == LINUX_ALSA )
112 rtapi_ = new RtApiAlsa();
114 #if defined(__LINUX_OSS__)
115 if ( api == LINUX_OSS )
116 rtapi_ = new RtApiOss();
118 #if defined(__WINDOWS_ASIO__)
119 if ( api == WINDOWS_ASIO )
120 rtapi_ = new RtApiAsio();
122 #if defined(__WINDOWS_DS__)
123 if ( api == WINDOWS_DS )
124 rtapi_ = new RtApiDs();
126 #if defined(__MACOSX_CORE__)
127 if ( api == MACOSX_CORE )
128 rtapi_ = new RtApiCore();
130 #if defined(__RTAUDIO_DUMMY__)
131 if ( api == RTAUDIO_DUMMY )
132 rtapi_ = new RtApiDummy();
136 RtAudio :: RtAudio( RtAudio::Api api ) throw()
140 if ( api != UNSPECIFIED ) {
141 // Attempt to open the specified API.
143 if ( rtapi_ ) return;
145 // No compiled support for specified API value. Issue a debug
146 // warning and continue as if no API was specified.
147 std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
150 // Iterate through the compiled APIs and return as soon as we find
151 // one with at least one device or we reach the end of the list.
152 std::vector< RtAudio::Api > apis;
153 getCompiledApi( apis );
154 for ( unsigned int i=0; i<apis.size(); i++ ) {
155 openRtApi( apis[i] );
156 if ( rtapi_->getDeviceCount() ) break;
159 if ( rtapi_ ) return;
161 // It should not be possible to get here because the preprocessor
162 // definition __RTAUDIO_DUMMY__ is automatically defined if no
163 // API-specific definitions are passed to the compiler. But just in
164 // case something weird happens, we'll print out an error message.
165 std::cerr << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
168 RtAudio :: ~RtAudio() throw()
173 void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
174 RtAudio::StreamParameters *inputParameters,
175 RtAudioFormat format, unsigned int sampleRate,
176 unsigned int *bufferFrames,
177 RtAudioCallback callback, void *userData,
178 RtAudio::StreamOptions *options )
180 return rtapi_->openStream( outputParameters, inputParameters, format,
181 sampleRate, bufferFrames, callback,
185 // *************************************************** //
187 // Public RtApi definitions (see end of file for
188 // private or protected utility functions).
190 // *************************************************** //
194 stream_.state = STREAM_CLOSED;
195 stream_.mode = UNINITIALIZED;
196 stream_.apiHandle = 0;
197 stream_.userBuffer[0] = 0;
198 stream_.userBuffer[1] = 0;
199 MUTEX_INITIALIZE( &stream_.mutex );
200 showWarnings_ = true;
205 MUTEX_DESTROY( &stream_.mutex );
208 void RtApi :: openStream( RtAudio::StreamParameters *oParams,
209 RtAudio::StreamParameters *iParams,
210 RtAudioFormat format, unsigned int sampleRate,
211 unsigned int *bufferFrames,
212 RtAudioCallback callback, void *userData,
213 RtAudio::StreamOptions *options )
215 if ( stream_.state != STREAM_CLOSED ) {
216 errorText_ = "RtApi::openStream: a stream is already open!";
217 error( RtError::INVALID_USE );
220 if ( oParams && oParams->nChannels < 1 ) {
221 errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
222 error( RtError::INVALID_USE );
225 if ( iParams && iParams->nChannels < 1 ) {
226 errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
227 error( RtError::INVALID_USE );
230 if ( oParams == NULL && iParams == NULL ) {
231 errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
232 error( RtError::INVALID_USE );
235 if ( formatBytes(format) == 0 ) {
236 errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
237 error( RtError::INVALID_USE );
240 unsigned int nDevices = getDeviceCount();
241 unsigned int oChannels = 0;
243 oChannels = oParams->nChannels;
244 if ( oParams->deviceId >= nDevices ) {
245 errorText_ = "RtApi::openStream: output device parameter value is invalid.";
246 error( RtError::INVALID_USE );
250 unsigned int iChannels = 0;
252 iChannels = iParams->nChannels;
253 if ( iParams->deviceId >= nDevices ) {
254 errorText_ = "RtApi::openStream: input device parameter value is invalid.";
255 error( RtError::INVALID_USE );
262 if ( oChannels > 0 ) {
264 result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
265 sampleRate, format, bufferFrames, options );
266 if ( result == false ) error( RtError::SYSTEM_ERROR );
269 if ( iChannels > 0 ) {
271 result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
272 sampleRate, format, bufferFrames, options );
273 if ( result == false ) {
274 if ( oChannels > 0 ) closeStream();
275 error( RtError::SYSTEM_ERROR );
279 stream_.callbackInfo.callback = (void *) callback;
280 stream_.callbackInfo.userData = userData;
282 if ( options ) options->numberOfBuffers = stream_.nBuffers;
283 stream_.state = STREAM_STOPPED;
286 unsigned int RtApi :: getDefaultInputDevice( void )
288 // Should be implemented in subclasses if possible.
292 unsigned int RtApi :: getDefaultOutputDevice( void )
294 // Should be implemented in subclasses if possible.
298 void RtApi :: closeStream( void )
300 // MUST be implemented in subclasses!
304 bool RtApi :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
305 unsigned int firstChannel, unsigned int sampleRate,
306 RtAudioFormat format, unsigned int *bufferSize,
307 RtAudio::StreamOptions *options )
309 // MUST be implemented in subclasses!
313 void RtApi :: tickStreamTime( void )
315 // Subclasses that do not provide their own implementation of
316 // getStreamTime should call this function once per buffer I/O to
317 // provide basic stream time support.
319 stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
321 #if defined( HAVE_GETTIMEOFDAY )
322 gettimeofday( &stream_.lastTickTimestamp, NULL );
326 long RtApi :: getStreamLatency( void )
330 long totalLatency = 0;
331 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
332 totalLatency = stream_.latency[0];
333 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
334 totalLatency += stream_.latency[1];
339 double RtApi :: getStreamTime( void )
343 #if defined( HAVE_GETTIMEOFDAY )
344 // Return a very accurate estimate of the stream time by
345 // adding in the elapsed time since the last tick.
349 if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
350 return stream_.streamTime;
352 gettimeofday( &now, NULL );
353 then = stream_.lastTickTimestamp;
354 return stream_.streamTime +
355 ((now.tv_sec + 0.000001 * now.tv_usec) -
356 (then.tv_sec + 0.000001 * then.tv_usec));
358 return stream_.streamTime;
363 // *************************************************** //
365 // OS/API-specific methods.
367 // *************************************************** //
369 #if defined(__MACOSX_CORE__)
371 // The OS X CoreAudio API is designed to use a separate callback
372 // procedure for each of its audio devices. A single RtAudio duplex
373 // stream using two different devices is supported here, though it
374 // cannot be guaranteed to always behave correctly because we cannot
375 // synchronize these two callbacks.
377 // A property listener is installed for over/underrun information.
378 // However, no functionality is currently provided to allow property
379 // listeners to trigger user handlers because it is unclear what could
380 // be done if a critical stream parameter (buffer size, sample rate,
381 // device disconnect) notification arrived. The listeners entail
382 // quite a bit of extra code and most likely, a user program wouldn't
383 // be prepared for the result anyway. However, we do provide a flag
384 // to the client callback function to inform of an over/underrun.
386 // The mechanism for querying and setting system parameters was
387 // updated (and perhaps simplified) in OS-X version 10.4. However,
388 // since 10.4 support is not necessarily available to all users, I've
389 // decided not to update the respective code at this time. Perhaps
390 // this will happen when Apple makes 10.4 free for everyone. :-)
392 // A structure to hold various information related to the CoreAudio API
395 AudioDeviceID id[2]; // device ids
396 UInt32 iStream[2]; // device stream index (first for mono mode)
399 pthread_cond_t condition;
400 int drainCounter; // Tracks callback counts when draining
401 bool internalDrain; // Indicates if stop is initiated from callback or not.
404 :deviceBuffer(0), drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
407 RtApiCore :: RtApiCore()
409 // Nothing to do here.
412 RtApiCore :: ~RtApiCore()
414 // The subclass destructor gets called before the base class
415 // destructor, so close an existing stream before deallocating
416 // apiDeviceId memory.
417 if ( stream_.state != STREAM_CLOSED ) closeStream();
420 unsigned int RtApiCore :: getDeviceCount( void )
422 // Find out how many audio devices there are, if any.
424 OSStatus result = AudioHardwareGetPropertyInfo( kAudioHardwarePropertyDevices, &dataSize, NULL );
425 if ( result != noErr ) {
426 errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
427 error( RtError::WARNING );
431 return dataSize / sizeof( AudioDeviceID );
434 unsigned int RtApiCore :: getDefaultInputDevice( void )
436 unsigned int nDevices = getDeviceCount();
437 if ( nDevices <= 1 ) return 0;
440 UInt32 dataSize = sizeof( AudioDeviceID );
441 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
444 if ( result != noErr ) {
445 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
446 error( RtError::WARNING );
450 dataSize *= nDevices;
451 AudioDeviceID deviceList[ nDevices ];
452 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
453 if ( result != noErr ) {
454 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
455 error( RtError::WARNING );
459 for ( unsigned int i=0; i<nDevices; i++ )
460 if ( id == deviceList[i] ) return i;
462 errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
463 error( RtError::WARNING );
467 unsigned int RtApiCore :: getDefaultOutputDevice( void )
469 unsigned int nDevices = getDeviceCount();
470 if ( nDevices <= 1 ) return 0;
473 UInt32 dataSize = sizeof( AudioDeviceID );
474 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
477 if ( result != noErr ) {
478 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
479 error( RtError::WARNING );
483 dataSize *= nDevices;
484 AudioDeviceID deviceList[ nDevices ];
485 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
486 if ( result != noErr ) {
487 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
488 error( RtError::WARNING );
492 for ( unsigned int i=0; i<nDevices; i++ )
493 if ( id == deviceList[i] ) return i;
495 errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
496 error( RtError::WARNING );
500 RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
502 RtAudio::DeviceInfo info;
506 unsigned int nDevices = getDeviceCount();
507 if ( nDevices == 0 ) {
508 errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
509 error( RtError::INVALID_USE );
512 if ( device >= nDevices ) {
513 errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
514 error( RtError::INVALID_USE );
517 AudioDeviceID deviceList[ nDevices ];
518 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
519 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
520 if ( result != noErr ) {
521 errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
522 error( RtError::WARNING );
526 AudioDeviceID id = deviceList[ device ];
528 // Get the device name.
532 result = AudioDeviceGetProperty( id, 0, false,
533 kAudioDevicePropertyDeviceManufacturer,
536 if ( result != noErr ) {
537 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
538 errorText_ = errorStream_.str();
539 error( RtError::WARNING );
542 info.name.append( (const char *)name, strlen(name) );
543 info.name.append( ": " );
546 result = AudioDeviceGetProperty( id, 0, false,
547 kAudioDevicePropertyDeviceName,
549 if ( result != noErr ) {
550 errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
551 errorText_ = errorStream_.str();
552 error( RtError::WARNING );
555 info.name.append( (const char *)name, strlen(name) );
557 // Get the output stream "configuration".
558 AudioBufferList *bufferList = nil;
559 result = AudioDeviceGetPropertyInfo( id, 0, false,
560 kAudioDevicePropertyStreamConfiguration,
562 if (result != noErr || dataSize == 0) {
563 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
564 errorText_ = errorStream_.str();
565 error( RtError::WARNING );
569 // Allocate the AudioBufferList.
570 bufferList = (AudioBufferList *) malloc( dataSize );
571 if ( bufferList == NULL ) {
572 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
573 error( RtError::WARNING );
577 result = AudioDeviceGetProperty( id, 0, false,
578 kAudioDevicePropertyStreamConfiguration,
579 &dataSize, bufferList );
580 if ( result != noErr ) {
582 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
583 errorText_ = errorStream_.str();
584 error( RtError::WARNING );
588 // Get output channel information.
589 unsigned int i, nStreams = bufferList->mNumberBuffers;
590 for ( i=0; i<nStreams; i++ )
591 info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
594 // Get the input stream "configuration".
595 result = AudioDeviceGetPropertyInfo( id, 0, true,
596 kAudioDevicePropertyStreamConfiguration,
598 if (result != noErr || dataSize == 0) {
599 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
600 errorText_ = errorStream_.str();
601 error( RtError::WARNING );
605 // Allocate the AudioBufferList.
606 bufferList = (AudioBufferList *) malloc( dataSize );
607 if ( bufferList == NULL ) {
608 errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
609 error( RtError::WARNING );
613 result = AudioDeviceGetProperty( id, 0, true,
614 kAudioDevicePropertyStreamConfiguration,
615 &dataSize, bufferList );
616 if ( result != noErr ) {
618 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
619 errorText_ = errorStream_.str();
620 error( RtError::WARNING );
624 // Get input channel information.
625 nStreams = bufferList->mNumberBuffers;
626 for ( i=0; i<nStreams; i++ )
627 info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
630 // If device opens for both playback and capture, we determine the channels.
631 if ( info.outputChannels > 0 && info.inputChannels > 0 )
632 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
634 // Probe the device sample rates.
635 bool isInput = false;
636 if ( info.outputChannels == 0 ) isInput = true;
638 // Determine the supported sample rates.
639 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
640 kAudioDevicePropertyAvailableNominalSampleRates,
643 if ( result != kAudioHardwareNoError || dataSize == 0 ) {
644 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
645 errorText_ = errorStream_.str();
646 error( RtError::WARNING );
650 UInt32 nRanges = dataSize / sizeof( AudioValueRange );
651 AudioValueRange rangeList[ nRanges ];
652 result = AudioDeviceGetProperty( id, 0, isInput,
653 kAudioDevicePropertyAvailableNominalSampleRates,
654 &dataSize, &rangeList );
656 if ( result != kAudioHardwareNoError ) {
657 errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
658 errorText_ = errorStream_.str();
659 error( RtError::WARNING );
663 Float64 minimumRate = 100000000.0, maximumRate = 0.0;
664 for ( UInt32 i=0; i<nRanges; i++ ) {
665 if ( rangeList[i].mMinimum < minimumRate ) minimumRate = rangeList[i].mMinimum;
666 if ( rangeList[i].mMaximum > maximumRate ) maximumRate = rangeList[i].mMaximum;
669 info.sampleRates.clear();
670 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
671 if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate )
672 info.sampleRates.push_back( SAMPLE_RATES[k] );
675 if ( info.sampleRates.size() == 0 ) {
676 errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";
677 errorText_ = errorStream_.str();
678 error( RtError::WARNING );
682 // CoreAudio always uses 32-bit floating point data for PCM streams.
683 // Thus, any other "physical" formats supported by the device are of
684 // no interest to the client.
685 info.nativeFormats = RTAUDIO_FLOAT32;
687 if ( getDefaultOutputDevice() == device )
688 info.isDefaultOutput = true;
689 if ( getDefaultInputDevice() == device )
690 info.isDefaultInput = true;
696 OSStatus callbackHandler( AudioDeviceID inDevice,
697 const AudioTimeStamp* inNow,
698 const AudioBufferList* inInputData,
699 const AudioTimeStamp* inInputTime,
700 AudioBufferList* outOutputData,
701 const AudioTimeStamp* inOutputTime,
704 CallbackInfo *info = (CallbackInfo *) infoPointer;
706 RtApiCore *object = (RtApiCore *) info->object;
707 if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
708 return kAudioHardwareUnspecifiedError;
710 return kAudioHardwareNoError;
713 OSStatus deviceListener( AudioDeviceID inDevice,
716 AudioDevicePropertyID propertyID,
717 void* handlePointer )
719 CoreHandle *handle = (CoreHandle *) handlePointer;
720 if ( propertyID == kAudioDeviceProcessorOverload ) {
722 handle->xrun[1] = true;
724 handle->xrun[0] = true;
727 return kAudioHardwareNoError;
730 static bool hasProperty( AudioDeviceID id, UInt32 channel, bool isInput, AudioDevicePropertyID property )
732 OSStatus result = AudioDeviceGetPropertyInfo( id, channel, isInput, property, NULL, NULL );
736 bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
737 unsigned int firstChannel, unsigned int sampleRate,
738 RtAudioFormat format, unsigned int *bufferSize,
739 RtAudio::StreamOptions *options )
742 unsigned int nDevices = getDeviceCount();
743 if ( nDevices == 0 ) {
744 // This should not happen because a check is made before this function is called.
745 errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
749 if ( device >= nDevices ) {
750 // This should not happen because a check is made before this function is called.
751 errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
755 AudioDeviceID deviceList[ nDevices ];
756 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
757 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
758 if ( result != noErr ) {
759 errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
763 AudioDeviceID id = deviceList[ device ];
765 // Setup for stream mode.
766 bool isInput = false;
767 if ( mode == INPUT ) isInput = true;
769 // Set or disable "hog" mode.
770 dataSize = sizeof( UInt32 );
772 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) doHog = 1;
773 result = AudioHardwareSetProperty( kAudioHardwarePropertyHogModeIsAllowed, dataSize, &doHog );
774 if ( result != noErr ) {
775 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
776 errorText_ = errorStream_.str();
780 // Get the stream "configuration".
781 AudioBufferList *bufferList;
782 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
783 kAudioDevicePropertyStreamConfiguration,
785 if (result != noErr || dataSize == 0) {
786 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
787 errorText_ = errorStream_.str();
791 // Allocate the AudioBufferList.
792 bufferList = (AudioBufferList *) malloc( dataSize );
793 if ( bufferList == NULL ) {
794 errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
798 result = AudioDeviceGetProperty( id, 0, isInput,
799 kAudioDevicePropertyStreamConfiguration,
800 &dataSize, bufferList );
801 if ( result != noErr ) {
803 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
804 errorText_ = errorStream_.str();
808 // Search for a stream that contains the desired number of
809 // channels. CoreAudio devices can have an arbitrary number of
810 // streams and each stream can have an arbitrary number of channels.
811 // For each stream, a single buffer of interleaved samples is
812 // provided. RtAudio currently only supports the use of one stream
813 // of interleaved data or multiple consecutive single-channel
814 // streams. Thus, our search below is limited to these two
816 unsigned int streamChannels = 0, nStreams = 0;
817 UInt32 iChannel = 0, iStream = 0;
818 unsigned int offsetCounter = firstChannel;
819 stream_.deviceInterleaved[mode] = true;
820 nStreams = bufferList->mNumberBuffers;
821 bool foundStream = false;
823 for ( iStream=0; iStream<nStreams; iStream++ ) {
824 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
825 if ( streamChannels >= channels + offsetCounter ) {
826 iChannel += offsetCounter;
830 if ( streamChannels > offsetCounter ) break;
831 offsetCounter -= streamChannels;
832 iChannel += streamChannels;
835 // If we didn't find a single stream above, see if we can meet
836 // the channel specification in mono mode (i.e. using separate
837 // non-interleaved buffers). This can only work if there are N
838 // consecutive one-channel streams, where N is the number of
839 // desired channels (+ channel offset).
840 if ( foundStream == false ) {
841 unsigned int counter = 0;
842 offsetCounter = firstChannel;
844 for ( iStream=0; iStream<nStreams; iStream++ ) {
845 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
846 if ( offsetCounter ) {
847 if ( streamChannels > offsetCounter ) break;
848 offsetCounter -= streamChannels;
850 else if ( streamChannels == 1 )
854 if ( counter == channels ) {
855 iStream -= channels - 1;
856 iChannel -= channels - 1;
857 stream_.deviceInterleaved[mode] = false;
861 iChannel += streamChannels;
866 if ( foundStream == false ) {
867 errorStream_ << "RtApiCore::probeDeviceOpen: unable to find OS-X stream on device (" << device << ") for requested channels.";
868 errorText_ = errorStream_.str();
872 // Determine the buffer size.
873 AudioValueRange bufferRange;
874 dataSize = sizeof( AudioValueRange );
875 result = AudioDeviceGetProperty( id, 0, isInput,
876 kAudioDevicePropertyBufferFrameSizeRange,
877 &dataSize, &bufferRange );
878 if ( result != noErr ) {
879 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
880 errorText_ = errorStream_.str();
884 if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
885 else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
886 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
888 // Set the buffer size. For mono mode, I'm assuming we only need to
889 // make this setting for the master channel.
890 UInt32 theSize = (UInt32) *bufferSize;
891 dataSize = sizeof( UInt32 );
892 result = AudioDeviceSetProperty( id, NULL, 0, isInput,
893 kAudioDevicePropertyBufferFrameSize,
894 dataSize, &theSize );
896 if ( result != noErr ) {
897 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
898 errorText_ = errorStream_.str();
902 // If attempting to setup a duplex stream, the bufferSize parameter
903 // MUST be the same in both directions!
904 *bufferSize = theSize;
905 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
906 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
907 errorText_ = errorStream_.str();
911 stream_.bufferSize = *bufferSize;
912 stream_.nBuffers = 1;
914 // Get the stream ID(s) so we can set the stream format. In mono
915 // mode, we'll have to do this for each stream (channel).
916 AudioStreamID streamIDs[ nStreams ];
917 dataSize = nStreams * sizeof( AudioStreamID );
918 result = AudioDeviceGetProperty( id, 0, isInput,
919 kAudioDevicePropertyStreams,
920 &dataSize, &streamIDs );
921 if ( result != noErr ) {
922 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream ID(s) for device (" << device << ").";
923 errorText_ = errorStream_.str();
927 // Now set the stream format. Also, check the physical format of the
928 // device and change that if necessary.
929 AudioStreamBasicDescription description;
930 dataSize = sizeof( AudioStreamBasicDescription );
931 if ( stream_.deviceInterleaved[mode] ) nStreams = 1;
932 else nStreams = channels;
935 for ( unsigned int i=0; i<nStreams; i++ ) {
937 result = AudioStreamGetProperty( streamIDs[iStream+i], 0,
938 kAudioStreamPropertyVirtualFormat,
939 &dataSize, &description );
941 if ( result != noErr ) {
942 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
943 errorText_ = errorStream_.str();
947 // Set the sample rate and data format id. However, only make the
948 // change if the sample rate is not within 1.0 of the desired
949 // rate and the format is not linear pcm.
950 updateFormat = false;
951 if ( fabs( description.mSampleRate - (double)sampleRate ) > 1.0 ) {
952 description.mSampleRate = (double) sampleRate;
956 if ( description.mFormatID != kAudioFormatLinearPCM ) {
957 description.mFormatID = kAudioFormatLinearPCM;
961 if ( updateFormat ) {
962 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0,
963 kAudioStreamPropertyVirtualFormat,
964 dataSize, &description );
965 if ( result != noErr ) {
966 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
967 errorText_ = errorStream_.str();
972 // Now check the physical format.
973 result = AudioStreamGetProperty( streamIDs[iStream+i], 0,
974 kAudioStreamPropertyPhysicalFormat,
975 &dataSize, &description );
976 if ( result != noErr ) {
977 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
978 errorText_ = errorStream_.str();
982 if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 24 ) {
983 description.mFormatID = kAudioFormatLinearPCM;
984 AudioStreamBasicDescription testDescription = description;
985 unsigned long formatFlags;
987 // We'll try higher bit rates first and then work our way down.
988 testDescription.mBitsPerChannel = 32;
989 formatFlags = description.mFormatFlags | kLinearPCMFormatFlagIsFloat & ~kLinearPCMFormatFlagIsSignedInteger;
990 testDescription.mFormatFlags = formatFlags;
991 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
992 if ( result == noErr ) continue;
994 testDescription = description;
995 testDescription.mBitsPerChannel = 32;
996 formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger) & ~kLinearPCMFormatFlagIsFloat;
997 testDescription.mFormatFlags = formatFlags;
998 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
999 if ( result == noErr ) continue;
1001 testDescription = description;
1002 testDescription.mBitsPerChannel = 24;
1003 testDescription.mFormatFlags = formatFlags;
1004 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1005 if ( result == noErr ) continue;
1007 testDescription = description;
1008 testDescription.mBitsPerChannel = 16;
1009 testDescription.mFormatFlags = formatFlags;
1010 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1011 if ( result == noErr ) continue;
1013 testDescription = description;
1014 testDescription.mBitsPerChannel = 8;
1015 testDescription.mFormatFlags = formatFlags;
1016 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1017 if ( result != noErr ) {
1018 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
1019 errorText_ = errorStream_.str();
1025 // Get the stream latency. There can be latency in both the device
1026 // and the stream. First, attempt to get the device latency on the
1027 // master channel or the first open channel. Errors that might
1028 // occur here are not deemed critical.
1029 UInt32 latency, channel = 0;
1030 dataSize = sizeof( UInt32 );
1031 AudioDevicePropertyID property = kAudioDevicePropertyLatency;
1032 for ( int i=0; i<2; i++ ) {
1033 if ( hasProperty( id, channel, isInput, property ) == true ) break;
1034 channel = iChannel + 1 + i;
1036 if ( channel <= iChannel + 1 ) {
1037 result = AudioDeviceGetProperty( id, channel, isInput, property, &dataSize, &latency );
1038 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
1040 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
1041 errorText_ = errorStream_.str();
1042 error( RtError::WARNING );
1046 // Now try to get the stream latency. For "mono" mode, I assume the
1047 // latency is equal for all single-channel streams.
1048 result = AudioStreamGetProperty( streamIDs[iStream], 0, property, &dataSize, &latency );
1049 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] += latency;
1051 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream latency for device (" << device << ").";
1052 errorText_ = errorStream_.str();
1053 error( RtError::WARNING );
1056 // Byte-swapping: According to AudioHardware.h, the stream data will
1057 // always be presented in native-endian format, so we should never
1058 // need to byte swap.
1059 stream_.doByteSwap[mode] = false;
1061 // From the CoreAudio documentation, PCM data must be supplied as
1063 stream_.userFormat = format;
1064 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1066 if ( stream_.deviceInterleaved[mode] )
1067 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
1069 stream_.nDeviceChannels[mode] = channels;
1070 stream_.nUserChannels[mode] = channels;
1071 stream_.channelOffset[mode] = iChannel; // offset within a CoreAudio stream
1072 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1073 else stream_.userInterleaved = true;
1075 // Set flags for buffer conversion.
1076 stream_.doConvertBuffer[mode] = false;
1077 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1078 stream_.doConvertBuffer[mode] = true;
1079 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
1080 stream_.doConvertBuffer[mode] = true;
1081 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
1082 stream_.nUserChannels[mode] > 1 )
1083 stream_.doConvertBuffer[mode] = true;
1085 // Allocate our CoreHandle structure for the stream.
1086 CoreHandle *handle = 0;
1087 if ( stream_.apiHandle == 0 ) {
1089 handle = new CoreHandle;
1091 catch ( std::bad_alloc& ) {
1092 errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
1096 if ( pthread_cond_init( &handle->condition, NULL ) ) {
1097 errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
1100 stream_.apiHandle = (void *) handle;
1103 handle = (CoreHandle *) stream_.apiHandle;
1104 handle->iStream[mode] = iStream;
1105 handle->id[mode] = id;
1107 // Allocate necessary internal buffers.
1108 unsigned long bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1109 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1110 if ( stream_.userBuffer[mode] == NULL ) {
1111 errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
1115 // If possible, we will make use of the CoreAudio stream buffers as
1116 // "device buffers". However, we can't do this if the device
1117 // buffers are non-interleaved ("mono" mode).
1118 if ( !stream_.deviceInterleaved[mode] && stream_.doConvertBuffer[mode] ) {
1120 bool makeBuffer = true;
1121 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
1122 if ( mode == INPUT ) {
1123 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1124 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1125 if ( bufferBytes <= bytesOut ) makeBuffer = false;
1130 bufferBytes *= *bufferSize;
1131 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1132 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1133 if ( stream_.deviceBuffer == NULL ) {
1134 errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
1138 // Save a pointer to our own device buffer in the CoreHandle
1139 // structure because we may need to use the stream_.deviceBuffer
1140 // variable to point to the CoreAudio buffer before buffer
1141 // conversion (if we have a duplex stream with two different
1142 // conversion schemes).
1143 handle->deviceBuffer = stream_.deviceBuffer;
1147 stream_.sampleRate = sampleRate;
1148 stream_.device[mode] = device;
1149 stream_.state = STREAM_STOPPED;
1150 stream_.callbackInfo.object = (void *) this;
1152 // Setup the buffer conversion information structure. We override
1153 // the channel offset value and perform our own setting for that
1155 if ( stream_.doConvertBuffer[mode] ) {
1156 setConvertInfo( mode, 0 );
1158 // Add channel offset for interleaved channels.
1159 if ( firstChannel > 0 && stream_.deviceInterleaved[mode] ) {
1160 if ( mode == OUTPUT ) {
1161 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
1162 stream_.convertInfo[mode].outOffset[k] += firstChannel;
1165 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
1166 stream_.convertInfo[mode].inOffset[k] += firstChannel;
1171 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
1172 // Only one callback procedure per device.
1173 stream_.mode = DUPLEX;
1175 result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
1176 if ( result != noErr ) {
1177 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
1178 errorText_ = errorStream_.str();
1181 if ( stream_.mode == OUTPUT && mode == INPUT )
1182 stream_.mode = DUPLEX;
1184 stream_.mode = mode;
1187 // Setup the device property listener for over/underload.
1188 result = AudioDeviceAddPropertyListener( id, 0, isInput,
1189 kAudioDeviceProcessorOverload,
1190 deviceListener, (void *) handle );
1196 pthread_cond_destroy( &handle->condition );
1198 stream_.apiHandle = 0;
1201 for ( int i=0; i<2; i++ ) {
1202 if ( stream_.userBuffer[i] ) {
1203 free( stream_.userBuffer[i] );
1204 stream_.userBuffer[i] = 0;
1208 if ( stream_.deviceBuffer ) {
1209 free( stream_.deviceBuffer );
1210 stream_.deviceBuffer = 0;
1216 void RtApiCore :: closeStream( void )
1218 if ( stream_.state == STREAM_CLOSED ) {
1219 errorText_ = "RtApiCore::closeStream(): no open stream to close!";
1220 error( RtError::WARNING );
1224 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1225 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1226 if ( stream_.state == STREAM_RUNNING )
1227 AudioDeviceStop( handle->id[0], callbackHandler );
1228 AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
1231 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1232 if ( stream_.state == STREAM_RUNNING )
1233 AudioDeviceStop( handle->id[1], callbackHandler );
1234 AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
1237 for ( int i=0; i<2; i++ ) {
1238 if ( stream_.userBuffer[i] ) {
1239 free( stream_.userBuffer[i] );
1240 stream_.userBuffer[i] = 0;
1244 if ( handle->deviceBuffer ) {
1245 free( handle->deviceBuffer );
1246 stream_.deviceBuffer = 0;
1249 // Destroy pthread condition variable.
1250 pthread_cond_destroy( &handle->condition );
1252 stream_.apiHandle = 0;
1254 stream_.mode = UNINITIALIZED;
1255 stream_.state = STREAM_CLOSED;
1258 void RtApiCore :: startStream( void )
1261 if ( stream_.state == STREAM_RUNNING ) {
1262 errorText_ = "RtApiCore::startStream(): the stream is already running!";
1263 error( RtError::WARNING );
1267 MUTEX_LOCK( &stream_.mutex );
1269 OSStatus result = noErr;
1270 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1271 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1273 result = AudioDeviceStart( handle->id[0], callbackHandler );
1274 if ( result != noErr ) {
1275 errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
1276 errorText_ = errorStream_.str();
1281 if ( stream_.mode == INPUT ||
1282 ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1284 result = AudioDeviceStart( handle->id[1], callbackHandler );
1285 if ( result != noErr ) {
1286 errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
1287 errorText_ = errorStream_.str();
1292 handle->drainCounter = 0;
1293 handle->internalDrain = false;
1294 stream_.state = STREAM_RUNNING;
1297 MUTEX_UNLOCK( &stream_.mutex );
1299 if ( result == noErr ) return;
1300 error( RtError::SYSTEM_ERROR );
1303 void RtApiCore :: stopStream( void )
1306 if ( stream_.state == STREAM_STOPPED ) {
1307 errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
1308 error( RtError::WARNING );
1312 MUTEX_LOCK( &stream_.mutex );
1314 OSStatus result = noErr;
1315 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1316 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1318 if ( handle->drainCounter == 0 ) {
1319 handle->drainCounter = 1;
1320 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
1323 result = AudioDeviceStop( handle->id[0], callbackHandler );
1324 if ( result != noErr ) {
1325 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
1326 errorText_ = errorStream_.str();
1331 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1333 result = AudioDeviceStop( handle->id[1], callbackHandler );
1334 if ( result != noErr ) {
1335 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
1336 errorText_ = errorStream_.str();
1342 MUTEX_UNLOCK( &stream_.mutex );
1344 stream_.state = STREAM_STOPPED;
1345 if ( result == noErr ) return;
1346 error( RtError::SYSTEM_ERROR );
1349 void RtApiCore :: abortStream( void )
1352 if ( stream_.state == STREAM_STOPPED ) {
1353 errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
1354 error( RtError::WARNING );
1358 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1359 handle->drainCounter = 1;
// Per-device IOProc workhorse: pulls fresh data from the user callback,
// moves audio between the user buffers and the CoreAudio AudioBufferList(s)
// (with optional format/interleave conversion), and manages drain/stop
// signaling. Returns SUCCESS/FAILURE to the C callback shim.
// NOTE(review): this listing is fragmented — several structural lines
// (braces, return/abort statements) are missing from the visible text.
1364 bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
1365 const AudioBufferList *inBufferList,
1366 const AudioBufferList *outBufferList )
// A stopped stream is a benign no-op; a closed stream indicates misuse.
1368 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
1369 if ( stream_.state == STREAM_CLOSED ) {
1370 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
1371 error( RtError::WARNING );
1375 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1376 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1378 // Check if we were draining the stream and signal is finished.
// drainCounter > 3 means the zero-fill passes are done; wake a thread
// blocked in stopStream() unless the drain was started internally.
1379 if ( handle->drainCounter > 3 ) {
1380 if ( handle->internalDrain == false )
1381 pthread_cond_signal( &handle->condition );
1387 MUTEX_LOCK( &stream_.mutex );
1389 AudioDeviceID outputDevice = handle->id[0];
1391 // Invoke user callback to get fresh output data UNLESS we are
1392 // draining stream or duplex mode AND the input/output devices are
1393 // different AND this function is called for the input device.
1394 if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
1395 RtAudioCallback callback = (RtAudioCallback) info->callback;
1396 double streamTime = getStreamTime();
1397 RtAudioStreamStatus status = 0;
// Report (and clear) any xrun flags raised by the property listener.
1398 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
1399 status |= RTAUDIO_OUTPUT_UNDERFLOW;
1400 handle->xrun[0] = false;
1402 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
1403 status |= RTAUDIO_INPUT_OVERFLOW;
1404 handle->xrun[1] = false;
// The user's return value drives the drain state machine:
// 0 = continue, 1 = drain then stop, 2 = abort immediately.
1406 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
1407 stream_.bufferSize, streamTime, status, info->userData );
1408 if ( handle->drainCounter == 2 ) {
1409 MUTEX_UNLOCK( &stream_.mutex );
1413 else if ( handle->drainCounter == 1 )
1414 handle->internalDrain = true;
// ---- Output side: fill the CoreAudio output buffers. ----
1417 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
1419 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
1421 if ( stream_.deviceInterleaved[0] ) {
1422 memset( outBufferList->mBuffers[handle->iStream[0]].mData,
1424 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
// Non-interleaved ("mono" mode): one CoreAudio buffer per channel.
1427 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1428 memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1430 outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
1434 else if ( stream_.doConvertBuffer[0] ) {
// Convert directly into the CoreAudio buffer when interleaved;
// otherwise convert into our own buffer and scatter per channel.
1436 if ( stream_.deviceInterleaved[0] )
1437 stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->iStream[0]].mData;
1439 stream_.deviceBuffer = handle->deviceBuffer;
1441 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
1443 if ( !stream_.deviceInterleaved[0] ) {
1444 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1445 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1446 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1447 &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
// No conversion needed: copy user data straight to the device.
1453 if ( stream_.deviceInterleaved[0] ) {
1454 memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
1455 stream_.userBuffer[0],
1456 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1459 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1460 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1461 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1462 &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
// While draining, count zero-filled passes toward the >3 threshold.
1467 if ( handle->drainCounter ) {
1468 handle->drainCounter++;
// ---- Input side: capture from the CoreAudio input buffers. ----
1473 AudioDeviceID inputDevice = handle->id[1];
1474 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
1476 if ( stream_.doConvertBuffer[1] ) {
1478 if ( stream_.deviceInterleaved[1] )
1479 stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->iStream[1]].mData;
// Non-interleaved: gather per-channel buffers before conversion.
1481 stream_.deviceBuffer = (char *) handle->deviceBuffer;
1482 UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
1483 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
1484 memcpy( &stream_.deviceBuffer[i*bufferBytes],
1485 inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
1489 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
// No conversion: copy the interleaved device data directly.
1493 memcpy( stream_.userBuffer[1],
1494 inBufferList->mBuffers[handle->iStream[1]].mData,
1495 inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
1500 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream time by one buffer period.
1502 RtApi::tickStreamTime();
1506 const char* RtApiCore :: getErrorCode( OSStatus code )
1510 case kAudioHardwareNotRunningError:
1511 return "kAudioHardwareNotRunningError";
1513 case kAudioHardwareUnspecifiedError:
1514 return "kAudioHardwareUnspecifiedError";
1516 case kAudioHardwareUnknownPropertyError:
1517 return "kAudioHardwareUnknownPropertyError";
1519 case kAudioHardwareBadPropertySizeError:
1520 return "kAudioHardwareBadPropertySizeError";
1522 case kAudioHardwareIllegalOperationError:
1523 return "kAudioHardwareIllegalOperationError";
1525 case kAudioHardwareBadObjectError:
1526 return "kAudioHardwareBadObjectError";
1528 case kAudioHardwareBadDeviceError:
1529 return "kAudioHardwareBadDeviceError";
1531 case kAudioHardwareBadStreamError:
1532 return "kAudioHardwareBadStreamError";
1534 case kAudioHardwareUnsupportedOperationError:
1535 return "kAudioHardwareUnsupportedOperationError";
1537 case kAudioDeviceUnsupportedFormatError:
1538 return "kAudioDeviceUnsupportedFormatError";
1540 case kAudioDevicePermissionsError:
1541 return "kAudioDevicePermissionsError";
1544 return "CoreAudio unknown error";
1548 //******************** End of __MACOSX_CORE__ *********************//
1551 #if defined(__UNIX_JACK__)
1553 // JACK is a low-latency audio server, originally written for the
1554 // GNU/Linux operating system and now also ported to OS-X. It can
1555 // connect a number of different applications to an audio device, as
1556 // well as allowing them to share audio between themselves.
1558 // When using JACK with RtAudio, "devices" refer to JACK clients that
1559 // have ports connected to the server. The JACK server is typically
1560 // started in a terminal as follows:
1562 // .jackd -d alsa -d hw:0
1564 // or through an interface program such as qjackctl. Many of the
1565 // parameters normally set for a stream are fixed by the JACK server
1566 // and can be specified when the JACK server is started. In
1569 // .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
1571 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
1572 // frames, and number of buffers = 4. Once the server is running, it
1573 // is not possible to override these values. If the values are not
1574 // specified in the command-line, the JACK server uses default values.
1576 // The JACK server does not have to be running when an instance of
1577 // RtApiJack is created, though the function getDeviceCount() will
1578 // report 0 devices found until JACK has been started. When no
1579 // devices are available (i.e., the JACK server is not running), a
1580 // stream cannot be opened.
1582 #include <jack/jack.h>
1585 // A structure to hold various information related to the Jack API
1588 jack_client_t *client;
1589 jack_port_t **ports[2];
1590 std::string deviceName[2];
1592 pthread_cond_t condition;
1593 int drainCounter; // Tracks callback counts when draining
1594 bool internalDrain; // Indicates if stop is initiated from callback or not.
1597 :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
1600 RtApiJack :: RtApiJack()
1602 // Nothing to do here.
1605 RtApiJack :: ~RtApiJack()
1607 if ( stream_.state != STREAM_CLOSED ) closeStream();
1610 unsigned int RtApiJack :: getDeviceCount( void )
1612 // See if we can become a jack client.
1613 jack_client_t *client = jack_client_new( "RtApiJackCount" );
1614 if ( client == 0 ) return 0;
1617 std::string port, previousPort;
1618 unsigned int nChannels = 0, nDevices = 0;
1619 ports = jack_get_ports( client, NULL, NULL, 0 );
1621 // Parse the port names up to the first colon (:).
1622 unsigned int iColon = 0;
1624 port = (char *) ports[ nChannels ];
1625 iColon = port.find(":");
1626 if ( iColon != std::string::npos ) {
1627 port = port.substr( 0, iColon + 1 );
1628 if ( port != previousPort ) {
1630 previousPort = port;
1633 } while ( ports[++nChannels] );
1637 jack_client_close( client );
1641 RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
1643 RtAudio::DeviceInfo info;
1644 info.probed = false;
1646 jack_client_t *client = jack_client_new( "RtApiJackInfo" );
1647 if ( client == 0 ) {
1648 errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
1649 error( RtError::WARNING );
1654 std::string port, previousPort;
1655 unsigned int nPorts = 0, nDevices = 0;
1656 ports = jack_get_ports( client, NULL, NULL, 0 );
1658 // Parse the port names up to the first colon (:).
1659 unsigned int iColon = 0;
1661 port = (char *) ports[ nPorts ];
1662 iColon = port.find(":");
1663 if ( iColon != std::string::npos ) {
1664 port = port.substr( 0, iColon );
1665 if ( port != previousPort ) {
1666 if ( nDevices == device ) info.name = port;
1668 previousPort = port;
1671 } while ( ports[++nPorts] );
1675 if ( device >= nDevices ) {
1676 errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
1677 error( RtError::INVALID_USE );
1680 // Get the current jack server sample rate.
1681 info.sampleRates.clear();
1682 info.sampleRates.push_back( jack_get_sample_rate( client ) );
1684 // Count the available ports containing the client name as device
1685 // channels. Jack "input ports" equal RtAudio output channels.
1686 unsigned int nChannels = 0;
1687 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );
1689 while ( ports[ nChannels ] ) nChannels++;
1691 info.outputChannels = nChannels;
1694 // Jack "output ports" equal RtAudio input channels.
1696 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );
1698 while ( ports[ nChannels ] ) nChannels++;
1700 info.inputChannels = nChannels;
1703 if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
1704 jack_client_close(client);
1705 errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
1706 error( RtError::WARNING );
1710 // If device opens for both playback and capture, we determine the channels.
1711 if ( info.outputChannels > 0 && info.inputChannels > 0 )
1712 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
1714 // Jack always uses 32-bit floats.
1715 info.nativeFormats = RTAUDIO_FLOAT32;
1717 // Jack doesn't provide default devices so we'll use the first available one.
1718 if ( device == 0 && info.outputChannels > 0 )
1719 info.isDefaultOutput = true;
1720 if ( device == 0 && info.inputChannels > 0 )
1721 info.isDefaultInput = true;
1723 jack_client_close(client);
1728 int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
1730 CallbackInfo *info = (CallbackInfo *) infoPointer;
1732 RtApiJack *object = (RtApiJack *) info->object;
1733 if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;
1738 void jackShutdown( void *infoPointer )
1740 CallbackInfo *info = (CallbackInfo *) infoPointer;
1741 RtApiJack *object = (RtApiJack *) info->object;
1743 // Check current stream state. If stopped, then we'll assume this
1744 // was called as a result of a call to RtApiJack::stopStream (the
1745 // deactivation of a client handle causes this function to be called).
1746 // If not, we'll assume the Jack server is shutting down or some
1747 // other problem occurred and we should close the stream.
1748 if ( object->isStreamRunning() == false ) return;
1750 object->closeStream();
1751 std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
1754 int jackXrun( void *infoPointer )
1756 JackHandle *handle = (JackHandle *) infoPointer;
1758 if ( handle->ports[0] ) handle->xrun[0] = true;
1759 if ( handle->ports[1] ) handle->xrun[1] = true;
1764 bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
1765 unsigned int firstChannel, unsigned int sampleRate,
1766 RtAudioFormat format, unsigned int *bufferSize,
1767 RtAudio::StreamOptions *options )
1769 JackHandle *handle = (JackHandle *) stream_.apiHandle;
1771 // Look for jack server and try to become a client (only do once per stream).
1772 jack_client_t *client = 0;
1773 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
1774 if ( options && !options->streamName.empty() )
1775 client = jack_client_new( options->streamName.c_str() );
1777 client = jack_client_new( "RtApiJack" );
1778 if ( client == 0 ) {
1779 errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
1780 error( RtError::WARNING );
1785 // The handle must have been created on an earlier pass.
1786 client = handle->client;
1790 std::string port, previousPort, deviceName;
1791 unsigned int nPorts = 0, nDevices = 0;
1792 ports = jack_get_ports( client, NULL, NULL, 0 );
1794 // Parse the port names up to the first colon (:).
1795 unsigned int iColon = 0;
1797 port = (char *) ports[ nPorts ];
1798 iColon = port.find(":");
1799 if ( iColon != std::string::npos ) {
1800 port = port.substr( 0, iColon );
1801 if ( port != previousPort ) {
1802 if ( nDevices == device ) deviceName = port;
1804 previousPort = port;
1807 } while ( ports[++nPorts] );
1811 if ( device >= nDevices ) {
1812 errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
1816 // Count the available ports containing the client name as device
1817 // channels. Jack "input ports" equal RtAudio output channels.
1818 unsigned int nChannels = 0;
1819 unsigned long flag = JackPortIsOutput;
1820 if ( mode == INPUT ) flag = JackPortIsInput;
1821 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1823 while ( ports[ nChannels ] ) nChannels++;
1827 // Compare the jack ports for specified client to the requested number of channels.
1828 if ( nChannels < (channels + firstChannel) ) {
1829 errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
1830 errorText_ = errorStream_.str();
1834 // Check the jack server sample rate.
1835 unsigned int jackRate = jack_get_sample_rate( client );
1836 if ( sampleRate != jackRate ) {
1837 jack_client_close( client );
1838 errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
1839 errorText_ = errorStream_.str();
1842 stream_.sampleRate = jackRate;
1844 // Get the latency of the JACK port.
1845 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1846 if ( ports[ firstChannel ] )
1847 stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
1850 // The jack server always uses 32-bit floating-point data.
1851 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1852 stream_.userFormat = format;
1854 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1855 else stream_.userInterleaved = true;
1857 // Jack always uses non-interleaved buffers.
1858 stream_.deviceInterleaved[mode] = false;
1860 // Jack always provides host byte-ordered data.
1861 stream_.doByteSwap[mode] = false;
1863 // Get the buffer size. The buffer size and number of buffers
1864 // (periods) is set when the jack server is started.
1865 stream_.bufferSize = (int) jack_get_buffer_size( client );
1866 *bufferSize = stream_.bufferSize;
1868 stream_.nDeviceChannels[mode] = channels;
1869 stream_.nUserChannels[mode] = channels;
1871 // Set flags for buffer conversion.
1872 stream_.doConvertBuffer[mode] = false;
1873 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1874 stream_.doConvertBuffer[mode] = true;
1875 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
1876 stream_.nUserChannels[mode] > 1 )
1877 stream_.doConvertBuffer[mode] = true;
1879 // Allocate our JackHandle structure for the stream.
1880 if ( handle == 0 ) {
1882 handle = new JackHandle;
1884 catch ( std::bad_alloc& ) {
1885 errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
1889 if ( pthread_cond_init(&handle->condition, NULL) ) {
1890 errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
1893 stream_.apiHandle = (void *) handle;
1894 handle->client = client;
1896 handle->deviceName[mode] = deviceName;
1898 // Allocate necessary internal buffers.
1899 unsigned long bufferBytes;
1900 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1901 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1902 if ( stream_.userBuffer[mode] == NULL ) {
1903 errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
1907 if ( stream_.doConvertBuffer[mode] ) {
1909 bool makeBuffer = true;
1910 if ( mode == OUTPUT )
1911 bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1912 else { // mode == INPUT
1913 bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
1914 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1915 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
1916 if ( bufferBytes < bytesOut ) makeBuffer = false;
1921 bufferBytes *= *bufferSize;
1922 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1923 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1924 if ( stream_.deviceBuffer == NULL ) {
1925 errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
1931 // Allocate memory for the Jack ports (channels) identifiers.
1932 handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
1933 if ( handle->ports[mode] == NULL ) {
1934 errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
1938 stream_.device[mode] = device;
1939 stream_.channelOffset[mode] = firstChannel;
1940 stream_.state = STREAM_STOPPED;
1941 stream_.callbackInfo.object = (void *) this;
1943 if ( stream_.mode == OUTPUT && mode == INPUT )
1944 // We had already set up the stream for output.
1945 stream_.mode = DUPLEX;
1947 stream_.mode = mode;
1948 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
1949 jack_set_xrun_callback( handle->client, jackXrun, (void *) &handle );
1950 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
1953 // Register our ports.
1955 if ( mode == OUTPUT ) {
1956 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
1957 snprintf( label, 64, "outport %d", i );
1958 handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
1959 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
1963 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
1964 snprintf( label, 64, "inport %d", i );
1965 handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
1966 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
1970 // Setup the buffer conversion information structure. We don't use
1971 // buffers to do channel offsets, so we override that parameter
1973 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
1979 pthread_cond_destroy( &handle->condition );
1980 jack_client_close( handle->client );
1982 if ( handle->ports[0] ) free( handle->ports[0] );
1983 if ( handle->ports[1] ) free( handle->ports[1] );
1986 stream_.apiHandle = 0;
1989 for ( int i=0; i<2; i++ ) {
1990 if ( stream_.userBuffer[i] ) {
1991 free( stream_.userBuffer[i] );
1992 stream_.userBuffer[i] = 0;
1996 if ( stream_.deviceBuffer ) {
1997 free( stream_.deviceBuffer );
1998 stream_.deviceBuffer = 0;
// Close the JACK stream: deactivate and close the JACK client, then
// release all handle, port-id, and audio buffer memory and mark the
// stream CLOSED.  Issues a WARNING if no stream is open.
void RtApiJack :: closeStream( void )
  if ( stream_.state == STREAM_CLOSED ) {
    errorText_ = "RtApiJack::closeStream(): no open stream to close!";
    error( RtError::WARNING );
  JackHandle *handle = (JackHandle *) stream_.apiHandle;
  // A running client must be deactivated before it can be closed.
  if ( stream_.state == STREAM_RUNNING )
    jack_deactivate( handle->client );
  jack_client_close( handle->client );
  if ( handle->ports[0] ) free( handle->ports[0] );  // output port-id array
  if ( handle->ports[1] ) free( handle->ports[1] );  // input port-id array
  pthread_cond_destroy( &handle->condition );
  stream_.apiHandle = 0;
  // Free the user-side buffers for both directions (0 = output, 1 = input).
  for ( int i=0; i<2; i++ ) {
    if ( stream_.userBuffer[i] ) {
      free( stream_.userBuffer[i] );
      stream_.userBuffer[i] = 0;
  // Free the device-format conversion buffer, if one was allocated.
  if ( stream_.deviceBuffer ) {
    free( stream_.deviceBuffer );
    stream_.deviceBuffer = 0;
  stream_.mode = UNINITIALIZED;
  stream_.state = STREAM_CLOSED;
// Start the JACK stream: activate the client, then connect our
// registered ports to the device's physical ports and mark the stream
// RUNNING.  Raises SYSTEM_ERROR if activation or any connection fails.
void RtApiJack :: startStream( void )
  if ( stream_.state == STREAM_RUNNING ) {
    errorText_ = "RtApiJack::startStream(): the stream is already running!";
    error( RtError::WARNING );
  MUTEX_LOCK(&stream_.mutex);
  JackHandle *handle = (JackHandle *) stream_.apiHandle;
  int result = jack_activate( handle->client );
    errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
  // Get the list of available ports.
  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
    // Playback side: our output ports connect to the device's *input* ports.
    ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
    if ( ports == NULL) {
      errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
    // Now make the port connections.  Since RtAudio wasn't designed to
    // allow the user to select particular channels of a device, we'll
    // just open the first "nChannels" ports with offset.
    for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
      if ( ports[ stream_.channelOffset[0] + i ] )
        result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
        errorText_ = "RtApiJack::startStream(): error connecting output ports!";
  if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
    // Capture side: the device's *output* ports feed our input ports.
    ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );
    if ( ports == NULL) {
      errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
    // Now make the port connections.  See note above.
    for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
      if ( ports[ stream_.channelOffset[1] + i ] )
        result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
        errorText_ = "RtApiJack::startStream(): error connecting input ports!";
  handle->drainCounter = 0;      // reset drain protocol for this run
  handle->internalDrain = false;
  stream_.state = STREAM_RUNNING;
  MUTEX_UNLOCK(&stream_.mutex);
  if ( result == 0 ) return;
  error( RtError::SYSTEM_ERROR );
// Stop the JACK stream.  For output/duplex streams, first request a
// drain (drainCounter = 1) and wait on the handle's condition variable
// until the audio thread signals that pending output has been flushed,
// then deactivate the client.
void RtApiJack :: stopStream( void )
  if ( stream_.state == STREAM_STOPPED ) {
    errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
    error( RtError::WARNING );
  MUTEX_LOCK( &stream_.mutex );
  JackHandle *handle = (JackHandle *) stream_.apiHandle;
  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
    if ( handle->drainCounter == 0 ) {
      handle->drainCounter = 1;  // ask callbackEvent() to begin draining
      pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
  jack_deactivate( handle->client );
  stream_.state = STREAM_STOPPED;
  MUTEX_UNLOCK( &stream_.mutex );
// Abort the JACK stream: set drainCounter = 1 so the audio thread
// zeros its output and counts down to a stop, without the blocking
// wait performed by stopStream().
void RtApiJack :: abortStream( void )
  if ( stream_.state == STREAM_STOPPED ) {
    errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
    error( RtError::WARNING );
  JackHandle *handle = (JackHandle *) stream_.apiHandle;
  handle->drainCounter = 1;  // flag the audio thread; see callbackEvent()
2164 bool RtApiJack :: callbackEvent( unsigned long nframes )
2166 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2167 if ( stream_.state == STREAM_CLOSED ) {
2168 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
2169 error( RtError::WARNING );
2172 if ( stream_.bufferSize != nframes ) {
2173 errorText_ = "RtApiCore::callbackEvent(): the JACK buffer size has changed ... cannot process!";
2174 error( RtError::WARNING );
2178 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2179 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2181 // Check if we were draining the stream and signal is finished.
2182 if ( handle->drainCounter > 3 ) {
2183 if ( handle->internalDrain == false )
2184 pthread_cond_signal( &handle->condition );
2190 MUTEX_LOCK( &stream_.mutex );
2192 // Invoke user callback first, to get fresh output data.
2193 if ( handle->drainCounter == 0 ) {
2194 RtAudioCallback callback = (RtAudioCallback) info->callback;
2195 double streamTime = getStreamTime();
2196 RtAudioStreamStatus status = 0;
2197 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
2198 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2199 handle->xrun[0] = false;
2201 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
2202 status |= RTAUDIO_INPUT_OVERFLOW;
2203 handle->xrun[1] = false;
2205 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2206 stream_.bufferSize, streamTime, status, info->userData );
2207 if ( handle->drainCounter == 2 ) {
2208 MUTEX_UNLOCK( &stream_.mutex );
2212 else if ( handle->drainCounter == 1 )
2213 handle->internalDrain = true;
2216 jack_default_audio_sample_t *jackbuffer;
2217 unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
2218 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2220 if ( handle->drainCounter > 0 ) { // write zeros to the output stream
2222 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2223 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2224 memset( jackbuffer, 0, bufferBytes );
2228 else if ( stream_.doConvertBuffer[0] ) {
2230 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
2232 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2233 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2234 memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
2237 else { // no buffer conversion
2238 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2239 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2240 memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
2244 if ( handle->drainCounter ) {
2245 handle->drainCounter++;
2250 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2252 if ( stream_.doConvertBuffer[1] ) {
2253 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2254 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2255 memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
2257 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
2259 else { // no buffer conversion
2260 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2261 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2262 memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
2268 MUTEX_UNLOCK(&stream_.mutex);
2270 RtApi::tickStreamTime();
2273 //******************** End of __UNIX_JACK__ *********************//
2276 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
2278 // The ASIO API is designed around a callback scheme, so this
2279 // implementation is similar to that used for OS-X CoreAudio and Linux
2280 // Jack. The primary constraint with ASIO is that it only allows
2281 // access to a single driver at a time. Thus, it is not possible to
2282 // have more than one simultaneous RtAudio stream.
2284 // This implementation also requires a number of external ASIO files
2285 // and a few global variables. The ASIO callback scheme does not
2286 // allow for the passing of user data, so we must create a global
2287 // pointer to our callbackInfo structure.
2289 // On unix systems, we make use of a pthread condition variable.
2290 // Since there is no equivalent in Windows, I hacked something based
2291 // on information found in
2292 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
2294 #include "asiosys.h"
2296 #include "iasiothiscallresolver.h"
2297 #include "asiodrivers.h"
// File-scope ASIO state: the ASIO callback scheme cannot carry user
// data, so these must live at global scope (see note above).
AsioDrivers drivers;               // driver enumeration/loading helper
ASIOCallbacks asioCallbacks;       // callback table passed to ASIOCreateBuffers()
ASIODriverInfo driverInfo;         // filled in by ASIOInit()
CallbackInfo *asioCallbackInfo;    // points at the open stream's callback info
2307 int drainCounter; // Tracks callback counts when draining
2308 bool internalDrain; // Indicates if stop is initiated from callback or not.
2309 ASIOBufferInfo *bufferInfos;
2313 :drainCounter(0), internalDrain(false), bufferInfos(0) {}
// Function declarations (definitions at end of section)
static const char* getAsioErrorString( ASIOError result );   // ASIOError -> readable text
void sampleRateChanged( ASIOSampleRate sRate );              // driver sample-rate-change callback
long asioMessages( long selector, long value, void* message, double* opt );  // driver message callback
// Constructor: initialize COM (single-threaded apartment, required by
// ASIO), unload any current driver, and prime driverInfo for ASIOInit().
RtApiAsio :: RtApiAsio()
  // ASIO cannot run on a multi-threaded apartment. You can call
  // CoInitialize beforehand, but it must be for apartment threading
  // (in which case, CoInitialize will return S_FALSE here).
  coInitialized_ = false;
  HRESULT hr = CoInitialize( NULL );
    errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
    error( RtError::WARNING );
  coInitialized_ = true;  // remember to CoUninitialize() in the destructor
  drivers.removeCurrentDriver();
  driverInfo.asioVersion = 2;
  // See note in DirectSound implementation about GetDesktopWindow().
  driverInfo.sysRef = GetForegroundWindow();
// Destructor: close any open stream and balance the constructor's
// CoInitialize() call.
RtApiAsio :: ~RtApiAsio()
  if ( stream_.state != STREAM_CLOSED ) closeStream();
  if ( coInitialized_ ) CoUninitialize();
// Return the number of installed ASIO drivers (one driver == one device).
unsigned int RtApiAsio :: getDeviceCount( void )
  return (unsigned int) drivers.asioGetNumDev();
// Probe and return DeviceInfo for the given ASIO device.  Probing
// loads/unloads the driver, which ASIO forbids while a stream is open;
// in that case the results cached by saveDeviceInfo() are returned.
RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
  RtAudio::DeviceInfo info;
  info.probed = false;  // flipped true only after a full successful probe
  unsigned int nDevices = getDeviceCount();
  if ( nDevices == 0 ) {
    errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
    error( RtError::INVALID_USE );
  if ( device >= nDevices ) {
    errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
    error( RtError::INVALID_USE );
  // If a stream is already open, we cannot probe other devices.  Thus, use the saved results.
  if ( stream_.state != STREAM_CLOSED ) {
    if ( device >= devices_.size() ) {
      errorText_ = "RtApiAsio::getDeviceInfo: device ID was not present before stream was opened.";
      error( RtError::WARNING );
    return devices_[ device ];
  char driverName[32];
  ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
  if ( result != ASE_OK ) {
    errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );
  info.name = driverName;
  if ( !drivers.loadDriver( driverName ) ) {
    errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );
  result = ASIOInit( &driverInfo );
  if ( result != ASE_OK ) {
    errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );
  // Determine the device channel information.
  long inputChannels, outputChannels;
  result = ASIOGetChannels( &inputChannels, &outputChannels );
  if ( result != ASE_OK ) {
    drivers.removeCurrentDriver();  // unload the driver before bailing out
    errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );
  info.outputChannels = outputChannels;
  info.inputChannels = inputChannels;
  // Duplex capability is limited by the smaller of the two counts.
  if ( info.outputChannels > 0 && info.inputChannels > 0 )
    info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
  // Determine the supported sample rates.
  info.sampleRates.clear();
  for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
    result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
    if ( result == ASE_OK )
      info.sampleRates.push_back( SAMPLE_RATES[i] );
  // Determine supported data types ... just check first channel and assume rest are the same.
  ASIOChannelInfo channelInfo;
  channelInfo.channel = 0;
  channelInfo.isInput = true;
  if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
  result = ASIOGetChannelInfo( &channelInfo );
  if ( result != ASE_OK ) {
    drivers.removeCurrentDriver();
    errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
    errorText_ = errorStream_.str();
    error( RtError::WARNING );
  // Map the driver's native sample type onto an RtAudio format flag.
  info.nativeFormats = 0;
  if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
    info.nativeFormats |= RTAUDIO_SINT16;
  else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
    info.nativeFormats |= RTAUDIO_SINT32;
  else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
    info.nativeFormats |= RTAUDIO_FLOAT32;
  else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
    info.nativeFormats |= RTAUDIO_FLOAT64;
  if ( getDefaultOutputDevice() == device )
    info.isDefaultOutput = true;
  if ( getDefaultInputDevice() == device )
    info.isDefaultInput = true;
  drivers.removeCurrentDriver();  // probing done; release the driver
// Static ASIO buffer-switch callback: forwards to the RtApiAsio object
// stashed in the global asioCallbackInfo (ASIO passes no user data).
void bufferSwitch( long index, ASIOBool processNow )
  RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
  object->callbackEvent( index );
// Cache DeviceInfo for every device.  Called before opening a stream
// because getDeviceInfo() cannot probe while a stream is open (ASIO
// permits only one loaded driver at a time).
void RtApiAsio :: saveDeviceInfo( void )
  unsigned int nDevices = getDeviceCount();
  devices_.resize( nDevices );
  for ( unsigned int i=0; i<nDevices; i++ )
    devices_[i] = getDeviceInfo( i );
2479 bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
2480 unsigned int firstChannel, unsigned int sampleRate,
2481 RtAudioFormat format, unsigned int *bufferSize,
2482 RtAudio::StreamOptions *options )
2484 // For ASIO, a duplex stream MUST use the same driver.
2485 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
2486 errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
2490 char driverName[32];
2491 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2492 if ( result != ASE_OK ) {
2493 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
2494 errorText_ = errorStream_.str();
2498 // The getDeviceInfo() function will not work when a stream is open
2499 // because ASIO does not allow multiple devices to run at the same
2500 // time. Thus, we'll probe the system before opening a stream and
2501 // save the results for use by getDeviceInfo().
2502 this->saveDeviceInfo();
2504 // Only load the driver once for duplex stream.
2505 if ( mode != INPUT || stream_.mode != OUTPUT ) {
2506 if ( !drivers.loadDriver( driverName ) ) {
2507 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
2508 errorText_ = errorStream_.str();
2512 result = ASIOInit( &driverInfo );
2513 if ( result != ASE_OK ) {
2514 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2515 errorText_ = errorStream_.str();
2520 // Check the device channel count.
2521 long inputChannels, outputChannels;
2522 result = ASIOGetChannels( &inputChannels, &outputChannels );
2523 if ( result != ASE_OK ) {
2524 drivers.removeCurrentDriver();
2525 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2526 errorText_ = errorStream_.str();
2530 if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
2531 ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
2532 drivers.removeCurrentDriver();
2533 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
2534 errorText_ = errorStream_.str();
2537 stream_.nDeviceChannels[mode] = channels;
2538 stream_.nUserChannels[mode] = channels;
2539 stream_.channelOffset[mode] = firstChannel;
2541 // Verify the sample rate is supported.
2542 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
2543 if ( result != ASE_OK ) {
2544 drivers.removeCurrentDriver();
2545 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
2546 errorText_ = errorStream_.str();
2550 // Get the current sample rate
2551 ASIOSampleRate currentRate;
2552 result = ASIOGetSampleRate( ¤tRate );
2553 if ( result != ASE_OK ) {
2554 drivers.removeCurrentDriver();
2555 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
2556 errorText_ = errorStream_.str();
2560 // Set the sample rate only if necessary
2561 if ( currentRate != sampleRate ) {
2562 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
2563 if ( result != ASE_OK ) {
2564 drivers.removeCurrentDriver();
2565 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
2566 errorText_ = errorStream_.str();
2571 // Determine the driver data type.
2572 ASIOChannelInfo channelInfo;
2573 channelInfo.channel = 0;
2574 if ( mode == OUTPUT ) channelInfo.isInput = false;
2575 else channelInfo.isInput = true;
2576 result = ASIOGetChannelInfo( &channelInfo );
2577 if ( result != ASE_OK ) {
2578 drivers.removeCurrentDriver();
2579 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
2580 errorText_ = errorStream_.str();
2584 // Assuming WINDOWS host is always little-endian.
2585 stream_.doByteSwap[mode] = false;
2586 stream_.userFormat = format;
2587 stream_.deviceFormat[mode] = 0;
2588 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
2589 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
2590 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
2592 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
2593 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
2594 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
2596 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
2597 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2598 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
2600 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
2601 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
2602 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
2605 if ( stream_.deviceFormat[mode] == 0 ) {
2606 drivers.removeCurrentDriver();
2607 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
2608 errorText_ = errorStream_.str();
2612 // Set the buffer size. For a duplex stream, this will end up
2613 // setting the buffer size based on the input constraints, which
2615 long minSize, maxSize, preferSize, granularity;
2616 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
2617 if ( result != ASE_OK ) {
2618 drivers.removeCurrentDriver();
2619 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
2620 errorText_ = errorStream_.str();
2624 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2625 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2626 else if ( granularity == -1 ) {
2627 // Make sure bufferSize is a power of two.
2628 double power = std::log10( (double) *bufferSize ) / log10( 2.0 );
2629 *bufferSize = (int) pow( 2.0, floor(power+0.5) );
2630 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2631 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2632 else *bufferSize = preferSize;
2634 else if ( granularity != 0 ) {
2635 // Set to an even multiple of granularity, rounding up.
2636 *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
2639 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) {
2640 drivers.removeCurrentDriver();
2641 errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
2645 stream_.bufferSize = *bufferSize;
2646 stream_.nBuffers = 2;
2648 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2649 else stream_.userInterleaved = true;
2651 // ASIO always uses non-interleaved buffers.
2652 stream_.deviceInterleaved[mode] = false;
2654 // Allocate, if necessary, our AsioHandle structure for the stream.
2655 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2656 if ( handle == 0 ) {
2658 handle = new AsioHandle;
2660 catch ( std::bad_alloc& ) {
2661 //if ( handle == NULL ) {
2662 drivers.removeCurrentDriver();
2663 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
2666 handle->bufferInfos = 0;
2668 // Create a manual-reset event.
2669 handle->condition = CreateEvent( NULL, // no security
2670 TRUE, // manual-reset
2671 FALSE, // non-signaled initially
2673 stream_.apiHandle = (void *) handle;
2676 // Create the ASIO internal buffers. Since RtAudio sets up input
2677 // and output separately, we'll have to dispose of previously
2678 // created output buffers for a duplex stream.
2679 long inputLatency, outputLatency;
2680 if ( mode == INPUT && stream_.mode == OUTPUT ) {
2681 ASIODisposeBuffers();
2682 if ( handle->bufferInfos ) free( handle->bufferInfos );
2685 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
2686 bool buffersAllocated = false;
2687 unsigned int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2688 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
2689 if ( handle->bufferInfos == NULL ) {
2690 errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
2691 errorText_ = errorStream_.str();
2695 ASIOBufferInfo *infos;
2696 infos = handle->bufferInfos;
2697 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
2698 infos->isInput = ASIOFalse;
2699 infos->channelNum = i + stream_.channelOffset[0];
2700 infos->buffers[0] = infos->buffers[1] = 0;
2702 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
2703 infos->isInput = ASIOTrue;
2704 infos->channelNum = i + stream_.channelOffset[1];
2705 infos->buffers[0] = infos->buffers[1] = 0;
2708 // Set up the ASIO callback structure and create the ASIO data buffers.
2709 asioCallbacks.bufferSwitch = &bufferSwitch;
2710 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
2711 asioCallbacks.asioMessage = &asioMessages;
2712 asioCallbacks.bufferSwitchTimeInfo = NULL;
2713 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
2714 if ( result != ASE_OK ) {
2715 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
2716 errorText_ = errorStream_.str();
2719 buffersAllocated = true;
2721 // Set flags for buffer conversion.
2722 stream_.doConvertBuffer[mode] = false;
2723 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2724 stream_.doConvertBuffer[mode] = true;
2725 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2726 stream_.nUserChannels[mode] > 1 )
2727 stream_.doConvertBuffer[mode] = true;
2729 // Allocate necessary internal buffers
2730 unsigned long bufferBytes;
2731 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2732 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2733 if ( stream_.userBuffer[mode] == NULL ) {
2734 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
2738 if ( stream_.doConvertBuffer[mode] ) {
2740 bool makeBuffer = true;
2741 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
2742 if ( mode == INPUT ) {
2743 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2744 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2745 if ( bufferBytes <= bytesOut ) makeBuffer = false;
2750 bufferBytes *= *bufferSize;
2751 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2752 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2753 if ( stream_.deviceBuffer == NULL ) {
2754 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
2760 stream_.sampleRate = sampleRate;
2761 stream_.device[mode] = device;
2762 stream_.state = STREAM_STOPPED;
2763 asioCallbackInfo = &stream_.callbackInfo;
2764 stream_.callbackInfo.object = (void *) this;
2765 if ( stream_.mode == OUTPUT && mode == INPUT )
2766 // We had already set up an output stream.
2767 stream_.mode = DUPLEX;
2769 stream_.mode = mode;
2771 // Determine device latencies
2772 result = ASIOGetLatencies( &inputLatency, &outputLatency );
2773 if ( result != ASE_OK ) {
2774 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
2775 errorText_ = errorStream_.str();
2776 error( RtError::WARNING); // warn but don't fail
2779 stream_.latency[0] = outputLatency;
2780 stream_.latency[1] = inputLatency;
2783 // Setup the buffer conversion information structure. We don't use
2784 // buffers to do channel offsets, so we override that parameter
2786 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
2791 if ( buffersAllocated )
2792 ASIODisposeBuffers();
2793 drivers.removeCurrentDriver();
2796 CloseHandle( handle->condition );
2797 if ( handle->bufferInfos )
2798 free( handle->bufferInfos );
2800 stream_.apiHandle = 0;
2803 for ( int i=0; i<2; i++ ) {
2804 if ( stream_.userBuffer[i] ) {
2805 free( stream_.userBuffer[i] );
2806 stream_.userBuffer[i] = 0;
2810 if ( stream_.deviceBuffer ) {
2811 free( stream_.deviceBuffer );
2812 stream_.deviceBuffer = 0;
// Close the ASIO stream: stop if running, dispose the driver's
// buffers, unload the driver, and free all handle and buffer memory.
void RtApiAsio :: closeStream()
  if ( stream_.state == STREAM_CLOSED ) {
    errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
    error( RtError::WARNING );
  if ( stream_.state == STREAM_RUNNING ) {
    stream_.state = STREAM_STOPPED;
  ASIODisposeBuffers();
  drivers.removeCurrentDriver();
  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
    CloseHandle( handle->condition );  // Windows event created in probeDeviceOpen()
    if ( handle->bufferInfos )
      free( handle->bufferInfos );
  stream_.apiHandle = 0;
  // Free user-side buffers for both directions (0 = output, 1 = input).
  for ( int i=0; i<2; i++ ) {
    if ( stream_.userBuffer[i] ) {
      free( stream_.userBuffer[i] );
      stream_.userBuffer[i] = 0;
  if ( stream_.deviceBuffer ) {
    free( stream_.deviceBuffer );
    stream_.deviceBuffer = 0;
  stream_.mode = UNINITIALIZED;
  stream_.state = STREAM_CLOSED;
// Start the ASIO stream via ASIOStart(), resetting the drain protocol
// state.  Raises SYSTEM_ERROR if the driver fails to start.
void RtApiAsio :: startStream()
  if ( stream_.state == STREAM_RUNNING ) {
    errorText_ = "RtApiAsio::startStream(): the stream is already running!";
    error( RtError::WARNING );
  MUTEX_LOCK( &stream_.mutex );
  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  ASIOError result = ASIOStart();
  if ( result != ASE_OK ) {
    errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
    errorText_ = errorStream_.str();
  handle->drainCounter = 0;      // reset drain protocol for this run
  handle->internalDrain = false;
  stream_.state = STREAM_RUNNING;
  MUTEX_UNLOCK( &stream_.mutex );
  if ( result == ASE_OK ) return;
  error( RtError::SYSTEM_ERROR );
// Stop the ASIO stream.  For output/duplex streams, first request a
// drain and wait on the handle's Windows event until the audio thread
// signals that pending output is flushed, then call ASIOStop().
void RtApiAsio :: stopStream()
  if ( stream_.state == STREAM_STOPPED ) {
    errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
    error( RtError::WARNING );
  MUTEX_LOCK( &stream_.mutex );
  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
    if ( handle->drainCounter == 0 ) {
      handle->drainCounter = 1;  // ask callbackEvent() to begin draining
      // Release the mutex while blocking so the audio thread can proceed.
      MUTEX_UNLOCK( &stream_.mutex );
      WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
      ResetEvent( handle->condition );  // manual-reset event must be cleared by hand
      MUTEX_LOCK( &stream_.mutex );
  ASIOError result = ASIOStop();
  if ( result != ASE_OK ) {
    errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
    errorText_ = errorStream_.str();
  stream_.state = STREAM_STOPPED;
  MUTEX_UNLOCK( &stream_.mutex );
  if ( result == ASE_OK ) return;
  error( RtError::SYSTEM_ERROR );
// Abort the ASIO stream.  The drain shortcut is deliberately disabled
// (see comment below), so aborting currently behaves like stopStream().
void RtApiAsio :: abortStream()
  if ( stream_.state == STREAM_STOPPED ) {
    errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
    error( RtError::WARNING );
  // The following lines were commented-out because some behavior was
  // noted where the device buffers need to be zeroed to avoid
  // continuing sound, even when the device buffers are completely
  // disposed.  So now, calling abort is the same as calling stop.
  //AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  //handle->drainCounter = 1;
// Per-buffer ASIO driver callback: invokes the user callback for fresh
// output data, copies/converts/byte-swaps between user and device
// buffers, and manages the output drain sequence when the user callback
// requests a stop (drainCounter 1 = internal drain, 2 = immediate abort).
2942 bool RtApiAsio :: callbackEvent( long bufferIndex )
2944 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2945 if ( stream_.state == STREAM_CLOSED ) {
2946 errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
2947 error( RtError::WARNING );
2951 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2952 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2954 // Check if we were draining the stream and signal is finished.
// drainCounter is incremented once per callback while draining; a value
// > 3 means the queued device buffers have played out.
2955 if ( handle->drainCounter > 3 ) {
2956 if ( handle->internalDrain == false )
// Wake the thread blocked in stopStream() on handle->condition.
2957 SetEvent( handle->condition );
2963 MUTEX_LOCK( &stream_.mutex );
2965 // The state might change while waiting on a mutex.
2966 if ( stream_.state == STREAM_STOPPED ) goto unlock;
2968 // Invoke user callback to get fresh output data UNLESS we are
2970 if ( handle->drainCounter == 0 ) {
2971 RtAudioCallback callback = (RtAudioCallback) info->callback;
2972 double streamTime = getStreamTime();
2973 RtAudioStreamStatus status = 0;
// Report any xrun flagged by the ASIO driver, on the side(s) in use.
2974 if ( stream_.mode != INPUT && asioXRun == true ) {
2975 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2978 if ( stream_.mode != OUTPUT && asioXRun == true ) {
2979 status |= RTAUDIO_INPUT_OVERFLOW;
// The user callback's return value seeds drainCounter: 0 = continue,
// 1 = stop after draining, 2 = abort now.
2982 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2983 stream_.bufferSize, streamTime, status, info->userData );
2984 if ( handle->drainCounter == 2 ) {
2985 MUTEX_UNLOCK( &stream_.mutex );
2989 else if ( handle->drainCounter == 1 )
2990 handle->internalDrain = true;
2993 unsigned int nChannels, bufferBytes, i, j;
// bufferInfos[] holds output channels and input channels together, so
// iterate over the combined count and filter on isInput below.
2994 nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2995 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2997 bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );
2999 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
3001 for ( i=0, j=0; i<nChannels; i++ ) {
3002 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3003 memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );
// User data needs conversion (channels/format/interleaving) before
// being scattered into the per-channel ASIO device buffers.
3007 else if ( stream_.doConvertBuffer[0] ) {
3009 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
3010 if ( stream_.doByteSwap[0] )
3011 byteSwapBuffer( stream_.deviceBuffer,
3012 stream_.bufferSize * stream_.nDeviceChannels[0],
3013 stream_.deviceFormat[0] );
3015 for ( i=0, j=0; i<nChannels; i++ ) {
3016 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3017 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3018 &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
// No conversion needed: copy straight from the user buffer.
3024 if ( stream_.doByteSwap[0] )
3025 byteSwapBuffer( stream_.userBuffer[0],
3026 stream_.bufferSize * stream_.nUserChannels[0],
3027 stream_.userFormat );
3029 for ( i=0, j=0; i<nChannels; i++ ) {
3030 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3031 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3032 &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );
// While draining, count callbacks so the check at the top can detect
// when the device buffers have been fully played out.
3037 if ( handle->drainCounter ) {
3038 handle->drainCounter++;
3043 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3045 bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
3047 if (stream_.doConvertBuffer[1]) {
3049 // Always interleave ASIO input data.
3050 for ( i=0, j=0; i<nChannels; i++ ) {
3051 if ( handle->bufferInfos[i].isInput == ASIOTrue )
3052 memcpy( &stream_.deviceBuffer[j++*bufferBytes],
3053 handle->bufferInfos[i].buffers[bufferIndex],
3057 if ( stream_.doByteSwap[1] )
3058 byteSwapBuffer( stream_.deviceBuffer,
3059 stream_.bufferSize * stream_.nDeviceChannels[1],
3060 stream_.deviceFormat[1] );
3061 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
// No conversion needed: gather device channels straight into the user buffer.
3065 for ( i=0, j=0; i<nChannels; i++ ) {
3066 if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
3067 memcpy( &stream_.userBuffer[1][bufferBytes*j++],
3068 handle->bufferInfos[i].buffers[bufferIndex],
3073 if ( stream_.doByteSwap[1] )
3074 byteSwapBuffer( stream_.userBuffer[1],
3075 stream_.bufferSize * stream_.nUserChannels[1],
3076 stream_.userFormat );
3081 // The following call was suggested by Malte Clasen. While the API
3082 // documentation indicates it should not be required, some device
3083 // drivers apparently do not function correctly without it.
3086 MUTEX_UNLOCK( &stream_.mutex );
3088 RtApi::tickStreamTime();
// ASIO driver callback: the driver reports a (possible) sample rate
// change. This implementation simply stops the stream, reporting any
// RtError raised by stopStream() to stderr.
3092 void sampleRateChanged( ASIOSampleRate sRate )
3094 // The ASIO documentation says that this usually only happens during
3095 // external sync. Audio processing is not stopped by the driver,
3096 // actual sample rate might not have even changed, maybe only the
3097 // sample rate status of an AES/EBU or S/PDIF digital input at the
// Recover the RtApi instance stashed in the global asioCallbackInfo.
3100 RtApi *object = (RtApi *) asioCallbackInfo->object;
3102 object->stopStream();
3104 catch ( RtError &exception ) {
3105 std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
3109 std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
// ASIO driver-to-host message callback. The driver queries host
// capabilities and posts notifications through `selector`; the return
// value (not all return paths are visible here) signals which
// selectors/features this host supports.
3112 long asioMessages( long selector, long value, void* message, double* opt )
3116 switch( selector ) {
3117 case kAsioSelectorSupported:
// Advertise which of the selectors below this host handles.
3118 if ( value == kAsioResetRequest
3119 || value == kAsioEngineVersion
3120 || value == kAsioResyncRequest
3121 || value == kAsioLatenciesChanged
3122 // The following three were added for ASIO 2.0, you don't
3123 // necessarily have to support them.
3124 || value == kAsioSupportsTimeInfo
3125 || value == kAsioSupportsTimeCode
3126 || value == kAsioSupportsInputMonitor)
3129 case kAsioResetRequest:
3130 // Defer the task and perform the reset of the driver during the
3131 // next "safe" situation. You cannot reset the driver right now,
3132 // as this code is called from the driver. Reset the driver is
3133 // done by completely destruct is. I.e. ASIOStop(),
3134 // ASIODisposeBuffers(), Destruction Afterwards you initialize the
3136 std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
3139 case kAsioResyncRequest:
3140 // This informs the application that the driver encountered some
3141 // non-fatal data loss. It is used for synchronization purposes
3142 // of different media. Added mainly to work around the Win16Mutex
3143 // problems in Windows 95/98 with the Windows Multimedia system,
3144 // which could lose data because the Mutex was held too long by
3145 // another thread. However a driver can issue it in other
3147 // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
3151 case kAsioLatenciesChanged:
3152 // This will inform the host application that the drivers were
3153 // latencies changed. Beware, it this does not mean that the
3154 // buffer sizes have changed! You might need to update internal
3156 std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
3159 case kAsioEngineVersion:
3160 // Return the supported ASIO version of the host application. If
3161 // a host application does not implement this selector, ASIO 1.0
3162 // is assumed by the driver.
3165 case kAsioSupportsTimeInfo:
3166 // Informs the driver whether the
3167 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
3168 // For compatibility with ASIO 1.0 drivers the host application
3169 // should always support the "old" bufferSwitch method, too.
3172 case kAsioSupportsTimeCode:
3173 // Informs the driver whether application is interested in time
3174 // code info. If an application does not need to know about time
3175 // code, the driver has less work to do.
// Map an ASIOError code to a human-readable description. Unrecognized
// codes fall through to the generic "Unknown error." string.
3182 static const char* getAsioErrorString( ASIOError result )
// Static lookup table of known ASIO error codes and their messages.
3190 static Messages m[] =
3192 { ASE_NotPresent, "Hardware input or output is not present or available." },
3193 { ASE_HWMalfunction, "Hardware is malfunctioning." },
3194 { ASE_InvalidParameter, "Invalid input parameter." },
3195 { ASE_InvalidMode, "Invalid mode." },
3196 { ASE_SPNotAdvancing, "Sample position not advancing." },
3197 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
3198 { ASE_NoMemory, "Not enough memory to complete the request." }
// Linear scan is fine for this handful of entries.
3201 for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
3202 if ( m[i].value == result ) return m[i].message;
3204 return "Unknown error.";
3206 //******************** End of __WINDOWS_ASIO__ *********************//
3210 #if defined(__WINDOWS_DS__) // Windows DirectSound API
3212 // Modified by Robin Davies, October 2005
3213 // - Improvements to DirectX pointer chasing.
3214 // - Backdoor RtDsStatistics hook provides DirectX performance information.
3215 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
3216 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
3217 // Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
3222 #if defined(__MINGW32__)
3223 // missing from latest mingw winapi
3224 #define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
3225 #define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
3226 #define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
3227 #define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
3230 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
3232 #ifdef _MSC_VER // if Microsoft Visual C++
3233 #pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
3236 static inline DWORD dsPointerDifference( DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3238 if (laterPointer > earlierPointer)
3239 return laterPointer - earlierPointer;
3241 return laterPointer - earlierPointer + bufferSize;
3244 static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3246 if ( pointer > bufferSize ) pointer -= bufferSize;
3247 if ( laterPointer < earlierPointer ) laterPointer += bufferSize;
3248 if ( pointer < earlierPointer ) pointer += bufferSize;
3249 return pointer >= earlierPointer && pointer < laterPointer;
3252 // A structure to hold various information related to the DirectSound
3253 // API implementation.
// Array index convention (as used by callbackEvent and probeDeviceOpen):
// [0] = output/playback side, [1] = input/capture side.
3255 unsigned int drainCounter; // Tracks callback counts when draining
3256 bool internalDrain; // Indicates if stop is initiated from callback or not.
3260 UINT bufferPointer[2];
3261 DWORD dsBufferSize[2];
3262 DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.
// Default-construct with everything zeroed / cleared.
3266 :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
// Definition of the static statistics instance shared by all streams.
3270 RtApiDs::RtDsStatistics RtApiDs::statistics;
3272 // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
// Returns a snapshot of the statistics with the derived latency field
// computed from the raw byte counters (latency in seconds:
// bytes / frameSize / sampleRate, summed over input and output sides).
3273 RtApiDs::RtDsStatistics RtApiDs::getDsStatistics()
3275 RtDsStatistics s = statistics;
3277 // update the calculated fields.
3278 if ( s.inputFrameSize != 0 )
3279 s.latency += s.readDeviceSafeLeadBytes * 1.0 / s.inputFrameSize / s.sampleRate;
3281 if ( s.outputFrameSize != 0 )
3282 s.latency += (s.writeDeviceSafeLeadBytes + s.writeDeviceBufferLeadBytes) * 1.0 / s.outputFrameSize / s.sampleRate;
3288 // Declarations for utility functions, callbacks, and structures
3289 // specific to the DirectSound implementation.
// Enumeration callback passed to DirectSound(Capture)Enumerate; used for
// counting devices, locating the default, and finding a device by index.
3290 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
3291 LPCTSTR description,
// Translate a DirectSound HRESULT into a readable message.
3295 static char* getErrorString( int code );
// Thread entry point for the DirectSound buffer-servicing thread.
3297 extern "C" unsigned __stdcall callbackHandler( void *ptr );
// Enumeration bookkeeping: counter advances per enumerated device; index
// selects the device to find when findIndex is set.
3303 unsigned int counter;
3309 : isInput(false), getDefault(false), findIndex(false), counter(0), index(0) {}
// Constructor: initialize COM for this thread. coInitialized_ records
// whether our CoInitialize succeeded so the destructor only balances
// a call we actually made.
3312 RtApiDs :: RtApiDs()
3314 // Dsound will run both-threaded. If CoInitialize fails, then just
3315 // accept whatever the mainline chose for a threading model.
3316 coInitialized_ = false;
3317 HRESULT hr = CoInitialize( NULL );
3318 if ( !FAILED( hr ) ) coInitialized_ = true;
// Destructor: release COM (only if we initialized it) and make sure any
// open stream is closed before the object goes away.
3321 RtApiDs :: ~RtApiDs()
3323 if ( coInitialized_ ) CoUninitialize(); // balanced call.
3324 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Return the index of the system default input (capture) device.
// RtAudio exposes one flat device index space in which output devices
// come first, so the output devices must be counted before the capture
// devices are enumerated (see the same note in getDeviceInfo()).
3327 unsigned int RtApiDs :: getDefaultInputDevice( void )
3329 // Count output devices.
3331 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3332 if ( FAILED( result ) ) {
// Fix: this message previously labeled itself "RtApiDs::getDefaultOutputDevice"
// (copy-paste error) even though the failure occurs in this function.
3333 errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") counting output devices!";
3334 errorText_ = errorStream_.str();
3335 error( RtError::WARNING );
3339 // Now enumerate input devices until we find the id = NULL.
3340 info.isInput = true;
3341 info.getDefault = true;
3342 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3343 if ( FAILED( result ) ) {
3344 errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") enumerating input devices!";
3345 errorText_ = errorStream_.str();
3346 error( RtError::WARNING );
// counter was advanced during enumeration; a positive value means the
// default device was located and its flat index is counter - 1.
3350 if ( info.counter > 0 ) return info.counter - 1;
// Return the index of the system default output (playback) device by
// enumerating output devices with the getDefault flag set.
3354 unsigned int RtApiDs :: getDefaultOutputDevice( void )
3356 // Enumerate output devices until we find the id = NULL.
3358 info.getDefault = true;
3359 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3360 if ( FAILED( result ) ) {
3361 errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") enumerating output devices!";
3362 errorText_ = errorStream_.str();
3363 error( RtError::WARNING );
// counter was advanced during enumeration; a positive value means the
// default device was located and its index is counter - 1.
3367 if ( info.counter > 0 ) return info.counter - 1;
// Count all DirectSound devices: playback devices first, then capture
// devices, accumulating into the same info.counter so the result is the
// total across both enumerations.
3371 unsigned int RtApiDs :: getDeviceCount( void )
3373 // Count DirectSound devices.
3375 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3376 if ( FAILED( result ) ) {
3377 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
3378 errorText_ = errorStream_.str();
3379 error( RtError::WARNING );
3382 // Count DirectSoundCapture devices.
3383 info.isInput = true;
3384 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &info );
3385 if ( FAILED( result ) ) {
3386 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
3387 errorText_ = errorStream_.str();
3388 error( RtError::WARNING );
3391 return info.counter;
// Probe a DirectSound device by flat index and fill in an
// RtAudio::DeviceInfo (channels, sample rates, native formats, default
// flags, name). Output devices are probed first; if the index does not
// match an output device, control falls through to the capture probe.
// Fix applied below: the stereo 8-bit WAVE_FORMAT_96S08 branch pushed
// 44100 instead of 96000, inconsistent with every other 96 kHz branch
// (96S16, 96M16, 96M08 all push 96000) and with the 96 kHz meaning of
// the flag documented in the MinGW defines above.
3394 RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )
3396 // Because DirectSound always enumerates input and output devices
3397 // separately (and because we don't attempt to combine devices
3398 // internally), none of our "devices" will ever be duplex.
3400 RtAudio::DeviceInfo info;
3401 info.probed = false;
3403 // Enumerate through devices to find the id (if it exists). Note
3404 // that we have to do the output enumeration first, even if this is
3405 // an input device, in order for the device counter to be correct.
3407 dsinfo.findIndex = true;
3408 dsinfo.index = device;
3409 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3410 if ( FAILED( result ) ) {
3411 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating output devices!";
3412 errorText_ = errorStream_.str();
3413 error( RtError::WARNING );
// An empty name means the index is not an output device; try capture.
3416 if ( dsinfo.name.empty() ) goto probeInput;
3418 LPDIRECTSOUND output;
3420 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3421 if ( FAILED( result ) ) {
3422 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3423 errorText_ = errorStream_.str();
3424 error( RtError::WARNING );
3428 outCaps.dwSize = sizeof( outCaps );
3429 result = output->GetCaps( &outCaps );
3430 if ( FAILED( result ) ) {
3432 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
3433 errorText_ = errorStream_.str();
3434 error( RtError::WARNING );
3438 // Get output channel information.
3439 info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
3441 // Get sample rate information.
3442 info.sampleRates.clear();
3443 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
3444 if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
3445 SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate )
3446 info.sampleRates.push_back( SAMPLE_RATES[k] );
3449 // Get format information.
3450 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
3451 if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;
3455 if ( getDefaultOutputDevice() == device )
3456 info.isDefaultOutput = true;
3458 // Copy name and return.
3459 info.name = dsinfo.name;
// --- Capture-device probe ---
3466 dsinfo.isInput = true;
3467 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3468 if ( FAILED( result ) ) {
3469 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating input devices!";
3470 errorText_ = errorStream_.str();
3471 error( RtError::WARNING );
3474 if ( dsinfo.name.empty() ) return info;
3476 LPDIRECTSOUNDCAPTURE input;
3477 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
3478 if ( FAILED( result ) ) {
3479 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
3480 errorText_ = errorStream_.str();
3481 error( RtError::WARNING );
3486 inCaps.dwSize = sizeof( inCaps );
3487 result = input->GetCaps( &inCaps );
3488 if ( FAILED( result ) ) {
3490 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsinfo.name << ")!";
3491 errorText_ = errorStream_.str();
3492 error( RtError::WARNING );
3496 // Get input channel information.
3497 info.inputChannels = inCaps.dwChannels;
3499 // Get sample rate and format information.
// dwFormats is a bit mask of WAVE_FORMAT_* capability flags:
// 1/2/4/96 = 11.025/22.05/44.1/96 kHz, M/S = mono/stereo, 08/16 = bits.
3500 if ( inCaps.dwChannels == 2 ) {
3501 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3502 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3503 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3504 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3505 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3506 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3507 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3508 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3510 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3511 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.sampleRates.push_back( 11025 );
3512 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.sampleRates.push_back( 22050 );
3513 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.sampleRates.push_back( 44100 );
3514 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.sampleRates.push_back( 96000 );
3516 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3517 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.sampleRates.push_back( 11025 );
3518 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.sampleRates.push_back( 22050 );
3519 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.sampleRates.push_back( 44100 );
// Fix: was push_back( 44100 ) — WAVE_FORMAT_96S08 is the 96 kHz flag.
3520 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.sampleRates.push_back( 96000 );
3523 else if ( inCaps.dwChannels == 1 ) {
3524 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3525 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3526 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3527 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3528 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3529 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3530 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3531 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3533 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3534 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.sampleRates.push_back( 11025 );
3535 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.sampleRates.push_back( 22050 );
3536 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.sampleRates.push_back( 44100 );
3537 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.sampleRates.push_back( 96000 );
3539 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3540 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.sampleRates.push_back( 11025 );
3541 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.sampleRates.push_back( 22050 );
3542 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.sampleRates.push_back( 44100 );
3543 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.sampleRates.push_back( 96000 );
3546 else info.inputChannels = 0; // technically, this would be an error
3550 if ( info.inputChannels == 0 ) return info;
3552 if ( getDefaultInputDevice() == device )
3553 info.isDefaultInput = true;
3555 // Copy name and return.
3556 info.name = dsinfo.name;
3561 bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
3562 unsigned int firstChannel, unsigned int sampleRate,
3563 RtAudioFormat format, unsigned int *bufferSize,
3564 RtAudio::StreamOptions *options )
3566 if ( channels + firstChannel > 2 ) {
3567 errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
3571 // Enumerate through devices to find the id (if it exists). Note
3572 // that we have to do the output enumeration first, even if this is
3573 // an input device, in order for the device counter to be correct.
3575 dsinfo.findIndex = true;
3576 dsinfo.index = device;
3577 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3578 if ( FAILED( result ) ) {
3579 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating output devices!";
3580 errorText_ = errorStream_.str();
3584 if ( mode == OUTPUT ) {
3585 if ( dsinfo.name.empty() ) {
3586 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
3587 errorText_ = errorStream_.str();
3591 else { // mode == INPUT
3592 dsinfo.isInput = true;
3593 HRESULT result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceQueryCallback, &dsinfo );
3594 if ( FAILED( result ) ) {
3595 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating input devices!";
3596 errorText_ = errorStream_.str();
3599 if ( dsinfo.name.empty() ) {
3600 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
3601 errorText_ = errorStream_.str();
3606 // According to a note in PortAudio, using GetDesktopWindow()
3607 // instead of GetForegroundWindow() is supposed to avoid problems
3608 // that occur when the application's window is not the foreground
3609 // window. Also, if the application window closes before the
3610 // DirectSound buffer, DirectSound can crash. However, for console
3611 // applications, no sound was produced when using GetDesktopWindow().
3612 HWND hWnd = GetForegroundWindow();
3614 // Check the numberOfBuffers parameter and limit the lowest value to
3615 // two. This is a judgement call and a value of two is probably too
3616 // low for capture, but it should work for playback.
3618 if ( options ) nBuffers = options->numberOfBuffers;
3619 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
3620 if ( nBuffers < 2 ) nBuffers = 3;
3622 // Create the wave format structure. The data format setting will
3623 // be determined later.
3624 WAVEFORMATEX waveFormat;
3625 ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
3626 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
3627 waveFormat.nChannels = channels + firstChannel;
3628 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
3630 // Determine the device buffer size. By default, 32k, but we will
3631 // grow it to make allowances for very large software buffer sizes.
3632 DWORD dsBufferSize = 0;
3633 DWORD dsPointerLeadTime = 0;
3634 long bufferBytes = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this
3636 void *ohandle = 0, *bhandle = 0;
3637 if ( mode == OUTPUT ) {
3639 LPDIRECTSOUND output;
3640 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3641 if ( FAILED( result ) ) {
3642 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3643 errorText_ = errorStream_.str();
3648 outCaps.dwSize = sizeof( outCaps );
3649 result = output->GetCaps( &outCaps );
3650 if ( FAILED( result ) ) {
3652 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsinfo.name << ")!";
3653 errorText_ = errorStream_.str();
3657 // Check channel information.
3658 if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
3659 errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsinfo.name << ") does not support stereo playback.";
3660 errorText_ = errorStream_.str();
3664 // Check format information. Use 16-bit format unless not
3665 // supported or user requests 8-bit.
3666 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
3667 !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
3668 waveFormat.wBitsPerSample = 16;
3669 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3672 waveFormat.wBitsPerSample = 8;
3673 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3675 stream_.userFormat = format;
3677 // Update wave format structure and buffer information.
3678 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3679 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3680 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
3682 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
3683 while ( dsPointerLeadTime * 2U > (DWORD) bufferBytes )
3686 // Set cooperative level to DSSCL_EXCLUSIVE
3687 result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
3688 if ( FAILED( result ) ) {
3690 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsinfo.name << ")!";
3691 errorText_ = errorStream_.str();
3695 // Even though we will write to the secondary buffer, we need to
3696 // access the primary buffer to set the correct output format
3697 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
3698 // buffer description.
3699 DSBUFFERDESC bufferDescription;
3700 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3701 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3702 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
3704 // Obtain the primary buffer
3705 LPDIRECTSOUNDBUFFER buffer;
3706 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3707 if ( FAILED( result ) ) {
3709 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsinfo.name << ")!";
3710 errorText_ = errorStream_.str();
3714 // Set the primary DS buffer sound format.
3715 result = buffer->SetFormat( &waveFormat );
3716 if ( FAILED( result ) ) {
3718 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsinfo.name << ")!";
3719 errorText_ = errorStream_.str();
3723 // Setup the secondary DS buffer description.
3724 dsBufferSize = (DWORD) bufferBytes;
3725 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3726 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3727 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3728 DSBCAPS_GETCURRENTPOSITION2 |
3729 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
3730 bufferDescription.dwBufferBytes = bufferBytes;
3731 bufferDescription.lpwfxFormat = &waveFormat;
3733 // Try to create the secondary DS buffer. If that doesn't work,
3734 // try to use software mixing. Otherwise, there's a problem.
3735 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3736 if ( FAILED( result ) ) {
3737 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3738 DSBCAPS_GETCURRENTPOSITION2 |
3739 DSBCAPS_LOCSOFTWARE ); // Force software mixing
3740 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3741 if ( FAILED( result ) ) {
3743 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsinfo.name << ")!";
3744 errorText_ = errorStream_.str();
3749 // Get the buffer size ... might be different from what we specified.
3751 dsbcaps.dwSize = sizeof( DSBCAPS );
3752 result = buffer->GetCaps( &dsbcaps );
3753 if ( FAILED( result ) ) {
3756 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsinfo.name << ")!";
3757 errorText_ = errorStream_.str();
3761 bufferBytes = dsbcaps.dwBufferBytes;
3763 // Lock the DS buffer
3766 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3767 if ( FAILED( result ) ) {
3770 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsinfo.name << ")!";
3771 errorText_ = errorStream_.str();
3775 // Zero the DS buffer
3776 ZeroMemory( audioPtr, dataLen );
3778 // Unlock the DS buffer
3779 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3780 if ( FAILED( result ) ) {
3783 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsinfo.name << ")!";
3784 errorText_ = errorStream_.str();
3788 dsBufferSize = bufferBytes;
3789 ohandle = (void *) output;
3790 bhandle = (void *) buffer;
3793 if ( mode == INPUT ) {
3795 LPDIRECTSOUNDCAPTURE input;
3796 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
3797 if ( FAILED( result ) ) {
3798 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
3799 errorText_ = errorStream_.str();
3804 inCaps.dwSize = sizeof( inCaps );
3805 result = input->GetCaps( &inCaps );
3806 if ( FAILED( result ) ) {
3808 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsinfo.name << ")!";
3809 errorText_ = errorStream_.str();
3813 // Check channel information.
3814 if ( inCaps.dwChannels < channels + firstChannel ) {
3815 errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";
3819 // Check format information. Use 16-bit format unless user
3821 DWORD deviceFormats;
3822 if ( channels + firstChannel == 2 ) {
3823 deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
3824 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
3825 waveFormat.wBitsPerSample = 8;
3826 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3828 else { // assume 16-bit is supported
3829 waveFormat.wBitsPerSample = 16;
3830 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3833 else { // channel == 1
3834 deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
3835 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
3836 waveFormat.wBitsPerSample = 8;
3837 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3839 else { // assume 16-bit is supported
3840 waveFormat.wBitsPerSample = 16;
3841 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3844 stream_.userFormat = format;
3846 // Update wave format structure and buffer information.
3847 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3848 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3850 // Setup the secondary DS buffer description.
3851 dsBufferSize = bufferBytes;
3852 DSCBUFFERDESC bufferDescription;
3853 ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
3854 bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
3855 bufferDescription.dwFlags = 0;
3856 bufferDescription.dwReserved = 0;
3857 bufferDescription.dwBufferBytes = bufferBytes;
3858 bufferDescription.lpwfxFormat = &waveFormat;
3860 // Create the capture buffer.
3861 LPDIRECTSOUNDCAPTUREBUFFER buffer;
3862 result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
3863 if ( FAILED( result ) ) {
3865 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsinfo.name << ")!";
3866 errorText_ = errorStream_.str();
3870 // Lock the capture buffer
3873 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3874 if ( FAILED( result ) ) {
3877 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsinfo.name << ")!";
3878 errorText_ = errorStream_.str();
3883 ZeroMemory( audioPtr, dataLen );
3885 // Unlock the buffer
3886 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3887 if ( FAILED( result ) ) {
3890 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsinfo.name << ")!";
3891 errorText_ = errorStream_.str();
3895 dsBufferSize = bufferBytes;
3896 ohandle = (void *) input;
3897 bhandle = (void *) buffer;
3900 // Set various stream parameters
3901 DsHandle *handle = 0;
3902 stream_.nDeviceChannels[mode] = channels + firstChannel;
3903 stream_.nUserChannels[mode] = channels;
3904 stream_.bufferSize = *bufferSize;
3905 stream_.channelOffset[mode] = firstChannel;
3906 stream_.deviceInterleaved[mode] = true;
3907 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
3908 else stream_.userInterleaved = true;
3910 // Set flag for buffer conversion
3911 stream_.doConvertBuffer[mode] = false;
3912 if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
3913 stream_.doConvertBuffer[mode] = true;
3914 if (stream_.userFormat != stream_.deviceFormat[mode])
3915 stream_.doConvertBuffer[mode] = true;
3916 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
3917 stream_.nUserChannels[mode] > 1 )
3918 stream_.doConvertBuffer[mode] = true;
3920 // Allocate necessary internal buffers
3921 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
3922 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
3923 if ( stream_.userBuffer[mode] == NULL ) {
3924 errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
3928 if ( stream_.doConvertBuffer[mode] ) {
3930 bool makeBuffer = true;
3931 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
3932 if ( mode == INPUT ) {
3933 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
3934 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
3935 if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
3940 bufferBytes *= *bufferSize;
3941 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
3942 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
3943 if ( stream_.deviceBuffer == NULL ) {
3944 errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
3950 // Allocate our DsHandle structures for the stream.
3951 if ( stream_.apiHandle == 0 ) {
3953 handle = new DsHandle;
3955 catch ( std::bad_alloc& ) {
3956 errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";
3960 // Create a manual-reset event.
3961 handle->condition = CreateEvent( NULL, // no security
3962 TRUE, // manual-reset
3963 FALSE, // non-signaled initially
3965 stream_.apiHandle = (void *) handle;
3968 handle = (DsHandle *) stream_.apiHandle;
3969 handle->id[mode] = ohandle;
3970 handle->buffer[mode] = bhandle;
3971 handle->dsBufferSize[mode] = dsBufferSize;
3972 handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
3974 stream_.device[mode] = device;
3975 stream_.state = STREAM_STOPPED;
3976 if ( stream_.mode == OUTPUT && mode == INPUT )
3977 // We had already set up an output stream.
3978 stream_.mode = DUPLEX;
3980 stream_.mode = mode;
3981 stream_.nBuffers = nBuffers;
3982 stream_.sampleRate = sampleRate;
3984 // Setup the buffer conversion information structure.
3985 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
3987 // Setup the callback thread.
3989 stream_.callbackInfo.object = (void *) this;
3990 stream_.callbackInfo.isRunning = true;
3991 stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
3992 &stream_.callbackInfo, 0, &threadId );
3993 if ( stream_.callbackInfo.thread == 0 ) {
3994 errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
4002 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4003 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4004 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4005 if ( buffer ) buffer->Release();
4008 if ( handle->buffer[1] ) {
4009 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4010 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4011 if ( buffer ) buffer->Release();
4014 CloseHandle( handle->condition );
4016 stream_.apiHandle = 0;
4019 for ( int i=0; i<2; i++ ) {
4020 if ( stream_.userBuffer[i] ) {
4021 free( stream_.userBuffer[i] );
4022 stream_.userBuffer[i] = 0;
4026 if ( stream_.deviceBuffer ) {
4027 free( stream_.deviceBuffer );
4028 stream_.deviceBuffer = 0;
4034 void RtApiDs :: closeStream()
4036 if ( stream_.state == STREAM_CLOSED ) {
4037 errorText_ = "RtApiDs::closeStream(): no open stream to close!";
4038 error( RtError::WARNING );
4042 // Stop the callback thread.
4043 stream_.callbackInfo.isRunning = false;
4044 WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );
4045 CloseHandle( (HANDLE) stream_.callbackInfo.thread );
4047 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4049 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4050 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4051 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4058 if ( handle->buffer[1] ) {
4059 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4060 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4067 CloseHandle( handle->condition );
4069 stream_.apiHandle = 0;
4072 for ( int i=0; i<2; i++ ) {
4073 if ( stream_.userBuffer[i] ) {
4074 free( stream_.userBuffer[i] );
4075 stream_.userBuffer[i] = 0;
4079 if ( stream_.deviceBuffer ) {
4080 free( stream_.deviceBuffer );
4081 stream_.deviceBuffer = 0;
4084 stream_.mode = UNINITIALIZED;
4085 stream_.state = STREAM_CLOSED;
4088 void RtApiDs :: startStream()
4091 if ( stream_.state == STREAM_RUNNING ) {
4092 errorText_ = "RtApiDs::startStream(): the stream is already running!";
4093 error( RtError::WARNING );
4097 // Increase scheduler frequency on lesser windows (a side-effect of
4098 // increasing timer accuracy). On greater windows (Win2K or later),
4099 // this is already in effect.
4101 MUTEX_LOCK( &stream_.mutex );
4103 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4105 timeBeginPeriod( 1 );
4108 memset( &statistics, 0, sizeof( statistics ) );
4109 statistics.sampleRate = stream_.sampleRate;
4110 statistics.writeDeviceBufferLeadBytes = handle->dsPointerLeadTime[0];
4113 buffersRolling = false;
4114 duplexPrerollBytes = 0;
4116 if ( stream_.mode == DUPLEX ) {
4117 // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
4118 duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );
4122 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4123 //statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0] ) * stream_.nDeviceChannels[0];
4125 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4126 result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
4127 if ( FAILED( result ) ) {
4128 errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
4129 errorText_ = errorStream_.str();
4134 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4135 //statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1]) * stream_.nDeviceChannels[1];
4137 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4138 result = buffer->Start( DSCBSTART_LOOPING );
4139 if ( FAILED( result ) ) {
4140 errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
4141 errorText_ = errorStream_.str();
4146 handle->drainCounter = 0;
4147 handle->internalDrain = false;
4148 stream_.state = STREAM_RUNNING;
4151 MUTEX_UNLOCK( &stream_.mutex );
4153 if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
4156 void RtApiDs :: stopStream()
4159 if ( stream_.state == STREAM_STOPPED ) {
4160 errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
4161 error( RtError::WARNING );
4165 MUTEX_LOCK( &stream_.mutex );
4170 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4171 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4172 if ( handle->drainCounter == 0 ) {
4173 handle->drainCounter = 1;
4174 MUTEX_UNLOCK( &stream_.mutex );
4175 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
4176 ResetEvent( handle->condition );
4177 MUTEX_LOCK( &stream_.mutex );
4180 // Stop the buffer and clear memory
4181 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4182 result = buffer->Stop();
4183 if ( FAILED( result ) ) {
4184 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") stopping output buffer!";
4185 errorText_ = errorStream_.str();
4189 // Lock the buffer and clear it so that if we start to play again,
4190 // we won't have old data playing.
4191 result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
4192 if ( FAILED( result ) ) {
4193 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") locking output buffer!";
4194 errorText_ = errorStream_.str();
4198 // Zero the DS buffer
4199 ZeroMemory( audioPtr, dataLen );
4201 // Unlock the DS buffer
4202 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4203 if ( FAILED( result ) ) {
4204 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
4205 errorText_ = errorStream_.str();
4209 // If we start playing again, we must begin at beginning of buffer.
4210 handle->bufferPointer[0] = 0;
4213 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4214 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4218 result = buffer->Stop();
4219 if ( FAILED( result ) ) {
4220 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") stopping input buffer!";
4221 errorText_ = errorStream_.str();
4225 // Lock the buffer and clear it so that if we start to play again,
4226 // we won't have old data playing.
4227 result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
4228 if ( FAILED( result ) ) {
4229 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") locking input buffer!";
4230 errorText_ = errorStream_.str();
4234 // Zero the DS buffer
4235 ZeroMemory( audioPtr, dataLen );
4237 // Unlock the DS buffer
4238 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4239 if ( FAILED( result ) ) {
4240 errorStream_ << "RtApiDs::abortStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
4241 errorText_ = errorStream_.str();
4245 // If we start recording again, we must begin at beginning of buffer.
4246 handle->bufferPointer[1] = 0;
4250 timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
4251 stream_.state = STREAM_STOPPED;
4252 MUTEX_UNLOCK( &stream_.mutex );
4253 if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
4256 void RtApiDs :: abortStream()
4259 if ( stream_.state == STREAM_STOPPED ) {
4260 errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
4261 error( RtError::WARNING );
4265 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4266 handle->drainCounter = 1;
4271 void RtApiDs :: callbackEvent()
4273 if ( stream_.state == STREAM_STOPPED ) {
4274 Sleep(50); // sleep 50 milliseconds
4278 if ( stream_.state == STREAM_CLOSED ) {
4279 errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
4280 error( RtError::WARNING );
4284 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
4285 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4287 // Check if we were draining the stream and signal is finished.
4288 if ( handle->drainCounter > stream_.nBuffers + 2 ) {
4289 if ( handle->internalDrain == false )
4290 SetEvent( handle->condition );
4296 MUTEX_LOCK( &stream_.mutex );
4298 // Invoke user callback to get fresh output data UNLESS we are
4300 if ( handle->drainCounter == 0 ) {
4301 RtAudioCallback callback = (RtAudioCallback) info->callback;
4302 double streamTime = getStreamTime();
4303 RtAudioStreamStatus status = 0;
4304 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
4305 status |= RTAUDIO_OUTPUT_UNDERFLOW;
4306 handle->xrun[0] = false;
4308 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
4309 status |= RTAUDIO_INPUT_OVERFLOW;
4310 handle->xrun[1] = false;
4312 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
4313 stream_.bufferSize, streamTime, status, info->userData );
4314 if ( handle->drainCounter == 2 ) {
4315 MUTEX_UNLOCK( &stream_.mutex );
4319 else if ( handle->drainCounter == 1 )
4320 handle->internalDrain = true;
4324 DWORD currentWritePos, safeWritePos;
4325 DWORD currentReadPos, safeReadPos;
4329 #ifdef GENERATE_DEBUG_LOG
4330 DWORD writeTime, readTime;
4333 LPVOID buffer1 = NULL;
4334 LPVOID buffer2 = NULL;
4335 DWORD bufferSize1 = 0;
4336 DWORD bufferSize2 = 0;
4341 if ( stream_.mode == DUPLEX && !buffersRolling ) {
4342 assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4344 // It takes a while for the devices to get rolling. As a result,
4345 // there's no guarantee that the capture and write device pointers
4346 // will move in lockstep. Wait here for both devices to start
4347 // rolling, and then set our buffer pointers accordingly.
4348 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
4349 // bytes later than the write buffer.
4351 // Stub: a serious risk of having a pre-emptive scheduling round
4352 // take place between the two GetCurrentPosition calls... but I'm
4353 // really not sure how to solve the problem. Temporarily boost to
4354 // Realtime priority, maybe; but I'm not sure what priority the
4355 // DirectSound service threads run at. We *should* be roughly
4356 // within a ms or so of correct.
4358 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4359 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4361 DWORD initialWritePos, initialSafeWritePos;
4362 DWORD initialReadPos, initialSafeReadPos;
4364 result = dsWriteBuffer->GetCurrentPosition( &initialWritePos, &initialSafeWritePos );
4365 if ( FAILED( result ) ) {
4366 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4367 errorText_ = errorStream_.str();
4368 error( RtError::SYSTEM_ERROR );
4370 result = dsCaptureBuffer->GetCurrentPosition( &initialReadPos, &initialSafeReadPos );
4371 if ( FAILED( result ) ) {
4372 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4373 errorText_ = errorStream_.str();
4374 error( RtError::SYSTEM_ERROR );
4377 result = dsWriteBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos );
4378 if ( FAILED( result ) ) {
4379 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4380 errorText_ = errorStream_.str();
4381 error( RtError::SYSTEM_ERROR );
4383 result = dsCaptureBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4384 if ( FAILED( result ) ) {
4385 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4386 errorText_ = errorStream_.str();
4387 error( RtError::SYSTEM_ERROR );
4389 if ( safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos ) break;
4393 assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4395 buffersRolling = true;
4396 handle->bufferPointer[0] = ( safeWritePos + handle->dsPointerLeadTime[0] );
4397 handle->bufferPointer[1] = safeReadPos;
4400 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4402 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4404 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
4405 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4406 bufferBytes *= formatBytes( stream_.userFormat );
4407 memset( stream_.userBuffer[0], 0, bufferBytes );
4410 // Setup parameters and do buffer conversion if necessary.
4411 if ( stream_.doConvertBuffer[0] ) {
4412 buffer = stream_.deviceBuffer;
4413 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
4414 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
4415 bufferBytes *= formatBytes( stream_.deviceFormat[0] );
4418 buffer = stream_.userBuffer[0];
4419 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4420 bufferBytes *= formatBytes( stream_.userFormat );
4423 // No byte swapping necessary in DirectSound implementation.
4425 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
4426 // unsigned. So, we need to convert our signed 8-bit data here to
4428 if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
4429 for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
4431 DWORD dsBufferSize = handle->dsBufferSize[0];
4432 nextWritePos = handle->bufferPointer[0];
4436 // Find out where the read and "safe write" pointers are.
4437 result = dsBuffer->GetCurrentPosition( ¤tWritePos, &safeWritePos );
4438 if ( FAILED( result ) ) {
4439 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4440 errorText_ = errorStream_.str();
4441 error( RtError::SYSTEM_ERROR );
4444 leadPos = safeWritePos + handle->dsPointerLeadTime[0];
4445 if ( leadPos > dsBufferSize ) leadPos -= dsBufferSize;
4446 if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
4447 endWrite = nextWritePos + bufferBytes;
4449 // Check whether the entire write region is behind the play pointer.
4450 if ( leadPos >= endWrite ) break;
4452 // If we are here, then we must wait until the play pointer gets
4453 // beyond the write region. The approach here is to use the
4454 // Sleep() function to suspend operation until safePos catches
4455 // up. Calculate number of milliseconds to wait as:
4456 // time = distance * (milliseconds/second) * fudgefactor /
4457 // ((bytes/sample) * (samples/second))
4458 // A "fudgefactor" less than 1 is used because it was found
4459 // that sleeping too long was MUCH worse than sleeping for
4460 // several shorter periods.
4461 double millis = ( endWrite - leadPos ) * 900.0;
4462 millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
4463 if ( millis < 1.0 ) millis = 1.0;
4464 if ( millis > 50.0 ) {
4465 static int nOverruns = 0;
4468 Sleep( (DWORD) millis );
4471 //if ( statistics.writeDeviceSafeLeadBytes < dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] ) ) {
4472 // statistics.writeDeviceSafeLeadBytes = dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] );
4475 if ( dsPointerBetween( nextWritePos, safeWritePos, currentWritePos, dsBufferSize )
4476 || dsPointerBetween( endWrite, safeWritePos, currentWritePos, dsBufferSize ) ) {
4477 // We've strayed into the forbidden zone ... resync the read pointer.
4478 //++statistics.numberOfWriteUnderruns;
4479 handle->xrun[0] = true;
4480 nextWritePos = safeWritePos + handle->dsPointerLeadTime[0] - bufferBytes + dsBufferSize;
4481 while ( nextWritePos >= dsBufferSize ) nextWritePos -= dsBufferSize;
4482 handle->bufferPointer[0] = nextWritePos;
4483 endWrite = nextWritePos + bufferBytes;
4486 // Lock free space in the buffer
4487 result = dsBuffer->Lock( nextWritePos, bufferBytes, &buffer1,
4488 &bufferSize1, &buffer2, &bufferSize2, 0 );
4489 if ( FAILED( result ) ) {
4490 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
4491 errorText_ = errorStream_.str();
4492 error( RtError::SYSTEM_ERROR );
4495 // Copy our buffer into the DS buffer
4496 CopyMemory( buffer1, buffer, bufferSize1 );
4497 if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
4499 // Update our buffer offset and unlock sound buffer
4500 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4501 if ( FAILED( result ) ) {
4502 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
4503 errorText_ = errorStream_.str();
4504 error( RtError::SYSTEM_ERROR );
4506 nextWritePos = ( nextWritePos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4507 handle->bufferPointer[0] = nextWritePos;
4509 if ( handle->drainCounter ) {
4510 handle->drainCounter++;
4515 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4517 // Setup parameters.
4518 if ( stream_.doConvertBuffer[1] ) {
4519 buffer = stream_.deviceBuffer;
4520 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
4521 bufferBytes *= formatBytes( stream_.deviceFormat[1] );
4524 buffer = stream_.userBuffer[1];
4525 bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
4526 bufferBytes *= formatBytes( stream_.userFormat );
4529 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4530 long nextReadPos = handle->bufferPointer[1];
4531 DWORD dsBufferSize = handle->dsBufferSize[1];
4533 // Find out where the write and "safe read" pointers are.
4534 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4535 if ( FAILED( result ) ) {
4536 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4537 errorText_ = errorStream_.str();
4538 error( RtError::SYSTEM_ERROR );
4541 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4542 DWORD endRead = nextReadPos + bufferBytes;
4544 // Handling depends on whether we are INPUT or DUPLEX.
4545 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
4546 // then a wait here will drag the write pointers into the forbidden zone.
4548 // In DUPLEX mode, rather than wait, we will back off the read pointer until
4549 // it's in a safe position. This causes dropouts, but it seems to be the only
4550 // practical way to sync up the read and write pointers reliably, given the
4551 // the very complex relationship between phase and increment of the read and write
4554 // In order to minimize audible dropouts in DUPLEX mode, we will
4555 // provide a pre-roll period of 0.5 seconds in which we return
4556 // zeros from the read buffer while the pointers sync up.
4558 if ( stream_.mode == DUPLEX ) {
4559 if ( safeReadPos < endRead ) {
4560 if ( duplexPrerollBytes <= 0 ) {
4561 // Pre-roll time over. Be more agressive.
4562 int adjustment = endRead-safeReadPos;
4564 handle->xrun[1] = true;
4565 //++statistics.numberOfReadOverruns;
4567 // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
4568 // and perform fine adjustments later.
4569 // - small adjustments: back off by twice as much.
4570 if ( adjustment >= 2*bufferBytes )
4571 nextReadPos = safeReadPos-2*bufferBytes;
4573 nextReadPos = safeReadPos-bufferBytes-adjustment;
4575 //statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos;
4576 //if ( statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize;
4577 if ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4581 // In pre=roll time. Just do it.
4582 nextReadPos = safeReadPos-bufferBytes;
4583 while ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4585 endRead = nextReadPos + bufferBytes;
4588 else { // mode == INPUT
4589 while ( safeReadPos < endRead ) {
4590 // See comments for playback.
4591 double millis = (endRead - safeReadPos) * 900.0;
4592 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
4593 if ( millis < 1.0 ) millis = 1.0;
4594 Sleep( (DWORD) millis );
4596 // Wake up, find out where we are now
4597 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
4598 if ( FAILED( result ) ) {
4599 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4600 errorText_ = errorStream_.str();
4601 error( RtError::SYSTEM_ERROR );
4604 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4608 //if (statistics.readDeviceSafeLeadBytes < dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize ) )
4609 // statistics.readDeviceSafeLeadBytes = dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize );
4611 // Lock free space in the buffer
4612 result = dsBuffer->Lock( nextReadPos, bufferBytes, &buffer1,
4613 &bufferSize1, &buffer2, &bufferSize2, 0 );
4614 if ( FAILED( result ) ) {
4615 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
4616 errorText_ = errorStream_.str();
4617 error( RtError::SYSTEM_ERROR );
4620 if ( duplexPrerollBytes <= 0 ) {
4621 // Copy our buffer into the DS buffer
4622 CopyMemory( buffer, buffer1, bufferSize1 );
4623 if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
4626 memset( buffer, 0, bufferSize1 );
4627 if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
4628 duplexPrerollBytes -= bufferSize1 + bufferSize2;
4631 // Update our buffer offset and unlock sound buffer
4632 nextReadPos = ( nextReadPos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4633 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4634 if ( FAILED( result ) ) {
4635 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
4636 errorText_ = errorStream_.str();
4637 error( RtError::SYSTEM_ERROR );
4639 handle->bufferPointer[1] = nextReadPos;
4641 // No byte swapping necessary in DirectSound implementation.
4643 // If necessary, convert 8-bit data from unsigned to signed.
4644 if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
4645 for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
4647 // Do buffer conversion if necessary.
4648 if ( stream_.doConvertBuffer[1] )
4649 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
4651 #ifdef GENERATE_DEBUG_LOG
4652 if ( currentDebugLogEntry < debugLog.size() )
4654 TTickRecord &r = debugLog[currentDebugLogEntry++];
4655 r.currentReadPointer = currentReadPos;
4656 r.safeReadPointer = safeReadPos;
4657 r.currentWritePointer = currentWritePos;
4658 r.safeWritePointer = safeWritePos;
4659 r.readTime = readTime;
4660 r.writeTime = writeTime;
4661 r.nextReadPointer = handles[1].bufferPointer;
4662 r.nextWritePointer = handles[0].bufferPointer;
4667 MUTEX_UNLOCK( &stream_.mutex );
4669 RtApi::tickStreamTime();
4672 // Definitions for utility functions and callbacks
4673 // specific to the DirectSound implementation.
4675 extern "C" unsigned __stdcall callbackHandler( void *ptr )
4677 CallbackInfo *info = (CallbackInfo *) ptr;
4678 RtApiDs *object = (RtApiDs *) info->object;
4679 bool* isRunning = &info->isRunning;
4681 while ( *isRunning == true ) {
4682 object->callbackEvent();
4691 std::string convertTChar( LPCTSTR name )
4695 #if defined( UNICODE ) || defined( _UNICODE )
4696 // Yes, this conversion doesn't make sense for two-byte characters
4697 // but RtAudio is currently written to return an std::string of
4698 // one-byte chars for the device name.
4699 for ( unsigned int i=0; i<wcslen( name ); i++ )
4700 s.push_back( name[i] );
4702 s.append( std::string( name ) );
4708 static BOOL CALLBACK deviceQueryCallback( LPGUID lpguid,
4709 LPCTSTR description,
4713 EnumInfo *info = (EnumInfo *) lpContext;
4716 if ( info->isInput == true ) {
4718 LPDIRECTSOUNDCAPTURE object;
4720 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
4721 if ( hr != DS_OK ) return TRUE;
4723 caps.dwSize = sizeof(caps);
4724 hr = object->GetCaps( &caps );
4725 if ( hr == DS_OK ) {
4726 if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
4733 LPDIRECTSOUND object;
4734 hr = DirectSoundCreate( lpguid, &object, NULL );
4735 if ( hr != DS_OK ) return TRUE;
4737 caps.dwSize = sizeof(caps);
4738 hr = object->GetCaps( &caps );
4739 if ( hr == DS_OK ) {
4740 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
4746 if ( info->getDefault && lpguid == NULL ) return FALSE;
4748 if ( info->findIndex && info->counter > info->index ) {
4750 info->name = convertTChar( description );
4757 static char* getErrorString( int code )
4761 case DSERR_ALLOCATED:
4762 return "Already allocated";
4764 case DSERR_CONTROLUNAVAIL:
4765 return "Control unavailable";
4767 case DSERR_INVALIDPARAM:
4768 return "Invalid parameter";
4770 case DSERR_INVALIDCALL:
4771 return "Invalid call";
4774 return "Generic error";
4776 case DSERR_PRIOLEVELNEEDED:
4777 return "Priority level needed";
4779 case DSERR_OUTOFMEMORY:
4780 return "Out of memory";
4782 case DSERR_BADFORMAT:
4783 return "The sample rate or the channel format is not supported";
4785 case DSERR_UNSUPPORTED:
4786 return "Not supported";
4788 case DSERR_NODRIVER:
4791 case DSERR_ALREADYINITIALIZED:
4792 return "Already initialized";
4794 case DSERR_NOAGGREGATION:
4795 return "No aggregation";
4797 case DSERR_BUFFERLOST:
4798 return "Buffer lost";
4800 case DSERR_OTHERAPPHASPRIO:
4801 return "Another application already has priority";
4803 case DSERR_UNINITIALIZED:
4804 return "Uninitialized";
4807 return "DirectSound unknown error";
4810 //******************** End of __WINDOWS_DS__ *********************//
4814 #if defined(__LINUX_ALSA__)
4816 #include <alsa/asoundlib.h>
4819 // A structure to hold various information related to the ALSA API
4822 snd_pcm_t *handles[2];
4827 :synchronized(false) { xrun[0] = false; xrun[1] = false; }
4830 extern "C" void *alsaCallbackHandler( void * ptr );
4832 RtApiAlsa :: RtApiAlsa()
4834 // Nothing to do here.
4837 RtApiAlsa :: ~RtApiAlsa()
4839 if ( stream_.state != STREAM_CLOSED ) closeStream();
4842 unsigned int RtApiAlsa :: getDeviceCount( void )
4844 unsigned nDevices = 0;
4845 int result, subdevice, card;
4849 // Count cards and devices
4851 snd_card_next( &card );
4852 while ( card >= 0 ) {
4853 sprintf( name, "hw:%d", card );
4854 result = snd_ctl_open( &handle, name, 0 );
4856 errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
4857 errorText_ = errorStream_.str();
4858 error( RtError::WARNING );
4863 result = snd_ctl_pcm_next_device( handle, &subdevice );
4865 errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
4866 errorText_ = errorStream_.str();
4867 error( RtError::WARNING );
4870 if ( subdevice < 0 )
4875 snd_ctl_close( handle );
4876 snd_card_next( &card );
// Probe the capabilities (channel counts, supported sample rates, native
// data formats) of the ALSA device with index 'device'.  Devices are
// enumerated in the same order used by getDeviceCount(): walk cards with
// snd_card_next() and each card's PCM subdevices with
// snd_ctl_pcm_next_device().  Errors during probing are reported as
// RtError::WARNING and an incomplete 'info' is returned.
// NOTE(review): this chunk's extraction dropped some original lines (e.g.
// several "if ( result < 0 ) {" guards, closing braces and the
// "probeParameters:" label) — the comments below describe only the flow
// that is visible here.
4882 RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
4884 RtAudio::DeviceInfo info;
// Mark unprobed up front; presumably set true before the final return — TODO confirm (line elided).
4885 info.probed = false;
4887 unsigned nDevices = 0;
4888 int result, subdevice, card;
// Enumerate cards/subdevices until the requested index is reached;
// 'name' ends up holding the "hw:card,subdevice" identifier to open.
4892 // Count cards and devices
4894 snd_card_next( &card );
4895 while ( card >= 0 ) {
4896 sprintf( name, "hw:%d", card );
// Open the card's control interface non-blocking so a busy card doesn't hang the probe.
4897 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
4899 errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
4900 errorText_ = errorStream_.str();
4901 error( RtError::WARNING );
4906 result = snd_ctl_pcm_next_device( chandle, &subdevice );
4908 errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
4909 errorText_ = errorStream_.str();
4910 error( RtError::WARNING );
// subdevice < 0 means no more PCM devices on this card.
4913 if ( subdevice < 0 ) break;
4914 if ( nDevices == device ) {
// Found the requested device: remember its hw name for the probes below.
4915 sprintf( name, "hw:%d,%d", card, subdevice );
4921 snd_ctl_close( chandle );
4922 snd_card_next( &card );
// Validate the requested index against what was enumerated.
4925 if ( nDevices == 0 ) {
4926 errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
4927 error( RtError::INVALID_USE );
4930 if ( device >= nDevices ) {
4931 errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
4932 error( RtError::INVALID_USE );
// Stack-allocated ALSA info/parameter structures (alloca variants need no free).
4937 int openMode = SND_PCM_ASYNC;
4938 snd_pcm_stream_t stream;
4939 snd_pcm_info_t *pcminfo;
4940 snd_pcm_info_alloca( &pcminfo );
4942 snd_pcm_hw_params_t *params;
// NOTE(review): "¶ms" below is mis-encoded text for "&params" — fix the encoding, not the logic.
4943 snd_pcm_hw_params_alloca( ¶ms );
// --- Playback probe: open the device for output and read its max channel count.
4945 // First try for playback
4946 stream = SND_PCM_STREAM_PLAYBACK;
4947 snd_pcm_info_set_device( pcminfo, subdevice );
4948 snd_pcm_info_set_subdevice( pcminfo, 0 );
4949 snd_pcm_info_set_stream( pcminfo, stream );
4951 result = snd_ctl_pcm_info( chandle, pcminfo );
4953 // Device probably doesn't support playback.
4957 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
4959 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
4960 errorText_ = errorStream_.str();
4961 error( RtError::WARNING );
4965 // The device is open ... fill the parameter structure.
4966 result = snd_pcm_hw_params_any( phandle, params );
4968 snd_pcm_close( phandle );
4969 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
4970 errorText_ = errorStream_.str();
4971 error( RtError::WARNING );
4975 // Get output channel information.
4977 result = snd_pcm_hw_params_get_channels_max( params, &value );
4979 snd_pcm_close( phandle );
4980 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
4981 errorText_ = errorStream_.str();
4982 error( RtError::WARNING );
4985 info.outputChannels = value;
4986 snd_pcm_close( phandle );
// --- Capture probe: repeat the same sequence for input.  On each failure
// path: if playback also failed (outputChannels == 0) give up and return
// the partial info, otherwise jump ahead to the rate/format probe.
4989 // Now try for capture
4990 stream = SND_PCM_STREAM_CAPTURE;
4991 snd_pcm_info_set_stream( pcminfo, stream );
4993 result = snd_ctl_pcm_info( chandle, pcminfo );
4994 snd_ctl_close( chandle );
4996 // Device probably doesn't support capture.
4997 if ( info.outputChannels == 0 ) return info;
// Target label "probeParameters:" is not visible in this chunk (elided line).
4998 goto probeParameters;
5001 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5003 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5004 errorText_ = errorStream_.str();
5005 error( RtError::WARNING );
5006 if ( info.outputChannels == 0 ) return info;
5007 goto probeParameters;
5010 // The device is open ... fill the parameter structure.
5011 result = snd_pcm_hw_params_any( phandle, params );
5013 snd_pcm_close( phandle );
5014 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5015 errorText_ = errorStream_.str();
5016 error( RtError::WARNING );
5017 if ( info.outputChannels == 0 ) return info;
5018 goto probeParameters;
5021 result = snd_pcm_hw_params_get_channels_max( params, &value );
5023 snd_pcm_close( phandle );
5024 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
5025 errorText_ = errorStream_.str();
5026 error( RtError::WARNING );
5027 if ( info.outputChannels == 0 ) return info;
5028 goto probeParameters;
5030 info.inputChannels = value;
5031 snd_pcm_close( phandle );
// Duplex channel count is the smaller of the two directions.
5033 // If device opens for both playback and capture, we determine the channels.
5034 if ( info.outputChannels > 0 && info.inputChannels > 0 )
5035 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
5037 // ALSA doesn't provide default devices so we'll use the first available one.
5038 if ( device == 0 && info.outputChannels > 0 )
5039 info.isDefaultOutput = true;
5040 if ( device == 0 && info.inputChannels > 0 )
5041 info.isDefaultInput = true;
// --- Rate/format probe ("probeParameters" section): reopen in the
// direction with more channels and interrogate rates and formats.
5044 // At this point, we just need to figure out the supported data
5045 // formats and sample rates. We'll proceed by opening the device in
5046 // the direction with the maximum number of channels, or playback if
5047 // they are equal. This might limit our sample rate options, but so
5050 if ( info.outputChannels >= info.inputChannels )
5051 stream = SND_PCM_STREAM_PLAYBACK;
5053 stream = SND_PCM_STREAM_CAPTURE;
5054 snd_pcm_info_set_stream( pcminfo, stream );
5056 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5058 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5059 errorText_ = errorStream_.str();
5060 error( RtError::WARNING );
5064 // The device is open ... fill the parameter structure.
5065 result = snd_pcm_hw_params_any( phandle, params );
5067 snd_pcm_close( phandle );
5068 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5069 errorText_ = errorStream_.str();
5070 error( RtError::WARNING );
// Check each rate in the class-wide SAMPLE_RATES table (MAX_SAMPLE_RATES entries).
5074 // Test our discrete set of sample rate values.
5075 info.sampleRates.clear();
5076 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
5077 if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 )
5078 info.sampleRates.push_back( SAMPLE_RATES[i] );
5080 if ( info.sampleRates.size() == 0 ) {
5081 snd_pcm_close( phandle );
5082 errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
5083 errorText_ = errorStream_.str();
5084 error( RtError::WARNING );
// Accumulate the bitmask of RtAudio formats the hardware accepts natively.
5088 // Probe the supported data formats ... we don't care about endian-ness just yet
5089 snd_pcm_format_t format;
5090 info.nativeFormats = 0;
5091 format = SND_PCM_FORMAT_S8;
5092 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5093 info.nativeFormats |= RTAUDIO_SINT8;
5094 format = SND_PCM_FORMAT_S16;
5095 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5096 info.nativeFormats |= RTAUDIO_SINT16;
5097 format = SND_PCM_FORMAT_S24;
5098 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5099 info.nativeFormats |= RTAUDIO_SINT24;
5100 format = SND_PCM_FORMAT_S32;
5101 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5102 info.nativeFormats |= RTAUDIO_SINT32;
5103 format = SND_PCM_FORMAT_FLOAT;
5104 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5105 info.nativeFormats |= RTAUDIO_FLOAT32;
5106 format = SND_PCM_FORMAT_FLOAT64;
5107 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5108 info.nativeFormats |= RTAUDIO_FLOAT64;
5110 // Check that we have at least one supported format
5111 if ( info.nativeFormats == 0 ) {
5112 errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
5113 errorText_ = errorStream_.str();
5114 error( RtError::WARNING );
// Replace the terse "hw:card,subdevice" id with a human-readable card name.
5118 // Get the device name
5120 result = snd_card_get_name( card, &cardname );
5122 sprintf( name, "hw:%s,%d", cardname, subdevice );
5125 // That's all ... close the device and return
5126 snd_pcm_close( phandle );
// Open and configure an ALSA PCM device for one direction (OUTPUT or
// INPUT) of a stream: locate the device, negotiate access mode, data
// format, sample rate, channel count, period count and period size,
// install hw/sw params, allocate conversion buffers, and finally (for the
// first direction opened) spawn the realtime callback thread.  Returns
// SUCCESS/FAILURE; on failure the error text is set and resources opened
// so far are released.
// NOTE(review): the extraction dropped some original lines (error-guard
// "if" headers, closing braces, "return FAILURE" statements, goto labels
// such as the error-cleanup target near the end) — comments describe only
// the visible flow.
5131 bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
5132 unsigned int firstChannel, unsigned int sampleRate,
5133 RtAudioFormat format, unsigned int *bufferSize,
5134 RtAudio::StreamOptions *options )
// Debug builds attach an snd_output handle so hw/sw params can be dumped to stderr.
5137 #if defined(__RTAUDIO_DEBUG__)
5139 snd_output_stdio_attach(&out, stderr, 0);
5142 // I'm not using the "plug" interface ... too much inconsistent behavior.
5144 unsigned nDevices = 0;
5145 int result, subdevice, card;
// Re-enumerate cards/subdevices (same walk as getDeviceCount/getDeviceInfo)
// to translate the numeric device index into an "hw:card,subdevice" name.
5149 // Count cards and devices
5151 snd_card_next( &card );
5152 while ( card >= 0 ) {
5153 sprintf( name, "hw:%d", card );
5154 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5156 errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5157 errorText_ = errorStream_.str();
5162 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5163 if ( result < 0 ) break;
5164 if ( subdevice < 0 ) break;
5165 if ( nDevices == device ) {
5166 sprintf( name, "hw:%d,%d", card, subdevice );
5171 snd_ctl_close( chandle );
5172 snd_card_next( &card );
5175 if ( nDevices == 0 ) {
5176 // This should not happen because a check is made before this function is called.
5177 errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
5181 if ( device >= nDevices ) {
5182 // This should not happen because a check is made before this function is called.
5183 errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
// Direction of this open maps directly onto the ALSA stream type.
5189 snd_pcm_stream_t stream;
5190 if ( mode == OUTPUT )
5191 stream = SND_PCM_STREAM_PLAYBACK;
5193 stream = SND_PCM_STREAM_CAPTURE;
// Blocking open here (no SND_PCM_NONBLOCK) — the callback thread does blocking reads/writes.
5196 int openMode = SND_PCM_ASYNC;
5197 result = snd_pcm_open( &phandle, name, stream, openMode );
5199 if ( mode == OUTPUT )
5200 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
5202 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
5203 errorText_ = errorStream_.str();
5207 // Fill the parameter structure.
5208 snd_pcm_hw_params_t *hw_params;
5209 snd_pcm_hw_params_alloca( &hw_params );
5210 result = snd_pcm_hw_params_any( phandle, hw_params );
5212 snd_pcm_close( phandle );
5213 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
5214 errorText_ = errorStream_.str();
5218 #if defined(__RTAUDIO_DEBUG__)
5219 fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
5220 snd_pcm_hw_params_dump( hw_params, out );
// Try the user's preferred interleaving first, fall back to the other
// layout; record both the user-side and the device-side layout so the
// conversion flags below know whether a shuffle is needed.
5223 // Set access ... check user preference.
5224 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
5225 stream_.userInterleaved = false;
5226 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5228 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5229 stream_.deviceInterleaved[mode] = true;
5232 stream_.deviceInterleaved[mode] = false;
5235 stream_.userInterleaved = true;
5236 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5238 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5239 stream_.deviceInterleaved[mode] = false;
5242 stream_.deviceInterleaved[mode] = true;
5246 snd_pcm_close( phandle );
5247 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
5248 errorText_ = errorStream_.str();
// Map the requested RtAudio format to the ALSA equivalent; if the device
// rejects it, fall back through the remaining formats from widest
// (FLOAT64) down to narrowest (S8) and let convertBuffer() bridge the gap.
5252 // Determine how to set the device format.
5253 stream_.userFormat = format;
5254 snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
5256 if ( format == RTAUDIO_SINT8 )
5257 deviceFormat = SND_PCM_FORMAT_S8;
5258 else if ( format == RTAUDIO_SINT16 )
5259 deviceFormat = SND_PCM_FORMAT_S16;
5260 else if ( format == RTAUDIO_SINT24 )
5261 deviceFormat = SND_PCM_FORMAT_S24;
5262 else if ( format == RTAUDIO_SINT32 )
5263 deviceFormat = SND_PCM_FORMAT_S32;
5264 else if ( format == RTAUDIO_FLOAT32 )
5265 deviceFormat = SND_PCM_FORMAT_FLOAT;
5266 else if ( format == RTAUDIO_FLOAT64 )
5267 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5269 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
5270 stream_.deviceFormat[mode] = format;
5274 // The user requested format is not natively supported by the device.
5275 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5276 if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
5277 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5281 deviceFormat = SND_PCM_FORMAT_FLOAT;
5282 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5283 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5287 deviceFormat = SND_PCM_FORMAT_S32;
5288 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5289 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5293 deviceFormat = SND_PCM_FORMAT_S24;
5294 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5295 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
5299 deviceFormat = SND_PCM_FORMAT_S16;
5300 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5301 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5305 deviceFormat = SND_PCM_FORMAT_S8;
5306 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5307 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
5311 // If we get here, no supported format was found.
5312 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
5313 errorText_ = errorStream_.str();
5317 result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
5319 snd_pcm_close( phandle );
5320 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
5321 errorText_ = errorStream_.str();
// snd_pcm_format_cpu_endian returns 1 for CPU-endian formats, 0 for
// opposite-endian (needs swap), negative on error.  S8 has no endianness.
5325 // Determine whether byte-swaping is necessary.
5326 stream_.doByteSwap[mode] = false;
5327 if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
5328 result = snd_pcm_format_cpu_endian( deviceFormat );
5330 stream_.doByteSwap[mode] = true;
5331 else if (result < 0) {
5332 snd_pcm_close( phandle );
5333 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
5334 errorText_ = errorStream_.str();
// "near" variant: the device may adjust sampleRate to its closest supported value.
5339 // Set the sample rate.
5340 result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
5342 snd_pcm_close( phandle );
5343 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
5344 errorText_ = errorStream_.str();
5348 // Determine the number of channels for this device. We support a possible
5349 // minimum device channel number > than the value requested by the user.
5350 stream_.nUserChannels[mode] = channels;
5352 result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
5353 unsigned int deviceChannels = value;
// Device must provide at least channels + firstChannel (channel offset).
5354 if ( result < 0 || deviceChannels < channels + firstChannel ) {
5355 snd_pcm_close( phandle );
5356 errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
5357 errorText_ = errorStream_.str();
5361 result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
5363 snd_pcm_close( phandle );
5364 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
5365 errorText_ = errorStream_.str();
// Open with the device minimum, bumped up to cover the user's request.
5368 deviceChannels = value;
5369 if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
5370 stream_.nDeviceChannels[mode] = deviceChannels;
5372 // Set the device channels.
5373 result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
5375 snd_pcm_close( phandle );
5376 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
5377 errorText_ = errorStream_.str();
5381 // Set the buffer number, which in ALSA is referred to as the "period".
5383 unsigned int periods = 0;
5384 if ( options ) periods = options->numberOfBuffers;
5385 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
5386 // Even though the hardware might allow 1 buffer, it won't work reliably.
5387 if ( periods < 2 ) periods = 2;
5388 result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
5390 snd_pcm_close( phandle );
5391 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
5392 errorText_ = errorStream_.str();
// Period size in frames; the device may adjust it, and the adjusted value
// is written back through *bufferSize so the caller sees the real size.
5396 // Set the buffer (or period) size.
5397 snd_pcm_uframes_t periodSize = *bufferSize;
5398 result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
5400 snd_pcm_close( phandle );
5401 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
5402 errorText_ = errorStream_.str();
5405 *bufferSize = periodSize;
5407 // If attempting to setup a duplex stream, the bufferSize parameter
5408 // MUST be the same in both directions!
5409 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
5410 errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
5411 errorText_ = errorStream_.str();
5415 stream_.bufferSize = *bufferSize;
5417 // Install the hardware configuration
5418 result = snd_pcm_hw_params( phandle, hw_params );
5420 snd_pcm_close( phandle );
5421 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5422 errorText_ = errorStream_.str();
5426 #if defined(__RTAUDIO_DEBUG__)
5427 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
5428 snd_pcm_hw_params_dump( hw_params, out );
// Software params: start once a full period is queued, effectively never
// stop on xrun (huge stop threshold), and silence-fill the whole buffer.
5431 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
5432 snd_pcm_sw_params_t *sw_params = NULL;
5433 snd_pcm_sw_params_alloca( &sw_params );
5434 snd_pcm_sw_params_current( phandle, sw_params );
5435 snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
5436 snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, 0x7fffffff );
5437 snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
5438 snd_pcm_sw_params_set_silence_size( phandle, sw_params, INT_MAX );
5439 result = snd_pcm_sw_params( phandle, sw_params );
5441 snd_pcm_close( phandle );
5442 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5443 errorText_ = errorStream_.str();
5447 #if defined(__RTAUDIO_DEBUG__)
5448 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
5449 snd_pcm_sw_params_dump( sw_params, out );
// A conversion pass is needed if format, channel count, or interleaving
// differs between the user side and the device side.
5452 // Set flags for buffer conversion
5453 stream_.doConvertBuffer[mode] = false;
5454 if ( stream_.userFormat != stream_.deviceFormat[mode] )
5455 stream_.doConvertBuffer[mode] = true;
5456 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
5457 stream_.doConvertBuffer[mode] = true;
5458 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
5459 stream_.nUserChannels[mode] > 1 )
5460 stream_.doConvertBuffer[mode] = true;
// One AlsaHandle is shared by both directions: handles[0] = playback,
// handles[1] = capture (indexed by the StreamMode enum).
5462 // Allocate the ApiHandle if necessary and then save.
5463 AlsaHandle *apiInfo = 0;
5464 if ( stream_.apiHandle == 0 ) {
5466 apiInfo = (AlsaHandle *) new AlsaHandle;
5468 catch ( std::bad_alloc& ) {
5469 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
5472 stream_.apiHandle = (void *) apiInfo;
5473 apiInfo->handles[0] = 0;
5474 apiInfo->handles[1] = 0;
5477 apiInfo = (AlsaHandle *) stream_.apiHandle;
5479 apiInfo->handles[mode] = phandle;
// User buffer: userChannels * bufferSize frames * bytes-per-sample.
5481 // Allocate necessary internal buffers.
5482 unsigned long bufferBytes;
5483 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
5484 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
5485 if ( stream_.userBuffer[mode] == NULL ) {
5486 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
// Device buffer: shared between directions in duplex mode; only
// (re)allocate when the existing output-side buffer is too small.
5490 if ( stream_.doConvertBuffer[mode] ) {
5492 bool makeBuffer = true;
5493 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
5494 if ( mode == INPUT ) {
5495 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5496 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
5497 if ( bufferBytes <= bytesOut ) makeBuffer = false;
5502 bufferBytes *= *bufferSize;
5503 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
5504 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
5505 if ( stream_.deviceBuffer == NULL ) {
5506 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
5512 stream_.sampleRate = sampleRate;
5513 stream_.nBuffers = periods;
5514 stream_.device[mode] = device;
5515 stream_.state = STREAM_STOPPED;
5517 // Setup the buffer conversion information structure.
5518 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
// Second direction of a duplex stream: upgrade the mode and try to link
// the two PCMs so they start/stop together ("synchronized").
5520 // Setup thread if necessary.
5521 if ( stream_.mode == OUTPUT && mode == INPUT ) {
5522 // We had already set up an output stream.
5523 stream_.mode = DUPLEX;
5524 // Link the streams if possible.
5525 apiInfo->synchronized = false;
5526 if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
5527 apiInfo->synchronized = true;
5529 errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
5530 error( RtError::WARNING );
5534 stream_.mode = mode;
5536 // Setup callback thread.
5537 stream_.callbackInfo.object = (void *) this;
5539 // Set the thread attributes for joinable and realtime scheduling
5540 // priority. The higher priority will only take affect if the
5541 // program is run as root or suid.
5542 pthread_attr_t attr;
5543 pthread_attr_init( &attr );
5544 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
5545 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
5546 pthread_attr_setschedpolicy( &attr, SCHED_RR );
5548 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5551 stream_.callbackInfo.isRunning = true;
5552 result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
5553 pthread_attr_destroy( &attr );
5555 stream_.callbackInfo.isRunning = false;
5556 errorText_ = "RtApiAlsa::error creating callback thread!";
// --- Error cleanup (reached via gotos elided from this chunk): close any
// open PCM handles, free the AlsaHandle and all buffers.
5565 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5566 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5568 stream_.apiHandle = 0;
5571 for ( int i=0; i<2; i++ ) {
5572 if ( stream_.userBuffer[i] ) {
5573 free( stream_.userBuffer[i] );
5574 stream_.userBuffer[i] = 0;
5578 if ( stream_.deviceBuffer ) {
5579 free( stream_.deviceBuffer );
5580 stream_.deviceBuffer = 0;
// Tear down the open stream: stop the callback thread, drop any running
// PCMs, close the ALSA handles, free the AlsaHandle and all internal
// buffers, and reset the stream to UNINITIALIZED/CLOSED.  Warns (but
// returns) if no stream is open.
// NOTE(review): some original lines (closing braces, "delete apiInfo",
// early return) were dropped by the extraction of this chunk.
5586 void RtApiAlsa :: closeStream()
5588 if ( stream_.state == STREAM_CLOSED ) {
5589 errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
5590 error( RtError::WARNING );
// Signal the callback loop to exit, then wait for the thread to finish.
5594 stream_.callbackInfo.isRunning = false;
5595 pthread_join( stream_.callbackInfo.thread, NULL );
5597 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
// If still running, abort playback/capture immediately (drop, not drain).
5598 if ( stream_.state == STREAM_RUNNING ) {
5599 stream_.state = STREAM_STOPPED;
5600 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
5601 snd_pcm_drop( apiInfo->handles[0] );
5602 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
5603 snd_pcm_drop( apiInfo->handles[1] );
// Close both PCM handles (handles[0] = playback, handles[1] = capture).
5607 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5608 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5610 stream_.apiHandle = 0;
// Release the per-direction user buffers and the shared device buffer.
5613 for ( int i=0; i<2; i++ ) {
5614 if ( stream_.userBuffer[i] ) {
5615 free( stream_.userBuffer[i] );
5616 stream_.userBuffer[i] = 0;
5620 if ( stream_.deviceBuffer ) {
5621 free( stream_.deviceBuffer );
5622 stream_.deviceBuffer = 0;
5625 stream_.mode = UNINITIALIZED;
5626 stream_.state = STREAM_CLOSED;
// Start the stream: under the stream mutex, snd_pcm_prepare() each open
// direction that is not already in the PREPARED state, then mark the
// stream RUNNING.  Warns and returns if already running; raises
// SYSTEM_ERROR if a prepare call failed.
5629 void RtApiAlsa :: startStream()
5631 // This method calls snd_pcm_prepare if the device isn't already in that state.
5634 if ( stream_.state == STREAM_RUNNING ) {
5635 errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
5636 error( RtError::WARNING );
// Serialize against the callback thread while touching the PCM handles.
5640 MUTEX_LOCK( &stream_.mutex );
5643 snd_pcm_state_t state;
5644 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5645 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
// Prepare the playback PCM if this stream has an output direction.
5646 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5647 state = snd_pcm_state( handle[0] );
5648 if ( state != SND_PCM_STATE_PREPARED ) {
5649 result = snd_pcm_prepare( handle[0] );
5651 errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
5652 errorText_ = errorStream_.str();
// Prepare the capture PCM separately only when the two PCMs are not
// linked (a linked/"synchronized" pair is driven through handle[0]).
5658 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5659 state = snd_pcm_state( handle[1] );
5660 if ( state != SND_PCM_STATE_PREPARED ) {
5661 result = snd_pcm_prepare( handle[1] );
5663 errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
5664 errorText_ = errorStream_.str();
5670 stream_.state = STREAM_RUNNING;
5673 MUTEX_UNLOCK( &stream_.mutex );
// Any negative 'result' left by a failed prepare becomes a SYSTEM_ERROR.
5675 if ( result >= 0 ) return;
5676 error( RtError::SYSTEM_ERROR );
// Stop the stream gracefully: playback is drained (snd_pcm_drain) so
// queued audio finishes — unless the PCMs are linked, in which case it is
// dropped — and unlinked capture is dropped.  Warns if already stopped;
// raises SYSTEM_ERROR on an ALSA failure.
5679 void RtApiAlsa :: stopStream()
5682 if ( stream_.state == STREAM_STOPPED ) {
5683 errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
5684 error( RtError::WARNING );
5688 // Change the state before the lock to improve shutdown response
5689 // when using a callback.
5690 stream_.state = STREAM_STOPPED;
5691 MUTEX_LOCK( &stream_.mutex );
5694 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5695 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5696 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Linked pair: drop immediately (drain on a linked pair would stall capture).
5697 if ( apiInfo->synchronized )
5698 result = snd_pcm_drop( handle[0] );
5700 result = snd_pcm_drain( handle[0] );
5702 errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
5703 errorText_ = errorStream_.str();
// Unlinked capture is stopped directly; pending captured frames are discarded.
5708 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5709 result = snd_pcm_drop( handle[1] );
5711 errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
5712 errorText_ = errorStream_.str();
5718 MUTEX_UNLOCK( &stream_.mutex );
5720 if ( result >= 0 ) return;
5721 error( RtError::SYSTEM_ERROR );
// Abort the stream immediately: both directions use snd_pcm_drop(), so
// queued playback audio is discarded rather than drained (contrast with
// stopStream()).  Warns if already stopped; SYSTEM_ERROR on failure.
5724 void RtApiAlsa :: abortStream()
5727 if ( stream_.state == STREAM_STOPPED ) {
5728 errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
5729 error( RtError::WARNING );
5733 // Change the state before the lock to improve shutdown response
5734 // when using a callback.
5735 stream_.state = STREAM_STOPPED;
5736 MUTEX_LOCK( &stream_.mutex );
5739 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5740 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5741 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5742 result = snd_pcm_drop( handle[0] );
5744 errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
5745 errorText_ = errorStream_.str();
// Capture is dropped separately only when not linked to the playback PCM.
5750 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5751 result = snd_pcm_drop( handle[1] );
5753 errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
5754 errorText_ = errorStream_.str();
5760 MUTEX_UNLOCK( &stream_.mutex );
5762 stream_.state = STREAM_STOPPED;
5763 if ( result >= 0 ) return;
5764 error( RtError::SYSTEM_ERROR );
// One iteration of the audio callback loop (run on the thread created in
// probeDeviceOpen): invoke the user callback with xrun status, then under
// the stream mutex read captured frames and/or write playback frames,
// performing byte-swap and format/channel conversion as flagged, and
// recover from -EPIPE xruns via snd_pcm_prepare().  Finally advance the
// stream time and honor the callback's stop/abort request.
// NOTE(review): the extraction dropped some lines here (the "unlock:"
// label, several "else"/"if ( result < 0 )" headers, closing braces);
// comments describe the visible flow only.
5767 void RtApiAlsa :: callbackEvent()
// While stopped, idle so the loop doesn't spin at full speed.
5769 if ( stream_.state == STREAM_STOPPED ) {
5770 if ( stream_.callbackInfo.isRunning ) usleep( 50000 ); // sleep 50 milliseconds
5774 if ( stream_.state == STREAM_CLOSED ) {
5775 errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
5776 error( RtError::WARNING );
// Report any xrun recorded by the previous iteration to the user callback,
// then clear the flag (xrun[0] = output underflow, xrun[1] = input overflow).
5780 int doStopStream = 0;
5781 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5782 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
5783 double streamTime = getStreamTime();
5784 RtAudioStreamStatus status = 0;
5785 if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
5786 status |= RTAUDIO_OUTPUT_UNDERFLOW;
5787 apiInfo->xrun[0] = false;
5789 if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
5790 status |= RTAUDIO_INPUT_OVERFLOW;
5791 apiInfo->xrun[1] = false;
// The user callback runs OUTSIDE the mutex; its return value can request
// a stop (1) or abort (2), acted on at the bottom of this function.
5793 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
5794 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
5796 MUTEX_LOCK( &stream_.mutex );
5798 // The state might change while waiting on a mutex.
5799 if ( stream_.state == STREAM_STOPPED ) goto unlock;
5805 snd_pcm_sframes_t frames;
5806 RtAudioFormat format;
5807 handle = (snd_pcm_t **) apiInfo->handles;
// ---------------- Capture side ----------------
5809 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// Read into the device buffer when a conversion pass is needed,
// otherwise straight into the user buffer.
5811 // Setup parameters.
5812 if ( stream_.doConvertBuffer[1] ) {
5813 buffer = stream_.deviceBuffer;
5814 channels = stream_.nDeviceChannels[1];
5815 format = stream_.deviceFormat[1];
5818 buffer = stream_.userBuffer[1];
5819 channels = stream_.nUserChannels[1];
5820 format = stream_.userFormat;
5823 // Read samples from device in interleaved/non-interleaved format.
5824 if ( stream_.deviceInterleaved[1] )
5825 result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
// Non-interleaved: build a per-channel pointer array into the flat buffer.
5827 void *bufs[channels];
5828 size_t offset = stream_.bufferSize * formatBytes( format );
5829 for ( int i=0; i<channels; i++ )
5830 bufs[i] = (void *) (buffer + (i * offset));
5831 result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
5834 if ( result < (int) stream_.bufferSize ) {
5835 // Either an error or underrun occured.
// -EPIPE = overrun on capture: flag it for the next callback and
// re-prepare the PCM so reading can resume.
5836 if ( result == -EPIPE ) {
5837 snd_pcm_state_t state = snd_pcm_state( handle[1] );
5838 if ( state == SND_PCM_STATE_XRUN ) {
5839 apiInfo->xrun[1] = true;
5840 result = snd_pcm_prepare( handle[1] );
5842 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
5843 errorText_ = errorStream_.str();
5847 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
5848 errorText_ = errorStream_.str();
5852 errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
5853 errorText_ = errorStream_.str();
5855 error( RtError::WARNING );
5859 // Do byte swapping if necessary.
5860 if ( stream_.doByteSwap[1] )
5861 byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
5863 // Do buffer conversion if necessary.
5864 if ( stream_.doConvertBuffer[1] )
5865 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
5867 // Check stream latency
5868 result = snd_pcm_delay( handle[1], &frames );
5869 if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
// ---------------- Playback side ----------------
5872 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Convert FROM the user buffer before writing when flagged.
5874 // Setup parameters and do buffer conversion if necessary.
5875 if ( stream_.doConvertBuffer[0] ) {
5876 buffer = stream_.deviceBuffer;
5877 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
5878 channels = stream_.nDeviceChannels[0];
5879 format = stream_.deviceFormat[0];
5882 buffer = stream_.userBuffer[0];
5883 channels = stream_.nUserChannels[0];
5884 format = stream_.userFormat;
5887 // Do byte swapping if necessary.
5888 if ( stream_.doByteSwap[0] )
5889 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
5891 // Write samples to device in interleaved/non-interleaved format.
5892 if ( stream_.deviceInterleaved[0] )
5893 result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
5895 void *bufs[channels];
5896 size_t offset = stream_.bufferSize * formatBytes( format );
5897 for ( int i=0; i<channels; i++ )
5898 bufs[i] = (void *) (buffer + (i * offset));
5899 result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
5902 if ( result < (int) stream_.bufferSize ) {
5903 // Either an error or underrun occured.
// -EPIPE = underrun on playback: flag and re-prepare, same as capture.
5904 if ( result == -EPIPE ) {
5905 snd_pcm_state_t state = snd_pcm_state( handle[0] );
5906 if ( state == SND_PCM_STATE_XRUN ) {
5907 apiInfo->xrun[0] = true;
5908 result = snd_pcm_prepare( handle[0] );
5910 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
5911 errorText_ = errorStream_.str();
5915 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
5916 errorText_ = errorStream_.str();
5920 errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
5921 errorText_ = errorStream_.str();
5923 error( RtError::WARNING );
5927 // Check stream latency
5928 result = snd_pcm_delay( handle[0], &frames );
5929 if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
// "unlock:" label target (elided from this chunk) sits just above here.
5933 MUTEX_UNLOCK( &stream_.mutex );
5935 RtApi::tickStreamTime();
// Honor the user callback's return value: 1 = drain+stop, 2 = abort now.
5936 if ( doStopStream == 1 ) this->stopStream();
5937 else if ( doStopStream == 2 ) this->abortStream();
5940 extern "C" void *alsaCallbackHandler( void *ptr )
5942 CallbackInfo *info = (CallbackInfo *) ptr;
5943 RtApiAlsa *object = (RtApiAlsa *) info->object;
5944 bool *isRunning = &info->isRunning;
5947 // Set a higher scheduler priority (P.J. Leonard)
5948 struct sched_param param;
5949 int min = sched_get_priority_min( SCHED_RR );
5950 int max = sched_get_priority_max( SCHED_RR );
5951 param.sched_priority = min + ( max - min ) / 2; // Is this the best number?
5952 sched_setscheduler( 0, SCHED_RR, ¶m );
5955 while ( *isRunning == true ) {
5956 pthread_testcancel();
5957 object->callbackEvent();
5960 pthread_exit( NULL );
5963 //******************** End of __LINUX_ALSA__ *********************//
5967 #if defined(__LINUX_OSS__)
5970 #include <sys/ioctl.h>
5973 #include "soundcard.h"
5977 extern "C" void *ossCallbackHandler(void * ptr);
// A structure to hold various information related to the OSS API
// implementation.  The member declarations for xrun and triggered were
// lost in the mangled source; they are reconstructed here from their use
// in the constructor and throughout the OSS callback code.
struct OssHandle {
  int id[2];       // device file descriptors: [0] = playback, [1] = capture
  bool xrun[2];    // [0] = output underrun flag, [1] = input overrun flag
  bool triggered;  // true once a duplex stream has been trigger-started

  OssHandle()
    :triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
};
5990 RtApiOss :: RtApiOss()
5992 // Nothing to do here.
5995 RtApiOss :: ~RtApiOss()
5997 if ( stream_.state != STREAM_CLOSED ) closeStream();
6000 unsigned int RtApiOss :: getDeviceCount( void )
6002 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6003 if ( mixerfd == -1 ) {
6004 errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
6005 error( RtError::WARNING );
6009 oss_sysinfo sysinfo;
6010 if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
6012 errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
6013 error( RtError::WARNING );
6017 return sysinfo.numaudios;
6020 RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
6022 RtAudio::DeviceInfo info;
6023 info.probed = false;
6025 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6026 if ( mixerfd == -1 ) {
6027 errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
6028 error( RtError::WARNING );
6032 oss_sysinfo sysinfo;
6033 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6034 if ( result == -1 ) {
6036 errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
6037 error( RtError::WARNING );
6041 unsigned nDevices = sysinfo.numaudios;
6042 if ( nDevices == 0 ) {
6044 errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
6045 error( RtError::INVALID_USE );
6048 if ( device >= nDevices ) {
6050 errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
6051 error( RtError::INVALID_USE );
6054 oss_audioinfo ainfo;
6056 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6058 if ( result == -1 ) {
6059 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6060 errorText_ = errorStream_.str();
6061 error( RtError::WARNING );
6066 if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
6067 if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
6068 if ( ainfo.caps & PCM_CAP_DUPLEX ) {
6069 if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )
6070 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
6073 // Probe data formats ... do for input
6074 unsigned long mask = ainfo.iformats;
6075 if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
6076 info.nativeFormats |= RTAUDIO_SINT16;
6077 if ( mask & AFMT_S8 )
6078 info.nativeFormats |= RTAUDIO_SINT8;
6079 if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
6080 info.nativeFormats |= RTAUDIO_SINT32;
6081 if ( mask & AFMT_FLOAT )
6082 info.nativeFormats |= RTAUDIO_FLOAT32;
6083 if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
6084 info.nativeFormats |= RTAUDIO_SINT24;
6086 // Check that we have at least one supported format
6087 if ( info.nativeFormats == 0 ) {
6088 errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
6089 errorText_ = errorStream_.str();
6090 error( RtError::WARNING );
6094 // Probe the supported sample rates.
6095 info.sampleRates.clear();
6096 if ( ainfo.nrates ) {
6097 for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
6098 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6099 if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
6100 info.sampleRates.push_back( SAMPLE_RATES[k] );
6107 // Check min and max rate values;
6108 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
6109 if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] )
6110 info.sampleRates.push_back( SAMPLE_RATES[k] );
6114 if ( info.sampleRates.size() == 0 ) {
6115 errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
6116 errorText_ = errorStream_.str();
6117 error( RtError::WARNING );
6121 info.name = ainfo.name;
6128 bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
6129 unsigned int firstChannel, unsigned int sampleRate,
6130 RtAudioFormat format, unsigned int *bufferSize,
6131 RtAudio::StreamOptions *options )
6133 int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
6134 if ( mixerfd == -1 ) {
6135 errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
6139 oss_sysinfo sysinfo;
6140 int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
6141 if ( result == -1 ) {
6143 errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
6147 unsigned nDevices = sysinfo.numaudios;
6148 if ( nDevices == 0 ) {
6149 // This should not happen because a check is made before this function is called.
6151 errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
6155 if ( device >= nDevices ) {
6156 // This should not happen because a check is made before this function is called.
6158 errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
6162 oss_audioinfo ainfo;
6164 result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
6166 if ( result == -1 ) {
6167 errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
6168 errorText_ = errorStream_.str();
6172 // Check if device supports input or output
6173 if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
6174 ( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
6175 if ( mode == OUTPUT )
6176 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
6178 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
6179 errorText_ = errorStream_.str();
6184 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6185 if ( mode == OUTPUT )
6187 else { // mode == INPUT
6188 if (stream_.mode == OUTPUT && stream_.device[0] == device) {
6189 // We just set the same device for playback ... close and reopen for duplex (OSS only).
6190 close( handle->id[0] );
6192 if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
6193 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
6194 errorText_ = errorStream_.str();
6197 // Check that the number previously set channels is the same.
6198 if ( stream_.nUserChannels[0] != channels ) {
6199 errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
6200 errorText_ = errorStream_.str();
6209 // Set exclusive access if specified.
6210 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
6212 // Try to open the device.
6214 fd = open( ainfo.devnode, flags, 0 );
6216 if ( errno == EBUSY )
6217 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
6219 errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
6220 errorText_ = errorStream_.str();
6224 // For duplex operation, specifically set this mode (this doesn't seem to work).
6226 if ( flags | O_RDWR ) {
6227 result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
6228 if ( result == -1) {
6229 errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
6230 errorText_ = errorStream_.str();
6236 // Check the device channel support.
6237 stream_.nUserChannels[mode] = channels;
6238 if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
6240 errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
6241 errorText_ = errorStream_.str();
6245 // Set the number of channels.
6246 int deviceChannels = channels + firstChannel;
6247 result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
6248 if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
6250 errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
6251 errorText_ = errorStream_.str();
6254 stream_.nDeviceChannels[mode] = deviceChannels;
6256 // Get the data format mask
6258 result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
6259 if ( result == -1 ) {
6261 errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
6262 errorText_ = errorStream_.str();
6266 // Determine how to set the device format.
6267 stream_.userFormat = format;
6268 int deviceFormat = -1;
6269 stream_.doByteSwap[mode] = false;
6270 if ( format == RTAUDIO_SINT8 ) {
6271 if ( mask & AFMT_S8 ) {
6272 deviceFormat = AFMT_S8;
6273 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6276 else if ( format == RTAUDIO_SINT16 ) {
6277 if ( mask & AFMT_S16_NE ) {
6278 deviceFormat = AFMT_S16_NE;
6279 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6281 else if ( mask & AFMT_S16_OE ) {
6282 deviceFormat = AFMT_S16_OE;
6283 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6284 stream_.doByteSwap[mode] = true;
6287 else if ( format == RTAUDIO_SINT24 ) {
6288 if ( mask & AFMT_S24_NE ) {
6289 deviceFormat = AFMT_S24_NE;
6290 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6292 else if ( mask & AFMT_S24_OE ) {
6293 deviceFormat = AFMT_S24_OE;
6294 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6295 stream_.doByteSwap[mode] = true;
6298 else if ( format == RTAUDIO_SINT32 ) {
6299 if ( mask & AFMT_S32_NE ) {
6300 deviceFormat = AFMT_S32_NE;
6301 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6303 else if ( mask & AFMT_S32_OE ) {
6304 deviceFormat = AFMT_S32_OE;
6305 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6306 stream_.doByteSwap[mode] = true;
6310 if ( deviceFormat == -1 ) {
6311 // The user requested format is not natively supported by the device.
6312 if ( mask & AFMT_S16_NE ) {
6313 deviceFormat = AFMT_S16_NE;
6314 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6316 else if ( mask & AFMT_S32_NE ) {
6317 deviceFormat = AFMT_S32_NE;
6318 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6320 else if ( mask & AFMT_S24_NE ) {
6321 deviceFormat = AFMT_S24_NE;
6322 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6324 else if ( mask & AFMT_S16_OE ) {
6325 deviceFormat = AFMT_S16_OE;
6326 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6327 stream_.doByteSwap[mode] = true;
6329 else if ( mask & AFMT_S32_OE ) {
6330 deviceFormat = AFMT_S32_OE;
6331 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
6332 stream_.doByteSwap[mode] = true;
6334 else if ( mask & AFMT_S24_OE ) {
6335 deviceFormat = AFMT_S24_OE;
6336 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
6337 stream_.doByteSwap[mode] = true;
6339 else if ( mask & AFMT_S8) {
6340 deviceFormat = AFMT_S8;
6341 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6345 if ( stream_.deviceFormat[mode] == 0 ) {
6346 // This really shouldn't happen ...
6348 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
6349 errorText_ = errorStream_.str();
6353 // Set the data format.
6354 int temp = deviceFormat;
6355 result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
6356 if ( result == -1 || deviceFormat != temp ) {
6358 errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
6359 errorText_ = errorStream_.str();
6363 // Attempt to set the buffer size. According to OSS, the minimum
6364 // number of buffers is two. The supposed minimum buffer size is 16
6365 // bytes, so that will be our lower bound. The argument to this
6366 // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
6367 // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
6368 // We'll check the actual value used near the end of the setup
6370 int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
6371 if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
6373 if ( options ) buffers = options->numberOfBuffers;
6374 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
6375 if ( buffers < 2 ) buffers = 3;
6376 temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
6377 result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
6378 if ( result == -1 ) {
6380 errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
6381 errorText_ = errorStream_.str();
6384 stream_.nBuffers = buffers;
6386 // Save buffer size (in sample frames).
6387 *bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
6388 stream_.bufferSize = *bufferSize;
6390 // Set the sample rate.
6391 int srate = sampleRate;
6392 result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
6393 if ( result == -1 ) {
6395 errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
6396 errorText_ = errorStream_.str();
6400 // Verify the sample rate setup worked.
6401 if ( abs( srate - sampleRate ) > 100 ) {
6403 errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
6404 errorText_ = errorStream_.str();
6407 stream_.sampleRate = sampleRate;
6409 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
6410 // We're doing duplex setup here.
6411 stream_.deviceFormat[0] = stream_.deviceFormat[1];
6412 stream_.nDeviceChannels[0] = deviceChannels;
6415 // Set interleaving parameters.
6416 stream_.userInterleaved = true;
6417 stream_.deviceInterleaved[mode] = true;
6418 if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
6419 stream_.userInterleaved = false;
6421 // Set flags for buffer conversion
6422 stream_.doConvertBuffer[mode] = false;
6423 if ( stream_.userFormat != stream_.deviceFormat[mode] )
6424 stream_.doConvertBuffer[mode] = true;
6425 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
6426 stream_.doConvertBuffer[mode] = true;
6427 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
6428 stream_.nUserChannels[mode] > 1 )
6429 stream_.doConvertBuffer[mode] = true;
6431 // Allocate the stream handles if necessary and then save.
6432 if ( stream_.apiHandle == 0 ) {
6434 handle = new OssHandle;
6436 catch ( std::bad_alloc& ) {
6437 errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
6441 stream_.apiHandle = (void *) handle;
6444 handle = (OssHandle *) stream_.apiHandle;
6446 handle->id[mode] = fd;
6448 // Allocate necessary internal buffers.
6449 unsigned long bufferBytes;
6450 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
6451 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
6452 if ( stream_.userBuffer[mode] == NULL ) {
6453 errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
6457 if ( stream_.doConvertBuffer[mode] ) {
6459 bool makeBuffer = true;
6460 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
6461 if ( mode == INPUT ) {
6462 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
6463 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
6464 if ( bufferBytes <= bytesOut ) makeBuffer = false;
6469 bufferBytes *= *bufferSize;
6470 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
6471 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
6472 if ( stream_.deviceBuffer == NULL ) {
6473 errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
6479 stream_.device[mode] = device;
6480 stream_.state = STREAM_STOPPED;
6482 // Setup the buffer conversion information structure.
6483 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
6485 // Setup thread if necessary.
6486 if ( stream_.mode == OUTPUT && mode == INPUT ) {
6487 // We had already set up an output stream.
6488 stream_.mode = DUPLEX;
6489 if ( stream_.device[0] == device ) handle->id[0] = fd;
6492 stream_.mode = mode;
6494 // Setup callback thread.
6495 stream_.callbackInfo.object = (void *) this;
6497 // Set the thread attributes for joinable and realtime scheduling
6498 // priority. The higher priority will only take affect if the
6499 // program is run as root or suid.
6500 pthread_attr_t attr;
6501 pthread_attr_init( &attr );
6502 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
6503 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
6504 pthread_attr_setschedpolicy( &attr, SCHED_RR );
6506 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
6509 stream_.callbackInfo.isRunning = true;
6510 result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
6511 pthread_attr_destroy( &attr );
6513 stream_.callbackInfo.isRunning = false;
6514 errorText_ = "RtApiOss::error creating callback thread!";
6523 if ( handle->id[0] ) close( handle->id[0] );
6524 if ( handle->id[1] ) close( handle->id[1] );
6526 stream_.apiHandle = 0;
6529 for ( int i=0; i<2; i++ ) {
6530 if ( stream_.userBuffer[i] ) {
6531 free( stream_.userBuffer[i] );
6532 stream_.userBuffer[i] = 0;
6536 if ( stream_.deviceBuffer ) {
6537 free( stream_.deviceBuffer );
6538 stream_.deviceBuffer = 0;
6544 void RtApiOss :: closeStream()
6546 if ( stream_.state == STREAM_CLOSED ) {
6547 errorText_ = "RtApiOss::closeStream(): no open stream to close!";
6548 error( RtError::WARNING );
6552 stream_.callbackInfo.isRunning = false;
6553 pthread_join( stream_.callbackInfo.thread, NULL );
6555 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6556 if ( stream_.state == STREAM_RUNNING ) {
6557 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
6558 ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6560 ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6561 stream_.state = STREAM_STOPPED;
6565 if ( handle->id[0] ) close( handle->id[0] );
6566 if ( handle->id[1] ) close( handle->id[1] );
6568 stream_.apiHandle = 0;
6571 for ( int i=0; i<2; i++ ) {
6572 if ( stream_.userBuffer[i] ) {
6573 free( stream_.userBuffer[i] );
6574 stream_.userBuffer[i] = 0;
6578 if ( stream_.deviceBuffer ) {
6579 free( stream_.deviceBuffer );
6580 stream_.deviceBuffer = 0;
6583 stream_.mode = UNINITIALIZED;
6584 stream_.state = STREAM_CLOSED;
6587 void RtApiOss :: startStream()
6590 if ( stream_.state == STREAM_RUNNING ) {
6591 errorText_ = "RtApiOss::startStream(): the stream is already running!";
6592 error( RtError::WARNING );
6596 MUTEX_LOCK( &stream_.mutex );
6598 stream_.state = STREAM_RUNNING;
6600 // No need to do anything else here ... OSS automatically starts
6601 // when fed samples.
6603 MUTEX_UNLOCK( &stream_.mutex );
6606 void RtApiOss :: stopStream()
6609 if ( stream_.state == STREAM_STOPPED ) {
6610 errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
6611 error( RtError::WARNING );
6615 // Change the state before the lock to improve shutdown response
6616 // when using a callback.
6617 stream_.state = STREAM_STOPPED;
6618 MUTEX_LOCK( &stream_.mutex );
6621 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6622 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6624 // Flush the output with zeros a few times.
6627 RtAudioFormat format;
6629 if ( stream_.doConvertBuffer[0] ) {
6630 buffer = stream_.deviceBuffer;
6631 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
6632 format = stream_.deviceFormat[0];
6635 buffer = stream_.userBuffer[0];
6636 samples = stream_.bufferSize * stream_.nUserChannels[0];
6637 format = stream_.userFormat;
6640 memset( buffer, 0, samples * formatBytes(format) );
6641 for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
6642 result = write( handle->id[0], buffer, samples * formatBytes(format) );
6643 if ( result == -1 ) {
6644 errorText_ = "RtApiOss::stopStream: audio write error.";
6645 error( RtError::WARNING );
6649 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6650 if ( result == -1 ) {
6651 errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
6652 errorText_ = errorStream_.str();
6655 handle->triggered = false;
6658 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
6659 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6660 if ( result == -1 ) {
6661 errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
6662 errorText_ = errorStream_.str();
6668 MUTEX_UNLOCK( &stream_.mutex );
6670 stream_.state = STREAM_STOPPED;
6671 if ( result != -1 ) return;
6672 error( RtError::SYSTEM_ERROR );
6675 void RtApiOss :: abortStream()
6678 if ( stream_.state == STREAM_STOPPED ) {
6679 errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
6680 error( RtError::WARNING );
6684 // Change the state before the lock to improve shutdown response
6685 // when using a callback.
6686 stream_.state = STREAM_STOPPED;
6687 MUTEX_LOCK( &stream_.mutex );
6690 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6691 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6692 result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
6693 if ( result == -1 ) {
6694 errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
6695 errorText_ = errorStream_.str();
6698 handle->triggered = false;
6701 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
6702 result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
6703 if ( result == -1 ) {
6704 errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
6705 errorText_ = errorStream_.str();
6711 MUTEX_UNLOCK( &stream_.mutex );
6713 stream_.state = STREAM_STOPPED;
6714 if ( result != -1 ) return;
6715 error( RtError::SYSTEM_ERROR );
6718 void RtApiOss :: callbackEvent()
6720 if ( stream_.state == STREAM_STOPPED ) {
6721 if ( stream_.callbackInfo.isRunning ) usleep( 50000 ); // sleep 50 milliseconds
6725 if ( stream_.state == STREAM_CLOSED ) {
6726 errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
6727 error( RtError::WARNING );
6731 // Invoke user callback to get fresh output data.
6732 int doStopStream = 0;
6733 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
6734 double streamTime = getStreamTime();
6735 RtAudioStreamStatus status = 0;
6736 OssHandle *handle = (OssHandle *) stream_.apiHandle;
6737 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
6738 status |= RTAUDIO_OUTPUT_UNDERFLOW;
6739 handle->xrun[0] = false;
6741 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
6742 status |= RTAUDIO_INPUT_OVERFLOW;
6743 handle->xrun[1] = false;
6745 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
6746 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
6748 MUTEX_LOCK( &stream_.mutex );
6750 // The state might change while waiting on a mutex.
6751 if ( stream_.state == STREAM_STOPPED ) goto unlock;
6756 RtAudioFormat format;
6758 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
6760 // Setup parameters and do buffer conversion if necessary.
6761 if ( stream_.doConvertBuffer[0] ) {
6762 buffer = stream_.deviceBuffer;
6763 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
6764 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
6765 format = stream_.deviceFormat[0];
6768 buffer = stream_.userBuffer[0];
6769 samples = stream_.bufferSize * stream_.nUserChannels[0];
6770 format = stream_.userFormat;
6773 // Do byte swapping if necessary.
6774 if ( stream_.doByteSwap[0] )
6775 byteSwapBuffer( buffer, samples, format );
6777 if ( stream_.mode == DUPLEX && handle->triggered == false ) {
6779 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
6780 result = write( handle->id[0], buffer, samples * formatBytes(format) );
6781 trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
6782 ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
6783 handle->triggered = true;
6786 // Write samples to device.
6787 result = write( handle->id[0], buffer, samples * formatBytes(format) );
6789 if ( result == -1 ) {
6790 // We'll assume this is an underrun, though there isn't a
6791 // specific means for determining that.
6792 handle->xrun[0] = true;
6793 errorText_ = "RtApiOss::callbackEvent: audio write error.";
6794 error( RtError::WARNING );
6799 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
6801 // Setup parameters.
6802 if ( stream_.doConvertBuffer[1] ) {
6803 buffer = stream_.deviceBuffer;
6804 samples = stream_.bufferSize * stream_.nDeviceChannels[1];
6805 format = stream_.deviceFormat[1];
6808 buffer = stream_.userBuffer[1];
6809 samples = stream_.bufferSize * stream_.nUserChannels[1];
6810 format = stream_.userFormat;
6813 // Read samples from device.
6814 result = read( handle->id[1], buffer, samples * formatBytes(format) );
6816 if ( result == -1 ) {
6817 // We'll assume this is an overrun, though there isn't a
6818 // specific means for determining that.
6819 handle->xrun[1] = true;
6820 errorText_ = "RtApiOss::callbackEvent: audio read error.";
6821 error( RtError::WARNING );
6825 // Do byte swapping if necessary.
6826 if ( stream_.doByteSwap[1] )
6827 byteSwapBuffer( buffer, samples, format );
6829 // Do buffer conversion if necessary.
6830 if ( stream_.doConvertBuffer[1] )
6831 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
6835 MUTEX_UNLOCK( &stream_.mutex );
6837 RtApi::tickStreamTime();
6838 if ( doStopStream == 1 ) this->stopStream();
6839 else if ( doStopStream == 2 ) this->abortStream();
6842 extern "C" void *ossCallbackHandler( void *ptr )
6844 CallbackInfo *info = (CallbackInfo *) ptr;
6845 RtApiOss *object = (RtApiOss *) info->object;
6846 bool *isRunning = &info->isRunning;
6849 // Set a higher scheduler priority (P.J. Leonard)
6850 struct sched_param param;
6851 param.sched_priority = 39; // Is this the best number?
6852 sched_setscheduler( 0, SCHED_RR, ¶m );
6855 while ( *isRunning == true ) {
6856 pthread_testcancel();
6857 object->callbackEvent();
6860 pthread_exit( NULL );
6863 //******************** End of __LINUX_OSS__ *********************//
6867 // *************************************************** //
6869 // Protected common (OS-independent) RtAudio methods.
6871 // *************************************************** //
6873 // This method can be modified to control the behavior of error
6874 // message printing.
6875 void RtApi :: error( RtError::Type type )
6877 errorStream_.str(""); // clear the ostringstream
6878 if ( type == RtError::WARNING && showWarnings_ == true )
6879 std::cerr << '\n' << errorText_ << "\n\n";
6881 throw( RtError( errorText_, type ) );
6884 void RtApi :: verifyStream()
6886 if ( stream_.state == STREAM_CLOSED ) {
6887 errorText_ = "RtApi:: a stream is not open!";
6888 error( RtError::INVALID_USE );
6892 void RtApi :: clearStreamInfo()
6894 stream_.mode = UNINITIALIZED;
6895 stream_.state = STREAM_CLOSED;
6896 stream_.sampleRate = 0;
6897 stream_.bufferSize = 0;
6898 stream_.nBuffers = 0;
6899 stream_.userFormat = 0;
6900 stream_.userInterleaved = true;
6901 stream_.streamTime = 0.0;
6902 stream_.apiHandle = 0;
6903 stream_.deviceBuffer = 0;
6904 stream_.callbackInfo.callback = 0;
6905 stream_.callbackInfo.userData = 0;
6906 stream_.callbackInfo.isRunning = false;
6907 for ( int i=0; i<2; i++ ) {
6908 stream_.device[i] = 0;
6909 stream_.doConvertBuffer[i] = false;
6910 stream_.deviceInterleaved[i] = true;
6911 stream_.doByteSwap[i] = false;
6912 stream_.nUserChannels[i] = 0;
6913 stream_.nDeviceChannels[i] = 0;
6914 stream_.channelOffset[i] = 0;
6915 stream_.deviceFormat[i] = 0;
6916 stream_.latency[i] = 0;
6917 stream_.userBuffer[i] = 0;
6918 stream_.convertInfo[i].channels = 0;
6919 stream_.convertInfo[i].inJump = 0;
6920 stream_.convertInfo[i].outJump = 0;
6921 stream_.convertInfo[i].inFormat = 0;
6922 stream_.convertInfo[i].outFormat = 0;
6923 stream_.convertInfo[i].inOffset.clear();
6924 stream_.convertInfo[i].outOffset.clear();
6928 unsigned int RtApi :: formatBytes( RtAudioFormat format )
6930 if ( format == RTAUDIO_SINT16 )
6932 else if ( format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
6933 format == RTAUDIO_FLOAT32 )
6935 else if ( format == RTAUDIO_FLOAT64 )
6937 else if ( format == RTAUDIO_SINT8 )
6940 errorText_ = "RtApi::formatBytes: undefined format.";
6941 error( RtError::WARNING );
6946 void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
6948 if ( mode == INPUT ) { // convert device to user buffer
6949 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
6950 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
6951 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
6952 stream_.convertInfo[mode].outFormat = stream_.userFormat;
6954 else { // convert user to device buffer
6955 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
6956 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
6957 stream_.convertInfo[mode].inFormat = stream_.userFormat;
6958 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
6961 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
6962 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
6964 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
6966 // Set up the interleave/deinterleave offsets.
6967 if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
6968 if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
6969 ( mode == INPUT && stream_.userInterleaved ) ) {
6970 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
6971 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
6972 stream_.convertInfo[mode].outOffset.push_back( k );
6973 stream_.convertInfo[mode].inJump = 1;
6977 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
6978 stream_.convertInfo[mode].inOffset.push_back( k );
6979 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
6980 stream_.convertInfo[mode].outJump = 1;
6984 else { // no (de)interleaving
6985 if ( stream_.userInterleaved ) {
6986 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
6987 stream_.convertInfo[mode].inOffset.push_back( k );
6988 stream_.convertInfo[mode].outOffset.push_back( k );
6992 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
6993 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
6994 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
6995 stream_.convertInfo[mode].inJump = 1;
6996 stream_.convertInfo[mode].outJump = 1;
7001 // Add channel offset.
7002 if ( firstChannel > 0 ) {
7003 if ( stream_.deviceInterleaved[mode] ) {
7004 if ( mode == OUTPUT ) {
7005 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7006 stream_.convertInfo[mode].outOffset[k] += firstChannel;
7009 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7010 stream_.convertInfo[mode].inOffset[k] += firstChannel;
7014 if ( mode == OUTPUT ) {
7015 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7016 stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
7019 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
7020 stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
7026 void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
7028 // This function does format conversion, input/output channel compensation, and
7029 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
7030 // the upper three bytes of a 32-bit integer.
7032 // Clear our device buffer when in/out duplex device channels are different
7033 if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
7034 ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
7035 memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
7038 if (info.outFormat == RTAUDIO_FLOAT64) {
7040 Float64 *out = (Float64 *)outBuffer;
7042 if (info.inFormat == RTAUDIO_SINT8) {
7043 signed char *in = (signed char *)inBuffer;
7044 scale = 1.0 / 128.0;
7045 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7046 for (j=0; j<info.channels; j++) {
7047 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7048 out[info.outOffset[j]] *= scale;
7051 out += info.outJump;
7054 else if (info.inFormat == RTAUDIO_SINT16) {
7055 Int16 *in = (Int16 *)inBuffer;
7056 scale = 1.0 / 32768.0;
7057 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7058 for (j=0; j<info.channels; j++) {
7059 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7060 out[info.outOffset[j]] *= scale;
7063 out += info.outJump;
7066 else if (info.inFormat == RTAUDIO_SINT24) {
7067 Int32 *in = (Int32 *)inBuffer;
7068 scale = 1.0 / 8388608.0;
7069 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7070 for (j=0; j<info.channels; j++) {
7071 out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0x00ffffff);
7072 out[info.outOffset[j]] *= scale;
7075 out += info.outJump;
7078 else if (info.inFormat == RTAUDIO_SINT32) {
7079 Int32 *in = (Int32 *)inBuffer;
7080 scale = 1.0 / 2147483648.0;
7081 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7082 for (j=0; j<info.channels; j++) {
7083 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7084 out[info.outOffset[j]] *= scale;
7087 out += info.outJump;
7090 else if (info.inFormat == RTAUDIO_FLOAT32) {
7091 Float32 *in = (Float32 *)inBuffer;
7092 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7093 for (j=0; j<info.channels; j++) {
7094 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7097 out += info.outJump;
7100 else if (info.inFormat == RTAUDIO_FLOAT64) {
7101 // Channel compensation and/or (de)interleaving only.
7102 Float64 *in = (Float64 *)inBuffer;
7103 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7104 for (j=0; j<info.channels; j++) {
7105 out[info.outOffset[j]] = in[info.inOffset[j]];
7108 out += info.outJump;
7112 else if (info.outFormat == RTAUDIO_FLOAT32) {
7114 Float32 *out = (Float32 *)outBuffer;
7116 if (info.inFormat == RTAUDIO_SINT8) {
7117 signed char *in = (signed char *)inBuffer;
7118 scale = 1.0 / 128.0;
7119 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7120 for (j=0; j<info.channels; j++) {
7121 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7122 out[info.outOffset[j]] *= scale;
7125 out += info.outJump;
7128 else if (info.inFormat == RTAUDIO_SINT16) {
7129 Int16 *in = (Int16 *)inBuffer;
7130 scale = 1.0 / 32768.0;
7131 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7132 for (j=0; j<info.channels; j++) {
7133 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7134 out[info.outOffset[j]] *= scale;
7137 out += info.outJump;
7140 else if (info.inFormat == RTAUDIO_SINT24) {
7141 Int32 *in = (Int32 *)inBuffer;
7142 scale = 1.0 / 8388608.0;
7143 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7144 for (j=0; j<info.channels; j++) {
7145 out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0x00ffffff);
7146 out[info.outOffset[j]] *= scale;
7149 out += info.outJump;
7152 else if (info.inFormat == RTAUDIO_SINT32) {
7153 Int32 *in = (Int32 *)inBuffer;
7154 scale = 1.0 / 2147483648.0;
7155 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7156 for (j=0; j<info.channels; j++) {
7157 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7158 out[info.outOffset[j]] *= scale;
7161 out += info.outJump;
7164 else if (info.inFormat == RTAUDIO_FLOAT32) {
7165 // Channel compensation and/or (de)interleaving only.
7166 Float32 *in = (Float32 *)inBuffer;
7167 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7168 for (j=0; j<info.channels; j++) {
7169 out[info.outOffset[j]] = in[info.inOffset[j]];
7172 out += info.outJump;
7175 else if (info.inFormat == RTAUDIO_FLOAT64) {
7176 Float64 *in = (Float64 *)inBuffer;
7177 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7178 for (j=0; j<info.channels; j++) {
7179 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7182 out += info.outJump;
7186 else if (info.outFormat == RTAUDIO_SINT32) {
7187 Int32 *out = (Int32 *)outBuffer;
7188 if (info.inFormat == RTAUDIO_SINT8) {
7189 signed char *in = (signed char *)inBuffer;
7190 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7191 for (j=0; j<info.channels; j++) {
7192 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7193 out[info.outOffset[j]] <<= 24;
7196 out += info.outJump;
7199 else if (info.inFormat == RTAUDIO_SINT16) {
7200 Int16 *in = (Int16 *)inBuffer;
7201 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7202 for (j=0; j<info.channels; j++) {
7203 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7204 out[info.outOffset[j]] <<= 16;
7207 out += info.outJump;
7210 else if (info.inFormat == RTAUDIO_SINT24) {
7211 Int32 *in = (Int32 *)inBuffer;
7212 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7213 for (j=0; j<info.channels; j++) {
7214 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7215 out[info.outOffset[j]] <<= 8;
7218 out += info.outJump;
7221 else if (info.inFormat == RTAUDIO_SINT32) {
7222 // Channel compensation and/or (de)interleaving only.
7223 Int32 *in = (Int32 *)inBuffer;
7224 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7225 for (j=0; j<info.channels; j++) {
7226 out[info.outOffset[j]] = in[info.inOffset[j]];
7229 out += info.outJump;
7232 else if (info.inFormat == RTAUDIO_FLOAT32) {
7233 Float32 *in = (Float32 *)inBuffer;
7234 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7235 for (j=0; j<info.channels; j++) {
7236 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7239 out += info.outJump;
7242 else if (info.inFormat == RTAUDIO_FLOAT64) {
7243 Float64 *in = (Float64 *)inBuffer;
7244 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7245 for (j=0; j<info.channels; j++) {
7246 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7249 out += info.outJump;
7253 else if (info.outFormat == RTAUDIO_SINT24) {
7254 Int32 *out = (Int32 *)outBuffer;
7255 if (info.inFormat == RTAUDIO_SINT8) {
7256 signed char *in = (signed char *)inBuffer;
7257 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7258 for (j=0; j<info.channels; j++) {
7259 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7260 out[info.outOffset[j]] <<= 16;
7263 out += info.outJump;
7266 else if (info.inFormat == RTAUDIO_SINT16) {
7267 Int16 *in = (Int16 *)inBuffer;
7268 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7269 for (j=0; j<info.channels; j++) {
7270 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7271 out[info.outOffset[j]] <<= 8;
7274 out += info.outJump;
7277 else if (info.inFormat == RTAUDIO_SINT24) {
7278 // Channel compensation and/or (de)interleaving only.
7279 Int32 *in = (Int32 *)inBuffer;
7280 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7281 for (j=0; j<info.channels; j++) {
7282 out[info.outOffset[j]] = in[info.inOffset[j]];
7285 out += info.outJump;
7288 else if (info.inFormat == RTAUDIO_SINT32) {
7289 Int32 *in = (Int32 *)inBuffer;
7290 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7291 for (j=0; j<info.channels; j++) {
7292 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7293 out[info.outOffset[j]] >>= 8;
7296 out += info.outJump;
7299 else if (info.inFormat == RTAUDIO_FLOAT32) {
7300 Float32 *in = (Float32 *)inBuffer;
7301 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7302 for (j=0; j<info.channels; j++) {
7303 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388608.0);
7306 out += info.outJump;
7309 else if (info.inFormat == RTAUDIO_FLOAT64) {
7310 Float64 *in = (Float64 *)inBuffer;
7311 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7312 for (j=0; j<info.channels; j++) {
7313 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7316 out += info.outJump;
7320 else if (info.outFormat == RTAUDIO_SINT16) {
7321 Int16 *out = (Int16 *)outBuffer;
7322 if (info.inFormat == RTAUDIO_SINT8) {
7323 signed char *in = (signed char *)inBuffer;
7324 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7325 for (j=0; j<info.channels; j++) {
7326 out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
7327 out[info.outOffset[j]] <<= 8;
7330 out += info.outJump;
7333 else if (info.inFormat == RTAUDIO_SINT16) {
7334 // Channel compensation and/or (de)interleaving only.
7335 Int16 *in = (Int16 *)inBuffer;
7336 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7337 for (j=0; j<info.channels; j++) {
7338 out[info.outOffset[j]] = in[info.inOffset[j]];
7341 out += info.outJump;
7344 else if (info.inFormat == RTAUDIO_SINT24) {
7345 Int32 *in = (Int32 *)inBuffer;
7346 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7347 for (j=0; j<info.channels; j++) {
7348 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 8) & 0x0000ffff);
7351 out += info.outJump;
7354 else if (info.inFormat == RTAUDIO_SINT32) {
7355 Int32 *in = (Int32 *)inBuffer;
7356 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7357 for (j=0; j<info.channels; j++) {
7358 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
7361 out += info.outJump;
7364 else if (info.inFormat == RTAUDIO_FLOAT32) {
7365 Float32 *in = (Float32 *)inBuffer;
7366 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7367 for (j=0; j<info.channels; j++) {
7368 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
7371 out += info.outJump;
7374 else if (info.inFormat == RTAUDIO_FLOAT64) {
7375 Float64 *in = (Float64 *)inBuffer;
7376 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7377 for (j=0; j<info.channels; j++) {
7378 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
7381 out += info.outJump;
7385 else if (info.outFormat == RTAUDIO_SINT8) {
7386 signed char *out = (signed char *)outBuffer;
7387 if (info.inFormat == RTAUDIO_SINT8) {
7388 // Channel compensation and/or (de)interleaving only.
7389 signed char *in = (signed char *)inBuffer;
7390 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7391 for (j=0; j<info.channels; j++) {
7392 out[info.outOffset[j]] = in[info.inOffset[j]];
7395 out += info.outJump;
7398 if (info.inFormat == RTAUDIO_SINT16) {
7399 Int16 *in = (Int16 *)inBuffer;
7400 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7401 for (j=0; j<info.channels; j++) {
7402 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
7405 out += info.outJump;
7408 else if (info.inFormat == RTAUDIO_SINT24) {
7409 Int32 *in = (Int32 *)inBuffer;
7410 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7411 for (j=0; j<info.channels; j++) {
7412 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 16) & 0x000000ff);
7415 out += info.outJump;
7418 else if (info.inFormat == RTAUDIO_SINT32) {
7419 Int32 *in = (Int32 *)inBuffer;
7420 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7421 for (j=0; j<info.channels; j++) {
7422 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
7425 out += info.outJump;
7428 else if (info.inFormat == RTAUDIO_FLOAT32) {
7429 Float32 *in = (Float32 *)inBuffer;
7430 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7431 for (j=0; j<info.channels; j++) {
7432 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
7435 out += info.outJump;
7438 else if (info.inFormat == RTAUDIO_FLOAT64) {
7439 Float64 *in = (Float64 *)inBuffer;
7440 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7441 for (j=0; j<info.channels; j++) {
7442 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
7445 out += info.outJump;
7451 void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
7457 if ( format == RTAUDIO_SINT16 ) {
7458 for ( unsigned int i=0; i<samples; i++ ) {
7459 // Swap 1st and 2nd bytes.
7464 // Increment 2 bytes.
7468 else if ( format == RTAUDIO_SINT24 ||
7469 format == RTAUDIO_SINT32 ||
7470 format == RTAUDIO_FLOAT32 ) {
7471 for ( unsigned int i=0; i<samples; i++ ) {
7472 // Swap 1st and 4th bytes.
7477 // Swap 2nd and 3rd bytes.
7483 // Increment 4 bytes.
7487 else if ( format == RTAUDIO_FLOAT64 ) {
7488 for ( unsigned int i=0; i<samples; i++ ) {
7489 // Swap 1st and 8th bytes
7494 // Swap 2nd and 7th bytes
7500 // Swap 3rd and 6th bytes
7506 // Swap 4th and 5th bytes
7512 // Increment 8 bytes.
7518 // Indentation settings for Vim and Emacs
7521 // c-basic-offset: 2
7522 // indent-tabs-mode: nil
7525 // vim: et sts=2 sw=2