1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), SGI, Macintosh OS X (CoreAudio and Jack), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://www.music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2007 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 asked to send the modifications to the original developer so that
28 they can be incorporated into the canonical version. This is,
29 however, not a binding provision of this license.
31 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
34 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
35 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
36 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
39 /************************************************************************/
41 // RtAudio: Version 4.0
46 // Static variable definitions.
47 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
48 const unsigned int RtApi::SAMPLE_RATES[] = {
49 4000, 5512, 8000, 9600, 11025, 16000, 22050,
50 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform mutex abstraction used to guard stream state.
#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
  // Windows: critical sections.
  #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
  #define MUTEX_DESTROY(A) DeleteCriticalSection(A)
  #define MUTEX_LOCK(A) EnterCriticalSection(A)
  #define MUTEX_UNLOCK(A) LeaveCriticalSection(A)
#elif defined(__LINUX_ALSA__) || defined(__UNIX_JACK__) || defined(__LINUX_OSS__) || defined(__MACOSX_CORE__)
  // Unix variants: pthreads.
  #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
  #define MUTEX_DESTROY(A) pthread_mutex_destroy(A)
  #define MUTEX_LOCK(A) pthread_mutex_lock(A)
  #define MUTEX_UNLOCK(A) pthread_mutex_unlock(A)
#else
  #define MUTEX_INITIALIZE(A) abs(*A) // dummy definitions
  #define MUTEX_DESTROY(A) abs(*A) // dummy definitions
#endif
69 // *************************************************** //
71 // RtAudio definitions.
73 // *************************************************** //
75 void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis ) throw()
79 // The order here will control the order of RtAudio's API search in
81 #if defined(__UNIX_JACK__)
82 apis.push_back( UNIX_JACK );
84 #if defined(__LINUX_ALSA__)
85 apis.push_back( LINUX_ALSA );
87 #if defined(__LINUX_OSS__)
88 apis.push_back( LINUX_OSS );
90 #if defined(__WINDOWS_ASIO__)
91 apis.push_back( WINDOWS_ASIO );
93 #if defined(__WINDOWS_DS__)
94 apis.push_back( WINDOWS_DS );
96 #if defined(__MACOSX_CORE__)
97 apis.push_back( MACOSX_CORE );
99 #if defined(__RTAUDIO_DUMMY__)
100 apis.push_back( RTAUDIO_DUMMY );
104 void RtAudio :: openRtApi( RtAudio::Api api )
106 #if defined(__UNIX_JACK__)
107 if ( api == UNIX_JACK )
108 rtapi_ = new RtApiJack();
110 #if defined(__LINUX_ALSA__)
111 if ( api == LINUX_ALSA )
112 rtapi_ = new RtApiAlsa();
114 #if defined(__LINUX_OSS__)
115 if ( api == LINUX_OSS )
116 rtapi_ = new RtApiOss();
118 #if defined(__WINDOWS_ASIO__)
119 if ( api == WINDOWS_ASIO )
120 rtapi_ = new RtApiAsio();
122 #if defined(__WINDOWS_DS__)
123 if ( api == WINDOWS_DS )
124 rtapi_ = new RtApiDs();
126 #if defined(__MACOSX_CORE__)
127 if ( api == MACOSX_CORE )
128 rtapi_ = new RtApiCore();
130 #if defined(__RTAUDIO_DUMMY__)
131 if ( api == RTAUDIO_DUMMY )
132 rtapi_ = new RtApiDummy();
136 RtAudio :: RtAudio( RtAudio::Api api ) throw()
140 if ( api != UNSPECIFIED ) {
141 // Attempt to open the specified API.
143 if ( rtapi_ ) return;
145 // No compiled support for specified API value. Issue a debug
146 // warning and continue as if no API was specified.
147 std::cerr << "\nRtAudio: no compiled support for specified API argument!\n" << std::endl;
150 // Iterate through the compiled APIs and return as soon as we find
151 // one with at least one device or we reach the end of the list.
152 std::vector< RtAudio::Api > apis;
153 getCompiledApi( apis );
154 for ( unsigned int i=0; i<apis.size(); i++ ) {
155 openRtApi( apis[i] );
156 if ( rtapi_->getDeviceCount() ) break;
159 if ( rtapi_ ) return;
161 // It should not be possible to get here because the preprocessor
162 // definition __RTAUDIO_DUMMY__ is automatically defined if no
163 // API-specific definitions are passed to the compiler. But just in
164 // case something weird happens, we'll print out an error message.
165 std::cerr << "\nRtAudio: no compiled API support found ... critical error!!\n\n";
168 RtAudio :: ~RtAudio() throw()
173 void RtAudio :: openStream( RtAudio::StreamParameters *outputParameters,
174 RtAudio::StreamParameters *inputParameters,
175 RtAudioFormat format, unsigned int sampleRate,
176 unsigned int *bufferFrames,
177 RtAudioCallback callback, void *userData,
178 RtAudio::StreamOptions *options )
180 return rtapi_->openStream( outputParameters, inputParameters, format,
181 sampleRate, bufferFrames, callback,
185 // *************************************************** //
187 // Public RtApi definitions (see end of file for
188 // private or protected utility functions).
190 // *************************************************** //
194 stream_.state = STREAM_CLOSED;
195 stream_.mode = UNINITIALIZED;
196 stream_.apiHandle = 0;
197 stream_.userBuffer[0] = 0;
198 stream_.userBuffer[1] = 0;
199 MUTEX_INITIALIZE( &stream_.mutex );
200 showWarnings_ = true;
205 MUTEX_DESTROY( &stream_.mutex );
208 void RtApi :: openStream( RtAudio::StreamParameters *oParams,
209 RtAudio::StreamParameters *iParams,
210 RtAudioFormat format, unsigned int sampleRate,
211 unsigned int *bufferFrames,
212 RtAudioCallback callback, void *userData,
213 RtAudio::StreamOptions *options )
215 if ( stream_.state != STREAM_CLOSED ) {
216 errorText_ = "RtApi::openStream: a stream is already open!";
217 error( RtError::INVALID_USE );
220 if ( oParams && oParams->nChannels < 1 ) {
221 errorText_ = "RtApi::openStream: a non-NULL output StreamParameters structure cannot have an nChannels value less than one.";
222 error( RtError::INVALID_USE );
225 if ( iParams && iParams->nChannels < 1 ) {
226 errorText_ = "RtApi::openStream: a non-NULL input StreamParameters structure cannot have an nChannels value less than one.";
227 error( RtError::INVALID_USE );
230 if ( oParams == NULL && iParams == NULL ) {
231 errorText_ = "RtApi::openStream: input and output StreamParameters structures are both NULL!";
232 error( RtError::INVALID_USE );
235 if ( formatBytes(format) == 0 ) {
236 errorText_ = "RtApi::openStream: 'format' parameter value is undefined.";
237 error( RtError::INVALID_USE );
240 unsigned int nDevices = getDeviceCount();
241 unsigned int oChannels = 0;
243 oChannels = oParams->nChannels;
244 if ( oParams->deviceId >= nDevices ) {
245 errorText_ = "RtApi::openStream: output device parameter value is invalid.";
246 error( RtError::INVALID_USE );
250 unsigned int iChannels = 0;
252 iChannels = iParams->nChannels;
253 if ( iParams->deviceId >= nDevices ) {
254 errorText_ = "RtApi::openStream: input device parameter value is invalid.";
255 error( RtError::INVALID_USE );
262 if ( oChannels > 0 ) {
264 result = probeDeviceOpen( oParams->deviceId, OUTPUT, oChannels, oParams->firstChannel,
265 sampleRate, format, bufferFrames, options );
266 if ( result == false ) error( RtError::SYSTEM_ERROR );
269 if ( iChannels > 0 ) {
271 result = probeDeviceOpen( iParams->deviceId, INPUT, iChannels, iParams->firstChannel,
272 sampleRate, format, bufferFrames, options );
273 if ( result == false ) {
274 if ( oChannels > 0 ) closeStream();
275 error( RtError::SYSTEM_ERROR );
279 stream_.callbackInfo.callback = (void *) callback;
280 stream_.callbackInfo.userData = userData;
282 if ( options ) options->numberOfBuffers = stream_.nBuffers;
283 stream_.state = STREAM_STOPPED;
286 unsigned int RtApi :: getDefaultInputDevice( void )
288 // Should be implemented in subclasses if possible.
292 unsigned int RtApi :: getDefaultOutputDevice( void )
294 // Should be implemented in subclasses if possible.
298 void RtApi :: closeStream( void )
300 // MUST be implemented in subclasses!
304 bool RtApi :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
305 unsigned int firstChannel, unsigned int sampleRate,
306 RtAudioFormat format, unsigned int *bufferSize,
307 RtAudio::StreamOptions *options )
309 // MUST be implemented in subclasses!
313 void RtApi :: tickStreamTime( void )
315 // Subclasses that do not provide their own implementation of
316 // getStreamTime should call this function once per buffer I/O to
317 // provide basic stream time support.
319 stream_.streamTime += ( stream_.bufferSize * 1.0 / stream_.sampleRate );
321 #if defined( HAVE_GETTIMEOFDAY )
322 gettimeofday( &stream_.lastTickTimestamp, NULL );
326 long RtApi :: getStreamLatency( void )
330 long totalLatency = 0;
331 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
332 totalLatency = stream_.latency[0];
333 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
334 totalLatency += stream_.latency[1];
339 double RtApi :: getStreamTime( void )
343 #if defined( HAVE_GETTIMEOFDAY )
344 // Return a very accurate estimate of the stream time by
345 // adding in the elapsed time since the last tick.
349 if ( stream_.state != STREAM_RUNNING || stream_.streamTime == 0.0 )
350 return stream_.streamTime;
352 gettimeofday( &now, NULL );
353 then = stream_.lastTickTimestamp;
354 return stream_.streamTime +
355 ((now.tv_sec + 0.000001 * now.tv_usec) -
356 (then.tv_sec + 0.000001 * then.tv_usec));
358 return stream_.streamTime;
363 // *************************************************** //
365 // OS/API-specific methods.
367 // *************************************************** //
369 #if defined(__MACOSX_CORE__)
371 // The OS X CoreAudio API is designed to use a separate callback
372 // procedure for each of its audio devices. A single RtAudio duplex
373 // stream using two different devices is supported here, though it
374 // cannot be guaranteed to always behave correctly because we cannot
375 // synchronize these two callbacks.
377 // A property listener is installed for over/underrun information.
378 // However, no functionality is currently provided to allow property
379 // listeners to trigger user handlers because it is unclear what could
380 // be done if a critical stream parameter (buffer size, sample rate,
381 // device disconnect) notification arrived. The listeners entail
382 // quite a bit of extra code and most likely, a user program wouldn't
383 // be prepared for the result anyway. However, we do provide a flag
384 // to the client callback function to inform of an over/underrun.
386 // The mechanism for querying and setting system parameters was
387 // updated (and perhaps simplified) in OS-X version 10.4. However,
388 // since 10.4 support is not necessarily available to all users, I've
389 // decided not to update the respective code at this time. Perhaps
390 // this will happen when Apple makes 10.4 free for everyone. :-)
392 // A structure to hold various information related to the CoreAudio API
395 AudioDeviceID id[2]; // device ids
396 UInt32 iStream[2]; // device stream index (first for mono mode)
399 pthread_cond_t condition;
400 int drainCounter; // Tracks callback counts when draining
401 bool internalDrain; // Indicates if stop is initiated from callback or not.
404 :deviceBuffer(0), drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
407 RtApiCore :: RtApiCore()
409 // Nothing to do here.
412 RtApiCore :: ~RtApiCore()
414 // The subclass destructor gets called before the base class
415 // destructor, so close an existing stream before deallocating
416 // apiDeviceId memory.
417 if ( stream_.state != STREAM_CLOSED ) closeStream();
420 unsigned int RtApiCore :: getDeviceCount( void )
422 // Find out how many audio devices there are, if any.
424 OSStatus result = AudioHardwareGetPropertyInfo( kAudioHardwarePropertyDevices, &dataSize, NULL );
425 if ( result != noErr ) {
426 errorText_ = "RtApiCore::getDeviceCount: OS-X error getting device info!";
427 error( RtError::WARNING );
431 return dataSize / sizeof( AudioDeviceID );
434 unsigned int RtApiCore :: getDefaultInputDevice( void )
436 unsigned int nDevices = getDeviceCount();
437 if ( nDevices <= 1 ) return 0;
440 UInt32 dataSize = sizeof( AudioDeviceID );
441 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
444 if ( result != noErr ) {
445 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device.";
446 error( RtError::WARNING );
450 dataSize *= nDevices;
451 AudioDeviceID deviceList[ nDevices ];
452 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
453 if ( result != noErr ) {
454 errorText_ = "RtApiCore::getDefaultInputDevice: OS-X system error getting device IDs.";
455 error( RtError::WARNING );
459 for ( unsigned int i=0; i<nDevices; i++ )
460 if ( id == deviceList[i] ) return i;
462 errorText_ = "RtApiCore::getDefaultInputDevice: No default device found!";
463 error( RtError::WARNING );
467 unsigned int RtApiCore :: getDefaultOutputDevice( void )
469 unsigned int nDevices = getDeviceCount();
470 if ( nDevices <= 1 ) return 0;
473 UInt32 dataSize = sizeof( AudioDeviceID );
474 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
477 if ( result != noErr ) {
478 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device.";
479 error( RtError::WARNING );
483 dataSize *= nDevices;
484 AudioDeviceID deviceList[ nDevices ];
485 result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
486 if ( result != noErr ) {
487 errorText_ = "RtApiCore::getDefaultOutputDevice: OS-X system error getting device IDs.";
488 error( RtError::WARNING );
492 for ( unsigned int i=0; i<nDevices; i++ )
493 if ( id == deviceList[i] ) return i;
495 errorText_ = "RtApiCore::getDefaultOutputDevice: No default device found!";
496 error( RtError::WARNING );
// Probe a single CoreAudio device (addressed by our list index, not by
// AudioDeviceID) and fill a DeviceInfo structure with its name, channel
// counts, supported sample rates, and native format.
RtAudio::DeviceInfo RtApiCore :: getDeviceInfo( unsigned int device )
RtAudio::DeviceInfo info;
unsigned int nDevices = getDeviceCount();
if ( nDevices == 0 ) {
errorText_ = "RtApiCore::getDeviceInfo: no devices found!";
error( RtError::INVALID_USE );
if ( device >= nDevices ) {
errorText_ = "RtApiCore::getDeviceInfo: device ID is invalid!";
error( RtError::INVALID_USE );
// Translate our device index into a CoreAudio AudioDeviceID by
// re-fetching the full system device list.
AudioDeviceID deviceList[ nDevices ];
UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
if ( result != noErr ) {
errorText_ = "RtApiCore::getDeviceInfo: OS-X system error getting device IDs.";
error( RtError::WARNING );
AudioDeviceID id = deviceList[ device ];
// Get the device name.
// The reported name is "<manufacturer>: <device name>".
result = AudioDeviceGetProperty( id, 0, false,
kAudioDevicePropertyDeviceManufacturer,
if ( result != noErr ) {
errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device manufacturer.";
errorText_ = errorStream_.str();
error( RtError::WARNING );
info.name.append( (const char *)name, strlen(name) );
info.name.append( ": " );
result = AudioDeviceGetProperty( id, 0, false,
kAudioDevicePropertyDeviceName,
if ( result != noErr ) {
errorStream_ << "RtApiCore::probeDeviceInfo: system error (" << getErrorCode( result ) << ") getting device name.";
errorText_ = errorStream_.str();
error( RtError::WARNING );
info.name.append( (const char *)name, strlen(name) );
// Get the output stream "configuration".
AudioBufferList *bufferList = nil;
result = AudioDeviceGetPropertyInfo( id, 0, false,
kAudioDevicePropertyStreamConfiguration,
if (result != noErr || dataSize == 0) {
errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration info for device (" << device << ").";
errorText_ = errorStream_.str();
error( RtError::WARNING );
// Allocate the AudioBufferList.
bufferList = (AudioBufferList *) malloc( dataSize );
if ( bufferList == NULL ) {
errorText_ = "RtApiCore::getDeviceInfo: memory error allocating output AudioBufferList.";
error( RtError::WARNING );
result = AudioDeviceGetProperty( id, 0, false,
kAudioDevicePropertyStreamConfiguration,
&dataSize, bufferList );
if ( result != noErr ) {
errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting output stream configuration for device (" << device << ").";
errorText_ = errorStream_.str();
error( RtError::WARNING );
// Get output channel information.
// Output channel count = sum of channels across all output streams.
unsigned int i, nStreams = bufferList->mNumberBuffers;
for ( i=0; i<nStreams; i++ )
info.outputChannels += bufferList->mBuffers[i].mNumberChannels;
// Get the input stream "configuration".
result = AudioDeviceGetPropertyInfo( id, 0, true,
kAudioDevicePropertyStreamConfiguration,
if (result != noErr || dataSize == 0) {
errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration info for device (" << device << ").";
errorText_ = errorStream_.str();
error( RtError::WARNING );
// Allocate the AudioBufferList.
bufferList = (AudioBufferList *) malloc( dataSize );
if ( bufferList == NULL ) {
errorText_ = "RtApiCore::getDeviceInfo: memory error allocating input AudioBufferList.";
error( RtError::WARNING );
result = AudioDeviceGetProperty( id, 0, true,
kAudioDevicePropertyStreamConfiguration,
&dataSize, bufferList );
if ( result != noErr ) {
errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting input stream configuration for device (" << device << ").";
errorText_ = errorStream_.str();
error( RtError::WARNING );
// Get input channel information.
nStreams = bufferList->mNumberBuffers;
for ( i=0; i<nStreams; i++ )
info.inputChannels += bufferList->mBuffers[i].mNumberChannels;
// If device opens for both playback and capture, we determine the channels.
if ( info.outputChannels > 0 && info.inputChannels > 0 )
info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
// Probe the device sample rates.
// Query in the direction the device actually supports: output when
// available, otherwise input.
bool isInput = false;
if ( info.outputChannels == 0 ) isInput = true;
// Determine the supported sample rates.
result = AudioDeviceGetPropertyInfo( id, 0, isInput,
kAudioDevicePropertyAvailableNominalSampleRates,
if ( result != kAudioHardwareNoError || dataSize == 0 ) {
errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rate info.";
errorText_ = errorStream_.str();
error( RtError::WARNING );
UInt32 nRanges = dataSize / sizeof( AudioValueRange );
AudioValueRange rangeList[ nRanges ];
result = AudioDeviceGetProperty( id, 0, isInput,
kAudioDevicePropertyAvailableNominalSampleRates,
&dataSize, &rangeList );
if ( result != kAudioHardwareNoError ) {
errorStream_ << "RtApiCore::getDeviceInfo: system error (" << getErrorCode( result ) << ") getting sample rates.";
errorText_ = errorStream_.str();
error( RtError::WARNING );
// Collapse all reported ranges to a single [min, max] envelope, then
// keep the entries of SAMPLE_RATES that fall inside it.
Float64 minimumRate = 100000000.0, maximumRate = 0.0;
for ( UInt32 i=0; i<nRanges; i++ ) {
if ( rangeList[i].mMinimum < minimumRate ) minimumRate = rangeList[i].mMinimum;
if ( rangeList[i].mMaximum > maximumRate ) maximumRate = rangeList[i].mMaximum;
info.sampleRates.clear();
for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
if ( SAMPLE_RATES[k] >= (unsigned int) minimumRate && SAMPLE_RATES[k] <= (unsigned int) maximumRate )
info.sampleRates.push_back( SAMPLE_RATES[k] );
if ( info.sampleRates.size() == 0 ) {
errorStream_ << "RtApiCore::probeDeviceInfo: No supported sample rates found for device (" << device << ").";
errorText_ = errorStream_.str();
error( RtError::WARNING );
// CoreAudio always uses 32-bit floating point data for PCM streams.
// Thus, any other "physical" formats supported by the device are of
// no interest to the client.
info.nativeFormats = RTAUDIO_FLOAT32;
if ( getDefaultOutputDevice() == device )
info.isDefaultOutput = true;
if ( getDefaultInputDevice() == device )
info.isDefaultInput = true;
696 OSStatus callbackHandler( AudioDeviceID inDevice,
697 const AudioTimeStamp* inNow,
698 const AudioBufferList* inInputData,
699 const AudioTimeStamp* inInputTime,
700 AudioBufferList* outOutputData,
701 const AudioTimeStamp* inOutputTime,
704 CallbackInfo *info = (CallbackInfo *) infoPointer;
706 RtApiCore *object = (RtApiCore *) info->object;
707 if ( object->callbackEvent( inDevice, inInputData, outOutputData ) == false )
708 return kAudioHardwareUnspecifiedError;
710 return kAudioHardwareNoError;
713 OSStatus deviceListener( AudioDeviceID inDevice,
716 AudioDevicePropertyID propertyID,
717 void* handlePointer )
719 CoreHandle *handle = (CoreHandle *) handlePointer;
720 if ( propertyID == kAudioDeviceProcessorOverload ) {
722 handle->xrun[1] = true;
724 handle->xrun[0] = true;
727 return kAudioHardwareNoError;
730 static bool hasProperty( AudioDeviceID id, UInt32 channel, bool isInput, AudioDevicePropertyID property )
732 OSStatus result = AudioDeviceGetPropertyInfo( id, channel, isInput, property, NULL, NULL );
736 bool RtApiCore :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
737 unsigned int firstChannel, unsigned int sampleRate,
738 RtAudioFormat format, unsigned int *bufferSize,
739 RtAudio::StreamOptions *options )
742 unsigned int nDevices = getDeviceCount();
743 if ( nDevices == 0 ) {
744 // This should not happen because a check is made before this function is called.
745 errorText_ = "RtApiCore::probeDeviceOpen: no devices found!";
749 if ( device >= nDevices ) {
750 // This should not happen because a check is made before this function is called.
751 errorText_ = "RtApiCore::probeDeviceOpen: device ID is invalid!";
755 AudioDeviceID deviceList[ nDevices ];
756 UInt32 dataSize = sizeof( AudioDeviceID ) * nDevices;
757 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDevices, &dataSize, (void *) &deviceList );
758 if ( result != noErr ) {
759 errorText_ = "RtApiCore::probeDeviceOpen: OS-X system error getting device IDs.";
763 AudioDeviceID id = deviceList[ device ];
765 // Setup for stream mode.
766 bool isInput = false;
767 if ( mode == INPUT ) isInput = true;
769 // Set or disable "hog" mode.
770 dataSize = sizeof( UInt32 );
772 if ( options && options->flags & RTAUDIO_HOG_DEVICE ) doHog = 1;
773 result = AudioHardwareSetProperty( kAudioHardwarePropertyHogModeIsAllowed, dataSize, &doHog );
774 if ( result != noErr ) {
775 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting 'hog' state!";
776 errorText_ = errorStream_.str();
780 // Get the stream "configuration".
781 AudioBufferList *bufferList;
782 result = AudioDeviceGetPropertyInfo( id, 0, isInput,
783 kAudioDevicePropertyStreamConfiguration,
785 if (result != noErr || dataSize == 0) {
786 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration info for device (" << device << ").";
787 errorText_ = errorStream_.str();
791 // Allocate the AudioBufferList.
792 bufferList = (AudioBufferList *) malloc( dataSize );
793 if ( bufferList == NULL ) {
794 errorText_ = "RtApiCore::probeDeviceOpen: memory error allocating AudioBufferList.";
798 result = AudioDeviceGetProperty( id, 0, isInput,
799 kAudioDevicePropertyStreamConfiguration,
800 &dataSize, bufferList );
801 if ( result != noErr ) {
803 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream configuration for device (" << device << ").";
804 errorText_ = errorStream_.str();
808 // Search for a stream that contains the desired number of
809 // channels. CoreAudio devices can have an arbitrary number of
810 // streams and each stream can have an arbitrary number of channels.
811 // For each stream, a single buffer of interleaved samples is
812 // provided. RtAudio currently only supports the use of one stream
813 // of interleaved data or multiple consecutive single-channel
814 // streams. Thus, our search below is limited to these two
816 unsigned int streamChannels = 0, nStreams = 0;
817 UInt32 iChannel = 0, iStream = 0;
818 unsigned int offsetCounter = firstChannel;
819 stream_.deviceInterleaved[mode] = true;
820 nStreams = bufferList->mNumberBuffers;
821 bool foundStream = false;
823 for ( iStream=0; iStream<nStreams; iStream++ ) {
824 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
825 if ( streamChannels >= channels + offsetCounter ) {
826 iChannel += offsetCounter;
830 if ( streamChannels > offsetCounter ) break;
831 offsetCounter -= streamChannels;
832 iChannel += streamChannels;
835 // If we didn't find a single stream above, see if we can meet
836 // the channel specification in mono mode (i.e. using separate
837 // non-interleaved buffers). This can only work if there are N
838 // consecutive one-channel streams, where N is the number of
839 // desired channels (+ channel offset).
840 if ( foundStream == false ) {
841 unsigned int counter = 0;
842 offsetCounter = firstChannel;
844 for ( iStream=0; iStream<nStreams; iStream++ ) {
845 streamChannels = bufferList->mBuffers[iStream].mNumberChannels;
846 if ( offsetCounter ) {
847 if ( streamChannels > offsetCounter ) break;
848 offsetCounter -= streamChannels;
850 else if ( streamChannels == 1 )
854 if ( counter == channels ) {
855 iStream -= channels - 1;
856 iChannel -= channels - 1;
857 stream_.deviceInterleaved[mode] = false;
861 iChannel += streamChannels;
866 if ( foundStream == false ) {
867 errorStream_ << "RtApiCore::probeDeviceOpen: unable to find OS-X stream on device (" << device << ") for requested channels.";
868 errorText_ = errorStream_.str();
872 // Determine the buffer size.
873 AudioValueRange bufferRange;
874 dataSize = sizeof( AudioValueRange );
875 result = AudioDeviceGetProperty( id, 0, isInput,
876 kAudioDevicePropertyBufferFrameSizeRange,
877 &dataSize, &bufferRange );
878 if ( result != noErr ) {
879 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting buffer size range for device (" << device << ").";
880 errorText_ = errorStream_.str();
884 if ( bufferRange.mMinimum > *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMinimum;
885 else if ( bufferRange.mMaximum < *bufferSize ) *bufferSize = (unsigned long) bufferRange.mMaximum;
886 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) *bufferSize = (unsigned long) bufferRange.mMinimum;
888 // Set the buffer size. For mono mode, I'm assuming we only need to
889 // make this setting for the master channel.
890 UInt32 theSize = (UInt32) *bufferSize;
891 dataSize = sizeof( UInt32 );
892 result = AudioDeviceSetProperty( id, NULL, 0, isInput,
893 kAudioDevicePropertyBufferFrameSize,
894 dataSize, &theSize );
896 if ( result != noErr ) {
897 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting the buffer size for device (" << device << ").";
898 errorText_ = errorStream_.str();
902 // If attempting to setup a duplex stream, the bufferSize parameter
903 // MUST be the same in both directions!
904 *bufferSize = theSize;
905 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
906 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << device << ").";
907 errorText_ = errorStream_.str();
911 stream_.bufferSize = *bufferSize;
912 stream_.nBuffers = 1;
914 // Get the stream ID(s) so we can set the stream format. In mono
915 // mode, we'll have to do this for each stream (channel).
916 AudioStreamID streamIDs[ nStreams ];
917 dataSize = nStreams * sizeof( AudioStreamID );
918 result = AudioDeviceGetProperty( id, 0, isInput,
919 kAudioDevicePropertyStreams,
920 &dataSize, &streamIDs );
921 if ( result != noErr ) {
922 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream ID(s) for device (" << device << ").";
923 errorText_ = errorStream_.str();
927 // Now set the stream format. Also, check the physical format of the
928 // device and change that if necessary.
929 AudioStreamBasicDescription description;
930 dataSize = sizeof( AudioStreamBasicDescription );
931 if ( stream_.deviceInterleaved[mode] ) nStreams = 1;
932 else nStreams = channels;
935 for ( unsigned int i=0; i<nStreams; i++ ) {
937 result = AudioStreamGetProperty( streamIDs[iStream+i], 0,
938 kAudioStreamPropertyVirtualFormat,
939 &dataSize, &description );
941 if ( result != noErr ) {
942 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream format for device (" << device << ").";
943 errorText_ = errorStream_.str();
947 // Set the sample rate and data format id. However, only make the
948 // change if the sample rate is not within 1.0 of the desired
949 // rate and the format is not linear pcm.
950 updateFormat = false;
951 if ( fabs( description.mSampleRate - (double)sampleRate ) > 1.0 ) {
952 description.mSampleRate = (double) sampleRate;
956 if ( description.mFormatID != kAudioFormatLinearPCM ) {
957 description.mFormatID = kAudioFormatLinearPCM;
961 if ( updateFormat ) {
962 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0,
963 kAudioStreamPropertyVirtualFormat,
964 dataSize, &description );
965 if ( result != noErr ) {
966 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting sample rate or data format for device (" << device << ").";
967 errorText_ = errorStream_.str();
972 // Now check the physical format.
973 result = AudioStreamGetProperty( streamIDs[iStream+i], 0,
974 kAudioStreamPropertyPhysicalFormat,
975 &dataSize, &description );
976 if ( result != noErr ) {
977 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream physical format for device (" << device << ").";
978 errorText_ = errorStream_.str();
982 if ( description.mFormatID != kAudioFormatLinearPCM || description.mBitsPerChannel < 24 ) {
983 description.mFormatID = kAudioFormatLinearPCM;
984 AudioStreamBasicDescription testDescription = description;
985 unsigned long formatFlags;
987 // We'll try higher bit rates first and then work our way down.
988 testDescription.mBitsPerChannel = 32;
989 formatFlags = description.mFormatFlags | kLinearPCMFormatFlagIsFloat & ~kLinearPCMFormatFlagIsSignedInteger;
990 testDescription.mFormatFlags = formatFlags;
991 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
992 if ( result == noErr ) continue;
994 testDescription = description;
995 testDescription.mBitsPerChannel = 32;
996 formatFlags = (description.mFormatFlags | kLinearPCMFormatFlagIsSignedInteger) & ~kLinearPCMFormatFlagIsFloat;
997 testDescription.mFormatFlags = formatFlags;
998 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
999 if ( result == noErr ) continue;
1001 testDescription = description;
1002 testDescription.mBitsPerChannel = 24;
1003 testDescription.mFormatFlags = formatFlags;
1004 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1005 if ( result == noErr ) continue;
1007 testDescription = description;
1008 testDescription.mBitsPerChannel = 16;
1009 testDescription.mFormatFlags = formatFlags;
1010 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1011 if ( result == noErr ) continue;
1013 testDescription = description;
1014 testDescription.mBitsPerChannel = 8;
1015 testDescription.mFormatFlags = formatFlags;
1016 result = AudioStreamSetProperty( streamIDs[iStream+i], NULL, 0, kAudioStreamPropertyPhysicalFormat, dataSize, &testDescription );
1017 if ( result != noErr ) {
1018 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") setting physical data format for device (" << device << ").";
1019 errorText_ = errorStream_.str();
1025 // Get the stream latency. There can be latency in both the device
1026 // and the stream. First, attempt to get the device latency on the
1027 // master channel or the first open channel. Errors that might
1028 // occur here are not deemed critical.
1029 UInt32 latency, channel = 0;
1030 dataSize = sizeof( UInt32 );
1031 AudioDevicePropertyID property = kAudioDevicePropertyLatency;
1032 for ( int i=0; i<2; i++ ) {
1033 if ( hasProperty( id, channel, isInput, property ) == true ) break;
1034 channel = iChannel + 1 + i;
1036 if ( channel <= iChannel + 1 ) {
1037 result = AudioDeviceGetProperty( id, channel, isInput, property, &dataSize, &latency );
1038 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] = latency;
1040 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting device latency for device (" << device << ").";
1041 errorText_ = errorStream_.str();
1042 error( RtError::WARNING );
1046 // Now try to get the stream latency. For "mono" mode, I assume the
1047 // latency is equal for all single-channel streams.
1048 result = AudioStreamGetProperty( streamIDs[iStream], 0, property, &dataSize, &latency );
1049 if ( result == kAudioHardwareNoError ) stream_.latency[ mode ] += latency;
1051 errorStream_ << "RtApiCore::probeDeviceOpen: system error (" << getErrorCode( result ) << ") getting stream latency for device (" << device << ").";
1052 errorText_ = errorStream_.str();
1053 error( RtError::WARNING );
1056 // Byte-swapping: According to AudioHardware.h, the stream data will
1057 // always be presented in native-endian format, so we should never
1058 // need to byte swap.
1059 stream_.doByteSwap[mode] = false;
1061 // From the CoreAudio documentation, PCM data must be supplied as
1063 stream_.userFormat = format;
1064 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1066 if ( stream_.deviceInterleaved[mode] )
1067 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
1069 stream_.nDeviceChannels[mode] = channels;
1070 stream_.nUserChannels[mode] = channels;
1071 stream_.channelOffset[mode] = iChannel; // offset within a CoreAudio stream
1072 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1073 else stream_.userInterleaved = true;
1075 // Set flags for buffer conversion.
1076 stream_.doConvertBuffer[mode] = false;
1077 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1078 stream_.doConvertBuffer[mode] = true;
1079 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
1080 stream_.doConvertBuffer[mode] = true;
1081 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
1082 stream_.nUserChannels[mode] > 1 )
1083 stream_.doConvertBuffer[mode] = true;
1085 // Allocate our CoreHandle structure for the stream.
1086 CoreHandle *handle = 0;
1087 if ( stream_.apiHandle == 0 ) {
1089 handle = new CoreHandle;
1091 catch ( std::bad_alloc& ) {
1092 errorText_ = "RtApiCore::probeDeviceOpen: error allocating CoreHandle memory.";
1096 if ( pthread_cond_init( &handle->condition, NULL ) ) {
1097 errorText_ = "RtApiCore::probeDeviceOpen: error initializing pthread condition variable.";
1100 stream_.apiHandle = (void *) handle;
1103 handle = (CoreHandle *) stream_.apiHandle;
1104 handle->iStream[mode] = iStream;
1105 handle->id[mode] = id;
1107 // Allocate necessary internal buffers.
1108 unsigned long bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1109 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1110 if ( stream_.userBuffer[mode] == NULL ) {
1111 errorText_ = "RtApiCore::probeDeviceOpen: error allocating user buffer memory.";
1115 // If possible, we will make use of the CoreAudio stream buffers as
1116 // "device buffers". However, we can't do this if the device
1117 // buffers are non-interleaved ("mono" mode).
1118 if ( !stream_.deviceInterleaved[mode] && stream_.doConvertBuffer[mode] ) {
1120 bool makeBuffer = true;
1121 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
1122 if ( mode == INPUT ) {
1123 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1124 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1125 if ( bufferBytes <= bytesOut ) makeBuffer = false;
1130 bufferBytes *= *bufferSize;
1131 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1132 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1133 if ( stream_.deviceBuffer == NULL ) {
1134 errorText_ = "RtApiCore::probeDeviceOpen: error allocating device buffer memory.";
1138 // Save a pointer to our own device buffer in the CoreHandle
1139 // structure because we may need to use the stream_.deviceBuffer
1140 // variable to point to the CoreAudio buffer before buffer
1141 // conversion (if we have a duplex stream with two different
1142 // conversion schemes).
1143 handle->deviceBuffer = stream_.deviceBuffer;
1147 stream_.sampleRate = sampleRate;
1148 stream_.device[mode] = device;
1149 stream_.state = STREAM_STOPPED;
1150 stream_.callbackInfo.object = (void *) this;
1152 // Setup the buffer conversion information structure. We override
1153 // the channel offset value and perform our own setting for that
1155 if ( stream_.doConvertBuffer[mode] ) {
1156 setConvertInfo( mode, 0 );
1158 // Add channel offset for interleaved channels.
1159 if ( firstChannel > 0 && stream_.deviceInterleaved[mode] ) {
1160 if ( mode == OUTPUT ) {
1161 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
1162 stream_.convertInfo[mode].outOffset[k] += firstChannel;
1165 for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
1166 stream_.convertInfo[mode].inOffset[k] += firstChannel;
1171 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device )
1172 // Only one callback procedure per device.
1173 stream_.mode = DUPLEX;
1175 result = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
1176 if ( result != noErr ) {
1177 errorStream_ << "RtApiCore::probeDeviceOpen: system error setting callback for device (" << device << ").";
1178 errorText_ = errorStream_.str();
1181 if ( stream_.mode == OUTPUT && mode == INPUT )
1182 stream_.mode = DUPLEX;
1184 stream_.mode = mode;
1187 // Setup the device property listener for over/underload.
1188 result = AudioDeviceAddPropertyListener( id, 0, isInput,
1189 kAudioDeviceProcessorOverload,
1190 deviceListener, (void *) handle );
1196 pthread_cond_destroy( &handle->condition );
1198 stream_.apiHandle = 0;
1201 for ( int i=0; i<2; i++ ) {
1202 if ( stream_.userBuffer[i] ) {
1203 free( stream_.userBuffer[i] );
1204 stream_.userBuffer[i] = 0;
1208 if ( stream_.deviceBuffer ) {
1209 free( stream_.deviceBuffer );
1210 stream_.deviceBuffer = 0;
1216 void RtApiCore :: closeStream( void )
1218 if ( stream_.state == STREAM_CLOSED ) {
1219 errorText_ = "RtApiCore::closeStream(): no open stream to close!";
1220 error( RtError::WARNING );
1224 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1225 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1226 if ( stream_.state == STREAM_RUNNING )
1227 AudioDeviceStop( handle->id[0], callbackHandler );
1228 AudioDeviceRemoveIOProc( handle->id[0], callbackHandler );
1231 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1232 if ( stream_.state == STREAM_RUNNING )
1233 AudioDeviceStop( handle->id[1], callbackHandler );
1234 AudioDeviceRemoveIOProc( handle->id[1], callbackHandler );
1237 for ( int i=0; i<2; i++ ) {
1238 if ( stream_.userBuffer[i] ) {
1239 free( stream_.userBuffer[i] );
1240 stream_.userBuffer[i] = 0;
1244 if ( handle->deviceBuffer ) {
1245 free( handle->deviceBuffer );
1246 stream_.deviceBuffer = 0;
1249 // Destroy pthread condition variable.
1250 pthread_cond_destroy( &handle->condition );
1252 stream_.apiHandle = 0;
1254 stream_.mode = UNINITIALIZED;
1255 stream_.state = STREAM_CLOSED;
1258 void RtApiCore :: startStream( void )
1261 if ( stream_.state == STREAM_RUNNING ) {
1262 errorText_ = "RtApiCore::startStream(): the stream is already running!";
1263 error( RtError::WARNING );
1267 MUTEX_LOCK( &stream_.mutex );
1269 OSStatus result = noErr;
1270 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1271 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1273 result = AudioDeviceStart( handle->id[0], callbackHandler );
1274 if ( result != noErr ) {
1275 errorStream_ << "RtApiCore::startStream: system error (" << getErrorCode( result ) << ") starting callback procedure on device (" << stream_.device[0] << ").";
1276 errorText_ = errorStream_.str();
1281 if ( stream_.mode == INPUT ||
1282 ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1284 result = AudioDeviceStart( handle->id[1], callbackHandler );
1285 if ( result != noErr ) {
1286 errorStream_ << "RtApiCore::startStream: system error starting input callback procedure on device (" << stream_.device[1] << ").";
1287 errorText_ = errorStream_.str();
1292 handle->drainCounter = 0;
1293 handle->internalDrain = false;
1294 stream_.state = STREAM_RUNNING;
1297 MUTEX_UNLOCK( &stream_.mutex );
1299 if ( result == noErr ) return;
1300 error( RtError::SYSTEM_ERROR );
1303 void RtApiCore :: stopStream( void )
1306 if ( stream_.state == STREAM_STOPPED ) {
1307 errorText_ = "RtApiCore::stopStream(): the stream is already stopped!";
1308 error( RtError::WARNING );
1312 MUTEX_LOCK( &stream_.mutex );
1314 OSStatus result = noErr;
1315 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1316 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
1318 if ( handle->drainCounter == 0 ) {
1319 handle->drainCounter = 1;
1320 pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
1323 result = AudioDeviceStop( handle->id[0], callbackHandler );
1324 if ( result != noErr ) {
1325 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping callback procedure on device (" << stream_.device[0] << ").";
1326 errorText_ = errorStream_.str();
1331 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1] ) ) {
1333 result = AudioDeviceStop( handle->id[1], callbackHandler );
1334 if ( result != noErr ) {
1335 errorStream_ << "RtApiCore::stopStream: system error (" << getErrorCode( result ) << ") stopping input callback procedure on device (" << stream_.device[1] << ").";
1336 errorText_ = errorStream_.str();
1342 MUTEX_UNLOCK( &stream_.mutex );
1344 stream_.state = STREAM_STOPPED;
1345 if ( result == noErr ) return;
1346 error( RtError::SYSTEM_ERROR );
1349 void RtApiCore :: abortStream( void )
1352 if ( stream_.state == STREAM_STOPPED ) {
1353 errorText_ = "RtApiCore::abortStream(): the stream is already stopped!";
1354 error( RtError::WARNING );
1358 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1359 handle->drainCounter = 1;
// Per-device IOProc body, dispatched for every buffer of each CoreAudio
// device in the stream.  Invokes the user callback for fresh output,
// copies/converts between the CoreAudio buffer lists and the user buffers,
// and drives the drain handshake used by stopStream()/abortStream().
1364 bool RtApiCore :: callbackEvent( AudioDeviceID deviceId,
1365 const AudioBufferList *inBufferList,
1366 const AudioBufferList *outBufferList )
// A straggling IOProc call can arrive after the stream stops; ignore it.
1368 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
1369 if ( stream_.state == STREAM_CLOSED ) {
1370 errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
1371 error( RtError::WARNING );
1375 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1376 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
1378 // Check if we were draining the stream and signal is finished.
// drainCounter climbs once per callback while draining (see below); past 3
// the zero-filled output has played out, so wake any thread blocked in
// stopStream() -- unless the drain was initiated from inside the callback.
1379 if ( handle->drainCounter > 3 ) {
1380 if ( handle->internalDrain == false )
1381 pthread_cond_signal( &handle->condition );
1387 MUTEX_LOCK( &stream_.mutex );
1389 AudioDeviceID outputDevice = handle->id[0];
1391 // Invoke user callback to get fresh output data UNLESS we are
1392 // draining stream or duplex mode AND the input/output devices are
1393 // different AND this function is called for the input device.
1394 if ( handle->drainCounter == 0 && ( stream_.mode != DUPLEX || deviceId == outputDevice ) ) {
1395 RtAudioCallback callback = (RtAudioCallback) info->callback;
1396 double streamTime = getStreamTime();
1397 RtAudioStreamStatus status = 0;
// Report any xrun flags to the user and clear them (flags are raised
// elsewhere -- presumably by the overload property listener; see
// deviceListener registration in probeDeviceOpen).
1398 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
1399 status |= RTAUDIO_OUTPUT_UNDERFLOW;
1400 handle->xrun[0] = false;
1402 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
1403 status |= RTAUDIO_INPUT_OVERFLOW;
1404 handle->xrun[1] = false;
// The callback's return value seeds the drain state machine: 1 requests a
// drain (stop after pending output plays), 2 requests an immediate stop.
1406 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
1407 stream_.bufferSize, streamTime, status, info->userData );
1408 if ( handle->drainCounter == 2 ) {
1409 MUTEX_UNLOCK( &stream_.mutex );
1413 else if ( handle->drainCounter == 1 )
1414 handle->internalDrain = true;
// ---- Output side: fill the CoreAudio output buffer list. ----
1417 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == outputDevice ) ) {
1419 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
1421 if ( stream_.deviceInterleaved[0] ) {
1422 memset( outBufferList->mBuffers[handle->iStream[0]].mData,
1424 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1427 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1428 memset( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1430 outBufferList->mBuffers[handle->iStream[0]+i].mDataByteSize );
// Conversion required: convert directly into the CoreAudio buffer when
// interleaved; otherwise convert into our own buffer, then de-interleave
// into the per-channel stream buffers below.
1434 else if ( stream_.doConvertBuffer[0] ) {
1436 if ( stream_.deviceInterleaved[0] )
1437 stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->iStream[0]].mData;
1439 stream_.deviceBuffer = handle->deviceBuffer;
1441 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
1443 if ( !stream_.deviceInterleaved[0] ) {
1444 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1445 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1446 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1447 &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
// No conversion required: copy user data straight to the stream buffers.
1453 if ( stream_.deviceInterleaved[0] ) {
1454 memcpy( outBufferList->mBuffers[handle->iStream[0]].mData,
1455 stream_.userBuffer[0],
1456 outBufferList->mBuffers[handle->iStream[0]].mDataByteSize );
1459 UInt32 bufferBytes = outBufferList->mBuffers[handle->iStream[0]].mDataByteSize;
1460 for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
1461 memcpy( outBufferList->mBuffers[handle->iStream[0]+i].mData,
1462 &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
// While draining, count callbacks so the drain completes (> 3 test above).
1467 if ( handle->drainCounter ) {
1468 handle->drainCounter++;
// ---- Input side: copy/convert from the CoreAudio input buffer list. ----
1473 AudioDeviceID inputDevice = handle->id[1];
1474 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == inputDevice ) ) {
1476 if ( stream_.doConvertBuffer[1] ) {
// Interleaved input can be converted in place from the CoreAudio buffer;
// non-interleaved input is first gathered into our own device buffer.
1478 if ( stream_.deviceInterleaved[1] )
1479 stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->iStream[1]].mData;
1481 stream_.deviceBuffer = (char *) handle->deviceBuffer;
1482 UInt32 bufferBytes = inBufferList->mBuffers[handle->iStream[1]].mDataByteSize;
1483 for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
1484 memcpy( &stream_.deviceBuffer[i*bufferBytes],
1485 inBufferList->mBuffers[handle->iStream[1]+i].mData, bufferBytes );
1489 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
1493 memcpy( stream_.userBuffer[1],
1494 inBufferList->mBuffers[handle->iStream[1]].mData,
1495 inBufferList->mBuffers[handle->iStream[1]].mDataByteSize );
1500 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream's running time by one buffer.
1502 RtApi::tickStreamTime();
1506 const char* RtApiCore :: getErrorCode( OSStatus code )
1510 case kAudioHardwareNotRunningError:
1511 return "kAudioHardwareNotRunningError";
1513 case kAudioHardwareUnspecifiedError:
1514 return "kAudioHardwareUnspecifiedError";
1516 case kAudioHardwareUnknownPropertyError:
1517 return "kAudioHardwareUnknownPropertyError";
1519 case kAudioHardwareBadPropertySizeError:
1520 return "kAudioHardwareBadPropertySizeError";
1522 case kAudioHardwareIllegalOperationError:
1523 return "kAudioHardwareIllegalOperationError";
1525 case kAudioHardwareBadObjectError:
1526 return "kAudioHardwareBadObjectError";
1528 case kAudioHardwareBadDeviceError:
1529 return "kAudioHardwareBadDeviceError";
1531 case kAudioHardwareBadStreamError:
1532 return "kAudioHardwareBadStreamError";
1534 case kAudioHardwareUnsupportedOperationError:
1535 return "kAudioHardwareUnsupportedOperationError";
1537 case kAudioDeviceUnsupportedFormatError:
1538 return "kAudioDeviceUnsupportedFormatError";
1540 case kAudioDevicePermissionsError:
1541 return "kAudioDevicePermissionsError";
1544 return "CoreAudio unknown error";
1548 //******************** End of __MACOSX_CORE__ *********************//
1551 #if defined(__UNIX_JACK__)
1553 // JACK is a low-latency audio server, originally written for the
1554 // GNU/Linux operating system and now also ported to OS-X. It can
1555 // connect a number of different applications to an audio device, as
1556 // well as allowing them to share audio between themselves.
1558 // When using JACK with RtAudio, "devices" refer to JACK clients that
1559 // have ports connected to the server. The JACK server is typically
1560 // started in a terminal as follows:
1562 // .jackd -d alsa -d hw:0
1564 // or through an interface program such as qjackctl. Many of the
1565 // parameters normally set for a stream are fixed by the JACK server
1566 // and can be specified when the JACK server is started. In
1569 // .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
1571 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
1572 // frames, and number of buffers = 4. Once the server is running, it
1573 // is not possible to override these values. If the values are not
1574 // specified in the command-line, the JACK server uses default values.
1576 // The JACK server does not have to be running when an instance of
1577 // RtApiJack is created, though the function getDeviceCount() will
1578 // report 0 devices found until JACK has been started. When no
1579 // devices are available (i.e., the JACK server is not running), a
1580 // stream cannot be opened.
1582 #include <jack/jack.h>
1585 // A structure to hold various information related to the Jack API
1588 jack_client_t *client;
1589 jack_port_t **ports[2];
1590 std::string deviceName[2];
1592 pthread_cond_t condition;
1593 int drainCounter; // Tracks callback counts when draining
1594 bool internalDrain; // Indicates if stop is initiated from callback or not.
1597 :client(0), drainCounter(0), internalDrain(false) { ports[0] = 0; ports[1] = 0; xrun[0] = false; xrun[1] = false; }
// Default constructor.  All real setup is deferred until a stream is
// opened (see probeDeviceOpen).
1600 RtApiJack :: RtApiJack()
1602 // Nothing to do here.
1605 RtApiJack :: ~RtApiJack()
1607 if ( stream_.state != STREAM_CLOSED ) closeStream();
1610 unsigned int RtApiJack :: getDeviceCount( void )
1612 // See if we can become a jack client.
1613 jack_client_t *client = jack_client_new( "RtApiJackCount" );
1614 if ( client == 0 ) return 0;
1617 std::string port, previousPort;
1618 unsigned int nChannels = 0, nDevices = 0;
1619 ports = jack_get_ports( client, NULL, NULL, 0 );
1621 // Parse the port names up to the first colon (:).
1622 unsigned int iColon = 0;
1624 port = (char *) ports[ nChannels ];
1625 iColon = port.find(":");
1626 if ( iColon != std::string::npos ) {
1627 port = port.substr( 0, iColon + 1 );
1628 if ( port != previousPort ) {
1630 previousPort = port;
1633 } while ( ports[++nChannels] );
1637 jack_client_close( client );
1641 RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device )
1643 RtAudio::DeviceInfo info;
1644 info.probed = false;
1646 jack_client_t *client = jack_client_new( "RtApiJackInfo" );
1647 if ( client == 0 ) {
1648 errorText_ = "RtApiJack::getDeviceInfo: Jack server not found or connection error!";
1649 error( RtError::WARNING );
1654 std::string port, previousPort;
1655 unsigned int nPorts = 0, nDevices = 0;
1656 ports = jack_get_ports( client, NULL, NULL, 0 );
1658 // Parse the port names up to the first colon (:).
1659 unsigned int iColon = 0;
1661 port = (char *) ports[ nPorts ];
1662 iColon = port.find(":");
1663 if ( iColon != std::string::npos ) {
1664 port = port.substr( 0, iColon );
1665 if ( port != previousPort ) {
1666 if ( nDevices == device ) info.name = port;
1668 previousPort = port;
1671 } while ( ports[++nPorts] );
1675 if ( device >= nDevices ) {
1676 errorText_ = "RtApiJack::getDeviceInfo: device ID is invalid!";
1677 error( RtError::INVALID_USE );
1680 // Get the current jack server sample rate.
1681 info.sampleRates.clear();
1682 info.sampleRates.push_back( jack_get_sample_rate( client ) );
1684 // Count the available ports containing the client name as device
1685 // channels. Jack "input ports" equal RtAudio output channels.
1686 unsigned int nChannels = 0;
1687 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput );
1689 while ( ports[ nChannels ] ) nChannels++;
1691 info.outputChannels = nChannels;
1694 // Jack "output ports" equal RtAudio input channels.
1696 ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput );
1698 while ( ports[ nChannels ] ) nChannels++;
1700 info.inputChannels = nChannels;
1703 if ( info.outputChannels == 0 && info.inputChannels == 0 ) {
1704 jack_client_close(client);
1705 errorText_ = "RtApiJack::getDeviceInfo: error determining Jack input/output channels!";
1706 error( RtError::WARNING );
1710 // If device opens for both playback and capture, we determine the channels.
1711 if ( info.outputChannels > 0 && info.inputChannels > 0 )
1712 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
1714 // Jack always uses 32-bit floats.
1715 info.nativeFormats = RTAUDIO_FLOAT32;
1717 // Jack doesn't provide default devices so we'll use the first available one.
1718 if ( device == 0 && info.outputChannels > 0 )
1719 info.isDefaultOutput = true;
1720 if ( device == 0 && info.inputChannels > 0 )
1721 info.isDefaultInput = true;
1723 jack_client_close(client);
1728 int jackCallbackHandler( jack_nframes_t nframes, void *infoPointer )
1730 CallbackInfo *info = (CallbackInfo *) infoPointer;
1732 RtApiJack *object = (RtApiJack *) info->object;
1733 if ( object->callbackEvent( (unsigned long) nframes ) == false ) return 1;
1738 void jackShutdown( void *infoPointer )
1740 CallbackInfo *info = (CallbackInfo *) infoPointer;
1741 RtApiJack *object = (RtApiJack *) info->object;
1743 // Check current stream state. If stopped, then we'll assume this
1744 // was called as a result of a call to RtApiJack::stopStream (the
1745 // deactivation of a client handle causes this function to be called).
1746 // If not, we'll assume the Jack server is shutting down or some
1747 // other problem occurred and we should close the stream.
1748 if ( object->isStreamRunning() == false ) return;
1750 object->closeStream();
1751 std::cerr << "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!\n" << std::endl;
1754 int jackXrun( void *infoPointer )
1756 JackHandle *handle = (JackHandle *) infoPointer;
1758 if ( handle->ports[0] ) handle->xrun[0] = true;
1759 if ( handle->ports[1] ) handle->xrun[1] = true;
1764 bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
1765 unsigned int firstChannel, unsigned int sampleRate,
1766 RtAudioFormat format, unsigned int *bufferSize,
1767 RtAudio::StreamOptions *options )
1769 JackHandle *handle = (JackHandle *) stream_.apiHandle;
1771 // Look for jack server and try to become a client (only do once per stream).
1772 jack_client_t *client = 0;
1773 if ( mode == OUTPUT || ( mode == INPUT && stream_.mode != OUTPUT ) ) {
1774 if ( options && !options->streamName.empty() )
1775 client = jack_client_new( options->streamName.c_str() );
1777 client = jack_client_new( "RtApiJack" );
1778 if ( client == 0 ) {
1779 errorText_ = "RtApiJack::probeDeviceOpen: Jack server not found or connection error!";
1780 error( RtError::WARNING );
1785 // The handle must have been created on an earlier pass.
1786 client = handle->client;
1790 std::string port, previousPort, deviceName;
1791 unsigned int nPorts = 0, nDevices = 0;
1792 ports = jack_get_ports( client, NULL, NULL, 0 );
1794 // Parse the port names up to the first colon (:).
1795 unsigned int iColon = 0;
1797 port = (char *) ports[ nPorts ];
1798 iColon = port.find(":");
1799 if ( iColon != std::string::npos ) {
1800 port = port.substr( 0, iColon );
1801 if ( port != previousPort ) {
1802 if ( nDevices == device ) deviceName = port;
1804 previousPort = port;
1807 } while ( ports[++nPorts] );
1811 if ( device >= nDevices ) {
1812 errorText_ = "RtApiJack::probeDeviceOpen: device ID is invalid!";
1816 // Count the available ports containing the client name as device
1817 // channels. Jack "input ports" equal RtAudio output channels.
1818 unsigned int nChannels = 0;
1819 unsigned long flag = JackPortIsOutput;
1820 if ( mode == INPUT ) flag = JackPortIsInput;
1821 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1823 while ( ports[ nChannels ] ) nChannels++;
1827 // Compare the jack ports for specified client to the requested number of channels.
1828 if ( nChannels < (channels + firstChannel) ) {
1829 errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ").";
1830 errorText_ = errorStream_.str();
1834 // Check the jack server sample rate.
1835 unsigned int jackRate = jack_get_sample_rate( client );
1836 if ( sampleRate != jackRate ) {
1837 jack_client_close( client );
1838 errorStream_ << "RtApiJack::probeDeviceOpen: the requested sample rate (" << sampleRate << ") is different than the JACK server rate (" << jackRate << ").";
1839 errorText_ = errorStream_.str();
1842 stream_.sampleRate = jackRate;
1844 // Get the latency of the JACK port.
1845 ports = jack_get_ports( client, deviceName.c_str(), NULL, flag );
1846 if ( ports[ firstChannel ] )
1847 stream_.latency[mode] = jack_port_get_latency( jack_port_by_name( client, ports[ firstChannel ] ) );
1850 // The jack server always uses 32-bit floating-point data.
1851 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
1852 stream_.userFormat = format;
1854 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
1855 else stream_.userInterleaved = true;
1857 // Jack always uses non-interleaved buffers.
1858 stream_.deviceInterleaved[mode] = false;
1860 // Jack always provides host byte-ordered data.
1861 stream_.doByteSwap[mode] = false;
1863 // Get the buffer size. The buffer size and number of buffers
1864 // (periods) is set when the jack server is started.
1865 stream_.bufferSize = (int) jack_get_buffer_size( client );
1866 *bufferSize = stream_.bufferSize;
1868 stream_.nDeviceChannels[mode] = channels;
1869 stream_.nUserChannels[mode] = channels;
1871 // Set flags for buffer conversion.
1872 stream_.doConvertBuffer[mode] = false;
1873 if ( stream_.userFormat != stream_.deviceFormat[mode] )
1874 stream_.doConvertBuffer[mode] = true;
1875 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
1876 stream_.nUserChannels[mode] > 1 )
1877 stream_.doConvertBuffer[mode] = true;
1879 // Allocate our JackHandle structure for the stream.
1880 if ( handle == 0 ) {
1882 handle = new JackHandle;
1884 catch ( std::bad_alloc& ) {
1885 errorText_ = "RtApiJack::probeDeviceOpen: error allocating JackHandle memory.";
1889 if ( pthread_cond_init(&handle->condition, NULL) ) {
1890 errorText_ = "RtApiJack::probeDeviceOpen: error initializing pthread condition variable.";
1893 stream_.apiHandle = (void *) handle;
1894 handle->client = client;
1896 handle->deviceName[mode] = deviceName;
1898 // Allocate necessary internal buffers.
1899 unsigned long bufferBytes;
1900 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
1901 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
1902 if ( stream_.userBuffer[mode] == NULL ) {
1903 errorText_ = "RtApiJack::probeDeviceOpen: error allocating user buffer memory.";
1907 if ( stream_.doConvertBuffer[mode] ) {
1909 bool makeBuffer = true;
1910 if ( mode == OUTPUT )
1911 bufferBytes = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
1912 else { // mode == INPUT
1913 bufferBytes = stream_.nDeviceChannels[1] * formatBytes( stream_.deviceFormat[1] );
1914 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
1915 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
1916 if ( bufferBytes < bytesOut ) makeBuffer = false;
1921 bufferBytes *= *bufferSize;
1922 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
1923 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
1924 if ( stream_.deviceBuffer == NULL ) {
1925 errorText_ = "RtApiJack::probeDeviceOpen: error allocating device buffer memory.";
1931 // Allocate memory for the Jack ports (channels) identifiers.
1932 handle->ports[mode] = (jack_port_t **) malloc ( sizeof (jack_port_t *) * channels );
1933 if ( handle->ports[mode] == NULL ) {
1934 errorText_ = "RtApiJack::probeDeviceOpen: error allocating port memory.";
1938 stream_.device[mode] = device;
1939 stream_.channelOffset[mode] = firstChannel;
1940 stream_.state = STREAM_STOPPED;
1941 stream_.callbackInfo.object = (void *) this;
1943 if ( stream_.mode == OUTPUT && mode == INPUT )
1944 // We had already set up the stream for output.
1945 stream_.mode = DUPLEX;
1947 stream_.mode = mode;
1948 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
1949 jack_set_xrun_callback( handle->client, jackXrun, (void *) &handle );
1950 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
1953 // Register our ports.
1955 if ( mode == OUTPUT ) {
1956 for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
1957 snprintf( label, 64, "outport %d", i );
1958 handle->ports[0][i] = jack_port_register( handle->client, (const char *)label,
1959 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0 );
1963 for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
1964 snprintf( label, 64, "inport %d", i );
1965 handle->ports[1][i] = jack_port_register( handle->client, (const char *)label,
1966 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0 );
1970 // Setup the buffer conversion information structure. We don't use
1971 // buffers to do channel offsets, so we override that parameter
1973 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
1979 pthread_cond_destroy( &handle->condition );
1980 jack_client_close( handle->client );
1982 if ( handle->ports[0] ) free( handle->ports[0] );
1983 if ( handle->ports[1] ) free( handle->ports[1] );
1986 stream_.apiHandle = 0;
1989 for ( int i=0; i<2; i++ ) {
1990 if ( stream_.userBuffer[i] ) {
1991 free( stream_.userBuffer[i] );
1992 stream_.userBuffer[i] = 0;
1996 if ( stream_.deviceBuffer ) {
1997 free( stream_.deviceBuffer );
1998 stream_.deviceBuffer = 0;
// Close an open JACK stream: deactivate/close the JACK client, release
// the JackHandle (per-direction port arrays + pthread condition), free
// the user and device buffers, and mark the stream CLOSED.
// NOTE(review): this excerpt dropped some lines during extraction
// (closing braces, the early return after the warning, the handle-null
// guard); the code lines below are preserved verbatim.
2004 void RtApiJack :: closeStream( void )
2006   if ( stream_.state == STREAM_CLOSED ) {
// Nothing to close — warn (non-fatal) and bail out.
2007     errorText_ = "RtApiJack::closeStream(): no open stream to close!";
2008     error( RtError::WARNING );
2012   JackHandle *handle = (JackHandle *) stream_.apiHandle;
// A running stream must be deactivated before the client is closed.
2015     if ( stream_.state == STREAM_RUNNING )
2016       jack_deactivate( handle->client );
2018     jack_client_close( handle->client );
// Free the jack_port_t* arrays allocated in probeDeviceOpen
// (index 0 = output ports, index 1 = input ports).
2022     if ( handle->ports[0] ) free( handle->ports[0] );
2023     if ( handle->ports[1] ) free( handle->ports[1] );
2024     pthread_cond_destroy( &handle->condition );
2026     stream_.apiHandle = 0;
// Release the interleaved user buffers for both directions.
2029   for ( int i=0; i<2; i++ ) {
2030     if ( stream_.userBuffer[i] ) {
2031       free( stream_.userBuffer[i] );
2032       stream_.userBuffer[i] = 0;
// Release the (shared) device-format conversion buffer, if any.
2036   if ( stream_.deviceBuffer ) {
2037     free( stream_.deviceBuffer );
2038     stream_.deviceBuffer = 0;
// Reset bookkeeping so a new stream can be opened.
2041   stream_.mode = UNINITIALIZED;
2042   stream_.state = STREAM_CLOSED;
// Start a stopped JACK stream: activate the client, then auto-connect
// our registered ports to the device's physical ports. Since RtAudio
// does not let the user pick individual channels, the first
// nUserChannels ports are connected starting at channelOffset.
// NOTE(review): extraction dropped lines here (result checks after
// jack_connect, free( ports ) calls, closing braces, the early return
// after the warning); code lines are preserved verbatim.
2045 void RtApiJack :: startStream( void )
2048   if ( stream_.state == STREAM_RUNNING ) {
2049     errorText_ = "RtApiJack::startStream(): the stream is already running!";
2050     error( RtError::WARNING );
2054   MUTEX_LOCK(&stream_.mutex);
2056   JackHandle *handle = (JackHandle *) stream_.apiHandle;
// Activating the client starts delivery of process callbacks.
2057   int result = jack_activate( handle->client );
2059     errorText_ = "RtApiJack::startStream(): unable to activate JACK client!";
2065   // Get the list of available ports.
2066   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// For output we connect to the device's *input* ports (data sinks).
2068     ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput);
2069     if ( ports == NULL) {
2070       errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!";
2074     // Now make the port connections.  Since RtAudio wasn't designed to
2075     // allow the user to select particular channels of a device, we'll
2076     // just open the first "nChannels" ports with offset.
2077     for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2079       if ( ports[ stream_.channelOffset[0] + i ] )
2080         result = jack_connect( handle->client, jack_port_name( handle->ports[0][i] ), ports[ stream_.channelOffset[0] + i ] );
2083         errorText_ = "RtApiJack::startStream(): error connecting output ports!";
2090   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// For input we connect from the device's *output* ports (data sources).
2092     ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput );
2093     if ( ports == NULL) {
2094       errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!";
2098     // Now make the port connections.  See note above.
2099     for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2101       if ( ports[ stream_.channelOffset[1] + i ] )
2102         result = jack_connect( handle->client, ports[ stream_.channelOffset[1] + i ], jack_port_name( handle->ports[1][i] ) );
2105         errorText_ = "RtApiJack::startStream(): error connecting input ports!";
// Reset drain state so the callback starts delivering audio immediately.
2112   handle->drainCounter = 0;
2113   handle->internalDrain = false;
2114   stream_.state = STREAM_RUNNING;
2117   MUTEX_UNLOCK(&stream_.mutex);
// result reflects the last JACK call; any failure path above lands here.
2119   if ( result == 0 ) return;
2120   error( RtError::SYSTEM_ERROR );
// Stop a running JACK stream. For output streams, first request a
// graceful drain (drainCounter = 1) and block on the condition variable
// until the process callback signals that the last buffers have been
// played, then deactivate the client.
// NOTE(review): extraction dropped lines (early return after the
// warning, closing braces); code lines are preserved verbatim.
2123 void RtApiJack :: stopStream( void )
2126   if ( stream_.state == STREAM_STOPPED ) {
2127     errorText_ = "RtApiJack::stopStream(): the stream is already stopped!";
2128     error( RtError::WARNING );
2132   MUTEX_LOCK( &stream_.mutex );
2134   JackHandle *handle = (JackHandle *) stream_.apiHandle;
2135   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Only initiate a drain if the callback hasn't already started one.
2137     if ( handle->drainCounter == 0 ) {
2138       handle->drainCounter = 1;
// pthread_cond_wait atomically releases the mutex while waiting; the
// process callback signals the condition once draining completes.
2139       pthread_cond_wait( &handle->condition, &stream_.mutex ); // block until signaled
2143   jack_deactivate( handle->client );
2144   stream_.state = STREAM_STOPPED;
2146   MUTEX_UNLOCK( &stream_.mutex );
// Abort a running JACK stream: setting drainCounter non-zero makes the
// process callback zero-fill output instead of playing pending data.
// NOTE(review): the excerpt dropped this function's tail — presumably a
// call to stopStream() plus the early return after the warning; verify
// against the canonical RtAudio source.
2149 void RtApiJack :: abortStream( void )
2152   if ( stream_.state == STREAM_STOPPED ) {
2153     errorText_ = "RtApiJack::abortStream(): the stream is already stopped!";
2154     error( RtError::WARNING );
2158   JackHandle *handle = (JackHandle *) stream_.apiHandle;
2159   handle->drainCounter = 1;
// JACK process-callback workhorse, invoked (via jackCallbackHandler)
// once per JACK period with `nframes` frames to move.
//   * Verifies the stream state and that nframes matches our bufferSize.
//   * Invokes the user callback (unless draining) and interprets its
//     return value: 0 = continue, 1 = drain then stop, 2 = stop now.
//   * Copies/converts audio between the interleaved user buffers and
//     JACK's per-channel (non-interleaved) float port buffers.
// NOTE(review): extraction dropped lines (returns, closing braces, and
// presumably the stopStream trigger after draining); code lines are
// preserved verbatim. The "RtApiCore" prefixes in the two error
// messages below look like copy/paste slips from the CoreAudio section.
2164 bool RtApiJack :: callbackEvent( unsigned long nframes )
2166   if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2167   if ( stream_.state == STREAM_CLOSED ) {
2168     errorText_ = "RtApiCore::callbackEvent(): the stream is closed ... this shouldn't happen!";
2169     error( RtError::WARNING );
// JACK may renegotiate its period size; we cannot handle that mid-stream.
2172   if ( stream_.bufferSize != nframes ) {
2173     errorText_ = "RtApiCore::callbackEvent(): the JACK buffer size has changed ... cannot process!";
2174     error( RtError::WARNING );
2178   CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2179   JackHandle *handle = (JackHandle *) stream_.apiHandle;
2181   // Check if we were draining the stream and signal is finished.
2182   if ( handle->drainCounter > 3 ) {
// Only a stopStream()-initiated drain has a waiter to wake.
2183     if ( handle->internalDrain == false )
2184       pthread_cond_signal( &handle->condition );
2190   MUTEX_LOCK( &stream_.mutex );
2192   // Invoke user callback first, to get fresh output data.
2193   if ( handle->drainCounter == 0 ) {
2194     RtAudioCallback callback = (RtAudioCallback) info->callback;
2195     double streamTime = getStreamTime();
// Report (and clear) any xrun flags recorded by the jackXrun handler.
2196     RtAudioStreamStatus status = 0;
2197     if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
2198       status |= RTAUDIO_OUTPUT_UNDERFLOW;
2199       handle->xrun[0] = false;
2201     if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
2202       status |= RTAUDIO_INPUT_OVERFLOW;
2203       handle->xrun[1] = false;
2205     handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2206                                      stream_.bufferSize, streamTime, status, info->userData );
// Return value 2 = abort immediately (no drain).
2207     if ( handle->drainCounter == 2 ) {
2208       MUTEX_UNLOCK( &stream_.mutex );
// Return value 1 = stop after draining, initiated from the callback.
2212     else if ( handle->drainCounter == 1 )
2213       handle->internalDrain = true;
// Each JACK port buffer holds one channel of non-interleaved floats.
2216   jack_default_audio_sample_t *jackbuffer;
2217   unsigned long bufferBytes = nframes * sizeof( jack_default_audio_sample_t );
2218   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2220     if ( handle->drainCounter > 0 ) { // write zeros to the output stream
2222       for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2223         jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2224         memset( jackbuffer, 0, bufferBytes );
// Format and/or interleaving conversion goes through deviceBuffer,
// which is laid out channel-by-channel (non-interleaved).
2228     else if ( stream_.doConvertBuffer[0] ) {
2230       convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
2232       for ( unsigned int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2233         jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2234         memcpy( jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
2237     else { // no buffer conversion
2238       for ( unsigned int i=0; i<stream_.nUserChannels[0]; i++ ) {
2239         jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[0][i], (jack_nframes_t) nframes );
2240         memcpy( jackbuffer, &stream_.userBuffer[0][i*bufferBytes], bufferBytes );
// While draining, count callbacks until enough zero buffers have played.
2244     if ( handle->drainCounter ) {
2245       handle->drainCounter++;
2250   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
2252     if ( stream_.doConvertBuffer[1] ) {
2253       for ( unsigned int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2254         jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2255         memcpy( &stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
2257       convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
2259     else { // no buffer conversion
2260       for ( unsigned int i=0; i<stream_.nUserChannels[1]; i++ ) {
2261         jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer( handle->ports[1][i], (jack_nframes_t) nframes );
2262         memcpy( &stream_.userBuffer[1][i*bufferBytes], jackbuffer, bufferBytes );
2268   MUTEX_UNLOCK(&stream_.mutex);
2270   RtApi::tickStreamTime();
2273 //******************** End of __UNIX_JACK__ *********************//
2276 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
2278 // The ASIO API is designed around a callback scheme, so this
2279 // implementation is similar to that used for OS-X CoreAudio and Linux
2280 // Jack. The primary constraint with ASIO is that it only allows
2281 // access to a single driver at a time. Thus, it is not possible to
2282 // have more than one simultaneous RtAudio stream.
2284 // This implementation also requires a number of external ASIO files
2285 // and a few global variables. The ASIO callback scheme does not
2286 // allow for the passing of user data, so we must create a global
2287 // pointer to our callbackInfo structure.
2289 // On unix systems, we make use of a pthread condition variable.
2290 // Since there is no equivalent in Windows, I hacked something based
2291 // on information found in
2292 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
2294 #include "asio/asiosys.h"
2295 #include "asio/asio.h"
2296 #include "asio/iasiothiscallresolver.h"
2297 #include "asio/asiodrivers.h"
2300 AsioDrivers drivers;
2301 ASIOCallbacks asioCallbacks;
2302 ASIODriverInfo driverInfo;
2303 CallbackInfo *asioCallbackInfo;
2307 int drainCounter; // Tracks callback counts when draining
2308 bool internalDrain; // Indicates if stop is initiated from callback or not.
2309 ASIOBufferInfo *bufferInfos;
2313 :drainCounter(0), internalDrain(false), bufferInfos(0) {}
2316 // Function declarations (definitions at end of section)
2317 static const char* getAsioErrorString( ASIOError result );
2318 void sampleRateChanged( ASIOSampleRate sRate );
2319 long asioMessages( long selector, long value, void* message, double* opt );
// Constructor: initialize COM (ASIO drivers are COM objects and require
// apartment threading), reset any current driver, and prime the global
// ASIODriverInfo used by later ASIOInit calls.
// NOTE(review): extraction dropped braces/blank lines; code lines are
// preserved verbatim.
2321 RtApiAsio :: RtApiAsio()
2323   // ASIO cannot run on a multi-threaded appartment. You can call
2324   // CoInitialize beforehand, but it must be for appartment threading
2325   // (in which case, CoInitilialize will return S_FALSE here).
2326   coInitialized_ = false;
2327   HRESULT hr = CoInitialize( NULL );
// A failed hr here means the thread was already initialized MTA.
2329     errorText_ = "RtApiAsio::ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)";
2330     error( RtError::WARNING );
// Remember that we own the COM init so the destructor can balance it.
2332   coInitialized_ = true;
2334   drivers.removeCurrentDriver();
2335   driverInfo.asioVersion = 2;
2337   // See note in DirectSound implementation about GetDesktopWindow().
2338   driverInfo.sysRef = GetForegroundWindow();
// Destructor: close any open stream, then balance the CoInitialize
// performed in the constructor (only if we actually own it).
2341 RtApiAsio :: ~RtApiAsio()
2343   if ( stream_.state != STREAM_CLOSED ) closeStream();
2344   if ( coInitialized_ ) CoUninitialize();
// Return the number of installed ASIO drivers, as enumerated by the
// global AsioDrivers instance (each driver counts as one "device").
2347 unsigned int RtApiAsio :: getDeviceCount( void )
2349   return (unsigned int) drivers.asioGetNumDev();
// Probe ASIO driver `device` and fill an RtAudio::DeviceInfo: channel
// counts, supported sample rates, native data format, and default
// input/output flags. Requires loading (and afterwards unloading) the
// driver, which is why probing is refused while a stream is open —
// ASIO allows only one loaded driver at a time.
// NOTE(review): extraction dropped lines (returns after warnings,
// `info.probed = true;`, `return info;`, closing braces); code lines
// are preserved verbatim.
2352 RtAudio::DeviceInfo RtApiAsio :: getDeviceInfo( unsigned int device )
2354   RtAudio::DeviceInfo info;
2355   info.probed = false;
2358   unsigned int nDevices = getDeviceCount();
2359   if ( nDevices == 0 ) {
2360     errorText_ = "RtApiAsio::getDeviceInfo: no devices found!";
2361     error( RtError::INVALID_USE );
2364   if ( device >= nDevices ) {
2365     errorText_ = "RtApiAsio::getDeviceInfo: device ID is invalid!";
2366     error( RtError::INVALID_USE );
2369   // Don't probe if a stream is already open.
2370   if ( stream_.state != STREAM_CLOSED ) {
2371     errorText_ = "RtApiAsio::getDeviceInfo: unable to probe driver while a stream is open.";
2372     error( RtError::WARNING );
2376   char driverName[32];
2377   ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2378   if ( result != ASE_OK ) {
2379     errorStream_ << "RtApiAsio::getDeviceInfo: unable to get driver name (" << getAsioErrorString( result ) << ").";
2380     errorText_ = errorStream_.str();
2381     error( RtError::WARNING );
2385   info.name = driverName;
// Load + initialize the driver so we can query its capabilities.
2387   if ( !drivers.loadDriver( driverName ) ) {
2388     errorStream_ << "RtApiAsio::getDeviceInfo: unable to load driver (" << driverName << ").";
2389     errorText_ = errorStream_.str();
2390     error( RtError::WARNING );
2394   result = ASIOInit( &driverInfo );
2395   if ( result != ASE_OK ) {
2396     errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2397     errorText_ = errorStream_.str();
2398     error( RtError::WARNING );
2402   // Determine the device channel information.
2403   long inputChannels, outputChannels;
2404   result = ASIOGetChannels( &inputChannels, &outputChannels );
2405   if ( result != ASE_OK ) {
2406     drivers.removeCurrentDriver();
2407     errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2408     errorText_ = errorStream_.str();
2409     error( RtError::WARNING );
2413   info.outputChannels = outputChannels;
2414   info.inputChannels = inputChannels;
// Duplex capacity is limited by the smaller of the two directions.
2415   if ( info.outputChannels > 0 && info.inputChannels > 0 )
2416     info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
2418   // Determine the supported sample rates.
2419   info.sampleRates.clear();
2420   for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
2421     result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
2422     if ( result == ASE_OK )
2423       info.sampleRates.push_back( SAMPLE_RATES[i] );
2426   // Determine supported data types ... just check first channel and assume rest are the same.
2427   ASIOChannelInfo channelInfo;
2428   channelInfo.channel = 0;
2429   channelInfo.isInput = true;
2430   if ( info.inputChannels <= 0 ) channelInfo.isInput = false;
2431   result = ASIOGetChannelInfo( &channelInfo );
2432   if ( result != ASE_OK ) {
2433     drivers.removeCurrentDriver();
2434     errorStream_ << "RtApiAsio::getDeviceInfo: error (" << getAsioErrorString( result ) << ") getting driver channel info (" << driverName << ").";
2435     errorText_ = errorStream_.str();
2436     error( RtError::WARNING );
// Map the ASIO sample type onto RtAudio's format flags; MSB variants
// differ only in endianness and use the same RtAudio format.
2440   info.nativeFormats = 0;
2441   if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
2442     info.nativeFormats |= RTAUDIO_SINT16;
2443   else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
2444     info.nativeFormats |= RTAUDIO_SINT32;
2445   else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
2446     info.nativeFormats |= RTAUDIO_FLOAT32;
2447   else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
2448     info.nativeFormats |= RTAUDIO_FLOAT64;
2450   if ( getDefaultOutputDevice() == device )
2451     info.isDefaultOutput = true;
2452   if ( getDefaultInputDevice() == device )
2453     info.isDefaultInput = true;
// Unload the driver so other probes/opens can load theirs.
2456   drivers.removeCurrentDriver();
// Global ASIO buffer-switch callback. ASIO callbacks carry no user
// data, so the stream object is recovered through the global
// asioCallbackInfo pointer and the work is delegated to the instance's
// callbackEvent() with the half-buffer index to process.
2460 void bufferSwitch( long index, ASIOBool processNow )
2462   RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
2463   object->callbackEvent( index );
2466 bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
2467 unsigned int firstChannel, unsigned int sampleRate,
2468 RtAudioFormat format, unsigned int *bufferSize,
2469 RtAudio::StreamOptions *options )
2471 // For ASIO, a duplex stream MUST use the same driver.
2472 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
2473 errorText_ = "RtApiAsio::probeDeviceOpen: an ASIO duplex stream must use the same device for input and output!";
2477 char driverName[32];
2478 ASIOError result = drivers.asioGetDriverName( (int) device, driverName, 32 );
2479 if ( result != ASE_OK ) {
2480 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to get driver name (" << getAsioErrorString( result ) << ").";
2481 errorText_ = errorStream_.str();
2485 // Only load the driver once for duplex stream.
2486 if ( mode != INPUT || stream_.mode != OUTPUT ) {
2487 if ( !drivers.loadDriver( driverName ) ) {
2488 errorStream_ << "RtApiAsio::probeDeviceOpen: unable to load driver (" << driverName << ").";
2489 errorText_ = errorStream_.str();
2493 result = ASIOInit( &driverInfo );
2494 if ( result != ASE_OK ) {
2495 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") initializing driver (" << driverName << ").";
2496 errorText_ = errorStream_.str();
2501 // Check the device channel count.
2502 long inputChannels, outputChannels;
2503 result = ASIOGetChannels( &inputChannels, &outputChannels );
2504 if ( result != ASE_OK ) {
2505 drivers.removeCurrentDriver();
2506 errorStream_ << "RtApiAsio::probeDeviceOpen: error (" << getAsioErrorString( result ) << ") getting channel count (" << driverName << ").";
2507 errorText_ = errorStream_.str();
2511 if ( ( mode == OUTPUT && (channels+firstChannel) > (unsigned int) outputChannels) ||
2512 ( mode == INPUT && (channels+firstChannel) > (unsigned int) inputChannels) ) {
2513 drivers.removeCurrentDriver();
2514 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested channel count (" << channels << ") + offset (" << firstChannel << ").";
2515 errorText_ = errorStream_.str();
2518 stream_.nDeviceChannels[mode] = channels;
2519 stream_.nUserChannels[mode] = channels;
2520 stream_.channelOffset[mode] = firstChannel;
2522 // Verify the sample rate is supported.
2523 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
2524 if ( result != ASE_OK ) {
2525 drivers.removeCurrentDriver();
2526 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") does not support requested sample rate (" << sampleRate << ").";
2527 errorText_ = errorStream_.str();
2531 // Get the current sample rate
2532 ASIOSampleRate currentRate;
2533 result = ASIOGetSampleRate( ¤tRate );
2534 if ( result != ASE_OK ) {
2535 drivers.removeCurrentDriver();
2536 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error getting sample rate.";
2537 errorText_ = errorStream_.str();
2541 // Set the sample rate only if necessary
2542 if ( currentRate != sampleRate ) {
2543 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
2544 if ( result != ASE_OK ) {
2545 drivers.removeCurrentDriver();
2546 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error setting sample rate (" << sampleRate << ").";
2547 errorText_ = errorStream_.str();
2552 // Determine the driver data type.
2553 ASIOChannelInfo channelInfo;
2554 channelInfo.channel = 0;
2555 if ( mode == OUTPUT ) channelInfo.isInput = false;
2556 else channelInfo.isInput = true;
2557 result = ASIOGetChannelInfo( &channelInfo );
2558 if ( result != ASE_OK ) {
2559 drivers.removeCurrentDriver();
2560 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting data format.";
2561 errorText_ = errorStream_.str();
2565 // Assuming WINDOWS host is always little-endian.
2566 stream_.doByteSwap[mode] = false;
2567 stream_.userFormat = format;
2568 stream_.deviceFormat[mode] = 0;
2569 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
2570 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
2571 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
2573 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
2574 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
2575 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
2577 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
2578 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2579 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
2581 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
2582 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
2583 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
2586 if ( stream_.deviceFormat[mode] == 0 ) {
2587 drivers.removeCurrentDriver();
2588 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") data format not supported by RtAudio.";
2589 errorText_ = errorStream_.str();
2593 // Set the buffer size. For a duplex stream, this will end up
2594 // setting the buffer size based on the input constraints, which
2596 long minSize, maxSize, preferSize, granularity;
2597 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
2598 if ( result != ASE_OK ) {
2599 drivers.removeCurrentDriver();
2600 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting buffer size.";
2601 errorText_ = errorStream_.str();
2605 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2606 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2607 else if ( granularity == -1 ) {
2608 // Make sure bufferSize is a power of two.
2609 double power = std::log10( (double) *bufferSize ) / log10( 2.0 );
2610 *bufferSize = (int) pow( 2.0, floor(power+0.5) );
2611 if ( *bufferSize < (unsigned int) minSize ) *bufferSize = (unsigned int) minSize;
2612 else if ( *bufferSize > (unsigned int) maxSize ) *bufferSize = (unsigned int) maxSize;
2613 else *bufferSize = preferSize;
2615 else if ( granularity != 0 ) {
2616 // Set to an even multiple of granularity, rounding up.
2617 *bufferSize = (*bufferSize + granularity-1) / granularity * granularity;
2620 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) {
2621 drivers.removeCurrentDriver();
2622 errorText_ = "RtApiAsio::probeDeviceOpen: input/output buffersize discrepancy!";
2626 stream_.bufferSize = *bufferSize;
2627 stream_.nBuffers = 2;
2629 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
2630 else stream_.userInterleaved = true;
2632 // ASIO always uses non-interleaved buffers.
2633 stream_.deviceInterleaved[mode] = false;
2635 // Allocate, if necessary, our AsioHandle structure for the stream.
2636 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2637 if ( handle == 0 ) {
2639 handle = new AsioHandle;
2641 catch ( std::bad_alloc& ) {
2642 //if ( handle == NULL ) {
2643 drivers.removeCurrentDriver();
2644 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating AsioHandle memory.";
2647 handle->bufferInfos = 0;
2649 // Create a manual-reset event.
2650 handle->condition = CreateEvent( NULL, // no security
2651 TRUE, // manual-reset
2652 FALSE, // non-signaled initially
2654 stream_.apiHandle = (void *) handle;
2657 // Create the ASIO internal buffers. Since RtAudio sets up input
2658 // and output separately, we'll have to dispose of previously
2659 // created output buffers for a duplex stream.
2660 long inputLatency, outputLatency;
2661 if ( mode == INPUT && stream_.mode == OUTPUT ) {
2662 ASIODisposeBuffers();
2663 if ( handle->bufferInfos ) free( handle->bufferInfos );
2666 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
2667 bool buffersAllocated = false;
2668 unsigned int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2669 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
2670 if ( handle->bufferInfos == NULL ) {
2671 errorStream_ << "RtApiAsio::probeDeviceOpen: error allocating bufferInfo memory for driver (" << driverName << ").";
2672 errorText_ = errorStream_.str();
2676 ASIOBufferInfo *infos;
2677 infos = handle->bufferInfos;
2678 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
2679 infos->isInput = ASIOFalse;
2680 infos->channelNum = i + stream_.channelOffset[0];
2681 infos->buffers[0] = infos->buffers[1] = 0;
2683 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
2684 infos->isInput = ASIOTrue;
2685 infos->channelNum = i + stream_.channelOffset[1];
2686 infos->buffers[0] = infos->buffers[1] = 0;
2689 // Set up the ASIO callback structure and create the ASIO data buffers.
2690 asioCallbacks.bufferSwitch = &bufferSwitch;
2691 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
2692 asioCallbacks.asioMessage = &asioMessages;
2693 asioCallbacks.bufferSwitchTimeInfo = NULL;
2694 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
2695 if ( result != ASE_OK ) {
2696 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") creating buffers.";
2697 errorText_ = errorStream_.str();
2700 buffersAllocated = true;
2702 // Set flags for buffer conversion.
2703 stream_.doConvertBuffer[mode] = false;
2704 if ( stream_.userFormat != stream_.deviceFormat[mode] )
2705 stream_.doConvertBuffer[mode] = true;
2706 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
2707 stream_.nUserChannels[mode] > 1 )
2708 stream_.doConvertBuffer[mode] = true;
2710 // Allocate necessary internal buffers
2711 unsigned long bufferBytes;
2712 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
2713 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
2714 if ( stream_.userBuffer[mode] == NULL ) {
2715 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating user buffer memory.";
2719 if ( stream_.doConvertBuffer[mode] ) {
2721 bool makeBuffer = true;
2722 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
2723 if ( mode == INPUT ) {
2724 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2725 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
2726 if ( bufferBytes <= bytesOut ) makeBuffer = false;
2731 bufferBytes *= *bufferSize;
2732 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
2733 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
2734 if ( stream_.deviceBuffer == NULL ) {
2735 errorText_ = "RtApiAsio::probeDeviceOpen: error allocating device buffer memory.";
2741 stream_.sampleRate = sampleRate;
2742 stream_.device[mode] = device;
2743 stream_.state = STREAM_STOPPED;
2744 asioCallbackInfo = &stream_.callbackInfo;
2745 stream_.callbackInfo.object = (void *) this;
2746 if ( stream_.mode == OUTPUT && mode == INPUT )
2747 // We had already set up an output stream.
2748 stream_.mode = DUPLEX;
2750 stream_.mode = mode;
2752 // Determine device latencies
2753 result = ASIOGetLatencies( &inputLatency, &outputLatency );
2754 if ( result != ASE_OK ) {
2755 errorStream_ << "RtApiAsio::probeDeviceOpen: driver (" << driverName << ") error (" << getAsioErrorString( result ) << ") getting latency.";
2756 errorText_ = errorStream_.str();
2757 error( RtError::WARNING); // warn but don't fail
2760 stream_.latency[0] = outputLatency;
2761 stream_.latency[1] = inputLatency;
2764 // Setup the buffer conversion information structure. We don't use
2765 // buffers to do channel offsets, so we override that parameter
2767 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, 0 );
2772 if ( buffersAllocated )
2773 ASIODisposeBuffers();
2774 drivers.removeCurrentDriver();
2777 CloseHandle( handle->condition );
2778 if ( handle->bufferInfos )
2779 free( handle->bufferInfos );
2781 stream_.apiHandle = 0;
2784 for ( int i=0; i<2; i++ ) {
2785 if ( stream_.userBuffer[i] ) {
2786 free( stream_.userBuffer[i] );
2787 stream_.userBuffer[i] = 0;
2791 if ( stream_.deviceBuffer ) {
2792 free( stream_.deviceBuffer );
2793 stream_.deviceBuffer = 0;
// Close an open ASIO stream: stop it if running, dispose the driver's
// buffers, unload the driver, release the AsioHandle (event + buffer
// infos), free user/device buffers, and mark the stream CLOSED.
// NOTE(review): extraction dropped lines (early return after warning,
// ASIOStop() in the running branch, handle-null guard, `delete handle;`,
// closing braces); code lines are preserved verbatim.
2799 void RtApiAsio :: closeStream()
2801   if ( stream_.state == STREAM_CLOSED ) {
2802     errorText_ = "RtApiAsio::closeStream(): no open stream to close!";
2803     error( RtError::WARNING );
2807   if ( stream_.state == STREAM_RUNNING ) {
2808     stream_.state = STREAM_STOPPED;
2811   ASIODisposeBuffers();
// Unload the driver so another stream/probe can load one.
2812   drivers.removeCurrentDriver();
2814   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
// Release the Win32 drain event created in probeDeviceOpen.
2816     CloseHandle( handle->condition );
2817     if ( handle->bufferInfos )
2818       free( handle->bufferInfos );
2820     stream_.apiHandle = 0;
2823   for ( int i=0; i<2; i++ ) {
2824     if ( stream_.userBuffer[i] ) {
2825       free( stream_.userBuffer[i] );
2826       stream_.userBuffer[i] = 0;
2830   if ( stream_.deviceBuffer ) {
2831     free( stream_.deviceBuffer );
2832     stream_.deviceBuffer = 0;
2835   stream_.mode = UNINITIALIZED;
2836   stream_.state = STREAM_CLOSED;
// Start a stopped ASIO stream via ASIOStart(), resetting the drain
// bookkeeping so the callback delivers audio immediately.
// NOTE(review): extraction dropped lines (early return after the
// warning, the goto/label around the error path, closing braces); code
// lines are preserved verbatim.
2839 void RtApiAsio :: startStream()
2842   if ( stream_.state == STREAM_RUNNING ) {
2843     errorText_ = "RtApiAsio::startStream(): the stream is already running!";
2844     error( RtError::WARNING );
2848   MUTEX_LOCK( &stream_.mutex );
2850   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
// ASIOStart begins the driver's bufferSwitch callbacks.
2851   ASIOError result = ASIOStart();
2852   if ( result != ASE_OK ) {
2853     errorStream_ << "RtApiAsio::startStream: error (" << getAsioErrorString( result ) << ") starting device.";
2854     errorText_ = errorStream_.str();
2858   handle->drainCounter = 0;
2859   handle->internalDrain = false;
2860   stream_.state = STREAM_RUNNING;
2864   MUTEX_UNLOCK( &stream_.mutex );
2866   if ( result == ASE_OK ) return;
2867   error( RtError::SYSTEM_ERROR );
// Stop a running ASIO stream. For output streams, first request a
// graceful drain (drainCounter = 1) and wait on the Win32 manual-reset
// event until the callback signals drain completion; unlike the
// pthread_cond_wait used in the Jack version, the mutex must be
// released/reacquired explicitly around the wait. Then call ASIOStop().
// NOTE(review): extraction dropped lines (early return after warning,
// closing braces); code lines are preserved verbatim.
2870 void RtApiAsio :: stopStream()
2873   if ( stream_.state == STREAM_STOPPED ) {
2874     errorText_ = "RtApiAsio::stopStream(): the stream is already stopped!";
2875     error( RtError::WARNING );
2879   MUTEX_LOCK( &stream_.mutex );
2881   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2882   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2883     if ( handle->drainCounter == 0 ) {
2884       handle->drainCounter = 1;
// Drop the lock so the callback can run and finish the drain.
2885       MUTEX_UNLOCK( &stream_.mutex );
2886       WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE );  // block until signaled
// Manual-reset event: must be reset by hand for the next stop.
2887       ResetEvent( handle->condition );
2888       MUTEX_LOCK( &stream_.mutex );
2892   ASIOError result = ASIOStop();
2893   if ( result != ASE_OK ) {
2894     errorStream_ << "RtApiAsio::stopStream: error (" << getAsioErrorString( result ) << ") stopping device.";
2895     errorText_ = errorStream_.str();
2898   stream_.state = STREAM_STOPPED;
2899   MUTEX_UNLOCK( &stream_.mutex );
2901   if ( result == ASE_OK ) return;
2902   error( RtError::SYSTEM_ERROR );
// Abort an ASIO stream. The fast-abort path (setting drainCounter
// directly) is deliberately disabled — see the comment below — so abort
// behaves like a normal stop.
// NOTE(review): extraction dropped this function's tail — presumably a
// call to stopStream() plus the early return after the warning; verify
// against the canonical RtAudio source.
2905 void RtApiAsio :: abortStream()
2908   if ( stream_.state == STREAM_STOPPED ) {
2909     errorText_ = "RtApiAsio::abortStream(): the stream is already stopped!";
2910     error( RtError::WARNING );
2914   // The following lines were commented-out because some behavior was
2915   // noted where the device buffers need to be zeroed to avoid
2916   // continuing sound, even when the device buffers are completed
2917   // disposed.  So now, calling abort is the same as calling stop.
2918   //AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2919   //handle->drainCounter = 1;
// Per-buffer ASIO processing, driven from the driver's bufferSwitch
// callback with the half-buffer index to fill/read.  Runs the user
// callback, advances the drain/stop handshake, and moves data between
// the interleaved user buffers and the per-channel ASIO device buffers
// (with format conversion and byte swapping as flagged at open time).
// NOTE(review): this extract elides source lines (gaps in the embedded
// numbering) -- braces and some statements are missing -- so the
// comments below describe only the code that is visible here.
2923 bool RtApiAsio :: callbackEvent( long bufferIndex )
2925 if ( stream_.state == STREAM_STOPPED ) return SUCCESS;
2926 if ( stream_.state == STREAM_CLOSED ) {
2927 errorText_ = "RtApiAsio::callbackEvent(): the stream is closed ... this shouldn't happen!";
2928 error( RtError::WARNING );
2932 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2933 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
2935 // Check if we were draining the stream and signal is finished.
2936 if ( handle->drainCounter > 3 ) {
// drainCounter > 3 means the post-callback zero buffers have been played
// out; signal the event (presumably waking a thread blocked in
// stopStream() -- confirm against the full source) unless the drain was
// initiated internally by the callback's return value.
2937 if ( handle->internalDrain == false )
2938 SetEvent( handle->condition );
2944 MUTEX_LOCK( &stream_.mutex );
2946 // The state might change while waiting on a mutex.
2947 if ( stream_.state == STREAM_STOPPED ) goto unlock;
2949 // Invoke user callback to get fresh output data UNLESS we are
2951 if ( handle->drainCounter == 0 ) {
2952 RtAudioCallback callback = (RtAudioCallback) info->callback;
2953 double streamTime = getStreamTime();
2954 RtAudioStreamStatus status = 0;
// Report any xrun flagged by the driver since the last callback.
2955 if ( stream_.mode != INPUT && asioXRun == true ) {
2956 status |= RTAUDIO_OUTPUT_UNDERFLOW;
2959 if ( stream_.mode != OUTPUT && asioXRun == true ) {
2960 status |= RTAUDIO_INPUT_OVERFLOW;
// The callback's return value seeds the drain state machine: as handled
// below, 2 causes an immediate unlock (abort path) and 1 marks an
// internally initiated drain.
2963 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
2964 stream_.bufferSize, streamTime, status, info->userData );
2965 if ( handle->drainCounter == 2 ) {
2966 MUTEX_UNLOCK( &stream_.mutex );
2970 else if ( handle->drainCounter == 1 )
2971 handle->internalDrain = true;
2974 unsigned int nChannels, bufferBytes, i, j;
2975 nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
2976 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
2978 bufferBytes = stream_.bufferSize * formatBytes( stream_.deviceFormat[0] );
2980 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
2982 for ( i=0, j=0; i<nChannels; i++ ) {
2983 if ( handle->bufferInfos[i].isInput != ASIOTrue )
2984 memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );
// User data needs conversion and/or byte swapping before being copied
// out to the per-channel ASIO output buffers.
2988 else if ( stream_.doConvertBuffer[0] ) {
2990 convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );
2991 if ( stream_.doByteSwap[0] )
2992 byteSwapBuffer( stream_.deviceBuffer,
2993 stream_.bufferSize * stream_.nDeviceChannels[0],
2994 stream_.deviceFormat[0] );
2996 for ( i=0, j=0; i<nChannels; i++ ) {
2997 if ( handle->bufferInfos[i].isInput != ASIOTrue )
2998 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
2999 &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
// No conversion needed: copy user channels straight to the device,
// byte-swapping in place first if required.
3005 if ( stream_.doByteSwap[0] )
3006 byteSwapBuffer( stream_.userBuffer[0],
3007 stream_.bufferSize * stream_.nUserChannels[0],
3008 stream_.userFormat );
3010 for ( i=0, j=0; i<nChannels; i++ ) {
3011 if ( handle->bufferInfos[i].isInput != ASIOTrue )
3012 memcpy( handle->bufferInfos[i].buffers[bufferIndex],
3013 &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );
// Once draining has started, count this buffer toward the
// drainCounter > 3 completion test above.
3018 if ( handle->drainCounter ) {
3019 handle->drainCounter++;
3024 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3026 bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
3028 if (stream_.doConvertBuffer[1]) {
3030 // Always interleave ASIO input data.
3031 for ( i=0, j=0; i<nChannels; i++ ) {
3032 if ( handle->bufferInfos[i].isInput == ASIOTrue )
3033 memcpy( &stream_.deviceBuffer[j++*bufferBytes],
3034 handle->bufferInfos[i].buffers[bufferIndex],
3038 if ( stream_.doByteSwap[1] )
3039 byteSwapBuffer( stream_.deviceBuffer,
3040 stream_.bufferSize * stream_.nDeviceChannels[1],
3041 stream_.deviceFormat[1] );
3042 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
// No conversion needed: copy device channels straight into the user
// input buffer.
3046 for ( i=0, j=0; i<nChannels; i++ ) {
3047 if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
3048 memcpy( &stream_.userBuffer[1][bufferBytes*j++],
3049 handle->bufferInfos[i].buffers[bufferIndex],
3054 if ( stream_.doByteSwap[1] )
3055 byteSwapBuffer( stream_.userBuffer[1],
3056 stream_.bufferSize * stream_.nUserChannels[1],
3057 stream_.userFormat );
3062 // The following call was suggested by Malte Clasen. While the API
3063 // documentation indicates it should not be required, some device
3064 // drivers apparently do not function correctly without it.
3067 MUTEX_UNLOCK( &stream_.mutex );
3069 RtApi::tickStreamTime();
// ASIO driver callback: the driver reports a sample-rate change.  The
// visible handling stops the stream (logging any RtError raised by
// stopStream()) and reports the new rate on stderr.
3073 void sampleRateChanged( ASIOSampleRate sRate )
3075 // The ASIO documentation says that this usually only happens during
3076 // external sync. Audio processing is not stopped by the driver,
3077 // actual sample rate might not have even changed, maybe only the
3078 // sample rate status of an AES/EBU or S/PDIF digital input at the
3081 RtApi *object = (RtApi *) asioCallbackInfo->object;
3083 object->stopStream();
3085 catch ( RtError &exception ) {
3086 std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;
3090 std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;
// ASIO host-message callback: answers driver queries (supported
// selectors, engine version, timeInfo/timeCode support) and reports
// reset/resync/latency notifications on stderr.  The return statements
// for each case are elided from this extract (gaps in the embedded
// numbering).
3093 long asioMessages( long selector, long value, void* message, double* opt )
3097 switch( selector ) {
3098 case kAsioSelectorSupported:
3099 if ( value == kAsioResetRequest
3100 || value == kAsioEngineVersion
3101 || value == kAsioResyncRequest
3102 || value == kAsioLatenciesChanged
3103 // The following three were added for ASIO 2.0, you don't
3104 // necessarily have to support them.
3105 || value == kAsioSupportsTimeInfo
3106 || value == kAsioSupportsTimeCode
3107 || value == kAsioSupportsInputMonitor)
3110 case kAsioResetRequest:
3111 // Defer the task and perform the reset of the driver during the
3112 // next "safe" situation. You cannot reset the driver right now,
3113 // as this code is called from the driver. Resetting the driver
3114 // is done by completely destructing it, i.e. ASIOStop(),
3115 // ASIODisposeBuffers(), destruction. Afterwards you initialize the
3117 std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;
3120 case kAsioResyncRequest:
3121 // This informs the application that the driver encountered some
3122 // non-fatal data loss. It is used for synchronization purposes
3123 // of different media. Added mainly to work around the Win16Mutex
3124 // problems in Windows 95/98 with the Windows Multimedia system,
3125 // which could lose data because the Mutex was held too long by
3126 // another thread. However a driver can issue it in other
3128 // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;
3132 case kAsioLatenciesChanged:
3133 // This will inform the host application that the driver's
3134 // latencies have changed. Beware, this does not mean that the
3135 // buffer sizes have changed! You might need to update internal
3137 std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;
3140 case kAsioEngineVersion:
3141 // Return the supported ASIO version of the host application. If
3142 // a host application does not implement this selector, ASIO 1.0
3143 // is assumed by the driver.
3146 case kAsioSupportsTimeInfo:
3147 // Informs the driver whether the
3148 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
3149 // For compatibility with ASIO 1.0 drivers the host application
3150 // should always support the "old" bufferSwitch method, too.
3153 case kAsioSupportsTimeCode:
3154 // Informs the driver whether application is interested in time
3155 // code info. If an application does not need to know about time
3156 // code, the driver has less work to do.
// Maps an ASIOError code to a human-readable description for error
// reporting.  NOTE(review): the Messages struct definition (its value /
// message members, implied by the table and lookup below) is elided
// from this extract (numbering gap 3164-3170) -- confirm against the
// full source.
3163 static const char* getAsioErrorString( ASIOError result )
3171 static Messages m[] =
3173 { ASE_NotPresent, "Hardware input or output is not present or available." },
3174 { ASE_HWMalfunction, "Hardware is malfunctioning." },
3175 { ASE_InvalidParameter, "Invalid input parameter." },
3176 { ASE_InvalidMode, "Invalid mode." },
3177 { ASE_SPNotAdvancing, "Sample position not advancing." },
3178 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
3179 { ASE_NoMemory, "Not enough memory to complete the request." }
// Linear scan is fine: the table is tiny and this path is only reached
// when reporting an error.
3182 for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )
3183 if ( m[i].value == result ) return m[i].message;
3185 return "Unknown error.";
3187 //******************** End of __WINDOWS_ASIO__ *********************//
3191 #if defined(__WINDOWS_DS__) // Windows DirectSound API
3193 // Modified by Robin Davies, October 2005
3194 // - Improvements to DirectX pointer chasing.
3195 // - Backdoor RtDsStatistics hook provides DirectX performance information.
3196 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
3197 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
3198 // Various revisions for RtAudio 4.0 by Gary Scavone, April 2007
3203 #if defined(__MINGW32__)
3204 // missing from latest mingw winapi
3205 #define WAVE_FORMAT_96M08 0x00010000 /* 96 kHz, Mono, 8-bit */
3206 #define WAVE_FORMAT_96S08 0x00020000 /* 96 kHz, Stereo, 8-bit */
3207 #define WAVE_FORMAT_96M16 0x00040000 /* 96 kHz, Mono, 16-bit */
3208 #define WAVE_FORMAT_96S16 0x00080000 /* 96 kHz, Stereo, 16-bit */
3211 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
3213 #ifdef _MSC_VER // if Microsoft Visual C++
3214 #pragma comment( lib, "winmm.lib" ) // then, auto-link winmm.lib. Otherwise, it has to be added manually.
3217 static inline DWORD dsPointerDifference( DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3219 if (laterPointer > earlierPointer)
3220 return laterPointer - earlierPointer;
3222 return laterPointer - earlierPointer + bufferSize;
// Circular-buffer membership test: true (nonzero) when `pointer` lies
// in the half-open interval [earlierPointer, laterPointer) of a
// circular buffer of bufferSize bytes, after normalizing all three
// positions for wraparound.  Note the DWORD (not bool) return type:
// the boolean expression is implicitly widened for the caller.
3225 static inline DWORD dsPointerBetween( DWORD pointer, DWORD laterPointer, DWORD earlierPointer, DWORD bufferSize )
3227 if ( pointer > bufferSize ) pointer -= bufferSize;
3228 if ( laterPointer < earlierPointer ) laterPointer += bufferSize;
3229 if ( pointer < earlierPointer ) pointer += bufferSize;
3230 return pointer >= earlierPointer && pointer < laterPointer;
3233 // A structure to hold various information related to the DirectSound
3234 // API implementation.
// NOTE(review): the `struct` header line and several members that the
// constructor below initializes (id[2], buffer[2], xrun[2]) are elided
// from this extract (gaps in the embedded numbering) -- confirm the
// full member list against the original source.
3236 unsigned int drainCounter; // Tracks callback counts when draining
3237 bool internalDrain; // Indicates if stop is initiated from callback or not.
3241 UINT bufferPointer[2];
3242 DWORD dsBufferSize[2];
3243 DWORD dsPointerLeadTime[2]; // the number of bytes ahead of the safe pointer to lead by.
// Default constructor: zero the counters/pointers and clear all flags.
3247 :drainCounter(0), internalDrain(false) { id[0] = 0; id[1] = 0; buffer[0] = 0; buffer[1] = 0; xrun[0] = false; xrun[1] = false; bufferPointer[0] = 0; bufferPointer[1] = 0; }
// Definition of the process-wide statistics instance declared in the
// class.
3251 RtApiDs::RtDsStatistics statistics;
3253 // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
3254 RtApiDs::RtDsStatistics RtApiDs::getDsStatistics()
// Work on a copy so the derived latency field is computed without
// mutating the live counters.
3256 RtDsStatistics s = statistics;
3258 // update the calculated fields.
3259 if ( s.inputFrameSize != 0 )
3260 s.latency += s.readDeviceSafeLeadBytes * 1.0 / s.inputFrameSize / s.sampleRate;
3262 if ( s.outputFrameSize != 0 )
3263 s.latency += (s.writeDeviceSafeLeadBytes + s.writeDeviceBufferLeadBytes) * 1.0 / s.outputFrameSize / s.sampleRate;
3269 // Declarations for utility functions, callbacks, and structures
3270 // specific to the DirectSound implementation.
// NOTE(review): this extract elides the remainder of the
// deviceCountCallback parameter list, the enumeration-info struct's
// name and most of its members, and the surrounding braces (gaps in
// the embedded numbering).
3271 static BOOL CALLBACK deviceCountCallback( LPGUID lpguid,
3272 LPCTSTR description,
3276 static char* getErrorString( int code );
3278 extern "C" unsigned __stdcall callbackHandler( void *ptr );
// Enumeration bookkeeping: `counter` counts devices seen; the
// constructor also clears isInput / getDefault / findIndex / index,
// which presumably steer the enumeration callback -- confirm against
// the full struct definition.
3284 unsigned int counter;
3290 : isInput(false), getDefault(false), findIndex(false), counter(0), index(0) {}
// Constructor: initialize COM for this thread (DirectSound requires
// COM).  If CoInitialize() fails, assume the host application already
// chose a threading model; coInitialized_ stays false so the
// destructor skips the balancing CoUninitialize().
3293 RtApiDs :: RtApiDs()
3295 // Dsound will run both-threaded. If CoInitialize fails, then just
3296 // accept whatever the mainline chose for a threading model.
3297 coInitialized_ = false;
3298 HRESULT hr = CoInitialize( NULL );
3299 if ( !FAILED( hr ) ) coInitialized_ = true;
// Destructor: balance the constructor's CoInitialize() and make sure
// any open stream is shut down before the object goes away.
3302 RtApiDs :: ~RtApiDs()
3304 if ( coInitialized_ ) CoUninitialize(); // balanced call.
3305 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Returns the index of the system default capture device.  Output
// devices must be counted first (even for an input query) because this
// API numbers devices output-then-input; the capture enumeration is
// then run with getDefault set, and the default device's position is
// counter - 1.  Enumeration failures are reported as warnings.
3308 unsigned int RtApiDs :: getDefaultInputDevice( void )
3310 // Count output devices.
3312 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceCountCallback, &info );
3313 if ( FAILED( result ) ) {
// FIX(review): this message previously named getDefaultOutputDevice --
// a copy-paste error; we are in getDefaultInputDevice.
3314 errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") counting output devices!";
3315 errorText_ = errorStream_.str();
3316 error( RtError::WARNING );
3320 // Now enumerate input devices until we find the id = NULL.
3321 info.isInput = true;
3322 info.getDefault = true;
3323 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceCountCallback, &info );
3324 if ( FAILED( result ) ) {
3325 errorStream_ << "RtApiDs::getDefaultInputDevice: error (" << getErrorString( result ) << ") enumerating input devices!";
3326 errorText_ = errorStream_.str();
3327 error( RtError::WARNING );
3331 if ( info.counter > 0 ) return info.counter - 1;
// Returns the index of the system default playback device: enumerate
// render devices with getDefault set and take counter - 1.
// Enumeration failures are reported as warnings.
3335 unsigned int RtApiDs :: getDefaultOutputDevice( void )
3337 // Enumerate output devices until we find the id = NULL.
3339 info.getDefault = true;
3340 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceCountCallback, &info );
3341 if ( FAILED( result ) ) {
3342 errorStream_ << "RtApiDs::getDefaultOutputDevice: error (" << getErrorString( result ) << ") enumerating output devices!";
3343 errorText_ = errorStream_.str();
3344 error( RtError::WARNING );
3348 if ( info.counter > 0 ) return info.counter - 1;
// Total device count: DirectSound render devices are enumerated first,
// then DirectSoundCapture devices, both accumulating into the same
// info.counter so device indices are contiguous across the two sets.
// Enumeration failures are reported as warnings.
3352 unsigned int RtApiDs :: getDeviceCount( void )
3354 // Count DirectSound devices.
3356 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceCountCallback, &info );
3357 if ( FAILED( result ) ) {
3358 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating output devices!";
3359 errorText_ = errorStream_.str();
3360 error( RtError::WARNING );
3363 // Count DirectSoundCapture devices.
3364 info.isInput = true;
3365 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceCountCallback, &info );
3366 if ( FAILED( result ) ) {
3367 errorStream_ << "RtApiDs::getDeviceCount: error (" << getErrorString( result ) << ") enumerating input devices!";
3368 errorText_ = errorStream_.str();
3369 error( RtError::WARNING );
3372 return info.counter;
// Probes a single DirectSound device (by combined output-then-input
// index) and fills an RtAudio::DeviceInfo: channel counts, supported
// sample rates, and native formats.  Output capabilities come from the
// primary-buffer DSCAPS flags; input capabilities come from the
// DSCCAPS dwFormats WAVE_FORMAT_* bits.  All failures are reported as
// warnings and leave info.probed false.
// NOTE(review): this extract elides source lines (gaps in the embedded
// numbering), including some braces, goto labels, Release() calls, and
// the final returns -- comments below describe only the visible code.
3375 RtAudio::DeviceInfo RtApiDs :: getDeviceInfo( unsigned int device )
3377 // Because DirectSound always enumerates input and output devices
3378 // separately (and because we don't attempt to combine devices
3379 // internally), none of our "devices" will ever be duplex.
3381 RtAudio::DeviceInfo info;
3382 info.probed = false;
3384 // Enumerate through devices to find the id (if it exists). Note
3385 // that we have to do the output enumeration first, even if this is
3386 // an input device, in order for the device counter to be correct.
3388 dsinfo.findIndex = true;
3389 dsinfo.index = device;
3390 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceCountCallback, &dsinfo );
3391 if ( FAILED( result ) ) {
3392 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating output devices!";
3393 errorText_ = errorStream_.str();
3394 error( RtError::WARNING );
// No output device matched this index: it must be an input device.
3397 if ( dsinfo.name.empty() ) goto probeInput;
3399 LPDIRECTSOUND output;
3401 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3402 if ( FAILED( result ) ) {
3403 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3404 errorText_ = errorStream_.str();
3405 error( RtError::WARNING );
3409 outCaps.dwSize = sizeof( outCaps );
3410 result = output->GetCaps( &outCaps );
3411 if ( FAILED( result ) ) {
3413 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting capabilities!";
3414 errorText_ = errorStream_.str();
3415 error( RtError::WARNING );
3419 // Get output channel information.
3420 info.outputChannels = ( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
3422 // Get sample rate information.
3423 info.sampleRates.clear();
3424 for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
3425 if ( SAMPLE_RATES[k] >= (unsigned int) outCaps.dwMinSecondarySampleRate &&
3426 SAMPLE_RATES[k] <= (unsigned int) outCaps.dwMaxSecondarySampleRate )
3427 info.sampleRates.push_back( SAMPLE_RATES[k] );
3430 // Get format information.
3431 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT ) info.nativeFormats |= RTAUDIO_SINT16;
3432 if ( outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) info.nativeFormats |= RTAUDIO_SINT8;
3436 if ( getDefaultOutputDevice() == device )
3437 info.isDefaultOutput = true;
3439 // Copy name and return.
3440 info.name = dsinfo.name;
// Re-enumerate on the capture side, still looking for the same index.
3447 dsinfo.isInput = true;
3448 result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceCountCallback, &dsinfo );
3449 if ( FAILED( result ) ) {
3450 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") enumerating input devices!";
3451 errorText_ = errorStream_.str();
3452 error( RtError::WARNING );
3455 if ( dsinfo.name.empty() ) return info;
3457 LPDIRECTSOUNDCAPTURE input;
3458 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
3459 if ( FAILED( result ) ) {
3460 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
3461 errorText_ = errorStream_.str();
3462 error( RtError::WARNING );
3467 inCaps.dwSize = sizeof( inCaps );
3468 result = input->GetCaps( &inCaps );
3469 if ( FAILED( result ) ) {
3471 errorStream_ << "RtApiDs::getDeviceInfo: error (" << getErrorString( result ) << ") getting object capabilities (" << dsinfo.name << ")!";
3472 errorText_ = errorStream_.str();
3473 error( RtError::WARNING );
3477 // Get input channel information.
3478 info.inputChannels = inCaps.dwChannels;
3480 // Get sample rate and format information.
3481 if ( inCaps.dwChannels == 2 ) {
3482 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3483 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3484 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3485 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.nativeFormats |= RTAUDIO_SINT16;
3486 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3487 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3488 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3489 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.nativeFormats |= RTAUDIO_SINT8;
3491 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3492 if ( inCaps.dwFormats & WAVE_FORMAT_1S16 ) info.sampleRates.push_back( 11025 );
3493 if ( inCaps.dwFormats & WAVE_FORMAT_2S16 ) info.sampleRates.push_back( 22050 );
3494 if ( inCaps.dwFormats & WAVE_FORMAT_4S16 ) info.sampleRates.push_back( 44100 );
3495 if ( inCaps.dwFormats & WAVE_FORMAT_96S16 ) info.sampleRates.push_back( 96000 );
3497 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3498 if ( inCaps.dwFormats & WAVE_FORMAT_1S08 ) info.sampleRates.push_back( 11025 );
3499 if ( inCaps.dwFormats & WAVE_FORMAT_2S08 ) info.sampleRates.push_back( 22050 );
3500 if ( inCaps.dwFormats & WAVE_FORMAT_4S08 ) info.sampleRates.push_back( 44100 );
// FIX(review): WAVE_FORMAT_96S08 is 96 kHz stereo 8-bit (see the
// MinGW #define comment earlier in this file and the parallel mono
// branch below); this previously pushed 44100.
3501 if ( inCaps.dwFormats & WAVE_FORMAT_96S08 ) info.sampleRates.push_back( 96000 );
3504 else if ( inCaps.dwChannels == 1 ) {
3505 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3506 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3507 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3508 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.nativeFormats |= RTAUDIO_SINT16;
3509 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3510 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3511 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3512 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.nativeFormats |= RTAUDIO_SINT8;
3514 if ( info.nativeFormats & RTAUDIO_SINT16 ) {
3515 if ( inCaps.dwFormats & WAVE_FORMAT_1M16 ) info.sampleRates.push_back( 11025 );
3516 if ( inCaps.dwFormats & WAVE_FORMAT_2M16 ) info.sampleRates.push_back( 22050 );
3517 if ( inCaps.dwFormats & WAVE_FORMAT_4M16 ) info.sampleRates.push_back( 44100 );
3518 if ( inCaps.dwFormats & WAVE_FORMAT_96M16 ) info.sampleRates.push_back( 96000 );
3520 else if ( info.nativeFormats & RTAUDIO_SINT8 ) {
3521 if ( inCaps.dwFormats & WAVE_FORMAT_1M08 ) info.sampleRates.push_back( 11025 );
3522 if ( inCaps.dwFormats & WAVE_FORMAT_2M08 ) info.sampleRates.push_back( 22050 );
3523 if ( inCaps.dwFormats & WAVE_FORMAT_4M08 ) info.sampleRates.push_back( 44100 );
3524 if ( inCaps.dwFormats & WAVE_FORMAT_96M08 ) info.sampleRates.push_back( 96000 );
3527 else info.inputChannels = 0; // technically, this would be an error
3531 if ( info.inputChannels == 0 ) return info;
3533 if ( getDefaultInputDevice() == device )
3534 info.isDefaultInput = true;
3536 // Copy name and return.
3537 info.name = dsinfo.name;
3542 bool RtApiDs :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
3543 unsigned int firstChannel, unsigned int sampleRate,
3544 RtAudioFormat format, unsigned int *bufferSize,
3545 RtAudio::StreamOptions *options )
3547 if ( channels + firstChannel > 2 ) {
3548 errorText_ = "RtApiDs::probeDeviceOpen: DirectSound does not support more than 2 channels per device.";
3552 // Enumerate through devices to find the id (if it exists). Note
3553 // that we have to do the output enumeration first, even if this is
3554 // an input device, in order for the device counter to be correct.
3556 dsinfo.findIndex = true;
3557 dsinfo.index = device;
3558 HRESULT result = DirectSoundEnumerate( (LPDSENUMCALLBACK) deviceCountCallback, &dsinfo );
3559 if ( FAILED( result ) ) {
3560 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating output devices!";
3561 errorText_ = errorStream_.str();
3565 if ( mode == OUTPUT ) {
3566 if ( dsinfo.name.empty() ) {
3567 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support output!";
3568 errorText_ = errorStream_.str();
3572 else { // mode == INPUT
3573 dsinfo.isInput = true;
3574 HRESULT result = DirectSoundCaptureEnumerate( (LPDSENUMCALLBACK) deviceCountCallback, &dsinfo );
3575 if ( FAILED( result ) ) {
3576 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") enumerating input devices!";
3577 errorText_ = errorStream_.str();
3580 if ( dsinfo.name.empty() ) {
3581 errorStream_ << "RtApiDs::probeDeviceOpen: device (" << device << ") does not support input!";
3582 errorText_ = errorStream_.str();
3587 // According to a note in PortAudio, using GetDesktopWindow()
3588 // instead of GetForegroundWindow() is supposed to avoid problems
3589 // that occur when the application's window is not the foreground
3590 // window. Also, if the application window closes before the
3591 // DirectSound buffer, DirectSound can crash. However, for console
3592 // applications, no sound was produced when using GetDesktopWindow().
3593 HWND hWnd = GetForegroundWindow();
3595 // Check the numberOfBuffers parameter and limit the lowest value to
3596 // two. This is a judgement call and a value of two is probably too
3597 // low for capture, but it should work for playback.
3599 if ( options ) nBuffers = options->numberOfBuffers;
3600 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) nBuffers = 2;
3601 if ( nBuffers < 2 ) nBuffers = 3;
3603 // Create the wave format structure. The data format setting will
3604 // be determined later.
3605 WAVEFORMATEX waveFormat;
3606 ZeroMemory( &waveFormat, sizeof(WAVEFORMATEX) );
3607 waveFormat.wFormatTag = WAVE_FORMAT_PCM;
3608 waveFormat.nChannels = channels + firstChannel;
3609 waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
3611 // Determine the device buffer size. By default, 32k, but we will
3612 // grow it to make allowances for very large software buffer sizes.
3613 DWORD dsBufferSize = 0;
3614 DWORD dsPointerLeadTime = 0;
3615 long bufferBytes = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this
3617 void *ohandle = 0, *bhandle = 0;
3618 if ( mode == OUTPUT ) {
3620 LPDIRECTSOUND output;
3621 result = DirectSoundCreate( dsinfo.id, &output, NULL );
3622 if ( FAILED( result ) ) {
3623 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening output device (" << dsinfo.name << ")!";
3624 errorText_ = errorStream_.str();
3629 outCaps.dwSize = sizeof( outCaps );
3630 result = output->GetCaps( &outCaps );
3631 if ( FAILED( result ) ) {
3633 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting capabilities (" << dsinfo.name << ")!";
3634 errorText_ = errorStream_.str();
3638 // Check channel information.
3639 if ( channels + firstChannel == 2 && !( outCaps.dwFlags & DSCAPS_PRIMARYSTEREO ) ) {
3640 errorStream_ << "RtApiDs::getDeviceInfo: the output device (" << dsinfo.name << ") does not support stereo playback.";
3641 errorText_ = errorStream_.str();
3645 // Check format information. Use 16-bit format unless not
3646 // supported or user requests 8-bit.
3647 if ( outCaps.dwFlags & DSCAPS_PRIMARY16BIT &&
3648 !( format == RTAUDIO_SINT8 && outCaps.dwFlags & DSCAPS_PRIMARY8BIT ) ) {
3649 waveFormat.wBitsPerSample = 16;
3650 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3653 waveFormat.wBitsPerSample = 8;
3654 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3656 stream_.userFormat = format;
3658 // Update wave format structure and buffer information.
3659 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3660 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3661 dsPointerLeadTime = nBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;
3663 // If the user wants an even bigger buffer, increase the device buffer size accordingly.
3664 while ( dsPointerLeadTime * 2U > (DWORD) bufferBytes )
3667 // Set cooperative level to DSSCL_EXCLUSIVE
3668 result = output->SetCooperativeLevel( hWnd, DSSCL_EXCLUSIVE );
3669 if ( FAILED( result ) ) {
3671 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting cooperative level (" << dsinfo.name << ")!";
3672 errorText_ = errorStream_.str();
3676 // Even though we will write to the secondary buffer, we need to
3677 // access the primary buffer to set the correct output format
3678 // (since the default is 8-bit, 22 kHz!). Setup the DS primary
3679 // buffer description.
3680 DSBUFFERDESC bufferDescription;
3681 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3682 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3683 bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
3685 // Obtain the primary buffer
3686 LPDIRECTSOUNDBUFFER buffer;
3687 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3688 if ( FAILED( result ) ) {
3690 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") accessing primary buffer (" << dsinfo.name << ")!";
3691 errorText_ = errorStream_.str();
3695 // Set the primary DS buffer sound format.
3696 result = buffer->SetFormat( &waveFormat );
3697 if ( FAILED( result ) ) {
3699 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") setting primary buffer format (" << dsinfo.name << ")!";
3700 errorText_ = errorStream_.str();
3704 // Setup the secondary DS buffer description.
3705 dsBufferSize = (DWORD) bufferBytes;
3706 ZeroMemory( &bufferDescription, sizeof( DSBUFFERDESC ) );
3707 bufferDescription.dwSize = sizeof( DSBUFFERDESC );
3708 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3709 DSBCAPS_GETCURRENTPOSITION2 |
3710 DSBCAPS_LOCHARDWARE ); // Force hardware mixing
3711 bufferDescription.dwBufferBytes = bufferBytes;
3712 bufferDescription.lpwfxFormat = &waveFormat;
3714 // Try to create the secondary DS buffer. If that doesn't work,
3715 // try to use software mixing. Otherwise, there's a problem.
3716 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3717 if ( FAILED( result ) ) {
3718 bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
3719 DSBCAPS_GETCURRENTPOSITION2 |
3720 DSBCAPS_LOCSOFTWARE ); // Force software mixing
3721 result = output->CreateSoundBuffer( &bufferDescription, &buffer, NULL );
3722 if ( FAILED( result ) ) {
3724 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating secondary buffer (" << dsinfo.name << ")!";
3725 errorText_ = errorStream_.str();
3730 // Get the buffer size ... might be different from what we specified.
3732 dsbcaps.dwSize = sizeof( DSBCAPS );
3733 result = buffer->GetCaps( &dsbcaps );
3734 if ( FAILED( result ) ) {
3737 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting buffer settings (" << dsinfo.name << ")!";
3738 errorText_ = errorStream_.str();
3742 bufferBytes = dsbcaps.dwBufferBytes;
3744 // Lock the DS buffer
3747 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3748 if ( FAILED( result ) ) {
3751 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking buffer (" << dsinfo.name << ")!";
3752 errorText_ = errorStream_.str();
3756 // Zero the DS buffer
3757 ZeroMemory( audioPtr, dataLen );
3759 // Unlock the DS buffer
3760 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3761 if ( FAILED( result ) ) {
3764 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking buffer (" << dsinfo.name << ")!";
3765 errorText_ = errorStream_.str();
3769 dsBufferSize = bufferBytes;
3770 ohandle = (void *) output;
3771 bhandle = (void *) buffer;
3774 if ( mode == INPUT ) {
3776 LPDIRECTSOUNDCAPTURE input;
3777 result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
3778 if ( FAILED( result ) ) {
3779 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") opening input device (" << dsinfo.name << ")!";
3780 errorText_ = errorStream_.str();
3785 inCaps.dwSize = sizeof( inCaps );
3786 result = input->GetCaps( &inCaps );
3787 if ( FAILED( result ) ) {
3789 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") getting input capabilities (" << dsinfo.name << ")!";
3790 errorText_ = errorStream_.str();
3794 // Check channel information.
3795 if ( inCaps.dwChannels < channels + firstChannel ) {
3796 errorText_ = "RtApiDs::getDeviceInfo: the input device does not support requested input channels.";
3800 // Check format information. Use 16-bit format unless user
3802 DWORD deviceFormats;
3803 if ( channels + firstChannel == 2 ) {
3804 deviceFormats = WAVE_FORMAT_1S08 | WAVE_FORMAT_2S08 | WAVE_FORMAT_4S08 | WAVE_FORMAT_96S08;
3805 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
3806 waveFormat.wBitsPerSample = 8;
3807 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3809 else { // assume 16-bit is supported
3810 waveFormat.wBitsPerSample = 16;
3811 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3814 else { // channel == 1
3815 deviceFormats = WAVE_FORMAT_1M08 | WAVE_FORMAT_2M08 | WAVE_FORMAT_4M08 | WAVE_FORMAT_96M08;
3816 if ( format == RTAUDIO_SINT8 && inCaps.dwFormats & deviceFormats ) {
3817 waveFormat.wBitsPerSample = 8;
3818 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3820 else { // assume 16-bit is supported
3821 waveFormat.wBitsPerSample = 16;
3822 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3825 stream_.userFormat = format;
3827 // Update wave format structure and buffer information.
3828 waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
3829 waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
3831 // Setup the secondary DS buffer description.
3832 dsBufferSize = bufferBytes;
3833 DSCBUFFERDESC bufferDescription;
3834 ZeroMemory( &bufferDescription, sizeof( DSCBUFFERDESC ) );
3835 bufferDescription.dwSize = sizeof( DSCBUFFERDESC );
3836 bufferDescription.dwFlags = 0;
3837 bufferDescription.dwReserved = 0;
3838 bufferDescription.dwBufferBytes = bufferBytes;
3839 bufferDescription.lpwfxFormat = &waveFormat;
3841 // Create the capture buffer.
3842 LPDIRECTSOUNDCAPTUREBUFFER buffer;
3843 result = input->CreateCaptureBuffer( &bufferDescription, &buffer, NULL );
3844 if ( FAILED( result ) ) {
3846 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") creating input buffer (" << dsinfo.name << ")!";
3847 errorText_ = errorStream_.str();
3851 // Lock the capture buffer
3854 result = buffer->Lock( 0, bufferBytes, &audioPtr, &dataLen, NULL, NULL, 0 );
3855 if ( FAILED( result ) ) {
3858 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") locking input buffer (" << dsinfo.name << ")!";
3859 errorText_ = errorStream_.str();
3864 ZeroMemory( audioPtr, dataLen );
3866 // Unlock the buffer
3867 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
3868 if ( FAILED( result ) ) {
3871 errorStream_ << "RtApiDs::probeDeviceOpen: error (" << getErrorString( result ) << ") unlocking input buffer (" << dsinfo.name << ")!";
3872 errorText_ = errorStream_.str();
3876 dsBufferSize = bufferBytes;
3877 ohandle = (void *) input;
3878 bhandle = (void *) buffer;
3881 // Set various stream parameters
3882 stream_.nDeviceChannels[mode] = channels + firstChannel;
3883 stream_.nUserChannels[mode] = channels;
3884 stream_.bufferSize = *bufferSize;
3885 stream_.channelOffset[mode] = firstChannel;
3886 stream_.deviceInterleaved[mode] = true;
3887 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false;
3888 else stream_.userInterleaved = true;
3890 // Set flag for buffer conversion
3891 stream_.doConvertBuffer[mode] = false;
3892 if (stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode])
3893 stream_.doConvertBuffer[mode] = true;
3894 if (stream_.userFormat != stream_.deviceFormat[mode])
3895 stream_.doConvertBuffer[mode] = true;
3896 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
3897 stream_.nUserChannels[mode] > 1 )
3898 stream_.doConvertBuffer[mode] = true;
3900 // Allocate necessary internal buffers
3901 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
3902 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
3903 if ( stream_.userBuffer[mode] == NULL ) {
3904 errorText_ = "RtApiDs::probeDeviceOpen: error allocating user buffer memory.";
3908 if ( stream_.doConvertBuffer[mode] ) {
3910 bool makeBuffer = true;
3911 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
3912 if ( mode == INPUT ) {
3913 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
3914 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
3915 if ( bufferBytes <= (long) bytesOut ) makeBuffer = false;
3920 bufferBytes *= *bufferSize;
3921 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
3922 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
3923 if ( stream_.deviceBuffer == NULL ) {
3924 errorText_ = "RtApiDs::probeDeviceOpen: error allocating device buffer memory.";
3930 // Allocate our DsHandle structures for the stream.
3932 if ( stream_.apiHandle == 0 ) {
3934 handle = new DsHandle;
3936 catch ( std::bad_alloc& ) {
3937 errorText_ = "RtApiDs::probeDeviceOpen: error allocating AsioHandle memory.";
3941 // Create a manual-reset event.
3942 handle->condition = CreateEvent( NULL, // no security
3943 TRUE, // manual-reset
3944 FALSE, // non-signaled initially
3946 stream_.apiHandle = (void *) handle;
3949 handle = (DsHandle *) stream_.apiHandle;
3950 handle->id[mode] = ohandle;
3951 handle->buffer[mode] = bhandle;
3952 handle->dsBufferSize[mode] = dsBufferSize;
3953 handle->dsPointerLeadTime[mode] = dsPointerLeadTime;
3955 stream_.device[mode] = device;
3956 stream_.state = STREAM_STOPPED;
3957 if ( stream_.mode == OUTPUT && mode == INPUT )
3958 // We had already set up an output stream.
3959 stream_.mode = DUPLEX;
3961 stream_.mode = mode;
3962 stream_.nBuffers = nBuffers;
3963 stream_.sampleRate = sampleRate;
3965 // Setup the buffer conversion information structure.
3966 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
3968 // Setup the callback thread.
3970 stream_.callbackInfo.object = (void *) this;
3971 stream_.callbackInfo.isRunning = true;
3972 stream_.callbackInfo.thread = _beginthreadex( NULL, 0, &callbackHandler,
3973 &stream_.callbackInfo, 0, &threadId );
3974 if ( stream_.callbackInfo.thread == 0 ) {
3975 errorText_ = "RtApiDs::probeDeviceOpen: error creating callback thread!";
3983 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
3984 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
3985 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
3986 if ( buffer ) buffer->Release();
3989 if ( handle->buffer[1] ) {
3990 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
3991 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
3992 if ( buffer ) buffer->Release();
3995 CloseHandle( handle->condition );
3997 stream_.apiHandle = 0;
4000 for ( int i=0; i<2; i++ ) {
4001 if ( stream_.userBuffer[i] ) {
4002 free( stream_.userBuffer[i] );
4003 stream_.userBuffer[i] = 0;
4007 if ( stream_.deviceBuffer ) {
4008 free( stream_.deviceBuffer );
4009 stream_.deviceBuffer = 0;
// Close an open DirectSound stream: shut down the callback thread,
// release the playback (index 0) and capture (index 1) DirectSound
// objects/buffers, free the internal user and device buffers, and mark
// the stream closed.
// NOTE(review): the embedded source numbering in this chunk is
// non-contiguous -- several statements (e.g. the buffer Stop()/Release()
// calls after the casts below) are not visible here; verify against the
// full file.
4015 void RtApiDs :: closeStream()
4017 if ( stream_.state == STREAM_CLOSED ) {
4018 errorText_ = "RtApiDs::closeStream(): no open stream to close!";
4019 error( RtError::WARNING );
// Tell the callback thread to exit, then wait for it so no callback can
// run while we release the buffers below.
4023 // Stop the callback thread.
4024 stream_.callbackInfo.isRunning = false;
4025 WaitForSingleObject( (HANDLE) stream_.callbackInfo.thread, INFINITE );
4026 CloseHandle( (HANDLE) stream_.callbackInfo.thread );
4028 DsHandle *handle = (DsHandle *) stream_.apiHandle;
// Playback side: recover the IDirectSound object and its buffer.
4030 if ( handle->buffer[0] ) { // the object pointer can be NULL and valid
4031 LPDIRECTSOUND object = (LPDIRECTSOUND) handle->id[0];
4032 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
// Capture side: recover the IDirectSoundCapture object and its buffer.
4039 if ( handle->buffer[1] ) {
4040 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handle->id[1];
4041 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
// Destroy the drain-signal event and drop the API-specific handle.
4048 CloseHandle( handle->condition );
4050 stream_.apiHandle = 0;
// Free the per-mode user buffers (0 = output, 1 = input).
4053 for ( int i=0; i<2; i++ ) {
4054 if ( stream_.userBuffer[i] ) {
4055 free( stream_.userBuffer[i] );
4056 stream_.userBuffer[i] = 0;
// Free the shared device (conversion) buffer, if one was allocated.
4060 if ( stream_.deviceBuffer ) {
4061 free( stream_.deviceBuffer );
4062 stream_.deviceBuffer = 0;
4065 stream_.mode = UNINITIALIZED;
4066 stream_.state = STREAM_CLOSED;
// Start a previously opened stream: boost the multimedia timer
// resolution, reset the duplex synchronization state, kick off the
// DirectSound playback and/or capture buffers in looping mode, and mark
// the stream running.
// NOTE(review): some lines are missing from this view (e.g. the
// declaration of 'result' and the early return after the warning);
// verify against the full file.
4069 void RtApiDs :: startStream()
4072 if ( stream_.state == STREAM_RUNNING ) {
4073 errorText_ = "RtApiDs::startStream(): the stream is already running!";
4074 error( RtError::WARNING );
4078 // Increase scheduler frequency on lesser windows (a side-effect of
4079 // increasing timer accuracy). On greater windows (Win2K or later),
4080 // this is already in effect.
4082 MUTEX_LOCK( &stream_.mutex );
4084 DsHandle *handle = (DsHandle *) stream_.apiHandle;
// 1 ms timer resolution; matched by timeEndPeriod(1) in stopStream().
4086 timeBeginPeriod( 1 );
4089 memset( &statistics, 0, sizeof( statistics ) );
4090 statistics.sampleRate = stream_.sampleRate;
4091 statistics.writeDeviceBufferLeadBytes = handle->dsPointerLeadTime[0];
// Force callbackEvent() to re-synchronize the read/write pointers the
// first time it runs after this start.
4094 buffersRolling = false;
4095 duplexPrerollBytes = 0;
4097 if ( stream_.mode == DUPLEX ) {
4098 // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
4099 duplexPrerollBytes = (int) ( 0.5 * stream_.sampleRate * formatBytes( stream_.deviceFormat[1] ) * stream_.nDeviceChannels[1] );
// Start the playback buffer looping, if we have an output side.
4103 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4104 //statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0] ) * stream_.nDeviceChannels[0];
4106 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4107 result = buffer->Play( 0, 0, DSBPLAY_LOOPING );
4108 if ( FAILED( result ) ) {
4109 errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting output buffer!";
4110 errorText_ = errorStream_.str();
// Start the capture buffer looping, if we have an input side.
4115 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4116 //statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1]) * stream_.nDeviceChannels[1];
4118 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4119 result = buffer->Start( DSCBSTART_LOOPING );
4120 if ( FAILED( result ) ) {
4121 errorStream_ << "RtApiDs::startStream: error (" << getErrorString( result ) << ") starting input buffer!";
4122 errorText_ = errorStream_.str();
// Reset the drain state used by stopStream()/abortStream().
4127 handle->drainCounter = 0;
4128 handle->internalDrain = false;
4129 stream_.state = STREAM_RUNNING;
4132 MUTEX_UNLOCK( &stream_.mutex );
4134 if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Stop a running stream: optionally drain pending output (by setting
// drainCounter and waiting on the condition event that callbackEvent()
// signals when the drain completes), stop both DirectSound buffers,
// zero them so a restart does not replay stale audio, rewind the buffer
// pointers, restore the timer resolution, and mark the stream stopped.
//
// Fix: the error messages below previously said "RtApiDs::abortStream"
// even though this is stopStream() -- a copy/paste defect that
// misreported the failing function to the user. They now name
// stopStream. No other token is changed.
// NOTE(review): some lines are missing from this view (declarations of
// 'result', 'audioPtr', 'dataLen', and several closing braces); verify
// against the full file.
4137 void RtApiDs :: stopStream()
4140 if ( stream_.state == STREAM_STOPPED ) {
4141 errorText_ = "RtApiDs::stopStream(): the stream is already stopped!";
4142 error( RtError::WARNING );
4146 MUTEX_LOCK( &stream_.mutex );
4151 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4152 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drainCounter == 0 means no drain was requested yet (normal stop):
// request one and block until callbackEvent() signals the condition.
// The mutex is released while waiting so the callback can run.
4153 if ( handle->drainCounter == 0 ) {
4154 handle->drainCounter = 1;
4155 MUTEX_UNLOCK( &stream_.mutex );
4156 WaitForMultipleObjects( 1, &handle->condition, FALSE, INFINITE ); // block until signaled
4157 ResetEvent( handle->condition );
4158 MUTEX_LOCK( &stream_.mutex );
4161 // Stop the buffer and clear memory
4162 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4163 result = buffer->Stop();
4164 if ( FAILED( result ) ) {
4165 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping output buffer!";
4166 errorText_ = errorStream_.str();
4170 // Lock the buffer and clear it so that if we start to play again,
4171 // we won't have old data playing.
4172 result = buffer->Lock( 0, handle->dsBufferSize[0], &audioPtr, &dataLen, NULL, NULL, 0 );
4173 if ( FAILED( result ) ) {
4174 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking output buffer!";
4175 errorText_ = errorStream_.str();
4179 // Zero the DS buffer
4180 ZeroMemory( audioPtr, dataLen );
4182 // Unlock the DS buffer
4183 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4184 if ( FAILED( result ) ) {
4185 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking output buffer!";
4186 errorText_ = errorStream_.str();
4190 // If we start playing again, we must begin at beginning of buffer.
4191 handle->bufferPointer[0] = 0;
4194 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4195 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4199 result = buffer->Stop();
4200 if ( FAILED( result ) ) {
4201 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") stopping input buffer!";
4202 errorText_ = errorStream_.str();
4206 // Lock the buffer and clear it so that if we start to play again,
4207 // we won't have old data playing.
4208 result = buffer->Lock( 0, handle->dsBufferSize[1], &audioPtr, &dataLen, NULL, NULL, 0 );
4209 if ( FAILED( result ) ) {
4210 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") locking input buffer!";
4211 errorText_ = errorStream_.str();
4215 // Zero the DS buffer
4216 ZeroMemory( audioPtr, dataLen );
4218 // Unlock the DS buffer
4219 result = buffer->Unlock( audioPtr, dataLen, NULL, 0 );
4220 if ( FAILED( result ) ) {
4221 errorStream_ << "RtApiDs::stopStream: error (" << getErrorString( result ) << ") unlocking input buffer!";
4222 errorText_ = errorStream_.str();
4226 // If we start recording again, we must begin at beginning of buffer.
4227 handle->bufferPointer[1] = 0;
4231 timeEndPeriod( 1 ); // revert to normal scheduler frequency on lesser windows.
4232 stream_.state = STREAM_STOPPED;
4233 MUTEX_UNLOCK( &stream_.mutex );
4234 if ( FAILED( result ) ) error( RtError::SYSTEM_ERROR );
// Abort the stream as quickly as possible. Setting drainCounter to 1
// is read by callbackEvent()/stopStream(); the remainder of this
// function (presumably the stopStream() call and closing braces) is not
// visible in this chunk -- confirm against the full source.
4237 void RtApiDs :: abortStream()
4240 if ( stream_.state == STREAM_STOPPED ) {
4241 errorText_ = "RtApiDs::abortStream(): the stream is already stopped!";
4242 error( RtError::WARNING );
4246 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4247 handle->drainCounter = 1;
// Per-iteration engine of the DirectSound callback thread: invokes the
// user callback for fresh output data (unless draining), synchronizes
// the duplex read/write pointers on first roll, writes the output block
// into the looping playback buffer (sleeping until the write region is
// safe), and reads the next input block from the capture buffer.
//
// Fix: five occurrences of the mis-encoded token "¤t..." are
// restored to "&current..." -- the '&curr' prefix of the address-of
// expressions had been mangled into the currency sign (U+00A4) by a
// character-encoding error, which cannot compile. Two comment typos are
// also corrected. Every other code token is byte-identical.
// NOTE(review): the embedded source numbering is non-contiguous; a
// number of declarations (result, buffer, bufferBytes, nextWritePos,
// leadPos, endWrite, loop headers and closing braces) are not visible
// in this chunk -- verify against the full file.
4252 void RtApiDs :: callbackEvent()
4254 if ( stream_.state == STREAM_STOPPED ) {
4255 Sleep(50); // sleep 50 milliseconds
4259 if ( stream_.state == STREAM_CLOSED ) {
4260 errorText_ = "RtApiDs::callbackEvent(): the stream is closed ... this shouldn't happen!";
4261 error( RtError::WARNING );
4265 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
4266 DsHandle *handle = (DsHandle *) stream_.apiHandle;
4268 // Check if we were draining the stream and signal is finished.
4269 if ( handle->drainCounter > stream_.nBuffers + 2 ) {
// External drains (stopStream) are waiting on the condition event;
// internal drains (callback returned 1) are not.
4270 if ( handle->internalDrain == false )
4271 SetEvent( handle->condition );
4277 MUTEX_LOCK( &stream_.mutex );
4279 // Invoke user callback to get fresh output data UNLESS we are
4281 if ( handle->drainCounter == 0 ) {
4282 RtAudioCallback callback = (RtAudioCallback) info->callback;
4283 double streamTime = getStreamTime();
4284 RtAudioStreamStatus status = 0;
// Report (and clear) any under/overflow flags set on earlier passes.
4285 if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
4286 status |= RTAUDIO_OUTPUT_UNDERFLOW;
4287 handle->xrun[0] = false;
4289 if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
4290 status |= RTAUDIO_INPUT_OVERFLOW;
4291 handle->xrun[1] = false;
// Callback return value: 0 = continue, 1 = drain then stop, 2 = abort.
4293 handle->drainCounter = callback( stream_.userBuffer[0], stream_.userBuffer[1],
4294 stream_.bufferSize, streamTime, status, info->userData );
4295 if ( handle->drainCounter == 2 ) {
4296 MUTEX_UNLOCK( &stream_.mutex );
4300 else if ( handle->drainCounter == 1 )
4301 handle->internalDrain = true;
4305 DWORD currentWritePos, safeWritePos;
4306 DWORD currentReadPos, safeReadPos;
4310 #ifdef GENERATE_DEBUG_LOG
4311 DWORD writeTime, readTime;
4314 LPVOID buffer1 = NULL;
4315 LPVOID buffer2 = NULL;
4316 DWORD bufferSize1 = 0;
4317 DWORD bufferSize2 = 0;
// First pass after startStream() in DUPLEX mode: wait until both device
// pointers are actually moving, then seed our read/write offsets.
4322 if ( stream_.mode == DUPLEX && !buffersRolling ) {
4323 assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4325 // It takes a while for the devices to get rolling. As a result,
4326 // there's no guarantee that the capture and write device pointers
4327 // will move in lockstep. Wait here for both devices to start
4328 // rolling, and then set our buffer pointers accordingly.
4329 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600
4330 // bytes later than the write buffer.
4332 // Stub: a serious risk of having a pre-emptive scheduling round
4333 // take place between the two GetCurrentPosition calls... but I'm
4334 // really not sure how to solve the problem. Temporarily boost to
4335 // Realtime priority, maybe; but I'm not sure what priority the
4336 // DirectSound service threads run at. We *should* be roughly
4337 // within a ms or so of correct.
4339 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4340 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4342 DWORD initialWritePos, initialSafeWritePos;
4343 DWORD initialReadPos, initialSafeReadPos;
4345 result = dsWriteBuffer->GetCurrentPosition( &initialWritePos, &initialSafeWritePos );
4346 if ( FAILED( result ) ) {
4347 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4348 errorText_ = errorStream_.str();
4349 error( RtError::SYSTEM_ERROR );
4351 result = dsCaptureBuffer->GetCurrentPosition( &initialReadPos, &initialSafeReadPos );
4352 if ( FAILED( result ) ) {
4353 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4354 errorText_ = errorStream_.str();
4355 error( RtError::SYSTEM_ERROR );
4358 result = dsWriteBuffer->GetCurrentPosition( &currentWritePos, &safeWritePos );
4359 if ( FAILED( result ) ) {
4360 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4361 errorText_ = errorStream_.str();
4362 error( RtError::SYSTEM_ERROR );
4364 result = dsCaptureBuffer->GetCurrentPosition( &currentReadPos, &safeReadPos );
4365 if ( FAILED( result ) ) {
4366 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4367 errorText_ = errorStream_.str();
4368 error( RtError::SYSTEM_ERROR );
// Both pointers have moved since the initial snapshot -> rolling.
4370 if ( safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos ) break;
4374 assert( handle->dsBufferSize[0] == handle->dsBufferSize[1] );
4376 buffersRolling = true;
4377 handle->bufferPointer[0] = ( safeWritePos + handle->dsPointerLeadTime[0] );
4378 handle->bufferPointer[1] = safeReadPos;
// ---------------- Playback (output) side ----------------
4381 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
4383 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handle->buffer[0];
4385 if ( handle->drainCounter > 1 ) { // write zeros to the output stream
4386 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4387 bufferBytes *= formatBytes( stream_.userFormat );
4388 memset( stream_.userBuffer[0], 0, bufferBytes );
4391 // Setup parameters and do buffer conversion if necessary.
4392 if ( stream_.doConvertBuffer[0] ) {
4393 buffer = stream_.deviceBuffer;
4394 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
4395 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[0];
4396 bufferBytes *= formatBytes( stream_.deviceFormat[0] );
4399 buffer = stream_.userBuffer[0];
4400 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0];
4401 bufferBytes *= formatBytes( stream_.userFormat );
4404 // No byte swapping necessary in DirectSound implementation.
4406 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
4407 // unsigned. So, we need to convert our signed 8-bit data here to
4409 if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
4410 for ( int i=0; i<bufferBytes; i++ ) buffer[i] = (unsigned char) ( buffer[i] + 128 );
4412 DWORD dsBufferSize = handle->dsBufferSize[0];
4413 nextWritePos = handle->bufferPointer[0];
4417 // Find out where the read and "safe write" pointers are.
4418 result = dsBuffer->GetCurrentPosition( &currentWritePos, &safeWritePos );
4419 if ( FAILED( result ) ) {
4420 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current write position!";
4421 errorText_ = errorStream_.str();
4422 error( RtError::SYSTEM_ERROR );
4425 leadPos = safeWritePos + handle->dsPointerLeadTime[0];
4426 if ( leadPos > dsBufferSize ) leadPos -= dsBufferSize;
4427 if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
4428 endWrite = nextWritePos + bufferBytes;
4430 // Check whether the entire write region is behind the play pointer.
4431 if ( leadPos >= endWrite ) break;
4433 // If we are here, then we must wait until the play pointer gets
4434 // beyond the write region. The approach here is to use the
4435 // Sleep() function to suspend operation until safePos catches
4436 // up. Calculate number of milliseconds to wait as:
4437 // time = distance * (milliseconds/second) * fudgefactor /
4438 // ((bytes/sample) * (samples/second))
4439 // A "fudgefactor" less than 1 is used because it was found
4440 // that sleeping too long was MUCH worse than sleeping for
4441 // several shorter periods.
4442 double millis = ( endWrite - leadPos ) * 900.0;
4443 millis /= ( formatBytes( stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] * stream_.sampleRate);
4444 if ( millis < 1.0 ) millis = 1.0;
4445 if ( millis > 50.0 ) {
4446 static int nOverruns = 0;
4449 Sleep( (DWORD) millis );
4452 //if ( statistics.writeDeviceSafeLeadBytes < dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] ) ) {
4453 // statistics.writeDeviceSafeLeadBytes = dsPointerDifference( safeWritePos, currentWritePos, handle->dsBufferSize[0] );
4456 if ( dsPointerBetween( nextWritePos, safeWritePos, currentWritePos, dsBufferSize )
4457 || dsPointerBetween( endWrite, safeWritePos, currentWritePos, dsBufferSize ) ) {
4458 // We've strayed into the forbidden zone ... resync the read pointer.
4459 //++statistics.numberOfWriteUnderruns;
4460 handle->xrun[0] = true;
4461 nextWritePos = safeWritePos + handle->dsPointerLeadTime[0] - bufferBytes + dsBufferSize;
4462 while ( nextWritePos >= dsBufferSize ) nextWritePos -= dsBufferSize;
4463 handle->bufferPointer[0] = nextWritePos;
4464 endWrite = nextWritePos + bufferBytes;
4467 // Lock free space in the buffer
4468 result = dsBuffer->Lock( nextWritePos, bufferBytes, &buffer1,
4469 &bufferSize1, &buffer2, &bufferSize2, 0 );
4470 if ( FAILED( result ) ) {
4471 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking buffer during playback!";
4472 errorText_ = errorStream_.str();
4473 error( RtError::SYSTEM_ERROR );
4476 // Copy our buffer into the DS buffer
4477 CopyMemory( buffer1, buffer, bufferSize1 );
4478 if ( buffer2 != NULL ) CopyMemory( buffer2, buffer+bufferSize1, bufferSize2 );
4480 // Update our buffer offset and unlock sound buffer
4481 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4482 if ( FAILED( result ) ) {
4483 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking buffer during playback!";
4484 errorText_ = errorStream_.str();
4485 error( RtError::SYSTEM_ERROR );
4487 nextWritePos = ( nextWritePos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4488 handle->bufferPointer[0] = nextWritePos;
4490 if ( handle->drainCounter ) {
4491 handle->drainCounter++;
// ---------------- Capture (input) side ----------------
4496 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
4498 // Setup parameters.
4499 if ( stream_.doConvertBuffer[1] ) {
4500 buffer = stream_.deviceBuffer;
4501 bufferBytes = stream_.bufferSize * stream_.nDeviceChannels[1];
4502 bufferBytes *= formatBytes( stream_.deviceFormat[1] );
4505 buffer = stream_.userBuffer[1];
4506 bufferBytes = stream_.bufferSize * stream_.nUserChannels[1];
4507 bufferBytes *= formatBytes( stream_.userFormat );
4510 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handle->buffer[1];
4511 long nextReadPos = handle->bufferPointer[1];
4512 DWORD dsBufferSize = handle->dsBufferSize[1];
4514 // Find out where the write and "safe read" pointers are.
4515 result = dsBuffer->GetCurrentPosition( &currentReadPos, &safeReadPos );
4516 if ( FAILED( result ) ) {
4517 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4518 errorText_ = errorStream_.str();
4519 error( RtError::SYSTEM_ERROR );
4522 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4523 DWORD endRead = nextReadPos + bufferBytes;
4525 // Handling depends on whether we are INPUT or DUPLEX.
4526 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
4527 // then a wait here will drag the write pointers into the forbidden zone.
4529 // In DUPLEX mode, rather than wait, we will back off the read pointer until
4530 // it's in a safe position. This causes dropouts, but it seems to be the only
4531 // practical way to sync up the read and write pointers reliably, given the
4532 // very complex relationship between phase and increment of the read and write
4535 // In order to minimize audible dropouts in DUPLEX mode, we will
4536 // provide a pre-roll period of 0.5 seconds in which we return
4537 // zeros from the read buffer while the pointers sync up.
4539 if ( stream_.mode == DUPLEX ) {
4540 if ( safeReadPos < endRead ) {
4541 if ( duplexPrerollBytes <= 0 ) {
4542 // Pre-roll time over. Be more aggressive.
4543 int adjustment = endRead-safeReadPos;
4545 handle->xrun[1] = true;
4546 //++statistics.numberOfReadOverruns;
4548 // - large adjustments: we've probably run out of CPU cycles, so just resync exactly,
4549 // and perform fine adjustments later.
4550 // - small adjustments: back off by twice as much.
4551 if ( adjustment >= 2*bufferBytes )
4552 nextReadPos = safeReadPos-2*bufferBytes;
4554 nextReadPos = safeReadPos-bufferBytes-adjustment;
4556 //statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos;
4557 //if ( statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize;
4558 if ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4562 // In pre-roll time. Just do it.
4563 nextReadPos = safeReadPos-bufferBytes;
4564 while ( nextReadPos < 0 ) nextReadPos += dsBufferSize;
4566 endRead = nextReadPos + bufferBytes;
4569 else { // mode == INPUT
4570 while ( safeReadPos < endRead ) {
4571 // See comments for playback.
4572 double millis = (endRead - safeReadPos) * 900.0;
4573 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
4574 if ( millis < 1.0 ) millis = 1.0;
4575 Sleep( (DWORD) millis );
4577 // Wake up, find out where we are now
4578 result = dsBuffer->GetCurrentPosition( &currentReadPos, &safeReadPos );
4579 if ( FAILED( result ) ) {
4580 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") getting current read position!";
4581 errorText_ = errorStream_.str();
4582 error( RtError::SYSTEM_ERROR );
4585 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
4589 //if (statistics.readDeviceSafeLeadBytes < dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize ) )
4590 // statistics.readDeviceSafeLeadBytes = dsPointerDifference( currentReadPos, nextReadPos, dsBufferSize );
4592 // Lock free space in the buffer
4593 result = dsBuffer->Lock( nextReadPos, bufferBytes, &buffer1,
4594 &bufferSize1, &buffer2, &bufferSize2, 0 );
4595 if ( FAILED( result ) ) {
4596 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") locking capture buffer!";
4597 errorText_ = errorStream_.str();
4598 error( RtError::SYSTEM_ERROR );
// During the duplex pre-roll period, deliver silence instead of the
// (not-yet-synchronized) captured data.
4601 if ( duplexPrerollBytes <= 0 ) {
4602 // Copy our buffer into the DS buffer
4603 CopyMemory( buffer, buffer1, bufferSize1 );
4604 if ( buffer2 != NULL ) CopyMemory( buffer+bufferSize1, buffer2, bufferSize2 );
4607 memset( buffer, 0, bufferSize1 );
4608 if ( buffer2 != NULL ) memset( buffer + bufferSize1, 0, bufferSize2 );
4609 duplexPrerollBytes -= bufferSize1 + bufferSize2;
4612 // Update our buffer offset and unlock sound buffer
4613 nextReadPos = ( nextReadPos + bufferSize1 + bufferSize2 ) % dsBufferSize;
4614 dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 );
4615 if ( FAILED( result ) ) {
4616 errorStream_ << "RtApiDs::callbackEvent: error (" << getErrorString( result ) << ") unlocking capture buffer!";
4617 errorText_ = errorStream_.str();
4618 error( RtError::SYSTEM_ERROR );
4620 handle->bufferPointer[1] = nextReadPos;
4622 // No byte swapping necessary in DirectSound implementation.
4624 // If necessary, convert 8-bit data from unsigned to signed.
4625 if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
4626 for ( int j=0; j<bufferBytes; j++ ) buffer[j] = (signed char) ( buffer[j] - 128 );
4628 // Do buffer conversion if necessary.
4629 if ( stream_.doConvertBuffer[1] )
4630 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
4632 #ifdef GENERATE_DEBUG_LOG
4633 if ( currentDebugLogEntry < debugLog.size() )
4635 TTickRecord &r = debugLog[currentDebugLogEntry++];
4636 r.currentReadPointer = currentReadPos;
4637 r.safeReadPointer = safeReadPos;
4638 r.currentWritePointer = currentWritePos;
4639 r.safeWritePointer = safeWritePos;
4640 r.readTime = readTime;
4641 r.writeTime = writeTime;
4642 r.nextReadPointer = handles[1].bufferPointer;
4643 r.nextWritePointer = handles[0].bufferPointer;
4648 MUTEX_UNLOCK( &stream_.mutex );
4650 RtApi::tickStreamTime();
4653 // Definitions for utility functions and callbacks
4654 // specific to the DirectSound implementation.
// Thread entry point for the DirectSound callback thread (created with
// _beginthreadex in probeDeviceOpen). Loops calling callbackEvent()
// until CallbackInfo::isRunning is cleared by closeStream(). The
// thread-exit statements are not visible in this chunk.
4656 extern "C" unsigned __stdcall callbackHandler( void *ptr )
4658 CallbackInfo *info = (CallbackInfo *) ptr;
4659 RtApiDs *object = (RtApiDs *) info->object;
4660 bool* isRunning = &info->isRunning;
4662 while ( *isRunning == true ) {
4663 object->callbackEvent();
// Convert a Windows TCHAR string (wide when UNICODE/_UNICODE is
// defined, narrow otherwise) into the std::string RtAudio uses for
// device names. In the wide build each wchar_t is narrowed to one char
// (lossy for non-ASCII names, as the original comment notes). The
// declaration of 's' and the return statement are not visible in this
// chunk.
4672 std::string convertTChar( LPCTSTR name )
4676 #if defined( UNICODE ) || defined( _UNICODE )
4677 // Yes, this conversion doesn't make sense for two-byte characters
4678 // but RtAudio is currently written to return an std::string of
4679 // one-byte chars for the device name.
4680 for ( unsigned int i=0; i<wcslen( name ); i++ )
4681 s.push_back( name[i] );
4683 s.append( std::string( name ) );
// DirectSound device-enumeration callback. Depending on
// EnumInfo::isInput it opens each candidate as a capture or playback
// device and queries its caps to decide whether it is usable; returning
// TRUE continues enumeration, FALSE stops it. It also supports
// locating the default device (NULL lpguid) and looking up a device
// name by index. Several lines (parameter list tail, counter updates,
// closing braces) are not visible in this chunk.
4689 static BOOL CALLBACK deviceCountCallback( LPGUID lpguid,
4690 LPCTSTR description,
4694 EnumInfo *info = (EnumInfo *) lpContext;
4697 if ( info->isInput == true ) {
// Capture path: a device counts only if it reports channels + formats.
4699 LPDIRECTSOUNDCAPTURE object;
4701 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
4702 if ( hr != DS_OK ) return TRUE;
4704 caps.dwSize = sizeof(caps);
4705 hr = object->GetCaps( &caps );
4706 if ( hr == DS_OK ) {
4707 if ( caps.dwChannels > 0 && caps.dwFormats > 0 )
// Playback path: a device counts if it supports primary mono or stereo.
4714 LPDIRECTSOUND object;
4715 hr = DirectSoundCreate( lpguid, &object, NULL );
4716 if ( hr != DS_OK ) return TRUE;
4718 caps.dwSize = sizeof(caps);
4719 hr = object->GetCaps( &caps );
4720 if ( hr == DS_OK ) {
4721 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
// The default device is enumerated first with a NULL GUID; stop there
// when only the default is wanted.
4727 if ( info->getDefault && lpguid == NULL ) return FALSE;
// Index lookup: capture the name once the requested index is passed.
4729 if ( info->findIndex && info->counter > info->index ) {
4731 info->name = convertTChar( description );
// Map a DirectSound error code (DSERR_*) to a short human-readable
// message for RtApiDs error reporting. The switch header and some
// case labels are not visible in this chunk (non-contiguous source
// numbering).
4738 static char* getErrorString( int code )
4742 case DSERR_ALLOCATED:
4743 return "Already allocated";
4745 case DSERR_CONTROLUNAVAIL:
4746 return "Control unavailable";
4748 case DSERR_INVALIDPARAM:
4749 return "Invalid parameter";
4751 case DSERR_INVALIDCALL:
4752 return "Invalid call";
4755 return "Generic error";
4757 case DSERR_PRIOLEVELNEEDED:
4758 return "Priority level needed";
4760 case DSERR_OUTOFMEMORY:
4761 return "Out of memory";
4763 case DSERR_BADFORMAT:
4764 return "The sample rate or the channel format is not supported";
4766 case DSERR_UNSUPPORTED:
4767 return "Not supported";
4769 case DSERR_NODRIVER:
4772 case DSERR_ALREADYINITIALIZED:
4773 return "Already initialized";
4775 case DSERR_NOAGGREGATION:
4776 return "No aggregation";
4778 case DSERR_BUFFERLOST:
4779 return "Buffer lost";
4781 case DSERR_OTHERAPPHASPRIO:
4782 return "Another application already has priority";
4784 case DSERR_UNINITIALIZED:
4785 return "Uninitialized";
// Fallback for codes without a dedicated message.
4788 return "DirectSound unknown error";
4791 //******************** End of __WINDOWS_DS__ *********************//
4795 #if defined(__LINUX_ALSA__)
4797 #include <alsa/asoundlib.h>
4800 // A structure to hold various information related to the ALSA API
// NOTE(review): the struct declaration line and several members are
// missing from this view of the file (non-contiguous source numbering).
// handles[0]/handles[1] appear to be the playback/capture PCM handles,
// mirroring the [0]=output / [1]=input convention used elsewhere in
// this file -- confirm against the full source.
4803 snd_pcm_t *handles[2];
// Constructor initializer: start unsynchronized with no xruns recorded.
4808 :synchronized(false) { xrun[0] = false; xrun[1] = false; }
// Forward declaration of the ALSA callback thread entry point.
4811 extern "C" void *alsaCallbackHandler( void * ptr );
// Default constructor -- no ALSA state is created until a stream is
// opened.
4813 RtApiAlsa :: RtApiAlsa()
4815 // Nothing to do here.
// Destructor: make sure any still-open stream is closed so its
// resources are released.
4818 RtApiAlsa :: ~RtApiAlsa()
4820 if ( stream_.state != STREAM_CLOSED ) closeStream();
// Count available ALSA PCM devices by walking every sound card
// ("hw:N") and each card's PCM subdevices via the control interface.
// Cards that fail to open produce a warning and are skipped. Several
// lines (the 'name'/'handle' declarations, the nDevices increment, the
// return) are not visible in this chunk.
4823 unsigned int RtApiAlsa :: getDeviceCount( void )
4825 unsigned nDevices = 0;
4826 int result, subdevice, card;
4830 // Count cards and devices
4832 snd_card_next( &card );
4833 while ( card >= 0 ) {
4834 sprintf( name, "hw:%d", card );
4835 result = snd_ctl_open( &handle, name, 0 );
4837 errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
4838 errorText_ = errorStream_.str();
4839 error( RtError::WARNING );
// Iterate this card's PCM devices; subdevice becomes -1 when exhausted.
4844 result = snd_ctl_pcm_next_device( handle, &subdevice );
4846 errorStream_ << "RtApiAlsa::getDeviceCount: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
4847 errorText_ = errorStream_.str();
4848 error( RtError::WARNING );
4851 if ( subdevice < 0 )
// Move on to the next card.
4856 snd_ctl_close( handle );
4857 snd_card_next( &card );
// Probe ALSA device index 'device' and return its capabilities: output,
// input and duplex channel counts, supported sample rates, native data
// formats, and default-device flags.  Throws via error() on invalid use;
// probe failures are reported as WARNINGs with info.probed left false.
// NOTE(review): this listing is elided -- declarations of 'name', 'chandle',
// 'phandle', 'value', 'cardname' and several braces/guards are outside this
// view, so comments below describe only the visible lines.
// FIX: line 4924 read "snd_pcm_hw_params_alloca( ¶ms );" -- mis-encoded
// HTML entity (&para; swallowed "&pa" of "&params"); restored to &params.
4863 RtAudio::DeviceInfo RtApiAlsa :: getDeviceInfo( unsigned int device )
4865 RtAudio::DeviceInfo info;
// Pessimistic default; presumably set true on a fully successful probe
// (the assignment is in elided lines -- confirm against the full file).
4866 info.probed = false;
4868 unsigned nDevices = 0;
4869 int result, subdevice, card;
// Enumerate "hw:card,subdevice" pairs until the linear index 'nDevices'
// matches the requested 'device'; 'name' then holds that device's id.
4873 // Count cards and devices
4875 snd_card_next( &card );
4876 while ( card >= 0 ) {
4877 sprintf( name, "hw:%d", card );
4878 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
4880 errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
4881 errorText_ = errorStream_.str();
4882 error( RtError::WARNING );
4887 result = snd_ctl_pcm_next_device( chandle, &subdevice );
4889 errorStream_ << "RtApiAlsa::getDeviceInfo: control next device, card = " << card << ", " << snd_strerror( result ) << ".";
4890 errorText_ = errorStream_.str();
4891 error( RtError::WARNING );
4894 if ( subdevice < 0 ) break;
4895 if ( nDevices == device ) {
4896 sprintf( name, "hw:%d,%d", card, subdevice );
4902 snd_ctl_close( chandle );
4903 snd_card_next( &card );
4906 if ( nDevices == 0 ) {
4907 errorText_ = "RtApiAlsa::getDeviceInfo: no devices found!";
4908 error( RtError::INVALID_USE );
4911 if ( device >= nDevices ) {
4912 errorText_ = "RtApiAlsa::getDeviceInfo: device ID is invalid!";
4913 error( RtError::INVALID_USE );
// alloca-based ALSA descriptors: stack storage, no explicit free needed.
4918 int openMode = SND_PCM_ASYNC;
4919 snd_pcm_stream_t stream;
4920 snd_pcm_info_t *pcminfo;
4921 snd_pcm_info_alloca( &pcminfo );
4923 snd_pcm_hw_params_t *params;
4924 snd_pcm_hw_params_alloca( &params );
// Probe playback direction first to fill info.outputChannels.
4926 // First try for playback
4927 stream = SND_PCM_STREAM_PLAYBACK;
4928 snd_pcm_info_set_device( pcminfo, subdevice );
4929 snd_pcm_info_set_subdevice( pcminfo, 0 );
4930 snd_pcm_info_set_stream( pcminfo, stream );
4932 result = snd_ctl_pcm_info( chandle, pcminfo );
4934 // Device probably doesn't support playback.
// Non-blocking open so a busy device doesn't hang the probe.
4938 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK );
4940 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
4941 errorText_ = errorStream_.str();
4942 error( RtError::WARNING );
4946 // The device is open ... fill the parameter structure.
4947 result = snd_pcm_hw_params_any( phandle, params );
4949 snd_pcm_close( phandle );
4950 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
4951 errorText_ = errorStream_.str();
4952 error( RtError::WARNING );
4956 // Get output channel information.
4958 result = snd_pcm_hw_params_get_channels_max( params, &value );
4960 snd_pcm_close( phandle );
4961 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") output channels, " << snd_strerror( result ) << ".";
4962 errorText_ = errorStream_.str();
4963 error( RtError::WARNING );
4966 info.outputChannels = value;
4967 snd_pcm_close( phandle );
// Second pass: probe capture direction for info.inputChannels.  Note the
// ctl handle is closed here; later failures fall through to the
// 'probeParameters' label when playback channels were already found.
4970 // Now try for capture
4971 stream = SND_PCM_STREAM_CAPTURE;
4972 snd_pcm_info_set_stream( pcminfo, stream );
4974 result = snd_ctl_pcm_info( chandle, pcminfo );
4975 snd_ctl_close( chandle );
4977 // Device probably doesn't support capture.
4978 if ( info.outputChannels == 0 ) return info;
4979 goto probeParameters;
4982 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
4984 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
4985 errorText_ = errorStream_.str();
4986 error( RtError::WARNING );
4987 if ( info.outputChannels == 0 ) return info;
4988 goto probeParameters;
4991 // The device is open ... fill the parameter structure.
4992 result = snd_pcm_hw_params_any( phandle, params );
4994 snd_pcm_close( phandle );
4995 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
4996 errorText_ = errorStream_.str();
4997 error( RtError::WARNING );
4998 if ( info.outputChannels == 0 ) return info;
4999 goto probeParameters;
5002 result = snd_pcm_hw_params_get_channels_max( params, &value );
5004 snd_pcm_close( phandle );
5005 errorStream_ << "RtApiAlsa::getDeviceInfo: error getting device (" << name << ") input channels, " << snd_strerror( result ) << ".";
5006 errorText_ = errorStream_.str();
5007 error( RtError::WARNING );
5008 if ( info.outputChannels == 0 ) return info;
5009 goto probeParameters;
5011 info.inputChannels = value;
5012 snd_pcm_close( phandle );
// Duplex capability is the min of the two directional channel counts.
5014 // If device opens for both playback and capture, we determine the channels.
5015 if ( info.outputChannels > 0 && info.inputChannels > 0 )
5016 info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
5018 // ALSA doesn't provide default devices so we'll use the first available one.
5019 if ( device == 0 && info.outputChannels > 0 )
5020 info.isDefaultOutput = true;
5021 if ( device == 0 && info.inputChannels > 0 )
5022 info.isDefaultInput = true;
5025 // At this point, we just need to figure out the supported data
5026 // formats and sample rates. We'll proceed by opening the device in
5027 // the direction with the maximum number of channels, or playback if
5028 // they are equal. This might limit our sample rate options, but so
5031 if ( info.outputChannels >= info.inputChannels )
5032 stream = SND_PCM_STREAM_PLAYBACK;
5034 stream = SND_PCM_STREAM_CAPTURE;
5035 snd_pcm_info_set_stream( pcminfo, stream );
5037 result = snd_pcm_open( &phandle, name, stream, openMode | SND_PCM_NONBLOCK);
5039 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_open error for device (" << name << "), " << snd_strerror( result ) << ".";
5040 errorText_ = errorStream_.str();
5041 error( RtError::WARNING );
5045 // The device is open ... fill the parameter structure.
5046 result = snd_pcm_hw_params_any( phandle, params );
5048 snd_pcm_close( phandle );
5049 errorStream_ << "RtApiAlsa::getDeviceInfo: snd_pcm_hw_params error for device (" << name << "), " << snd_strerror( result ) << ".";
5050 errorText_ = errorStream_.str();
5051 error( RtError::WARNING );
// Rates are tested against the class's fixed SAMPLE_RATES table
// (MAX_SAMPLE_RATES entries, see file head) rather than queried as a range.
5055 // Test our discrete set of sample rate values.
5056 info.sampleRates.clear();
5057 for ( unsigned int i=0; i<MAX_SAMPLE_RATES; i++ ) {
5058 if ( snd_pcm_hw_params_test_rate( phandle, params, SAMPLE_RATES[i], 0 ) == 0 )
5059 info.sampleRates.push_back( SAMPLE_RATES[i] );
5061 if ( info.sampleRates.size() == 0 ) {
5062 snd_pcm_close( phandle );
5063 errorStream_ << "RtApiAlsa::getDeviceInfo: no supported sample rates found for device (" << name << ").";
5064 errorText_ = errorStream_.str();
5065 error( RtError::WARNING );
// Formats accumulate into a bitmask; endianness handled at stream setup.
5069 // Probe the supported data formats ... we don't care about endian-ness just yet
5070 snd_pcm_format_t format;
5071 info.nativeFormats = 0;
5072 format = SND_PCM_FORMAT_S8;
5073 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5074 info.nativeFormats |= RTAUDIO_SINT8;
5075 format = SND_PCM_FORMAT_S16;
5076 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5077 info.nativeFormats |= RTAUDIO_SINT16;
5078 format = SND_PCM_FORMAT_S24;
5079 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5080 info.nativeFormats |= RTAUDIO_SINT24;
5081 format = SND_PCM_FORMAT_S32;
5082 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5083 info.nativeFormats |= RTAUDIO_SINT32;
5084 format = SND_PCM_FORMAT_FLOAT;
5085 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5086 info.nativeFormats |= RTAUDIO_FLOAT32;
5087 format = SND_PCM_FORMAT_FLOAT64;
5088 if ( snd_pcm_hw_params_test_format( phandle, params, format ) == 0 )
5089 info.nativeFormats |= RTAUDIO_FLOAT64;
5091 // Check that we have at least one supported format
5092 if ( info.nativeFormats == 0 ) {
5093 errorStream_ << "RtApiAlsa::getDeviceInfo: pcm device (" << name << ") data format not supported by RtAudio.";
5094 errorText_ = errorStream_.str();
5095 error( RtError::WARNING );
// NOTE(review): sprintf of an arbitrary-length ALSA card name into the
// fixed 'name' buffer could overflow; snprintf would be safer -- the
// buffer's declaration/size is in elided lines, so flagged only.
5099 // Get the device name
5101 result = snd_card_get_name( card, &cardname );
5103 sprintf( name, "hw:%s,%d", cardname, subdevice );
5106 // That's all ... close the device and return
5107 snd_pcm_close( phandle );
// Open and configure one direction (OUTPUT or INPUT) of an ALSA stream:
// locates the hw:card,subdevice name, opens the pcm, negotiates access
// mode, data format, sample rate, channels, periods and period size,
// installs hw/sw params, allocates user/device buffers, and (first call
// only) spawns the realtime callback thread.  Returns true on success;
// on failure sets errorText_ and returns via the (elided) error/cleanup
// paths at the bottom, which release the pcm handles and buffers.
// NOTE(review): this listing is elided -- 'name', 'chandle', 'phandle',
// 'value', 'dir', 'out', many braces/guards and the FAILURE/return lines
// are outside this view; comments describe only the visible code.
// FIX: line 5537's error text read "RtApiAlsa::error creating callback
// thread!" -- missing the function name used by every other message in
// this method; normalized to the "RtApiAlsa::probeDeviceOpen:" prefix.
5112 bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
5113 unsigned int firstChannel, unsigned int sampleRate,
5114 RtAudioFormat format, unsigned int *bufferSize,
5115 RtAudio::StreamOptions *options )
5118 #if defined(__RTAUDIO_DEBUG__)
5120 snd_output_stdio_attach(&out, stderr, 0);
5123 // I'm not using the "plug" interface ... too much inconsistent behavior.
5125 unsigned nDevices = 0;
5126 int result, subdevice, card;
// Same card/subdevice enumeration as getDeviceInfo: map the linear
// 'device' index onto an "hw:card,subdevice" name string.
5130 // Count cards and devices
5132 snd_card_next( &card );
5133 while ( card >= 0 ) {
5134 sprintf( name, "hw:%d", card );
5135 result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
5137 errorStream_ << "RtApiAlsa::probeDeviceOpen: control open, card = " << card << ", " << snd_strerror( result ) << ".";
5138 errorText_ = errorStream_.str();
5143 result = snd_ctl_pcm_next_device( chandle, &subdevice );
5144 if ( result < 0 ) break;
5145 if ( subdevice < 0 ) break;
5146 if ( nDevices == device ) {
5147 sprintf( name, "hw:%d,%d", card, subdevice );
5152 snd_ctl_close( chandle );
5153 snd_card_next( &card );
5156 if ( nDevices == 0 ) {
5157 // This should not happen because a check is made before this function is called.
5158 errorText_ = "RtApiAlsa::probeDeviceOpen: no devices found!";
5162 if ( device >= nDevices ) {
5163 // This should not happen because a check is made before this function is called.
5164 errorText_ = "RtApiAlsa::probeDeviceOpen: device ID is invalid!";
5170 snd_pcm_stream_t stream;
5171 if ( mode == OUTPUT )
5172 stream = SND_PCM_STREAM_PLAYBACK;
5174 stream = SND_PCM_STREAM_CAPTURE;
// Blocking open here (no SND_PCM_NONBLOCK): unlike the probe in
// getDeviceInfo, we intend to actually use this handle.
5177 int openMode = SND_PCM_ASYNC;
5178 result = snd_pcm_open( &phandle, name, stream, openMode );
5180 if ( mode == OUTPUT )
5181 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for output.";
5183 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device (" << name << ") won't open for input.";
5184 errorText_ = errorStream_.str();
5188 // Fill the parameter structure.
5189 snd_pcm_hw_params_t *hw_params;
5190 snd_pcm_hw_params_alloca( &hw_params );
5191 result = snd_pcm_hw_params_any( phandle, hw_params );
5193 snd_pcm_close( phandle );
5194 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") parameters, " << snd_strerror( result ) << ".";
5195 errorText_ = errorStream_.str();
5199 #if defined(__RTAUDIO_DEBUG__)
5200 fprintf( stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n" );
5201 snd_pcm_hw_params_dump( hw_params, out );
// Honor the user's interleaving preference but fall back to the other
// access mode if the device rejects it; deviceInterleaved[mode] records
// what the device actually accepted so the callback can convert.
5204 // Set access ... check user preference.
5205 if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) {
5206 stream_.userInterleaved = false;
5207 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5209 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5210 stream_.deviceInterleaved[mode] = true;
5213 stream_.deviceInterleaved[mode] = false;
5216 stream_.userInterleaved = true;
5217 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED );
5219 result = snd_pcm_hw_params_set_access( phandle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED );
5220 stream_.deviceInterleaved[mode] = false;
5223 stream_.deviceInterleaved[mode] = true;
5227 snd_pcm_close( phandle );
5228 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") access, " << snd_strerror( result ) << ".";
5229 errorText_ = errorStream_.str();
// Map the requested RtAudio format onto the ALSA equivalent; if the
// device can't do it natively, fall back through the candidates from
// widest (FLOAT64) to narrowest (S8) -- conversion happens in software.
5233 // Determine how to set the device format.
5234 stream_.userFormat = format;
5235 snd_pcm_format_t deviceFormat = SND_PCM_FORMAT_UNKNOWN;
5237 if ( format == RTAUDIO_SINT8 )
5238 deviceFormat = SND_PCM_FORMAT_S8;
5239 else if ( format == RTAUDIO_SINT16 )
5240 deviceFormat = SND_PCM_FORMAT_S16;
5241 else if ( format == RTAUDIO_SINT24 )
5242 deviceFormat = SND_PCM_FORMAT_S24;
5243 else if ( format == RTAUDIO_SINT32 )
5244 deviceFormat = SND_PCM_FORMAT_S32;
5245 else if ( format == RTAUDIO_FLOAT32 )
5246 deviceFormat = SND_PCM_FORMAT_FLOAT;
5247 else if ( format == RTAUDIO_FLOAT64 )
5248 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5250 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat) == 0) {
5251 stream_.deviceFormat[mode] = format;
5255 // The user requested format is not natively supported by the device.
5256 deviceFormat = SND_PCM_FORMAT_FLOAT64;
5257 if ( snd_pcm_hw_params_test_format( phandle, hw_params, deviceFormat ) == 0 ) {
5258 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5262 deviceFormat = SND_PCM_FORMAT_FLOAT;
5263 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5264 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5268 deviceFormat = SND_PCM_FORMAT_S32;
5269 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5270 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5274 deviceFormat = SND_PCM_FORMAT_S24;
5275 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5276 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
5280 deviceFormat = SND_PCM_FORMAT_S16;
5281 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5282 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5286 deviceFormat = SND_PCM_FORMAT_S8;
5287 if ( snd_pcm_hw_params_test_format(phandle, hw_params, deviceFormat ) == 0 ) {
5288 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
5292 // If we get here, no supported format was found.
5293 errorStream_ << "RtApiAlsa::probeDeviceOpen: pcm device " << device << " data format not supported by RtAudio.";
5294 errorText_ = errorStream_.str();
5298 result = snd_pcm_hw_params_set_format( phandle, hw_params, deviceFormat );
5300 snd_pcm_close( phandle );
5301 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting pcm device (" << name << ") data format, " << snd_strerror( result ) << ".";
5302 errorText_ = errorStream_.str();
// S8 is a single byte, so endianness never applies to it.
5306 // Determine whether byte-swaping is necessary.
5307 stream_.doByteSwap[mode] = false;
5308 if ( deviceFormat != SND_PCM_FORMAT_S8 ) {
5309 result = snd_pcm_format_cpu_endian( deviceFormat );
5311 stream_.doByteSwap[mode] = true;
5312 else if (result < 0) {
5313 snd_pcm_close( phandle );
5314 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting pcm device (" << name << ") endian-ness, " << snd_strerror( result ) << ".";
5315 errorText_ = errorStream_.str();
// set_rate_near may adjust 'sampleRate' in place to the closest rate.
5320 // Set the sample rate.
5321 result = snd_pcm_hw_params_set_rate_near( phandle, hw_params, (unsigned int*) &sampleRate, 0 );
5323 snd_pcm_close( phandle );
5324 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting sample rate on device (" << name << "), " << snd_strerror( result ) << ".";
5325 errorText_ = errorStream_.str();
5329 // Determine the number of channels for this device. We support a possible
5330 // minimum device channel number > than the value requested by the user.
5331 stream_.nUserChannels[mode] = channels;
5333 result = snd_pcm_hw_params_get_channels_max( hw_params, &value );
5334 unsigned int deviceChannels = value;
5335 if ( result < 0 || deviceChannels < channels + firstChannel ) {
5336 snd_pcm_close( phandle );
5337 errorStream_ << "RtApiAlsa::probeDeviceOpen: requested channel parameters not supported by device (" << name << "), " << snd_strerror( result ) << ".";
5338 errorText_ = errorStream_.str();
5342 result = snd_pcm_hw_params_get_channels_min( hw_params, &value );
5344 snd_pcm_close( phandle );
5345 errorStream_ << "RtApiAlsa::probeDeviceOpen: error getting minimum channels for device (" << name << "), " << snd_strerror( result ) << ".";
5346 errorText_ = errorStream_.str();
5349 deviceChannels = value;
5350 if ( deviceChannels < channels + firstChannel ) deviceChannels = channels + firstChannel;
5351 stream_.nDeviceChannels[mode] = deviceChannels;
5353 // Set the device channels.
5354 result = snd_pcm_hw_params_set_channels( phandle, hw_params, deviceChannels );
5356 snd_pcm_close( phandle );
5357 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting channels for device (" << name << "), " << snd_strerror( result ) << ".";
5358 errorText_ = errorStream_.str();
5362 // Set the buffer number, which in ALSA is referred to as the "period".
5364 unsigned int periods = 0;
5365 if ( options ) periods = options->numberOfBuffers;
5366 if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) periods = 2;
5367 // Even though the hardware might allow 1 buffer, it won't work reliably.
5368 if ( periods < 2 ) periods = 2;
5369 result = snd_pcm_hw_params_set_periods_near( phandle, hw_params, &periods, &dir );
5371 snd_pcm_close( phandle );
5372 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting periods for device (" << name << "), " << snd_strerror( result ) << ".";
5373 errorText_ = errorStream_.str();
// The device may round the requested period size; report back via
// *bufferSize so the caller knows the actual value.
5377 // Set the buffer (or period) size.
5378 snd_pcm_uframes_t periodSize = *bufferSize;
5379 result = snd_pcm_hw_params_set_period_size_near( phandle, hw_params, &periodSize, &dir );
5381 snd_pcm_close( phandle );
5382 errorStream_ << "RtApiAlsa::probeDeviceOpen: error setting period size for device (" << name << "), " << snd_strerror( result ) << ".";
5383 errorText_ = errorStream_.str();
5386 *bufferSize = periodSize;
5388 // If attempting to setup a duplex stream, the bufferSize parameter
5389 // MUST be the same in both directions!
5390 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
5391 errorStream_ << "RtApiAlsa::probeDeviceOpen: system error setting buffer size for duplex stream on device (" << name << ").";
5392 errorText_ = errorStream_.str();
5396 stream_.bufferSize = *bufferSize;
5398 // Install the hardware configuration
5399 result = snd_pcm_hw_params( phandle, hw_params );
5401 snd_pcm_close( phandle );
5402 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing hardware configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5403 errorText_ = errorStream_.str();
5407 #if defined(__RTAUDIO_DEBUG__)
5408 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
5409 snd_pcm_hw_params_dump( hw_params, out );
// Huge stop threshold + silence-fill keep the device running through
// xruns instead of stopping, so the callback can recover gracefully.
5412 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
5413 snd_pcm_sw_params_t *sw_params = NULL;
5414 snd_pcm_sw_params_alloca( &sw_params );
5415 snd_pcm_sw_params_current( phandle, sw_params );
5416 snd_pcm_sw_params_set_start_threshold( phandle, sw_params, *bufferSize );
5417 snd_pcm_sw_params_set_stop_threshold( phandle, sw_params, 0x7fffffff );
5418 snd_pcm_sw_params_set_silence_threshold( phandle, sw_params, 0 );
5419 snd_pcm_sw_params_set_silence_size( phandle, sw_params, INT_MAX );
5420 result = snd_pcm_sw_params( phandle, sw_params );
5422 snd_pcm_close( phandle );
5423 errorStream_ << "RtApiAlsa::probeDeviceOpen: error installing software configuration on device (" << name << "), " << snd_strerror( result ) << ".";
5424 errorText_ = errorStream_.str();
5428 #if defined(__RTAUDIO_DEBUG__)
5429 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
5430 snd_pcm_sw_params_dump( sw_params, out );
// Conversion is required whenever format, channel count, or interleaving
// differs between the user-facing and device-facing sides.
5433 // Set flags for buffer conversion
5434 stream_.doConvertBuffer[mode] = false;
5435 if ( stream_.userFormat != stream_.deviceFormat[mode] )
5436 stream_.doConvertBuffer[mode] = true;
5437 if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
5438 stream_.doConvertBuffer[mode] = true;
5439 if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
5440 stream_.nUserChannels[mode] > 1 )
5441 stream_.doConvertBuffer[mode] = true;
// One AlsaHandle is shared by both directions: handles[0]=playback,
// handles[1]=capture (allocated on the first direction opened).
5443 // Allocate the ApiHandle if necessary and then save.
5444 AlsaHandle *apiInfo = 0;
5445 if ( stream_.apiHandle == 0 ) {
5447 apiInfo = (AlsaHandle *) new AlsaHandle;
5449 catch ( std::bad_alloc& ) {
5450 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating AlsaHandle memory.";
5453 stream_.apiHandle = (void *) apiInfo;
5454 apiInfo->handles[0] = 0;
5455 apiInfo->handles[1] = 0;
5458 apiInfo = (AlsaHandle *) stream_.apiHandle;
5460 apiInfo->handles[mode] = phandle;
5462 // Allocate necessary internal buffers.
5463 unsigned long bufferBytes;
5464 bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
5465 stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
5466 if ( stream_.userBuffer[mode] == NULL ) {
5467 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating user buffer memory.";
5471 if ( stream_.doConvertBuffer[mode] ) {
// The device buffer is shared by both directions; reuse the output-side
// allocation for input when it is already large enough.
5473 bool makeBuffer = true;
5474 bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
5475 if ( mode == INPUT ) {
5476 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5477 unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
5478 if ( bufferBytes <= bytesOut ) makeBuffer = false;
5483 bufferBytes *= *bufferSize;
5484 if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
5485 stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
5486 if ( stream_.deviceBuffer == NULL ) {
5487 errorText_ = "RtApiAlsa::probeDeviceOpen: error allocating device buffer memory.";
5493 stream_.sampleRate = sampleRate;
5494 stream_.nBuffers = periods;
5495 stream_.device[mode] = device;
5496 stream_.state = STREAM_STOPPED;
5498 // Setup the buffer conversion information structure.
5499 if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
// Second (input) direction of a duplex stream: promote mode to DUPLEX
// and snd_pcm_link the handles so start/stop is atomic across both.
5501 // Setup thread if necessary.
5502 if ( stream_.mode == OUTPUT && mode == INPUT ) {
5503 // We had already set up an output stream.
5504 stream_.mode = DUPLEX;
5505 // Link the streams if possible.
5506 apiInfo->synchronized = false;
5507 if ( snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0 )
5508 apiInfo->synchronized = true;
5510 errorText_ = "RtApiAlsa::probeDeviceOpen: unable to synchronize input and output devices.";
5511 error( RtError::WARNING );
5515 stream_.mode = mode;
5517 // Setup callback thread.
5518 stream_.callbackInfo.object = (void *) this;
5520 // Set the thread attributes for joinable and realtime scheduling
5521 // priority. The higher priority will only take affect if the
5522 // program is run as root or suid.
5523 pthread_attr_t attr;
5524 pthread_attr_init( &attr );
5525 pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
5526 #ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
5527 pthread_attr_setschedpolicy( &attr, SCHED_RR );
5529 pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
5532 stream_.callbackInfo.isRunning = true;
5533 result = pthread_create( &stream_.callbackInfo.thread, &attr, alsaCallbackHandler, &stream_.callbackInfo );
5534 pthread_attr_destroy( &attr );
5536 stream_.callbackInfo.isRunning = false;
5537 errorText_ = "RtApiAlsa::probeDeviceOpen: error creating callback thread!";
// Error cleanup path (label elided): release pcm handles, the
// AlsaHandle, and all heap buffers so a failed open leaks nothing.
5546 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5547 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5549 stream_.apiHandle = 0;
5552 for ( int i=0; i<2; i++ ) {
5553 if ( stream_.userBuffer[i] ) {
5554 free( stream_.userBuffer[i] );
5555 stream_.userBuffer[i] = 0;
5559 if ( stream_.deviceBuffer ) {
5560 free( stream_.deviceBuffer );
5561 stream_.deviceBuffer = 0;
// Tear down the open stream: stop the callback thread, drop any running
// pcm, close both ALSA handles, release the AlsaHandle and all buffers,
// and reset the stream state machine to UNINITIALIZED/CLOSED.
// NOTE(review): elided listing -- closing braces, the early return after
// the warning, and the AlsaHandle delete are outside this view.
5567 void RtApiAlsa :: closeStream()
5569 if ( stream_.state == STREAM_CLOSED ) {
5570 errorText_ = "RtApiAlsa::closeStream(): no open stream to close!";
5571 error( RtError::WARNING );
// Clearing isRunning lets alsaCallbackHandler's loop exit; join waits
// for the callback thread to finish before handles are closed.
5575 stream_.callbackInfo.isRunning = false;
5576 pthread_join( stream_.callbackInfo.thread, NULL );
5578 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5579 if ( stream_.state == STREAM_RUNNING ) {
5580 stream_.state = STREAM_STOPPED;
// Immediate stop (drop, not drain) -- pending samples are discarded.
5581 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
5582 snd_pcm_drop( apiInfo->handles[0] );
5583 if ( stream_.mode == INPUT || stream_.mode == DUPLEX )
5584 snd_pcm_drop( apiInfo->handles[1] );
// handles[0] = playback, handles[1] = capture; either may be unopened.
5588 if ( apiInfo->handles[0] ) snd_pcm_close( apiInfo->handles[0] );
5589 if ( apiInfo->handles[1] ) snd_pcm_close( apiInfo->handles[1] );
5591 stream_.apiHandle = 0;
// Free the per-direction user buffers and the shared device buffer.
5594 for ( int i=0; i<2; i++ ) {
5595 if ( stream_.userBuffer[i] ) {
5596 free( stream_.userBuffer[i] );
5597 stream_.userBuffer[i] = 0;
5601 if ( stream_.deviceBuffer ) {
5602 free( stream_.deviceBuffer );
5603 stream_.deviceBuffer = 0;
5606 stream_.mode = UNINITIALIZED;
5607 stream_.state = STREAM_CLOSED;
// Transition the stream to STREAM_RUNNING, preparing each pcm handle
// that is not already in the PREPARED state.  Raises SYSTEM_ERROR if
// any snd_pcm_prepare call failed.
// NOTE(review): elided listing -- the declaration/initialization of
// 'result' and several braces (including the unlock/early-exit path
// between 5645 and 5651) are outside this view.
5610 void RtApiAlsa :: startStream()
5612 // This method calls snd_pcm_prepare if the device isn't already in that state.
5615 if ( stream_.state == STREAM_RUNNING ) {
5616 errorText_ = "RtApiAlsa::startStream(): the stream is already running!";
5617 error( RtError::WARNING );
// Serialize against the callback thread while touching pcm state.
5621 MUTEX_LOCK( &stream_.mutex );
5624 snd_pcm_state_t state;
5625 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5626 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5627 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5628 state = snd_pcm_state( handle[0] );
5629 if ( state != SND_PCM_STATE_PREPARED ) {
5630 result = snd_pcm_prepare( handle[0] );
5632 errorStream_ << "RtApiAlsa::startStream: error preparing output pcm device, " << snd_strerror( result ) << ".";
5633 errorText_ = errorStream_.str();
// When duplex handles are snd_pcm_link'ed ('synchronized'), preparing
// the output side also covers the input side -- skip it here.
5639 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5640 state = snd_pcm_state( handle[1] );
5641 if ( state != SND_PCM_STATE_PREPARED ) {
5642 result = snd_pcm_prepare( handle[1] );
5644 errorStream_ << "RtApiAlsa::startStream: error preparing input pcm device, " << snd_strerror( result ) << ".";
5645 errorText_ = errorStream_.str();
5651 stream_.state = STREAM_RUNNING;
5654 MUTEX_UNLOCK( &stream_.mutex );
// Only raise after the mutex is released.
5656 if ( result >= 0 ) return;
5657 error( RtError::SYSTEM_ERROR );
// Stop the stream, letting queued output play out (drain) except for
// synchronized duplex handles, where drop is used so the linked pair
// stops together.  Raises SYSTEM_ERROR on an ALSA failure.
// NOTE(review): elided listing -- 'result' declaration, early return
// after the warning, and several braces are outside this view.
5660 void RtApiAlsa :: stopStream()
5663 if ( stream_.state == STREAM_STOPPED ) {
5664 errorText_ = "RtApiAlsa::stopStream(): the stream is already stopped!";
5665 error( RtError::WARNING );
5669 // Change the state before the lock to improve shutdown response
5670 // when using a callback.
5671 stream_.state = STREAM_STOPPED;
5672 MUTEX_LOCK( &stream_.mutex );
5675 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5676 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5677 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// drop for linked handles (stops both immediately); drain otherwise
// so pending playback samples are not cut off.
5678 if ( apiInfo->synchronized )
5679 result = snd_pcm_drop( handle[0] );
5681 result = snd_pcm_drain( handle[0] );
5683 errorStream_ << "RtApiAlsa::stopStream: error draining output pcm device, " << snd_strerror( result ) << ".";
5684 errorText_ = errorStream_.str();
// Capture has nothing to drain; unlinked input is simply dropped.
5689 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5690 result = snd_pcm_drop( handle[1] );
5692 errorStream_ << "RtApiAlsa::stopStream: error stopping input pcm device, " << snd_strerror( result ) << ".";
5693 errorText_ = errorStream_.str();
5699 MUTEX_UNLOCK( &stream_.mutex );
5701 if ( result >= 0 ) return;
5702 error( RtError::SYSTEM_ERROR );
// Stop the stream immediately, discarding any pending samples
// (snd_pcm_drop on both directions -- contrast stopStream, which drains
// output).  Raises SYSTEM_ERROR on an ALSA failure.
// NOTE(review): elided listing -- 'result' declaration, early return
// after the warning, and several braces are outside this view.
5705 void RtApiAlsa :: abortStream()
5708 if ( stream_.state == STREAM_STOPPED ) {
5709 errorText_ = "RtApiAlsa::abortStream(): the stream is already stopped!";
5710 error( RtError::WARNING );
5714 // Change the state before the lock to improve shutdown response
5715 // when using a callback.
5716 stream_.state = STREAM_STOPPED;
5717 MUTEX_LOCK( &stream_.mutex );
5720 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5721 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
5722 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5723 result = snd_pcm_drop( handle[0] );
5725 errorStream_ << "RtApiAlsa::abortStream: error aborting output pcm device, " << snd_strerror( result ) << ".";
5726 errorText_ = errorStream_.str();
// Synchronized (linked) handles stop together with the output drop.
5731 if ( ( stream_.mode == INPUT || stream_.mode == DUPLEX ) && !apiInfo->synchronized ) {
5732 result = snd_pcm_drop( handle[1] );
5734 errorStream_ << "RtApiAlsa::abortStream: error aborting input pcm device, " << snd_strerror( result ) << ".";
5735 errorText_ = errorStream_.str();
5741 MUTEX_UNLOCK( &stream_.mutex );
5743 stream_.state = STREAM_STOPPED;
5744 if ( result >= 0 ) return;
5745 error( RtError::SYSTEM_ERROR );
// One iteration of the audio callback loop: invoke the user callback
// with the user buffers, then read from / write to the pcm device(s),
// doing byte-swap and format/channel conversion as flagged at open time,
// and recovering from xruns (EPIPE) by re-preparing the device.
// Called repeatedly by alsaCallbackHandler on the callback thread.
// NOTE(review): elided listing -- declarations of 'buffer', 'channels',
// 'handle', the 'unlock:' label, 'result' and many braces/guards are
// outside this view; comments describe only the visible lines.
5748 void RtApiAlsa :: callbackEvent()
// While stopped (but not closed) the thread idles instead of spinning.
5750 if ( stream_.state == STREAM_STOPPED ) {
5751 if ( stream_.callbackInfo.isRunning ) usleep( 50000 ); // sleep 50 milliseconds
5755 if ( stream_.state == STREAM_CLOSED ) {
5756 errorText_ = "RtApiAlsa::callbackEvent(): the stream is closed ... this shouldn't happen!";
5757 error( RtError::WARNING );
// Report any xrun flagged since the last callback, then clear it.
// xrun[0] = output underflow, xrun[1] = input overflow.
5761 int doStopStream = 0;
5762 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
5763 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
5764 double streamTime = getStreamTime();
5765 RtAudioStreamStatus status = 0;
5766 if ( stream_.mode != INPUT && apiInfo->xrun[0] == true ) {
5767 status |= RTAUDIO_OUTPUT_UNDERFLOW;
5768 apiInfo->xrun[0] = false;
5770 if ( stream_.mode != OUTPUT && apiInfo->xrun[1] == true ) {
5771 status |= RTAUDIO_INPUT_OVERFLOW;
5772 apiInfo->xrun[1] = false;
// The user callback runs outside the mutex; its return value requests
// continue (0), stop (1) or abort (2) -- acted on at the bottom.
5774 doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
5775 stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
5777 MUTEX_LOCK( &stream_.mutex );
5779 // The state might change while waiting on a mutex.
5780 if ( stream_.state == STREAM_STOPPED ) goto unlock;
5786 snd_pcm_sframes_t frames;
5787 RtAudioFormat format;
5788 handle = (snd_pcm_t **) apiInfo->handles;
// ---- Input (capture) side: index 1 throughout. ----
5790 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// Read into the device buffer when conversion is needed, otherwise
// straight into the user buffer.
5792 // Setup parameters.
5793 if ( stream_.doConvertBuffer[1] ) {
5794 buffer = stream_.deviceBuffer;
5795 channels = stream_.nDeviceChannels[1];
5796 format = stream_.deviceFormat[1];
5799 buffer = stream_.userBuffer[1];
5800 channels = stream_.nUserChannels[1];
5801 format = stream_.userFormat;
5804 // Read samples from device in interleaved/non-interleaved format.
5805 if ( stream_.deviceInterleaved[1] )
5806 result = snd_pcm_readi( handle[1], buffer, stream_.bufferSize );
// Non-interleaved: build per-channel pointers into the planar buffer.
5808 void *bufs[channels];
5809 size_t offset = stream_.bufferSize * formatBytes( format );
5810 for ( int i=0; i<channels; i++ )
5811 bufs[i] = (void *) (buffer + (i * offset));
5812 result = snd_pcm_readn( handle[1], bufs, stream_.bufferSize );
5815 if ( result < (int) stream_.bufferSize ) {
5816 // Either an error or underrun occured.
// EPIPE == overrun: flag it for the next callback's status word and
// re-prepare the device so capture resumes.
5817 if ( result == -EPIPE ) {
5818 snd_pcm_state_t state = snd_pcm_state( handle[1] );
5819 if ( state == SND_PCM_STATE_XRUN ) {
5820 apiInfo->xrun[1] = true;
5821 result = snd_pcm_prepare( handle[1] );
5823 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after overrun, " << snd_strerror( result ) << ".";
5824 errorText_ = errorStream_.str();
5828 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
5829 errorText_ = errorStream_.str();
5833 errorStream_ << "RtApiAlsa::callbackEvent: audio read error, " << snd_strerror( result ) << ".";
5834 errorText_ = errorStream_.str();
5836 error( RtError::WARNING );
5840 // Do byte swapping if necessary.
5841 if ( stream_.doByteSwap[1] )
5842 byteSwapBuffer( buffer, stream_.bufferSize * channels, format );
5844 // Do buffer conversion if necessary.
5845 if ( stream_.doConvertBuffer[1] )
5846 convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
5848 // Check stream latency
5849 result = snd_pcm_delay( handle[1], &frames );
5850 if ( result == 0 && frames > 0 ) stream_.latency[1] = frames;
// ---- Output (playback) side: index 0 throughout. ----
5853 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5855 // Setup parameters and do buffer conversion if necessary.
5856 if ( stream_.doConvertBuffer[0] ) {
5857 buffer = stream_.deviceBuffer;
5858 convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
5859 channels = stream_.nDeviceChannels[0];
5860 format = stream_.deviceFormat[0];
5863 buffer = stream_.userBuffer[0];
5864 channels = stream_.nUserChannels[0];
5865 format = stream_.userFormat;
5868 // Do byte swapping if necessary.
5869 if ( stream_.doByteSwap[0] )
5870 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
5872 // Write samples to device in interleaved/non-interleaved format.
5873 if ( stream_.deviceInterleaved[0] )
5874 result = snd_pcm_writei( handle[0], buffer, stream_.bufferSize );
5876 void *bufs[channels];
5877 size_t offset = stream_.bufferSize * formatBytes( format );
5878 for ( int i=0; i<channels; i++ )
5879 bufs[i] = (void *) (buffer + (i * offset));
5880 result = snd_pcm_writen( handle[0], bufs, stream_.bufferSize );
5883 if ( result < (int) stream_.bufferSize ) {
5884 // Either an error or underrun occured.
// EPIPE == underrun: same flag-and-reprepare recovery as on input.
5885 if ( result == -EPIPE ) {
5886 snd_pcm_state_t state = snd_pcm_state( handle[0] );
5887 if ( state == SND_PCM_STATE_XRUN ) {
5888 apiInfo->xrun[0] = true;
5889 result = snd_pcm_prepare( handle[0] );
5891 errorStream_ << "RtApiAlsa::callbackEvent: error preparing device after underrun, " << snd_strerror( result ) << ".";
5892 errorText_ = errorStream_.str();
5896 errorStream_ << "RtApiAlsa::callbackEvent: error, current state is " << snd_pcm_state_name( state ) << ", " << snd_strerror( result ) << ".";
5897 errorText_ = errorStream_.str();
5901 errorStream_ << "RtApiAlsa::callbackEvent: audio write error, " << snd_strerror( result ) << ".";
5902 errorText_ = errorStream_.str();
5904 error( RtError::WARNING );
5908 // Check stream latency
5909 result = snd_pcm_delay( handle[0], &frames );
5910 if ( result == 0 && frames > 0 ) stream_.latency[0] = frames;
5914 MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream clock, then honor the user's stop/abort request
// (must happen after the mutex is released -- stopStream relocks it).
5916 RtApi::tickStreamTime();
5917 if ( doStopStream == 1 ) this->stopStream();
5918 else if ( doStopStream == 2 ) this->abortStream();
// Entry point for the ALSA callback thread created in probeDeviceOpen.
// Loops calling RtApiAlsa::callbackEvent until closeStream clears
// callbackInfo.isRunning, then exits the thread.
// NOTE(review): elided listing -- braces and any #ifdef around the
// scheduler-priority section are outside this view.
// FIX: line 5933 read "sched_setscheduler( 0, SCHED_RR, ¶m );" --
// mis-encoded HTML entity (&para; swallowed "&pa" of "&param");
// restored to &param.
5921 extern "C" void *alsaCallbackHandler( void *ptr )
5923 CallbackInfo *info = (CallbackInfo *) ptr;
5924 RtApiAlsa *object = (RtApiAlsa *) info->object;
5925 bool *isRunning = &info->isRunning;
5928 // Set a higher scheduler priority (P.J. Leonard)
5929 struct sched_param param;
// Midpoint of the SCHED_RR priority range; silently ineffective unless
// the process has the privilege to raise its scheduling class.
5930 int min = sched_get_priority_min( SCHED_RR );
5931 int max = sched_get_priority_max( SCHED_RR );
5932 param.sched_priority = min + ( max - min ) / 2; // Is this the best number?
5933 sched_setscheduler( 0, SCHED_RR, &param );
// pthread_testcancel provides a cancellation point each iteration.
5936 while ( *isRunning == true ) {
5937 pthread_testcancel();
5938 object->callbackEvent();
5941 pthread_exit( NULL );
5944 //******************** End of __LINUX_ALSA__ *********************//
5948 #if defined(__LINUX_OSS__)
5951 #include <sys/ioctl.h>
5954 #include "oss/soundcard.h"
5958 extern "C" void *ossCallbackHandler(void * ptr);
// A structure to hold various information related to the OSS API
// implementation.  Elsewhere in this file id[0] is used for the output
// device and id[1] for the input device.
int id[2]; // device ids
// Default-construct with cleared device ids and xrun flags.
:triggered(false) { id[0] = 0; id[1] = 0; xrun[0] = false; xrun[1] = false; }
// Default constructor: no OSS-specific initialization is needed.
RtApiOss :: RtApiOss()
// Nothing to do here.
// Destructor: make sure any open stream is closed before the object
// is destroyed.
RtApiOss :: ~RtApiOss()
if ( stream_.state != STREAM_CLOSED ) closeStream();
// Return the number of audio devices reported by the OSS system
// (sysinfo.numaudios).  Emits a WARNING and falls through if the mixer
// device cannot be opened or queried.
unsigned int RtApiOss :: getDeviceCount( void )
int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
if ( mixerfd == -1 ) {
errorText_ = "RtApiOss::getDeviceCount: error opening '/dev/mixer'.";
error( RtError::WARNING );
// SNDCTL_SYSINFO only exists in OSS version 4.0 and later.
oss_sysinfo sysinfo;
if ( ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo ) == -1 ) {
errorText_ = "RtApiOss::getDeviceCount: error getting sysinfo, OSS version >= 4.0 is required.";
error( RtError::WARNING );
return sysinfo.numaudios;
// Probe device `device` via the OSS v4 mixer interface and fill in a
// DeviceInfo structure (channel counts, native formats, sample rates,
// name).  Probe failures produce a WARNING with info.probed left false;
// an invalid device index raises INVALID_USE.
RtAudio::DeviceInfo RtApiOss :: getDeviceInfo( unsigned int device )
RtAudio::DeviceInfo info;
info.probed = false;
int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
if ( mixerfd == -1 ) {
errorText_ = "RtApiOss::getDeviceInfo: error opening '/dev/mixer'.";
error( RtError::WARNING );
// SNDCTL_SYSINFO requires OSS >= 4.0.
oss_sysinfo sysinfo;
int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
if ( result == -1 ) {
errorText_ = "RtApiOss::getDeviceInfo: error getting sysinfo, OSS version >= 4.0 is required.";
error( RtError::WARNING );
unsigned nDevices = sysinfo.numaudios;
if ( nDevices == 0 ) {
errorText_ = "RtApiOss::getDeviceInfo: no devices found!";
error( RtError::INVALID_USE );
if ( device >= nDevices ) {
errorText_ = "RtApiOss::getDeviceInfo: device ID is invalid!";
error( RtError::INVALID_USE );
// Query per-device capabilities (caps, channels, formats, rates).
oss_audioinfo ainfo;
result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
if ( result == -1 ) {
errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
errorText_ = errorStream_.str();
error( RtError::WARNING );
// Channel support: max_channels applies to both directions; duplex
// channel count is the smaller of the two when both are available.
if ( ainfo.caps & PCM_CAP_OUTPUT ) info.outputChannels = ainfo.max_channels;
if ( ainfo.caps & PCM_CAP_INPUT ) info.inputChannels = ainfo.max_channels;
if ( ainfo.caps & PCM_CAP_DUPLEX ) {
if ( info.outputChannels > 0 && info.inputChannels > 0 && ainfo.caps & PCM_CAP_DUPLEX )
info.duplexChannels = (info.outputChannels > info.inputChannels) ? info.inputChannels : info.outputChannels;
// Probe data formats ... do for input
unsigned long mask = ainfo.iformats;
if ( mask & AFMT_S16_LE || mask & AFMT_S16_BE )
info.nativeFormats |= RTAUDIO_SINT16;
if ( mask & AFMT_S8 )
info.nativeFormats |= RTAUDIO_SINT8;
if ( mask & AFMT_S32_LE || mask & AFMT_S32_BE )
info.nativeFormats |= RTAUDIO_SINT32;
if ( mask & AFMT_FLOAT )
info.nativeFormats |= RTAUDIO_FLOAT32;
if ( mask & AFMT_S24_LE || mask & AFMT_S24_BE )
info.nativeFormats |= RTAUDIO_SINT24;
// Check that we have at least one supported format
if ( info.nativeFormats == 0 ) {
errorStream_ << "RtApiOss::getDeviceInfo: device (" << ainfo.name << ") data format not supported by RtAudio.";
errorText_ = errorStream_.str();
error( RtError::WARNING );
// Probe the supported sample rates.  If the device lists explicit
// rates, intersect them with our SAMPLE_RATES table; otherwise accept
// every table entry inside [min_rate, max_rate].
info.sampleRates.clear();
if ( ainfo.nrates ) {
for ( unsigned int i=0; i<ainfo.nrates; i++ ) {
for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
if ( ainfo.rates[i] == SAMPLE_RATES[k] ) {
info.sampleRates.push_back( SAMPLE_RATES[k] );
// Check min and max rate values;
for ( unsigned int k=0; k<MAX_SAMPLE_RATES; k++ ) {
if ( ainfo.min_rate <= (int) SAMPLE_RATES[k] && ainfo.max_rate >= (int) SAMPLE_RATES[k] )
info.sampleRates.push_back( SAMPLE_RATES[k] );
if ( info.sampleRates.size() == 0 ) {
errorStream_ << "RtApiOss::getDeviceInfo: no supported sample rates found for device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
error( RtError::WARNING );
info.name = ainfo.name;
// Open and configure an OSS device for the given mode (OUTPUT or
// INPUT): validate the device, open its devnode, negotiate channels,
// sample format, fragment/buffer size and sample rate, allocate the
// user/device buffers, and finally spawn the callback thread.  Returns
// via SUCCESS/FAILURE paths; on failure the error-unwind code at the
// bottom closes descriptors and frees buffers.
bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,
unsigned int firstChannel, unsigned int sampleRate,
RtAudioFormat format, unsigned int *bufferSize,
RtAudio::StreamOptions *options )
int mixerfd = open( "/dev/mixer", O_RDWR, 0 );
if ( mixerfd == -1 ) {
errorText_ = "RtApiOss::probeDeviceOpen: error opening '/dev/mixer'.";
// SNDCTL_SYSINFO requires OSS >= 4.0.
oss_sysinfo sysinfo;
int result = ioctl( mixerfd, SNDCTL_SYSINFO, &sysinfo );
if ( result == -1 ) {
errorText_ = "RtApiOss::probeDeviceOpen: error getting sysinfo, OSS version >= 4.0 is required.";
unsigned nDevices = sysinfo.numaudios;
if ( nDevices == 0 ) {
// This should not happen because a check is made before this function is called.
errorText_ = "RtApiOss::probeDeviceOpen: no devices found!";
if ( device >= nDevices ) {
// This should not happen because a check is made before this function is called.
errorText_ = "RtApiOss::probeDeviceOpen: device ID is invalid!";
oss_audioinfo ainfo;
result = ioctl( mixerfd, SNDCTL_AUDIOINFO, &ainfo );
if ( result == -1 ) {
// NOTE(review): this message names "getDeviceInfo" but we are in
// probeDeviceOpen — looks like a copy/paste slip; confirm before fixing.
errorStream_ << "RtApiOss::getDeviceInfo: error getting device (" << ainfo.name << ") info.";
errorText_ = errorStream_.str();
// Check if device supports input or output
if ( ( mode == OUTPUT && !( ainfo.caps & PCM_CAP_OUTPUT ) ) ||
( mode == INPUT && !( ainfo.caps & PCM_CAP_INPUT ) ) ) {
if ( mode == OUTPUT )
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support output.";
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support input.";
errorText_ = errorStream_.str();
OssHandle *handle = (OssHandle *) stream_.apiHandle;
if ( mode == OUTPUT )
else { // mode == INPUT
if (stream_.mode == OUTPUT && stream_.device[0] == device) {
// We just set the same device for playback ... close and reopen for duplex (OSS only).
close( handle->id[0] );
if ( !( ainfo.caps & PCM_CAP_DUPLEX ) ) {
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support duplex mode.";
errorText_ = errorStream_.str();
// Check that the number previously set channels is the same.
if ( stream_.nUserChannels[0] != channels ) {
errorStream_ << "RtApiOss::probeDeviceOpen: input/output channels must be equal for OSS duplex device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
// Set exclusive access if specified.
if ( options && options->flags & RTAUDIO_HOG_DEVICE ) flags |= O_EXCL;
// Try to open the device.
fd = open( ainfo.devnode, flags, 0 );
if ( errno == EBUSY )
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") is busy.";
errorStream_ << "RtApiOss::probeDeviceOpen: error opening device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
// For duplex operation, specifically set this mode (this doesn't seem to work).
// NOTE(review): "flags | O_RDWR" is always non-zero, so this branch
// always executes — presumably "flags & O_RDWR" (or "flags == O_RDWR")
// was intended to restrict it to duplex opens; confirm before changing.
if ( flags | O_RDWR ) {
result = ioctl( fd, SNDCTL_DSP_SETDUPLEX, NULL );
if ( result == -1) {
errorStream_ << "RtApiOss::probeDeviceOpen: error setting duplex mode for device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
// Check the device channel support.
stream_.nUserChannels[mode] = channels;
if ( ainfo.max_channels < (int)(channels + firstChannel) ) {
errorStream_ << "RtApiOss::probeDeviceOpen: the device (" << ainfo.name << ") does not support requested channel parameters.";
errorText_ = errorStream_.str();
// Set the number of channels.
int deviceChannels = channels + firstChannel;
result = ioctl( fd, SNDCTL_DSP_CHANNELS, &deviceChannels );
if ( result == -1 || deviceChannels < (int)(channels + firstChannel) ) {
errorStream_ << "RtApiOss::probeDeviceOpen: error setting channel parameters on device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
stream_.nDeviceChannels[mode] = deviceChannels;
// Get the data format mask
result = ioctl( fd, SNDCTL_DSP_GETFMTS, &mask );
if ( result == -1 ) {
errorStream_ << "RtApiOss::probeDeviceOpen: error getting device (" << ainfo.name << ") data formats.";
errorText_ = errorStream_.str();
// Determine how to set the device format.  Prefer the user's format;
// the _OE (opposite-endian) variants are accepted with a byte-swap flag.
stream_.userFormat = format;
int deviceFormat = -1;
stream_.doByteSwap[mode] = false;
if ( format == RTAUDIO_SINT8 ) {
if ( mask & AFMT_S8 ) {
deviceFormat = AFMT_S8;
stream_.deviceFormat[mode] = RTAUDIO_SINT8;
else if ( format == RTAUDIO_SINT16 ) {
if ( mask & AFMT_S16_NE ) {
deviceFormat = AFMT_S16_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
else if ( mask & AFMT_S16_OE ) {
deviceFormat = AFMT_S16_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
stream_.doByteSwap[mode] = true;
else if ( format == RTAUDIO_SINT24 ) {
if ( mask & AFMT_S24_NE ) {
deviceFormat = AFMT_S24_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT24;
else if ( mask & AFMT_S24_OE ) {
deviceFormat = AFMT_S24_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT24;
stream_.doByteSwap[mode] = true;
else if ( format == RTAUDIO_SINT32 ) {
if ( mask & AFMT_S32_NE ) {
deviceFormat = AFMT_S32_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
else if ( mask & AFMT_S32_OE ) {
deviceFormat = AFMT_S32_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
stream_.doByteSwap[mode] = true;
if ( deviceFormat == -1 ) {
// The user requested format is not natively supported by the device.
// Fall back in order of preference: native-endian 16/32/24-bit, then
// opposite-endian variants (byte-swapped), then 8-bit.
if ( mask & AFMT_S16_NE ) {
deviceFormat = AFMT_S16_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
else if ( mask & AFMT_S32_NE ) {
deviceFormat = AFMT_S32_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
else if ( mask & AFMT_S24_NE ) {
deviceFormat = AFMT_S24_NE;
stream_.deviceFormat[mode] = RTAUDIO_SINT24;
else if ( mask & AFMT_S16_OE ) {
deviceFormat = AFMT_S16_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT16;
stream_.doByteSwap[mode] = true;
else if ( mask & AFMT_S32_OE ) {
deviceFormat = AFMT_S32_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT32;
stream_.doByteSwap[mode] = true;
else if ( mask & AFMT_S24_OE ) {
deviceFormat = AFMT_S24_OE;
stream_.deviceFormat[mode] = RTAUDIO_SINT24;
stream_.doByteSwap[mode] = true;
else if ( mask & AFMT_S8) {
deviceFormat = AFMT_S8;
stream_.deviceFormat[mode] = RTAUDIO_SINT8;
if ( stream_.deviceFormat[mode] == 0 ) {
// This really shouldn't happen ...
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") data format not supported by RtAudio.";
errorText_ = errorStream_.str();
// Set the data format.
int temp = deviceFormat;
result = ioctl( fd, SNDCTL_DSP_SETFMT, &deviceFormat );
if ( result == -1 || deviceFormat != temp ) {
errorStream_ << "RtApiOss::probeDeviceOpen: error setting data format on device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
// Attempt to set the buffer size. According to OSS, the minimum
// number of buffers is two. The supposed minimum buffer size is 16
// bytes, so that will be our lower bound. The argument to this
// call is in the form 0xMMMMSSSS (hex), where the buffer size (in
// bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
// We'll check the actual value used near the end of the setup
int ossBufferBytes = *bufferSize * formatBytes( stream_.deviceFormat[mode] ) * deviceChannels;
if ( ossBufferBytes < 16 ) ossBufferBytes = 16;
if ( options ) buffers = options->numberOfBuffers;
if ( options && options->flags & RTAUDIO_MINIMIZE_LATENCY ) buffers = 2;
if ( buffers < 2 ) buffers = 3;
// log10(x)/log10(2) == log2(x): pack the fragment-size exponent into
// the low 16 bits and the buffer count into the high 16 bits.
temp = ((int) buffers << 16) + (int)( log10( (double)ossBufferBytes ) / log10( 2.0 ) );
result = ioctl( fd, SNDCTL_DSP_SETFRAGMENT, &temp );
if ( result == -1 ) {
errorStream_ << "RtApiOss::probeDeviceOpen: error setting buffer size on device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
stream_.nBuffers = buffers;
// Save buffer size (in sample frames).
*bufferSize = ossBufferBytes / ( formatBytes(stream_.deviceFormat[mode]) * deviceChannels );
stream_.bufferSize = *bufferSize;
// Set the sample rate.
int srate = sampleRate;
result = ioctl( fd, SNDCTL_DSP_SPEED, &srate );
if ( result == -1 ) {
errorStream_ << "RtApiOss::probeDeviceOpen: error setting sample rate (" << sampleRate << ") on device (" << ainfo.name << ").";
errorText_ = errorStream_.str();
// Verify the sample rate setup worked.
// NOTE(review): srate is int but sampleRate is unsigned int, so the
// subtraction is performed in unsigned arithmetic before abs() —
// consider "abs( srate - (int) sampleRate )"; confirm before changing.
if ( abs( srate - sampleRate ) > 100 ) {
errorStream_ << "RtApiOss::probeDeviceOpen: device (" << ainfo.name << ") does not support sample rate (" << sampleRate << ").";
errorText_ = errorStream_.str();
stream_.sampleRate = sampleRate;
if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] == device) {
// We're doing duplex setup here.
stream_.deviceFormat[0] = stream_.deviceFormat[1];
stream_.nDeviceChannels[0] = deviceChannels;
// Set interleaving parameters.
stream_.userInterleaved = true;
stream_.deviceInterleaved[mode] = true;
if ( options && options->flags & RTAUDIO_NONINTERLEAVED )
stream_.userInterleaved = false;
// Set flags for buffer conversion
stream_.doConvertBuffer[mode] = false;
if ( stream_.userFormat != stream_.deviceFormat[mode] )
stream_.doConvertBuffer[mode] = true;
if ( stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode] )
stream_.doConvertBuffer[mode] = true;
if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
stream_.nUserChannels[mode] > 1 )
stream_.doConvertBuffer[mode] = true;
// Allocate the stream handles if necessary and then save.
if ( stream_.apiHandle == 0 ) {
handle = new OssHandle;
catch ( std::bad_alloc& ) {
errorText_ = "RtApiOss::probeDeviceOpen: error allocating OssHandle memory.";
stream_.apiHandle = (void *) handle;
handle = (OssHandle *) stream_.apiHandle;
handle->id[mode] = fd;
// Allocate necessary internal buffers.
unsigned long bufferBytes;
bufferBytes = stream_.nUserChannels[mode] * *bufferSize * formatBytes( stream_.userFormat );
stream_.userBuffer[mode] = (char *) calloc( bufferBytes, 1 );
if ( stream_.userBuffer[mode] == NULL ) {
errorText_ = "RtApiOss::probeDeviceOpen: error allocating user buffer memory.";
if ( stream_.doConvertBuffer[mode] ) {
// Reuse an existing (output) device buffer for input if it is
// already large enough; otherwise (re)allocate.
bool makeBuffer = true;
bufferBytes = stream_.nDeviceChannels[mode] * formatBytes( stream_.deviceFormat[mode] );
if ( mode == INPUT ) {
if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
unsigned long bytesOut = stream_.nDeviceChannels[0] * formatBytes( stream_.deviceFormat[0] );
if ( bufferBytes <= bytesOut ) makeBuffer = false;
bufferBytes *= *bufferSize;
if ( stream_.deviceBuffer ) free( stream_.deviceBuffer );
stream_.deviceBuffer = (char *) calloc( bufferBytes, 1 );
if ( stream_.deviceBuffer == NULL ) {
errorText_ = "RtApiOss::probeDeviceOpen: error allocating device buffer memory.";
stream_.device[mode] = device;
stream_.state = STREAM_STOPPED;
// Setup the buffer conversion information structure.
if ( stream_.doConvertBuffer[mode] ) setConvertInfo( mode, firstChannel );
// Setup thread if necessary.
if ( stream_.mode == OUTPUT && mode == INPUT ) {
// We had already set up an output stream.
stream_.mode = DUPLEX;
if ( stream_.device[0] == device ) handle->id[0] = fd;
stream_.mode = mode;
// Setup callback thread.
stream_.callbackInfo.object = (void *) this;
// Set the thread attributes for joinable and realtime scheduling
// priority. The higher priority will only take affect if the
// program is run as root or suid.
pthread_attr_t attr;
pthread_attr_init( &attr );
pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
pthread_attr_setschedpolicy( &attr, SCHED_RR );
pthread_attr_setschedpolicy( &attr, SCHED_OTHER );
stream_.callbackInfo.isRunning = true;
result = pthread_create( &stream_.callbackInfo.thread, &attr, ossCallbackHandler, &stream_.callbackInfo );
pthread_attr_destroy( &attr );
stream_.callbackInfo.isRunning = false;
errorText_ = "RtApiOss::error creating callback thread!";
// Error-unwind path: close descriptors, release the handle and any
// allocated buffers before reporting failure.
if ( handle->id[0] ) close( handle->id[0] );
if ( handle->id[1] ) close( handle->id[1] );
stream_.apiHandle = 0;
for ( int i=0; i<2; i++ ) {
if ( stream_.userBuffer[i] ) {
free( stream_.userBuffer[i] );
stream_.userBuffer[i] = 0;
if ( stream_.deviceBuffer ) {
free( stream_.deviceBuffer );
stream_.deviceBuffer = 0;
// Stop the callback thread, halt and close both device descriptors,
// free all stream buffers, and reset the stream to CLOSED.
void RtApiOss :: closeStream()
if ( stream_.state == STREAM_CLOSED ) {
errorText_ = "RtApiOss::closeStream(): no open stream to close!";
error( RtError::WARNING );
// Clearing isRunning makes ossCallbackHandler's loop exit; then wait
// for the thread to finish.
stream_.callbackInfo.isRunning = false;
pthread_join( stream_.callbackInfo.thread, NULL );
OssHandle *handle = (OssHandle *) stream_.apiHandle;
if ( stream_.state == STREAM_RUNNING ) {
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX )
ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
stream_.state = STREAM_STOPPED;
// Close the device descriptors and release the API handle.
if ( handle->id[0] ) close( handle->id[0] );
if ( handle->id[1] ) close( handle->id[1] );
stream_.apiHandle = 0;
// Free the user and device buffers.
for ( int i=0; i<2; i++ ) {
if ( stream_.userBuffer[i] ) {
free( stream_.userBuffer[i] );
stream_.userBuffer[i] = 0;
if ( stream_.deviceBuffer ) {
free( stream_.deviceBuffer );
stream_.deviceBuffer = 0;
stream_.mode = UNINITIALIZED;
stream_.state = STREAM_CLOSED;
// Mark the stream RUNNING under the stream mutex.  OSS needs no
// explicit start trigger here — playback begins when samples are fed.
void RtApiOss :: startStream()
if ( stream_.state == STREAM_RUNNING ) {
errorText_ = "RtApiOss::startStream(): the stream is already running!";
error( RtError::WARNING );
MUTEX_LOCK( &stream_.mutex );
stream_.state = STREAM_RUNNING;
// No need to do anything else here ... OSS automatically starts
// when fed samples.
MUTEX_UNLOCK( &stream_.mutex );
// Stop a running stream: flush the output with silence so queued data
// plays out, then halt the output and (if separate) input devices.
// Raises SYSTEM_ERROR if any halt ioctl failed.
void RtApiOss :: stopStream()
if ( stream_.state == STREAM_STOPPED ) {
errorText_ = "RtApiOss::stopStream(): the stream is already stopped!";
error( RtError::WARNING );
// Change the state before the lock to improve shutdown response
// when using a callback.
stream_.state = STREAM_STOPPED;
MUTEX_LOCK( &stream_.mutex );
OssHandle *handle = (OssHandle *) stream_.apiHandle;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Flush the output with zeros a few times.
RtAudioFormat format;
if ( stream_.doConvertBuffer[0] ) {
buffer = stream_.deviceBuffer;
samples = stream_.bufferSize * stream_.nDeviceChannels[0];
format = stream_.deviceFormat[0];
buffer = stream_.userBuffer[0];
samples = stream_.bufferSize * stream_.nUserChannels[0];
format = stream_.userFormat;
memset( buffer, 0, samples * formatBytes(format) );
// Write one zeroed buffer per fragment (plus one) to drain the queue.
for ( unsigned int i=0; i<stream_.nBuffers+1; i++ ) {
result = write( handle->id[0], buffer, samples * formatBytes(format) );
if ( result == -1 ) {
errorText_ = "RtApiOss::stopStream: audio write error.";
error( RtError::WARNING );
result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
if ( result == -1 ) {
errorStream_ << "RtApiOss::stopStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
errorText_ = errorStream_.str();
handle->triggered = false;
if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
if ( result == -1 ) {
// NOTE(review): this input-side message reports stream_.device[0] —
// presumably stream_.device[1] was intended; confirm.
errorStream_ << "RtApiOss::stopStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
errorText_ = errorStream_.str();
MUTEX_UNLOCK( &stream_.mutex );
stream_.state = STREAM_STOPPED;
if ( result != -1 ) return;
error( RtError::SYSTEM_ERROR );
// Abort a running stream: like stopStream() but halts immediately,
// without flushing the output queue with silence first.
void RtApiOss :: abortStream()
if ( stream_.state == STREAM_STOPPED ) {
errorText_ = "RtApiOss::abortStream(): the stream is already stopped!";
error( RtError::WARNING );
// Change the state before the lock to improve shutdown response
// when using a callback.
stream_.state = STREAM_STOPPED;
MUTEX_LOCK( &stream_.mutex );
OssHandle *handle = (OssHandle *) stream_.apiHandle;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
result = ioctl( handle->id[0], SNDCTL_DSP_HALT, 0 );
if ( result == -1 ) {
errorStream_ << "RtApiOss::abortStream: system error stopping callback procedure on device (" << stream_.device[0] << ").";
errorText_ = errorStream_.str();
handle->triggered = false;
if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && handle->id[0] != handle->id[1] ) ) {
result = ioctl( handle->id[1], SNDCTL_DSP_HALT, 0 );
if ( result == -1 ) {
// NOTE(review): this input-side message reports stream_.device[0] —
// presumably stream_.device[1] was intended; confirm.
errorStream_ << "RtApiOss::abortStream: system error stopping input callback procedure on device (" << stream_.device[0] << ").";
errorText_ = errorStream_.str();
MUTEX_UNLOCK( &stream_.mutex );
stream_.state = STREAM_STOPPED;
if ( result != -1 ) return;
error( RtError::SYSTEM_ERROR );
// One iteration of the OSS callback loop: invoke the user callback to
// produce/consume one buffer, then write converted output to the device
// and/or read input from it.  Called repeatedly by ossCallbackHandler.
void RtApiOss :: callbackEvent()
if ( stream_.state == STREAM_STOPPED ) {
// Stream is stopped but the thread is still alive: idle briefly.
if ( stream_.callbackInfo.isRunning ) usleep( 50000 ); // sleep 50 milliseconds
if ( stream_.state == STREAM_CLOSED ) {
errorText_ = "RtApiOss::callbackEvent(): the stream is closed ... this shouldn't happen!";
error( RtError::WARNING );
// Invoke user callback to get fresh output data.
int doStopStream = 0;
RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
double streamTime = getStreamTime();
RtAudioStreamStatus status = 0;
OssHandle *handle = (OssHandle *) stream_.apiHandle;
// Report (and clear) any xrun flags set by earlier write/read errors.
if ( stream_.mode != INPUT && handle->xrun[0] == true ) {
status |= RTAUDIO_OUTPUT_UNDERFLOW;
handle->xrun[0] = false;
if ( stream_.mode != OUTPUT && handle->xrun[1] == true ) {
status |= RTAUDIO_INPUT_OVERFLOW;
handle->xrun[1] = false;
doStopStream = callback( stream_.userBuffer[0], stream_.userBuffer[1],
stream_.bufferSize, streamTime, status, stream_.callbackInfo.userData );
MUTEX_LOCK( &stream_.mutex );
// The state might change while waiting on a mutex.
if ( stream_.state == STREAM_STOPPED ) goto unlock;
RtAudioFormat format;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
// Setup parameters and do buffer conversion if necessary.
if ( stream_.doConvertBuffer[0] ) {
buffer = stream_.deviceBuffer;
convertBuffer( buffer, stream_.userBuffer[0], stream_.convertInfo[0] );
samples = stream_.bufferSize * stream_.nDeviceChannels[0];
format = stream_.deviceFormat[0];
buffer = stream_.userBuffer[0];
samples = stream_.bufferSize * stream_.nUserChannels[0];
format = stream_.userFormat;
// Do byte swapping if necessary.
if ( stream_.doByteSwap[0] )
byteSwapBuffer( buffer, samples, format );
// First duplex pass: prime the device with one write while triggering
// is disabled, then enable input+output triggers simultaneously so
// both directions start in sync.
if ( stream_.mode == DUPLEX && handle->triggered == false ) {
ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
result = write( handle->id[0], buffer, samples * formatBytes(format) );
trig = PCM_ENABLE_INPUT|PCM_ENABLE_OUTPUT;
ioctl( handle->id[0], SNDCTL_DSP_SETTRIGGER, &trig );
handle->triggered = true;
// Write samples to device.
result = write( handle->id[0], buffer, samples * formatBytes(format) );
if ( result == -1 ) {
// We'll assume this is an underrun, though there isn't a
// specific means for determining that.
handle->xrun[0] = true;
errorText_ = "RtApiOss::callbackEvent: audio write error.";
error( RtError::WARNING );
if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
// Setup parameters.
if ( stream_.doConvertBuffer[1] ) {
buffer = stream_.deviceBuffer;
samples = stream_.bufferSize * stream_.nDeviceChannels[1];
format = stream_.deviceFormat[1];
buffer = stream_.userBuffer[1];
samples = stream_.bufferSize * stream_.nUserChannels[1];
format = stream_.userFormat;
// Read samples from device.
result = read( handle->id[1], buffer, samples * formatBytes(format) );
if ( result == -1 ) {
// We'll assume this is an overrun, though there isn't a
// specific means for determining that.
handle->xrun[1] = true;
errorText_ = "RtApiOss::callbackEvent: audio read error.";
error( RtError::WARNING );
// Do byte swapping if necessary.
if ( stream_.doByteSwap[1] )
byteSwapBuffer( buffer, samples, format );
// Do buffer conversion if necessary.
if ( stream_.doConvertBuffer[1] )
convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );
MUTEX_UNLOCK( &stream_.mutex );
// Advance the stream clock, then honor the callback's stop request
// (1 = drain via stopStream, 2 = immediate abortStream).
RtApi::tickStreamTime();
if ( doStopStream == 1 ) this->stopStream();
else if ( doStopStream == 2 ) this->abortStream();
6823 extern "C" void *ossCallbackHandler( void *ptr )
6825 CallbackInfo *info = (CallbackInfo *) ptr;
6826 RtApiOss *object = (RtApiOss *) info->object;
6827 bool *isRunning = &info->isRunning;
6830 // Set a higher scheduler priority (P.J. Leonard)
6831 struct sched_param param;
6832 param.sched_priority = 39; // Is this the best number?
6833 sched_setscheduler( 0, SCHED_RR, ¶m );
6836 while ( *isRunning == true ) {
6837 pthread_testcancel();
6838 object->callbackEvent();
6841 pthread_exit( NULL );
6844 //******************** End of __LINUX_OSS__ *********************//
6848 // *************************************************** //
6850 // Protected common (OS-independent) RtAudio methods.
6852 // *************************************************** //
6854 // This method can be modified to control the behavior of error
6855 // message printing.
// Central error reporter: WARNINGs are printed to stderr (when
// showWarnings_ is enabled); other error types are thrown as RtError.
// errorStream_ is cleared afterwards so messages do not accumulate.
void RtApi :: error( RtError::Type type )
if ( type == RtError::WARNING && showWarnings_ == true )
std::cerr << '\n' << errorText_ << "\n\n";
throw( RtError( errorText_, type ) );
errorStream_.str(""); // clear the ostringstream
// Guard used by the public stream methods: raise INVALID_USE if no
// stream is currently open.
void RtApi :: verifyStream()
if ( stream_.state == STREAM_CLOSED ) {
errorText_ = "RtApi:: a stream is not open!";
error( RtError::INVALID_USE );
// Reset every field of the stream structure to its default, including
// the per-direction ([0] = output, [1] = input) arrays and the buffer
// conversion info.  Called when (re)initializing a stream.
void RtApi :: clearStreamInfo()
stream_.mode = UNINITIALIZED;
stream_.state = STREAM_CLOSED;
stream_.sampleRate = 0;
stream_.bufferSize = 0;
stream_.nBuffers = 0;
stream_.userFormat = 0;
stream_.userInterleaved = true;
stream_.streamTime = 0.0;
stream_.apiHandle = 0;
stream_.deviceBuffer = 0;
stream_.callbackInfo.callback = 0;
stream_.callbackInfo.userData = 0;
stream_.callbackInfo.isRunning = false;
for ( int i=0; i<2; i++ ) {
stream_.device[i] = 0;
stream_.doConvertBuffer[i] = false;
stream_.deviceInterleaved[i] = true;
stream_.doByteSwap[i] = false;
stream_.nUserChannels[i] = 0;
stream_.nDeviceChannels[i] = 0;
stream_.channelOffset[i] = 0;
stream_.deviceFormat[i] = 0;
stream_.latency[i] = 0;
stream_.userBuffer[i] = 0;
stream_.convertInfo[i].channels = 0;
stream_.convertInfo[i].inJump = 0;
stream_.convertInfo[i].outJump = 0;
stream_.convertInfo[i].inFormat = 0;
stream_.convertInfo[i].outFormat = 0;
stream_.convertInfo[i].inOffset.clear();
stream_.convertInfo[i].outOffset.clear();
// Return the number of bytes occupied by one sample of the given
// format (note: per the comment in convertBuffer, 24-bit integers are
// assumed to occupy the upper three bytes of a 32-bit integer, so
// SINT24 is grouped with the 4-byte formats).  Unknown formats produce
// a WARNING.
unsigned int RtApi :: formatBytes( RtAudioFormat format )
if ( format == RTAUDIO_SINT16 )
else if ( format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
format == RTAUDIO_FLOAT32 )
else if ( format == RTAUDIO_FLOAT64 )
else if ( format == RTAUDIO_SINT8 )
errorText_ = "RtApi::formatBytes: undefined format.";
error( RtError::WARNING );
// Populate stream_.convertInfo[mode] for convertBuffer(): per-frame
// jumps, formats, channel count, and the per-channel input/output
// offsets that implement interleaving/deinterleaving and the
// firstChannel offset.
void RtApi :: setConvertInfo( StreamMode mode, unsigned int firstChannel )
if ( mode == INPUT ) { // convert device to user buffer
stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
stream_.convertInfo[mode].outFormat = stream_.userFormat;
else { // convert user to device buffer
stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
stream_.convertInfo[mode].inFormat = stream_.userFormat;
stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
// Convert only as many channels as both sides have in common.
if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
// Set up the interleave/deinterleave offsets.
if ( stream_.deviceInterleaved[mode] != stream_.userInterleaved ) {
if ( ( mode == OUTPUT && stream_.deviceInterleaved[mode] ) ||
( mode == INPUT && stream_.userInterleaved ) ) {
// Non-interleaved source -> interleaved destination: each source
// channel is a contiguous bufferSize-long plane (offset k*bufferSize).
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].outOffset.push_back( k );
stream_.convertInfo[mode].inJump = 1;
// Interleaved source -> non-interleaved destination.
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k );
stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].outJump = 1;
else { // no (de)interleaving
if ( stream_.userInterleaved ) {
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k );
stream_.convertInfo[mode].outOffset.push_back( k );
// Both sides planar: one bufferSize-long plane per channel.
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ ) {
stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
stream_.convertInfo[mode].inJump = 1;
stream_.convertInfo[mode].outJump = 1;
// Add channel offset.
if ( firstChannel > 0 ) {
if ( stream_.deviceInterleaved[mode] ) {
if ( mode == OUTPUT ) {
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].outOffset[k] += firstChannel;
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].inOffset[k] += firstChannel;
if ( mode == OUTPUT ) {
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].outOffset[k] += ( firstChannel * stream_.bufferSize );
for ( int k=0; k<stream_.convertInfo[mode].channels; k++ )
stream_.convertInfo[mode].inOffset[k] += ( firstChannel * stream_.bufferSize );
7007 void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
7009 // This function does format conversion, input/output channel compensation, and
7010 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
7011 // the upper three bytes of a 32-bit integer.
7013 // Clear our device buffer when in/out duplex device channels are different
7014 if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
7015 ( stream_.nDeviceChannels[0] < stream_.nDeviceChannels[1] ) )
7016 memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
7019 if (info.outFormat == RTAUDIO_FLOAT64) {
7021 Float64 *out = (Float64 *)outBuffer;
7023 if (info.inFormat == RTAUDIO_SINT8) {
7024 signed char *in = (signed char *)inBuffer;
7025 scale = 1.0 / 128.0;
7026 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7027 for (j=0; j<info.channels; j++) {
7028 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7029 out[info.outOffset[j]] *= scale;
7032 out += info.outJump;
7035 else if (info.inFormat == RTAUDIO_SINT16) {
7036 Int16 *in = (Int16 *)inBuffer;
7037 scale = 1.0 / 32768.0;
7038 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7039 for (j=0; j<info.channels; j++) {
7040 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7041 out[info.outOffset[j]] *= scale;
7044 out += info.outJump;
7047 else if (info.inFormat == RTAUDIO_SINT24) {
7048 Int32 *in = (Int32 *)inBuffer;
7049 scale = 1.0 / 8388608.0;
7050 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7051 for (j=0; j<info.channels; j++) {
7052 out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0x00ffffff);
7053 out[info.outOffset[j]] *= scale;
7056 out += info.outJump;
7059 else if (info.inFormat == RTAUDIO_SINT32) {
7060 Int32 *in = (Int32 *)inBuffer;
7061 scale = 1.0 / 2147483648.0;
7062 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7063 for (j=0; j<info.channels; j++) {
7064 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7065 out[info.outOffset[j]] *= scale;
7068 out += info.outJump;
7071 else if (info.inFormat == RTAUDIO_FLOAT32) {
7072 Float32 *in = (Float32 *)inBuffer;
7073 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7074 for (j=0; j<info.channels; j++) {
7075 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
7078 out += info.outJump;
7081 else if (info.inFormat == RTAUDIO_FLOAT64) {
7082 // Channel compensation and/or (de)interleaving only.
7083 Float64 *in = (Float64 *)inBuffer;
7084 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7085 for (j=0; j<info.channels; j++) {
7086 out[info.outOffset[j]] = in[info.inOffset[j]];
7089 out += info.outJump;
7093 else if (info.outFormat == RTAUDIO_FLOAT32) {
7095 Float32 *out = (Float32 *)outBuffer;
7097 if (info.inFormat == RTAUDIO_SINT8) {
7098 signed char *in = (signed char *)inBuffer;
7099 scale = 1.0 / 128.0;
7100 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7101 for (j=0; j<info.channels; j++) {
7102 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7103 out[info.outOffset[j]] *= scale;
7106 out += info.outJump;
7109 else if (info.inFormat == RTAUDIO_SINT16) {
7110 Int16 *in = (Int16 *)inBuffer;
7111 scale = 1.0 / 32768.0;
7112 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7113 for (j=0; j<info.channels; j++) {
7114 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7115 out[info.outOffset[j]] *= scale;
7118 out += info.outJump;
7121 else if (info.inFormat == RTAUDIO_SINT24) {
7122 Int32 *in = (Int32 *)inBuffer;
7123 scale = 1.0 / 8388608.0;
7124 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7125 for (j=0; j<info.channels; j++) {
7126 out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0x00ffffff);
7127 out[info.outOffset[j]] *= scale;
7130 out += info.outJump;
7133 else if (info.inFormat == RTAUDIO_SINT32) {
7134 Int32 *in = (Int32 *)inBuffer;
7135 scale = 1.0 / 2147483648.0;
7136 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7137 for (j=0; j<info.channels; j++) {
7138 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7139 out[info.outOffset[j]] *= scale;
7142 out += info.outJump;
7145 else if (info.inFormat == RTAUDIO_FLOAT32) {
7146 // Channel compensation and/or (de)interleaving only.
7147 Float32 *in = (Float32 *)inBuffer;
7148 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7149 for (j=0; j<info.channels; j++) {
7150 out[info.outOffset[j]] = in[info.inOffset[j]];
7153 out += info.outJump;
7156 else if (info.inFormat == RTAUDIO_FLOAT64) {
7157 Float64 *in = (Float64 *)inBuffer;
7158 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7159 for (j=0; j<info.channels; j++) {
7160 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
7163 out += info.outJump;
7167 else if (info.outFormat == RTAUDIO_SINT32) {
7168 Int32 *out = (Int32 *)outBuffer;
7169 if (info.inFormat == RTAUDIO_SINT8) {
7170 signed char *in = (signed char *)inBuffer;
7171 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7172 for (j=0; j<info.channels; j++) {
7173 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7174 out[info.outOffset[j]] <<= 24;
7177 out += info.outJump;
7180 else if (info.inFormat == RTAUDIO_SINT16) {
7181 Int16 *in = (Int16 *)inBuffer;
7182 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7183 for (j=0; j<info.channels; j++) {
7184 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7185 out[info.outOffset[j]] <<= 16;
7188 out += info.outJump;
7191 else if (info.inFormat == RTAUDIO_SINT24) {
7192 Int32 *in = (Int32 *)inBuffer;
7193 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7194 for (j=0; j<info.channels; j++) {
7195 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7196 out[info.outOffset[j]] <<= 8;
7199 out += info.outJump;
7202 else if (info.inFormat == RTAUDIO_SINT32) {
7203 // Channel compensation and/or (de)interleaving only.
7204 Int32 *in = (Int32 *)inBuffer;
7205 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7206 for (j=0; j<info.channels; j++) {
7207 out[info.outOffset[j]] = in[info.inOffset[j]];
7210 out += info.outJump;
7213 else if (info.inFormat == RTAUDIO_FLOAT32) {
7214 Float32 *in = (Float32 *)inBuffer;
7215 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7216 for (j=0; j<info.channels; j++) {
7217 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7220 out += info.outJump;
7223 else if (info.inFormat == RTAUDIO_FLOAT64) {
7224 Float64 *in = (Float64 *)inBuffer;
7225 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7226 for (j=0; j<info.channels; j++) {
7227 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7230 out += info.outJump;
7234 else if (info.outFormat == RTAUDIO_SINT24) {
7235 Int32 *out = (Int32 *)outBuffer;
7236 if (info.inFormat == RTAUDIO_SINT8) {
7237 signed char *in = (signed char *)inBuffer;
7238 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7239 for (j=0; j<info.channels; j++) {
7240 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7241 out[info.outOffset[j]] <<= 16;
7244 out += info.outJump;
7247 else if (info.inFormat == RTAUDIO_SINT16) {
7248 Int16 *in = (Int16 *)inBuffer;
7249 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7250 for (j=0; j<info.channels; j++) {
7251 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7252 out[info.outOffset[j]] <<= 8;
7255 out += info.outJump;
7258 else if (info.inFormat == RTAUDIO_SINT24) {
7259 // Channel compensation and/or (de)interleaving only.
7260 Int32 *in = (Int32 *)inBuffer;
7261 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7262 for (j=0; j<info.channels; j++) {
7263 out[info.outOffset[j]] = in[info.inOffset[j]];
7266 out += info.outJump;
7269 else if (info.inFormat == RTAUDIO_SINT32) {
7270 Int32 *in = (Int32 *)inBuffer;
7271 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7272 for (j=0; j<info.channels; j++) {
7273 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
7274 out[info.outOffset[j]] >>= 8;
7277 out += info.outJump;
7280 else if (info.inFormat == RTAUDIO_FLOAT32) {
7281 Float32 *in = (Float32 *)inBuffer;
7282 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7283 for (j=0; j<info.channels; j++) {
7284 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 8388608.0);
7287 out += info.outJump;
7290 else if (info.inFormat == RTAUDIO_FLOAT64) {
7291 Float64 *in = (Float64 *)inBuffer;
7292 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7293 for (j=0; j<info.channels; j++) {
7294 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
7297 out += info.outJump;
7301 else if (info.outFormat == RTAUDIO_SINT16) {
7302 Int16 *out = (Int16 *)outBuffer;
7303 if (info.inFormat == RTAUDIO_SINT8) {
7304 signed char *in = (signed char *)inBuffer;
7305 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7306 for (j=0; j<info.channels; j++) {
7307 out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
7308 out[info.outOffset[j]] <<= 8;
7311 out += info.outJump;
7314 else if (info.inFormat == RTAUDIO_SINT16) {
7315 // Channel compensation and/or (de)interleaving only.
7316 Int16 *in = (Int16 *)inBuffer;
7317 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7318 for (j=0; j<info.channels; j++) {
7319 out[info.outOffset[j]] = in[info.inOffset[j]];
7322 out += info.outJump;
7325 else if (info.inFormat == RTAUDIO_SINT24) {
7326 Int32 *in = (Int32 *)inBuffer;
7327 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7328 for (j=0; j<info.channels; j++) {
7329 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 8) & 0x0000ffff);
7332 out += info.outJump;
7335 else if (info.inFormat == RTAUDIO_SINT32) {
7336 Int32 *in = (Int32 *)inBuffer;
7337 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7338 for (j=0; j<info.channels; j++) {
7339 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
7342 out += info.outJump;
7345 else if (info.inFormat == RTAUDIO_FLOAT32) {
7346 Float32 *in = (Float32 *)inBuffer;
7347 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7348 for (j=0; j<info.channels; j++) {
7349 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
7352 out += info.outJump;
7355 else if (info.inFormat == RTAUDIO_FLOAT64) {
7356 Float64 *in = (Float64 *)inBuffer;
7357 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7358 for (j=0; j<info.channels; j++) {
7359 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
7362 out += info.outJump;
7366 else if (info.outFormat == RTAUDIO_SINT8) {
7367 signed char *out = (signed char *)outBuffer;
7368 if (info.inFormat == RTAUDIO_SINT8) {
7369 // Channel compensation and/or (de)interleaving only.
7370 signed char *in = (signed char *)inBuffer;
7371 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7372 for (j=0; j<info.channels; j++) {
7373 out[info.outOffset[j]] = in[info.inOffset[j]];
7376 out += info.outJump;
7379 if (info.inFormat == RTAUDIO_SINT16) {
7380 Int16 *in = (Int16 *)inBuffer;
7381 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7382 for (j=0; j<info.channels; j++) {
7383 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
7386 out += info.outJump;
7389 else if (info.inFormat == RTAUDIO_SINT24) {
7390 Int32 *in = (Int32 *)inBuffer;
7391 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7392 for (j=0; j<info.channels; j++) {
7393 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 16) & 0x000000ff);
7396 out += info.outJump;
7399 else if (info.inFormat == RTAUDIO_SINT32) {
7400 Int32 *in = (Int32 *)inBuffer;
7401 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7402 for (j=0; j<info.channels; j++) {
7403 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
7406 out += info.outJump;
7409 else if (info.inFormat == RTAUDIO_FLOAT32) {
7410 Float32 *in = (Float32 *)inBuffer;
7411 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7412 for (j=0; j<info.channels; j++) {
7413 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
7416 out += info.outJump;
7419 else if (info.inFormat == RTAUDIO_FLOAT64) {
7420 Float64 *in = (Float64 *)inBuffer;
7421 for (unsigned int i=0; i<stream_.bufferSize; i++) {
7422 for (j=0; j<info.channels; j++) {
7423 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
7426 out += info.outJump;
7432 void RtApi :: byteSwapBuffer( char *buffer, unsigned int samples, RtAudioFormat format )
7438 if ( format == RTAUDIO_SINT16 ) {
7439 for ( unsigned int i=0; i<samples; i++ ) {
7440 // Swap 1st and 2nd bytes.
7445 // Increment 2 bytes.
7449 else if ( format == RTAUDIO_SINT24 ||
7450 format == RTAUDIO_SINT32 ||
7451 format == RTAUDIO_FLOAT32 ) {
7452 for ( unsigned int i=0; i<samples; i++ ) {
7453 // Swap 1st and 4th bytes.
7458 // Swap 2nd and 3rd bytes.
7464 // Increment 4 bytes.
7468 else if ( format == RTAUDIO_FLOAT64 ) {
7469 for ( unsigned int i=0; i<samples; i++ ) {
7470 // Swap 1st and 8th bytes
7475 // Swap 2nd and 7th bytes
7481 // Swap 3rd and 6th bytes
7487 // Swap 4th and 5th bytes
7493 // Increment 8 bytes.
7499 // Indentation settings for Vim and Emacs
7502 // c-basic-offset: 2
7503 // indent-tabs-mode: nil
7506 // vim: et sts=2 sw=2