1 /************************************************************************/
3 \brief Realtime audio i/o C++ classes.
5 RtAudio provides a common API (Application Programming Interface)
6 for realtime audio input/output across Linux (native ALSA, Jack,
7 and OSS), SGI, Macintosh OS X (CoreAudio), and Windows
8 (DirectSound and ASIO) operating systems.
10 RtAudio WWW site: http://music.mcgill.ca/~gary/rtaudio/
12 RtAudio: realtime audio i/o C++ classes
13 Copyright (c) 2001-2005 Gary P. Scavone
15 Permission is hereby granted, free of charge, to any person
16 obtaining a copy of this software and associated documentation files
17 (the "Software"), to deal in the Software without restriction,
18 including without limitation the rights to use, copy, modify, merge,
19 publish, distribute, sublicense, and/or sell copies of the Software,
20 and to permit persons to whom the Software is furnished to do so,
21 subject to the following conditions:
23 The above copyright notice and this permission notice shall be
24 included in all copies or substantial portions of the Software.
26 Any person wishing to distribute modifications to the Software is
27 requested to send the modifications to the original developer so that
28 they can be incorporated into the canonical version.
30 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
31 EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
32 MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
33 IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
34 ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
35 CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
36 WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
38 /************************************************************************/
40 // RtAudio: Version 3.0.2 (14 October 2005)
42 // Modified by Robin Davies, 1 October 2005
43 // - Improvements to DirectX pointer chasing.
44 // - Backdoor RtDsStatistics hook provides DirectX performance information.
45 // - Bug fix for non-power-of-two Asio granularity used by Edirol PCR-A30.
46 // - Auto-call CoInitialize for DSOUND and ASIO platforms.
52 // Static variable definitions.
53 const unsigned int RtApi::MAX_SAMPLE_RATES = 14;
54 const unsigned int RtApi::SAMPLE_RATES[] = {
55 4000, 5512, 8000, 9600, 11025, 16000, 22050,
56 32000, 44100, 48000, 88200, 96000, 176400, 192000
// Platform-dependent mutex wrappers: Win32 critical sections for the
// DirectSound/ASIO builds, POSIX mutexes everywhere else.  The excerpt was
// missing the #else/#endif, which would make both sets define on Windows
// (macro redefinition) and leave the conditional unbalanced.
#if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__)
#define MUTEX_INITIALIZE(A) InitializeCriticalSection(A)
#define MUTEX_DESTROY(A) DeleteCriticalSection(A);
#define MUTEX_LOCK(A) EnterCriticalSection(A)
#define MUTEX_UNLOCK(A) LeaveCriticalSection(A)
#else // POSIX (pthreads)
#define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL)
#define MUTEX_DESTROY(A) pthread_mutex_destroy(A);
#define MUTEX_LOCK(A) pthread_mutex_lock(A)
#define MUTEX_UNLOCK(A) pthread_mutex_unlock(A)
#endif
71 // *************************************************** //
73 // Public common (OS-independent) methods.
75 // *************************************************** //
//! API-selection constructor: instantiates the RtApi subclass for the
//! requested (or best available) audio API.
//! NOTE(review): the body is elided in this excerpt — presumably it forwards
//! to initialize( api ); confirm against the full source.
RtAudio :: RtAudio( RtAudioApi api )
//! Convenience constructor: builds the API instance and immediately opens a
//! stream with a fixed buffer count (numberOfBuffers passed by value).
//! On failure the RtError from openStream is handled below so the partially
//! constructed RtApi instance can be released before re-throwing.
//! NOTE(review): several lines (body braces, initialize() call, the
//! format/sampleRate arguments, cleanup/re-throw) are elided in this excerpt.
RtAudio :: RtAudio( int outputDevice, int outputChannels,
                    int inputDevice, int inputChannels,
                    RtAudioFormat format, int sampleRate,
                    int *bufferSize, int numberOfBuffers, RtAudioApi api )
    rtapi_->openStream( outputDevice, outputChannels,
                        inputDevice, inputChannels,
                        bufferSize, numberOfBuffers );
  catch (RtError &exception) {
    // Deallocate the RtApi instance.
//! Convenience constructor variant: numberOfBuffers is passed by pointer so
//! the actual buffer count granted by the device can be written back to the
//! caller.  On failure the RtError is handled below so the RtApi instance
//! can be released before re-throwing.
//! NOTE(review): several lines (body braces, initialize() call, the
//! format/sampleRate arguments, cleanup/re-throw) are elided in this excerpt.
RtAudio :: RtAudio( int outputDevice, int outputChannels,
                    int inputDevice, int inputChannels,
                    RtAudioFormat format, int sampleRate,
                    int *bufferSize, int *numberOfBuffers, RtAudioApi api )
    rtapi_->openStream( outputDevice, outputChannels,
                        inputDevice, inputChannels,
                        bufferSize, numberOfBuffers );
  catch (RtError &exception) {
    // Deallocate the RtApi instance.
//! Destructor: releases the underlying RtApi instance (body elided in
//! this excerpt).
RtAudio :: ~RtAudio()
127 void RtAudio :: openStream( int outputDevice, int outputChannels,
128 int inputDevice, int inputChannels,
129 RtAudioFormat format, int sampleRate,
130 int *bufferSize, int numberOfBuffers )
132 rtapi_->openStream( outputDevice, outputChannels, inputDevice,
133 inputChannels, format, sampleRate,
134 bufferSize, numberOfBuffers );
137 void RtAudio :: openStream( int outputDevice, int outputChannels,
138 int inputDevice, int inputChannels,
139 RtAudioFormat format, int sampleRate,
140 int *bufferSize, int *numberOfBuffers )
142 rtapi_->openStream( outputDevice, outputChannels, inputDevice,
143 inputChannels, format, sampleRate,
144 bufferSize, *numberOfBuffers );
//! Select and construct the concrete RtApi implementation.
//! If 'api' names a specific API, only that compiled API is accepted;
//! otherwise compiled APIs are tried in order of preference (JACK/ASIO/AL/
//! CoreAudio, then ALSA/DirectSound, then OSS) until one reports devices.
//! Throws RtError if the requested API was not compiled in, or if no API
//! finds any devices.
//! NOTE(review): many structural lines (#endif's, body braces, try blocks,
//! rtapi_ initialization, early return after a specified api) are elided
//! in this excerpt — the control flow below is not complete as shown.
void RtAudio::initialize( RtAudioApi api )
  // First look for a compiled match to a specified API value. If one
  // of these constructors throws an error, it will be passed up the
  // inheritance chain.
#if defined(__LINUX_JACK__)
  if ( api == LINUX_JACK )
    rtapi_ = new RtApiJack();
#if defined(__LINUX_ALSA__)
  if ( api == LINUX_ALSA )
    rtapi_ = new RtApiAlsa();
#if defined(__LINUX_OSS__)
  if ( api == LINUX_OSS )
    rtapi_ = new RtApiOss();
#if defined(__WINDOWS_ASIO__)
  if ( api == WINDOWS_ASIO )
    rtapi_ = new RtApiAsio();
#if defined(__WINDOWS_DS__)
  if ( api == WINDOWS_DS )
    rtapi_ = new RtApiDs();
#if defined(__IRIX_AL__)
  if ( api == IRIX_AL )
    rtapi_ = new RtApiAl();
#if defined(__MACOSX_CORE__)
  if ( api == MACOSX_CORE )
    rtapi_ = new RtApiCore();
  // A specific API was requested and matched above.
  if ( rtapi_ ) return;
  // No compiled support for specified API value.
  throw RtError( "RtAudio: no compiled support for specified API argument!", RtError::INVALID_PARAMETER );
  // No specified API ... search for "best" option.
#if defined(__LINUX_JACK__)
  rtapi_ = new RtApiJack();
#elif defined(__WINDOWS_ASIO__)
  rtapi_ = new RtApiAsio();
#elif defined(__IRIX_AL__)
  rtapi_ = new RtApiAl();
#elif defined(__MACOSX_CORE__)
  rtapi_ = new RtApiCore();
#if defined(__RTAUDIO_DEBUG__)
  fprintf(stderr, "\nRtAudio: no devices found for first api option (JACK, ASIO, Al, or CoreAudio).\n\n");
  if ( rtapi_ ) return;
  // Try second API support
#if defined(__LINUX_ALSA__)
  rtapi_ = new RtApiAlsa();
#elif defined(__WINDOWS_DS__)
  rtapi_ = new RtApiDs();
#if defined(__RTAUDIO_DEBUG__)
  fprintf(stderr, "\nRtAudio: no devices found for second api option (Alsa or DirectSound).\n\n");
  if ( rtapi_ ) return;
  // Try third API support
#if defined(__LINUX_OSS__)
  rtapi_ = new RtApiOss();
  catch (RtError &error) {
  // All compiled APIs were tried and none found a device.
  throw RtError( "RtAudio: no devices found for compiled audio APIs!", RtError::NO_DEVICES_FOUND );
  // (RtApi constructor fragment) Reset the stream bookkeeping to a known
  // idle state and create the mutex that guards stream_ access.
  stream_.mode = UNINITIALIZED;      // no stream open yet
  stream_.state = STREAM_STOPPED;
  stream_.apiHandle = 0;             // no API-specific handle allocated
  MUTEX_INITIALIZE(&stream_.mutex);
  // (RtApi destructor fragment) Release the mutex created above.
  MUTEX_DESTROY(&stream_.mutex);
266 void RtApi :: openStream( int outputDevice, int outputChannels,
267 int inputDevice, int inputChannels,
268 RtAudioFormat format, int sampleRate,
269 int *bufferSize, int *numberOfBuffers )
271 this->openStream( outputDevice, outputChannels, inputDevice,
272 inputChannels, format, sampleRate,
273 bufferSize, *numberOfBuffers );
274 *numberOfBuffers = stream_.nBuffers;
//! Probe and open output and/or input devices for a stream.
//! Arguments are validated first (one open stream per instance, at least one
//! channel, a defined format, in-range device indices).  For each requested
//! direction: device index 0 means "try the default device first, then every
//! other device"; a positive index means "try only device-1".  Per-device
//! failure messages accumulate in errorMessages and are reported if nothing
//! opens.  Throws RtError on any failure.
//! NOTE(review): closing braces, the 'mode'/'channels' declarations, device
//! cycling statements inside the for loops, and the success return are
//! elided in this excerpt.
void RtApi :: openStream( int outputDevice, int outputChannels,
                          int inputDevice, int inputChannels,
                          RtAudioFormat format, int sampleRate,
                          int *bufferSize, int numberOfBuffers )
  // Only one stream may be open per RtApi instance.
  if ( stream_.mode != UNINITIALIZED ) {
    sprintf(message_, "RtApi: only one open stream allowed per class instance.");
    error(RtError::INVALID_STREAM);
  if (outputChannels < 1 && inputChannels < 1) {
    sprintf(message_,"RtApi: one or both 'channel' parameters must be greater than zero.");
    error(RtError::INVALID_PARAMETER);
  if ( formatBytes(format) == 0 ) {
    sprintf(message_,"RtApi: 'format' parameter value is undefined.");
    error(RtError::INVALID_PARAMETER);
  if ( outputChannels > 0 ) {
    if (outputDevice > nDevices_ || outputDevice < 0) {
      sprintf(message_,"RtApi: 'outputDevice' parameter value (%d) is invalid.", outputDevice);
      error(RtError::INVALID_PARAMETER);
  if ( inputChannels > 0 ) {
    if (inputDevice > nDevices_ || inputDevice < 0) {
      sprintf(message_,"RtApi: 'inputDevice' parameter value (%d) is invalid.", inputDevice);
      error(RtError::INVALID_PARAMETER);
  std::string errorMessages;  // accumulated per-device failure text
  bool result = FAILURE;
  int device, defaultDevice = 0;
  // --- Output direction ---
  if ( outputChannels > 0 ) {
    channels = outputChannels;
    if ( outputDevice == 0 ) { // Try default device first.
      defaultDevice = getDefaultOutputDevice();
      device = defaultDevice;
      // explicit device: convert 1-based index to 0-based
      device = outputDevice - 1;
    for ( int i=-1; i<nDevices_; i++ ) {
      // Skip the default device; it was already tried on the first pass.
      if ( i == defaultDevice ) continue;
      if ( devices_[device].probed == false ) {
        // If the device wasn't successfully probed before, try it
        clearDeviceInfo(&devices_[device]);
        probeDeviceInfo(&devices_[device]);
      if ( devices_[device].probed )
        result = probeDeviceOpen(device, mode, channels, sampleRate,
                                 format, bufferSize, numberOfBuffers);
      if ( result == SUCCESS ) break;
      errorMessages.append( " " );
      errorMessages.append( message_ );
      errorMessages.append( "\n" );
      // A specific device was requested: do not fall through to others.
      if ( outputDevice > 0 ) break;
  // --- Input direction (only if output succeeded or wasn't requested) ---
  if ( inputChannels > 0 && ( result == SUCCESS || outputChannels <= 0 ) ) {
    channels = inputChannels;
    if ( inputDevice == 0 ) { // Try default device first.
      defaultDevice = getDefaultInputDevice();
      device = defaultDevice;
      // explicit device: convert 1-based index to 0-based
      device = inputDevice - 1;
    for ( int i=-1; i<nDevices_; i++ ) {
      // Skip the default device; it was already tried on the first pass.
      if ( i == defaultDevice ) continue;
      if ( devices_[device].probed == false ) {
        // If the device wasn't successfully probed before, try it
        clearDeviceInfo(&devices_[device]);
        probeDeviceInfo(&devices_[device]);
      if ( devices_[device].probed )
        result = probeDeviceOpen( device, mode, channels, sampleRate,
                                  format, bufferSize, numberOfBuffers );
      if ( result == SUCCESS ) break;
      errorMessages.append( " " );
      errorMessages.append( message_ );
      errorMessages.append( "\n" );
      // A specific device was requested: do not fall through to others.
      if ( inputDevice > 0 ) break;
  if ( result == SUCCESS )
  // If we get here, all attempted probes failed. Close any opened
  // devices and clear the stream structure.
  if ( stream_.mode != UNINITIALIZED ) closeStream();
  if ( ( outputDevice == 0 && outputChannels > 0 )
       || ( inputDevice == 0 && inputChannels > 0 ) )
    sprintf(message_,"RtApi: no devices found for given stream parameters: \n%s",
            errorMessages.c_str());
    sprintf(message_,"RtApi: unable to open specified device(s) with given stream parameters: \n%s",
            errorMessages.c_str());
  error(RtError::INVALID_PARAMETER);
405 int RtApi :: getDeviceCount(void)
407 return devices_.size();
410 RtApi::StreamState RtApi :: getStreamState( void ) const
412 return stream_.state;
//! Return an RtAudioDeviceInfo snapshot for the 1-based device index
//! 'device'.  Re-probes the device if it was not successfully probed
//! earlier.  Throws RtError::INVALID_DEVICE for an out-of-range index.
//! NOTE(review): closing braces and the final 'return info;' are elided in
//! this excerpt.
RtAudioDeviceInfo RtApi :: getDeviceInfo( int device )
  // Validate the 1-based device index.
  if (device > (int) devices_.size() || device < 1) {
    sprintf(message_, "RtApi: invalid device specifier (%d)!", device);
    error(RtError::INVALID_DEVICE);
  RtAudioDeviceInfo info;
  int deviceIndex = device - 1;  // internal storage is 0-based
  // If the device wasn't successfully probed before, try it now (or again).
  if (devices_[deviceIndex].probed == false) {
    clearDeviceInfo(&devices_[deviceIndex]);
    probeDeviceInfo(&devices_[deviceIndex]);
  // Copy the cached capabilities into the public info structure.
  info.name.append( devices_[deviceIndex].name );
  info.probed = devices_[deviceIndex].probed;
  if ( info.probed == true ) {
    info.outputChannels = devices_[deviceIndex].maxOutputChannels;
    info.inputChannels = devices_[deviceIndex].maxInputChannels;
    info.duplexChannels = devices_[deviceIndex].maxDuplexChannels;
    for (unsigned int i=0; i<devices_[deviceIndex].sampleRates.size(); i++)
      info.sampleRates.push_back( devices_[deviceIndex].sampleRates[i] );
    info.nativeFormats = devices_[deviceIndex].nativeFormats;
    if ( (deviceIndex == getDefaultOutputDevice()) ||
         (deviceIndex == getDefaultInputDevice()) )
      info.isDefault = true;
//! Return a pointer to the stream's interleaved user buffer (for use with
//! the blocking read/write API).
//! NOTE(review): a statement between the signature and the return (likely a
//! stream-verification call) appears elided in this excerpt.
char * const RtApi :: getStreamBuffer(void)
  return stream_.userBuffer;
454 int RtApi :: getDefaultInputDevice(void)
456 // Should be implemented in subclasses if appropriate.
460 int RtApi :: getDefaultOutputDevice(void)
462 // Should be implemented in subclasses if appropriate.
466 void RtApi :: closeStream(void)
468 // MUST be implemented in subclasses!
471 void RtApi :: probeDeviceInfo( RtApiDevice *info )
473 // MUST be implemented in subclasses!
476 bool RtApi :: probeDeviceOpen( int device, StreamMode mode, int channels,
477 int sampleRate, RtAudioFormat format,
478 int *bufferSize, int numberOfBuffers )
480 // MUST be implemented in subclasses!
485 // *************************************************** //
487 // OS/API-specific methods.
489 // *************************************************** //
491 #if defined(__LINUX_OSS__)
494 #include <sys/stat.h>
495 #include <sys/types.h>
496 #include <sys/ioctl.h>
499 #include <sys/soundcard.h>
#define DAC_NAME "/dev/dsp"   // canonical default OSS device node
#define MAX_DEVICES 16        // upper bound for the brute-force device scan
#define MAX_CHANNELS 16       // upper bound for brute-force channel probing
// C-linkage thread entry point for the OSS callback thread.
extern "C" void *ossCallbackHandler(void * ptr);
//! Constructor: scans for OSS devices and throws if none are found.
//! NOTE(review): the body braces and the initialize() call that populates
//! nDevices_ are elided in this excerpt.
RtApiOss :: RtApiOss()
  if (nDevices_ <= 0) {
    sprintf(message_, "RtApiOss: no Linux OSS audio devices found!");
    error(RtError::NO_DEVICES_FOUND);
//! Destructor: closes any stream that is still open (the closeStream()
//! call itself is elided in this excerpt).
RtApiOss :: ~RtApiOss()
  if ( stream_.mode != UNINITIALIZED )
//! Enumerate OSS devices by brute force: resolve the /dev/dsp symlink (if
//! any) so the linked device is not counted twice, then try to open
//! /dev/dsp and /dev/dsp0../dev/dsp15 for playback or capture, recording
//! each device that opens (or is merely busy) in devices_.
//! NOTE(review): body braces, local declarations (dspstat, dsplink, fd,
//! the RtApiDevice 'device' temp) and several control-flow lines are
//! elided in this excerpt.
void RtApiOss :: initialize(void)
  // Count cards and devices
  // We check /dev/dsp before probing devices. /dev/dsp is supposed to
  // be a link to the "default" audio device, of the form /dev/dsp0,
  // /dev/dsp1, etc... However, I've seen many cases where /dev/dsp was a
  // real device, so we need to check for that. Also, sometimes the
  // link is to /dev/dspx and other times just dspx. I'm not sure how
  // the latter works, but it does.
  char device_name[16];
  if (lstat(DAC_NAME, &dspstat) == 0) {
    if (S_ISLNK(dspstat.st_mode)) {
      i = readlink(DAC_NAME, device_name, sizeof(device_name));
        device_name[i] = '\0';
        if (i > 8) { // check for "/dev/dspx"
          if (!strncmp(DAC_NAME, device_name, 8))
            dsplink = atoi(&device_name[8]);
        else if (i > 3) { // check for "dspx"
          if (!strncmp("dsp", device_name, 3))
            dsplink = atoi(&device_name[3]);
        sprintf(message_, "RtApiOss: cannot read value of symbolic link %s.", DAC_NAME);
        error(RtError::SYSTEM_ERROR);
    sprintf(message_, "RtApiOss: cannot stat %s.", DAC_NAME);
    error(RtError::SYSTEM_ERROR);
  // The OSS API doesn't provide a routine for determining the number
  // of devices. Thus, we'll just pursue a brute force method. The
  // idea is to start with /dev/dsp(0) and continue with higher device
  // numbers until we reach MAX_DSP_DEVICES. This should tell us how
  // many devices we have ... it is not a foolproof scheme, but hopefully
  // it will work most of the time.
  for (i=-1; i<MAX_DEVICES; i++) {
    // Probe /dev/dsp first, since it is supposed to be the default device.
      sprintf(device_name, "%s", DAC_NAME);
    else if (i == dsplink)
      continue; // We've already probed this device via /dev/dsp link ... try next device.
      sprintf(device_name, "%s%d", DAC_NAME, i);
    // First try to open the device for playback, then record mode.
    fd = open(device_name, O_WRONLY | O_NONBLOCK);
      // Open device for playback failed ... either busy or doesn't exist.
      if (errno != EBUSY && errno != EAGAIN) {
        // Try to open for capture
        fd = open(device_name, O_RDONLY | O_NONBLOCK);
          // Open device for record failed.
          if (errno != EBUSY && errno != EAGAIN)
          sprintf(message_, "RtApiOss: OSS record device (%s) is busy.", device_name);
          error(RtError::WARNING);
          // still count it for now
        sprintf(message_, "RtApiOss: OSS playback device (%s) is busy.", device_name);
        error(RtError::WARNING);
        // still count it for now
    if (fd >= 0) close(fd);
    // NOTE(review): appending strlen()+1 bytes embeds the trailing NUL in
    // the std::string; kept as-is to preserve behavior.
    device.name.append( (const char *)device_name, strlen(device_name)+1);
    devices_.push_back(device);
//! Brute-force capability probe of an OSS device: determine min/max
//! playback, capture and duplex channel counts, the supported sample
//! formats, and the supported sample rates, storing everything in 'info'.
//! Uses goto probe_parameters to skip ahead once channel probing is done.
//! NOTE(review): body braces, several close(fd) calls, the probe_parameters
//! label, early returns after warnings, and a few assignments (e.g. the
//! 'i = format;' seed before SNDCTL_DSP_SETFMT) are elided in this excerpt.
void RtApiOss :: probeDeviceInfo(RtApiDevice *info)
  int i, fd, channels, mask;
  // The OSS API doesn't provide a means for probing the capabilities
  // of devices. Thus, we'll just pursue a brute force method.
  // First try for playback
  fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
    // Open device failed ... either busy or doesn't exist
    if (errno == EBUSY || errno == EAGAIN)
      sprintf(message_, "RtApiOss: OSS playback device (%s) is busy and cannot be probed.",
      sprintf(message_, "RtApiOss: OSS playback device (%s) open error.", info->name.c_str());
    error(RtError::DEBUG_WARNING);
  // We have an open device ... see how many channels it can handle
  for (i=MAX_CHANNELS; i>0; i--) {
    if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1) {
      // This would normally indicate some sort of hardware error, but under ALSA's
      // OSS emulation, it sometimes indicates an invalid channel value. Further,
      // the returned channel value is not changed. So, we'll ignore the possible
      continue; // try next channel number
    // Check to see whether the device supports the requested number of channels
    if (channels != i ) continue; // try next channel number
    // If here, we found the largest working channel value
  info->maxOutputChannels = i;
  // Now find the minimum number of channels it can handle
  for (i=1; i<=info->maxOutputChannels; i++) {
    if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
      continue; // try next channel number
    // If here, we found the smallest working channel value
  info->minOutputChannels = i;
  // Now try for capture
  fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
    // Open device for capture failed ... either busy or doesn't exist
    if (errno == EBUSY || errno == EAGAIN)
      sprintf(message_, "RtApiOss: OSS capture device (%s) is busy and cannot be probed.",
      sprintf(message_, "RtApiOss: OSS capture device (%s) open error.", info->name.c_str());
    error(RtError::DEBUG_WARNING);
    if (info->maxOutputChannels == 0)
      // didn't open for playback either ... device invalid
    goto probe_parameters;
  // We have the device open for capture ... see how many channels it can handle
  for (i=MAX_CHANNELS; i>0; i--) {
    if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
      continue; // as above
    // If here, we found a working channel value
  info->maxInputChannels = i;
  // Now find the minimum number of channels it can handle
  for (i=1; i<=info->maxInputChannels; i++) {
    if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
      continue; // try next channel number
    // If here, we found the smallest working channel value
  info->minInputChannels = i;
  if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
    sprintf(message_, "RtApiOss: device (%s) reports zero channels for input and output.",
    error(RtError::DEBUG_WARNING);
  // If device opens for both playback and capture, we determine the channels.
  if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
    goto probe_parameters;
  fd = open(info->name.c_str(), O_RDWR | O_NONBLOCK);
    goto probe_parameters;
  ioctl(fd, SNDCTL_DSP_SETDUPLEX, 0);
  ioctl(fd, SNDCTL_DSP_GETCAPS, &mask);
  if (mask & DSP_CAP_DUPLEX) {
    info->hasDuplexSupport = true;
    // We have the device open for duplex ... see how many channels it can handle
    for (i=MAX_CHANNELS; i>0; i--) {
      if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
        continue; // as above
      // If here, we found a working channel value
    info->maxDuplexChannels = i;
    // Now find the minimum number of channels it can handle
    for (i=1; i<=info->maxDuplexChannels; i++) {
      if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
        continue; // try next channel number
      // If here, we found the smallest working channel value
    info->minDuplexChannels = i;
  // At this point, we need to figure out the supported data formats
  // and sample rates. We'll proceed by opening the device in the
  // direction with the maximum number of channels, or playback if
  // they are equal. This might limit our sample rate options, but so
  if (info->maxOutputChannels >= info->maxInputChannels) {
    fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
    channels = info->maxOutputChannels;
    fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
    channels = info->maxInputChannels;
    // We've got some sort of conflict ... abort
    sprintf(message_, "RtApiOss: device (%s) won't reopen during probe.",
    error(RtError::DEBUG_WARNING);
  // We have an open device ... set to maximum channels.
  if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
    // We've got some sort of conflict ... abort
    sprintf(message_, "RtApiOss: device (%s) won't revert to previous channel setting.",
    error(RtError::DEBUG_WARNING);
  if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
    sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
    error(RtError::DEBUG_WARNING);
  // Probe the supported data formats ... we don't care about endian-ness just yet.
  info->nativeFormats = 0;
#if defined (AFMT_S32_BE)
  // This format does not seem to be in the 2.4 kernel version of OSS soundcard.h
  if (mask & AFMT_S32_BE) {
    format = AFMT_S32_BE;
    info->nativeFormats |= RTAUDIO_SINT32;
#if defined (AFMT_S32_LE)
  /* This format is not in the 2.4.4 kernel version of OSS soundcard.h */
  if (mask & AFMT_S32_LE) {
    format = AFMT_S32_LE;
    info->nativeFormats |= RTAUDIO_SINT32;
  if (mask & AFMT_S8) {
    info->nativeFormats |= RTAUDIO_SINT8;
  if (mask & AFMT_S16_BE) {
    format = AFMT_S16_BE;
    info->nativeFormats |= RTAUDIO_SINT16;
  if (mask & AFMT_S16_LE) {
    format = AFMT_S16_LE;
    info->nativeFormats |= RTAUDIO_SINT16;
  // Check that we have at least one supported format
  if (info->nativeFormats == 0) {
    sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
    error(RtError::DEBUG_WARNING);
  if (ioctl(fd, SNDCTL_DSP_SETFMT, &format) == -1 || format != i) {
    sprintf(message_, "RtApiOss: device (%s) error setting data format.",
    error(RtError::DEBUG_WARNING);
  // Probe the supported sample rates.
  info->sampleRates.clear();
  for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
    int speed = SAMPLE_RATES[k];
    // Keep the rate only if the driver accepted it exactly.
    if (ioctl(fd, SNDCTL_DSP_SPEED, &speed) != -1 && speed == (int)SAMPLE_RATES[k])
      info->sampleRates.push_back(speed);
  if (info->sampleRates.size() == 0) {
    sprintf(message_, "RtApiOss: no supported sample rates found for device (%s).",
    error(RtError::DEBUG_WARNING);
  // That's all ... close the device and return
//! Open the OSS device for the given mode and negotiate data format,
//! channel count, fragment (buffer) size and sample rate; allocates the
//! user/device buffers and fills in stream_'s conversion bookkeeping.
//! For OSS, full duplex on one device requires closing the playback-only
//! descriptor and reopening O_RDWR with identical channel counts.
//! NOTE(review): this function continues past the end of this excerpt, and
//! many structural lines (braces, error returns, close(fd) cleanup, local
//! declarations such as fd/mask/temp/srate) are elided throughout.
bool RtApiOss :: probeDeviceOpen(int device, StreamMode mode, int channels,
                                 int sampleRate, RtAudioFormat format,
                                 int *bufferSize, int numberOfBuffers)
  int buffers, buffer_bytes, device_channels, device_format;
  int *handle = (int *) stream_.apiHandle;
  const char *name = devices_[device].name.c_str();
  // --- First open: non-blocking, to test availability ---
    fd = open(name, O_WRONLY | O_NONBLOCK);
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device) {
      // We just set the same device for playback ... close and reopen for duplex (OSS only).
      // First check that the number previously set channels is the same.
      if (stream_.nUserChannels[0] != channels) {
        sprintf(message_, "RtApiOss: input/output channels must be equal for OSS duplex device (%s).", name);
      fd = open(name, O_RDWR | O_NONBLOCK);
      fd = open(name, O_RDONLY | O_NONBLOCK);
    if (errno == EBUSY || errno == EAGAIN)
      sprintf(message_, "RtApiOss: device (%s) is busy and cannot be opened.",
      sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
  // Now reopen in blocking mode.
    fd = open(name, O_WRONLY | O_SYNC);
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device)
      fd = open(name, O_RDWR | O_SYNC);
      fd = open(name, O_RDONLY | O_SYNC);
    sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
  // Get the sample format mask
  if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
    sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
  // Determine how to set the device format.
  stream_.userFormat = format;
  stream_.doByteSwap[mode] = false;
  // Prefer the native-endian variant; fall back to the other endianness
  // with byte swapping enabled.
  if (format == RTAUDIO_SINT8) {
    if (mask & AFMT_S8) {
      device_format = AFMT_S8;
      stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  else if (format == RTAUDIO_SINT16) {
    if (mask & AFMT_S16_NE) {
      device_format = AFMT_S16_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S16_BE) {
      device_format = AFMT_S16_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    else if (mask & AFMT_S16_LE) {
      device_format = AFMT_S16_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
#if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
  else if (format == RTAUDIO_SINT32) {
    if (mask & AFMT_S32_NE) {
      device_format = AFMT_S32_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S32_BE) {
      device_format = AFMT_S32_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    else if (mask & AFMT_S32_LE) {
      device_format = AFMT_S32_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
  if (device_format == -1) {
    // The user requested format is not natively supported by the device.
    if (mask & AFMT_S16_NE) {
      device_format = AFMT_S16_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S16_BE) {
      device_format = AFMT_S16_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    else if (mask & AFMT_S16_LE) {
      device_format = AFMT_S16_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
#if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
    else if (mask & AFMT_S32_NE) {
      device_format = AFMT_S32_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S32_BE) {
      device_format = AFMT_S32_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    else if (mask & AFMT_S32_LE) {
      device_format = AFMT_S32_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    else if (mask & AFMT_S8) {
      device_format = AFMT_S8;
      stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  if (stream_.deviceFormat[mode] == 0) {
    // This really shouldn't happen ...
    sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
  // Determine the number of channels for this device. Note that the
  // channel value requested by the user might be < min_X_Channels.
  stream_.nUserChannels[mode] = channels;
  device_channels = channels;
  if (mode == OUTPUT) {
    if (channels < devices_[device].minOutputChannels)
      device_channels = devices_[device].minOutputChannels;
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device) {
      // We're doing duplex setup here.
      if (channels < devices_[device].minDuplexChannels)
        device_channels = devices_[device].minDuplexChannels;
      if (channels < devices_[device].minInputChannels)
        device_channels = devices_[device].minInputChannels;
  stream_.nDeviceChannels[mode] = device_channels;
  // Attempt to set the buffer size. According to OSS, the minimum
  // number of buffers is two. The supposed minimum buffer size is 16
  // bytes, so that will be our lower bound. The argument to this
  // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
  // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
  // We'll check the actual value used near the end of the setup
  buffer_bytes = *bufferSize * formatBytes(stream_.deviceFormat[mode]) * device_channels;
  if (buffer_bytes < 16) buffer_bytes = 16;
  buffers = numberOfBuffers;
  if (buffers < 2) buffers = 2;
  // log10(x)/log10(2) == log2(x): encode size as a power-of-two exponent.
  temp = ((int) buffers << 16) + (int)(log10((double)buffer_bytes)/log10(2.0));
  if (ioctl(fd, SNDCTL_DSP_SETFRAGMENT, &temp)) {
    sprintf(message_, "RtApiOss: error setting fragment size for device (%s).",
  stream_.nBuffers = buffers;
  // Set the data format.
  temp = device_format;
  if (ioctl(fd, SNDCTL_DSP_SETFMT, &device_format) == -1 || device_format != temp) {
    sprintf(message_, "RtApiOss: error setting data format for device (%s).",
  // Set the number of channels.
  temp = device_channels;
  if (ioctl(fd, SNDCTL_DSP_CHANNELS, &device_channels) == -1 || device_channels != temp) {
    sprintf(message_, "RtApiOss: error setting %d channels on device (%s).",
  // Set the sample rate.
  if (ioctl(fd, SNDCTL_DSP_SPEED, &srate) == -1) {
    sprintf(message_, "RtApiOss: error setting sample rate = %d on device (%s).",
  // Verify the sample rate setup worked.
  // Allow up to 100 Hz of driver rounding before declaring failure.
  if (abs(srate - temp) > 100) {
    sprintf(message_, "RtApiOss: error ... audio device (%s) doesn't support sample rate of %d.",
  stream_.sampleRate = sampleRate;
  if (ioctl(fd, SNDCTL_DSP_GETBLKSIZE, &buffer_bytes) == -1) {
    sprintf(message_, "RtApiOss: error getting buffer size for device (%s).",
  // Save buffer size (in sample frames).
  *bufferSize = buffer_bytes / (formatBytes(stream_.deviceFormat[mode]) * device_channels);
  stream_.bufferSize = *bufferSize;
  if (mode == INPUT && stream_.mode == OUTPUT &&
      stream_.device[0] == device) {
    // We're doing duplex setup here.
    stream_.deviceFormat[0] = stream_.deviceFormat[1];
    stream_.nDeviceChannels[0] = device_channels;
  // Allocate the stream handles if necessary and then save.
  if ( stream_.apiHandle == 0 ) {
    handle = (int *) calloc(2, sizeof(int));
    stream_.apiHandle = (void *) handle;
    handle = (int *) stream_.apiHandle;
  // Set flags for buffer conversion
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
    stream_.doConvertBuffer[mode] = true;
  // Allocate necessary internal buffers
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
    // Size the user buffer by the larger of the two channel counts.
    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
      buffer_bytes = stream_.nUserChannels[1];
    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      sprintf(message_, "RtApiOss: error allocating user buffer memory (%s).",
  if ( stream_.doConvertBuffer[mode] ) {
    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        // Reuse the existing (larger) device buffer when possible.
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;
      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        sprintf(message_, "RtApiOss: error allocating device buffer memory (%s).",
  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;
  if ( stream_.mode == OUTPUT && mode == INPUT ) {
    // Opening input after output on the same/another device => duplex.
    stream_.mode = DUPLEX;
    if (stream_.device[0] == device)
    stream_.mode = mode;
  // Setup the buffer conversion information structure.
  if ( stream_.doConvertBuffer[mode] ) {
    if (mode == INPUT) { // convert device to user buffer
      stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
      stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
      stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
      stream_.convertInfo[mode].outFormat = stream_.userFormat;
    else { // convert user to device buffer
      stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
      stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
      stream_.convertInfo[mode].inFormat = stream_.userFormat;
      stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
    // The conversion touches min(inJump, outJump) channels.
    if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
      stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
      stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
    // Set up the interleave/deinterleave offsets.
    if ( mode == INPUT && stream_.deInterleave[1] ) {
      for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
        stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
        stream_.convertInfo[mode].outOffset.push_back( k );
        stream_.convertInfo[mode].inJump = 1;
    else if (mode == OUTPUT && stream_.deInterleave[0]) {
      for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
        stream_.convertInfo[mode].inOffset.push_back( k );
        stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
        stream_.convertInfo[mode].outJump = 1;
      // Both sides interleaved: identity channel offsets.
      for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
        stream_.convertInfo[mode].inOffset.push_back( k );
        stream_.convertInfo[mode].outOffset.push_back( k );
1246 stream_.apiHandle = 0;
1249 if (stream_.userBuffer) {
1250 free(stream_.userBuffer);
1251 stream_.userBuffer = 0;
1254 error(RtError::DEBUG_WARNING);
// Close the OSS stream: reset the device(s) if still running, stop the
// callback thread, close both device file descriptors, free the user and
// device buffers, and mark the stream UNINITIALIZED.
// Reports problems as warnings (not exceptions) because this is also
// invoked from the class destructor.
// NOTE(review): this listing elides lines (numbering is non-contiguous);
// closing braces / early returns live in the elided lines.
1258 void RtApiOss :: closeStream()
1260 // We don't want an exception to be thrown here because this
1261 // function is called by our class destructor. So, do our own
1263 if ( stream_.mode == UNINITIALIZED ) {
1264 sprintf(message_, "RtApiOss::closeStream(): no open stream to close!");
1265 error(RtError::WARNING);
// handle[0] = playback fd, handle[1] = capture fd (allocated in probeDeviceOpen).
1269 int *handle = (int *) stream_.apiHandle;
1270 if (stream_.state == STREAM_RUNNING) {
1271 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
1272 ioctl(handle[0], SNDCTL_DSP_RESET, 0);
1274 ioctl(handle[1], SNDCTL_DSP_RESET, 0);
1275 stream_.state = STREAM_STOPPED;
// Clearing usingCallback lets the callback thread's loop exit; then join it.
1278 if (stream_.callbackInfo.usingCallback) {
1279 stream_.callbackInfo.usingCallback = false;
1280 pthread_join(stream_.callbackInfo.thread, NULL);
1284 if (handle[0]) close(handle[0]);
1285 if (handle[1]) close(handle[1]);
1287 stream_.apiHandle = 0;
1290 if (stream_.userBuffer) {
1291 free(stream_.userBuffer);
1292 stream_.userBuffer = 0;
1295 if (stream_.deviceBuffer) {
1296 free(stream_.deviceBuffer);
1297 stream_.deviceBuffer = 0;
1300 stream_.mode = UNINITIALIZED;
// Mark the stream RUNNING under the stream mutex. No device call is
// needed: OSS begins playback/capture implicitly on the first
// write()/read() issued by tickStream().
1303 void RtApiOss :: startStream()
1306 if (stream_.state == STREAM_RUNNING) return;
1308 MUTEX_LOCK(&stream_.mutex);
1310 stream_.state = STREAM_RUNNING;
1312 // No need to do anything else here ... OSS automatically starts
1313 // when fed samples.
1315 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream by issuing SNDCTL_DSP_POST on the open handle(s).
// The state flag is flipped BEFORE taking the mutex so a callback thread
// blocked in tickStream() sees the stop promptly.
// NOTE(review): listing elides lines (non-contiguous numbering); the
// err-check conditionals around the sprintf/error calls are elided.
1318 void RtApiOss :: stopStream()
1321 if (stream_.state == STREAM_STOPPED) return;
1323 // Change the state before the lock to improve shutdown response
1324 // when using a callback.
1325 stream_.state = STREAM_STOPPED;
1326 MUTEX_LOCK(&stream_.mutex);
1329 int *handle = (int *) stream_.apiHandle;
1330 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
1331 err = ioctl(handle[0], SNDCTL_DSP_POST, 0);
1332 //err = ioctl(handle[0], SNDCTL_DSP_SYNC, 0);
1334 sprintf(message_, "RtApiOss: error stopping device (%s).",
1335 devices_[stream_.device[0]].name.c_str());
1336 error(RtError::DRIVER_ERROR);
1340 err = ioctl(handle[1], SNDCTL_DSP_POST, 0);
1341 //err = ioctl(handle[1], SNDCTL_DSP_SYNC, 0);
1343 sprintf(message_, "RtApiOss: error stopping device (%s).",
1344 devices_[stream_.device[1]].name.c_str());
1345 error(RtError::DRIVER_ERROR);
1349 MUTEX_UNLOCK(&stream_.mutex);
// Abort the stream. Body elided in this listing (lines 1353+ missing);
// presumably it stops the stream immediately without draining — TODO
// confirm against the full source.
1352 void RtApiOss :: abortStream()
// Return the number of frames that can be processed without blocking,
// queried via SNDCTL_DSP_GETOSPACE (output) / GETISPACE (input).
// For DUPLEX the smaller byte count of the two directions governs.
// NOTE(review): line 1385 divides by formatBytes(deviceFormat[0]) even
// when only INPUT is open — looks suspicious for input-only streams;
// confirm deviceFormat[0] is valid there before relying on it.
1357 int RtApiOss :: streamWillBlock()
1360 if (stream_.state == STREAM_STOPPED) return 0;
1362 MUTEX_LOCK(&stream_.mutex);
1364 int bytes = 0, channels = 0, frames = 0;
1365 audio_buf_info info;
1366 int *handle = (int *) stream_.apiHandle;
1367 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
1368 ioctl(handle[0], SNDCTL_DSP_GETOSPACE, &info);
1370 channels = stream_.nDeviceChannels[0];
1373 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
1374 ioctl(handle[1], SNDCTL_DSP_GETISPACE, &info);
1375 if (stream_.mode == DUPLEX ) {
// Duplex: limited by whichever direction has less space available.
1376 bytes = (bytes < info.bytes) ? bytes : info.bytes;
1377 channels = stream_.nDeviceChannels[0];
1381 channels = stream_.nDeviceChannels[1];
1385 frames = (int) (bytes / (channels * formatBytes(stream_.deviceFormat[0])));
// Subtract one buffer's worth: that much is consumed by the next tick.
1386 frames -= stream_.bufferSize;
1387 if (frames < 0) frames = 0;
1389 MUTEX_UNLOCK(&stream_.mutex);
// Process one buffer of audio (blocking): invoke the user callback (if
// any), convert/byte-swap the output buffer and write() it to the
// playback fd, then read() from the capture fd and byte-swap/convert
// into the user buffer. All device I/O happens under the stream mutex.
// NOTE(review): listing elides lines (non-contiguous numbering) —
// declarations of `buffer`/`samples`/`stopStream`, several braces and
// result checks live in the elided lines.
1393 void RtApiOss :: tickStream()
1398 if (stream_.state == STREAM_STOPPED) {
// Idle politely while stopped so a callback thread doesn't spin.
1399 if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
1402 else if (stream_.callbackInfo.usingCallback) {
1403 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
1404 stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
1407 MUTEX_LOCK(&stream_.mutex);
1409 // The state might change while waiting on a mutex.
1410 if (stream_.state == STREAM_STOPPED)
1413 int result, *handle;
1416 RtAudioFormat format;
1417 handle = (int *) stream_.apiHandle;
1418 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
1420 // Setup parameters and do buffer conversion if necessary.
1421 if (stream_.doConvertBuffer[0]) {
1422 buffer = stream_.deviceBuffer;
1423 convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] );
1424 samples = stream_.bufferSize * stream_.nDeviceChannels[0];
1425 format = stream_.deviceFormat[0];
1428 buffer = stream_.userBuffer;
1429 samples = stream_.bufferSize * stream_.nUserChannels[0];
1430 format = stream_.userFormat;
1433 // Do byte swapping if necessary.
1434 if (stream_.doByteSwap[0])
1435 byteSwapBuffer(buffer, samples, format);
1437 // Write samples to device.
1438 result = write(handle[0], buffer, samples * formatBytes(format));
1441 // This could be an underrun, but the basic OSS API doesn't provide a means for determining that.
1442 sprintf(message_, "RtApiOss: audio write error for device (%s).",
1443 devices_[stream_.device[0]].name.c_str());
1444 error(RtError::DRIVER_ERROR);
1448 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
1450 // Setup parameters.
1451 if (stream_.doConvertBuffer[1]) {
1452 buffer = stream_.deviceBuffer;
1453 samples = stream_.bufferSize * stream_.nDeviceChannels[1];
1454 format = stream_.deviceFormat[1];
1457 buffer = stream_.userBuffer;
1458 samples = stream_.bufferSize * stream_.nUserChannels[1];
1459 format = stream_.userFormat;
1462 // Read samples from device.
1463 result = read(handle[1], buffer, samples * formatBytes(format));
1466 // This could be an overrun, but the basic OSS API doesn't provide a means for determining that.
1467 sprintf(message_, "RtApiOss: audio read error for device (%s).",
1468 devices_[stream_.device[1]].name.c_str());
1469 error(RtError::DRIVER_ERROR);
1472 // Do byte swapping if necessary.
1473 if (stream_.doByteSwap[1])
1474 byteSwapBuffer(buffer, samples, format);
1476 // Do buffer conversion if necessary.
1477 if (stream_.doConvertBuffer[1])
1478 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
1482 MUTEX_UNLOCK(&stream_.mutex);
// Honor a nonzero return from the user callback by stopping the stream.
1484 if (stream_.callbackInfo.usingCallback && stopStream)
// Register a user callback and spawn a joinable pthread (SCHED_RR
// requested) that drives the stream via ossCallbackHandler.
// callback: user function invoked once per buffer; userData: opaque
// pointer passed back to it. Warns and keeps the existing callback if
// one is already installed.
1488 void RtApiOss :: setStreamCallback(RtAudioCallback callback, void *userData)
1492 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
1493 if ( info->usingCallback ) {
1494 sprintf(message_, "RtApiOss: A callback is already set for this stream!");
1495 error(RtError::WARNING);
// Publish the callback state before the thread starts reading it.
1499 info->callback = (void *) callback;
1500 info->userData = userData;
1501 info->usingCallback = true;
1502 info->object = (void *) this;
1504 // Set the thread attributes for joinable and realtime scheduling
1505 // priority. The higher priority will only take affect if the
1506 // program is run as root or suid.
1507 pthread_attr_t attr;
1508 pthread_attr_init(&attr);
1509 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
1510 pthread_attr_setschedpolicy(&attr, SCHED_RR);
1512 int err = pthread_create(&(info->thread), &attr, ossCallbackHandler, &stream_.callbackInfo);
1513 pthread_attr_destroy(&attr);
// On pthread_create failure (check elided in this listing): roll back
// the flag and raise THREAD_ERROR.
1515 info->usingCallback = false;
1516 sprintf(message_, "RtApiOss: error starting callback thread!");
1517 error(RtError::THREAD_ERROR);
// Remove the installed callback: stop the stream if running, signal the
// callback thread to exit (usingCallback = false), join it, and clear
// the callback bookkeeping under the stream mutex.
1521 void RtApiOss :: cancelStreamCallback()
1525 if (stream_.callbackInfo.usingCallback) {
1527 if (stream_.state == STREAM_RUNNING)
1530 MUTEX_LOCK(&stream_.mutex);
// The handler thread's while-loop tests this flag; clearing it ends the loop.
1532 stream_.callbackInfo.usingCallback = false;
1533 pthread_join(stream_.callbackInfo.thread, NULL);
1534 stream_.callbackInfo.thread = 0;
1535 stream_.callbackInfo.callback = NULL;
1536 stream_.callbackInfo.userData = NULL;
1538 MUTEX_UNLOCK(&stream_.mutex);
// Callback-thread entry point (C linkage for pthread_create). Loops
// calling tickStream() until usingCallback is cleared; an RtError from
// tickStream() is reported to stderr and ends the thread.
1542 extern "C" void *ossCallbackHandler(void *ptr)
1544 CallbackInfo *info = (CallbackInfo *) ptr;
1545 RtApiOss *object = (RtApiOss *) info->object;
// Read the flag through a pointer so updates from cancelStreamCallback()
// / closeStream() are observed each iteration.
1546 bool *usingCallback = &info->usingCallback;
1548 while ( *usingCallback ) {
1549 pthread_testcancel();
1551 object->tickStream();
1553 catch (RtError &exception) {
1554 fprintf(stderr, "\nRtApiOss: callback thread error (%s) ... closing thread.\n\n",
1555 exception.getMessageString());
1563 //******************** End of __LINUX_OSS__ *********************//
1566 #if defined(__MACOSX_CORE__)
1569 // The OS X CoreAudio API is designed to use a separate callback
1570 // procedure for each of its audio devices. A single RtAudio duplex
1571 // stream using two different devices is supported here, though it
1572 // cannot be guaranteed to always behave correctly because we cannot
1573 // synchronize these two callbacks. This same functionality can be
1574 // achieved with better synchrony by opening two separate streams for
1575 // the devices and using RtAudio blocking calls (i.e. tickStream()).
1577 // A property listener is installed for over/underrun information.
1578 // However, no functionality is currently provided to allow property
1579 // listeners to trigger user handlers because it is unclear what could
1580 // be done if a critical stream parameter (buffer size, sample rate,
1581 // device disconnect) notification arrived. The listeners entail
1582 // quite a bit of extra code and most likely, a user program wouldn't
1583 // be prepared for the result anyway.
1585 // A structure to hold various information related to the CoreAudio API
// NOTE(review): struct fragment — declaration header, stopStream/xrun/
// deviceBuffer members and index[] are in lines elided from this listing;
// only the condition variable and the initializer list are visible here.
1592 pthread_cond_t condition;
1595 :stopStream(false), xrun(false), deviceBuffer(0) {}
// Constructor: raises NO_DEVICES_FOUND if device enumeration (performed
// in lines elided from this listing, presumably initialize()) found none.
1598 RtApiCore :: RtApiCore()
1602 if (nDevices_ <= 0) {
1603 sprintf(message_, "RtApiCore: no Macintosh OS-X Core Audio devices found!");
1604 error(RtError::NO_DEVICES_FOUND);
// Destructor: close any open stream first (the base-class destructor
// runs later and must not touch freed apiDeviceId memory), then free
// the per-device AudioDeviceID allocations made in initialize().
1608 RtApiCore :: ~RtApiCore()
1610 // The subclass destructor gets called before the base class
1611 // destructor, so close an existing stream before deallocating
1612 // apiDeviceId memory.
1613 if ( stream_.mode != UNINITIALIZED ) closeStream();
1615 // Free our allocated apiDeviceId memory.
1617 for ( unsigned int i=0; i<devices_.size(); i++ ) {
1618 id = (AudioDeviceID *) devices_[i].apiDeviceId;
// Enumerate CoreAudio devices: query the device-list size, fetch the
// AudioDeviceID array, and record one RtApiDevice entry per device with
// a heap-allocated copy of its AudioDeviceID in apiDeviceId.
// NOTE(review): error-path lines are elided here; whether deviceList is
// freed on the SYSTEM_ERROR paths cannot be confirmed from this listing.
1623 void RtApiCore :: initialize(void)
1625 OSStatus err = noErr;
1627 AudioDeviceID *deviceList = NULL;
1630 // Find out how many audio devices there are, if any.
1631 err = AudioHardwareGetPropertyInfo(kAudioHardwarePropertyDevices, &dataSize, NULL);
1633 sprintf(message_, "RtApiCore: OS-X error getting device info!");
1634 error(RtError::SYSTEM_ERROR);
1637 nDevices_ = dataSize / sizeof(AudioDeviceID);
1638 if (nDevices_ == 0) return;
1640 // Make space for the devices we are about to get.
1641 deviceList = (AudioDeviceID *) malloc( dataSize );
1642 if (deviceList == NULL) {
1643 sprintf(message_, "RtApiCore: memory allocation error during initialization!");
1644 error(RtError::MEMORY_ERROR);
1647 // Get the array of AudioDeviceIDs.
1648 err = AudioHardwareGetProperty(kAudioHardwarePropertyDevices, &dataSize, (void *) deviceList);
1651 sprintf(message_, "RtApiCore: OS-X error getting device properties!");
1652 error(RtError::SYSTEM_ERROR);
1655 // Create list of device structures and write device identifiers.
1658 for (int i=0; i<nDevices_; i++) {
1659 devices_.push_back(device);
// Each entry owns its own AudioDeviceID copy; freed in ~RtApiCore().
1660 id = (AudioDeviceID *) malloc( sizeof(AudioDeviceID) );
1661 *id = deviceList[i];
1662 devices_[i].apiDeviceId = (void *) id;
// Return the index (into devices_) of the system default input device,
// found by matching the AudioDeviceID reported by
// kAudioHardwarePropertyDefaultInputDevice. Warns on query failure.
1668 int RtApiCore :: getDefaultInputDevice(void)
1670 AudioDeviceID id, *deviceId;
1671 UInt32 dataSize = sizeof( AudioDeviceID );
1673 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultInputDevice,
1676 if (result != noErr) {
1677 sprintf( message_, "RtApiCore: OS-X error getting default input device." );
1678 error(RtError::WARNING);
1682 for ( int i=0; i<nDevices_; i++ ) {
1683 deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
1684 if ( id == *deviceId ) return i;
// Return the index (into devices_) of the system default output device;
// mirror of getDefaultInputDevice() using
// kAudioHardwarePropertyDefaultOutputDevice.
1692 int RtApiCore :: getDefaultOutputDevice(void)
1693 AudioDeviceID id, *deviceId;
1693 UInt32 dataSize = sizeof( AudioDeviceID );
1695 OSStatus result = AudioHardwareGetProperty( kAudioHardwarePropertyDefaultOutputDevice,
1698 if (result != noErr) {
1699 sprintf( message_, "RtApiCore: OS-X error getting default output device." );
1700 error(RtError::WARNING);
1704 for ( int i=0; i<nDevices_; i++ ) {
1705 deviceId = (AudioDeviceID *) devices_[i].apiDeviceId;
1706 if ( id == *deviceId ) return i;
// File-local helper: ask a device whether it supports the stream format
// in *desc via kAudioDevicePropertyStreamFormatSupported. For duplex
// use, the format is re-checked on the input side (isInput=true) too.
// Returns true only if the queries report kAudioHardwareNoError.
1712 static bool deviceSupportsFormat( AudioDeviceID id, bool isInput,
1713 AudioStreamBasicDescription *desc, bool isDuplex )
1715 OSStatus result = noErr;
1716 UInt32 dataSize = sizeof( AudioStreamBasicDescription );
1718 result = AudioDeviceGetProperty( id, 0, isInput,
1719 kAudioDevicePropertyStreamFormatSupported,
1722 if (result == kAudioHardwareNoError) {
// Duplex: the second query (guard elided in this listing) checks input support.
1724 result = AudioDeviceGetProperty( id, 0, true,
1725 kAudioDevicePropertyStreamFormatSupported,
1729 if (result != kAudioHardwareNoError)
// Fill in an RtApiDevice record for one CoreAudio device:
//   1. name = "<manufacturer>: <device name>" via AudioDeviceGetProperty;
//   2. output then input channel counts from the stream configuration
//      (max = sum over streams, min = smallest single stream);
//   3. duplex capability = min/max of the two directions;
//   4. supported sample rates by probing SAMPLE_RATES[] entries;
//   5. native sample formats by probing each PCM layout in both big- and
//      little-endian variants (8/16/32-bit int, 24-in-32 aligned-high,
//      32/64-bit float).
// Failures are reported as DEBUG_WARNINGs; probed is set true at the end.
// NOTE(review): listing elides lines (non-contiguous numbering); err
// checks, free(bufferList) calls and several braces are elided.
1738 void RtApiCore :: probeDeviceInfo( RtApiDevice *info )
1740 OSStatus err = noErr;
1742 // Get the device manufacturer and name.
1745 UInt32 dataSize = 256;
1746 AudioDeviceID *id = (AudioDeviceID *) info->apiDeviceId;
1747 err = AudioDeviceGetProperty( *id, 0, false,
1748 kAudioDevicePropertyDeviceManufacturer,
1751 sprintf( message_, "RtApiCore: OS-X error getting device manufacturer." );
1752 error(RtError::DEBUG_WARNING);
1755 strncpy(fullname, name, 256);
1756 strcat(fullname, ": " );
1759 err = AudioDeviceGetProperty( *id, 0, false,
1760 kAudioDevicePropertyDeviceName,
1763 sprintf( message_, "RtApiCore: OS-X error getting device name." );
1764 error(RtError::DEBUG_WARNING);
1767 strncat(fullname, name, 254);
1769 info->name.append( (const char *)fullname, strlen(fullname)+1);
1771 // Get output channel information.
1772 unsigned int i, minChannels = 0, maxChannels = 0, nStreams = 0;
1773 AudioBufferList *bufferList = nil;
1774 err = AudioDeviceGetPropertyInfo( *id, 0, false,
1775 kAudioDevicePropertyStreamConfiguration,
1777 if (err == noErr && dataSize > 0) {
1778 bufferList = (AudioBufferList *) malloc( dataSize );
1779 if (bufferList == NULL) {
1780 sprintf(message_, "RtApiCore: memory allocation error!");
1781 error(RtError::DEBUG_WARNING);
1785 err = AudioDeviceGetProperty( *id, 0, false,
1786 kAudioDevicePropertyStreamConfiguration,
1787 &dataSize, bufferList );
// max = total channels across all output streams; min = smallest stream.
1791 nStreams = bufferList->mNumberBuffers;
1792 for ( i=0; i<nStreams; i++ ) {
1793 maxChannels += bufferList->mBuffers[i].mNumberChannels;
1794 if ( bufferList->mBuffers[i].mNumberChannels < minChannels )
1795 minChannels = bufferList->mBuffers[i].mNumberChannels;
1801 if (err != noErr || dataSize <= 0) {
1802 sprintf( message_, "RtApiCore: OS-X error getting output channels for device (%s).",
1803 info->name.c_str() );
1804 error(RtError::DEBUG_WARNING);
1809 if ( maxChannels > 0 )
1810 info->maxOutputChannels = maxChannels;
1811 if ( minChannels > 0 )
1812 info->minOutputChannels = minChannels;
1815 // Get input channel information.
1817 err = AudioDeviceGetPropertyInfo( *id, 0, true,
1818 kAudioDevicePropertyStreamConfiguration,
1820 if (err == noErr && dataSize > 0) {
1821 bufferList = (AudioBufferList *) malloc( dataSize );
1822 if (bufferList == NULL) {
1823 sprintf(message_, "RtApiCore: memory allocation error!");
1824 error(RtError::DEBUG_WARNING);
1827 err = AudioDeviceGetProperty( *id, 0, true,
1828 kAudioDevicePropertyStreamConfiguration,
1829 &dataSize, bufferList );
1833 nStreams = bufferList->mNumberBuffers;
1834 for ( i=0; i<nStreams; i++ ) {
1835 if ( bufferList->mBuffers[i].mNumberChannels < minChannels )
1836 minChannels = bufferList->mBuffers[i].mNumberChannels;
1837 maxChannels += bufferList->mBuffers[i].mNumberChannels;
1843 if (err != noErr || dataSize <= 0) {
1844 sprintf( message_, "RtApiCore: OS-X error getting input channels for device (%s).",
1845 info->name.c_str() );
1846 error(RtError::DEBUG_WARNING);
1851 if ( maxChannels > 0 )
1852 info->maxInputChannels = maxChannels;
1853 if ( minChannels > 0 )
1854 info->minInputChannels = minChannels;
1857 // If device opens for both playback and capture, we determine the channels.
1858 if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
1859 info->hasDuplexSupport = true;
// Duplex limits are the smaller of the input/output limits.
1860 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
1861 info->maxInputChannels : info->maxOutputChannels;
1862 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
1863 info->minInputChannels : info->minOutputChannels;
1866 // Probe the device sample rate and data format parameters. The
1867 // core audio query mechanism is performed on a "stream"
1868 // description, which can have a variable number of channels and
1869 // apply to input or output only.
1871 // Create a stream description structure.
1872 AudioStreamBasicDescription description;
1873 dataSize = sizeof( AudioStreamBasicDescription );
1874 memset(&description, 0, sizeof(AudioStreamBasicDescription));
1875 bool isInput = false;
1876 if ( info->maxOutputChannels == 0 ) isInput = true;
1877 bool isDuplex = false;
1878 if ( info->maxDuplexChannels > 0 ) isDuplex = true;
1880 // Determine the supported sample rates.
1881 info->sampleRates.clear();
1882 for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
1883 description.mSampleRate = (double) SAMPLE_RATES[k];
1884 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1885 info->sampleRates.push_back( SAMPLE_RATES[k] );
1888 if (info->sampleRates.size() == 0) {
1889 sprintf( message_, "RtApiCore: No supported sample rates found for OS-X device (%s).",
1890 info->name.c_str() );
1891 error(RtError::DEBUG_WARNING);
1895 // Determine the supported data formats.
// Each format is probed big-endian first, then with the big-endian flag
// cleared (little-endian).
1896 info->nativeFormats = 0;
1897 description.mFormatID = kAudioFormatLinearPCM;
1898 description.mBitsPerChannel = 8;
1899 description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian;
1900 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1901 info->nativeFormats |= RTAUDIO_SINT8;
1903 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1904 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1905 info->nativeFormats |= RTAUDIO_SINT8;
1908 description.mBitsPerChannel = 16;
1909 description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
1910 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1911 info->nativeFormats |= RTAUDIO_SINT16;
1913 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1914 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1915 info->nativeFormats |= RTAUDIO_SINT16;
1918 description.mBitsPerChannel = 32;
1919 description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
1920 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1921 info->nativeFormats |= RTAUDIO_SINT32;
1923 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1924 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1925 info->nativeFormats |= RTAUDIO_SINT32;
// 24-bit samples are probed as aligned-high (24-in-32), not packed.
1928 description.mBitsPerChannel = 24;
1929 description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsAlignedHigh | kLinearPCMFormatFlagIsBigEndian;
1930 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1931 info->nativeFormats |= RTAUDIO_SINT24;
1933 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1934 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1935 info->nativeFormats |= RTAUDIO_SINT24;
1938 description.mBitsPerChannel = 32;
1939 description.mFormatFlags = kLinearPCMFormatFlagIsFloat | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian;
1940 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1941 info->nativeFormats |= RTAUDIO_FLOAT32;
1943 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1944 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1945 info->nativeFormats |= RTAUDIO_FLOAT32;
1948 description.mBitsPerChannel = 64;
1949 description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
1950 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1951 info->nativeFormats |= RTAUDIO_FLOAT64;
1953 description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian;
1954 if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) )
1955 info->nativeFormats |= RTAUDIO_FLOAT64;
1958 // Check that we have at least one supported format.
1959 if (info->nativeFormats == 0) {
1960 sprintf(message_, "RtApiCore: OS-X device (%s) data format not supported by RtAudio.",
1961 info->name.c_str());
1962 error(RtError::DEBUG_WARNING);
1966 info->probed = true;
// CoreAudio IOProc: forwards each render cycle to the owning RtApiCore
// object's callbackEvent() with the raw input/output AudioBufferLists.
// An RtError escaping callbackEvent() is logged and converted to
// kAudioHardwareUnspecifiedError; otherwise returns kAudioHardwareNoError.
1969 OSStatus callbackHandler( AudioDeviceID inDevice,
1970 const AudioTimeStamp* inNow,
1971 const AudioBufferList* inInputData,
1972 const AudioTimeStamp* inInputTime,
1973 AudioBufferList* outOutputData,
1974 const AudioTimeStamp* inOutputTime,
1977 CallbackInfo *info = (CallbackInfo *) infoPointer;
1979 RtApiCore *object = (RtApiCore *) info->object;
1981 object->callbackEvent( inDevice, (void *)inInputData, (void *)outOutputData );
1983 catch (RtError &exception) {
1984 fprintf(stderr, "\nRtApiCore: callback handler error (%s)!\n\n", exception.getMessageString());
1985 return kAudioHardwareUnspecifiedError;
1988 return kAudioHardwareNoError;
// CoreAudio property listener: on kAudioDeviceProcessorOverload, log an
// over/underrun (input vs output branch — the selecting condition is in
// lines elided from this listing) and set the CoreHandle xrun flag.
1991 OSStatus deviceListener( AudioDeviceID inDevice,
1994 AudioDevicePropertyID propertyID,
1995 void* handlePointer )
1997 CoreHandle *handle = (CoreHandle *) handlePointer;
1998 if ( propertyID == kAudioDeviceProcessorOverload ) {
2000 fprintf(stderr, "\nRtApiCore: OS-X audio input overrun detected!\n");
2002 fprintf(stderr, "\nRtApiCore: OS-X audio output underrun detected!\n");
2003 handle->xrun = true;
2006 return kAudioHardwareNoError;
2009 bool RtApiCore :: probeDeviceOpen( int device, StreamMode mode, int channels,
2010 int sampleRate, RtAudioFormat format,
2011 int *bufferSize, int numberOfBuffers )
2013 // Setup for stream mode.
2014 bool isInput = false;
2015 AudioDeviceID id = *((AudioDeviceID *) devices_[device].apiDeviceId);
2016 if ( mode == INPUT ) isInput = true;
2018 // Search for a stream which contains the desired number of channels.
2019 OSStatus err = noErr;
2021 unsigned int deviceChannels, nStreams = 0;
2022 UInt32 iChannel = 0, iStream = 0;
2023 AudioBufferList *bufferList = nil;
2024 err = AudioDeviceGetPropertyInfo( id, 0, isInput,
2025 kAudioDevicePropertyStreamConfiguration,
2028 if (err == noErr && dataSize > 0) {
2029 bufferList = (AudioBufferList *) malloc( dataSize );
2030 if (bufferList == NULL) {
2031 sprintf(message_, "RtApiCore: memory allocation error in probeDeviceOpen()!");
2032 error(RtError::DEBUG_WARNING);
2035 err = AudioDeviceGetProperty( id, 0, isInput,
2036 kAudioDevicePropertyStreamConfiguration,
2037 &dataSize, bufferList );
2040 stream_.deInterleave[mode] = false;
2041 nStreams = bufferList->mNumberBuffers;
2042 for ( iStream=0; iStream<nStreams; iStream++ ) {
2043 if ( bufferList->mBuffers[iStream].mNumberChannels >= (unsigned int) channels ) break;
2044 iChannel += bufferList->mBuffers[iStream].mNumberChannels;
2046 // If we didn't find a single stream above, see if we can meet
2047 // the channel specification in mono mode (i.e. using separate
2048 // non-interleaved buffers). This can only work if there are N
2049 // consecutive one-channel streams, where N is the number of
2050 // desired channels.
2052 if ( iStream >= nStreams && nStreams >= (unsigned int) channels ) {
2054 for ( iStream=0; iStream<nStreams; iStream++ ) {
2055 if ( bufferList->mBuffers[iStream].mNumberChannels == 1 )
2059 if ( counter == channels ) {
2060 iStream -= channels - 1;
2061 iChannel -= channels - 1;
2062 stream_.deInterleave[mode] = true;
2065 iChannel += bufferList->mBuffers[iStream].mNumberChannels;
2070 if (err != noErr || dataSize <= 0) {
2071 if ( bufferList ) free( bufferList );
2072 sprintf( message_, "RtApiCore: OS-X error getting channels for device (%s).",
2073 devices_[device].name.c_str() );
2074 error(RtError::DEBUG_WARNING);
2078 if (iStream >= nStreams) {
2080 sprintf( message_, "RtApiCore: unable to find OS-X audio stream on device (%s) for requested channels (%d).",
2081 devices_[device].name.c_str(), channels );
2082 error(RtError::DEBUG_WARNING);
2086 // This is ok even for mono mode ... it gets updated later.
2087 deviceChannels = bufferList->mBuffers[iStream].mNumberChannels;
2090 // Determine the buffer size.
2091 AudioValueRange bufferRange;
2092 dataSize = sizeof(AudioValueRange);
2093 err = AudioDeviceGetProperty( id, 0, isInput,
2094 kAudioDevicePropertyBufferSizeRange,
2095 &dataSize, &bufferRange);
2097 sprintf( message_, "RtApiCore: OS-X error getting buffer size range for device (%s).",
2098 devices_[device].name.c_str() );
2099 error(RtError::DEBUG_WARNING);
2103 long bufferBytes = *bufferSize * deviceChannels * formatBytes(RTAUDIO_FLOAT32);
2104 if (bufferRange.mMinimum > bufferBytes) bufferBytes = (int) bufferRange.mMinimum;
2105 else if (bufferRange.mMaximum < bufferBytes) bufferBytes = (int) bufferRange.mMaximum;
2107 // Set the buffer size. For mono mode, I'm assuming we only need to
2108 // make this setting for the first channel.
2109 UInt32 theSize = (UInt32) bufferBytes;
2110 dataSize = sizeof( UInt32);
2111 err = AudioDeviceSetProperty(id, NULL, 0, isInput,
2112 kAudioDevicePropertyBufferSize,
2113 dataSize, &theSize);
2115 sprintf( message_, "RtApiCore: OS-X error setting the buffer size for device (%s).",
2116 devices_[device].name.c_str() );
2117 error(RtError::DEBUG_WARNING);
2121 // If attempting to setup a duplex stream, the bufferSize parameter
2122 // MUST be the same in both directions!
2123 *bufferSize = bufferBytes / ( deviceChannels * formatBytes(RTAUDIO_FLOAT32) );
2124 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
2125 sprintf( message_, "RtApiCore: OS-X error setting buffer size for duplex stream on device (%s).",
2126 devices_[device].name.c_str() );
2127 error(RtError::DEBUG_WARNING);
2131 stream_.bufferSize = *bufferSize;
2132 stream_.nBuffers = 1;
2134 // Set the stream format description. Do for each channel in mono mode.
2135 AudioStreamBasicDescription description;
2136 dataSize = sizeof( AudioStreamBasicDescription );
2137 if ( stream_.deInterleave[mode] ) nStreams = channels;
2139 for ( unsigned int i=0; i<nStreams; i++, iChannel++ ) {
2141 err = AudioDeviceGetProperty( id, iChannel, isInput,
2142 kAudioDevicePropertyStreamFormat,
2143 &dataSize, &description );
2145 sprintf( message_, "RtApiCore: OS-X error getting stream format for device (%s).",
2146 devices_[device].name.c_str() );
2147 error(RtError::DEBUG_WARNING);
2151 // Set the sample rate and data format id.
2152 description.mSampleRate = (double) sampleRate;
2153 description.mFormatID = kAudioFormatLinearPCM;
2154 err = AudioDeviceSetProperty( id, NULL, iChannel, isInput,
2155 kAudioDevicePropertyStreamFormat,
2156 dataSize, &description );
2158 sprintf( message_, "RtApiCore: OS-X error setting sample rate or data format for device (%s).",
2159 devices_[device].name.c_str() );
2160 error(RtError::DEBUG_WARNING);
2165 // Check whether we need byte-swapping (assuming OS-X host is big-endian).
2166 iChannel -= nStreams;
2167 err = AudioDeviceGetProperty( id, iChannel, isInput,
2168 kAudioDevicePropertyStreamFormat,
2169 &dataSize, &description );
2171 sprintf( message_, "RtApiCore: OS-X error getting stream format for device (%s).", devices_[device].name.c_str() );
2172 error(RtError::DEBUG_WARNING);
2176 stream_.doByteSwap[mode] = false;
2177 if ( !description.mFormatFlags & kLinearPCMFormatFlagIsBigEndian )
2178 stream_.doByteSwap[mode] = true;
2180 // From the CoreAudio documentation, PCM data must be supplied as
2182 stream_.userFormat = format;
2183 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
2185 if ( stream_.deInterleave[mode] ) // mono mode
2186 stream_.nDeviceChannels[mode] = channels;
2188 stream_.nDeviceChannels[mode] = description.mChannelsPerFrame;
2189 stream_.nUserChannels[mode] = channels;
2191 // Set flags for buffer conversion.
2192 stream_.doConvertBuffer[mode] = false;
2193 if (stream_.userFormat != stream_.deviceFormat[mode])
2194 stream_.doConvertBuffer[mode] = true;
2195 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
2196 stream_.doConvertBuffer[mode] = true;
2197 if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
2198 stream_.doConvertBuffer[mode] = true;
2200 // Allocate our CoreHandle structure for the stream.
2202 if ( stream_.apiHandle == 0 ) {
2203 handle = (CoreHandle *) calloc(1, sizeof(CoreHandle));
2204 if ( handle == NULL ) {
2205 sprintf(message_, "RtApiCore: OS-X error allocating coreHandle memory (%s).",
2206 devices_[device].name.c_str());
2209 handle->index[0] = 0;
2210 handle->index[1] = 0;
2211 if ( pthread_cond_init(&handle->condition, NULL) ) {
2212 sprintf(message_, "RtApiCore: error initializing pthread condition variable (%s).",
2213 devices_[device].name.c_str());
2216 stream_.apiHandle = (void *) handle;
2219 handle = (CoreHandle *) stream_.apiHandle;
2220 handle->index[mode] = iStream;
2222 // Allocate necessary internal buffers.
2223 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
2226 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
2227 buffer_bytes = stream_.nUserChannels[0];
2229 buffer_bytes = stream_.nUserChannels[1];
2231 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
2232 if (stream_.userBuffer) free(stream_.userBuffer);
2233 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
2234 if (stream_.userBuffer == NULL) {
2235 sprintf(message_, "RtApiCore: OS-X error allocating user buffer memory (%s).",
2236 devices_[device].name.c_str());
2241 if ( stream_.deInterleave[mode] ) {
2244 bool makeBuffer = true;
2245 if ( mode == OUTPUT )
2246 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2247 else { // mode == INPUT
2248 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
2249 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2250 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2251 if ( buffer_bytes < bytes_out ) makeBuffer = false;
2256 buffer_bytes *= *bufferSize;
2257 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
2258 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
2259 if (stream_.deviceBuffer == NULL) {
2260 sprintf(message_, "RtApiCore: error allocating device buffer memory (%s).",
2261 devices_[device].name.c_str());
2265 // If not de-interleaving, we point stream_.deviceBuffer to the
2266 // OS X supplied device buffer before doing any necessary data
2267 // conversions. This presents a problem if we have a duplex
2268 // stream using one device which needs de-interleaving and
2269 // another device which doesn't. So, save a pointer to our own
2270 // device buffer in the CallbackInfo structure.
2271 handle->deviceBuffer = stream_.deviceBuffer;
2275 stream_.sampleRate = sampleRate;
2276 stream_.device[mode] = device;
2277 stream_.state = STREAM_STOPPED;
2278 stream_.callbackInfo.object = (void *) this;
2280 // Setup the buffer conversion information structure.
2281 if ( stream_.doConvertBuffer[mode] ) {
2282 if (mode == INPUT) { // convert device to user buffer
2283 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
2284 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
2285 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
2286 stream_.convertInfo[mode].outFormat = stream_.userFormat;
2288 else { // convert user to device buffer
2289 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
2290 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
2291 stream_.convertInfo[mode].inFormat = stream_.userFormat;
2292 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
2295 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
2296 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
2298 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
2300 // Set up the interleave/deinterleave offsets.
2301 if ( mode == INPUT && stream_.deInterleave[1] ) {
2302 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
2303 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
2304 stream_.convertInfo[mode].outOffset.push_back( k );
2305 stream_.convertInfo[mode].inJump = 1;
2308 else if (mode == OUTPUT && stream_.deInterleave[0]) {
2309 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
2310 stream_.convertInfo[mode].inOffset.push_back( k );
2311 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
2312 stream_.convertInfo[mode].outJump = 1;
2316 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
2317 stream_.convertInfo[mode].inOffset.push_back( k );
2318 stream_.convertInfo[mode].outOffset.push_back( k );
2323 if ( stream_.mode == OUTPUT && mode == INPUT && stream_.device[0] == device )
2324 // Only one callback procedure per device.
2325 stream_.mode = DUPLEX;
2327 err = AudioDeviceAddIOProc( id, callbackHandler, (void *) &stream_.callbackInfo );
2329 sprintf( message_, "RtApiCore: OS-X error setting callback for device (%s).", devices_[device].name.c_str() );
2330 error(RtError::DEBUG_WARNING);
2333 if ( stream_.mode == OUTPUT && mode == INPUT )
2334 stream_.mode = DUPLEX;
2336 stream_.mode = mode;
2339 // Setup the device property listener for over/underload.
2340 err = AudioDeviceAddPropertyListener( id, iChannel, isInput,
2341 kAudioDeviceProcessorOverload,
2342 deviceListener, (void *) handle );
2348 pthread_cond_destroy(&handle->condition);
2350 stream_.apiHandle = 0;
2353 if (stream_.userBuffer) {
2354 free(stream_.userBuffer);
2355 stream_.userBuffer = 0;
2358 error(RtError::DEBUG_WARNING);
// Close the CoreAudio stream.  This is also invoked from the class
// destructor, so an absent stream only raises a WARNING rather than
// throwing.  Stops and removes the IOProc on the output device and, when
// the input side lives on a distinct device, on that device too; then
// releases the user buffer, any privately owned device buffer, the
// pthread condition variable and the CoreHandle, and marks the stream
// uninitialized.
2362 void RtApiCore :: closeStream()
2364 // We don't want an exception to be thrown here because this
2365 // function is called by our class destructor. So, do our own
2367 if ( stream_.mode == UNINITIALIZED ) {
2368 sprintf(message_, "RtApiCore::closeStream(): no open stream to close!");
2369 error(RtError::WARNING);
2373 AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2374 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
2375 if (stream_.state == STREAM_RUNNING)
2376 AudioDeviceStop( id, callbackHandler );
2377 AudioDeviceRemoveIOProc( id, callbackHandler );
// Touch the input device separately only when it differs from the
// output device; a shared duplex device has a single IOProc.
2380 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2381 if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
2382 if (stream_.state == STREAM_RUNNING)
2383 AudioDeviceStop( id, callbackHandler );
2384 AudioDeviceRemoveIOProc( id, callbackHandler );
2387 if (stream_.userBuffer) {
2388 free(stream_.userBuffer);
2389 stream_.userBuffer = 0;
// We own stream_.deviceBuffer only when de-interleaving was required;
// otherwise it pointed at an OS-supplied buffer and must not be freed.
2392 if ( stream_.deInterleave[0] || stream_.deInterleave[1] ) {
2393 free(stream_.deviceBuffer);
2394 stream_.deviceBuffer = 0;
2397 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2399 // Destroy pthread condition variable and free the CoreHandle structure.
2401 pthread_cond_destroy(&handle->condition);
2403 stream_.apiHandle = 0;
2406 stream_.mode = UNINITIALIZED;
// Start the CoreAudio stream: begin executing the registered IOProc on
// the output device and, for a duplex stream spanning two distinct
// devices, on the input device as well.  No-op when already running.
// On AudioDeviceStart() failure the mutex is released before
// error(RtError::DRIVER_ERROR) so the lock is never leaked.
2409 void RtApiCore :: startStream()
2412 if (stream_.state == STREAM_RUNNING) return;
2414 MUTEX_LOCK(&stream_.mutex);
2418 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
2420 id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2421 err = AudioDeviceStart(id, callbackHandler);
2423 sprintf(message_, "RtApiCore: OS-X error starting callback procedure on device (%s).",
2424 devices_[stream_.device[0]].name.c_str());
2425 MUTEX_UNLOCK(&stream_.mutex);
2426 error(RtError::DRIVER_ERROR);
2430 if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
2432 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2433 err = AudioDeviceStart(id, callbackHandler);
2435 sprintf(message_, "RtApiCore: OS-X error starting input callback procedure on device (%s).",
// BUG FIX: this error message previously reported the *output* device's
// name (stream_.device[0]) even though the failure occurred on the
// input device; report stream_.device[1] instead.
2436 devices_[stream_.device[1]].name.c_str());
2437 MUTEX_UNLOCK(&stream_.mutex);
2438 error(RtError::DRIVER_ERROR);
2442 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2443 handle->stopStream = false;
2444 stream_.state = STREAM_RUNNING;
2446 MUTEX_UNLOCK(&stream_.mutex);
// Stop the CoreAudio stream: halt the IOProc on the output device and,
// for a duplex stream spanning two distinct devices, on the input
// device as well.  No-op when already stopped.  The state is flipped to
// STREAM_STOPPED before taking the lock so an in-flight callback can
// observe it and wind down promptly.
2449 void RtApiCore :: stopStream()
2452 if (stream_.state == STREAM_STOPPED) return;
2454 // Change the state before the lock to improve shutdown response
2455 // when using a callback.
2456 stream_.state = STREAM_STOPPED;
2457 MUTEX_LOCK(&stream_.mutex);
2461 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
2463 id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2464 err = AudioDeviceStop(id, callbackHandler);
2466 sprintf(message_, "RtApiCore: OS-X error stopping callback procedure on device (%s).",
2467 devices_[stream_.device[0]].name.c_str());
2468 MUTEX_UNLOCK(&stream_.mutex);
2469 error(RtError::DRIVER_ERROR);
2473 if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) {
2475 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2476 err = AudioDeviceStop(id, callbackHandler);
2478 sprintf(message_, "RtApiCore: OS-X error stopping input callback procedure on device (%s).",
// BUG FIX: this error message previously reported the *output* device's
// name (stream_.device[0]) even though the failure occurred on the
// input device; report stream_.device[1] instead.
2479 devices_[stream_.device[1]].name.c_str());
2480 MUTEX_UNLOCK(&stream_.mutex);
2481 error(RtError::DRIVER_ERROR);
2485 MUTEX_UNLOCK(&stream_.mutex);
2488 void RtApiCore :: abortStream()
// Blocking tick for the non-callback API: waits on the CoreHandle
// condition variable until the audio IOProc signals that a buffer has
// been processed.  Raises a WARNING (and must not be used) when a user
// callback function is registered.
2493 void RtApiCore :: tickStream()
2497 if (stream_.state == STREAM_STOPPED) return;
2499 if (stream_.callbackInfo.usingCallback) {
2500 sprintf(message_, "RtApiCore: tickStream() should not be used when a callback function is set!");
2501 error(RtError::WARNING);
2505 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2507 MUTEX_LOCK(&stream_.mutex);
// pthread_cond_wait atomically releases the mutex while waiting and
// re-acquires it before returning.
2509 pthread_cond_wait(&handle->condition, &stream_.mutex);
2511 MUTEX_UNLOCK(&stream_.mutex);
// Per-buffer CoreAudio IOProc handler.  Runs the user callback (at most
// once per cycle, on the output device's invocation in duplex mode),
// then moves data between the user buffer and the CoreAudio
// AudioBufferLists in both directions, performing format conversion,
// byte swapping and interleaving/de-interleaving as configured at
// probeDeviceOpen() time.  For the blocking API it signals the
// condition variable that tickStream() waits on.
2514 void RtApiCore :: callbackEvent( AudioDeviceID deviceId, void *inData, void *outData )
2518 if (stream_.state == STREAM_STOPPED) return;
2520 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
2521 CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
2522 AudioBufferList *inBufferList = (AudioBufferList *) inData;
2523 AudioBufferList *outBufferList = (AudioBufferList *) outData;
2525 if ( info->usingCallback && handle->stopStream ) {
2526 // Check if the stream should be stopped (via the previous user
2527 // callback return value). We stop the stream here, rather than
2528 // after the function call, so that output data can first be
2534 MUTEX_LOCK(&stream_.mutex);
2536 // Invoke user callback first, to get fresh output data. Don't
2537 // invoke the user callback if duplex mode AND the input/output devices
2538 // are different AND this function is called for the input device.
2539 AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId );
2540 if ( info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) ) {
2541 RtAudioCallback callback = (RtAudioCallback) info->callback;
// A non-zero callback return requests stream shutdown on the next cycle.
2542 handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData);
2543 if ( handle->xrun == true ) {
2544 handle->xrun = false;
2545 MUTEX_UNLOCK(&stream_.mutex);
// ---- Output side: user buffer -> CoreAudio output buffer list ----
2550 if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) {
2552 if (stream_.doConvertBuffer[0]) {
// When no de-interleave is needed we convert straight into the
// OS-supplied buffer; otherwise into our private device buffer.
2554 if ( !stream_.deInterleave[0] )
2555 stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->index[0]].mData;
2557 stream_.deviceBuffer = handle->deviceBuffer;
2559 convertBuffer( stream_.deviceBuffer, stream_.userBuffer, stream_.convertInfo[0] );
2560 if ( stream_.doByteSwap[0] )
2561 byteSwapBuffer(stream_.deviceBuffer,
2562 stream_.bufferSize * stream_.nDeviceChannels[0],
2563 stream_.deviceFormat[0]);
// De-interleaved output: copy one channel per CoreAudio buffer.
2565 if ( stream_.deInterleave[0] ) {
2566 int bufferBytes = outBufferList->mBuffers[handle->index[0]].mDataByteSize;
2567 for ( int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
2568 memcpy(outBufferList->mBuffers[handle->index[0]+i].mData,
2569 &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
// No conversion required: byte-swap (if needed) and copy directly.
2575 if (stream_.doByteSwap[0])
2576 byteSwapBuffer(stream_.userBuffer,
2577 stream_.bufferSize * stream_.nUserChannels[0],
2578 stream_.userFormat);
2580 memcpy(outBufferList->mBuffers[handle->index[0]].mData,
2582 outBufferList->mBuffers[handle->index[0]].mDataByteSize );
// ---- Input side: CoreAudio input buffer list -> user buffer ----
2586 id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId );
2587 if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) {
2589 if (stream_.doConvertBuffer[1]) {
2591 if ( stream_.deInterleave[1] ) {
2592 stream_.deviceBuffer = (char *) handle->deviceBuffer;
2593 int bufferBytes = inBufferList->mBuffers[handle->index[1]].mDataByteSize;
2594 for ( int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
2595 memcpy(&stream_.deviceBuffer[i*bufferBytes],
2596 inBufferList->mBuffers[handle->index[1]+i].mData, bufferBytes );
2600 stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->index[1]].mData;
2602 if ( stream_.doByteSwap[1] )
2603 byteSwapBuffer(stream_.deviceBuffer,
2604 stream_.bufferSize * stream_.nDeviceChannels[1],
2605 stream_.deviceFormat[1]);
2606 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
2610 memcpy(stream_.userBuffer,
2611 inBufferList->mBuffers[handle->index[1]].mData,
2612 inBufferList->mBuffers[handle->index[1]].mDataByteSize );
2614 if (stream_.doByteSwap[1])
2615 byteSwapBuffer(stream_.userBuffer,
2616 stream_.bufferSize * stream_.nUserChannels[1],
2617 stream_.userFormat);
// Blocking API: wake tickStream() once per full processing cycle.
2621 if ( !info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) )
2622 pthread_cond_signal(&handle->condition);
2624 MUTEX_UNLOCK(&stream_.mutex);
// Register a user callback (and its opaque userData) to be driven by
// the CoreAudio IOProc.  Raises a WARNING and leaves the existing
// callback untouched if one is already registered.
2627 void RtApiCore :: setStreamCallback(RtAudioCallback callback, void *userData)
2631 if ( stream_.callbackInfo.usingCallback ) {
2632 sprintf(message_, "RtApiCore: A callback is already set for this stream!");
2633 error(RtError::WARNING);
2637 stream_.callbackInfo.callback = (void *) callback;
2638 stream_.callbackInfo.userData = userData;
2639 stream_.callbackInfo.usingCallback = true;
// Deregister the user callback: stops the stream if it is running,
// then clears the callback bookkeeping under the stream mutex so the
// IOProc cannot race with the teardown.
2642 void RtApiCore :: cancelStreamCallback()
2646 if (stream_.callbackInfo.usingCallback) {
2648 if (stream_.state == STREAM_RUNNING)
2651 MUTEX_LOCK(&stream_.mutex);
2653 stream_.callbackInfo.usingCallback = false;
2654 stream_.callbackInfo.userData = NULL;
2655 stream_.state = STREAM_STOPPED;
2656 stream_.callbackInfo.callback = NULL;
2658 MUTEX_UNLOCK(&stream_.mutex);
2663 //******************** End of __MACOSX_CORE__ *********************//
2666 #if defined(__LINUX_JACK__)
2668 // JACK is a low-latency audio server, written primarily for the
2669 // GNU/Linux operating system. It can connect a number of different
2670 // applications to an audio device, as well as allowing them to share
2671 // audio between themselves.
2673 // The JACK server must be running before RtApiJack can be instantiated.
2674 // RtAudio will report just a single "device", which is the JACK audio
2675 // server. The JACK server is typically started in a terminal as follows:
2677 // .jackd -d alsa -d hw:0
2679 // or through an interface program such as qjackctl. Many of the
2680 // parameters normally set for a stream are fixed by the JACK server
2681 // and can be specified when the JACK server is started. In
2684 // .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4
2686 // specifies a sample rate of 44100 Hz, a buffer size of 512 sample
2687 // frames, and number of buffers = 4. Once the server is running, it
2688 // is not possible to override these values. If the values are not
2689 // specified in the command-line, the JACK server uses default values.
2691 #include <jack/jack.h>
2694 // A structure to hold various information related to the Jack API
// Connection to the JACK server for this stream.
2697 jack_client_t *client;
// Registered port handles, one array per direction ([0]=output, [1]=input).
2698 jack_port_t **ports[2];
// Condition variable used by the blocking (tickStream) API.
2701 pthread_cond_t condition;
// Default-construct with no client and the stop flag cleared.
2704 :client(0), clientOpen(false), stopStream(false) {}
// Global buffer holding the most recent JACK server error text; it is
// reported later through the normal RtAudio error mechanism.
2707 std::string jackmsg;
// Error hook installed via jack_set_error_function(): saves the
// server-supplied message for subsequent reporting.
2709 static void jackerror (const char *desc)
// BUG FIX: previously appended strlen(desc)+1 bytes, embedding the
// terminating NUL character inside the std::string (corrupting its
// length and any concatenated reports); append only the text itself.
2712 jackmsg.append( desc, strlen(desc) );
// Constructor: device enumeration (performed in the base/init path)
// must find the JACK server; otherwise abort construction with
// NO_DEVICES_FOUND, including the saved jackerror() text.
2715 RtApiJack :: RtApiJack()
2719 if (nDevices_ <= 0) {
2720 sprintf(message_, "RtApiJack: no Linux Jack server found or connection error (jack: %s)!",
2722 error(RtError::NO_DEVICES_FOUND);
// Destructor: make sure any open stream is shut down and released.
2726 RtApiJack :: ~RtApiJack()
2728 if ( stream_.mode != UNINITIALIZED ) closeStream();
// Probe for a running JACK server.  Installs the jackerror() hook,
// attempts a trial client connection, and — on success — registers a
// single pseudo-device named "Jack Server" before closing the trial
// client again.
2731 void RtApiJack :: initialize(void)
2735 // Tell the jack server to call jackerror() when it experiences an
2736 // error. This function saves the error message for subsequent
2737 // reporting via the normal RtAudio error function.
2738 jack_set_error_function( jackerror );
2740 // Look for jack server and try to become a client.
2741 jack_client_t *client;
2742 if ( (client = jack_client_new( "RtApiJack" )) == 0)
2746 // Determine the name of the device.
2747 device.name = "Jack Server";
2748 devices_.push_back(device);
// The trial connection is only needed for detection; release it.
2751 jack_client_close(client);
// Fill in an RtApiDevice record for the JACK pseudo-device: connect a
// temporary client, read the server's fixed sample rate, count the
// physical ports in each direction (JACK *input* ports are RtAudio
// *output* channels and vice versa), derive duplex capabilities, and
// determine the native float format from the JACK sample type size.
2754 void RtApiJack :: probeDeviceInfo(RtApiDevice *info)
2756 // Look for jack server and try to become a client.
2757 jack_client_t *client;
2758 if ( (client = jack_client_new( "RtApiJack" )) == 0) {
2759 sprintf(message_, "RtApiJack: error connecting to Linux Jack server in probeDeviceInfo() (jack: %s)!",
2761 error(RtError::WARNING);
2765 // Get the current jack server sample rate.
2766 info->sampleRates.clear();
// JACK fixes the rate server-wide, so this is the only rate reported.
2767 info->sampleRates.push_back( jack_get_sample_rate(client) );
2769 // Count the available ports as device channels. Jack "input ports"
2770 // equal RtAudio output channels.
2773 unsigned int nChannels = 0;
2774 ports = jack_get_ports( client, NULL, NULL, JackPortIsInput );
2776 port = (char *) ports[nChannels];
// The returned port array is NULL-terminated; walk it to count ports.
2778 port = (char *) ports[++nChannels];
2780 info->maxOutputChannels = nChannels;
2781 info->minOutputChannels = 1;
2784 // Jack "output ports" equal RtAudio input channels.
2786 ports = jack_get_ports( client, NULL, NULL, JackPortIsOutput );
2788 port = (char *) ports[nChannels];
2790 port = (char *) ports[++nChannels];
2792 info->maxInputChannels = nChannels;
2793 info->minInputChannels = 1;
2796 if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
2797 jack_client_close(client);
2798 sprintf(message_, "RtApiJack: error determining jack input/output channels!");
2799 error(RtError::DEBUG_WARNING);
// Duplex capability is bounded by the smaller of the two directions.
2803 if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
2804 info->hasDuplexSupport = true;
2805 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
2806 info->maxInputChannels : info->maxOutputChannels;
2807 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
2808 info->minInputChannels : info->minOutputChannels;
2811 // Get the jack data format type. There isn't much documentation
2812 // regarding supported data formats in jack. I'm assuming here that
2813 // the default type will always be a floating-point type, of length
2814 // equal to either 4 or 8 bytes.
2815 int sample_size = sizeof( jack_default_audio_sample_t );
2816 if ( sample_size == 4 )
2817 info->nativeFormats = RTAUDIO_FLOAT32;
2818 else if ( sample_size == 8 )
2819 info->nativeFormats = RTAUDIO_FLOAT64;
2821 // Check that we have a supported format
2822 if (info->nativeFormats == 0) {
2823 jack_client_close(client);
2824 sprintf(message_, "RtApiJack: error determining jack server data format!");
2825 error(RtError::DEBUG_WARNING);
2829 jack_client_close(client);
2830 info->probed = true;
// JACK process callback trampoline: recovers the RtApiJack instance
// from the CallbackInfo pointer and forwards the frame count to its
// callbackEvent().  RtError exceptions are trapped here because an
// exception must never propagate into the JACK server thread.
2833 int jackCallbackHandler(jack_nframes_t nframes, void *infoPointer)
2835 CallbackInfo *info = (CallbackInfo *) infoPointer;
2836 RtApiJack *object = (RtApiJack *) info->object;
2838 object->callbackEvent( (unsigned long) nframes );
2840 catch (RtError &exception) {
2841 fprintf(stderr, "\nRtApiJack: callback handler error (%s)!\n\n", exception.getMessageString());
// Shutdown hook installed via jack_on_shutdown().  Marks the client
// closed and, unless the stream was already stopped by RtAudio itself
// (client deactivation also triggers this hook), closes the stream
// because the server is going away.
2848 void jackShutdown(void *infoPointer)
2850 CallbackInfo *info = (CallbackInfo *) infoPointer;
2851 JackHandle *handle = (JackHandle *) info->apiInfo;
2852 handle->clientOpen = false;
2853 RtApiJack *object = (RtApiJack *) info->object;
2855 // Check current stream state. If stopped, then we'll assume this
2856 // was called as a result of a call to RtApiJack::stopStream (the
2857 // deactivation of a client handle causes this function to be called).
2858 // If not, we'll assume the Jack server is shutting down or some
2859 // other problem occurred and we should close the stream.
2860 if ( object->getStreamState() == RtApi::STREAM_STOPPED ) return;
2863 object->closeStream();
2865 catch (RtError &exception) {
2866 fprintf(stderr, "\nRtApiJack: jackShutdown error (%s)!\n\n", exception.getMessageString());
2870 fprintf(stderr, "\nRtApiJack: the Jack server is shutting down this client ... stream stopped and closed!!!\n\n");
// Xrun hook installed via jack_set_xrun_callback(): simply reports the
// overrun/underrun to stderr.
2873 int jackXrun( void * )
2875 fprintf(stderr, "\nRtApiJack: audio overrun/underrun reported!\n");
// Open one direction (OUTPUT or INPUT) of a JACK stream.  Validates the
// channel count against the server, connects a JACK client (reusing the
// one from an earlier OUTPUT pass for duplex), checks the fixed server
// sample rate and buffer size, configures the conversion/de-interleave
// bookkeeping, allocates the JackHandle, user/device buffers and port
// arrays, installs the process/xrun/shutdown callbacks, and fills the
// interleave-offset tables.  Returns false (after cleanup at the error
// exit below) on any failure.
2879 bool RtApiJack :: probeDeviceOpen(int device, StreamMode mode, int channels,
2880 int sampleRate, RtAudioFormat format,
2881 int *bufferSize, int numberOfBuffers)
2883 // Compare the jack server channels to the requested number of channels.
2884 if ( (mode == OUTPUT && devices_[device].maxOutputChannels < channels ) ||
2885 (mode == INPUT && devices_[device].maxInputChannels < channels ) ) {
2886 sprintf(message_, "RtApiJack: the Jack server does not support requested channels!");
2887 error(RtError::DEBUG_WARNING);
2891 JackHandle *handle = (JackHandle *) stream_.apiHandle;
2893 // Look for jack server and try to become a client (only do once per stream).
2895 jack_client_t *client = 0;
2896 if ( mode == OUTPUT || (mode == INPUT && stream_.mode != OUTPUT) ) {
2897 snprintf(label, 32, "RtApiJack");
2898 if ( (client = jack_client_new( (const char *) label )) == 0) {
2899 sprintf(message_, "RtApiJack: cannot connect to Linux Jack server in probeDeviceOpen() (jack: %s)!",
2901 error(RtError::DEBUG_WARNING);
2906 // The handle must have been created on an earlier pass.
2907 client = handle->client;
2910 // First, check the jack server sample rate.
2912 jack_rate = (int) jack_get_sample_rate(client);
// The server rate is fixed; we cannot resample, so a mismatch fails.
2913 if ( sampleRate != jack_rate ) {
2914 jack_client_close(client);
2915 sprintf( message_, "RtApiJack: the requested sample rate (%d) is different than the JACK server rate (%d).",
2916 sampleRate, jack_rate );
2917 error(RtError::DEBUG_WARNING);
2920 stream_.sampleRate = jack_rate;
2922 // The jack server seems to support just a single floating-point
2923 // data type. Since we already checked it before, just use what we
2925 stream_.deviceFormat[mode] = devices_[device].nativeFormats;
2926 stream_.userFormat = format;
2928 // Jack always uses non-interleaved buffers. We'll need to
2929 // de-interleave if we have more than one channel.
2930 stream_.deInterleave[mode] = false;
2932 stream_.deInterleave[mode] = true;
2934 // Jack always provides host byte-ordered data.
2935 stream_.doByteSwap[mode] = false;
2937 // Get the buffer size. The buffer size and number of buffers
2938 // (periods) is set when the jack server is started.
2939 stream_.bufferSize = (int) jack_get_buffer_size(client);
2940 *bufferSize = stream_.bufferSize;
2942 stream_.nDeviceChannels[mode] = channels;
2943 stream_.nUserChannels[mode] = channels;
// Conversion is needed for a format mismatch or for de-interleaving.
2945 stream_.doConvertBuffer[mode] = false;
2946 if (stream_.userFormat != stream_.deviceFormat[mode])
2947 stream_.doConvertBuffer[mode] = true;
2948 if (stream_.deInterleave[mode])
2949 stream_.doConvertBuffer[mode] = true;
2951 // Allocate our JackHandle structure for the stream.
2952 if ( handle == 0 ) {
2953 handle = (JackHandle *) calloc(1, sizeof(JackHandle));
2954 if ( handle == NULL ) {
2955 sprintf(message_, "RtApiJack: error allocating JackHandle memory (%s).",
2956 devices_[device].name.c_str());
2959 handle->ports[0] = 0;
2960 handle->ports[1] = 0;
2961 if ( pthread_cond_init(&handle->condition, NULL) ) {
2962 sprintf(message_, "RtApiJack: error initializing pthread condition variable!");
2965 stream_.apiHandle = (void *) handle;
2966 handle->client = client;
2967 handle->clientOpen = true;
2970 // Allocate necessary internal buffers.
// The single user buffer must accommodate the larger direction.
2971 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
2974 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
2975 buffer_bytes = stream_.nUserChannels[0];
2977 buffer_bytes = stream_.nUserChannels[1];
2979 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
2980 if (stream_.userBuffer) free(stream_.userBuffer);
2981 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
2982 if (stream_.userBuffer == NULL) {
2983 sprintf(message_, "RtApiJack: error allocating user buffer memory (%s).",
2984 devices_[device].name.c_str());
2989 if ( stream_.doConvertBuffer[mode] ) {
// Reuse an existing (duplex) device buffer when it is already large
// enough for this direction.
2992 bool makeBuffer = true;
2993 if ( mode == OUTPUT )
2994 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2995 else { // mode == INPUT
2996 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
2997 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
2998 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
2999 if ( buffer_bytes < bytes_out ) makeBuffer = false;
3004 buffer_bytes *= *bufferSize;
3005 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
3006 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
3007 if (stream_.deviceBuffer == NULL) {
3008 sprintf(message_, "RtApiJack: error allocating device buffer memory (%s).",
3009 devices_[device].name.c_str());
3015 // Allocate memory for the Jack ports (channels) identifiers.
3016 handle->ports[mode] = (jack_port_t **) malloc (sizeof (jack_port_t *) * channels);
3017 if ( handle->ports[mode] == NULL ) {
3018 sprintf(message_, "RtApiJack: error allocating port handle memory (%s).",
3019 devices_[device].name.c_str());
3023 stream_.device[mode] = device;
3024 stream_.state = STREAM_STOPPED;
3025 stream_.callbackInfo.usingCallback = false;
3026 stream_.callbackInfo.object = (void *) this;
3027 stream_.callbackInfo.apiInfo = (void *) handle;
3029 if ( stream_.mode == OUTPUT && mode == INPUT )
3030 // We had already set up the stream for output.
3031 stream_.mode = DUPLEX;
3033 stream_.mode = mode;
// Register the JACK callbacks once the stream mode is established.
3034 jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo );
3035 jack_set_xrun_callback( handle->client, jackXrun, NULL );
3036 jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo );
3039 // Setup the buffer conversion information structure.
3040 if ( stream_.doConvertBuffer[mode] ) {
3041 if (mode == INPUT) { // convert device to user buffer
3042 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
3043 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
3044 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
3045 stream_.convertInfo[mode].outFormat = stream_.userFormat;
3047 else { // convert user to device buffer
3048 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
3049 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
3050 stream_.convertInfo[mode].inFormat = stream_.userFormat;
3051 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
3054 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
3055 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
3057 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
3059 // Set up the interleave/deinterleave offsets.
// Planar (JACK) data uses bufferSize-strided channel blocks; the
// interleaved side steps by one sample per channel.
3060 if ( mode == INPUT && stream_.deInterleave[1] ) {
3061 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
3062 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
3063 stream_.convertInfo[mode].outOffset.push_back( k );
3064 stream_.convertInfo[mode].inJump = 1;
3067 else if (mode == OUTPUT && stream_.deInterleave[0]) {
3068 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
3069 stream_.convertInfo[mode].inOffset.push_back( k );
3070 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
3071 stream_.convertInfo[mode].outJump = 1;
3075 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
3076 stream_.convertInfo[mode].inOffset.push_back( k );
3077 stream_.convertInfo[mode].outOffset.push_back( k );
// ---- Error exit: unwind everything allocated above. ----
3086 pthread_cond_destroy(&handle->condition);
3087 if ( handle->clientOpen == true )
3088 jack_client_close(handle->client);
3090 if ( handle->ports[0] ) free(handle->ports[0]);
3091 if ( handle->ports[1] ) free(handle->ports[1]);
3094 stream_.apiHandle = 0;
3097 if (stream_.userBuffer) {
3098 free(stream_.userBuffer);
3099 stream_.userBuffer = 0;
3102 error(RtError::DEBUG_WARNING);
// Close the JACK stream.  Also called from the destructor, so a missing
// stream only raises a WARNING.  Deactivates and closes the JACK client
// (if still open), frees the port arrays, condition variable, handle
// and the user/device buffers, then marks the stream uninitialized.
3106 void RtApiJack :: closeStream()
3108 // We don't want an exception to be thrown here because this
3109 // function is called by our class destructor. So, do our own
3111 if ( stream_.mode == UNINITIALIZED ) {
3112 sprintf(message_, "RtApiJack::closeStream(): no open stream to close!");
3113 error(RtError::WARNING);
3117 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3118 if ( handle && handle->clientOpen == true ) {
// Deactivate before closing so the process callback stops first.
3119 if (stream_.state == STREAM_RUNNING)
3120 jack_deactivate(handle->client);
3122 jack_client_close(handle->client);
3126 if ( handle->ports[0] ) free(handle->ports[0]);
3127 if ( handle->ports[1] ) free(handle->ports[1]);
3128 pthread_cond_destroy(&handle->condition);
3130 stream_.apiHandle = 0;
3133 if (stream_.userBuffer) {
3134 free(stream_.userBuffer);
3135 stream_.userBuffer = 0;
3138 if (stream_.deviceBuffer) {
3139 free(stream_.deviceBuffer);
3140 stream_.deviceBuffer = 0;
3143 stream_.mode = UNINITIALIZED;
// Start the JACK stream: register one JACK port per user channel in
// each active direction, activate the client (which starts the process
// callback), then wire our ports to the first N physical ports on the
// server.  Throws RtError::SYSTEM_ERROR on any JACK failure.
// NOTE(review): the arrays returned by jack_get_ports() below appear
// never to be free()d — the JACK API documents them as caller-owned;
// confirm against the full source and release them if so.
3147 void RtApiJack :: startStream()
3150 if (stream_.state == STREAM_RUNNING) return;
3152 MUTEX_LOCK(&stream_.mutex);
3155 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3156 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3157 for ( int i=0; i<stream_.nUserChannels[0]; i++ ) {
3158 snprintf(label, 64, "outport %d", i);
3159 handle->ports[0][i] = jack_port_register(handle->client, (const char *)label,
3160 JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0);
3164 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3165 for ( int i=0; i<stream_.nUserChannels[1]; i++ ) {
3166 snprintf(label, 64, "inport %d", i);
3167 handle->ports[1][i] = jack_port_register(handle->client, (const char *)label,
3168 JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0);
// Activation must precede jack_connect(); only active clients connect.
3172 if (jack_activate(handle->client)) {
3173 sprintf(message_, "RtApiJack: unable to activate JACK client!");
3174 error(RtError::SYSTEM_ERROR);
3179 // Get the list of available ports.
3180 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3181 ports = jack_get_ports(handle->client, NULL, NULL, JackPortIsPhysical|JackPortIsInput);
3182 if ( ports == NULL) {
3183 sprintf(message_, "RtApiJack: error determining available jack input ports!");
3184 error(RtError::SYSTEM_ERROR);
3187 // Now make the port connections. Since RtAudio wasn't designed to
3188 // allow the user to select particular channels of a device, we'll
3189 // just open the first "nChannels" ports.
3190 for ( int i=0; i<stream_.nUserChannels[0]; i++ ) {
3193 result = jack_connect( handle->client, jack_port_name(handle->ports[0][i]), ports[i] );
3196 sprintf(message_, "RtApiJack: error connecting output ports!");
3197 error(RtError::SYSTEM_ERROR);
3203 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3204 ports = jack_get_ports( handle->client, NULL, NULL, JackPortIsPhysical|JackPortIsOutput );
3205 if ( ports == NULL) {
3206 sprintf(message_, "RtApiJack: error determining available jack output ports!");
3207 error(RtError::SYSTEM_ERROR);
3210 // Now make the port connections. See note above.
3211 for ( int i=0; i<stream_.nUserChannels[1]; i++ ) {
3214 result = jack_connect( handle->client, ports[i], jack_port_name(handle->ports[1][i]) );
3217 sprintf(message_, "RtApiJack: error connecting input ports!");
3218 error(RtError::SYSTEM_ERROR);
3224 handle->stopStream = false;
3225 stream_.state = STREAM_RUNNING;
3227 MUTEX_UNLOCK(&stream_.mutex);
// Stop the JACK stream by deactivating the client (which halts the
// process callback).  The state is flipped to STREAM_STOPPED before the
// lock so an in-flight callback can observe it and wind down promptly.
3230 void RtApiJack :: stopStream()
3233 if (stream_.state == STREAM_STOPPED) return;
3235 // Change the state before the lock to improve shutdown response
3236 // when using a callback.
3237 stream_.state = STREAM_STOPPED;
3238 MUTEX_LOCK(&stream_.mutex);
3240 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3241 jack_deactivate(handle->client);
3243 MUTEX_UNLOCK(&stream_.mutex);
3246 void RtApiJack :: abortStream()
// Blocking tick for the non-callback API: waits on the JackHandle
// condition variable until the JACK process callback signals that a
// buffer has been handled.  Raises a WARNING (and must not be used)
// when a user callback function is registered.
3251 void RtApiJack :: tickStream()
3255 if (stream_.state == STREAM_STOPPED) return;
3257 if (stream_.callbackInfo.usingCallback) {
3258 sprintf(message_, "RtApiJack: tickStream() should not be used when a callback function is set!");
3259 error(RtError::WARNING);
3263 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3265 MUTEX_LOCK(&stream_.mutex);
// pthread_cond_wait atomically releases the mutex while waiting and
// re-acquires it before returning.
3267 pthread_cond_wait(&handle->condition, &stream_.mutex);
3269 MUTEX_UNLOCK(&stream_.mutex);
// Per-buffer JACK process handler.  Runs the user callback to produce
// fresh output, then copies between the user buffer and the per-channel
// JACK port buffers in both directions — via the conversion/
// de-interleave path for multi-channel or format-mismatched streams, or
// a direct memcpy for the single-channel case.  For the blocking API it
// signals the condition variable that tickStream() waits on.
3272 void RtApiJack :: callbackEvent( unsigned long nframes )
3276 if (stream_.state == STREAM_STOPPED) return;
3278 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
3279 JackHandle *handle = (JackHandle *) stream_.apiHandle;
3280 if ( info->usingCallback && handle->stopStream ) {
3281 // Check if the stream should be stopped (via the previous user
3282 // callback return value). We stop the stream here, rather than
3283 // after the function call, so that output data can first be
3289 MUTEX_LOCK(&stream_.mutex);
3291 // Invoke user callback first, to get fresh output data.
3292 if ( info->usingCallback ) {
3293 RtAudioCallback callback = (RtAudioCallback) info->callback;
// A non-zero callback return requests stream shutdown on the next cycle.
3294 handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData);
3297 jack_default_audio_sample_t *jackbuffer;
// One channel's worth of bytes per JACK (planar) port buffer.
3298 long bufferBytes = nframes * sizeof(jack_default_audio_sample_t);
3299 if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
3301 if (stream_.doConvertBuffer[0]) {
3302 convertBuffer( stream_.deviceBuffer, stream_.userBuffer, stream_.convertInfo[0] );
3304 for ( int i=0; i<stream_.nDeviceChannels[0]; i++ ) {
3305 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][i],
3306 (jack_nframes_t) nframes);
3307 memcpy(jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes );
3310 else { // single channel only
3311 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][0],
3312 (jack_nframes_t) nframes);
3313 memcpy(jackbuffer, stream_.userBuffer, bufferBytes );
3317 if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
3319 if (stream_.doConvertBuffer[1]) {
3320 for ( int i=0; i<stream_.nDeviceChannels[1]; i++ ) {
3321 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][i],
3322 (jack_nframes_t) nframes);
3323 memcpy(&stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes );
3325 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
3327 else { // single channel only
3328 jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][0],
3329 (jack_nframes_t) nframes);
3330 memcpy(stream_.userBuffer, jackbuffer, bufferBytes );
// Blocking API: wake tickStream() after the cycle completes.
3334 if ( !info->usingCallback )
3335 pthread_cond_signal(&handle->condition);
3337 MUTEX_UNLOCK(&stream_.mutex);
// Register a user audio callback for this stream.  The callback and
// its userData pointer are stored in the stream's CallbackInfo and
// invoked from callbackEvent() on each JACK process cycle.
3340 void RtApiJack :: setStreamCallback(RtAudioCallback callback, void *userData)
// Only one callback may be active per stream; warn and keep the old one.
3344 if ( stream_.callbackInfo.usingCallback ) {
3345 sprintf(message_, "RtApiJack: A callback is already set for this stream!");
3346 error(RtError::WARNING);
3350 stream_.callbackInfo.callback = (void *) callback;
3351 stream_.callbackInfo.userData = userData;
3352 stream_.callbackInfo.usingCallback = true;
// Deregister the user callback and return the stream to the stopped,
// callback-free state.
3355 void RtApiJack :: cancelStreamCallback()
3359 if (stream_.callbackInfo.usingCallback) {
// Halt the stream first so the callback is no longer being invoked.
// NOTE(review): the call made when running is elided from this chunk —
// presumably stopStream(); confirm against the canonical source.
3361 if (stream_.state == STREAM_RUNNING)
3364 MUTEX_LOCK(&stream_.mutex);
3366 stream_.callbackInfo.usingCallback = false;
3367 stream_.callbackInfo.userData = NULL;
3368 stream_.state = STREAM_STOPPED;
3369 stream_.callbackInfo.callback = NULL;
3371 MUTEX_UNLOCK(&stream_.mutex);
3377 #if defined(__LINUX_ALSA__)
3379 #include <alsa/asoundlib.h>
3383 // A structure to hold various information related to the ALSA API
// implementation: the per-direction PCM handles plus duplex state.
// handles[0] = playback PCM, handles[1] = capture PCM (may be 0).
3386 snd_pcm_t *handles[2];
// NOTE(review): the initializer list below references members
// 'synchronized' and 'tempBuffer' whose declarations are elided from
// this chunk; 'tempBuffer' is a duplex scratch copy of the user buffer
// (see tickStream / probeDeviceOpen usage).
3391 :synchronized(false), tempBuffer(0) {}
// Thread entry point for blocking-mode callback operation (defined
// elsewhere with C linkage so pthread_create can use it).
3394 extern "C" void *alsaCallbackHandler(void * ptr);
// Constructor: after device enumeration (the call is elided from this
// chunk — presumably initialize()), fail hard if no ALSA devices exist.
3396 RtApiAlsa :: RtApiAlsa()
3400 if (nDevices_ <= 0) {
3401 sprintf(message_, "RtApiAlsa: no Linux ALSA audio devices found!");
// NO_DEVICES_FOUND is a fatal error for this API instance.
3402 error(RtError::NO_DEVICES_FOUND);
// Destructor: release any open stream before the object goes away.
3406 RtApiAlsa :: ~RtApiAlsa()
3408 if ( stream_.mode != UNINITIALIZED )
// (the cleanup call here — presumably closeStream() — is elided)
// Enumerate ALSA hardware: walk every sound card with snd_card_next(),
// open its control interface, and push one RtApiDevice entry per PCM
// subdevice onto devices_.
3412 void RtApiAlsa :: initialize(void)
3414 int card, subdevice, result;
3418 snd_ctl_card_info_t *info;
// alloca-based: storage lives on this stack frame, no free needed.
3419 snd_ctl_card_info_alloca(&info);
3422 // Count cards and devices
3425 snd_card_next(&card);
// snd_card_next() yields -1 when no further cards exist.
3426 while ( card >= 0 ) {
3427 sprintf(name, "hw:%d", card);
3428 result = snd_ctl_open(&handle, name, 0);
3430 sprintf(message_, "RtApiAlsa: control open (%i): %s.", card, snd_strerror(result));
3431 error(RtError::DEBUG_WARNING);
3434 result = snd_ctl_card_info(handle, info);
3436 sprintf(message_, "RtApiAlsa: control hardware info (%i): %s.", card, snd_strerror(result));
3437 error(RtError::DEBUG_WARNING);
3440 cardId = snd_ctl_card_info_get_id(info);
// -1 starts the subdevice iteration; each call advances to the next PCM.
3443 result = snd_ctl_pcm_next_device(handle, &subdevice);
3445 sprintf(message_, "RtApiAlsa: control next device (%i): %s.", card, snd_strerror(result));
3446 error(RtError::DEBUG_WARNING);
3451 sprintf( name, "hw:%d,%d", card, subdevice );
3452 // If a cardId exists and it contains at least one non-numeric
3453 // character, use it to identify the device. This avoids a bug
3454 // in ALSA such that a numeric string is interpreted as a device
3456 for ( unsigned int i=0; i<strlen(cardId); i++ ) {
3457 if ( !isdigit( cardId[i] ) ) {
3458 sprintf( name, "hw:%s,%d", cardId, subdevice );
3462 device.name.erase();
// NOTE(review): strlen(name)+1 deliberately copies the trailing NUL
// into the std::string; later code retrieves the name via c_str(), so
// the embedded NUL is harmless but worth confirming.
3463 device.name.append( (const char *)name, strlen(name)+1 );
3464 devices_.push_back(device);
3468 snd_ctl_close(handle);
3469 snd_card_next(&card);
// Probe one ALSA device's capabilities: playback channel range, capture
// channel range, duplex support, supported sample rates and native data
// formats.  Results are written into *info; failures are reported as
// DEBUG_WARNINGs and leave info->probed false.
3473 void RtApiAlsa :: probeDeviceInfo(RtApiDevice *info)
3476 int open_mode = SND_PCM_ASYNC;
3479 snd_pcm_stream_t stream;
3480 snd_pcm_info_t *pcminfo;
// alloca-based param structures: stack storage, no explicit free.
3481 snd_pcm_info_alloca(&pcminfo);
3482 snd_pcm_hw_params_t *params;
// NOTE(review): "¶ms" is mojibake for "&params" (HTML entity &para;
// leaked into the source) — this line will not compile as-is; restore
// the ampersand.
3483 snd_pcm_hw_params_alloca(¶ms);
3487 // Open the control interface for this card.
// NOTE(review): strncpy with a count of 64 does not guarantee NUL
// termination if the device name is >= 64 chars — confirm buffer size.
3488 strncpy( name, info->name.c_str(), 64 );
// Device names are "hw:CARD,DEV"; split on the comma.
3489 card = strtok(name, ",");
3490 err = snd_ctl_open(&chandle, card, SND_CTL_NONBLOCK);
3492 sprintf(message_, "RtApiAlsa: control open (%s): %s.", card, snd_strerror(err));
3493 error(RtError::DEBUG_WARNING);
// Continue the strtok() scan started above to get the device index.
3496 unsigned int dev = (unsigned int) atoi( strtok(NULL, ",") );
3498 // First try for playback
3499 stream = SND_PCM_STREAM_PLAYBACK;
3500 snd_pcm_info_set_device(pcminfo, dev);
3501 snd_pcm_info_set_subdevice(pcminfo, 0);
3502 snd_pcm_info_set_stream(pcminfo, stream);
3504 if ((err = snd_ctl_pcm_info(chandle, pcminfo)) < 0) {
// -ENOENT simply means no playback direction; anything else is an error.
3505 if (err == -ENOENT) {
3506 sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle output!", info->name.c_str());
3507 error(RtError::DEBUG_WARNING);
3510 sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) output: %s",
3511 info->name.c_str(), snd_strerror(err));
3512 error(RtError::DEBUG_WARNING);
// Non-blocking open so a busy device reports EBUSY instead of hanging.
3517 err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK );
3520 sprintf(message_, "RtApiAlsa: pcm playback device (%s) is busy: %s.",
3521 info->name.c_str(), snd_strerror(err));
3523 sprintf(message_, "RtApiAlsa: pcm playback open (%s) error: %s.",
3524 info->name.c_str(), snd_strerror(err));
3525 error(RtError::DEBUG_WARNING);
3529 // We have an open device ... allocate the parameter structure.
3530 err = snd_pcm_hw_params_any(handle, params);
3532 snd_pcm_close(handle);
3533 sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.",
3534 info->name.c_str(), snd_strerror(err));
3535 error(RtError::DEBUG_WARNING);
3539 // Get output channel information.
3541 err = snd_pcm_hw_params_get_channels_min(params, &value);
3543 snd_pcm_close(handle);
3544 sprintf(message_, "RtApiAlsa: hardware minimum channel probe error (%s): %s.",
3545 info->name.c_str(), snd_strerror(err));
3546 error(RtError::DEBUG_WARNING);
3549 info->minOutputChannels = value;
3551 err = snd_pcm_hw_params_get_channels_max(params, &value);
3553 snd_pcm_close(handle);
3554 sprintf(message_, "RtApiAlsa: hardware maximum channel probe error (%s): %s.",
3555 info->name.c_str(), snd_strerror(err));
3556 error(RtError::DEBUG_WARNING);
3559 info->maxOutputChannels = value;
3561 snd_pcm_close(handle);
3564 // Now try for capture
3565 stream = SND_PCM_STREAM_CAPTURE;
3566 snd_pcm_info_set_stream(pcminfo, stream);
// Control handle is no longer needed after this query.
3568 err = snd_ctl_pcm_info(chandle, pcminfo);
3569 snd_ctl_close(chandle);
3571 if (err == -ENOENT) {
3572 sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle input!", info->name.c_str());
3573 error(RtError::DEBUG_WARNING);
3576 sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) input: %s",
3577 info->name.c_str(), snd_strerror(err));
3578 error(RtError::DEBUG_WARNING);
// No capture AND no playback => device is unusable; otherwise continue
// to rate/format probing with the playback results we already have.
3580 if (info->maxOutputChannels == 0)
3581 // didn't open for playback either ... device invalid
3583 goto probe_parameters;
3586 err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK);
3589 sprintf(message_, "RtApiAlsa: pcm capture device (%s) is busy: %s.",
3590 info->name.c_str(), snd_strerror(err));
3592 sprintf(message_, "RtApiAlsa: pcm capture open (%s) error: %s.",
3593 info->name.c_str(), snd_strerror(err));
3594 error(RtError::DEBUG_WARNING);
3595 if (info->maxOutputChannels == 0)
3596 // didn't open for playback either ... device invalid
3598 goto probe_parameters;
3601 // We have an open capture device ... allocate the parameter structure.
3602 err = snd_pcm_hw_params_any(handle, params);
3604 snd_pcm_close(handle);
3605 sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.",
3606 info->name.c_str(), snd_strerror(err));
3607 error(RtError::DEBUG_WARNING);
3608 if (info->maxOutputChannels > 0)
3609 goto probe_parameters;
3614 // Get input channel information.
3615 err = snd_pcm_hw_params_get_channels_min(params, &value);
3617 snd_pcm_close(handle);
3618 sprintf(message_, "RtApiAlsa: hardware minimum in channel probe error (%s): %s.",
3619 info->name.c_str(), snd_strerror(err));
3620 error(RtError::DEBUG_WARNING);
3621 if (info->maxOutputChannels > 0)
3622 goto probe_parameters;
3626 info->minInputChannels = value;
3628 err = snd_pcm_hw_params_get_channels_max(params, &value);
3630 snd_pcm_close(handle);
3631 sprintf(message_, "RtApiAlsa: hardware maximum in channel probe error (%s): %s.",
3632 info->name.c_str(), snd_strerror(err));
3633 error(RtError::DEBUG_WARNING);
3634 if (info->maxOutputChannels > 0)
3635 goto probe_parameters;
3639 info->maxInputChannels = value;
3641 snd_pcm_close(handle);
3643 // If device opens for both playback and capture, we determine the channels.
3644 if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
3645 goto probe_parameters;
// Duplex limits are the more restrictive of the two directions.
3647 info->hasDuplexSupport = true;
3648 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
3649 info->maxInputChannels : info->maxOutputChannels;
3650 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
3651 info->minInputChannels : info->minOutputChannels;
3654 // At this point, we just need to figure out the supported data
3655 // formats and sample rates. We'll proceed by opening the device in
3656 // the direction with the maximum number of channels, or playback if
3657 // they are equal. This might limit our sample rate options, but so
3660 if (info->maxOutputChannels >= info->maxInputChannels)
3661 stream = SND_PCM_STREAM_PLAYBACK;
3663 stream = SND_PCM_STREAM_CAPTURE;
// Blocking open this time: we will only query, not stream.
3665 err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode);
3667 sprintf(message_, "RtApiAlsa: pcm (%s) won't reopen during probe: %s.",
3668 info->name.c_str(), snd_strerror(err));
3669 error(RtError::DEBUG_WARNING);
3673 // We have an open device ... allocate the parameter structure.
3674 err = snd_pcm_hw_params_any(handle, params);
3676 snd_pcm_close(handle);
3677 sprintf(message_, "RtApiAlsa: hardware reopen probe error (%s): %s.",
3678 info->name.c_str(), snd_strerror(err));
3679 error(RtError::DEBUG_WARNING);
3683 // Test our discrete set of sample rate values.
3685 info->sampleRates.clear();
3686 for (unsigned int i=0; i<MAX_SAMPLE_RATES; i++) {
3687 if (snd_pcm_hw_params_test_rate(handle, params, SAMPLE_RATES[i], dir) == 0)
3688 info->sampleRates.push_back(SAMPLE_RATES[i]);
3690 if (info->sampleRates.size() == 0) {
3691 snd_pcm_close(handle);
3692 sprintf(message_, "RtApiAlsa: no supported sample rates found for device (%s).",
3693 info->name.c_str());
3694 error(RtError::DEBUG_WARNING);
3698 // Probe the supported data formats ... we don't care about endian-ness just yet
3699 snd_pcm_format_t format;
// nativeFormats is a bitmask; OR in every format the hardware accepts.
3700 info->nativeFormats = 0;
3701 format = SND_PCM_FORMAT_S8;
3702 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3703 info->nativeFormats |= RTAUDIO_SINT8;
3704 format = SND_PCM_FORMAT_S16;
3705 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3706 info->nativeFormats |= RTAUDIO_SINT16;
3707 format = SND_PCM_FORMAT_S24;
3708 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3709 info->nativeFormats |= RTAUDIO_SINT24;
3710 format = SND_PCM_FORMAT_S32;
3711 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3712 info->nativeFormats |= RTAUDIO_SINT32;
3713 format = SND_PCM_FORMAT_FLOAT;
3714 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3715 info->nativeFormats |= RTAUDIO_FLOAT32;
3716 format = SND_PCM_FORMAT_FLOAT64;
3717 if (snd_pcm_hw_params_test_format(handle, params, format) == 0)
3718 info->nativeFormats |= RTAUDIO_FLOAT64;
3720 // Check that we have at least one supported format
3721 if (info->nativeFormats == 0) {
3722 snd_pcm_close(handle);
3723 sprintf(message_, "RtApiAlsa: pcm device (%s) data format not supported by RtAudio.",
3724 info->name.c_str());
3725 error(RtError::DEBUG_WARNING);
3729 // That's all ... close the device and return
3730 snd_pcm_close(handle);
3731 info->probed = true;
// Open and configure one direction (OUTPUT or INPUT) of an ALSA stream:
// access mode, sample format, rate, channel count, period count and
// period size; then the software params, the AlsaHandle bookkeeping,
// conversion flags, internal buffers, and the convertInfo tables.
// Returns true on success; DEBUG_WARNINGs + (elided) false returns on
// failure.  *bufferSize may be adjusted to the nearest supported size.
3735 bool RtApiAlsa :: probeDeviceOpen( int device, StreamMode mode, int channels,
3736 int sampleRate, RtAudioFormat format,
3737 int *bufferSize, int numberOfBuffers )
3739 #if defined(__RTAUDIO_DEBUG__)
3741 snd_output_stdio_attach(&out, stderr, 0);
3744 // I'm not using the "plug" interface ... too much inconsistent behavior.
3745 const char *name = devices_[device].name.c_str();
3747 snd_pcm_stream_t alsa_stream;
3749 alsa_stream = SND_PCM_STREAM_PLAYBACK;
3751 alsa_stream = SND_PCM_STREAM_CAPTURE;
// Blocking open (no SND_PCM_NONBLOCK): this is the real stream handle.
3755 int alsa_open_mode = SND_PCM_ASYNC;
3756 err = snd_pcm_open(&handle, name, alsa_stream, alsa_open_mode);
3758 sprintf(message_,"RtApiAlsa: pcm device (%s) won't open: %s.",
3759 name, snd_strerror(err));
3760 error(RtError::DEBUG_WARNING);
3764 // Fill the parameter structure.
3765 snd_pcm_hw_params_t *hw_params;
3766 snd_pcm_hw_params_alloca(&hw_params);
3767 err = snd_pcm_hw_params_any(handle, hw_params);
3769 snd_pcm_close(handle);
3770 sprintf(message_, "RtApiAlsa: error getting parameter handle (%s): %s.",
3771 name, snd_strerror(err));
3772 error(RtError::DEBUG_WARNING);
3776 #if defined(__RTAUDIO_DEBUG__)
3777 fprintf(stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n");
3778 snd_pcm_hw_params_dump(hw_params, out);
3781 // Set access ... try interleaved access first, then non-interleaved
3782 if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED) ) {
3783 err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED);
3785 else if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED) ) {
3786 err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED);
// Remember that tickStream() must use readn/writen + deinterleaving.
3787 stream_.deInterleave[mode] = true;
3790 snd_pcm_close(handle);
3791 sprintf(message_, "RtApiAlsa: device (%s) access not supported by RtAudio.", name);
3792 error(RtError::DEBUG_WARNING);
3797 snd_pcm_close(handle);
// NOTE(review): "( (%s)" looks like a typo for "(%s)" in this message.
3798 sprintf(message_, "RtApiAlsa: error setting access ( (%s): %s.", name, snd_strerror(err));
3799 error(RtError::DEBUG_WARNING);
3803 // Determine how to set the device format.
3804 stream_.userFormat = format;
3805 snd_pcm_format_t device_format = SND_PCM_FORMAT_UNKNOWN;
3807 if (format == RTAUDIO_SINT8)
3808 device_format = SND_PCM_FORMAT_S8;
3809 else if (format == RTAUDIO_SINT16)
3810 device_format = SND_PCM_FORMAT_S16;
3811 else if (format == RTAUDIO_SINT24)
3812 device_format = SND_PCM_FORMAT_S24;
3813 else if (format == RTAUDIO_SINT32)
3814 device_format = SND_PCM_FORMAT_S32;
3815 else if (format == RTAUDIO_FLOAT32)
3816 device_format = SND_PCM_FORMAT_FLOAT;
3817 else if (format == RTAUDIO_FLOAT64)
3818 device_format = SND_PCM_FORMAT_FLOAT64;
3820 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3821 stream_.deviceFormat[mode] = format;
3825 // The user requested format is not natively supported by the device.
// Fall back through the format list from widest to narrowest; the
// conversion layer (doConvertBuffer) bridges user vs. device format.
3826 device_format = SND_PCM_FORMAT_FLOAT64;
3827 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3828 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
3832 device_format = SND_PCM_FORMAT_FLOAT;
3833 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3834 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
3838 device_format = SND_PCM_FORMAT_S32;
3839 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3840 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
3844 device_format = SND_PCM_FORMAT_S24;
3845 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3846 stream_.deviceFormat[mode] = RTAUDIO_SINT24;
3850 device_format = SND_PCM_FORMAT_S16;
3851 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3852 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
3856 device_format = SND_PCM_FORMAT_S8;
3857 if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) {
3858 stream_.deviceFormat[mode] = RTAUDIO_SINT8;
3862 // If we get here, no supported format was found.
3863 sprintf(message_,"RtApiAlsa: pcm device (%s) data format not supported by RtAudio.", name);
3864 snd_pcm_close(handle);
3865 error(RtError::DEBUG_WARNING);
3869 err = snd_pcm_hw_params_set_format(handle, hw_params, device_format);
3871 snd_pcm_close(handle);
3872 sprintf(message_, "RtApiAlsa: error setting format (%s): %s.",
3873 name, snd_strerror(err));
3874 error(RtError::DEBUG_WARNING);
3878 // Determine whether byte-swaping is necessary.
3879 stream_.doByteSwap[mode] = false;
// 8-bit data has no byte order; skip the query for S8.
3880 if (device_format != SND_PCM_FORMAT_S8) {
// Returns 0 if the format is big-endian on a little-endian CPU (or
// vice versa) => samples must be byte-swapped in the convert path.
3881 err = snd_pcm_format_cpu_endian(device_format);
3883 stream_.doByteSwap[mode] = true;
3885 snd_pcm_close(handle);
3886 sprintf(message_, "RtApiAlsa: error getting format endian-ness (%s): %s.",
3887 name, snd_strerror(err));
3888 error(RtError::DEBUG_WARNING);
3893 // Set the sample rate.
3894 err = snd_pcm_hw_params_set_rate(handle, hw_params, (unsigned int)sampleRate, 0);
3896 snd_pcm_close(handle);
3897 sprintf(message_, "RtApiAlsa: error setting sample rate (%d) on device (%s): %s.",
3898 sampleRate, name, snd_strerror(err));
3899 error(RtError::DEBUG_WARNING);
3903 // Determine the number of channels for this device. We support a possible
3904 // minimum device channel number > than the value requested by the user.
3905 stream_.nUserChannels[mode] = channels;
3907 err = snd_pcm_hw_params_get_channels_max(hw_params, &value);
3908 int device_channels = value;
3909 if (err < 0 || device_channels < channels) {
3910 snd_pcm_close(handle);
3911 sprintf(message_, "RtApiAlsa: channels (%d) not supported by device (%s).",
3913 error(RtError::DEBUG_WARNING);
3917 err = snd_pcm_hw_params_get_channels_min(hw_params, &value);
3919 snd_pcm_close(handle);
3920 sprintf(message_, "RtApiAlsa: error getting min channels count on device (%s).", name);
3921 error(RtError::DEBUG_WARNING);
// Open with at least the hardware minimum; extra channels beyond the
// user's request are filled by the conversion layer.
3924 device_channels = value;
3925 if (device_channels < channels) device_channels = channels;
3926 stream_.nDeviceChannels[mode] = device_channels;
3928 // Set the device channels.
3929 err = snd_pcm_hw_params_set_channels(handle, hw_params, device_channels);
3931 snd_pcm_close(handle);
3932 sprintf(message_, "RtApiAlsa: error setting channels (%d) on device (%s): %s.",
3933 device_channels, name, snd_strerror(err));
3934 error(RtError::DEBUG_WARNING);
3938 // Set the buffer number, which in ALSA is referred to as the "period".
3940 unsigned int periods = numberOfBuffers;
3941 // Even though the hardware might allow 1 buffer, it won't work reliably.
3942 if (periods < 2) periods = 2;
// "_near" variants accept the closest supported value and write it back.
3943 err = snd_pcm_hw_params_set_periods_near(handle, hw_params, &periods, &dir);
3945 snd_pcm_close(handle);
3946 sprintf(message_, "RtApiAlsa: error setting periods (%s): %s.",
3947 name, snd_strerror(err));
3948 error(RtError::DEBUG_WARNING);
3952 // Set the buffer (or period) size.
3953 snd_pcm_uframes_t period_size = *bufferSize;
3954 err = snd_pcm_hw_params_set_period_size_near(handle, hw_params, &period_size, &dir);
3956 snd_pcm_close(handle);
3957 sprintf(message_, "RtApiAlsa: error setting period size (%s): %s.",
3958 name, snd_strerror(err));
3959 error(RtError::DEBUG_WARNING);
// Report the (possibly adjusted) size back to the caller.
3962 *bufferSize = period_size;
3964 // If attempting to setup a duplex stream, the bufferSize parameter
3965 // MUST be the same in both directions!
3966 if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) {
3967 sprintf( message_, "RtApiAlsa: error setting buffer size for duplex stream on device (%s).",
3969 error(RtError::DEBUG_WARNING);
3973 stream_.bufferSize = *bufferSize;
3975 // Install the hardware configuration
3976 err = snd_pcm_hw_params(handle, hw_params);
3978 snd_pcm_close(handle);
3979 sprintf(message_, "RtApiAlsa: error installing hardware configuration (%s): %s.",
3980 name, snd_strerror(err));
3981 error(RtError::DEBUG_WARNING);
3985 #if defined(__RTAUDIO_DEBUG__)
3986 fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n");
3987 snd_pcm_hw_params_dump(hw_params, out);
3990 // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns.
3991 snd_pcm_sw_params_t *sw_params = NULL;
3992 snd_pcm_sw_params_alloca( &sw_params );
3993 snd_pcm_sw_params_current( handle, sw_params );
3994 snd_pcm_sw_params_set_start_threshold( handle, sw_params, *bufferSize );
// A huge stop threshold keeps the device running through underruns.
3995 snd_pcm_sw_params_set_stop_threshold( handle, sw_params, 0x7fffffff );
3996 snd_pcm_sw_params_set_silence_threshold( handle, sw_params, 0 );
3997 snd_pcm_sw_params_set_silence_size( handle, sw_params, INT_MAX );
3998 err = snd_pcm_sw_params( handle, sw_params );
4000 snd_pcm_close(handle);
// NOTE(review): message prefix "RtAudio:" differs from the "RtApiAlsa:"
// convention used by every other message in this class.
4001 sprintf(message_, "RtAudio: ALSA error installing software configuration (%s): %s.",
4002 name, snd_strerror(err));
4003 error(RtError::DEBUG_WARNING);
4007 #if defined(__RTAUDIO_DEBUG__)
4008 fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n");
4009 snd_pcm_sw_params_dump(sw_params, out);
4012 // Allocate the ApiHandle if necessary and then save.
4013 AlsaHandle *apiInfo = 0;
4014 if ( stream_.apiHandle == 0 ) {
4015 apiInfo = (AlsaHandle *) new AlsaHandle;
4016 stream_.apiHandle = (void *) apiInfo;
4017 apiInfo->handles[0] = 0;
4018 apiInfo->handles[1] = 0;
4021 apiInfo = (AlsaHandle *) stream_.apiHandle;
4023 apiInfo->handles[mode] = handle;
4025 // Set flags for buffer conversion
4026 stream_.doConvertBuffer[mode] = false;
4027 if (stream_.userFormat != stream_.deviceFormat[mode])
4028 stream_.doConvertBuffer[mode] = true;
4029 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
4030 stream_.doConvertBuffer[mode] = true;
4031 if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
4032 stream_.doConvertBuffer[mode] = true;
4034 // Allocate necessary internal buffers
// Size the user buffer for the larger of the two channel counts so one
// buffer serves both directions of a duplex stream.
4035 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
4038 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
4039 buffer_bytes = stream_.nUserChannels[0];
4041 buffer_bytes = stream_.nUserChannels[1];
4043 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
4044 if (stream_.userBuffer) free(stream_.userBuffer);
4045 if (apiInfo->tempBuffer) free(apiInfo->tempBuffer);
4046 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
// tempBuffer holds a duplex-mode copy of the output data (see tickStream).
4047 apiInfo->tempBuffer = (char *) calloc(buffer_bytes, 1);
4048 if ( stream_.userBuffer == NULL || apiInfo->tempBuffer == NULL ) {
4049 sprintf(message_, "RtApiAlsa: error allocating user buffer memory (%s).",
4050 devices_[device].name.c_str());
4055 if ( stream_.doConvertBuffer[mode] ) {
4058 bool makeBuffer = true;
4059 if ( mode == OUTPUT )
4060 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
4061 else { // mode == INPUT
4062 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
// Reuse the existing (larger) output-side device buffer if it fits.
4063 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
4064 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
4065 if ( buffer_bytes < bytes_out ) makeBuffer = false;
4070 buffer_bytes *= *bufferSize;
4071 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
4072 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
4073 if (stream_.deviceBuffer == NULL) {
4074 sprintf(message_, "RtApiAlsa: error allocating device buffer memory (%s).",
4075 devices_[device].name.c_str());
4081 stream_.device[mode] = device;
4082 stream_.state = STREAM_STOPPED;
4083 if ( stream_.mode == OUTPUT && mode == INPUT ) {
4084 // We had already set up an output stream.
4085 stream_.mode = DUPLEX;
4086 // Link the streams if possible.
// Linked PCMs start/stop together in hardware; 'synchronized' records
// whether the link succeeded so start/stop can skip the second handle.
4087 apiInfo->synchronized = false;
4088 if (snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0)
4089 apiInfo->synchronized = true;
4091 sprintf(message_, "RtApiAlsa: unable to synchronize input and output streams (%s).",
4092 devices_[device].name.c_str());
4093 error(RtError::DEBUG_WARNING);
4097 stream_.mode = mode;
4098 stream_.nBuffers = periods;
4099 stream_.sampleRate = sampleRate;
4101 // Setup the buffer conversion information structure.
4102 if ( stream_.doConvertBuffer[mode] ) {
4103 if (mode == INPUT) { // convert device to user buffer
4104 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
4105 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
4106 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
4107 stream_.convertInfo[mode].outFormat = stream_.userFormat;
4109 else { // convert user to device buffer
4110 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
4111 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
4112 stream_.convertInfo[mode].inFormat = stream_.userFormat;
4113 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
// Only convert as many channels as both sides actually have.
4116 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
4117 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
4119 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
4121 // Set up the interleave/deinterleave offsets.
4122 if ( mode == INPUT && stream_.deInterleave[1] ) {
4123 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
4124 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
4125 stream_.convertInfo[mode].outOffset.push_back( k );
4126 stream_.convertInfo[mode].inJump = 1;
4129 else if (mode == OUTPUT && stream_.deInterleave[0]) {
4130 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
4131 stream_.convertInfo[mode].inOffset.push_back( k );
4132 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
4133 stream_.convertInfo[mode].outJump = 1;
4137 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
4138 stream_.convertInfo[mode].inOffset.push_back( k );
4139 stream_.convertInfo[mode].outOffset.push_back( k );
// Error-exit cleanup (label elided): release everything allocated above
// so a failed open leaves the object in its pre-call state.
4148 if (apiInfo->handles[0])
4149 snd_pcm_close(apiInfo->handles[0]);
4150 if (apiInfo->handles[1])
4151 snd_pcm_close(apiInfo->handles[1]);
4152 if ( apiInfo->tempBuffer ) free(apiInfo->tempBuffer);
4154 stream_.apiHandle = 0;
4157 if (stream_.userBuffer) {
4158 free(stream_.userBuffer);
4159 stream_.userBuffer = 0;
4162 error(RtError::DEBUG_WARNING);
// Tear down the open stream: stop the hardware, join the callback
// thread, close the PCM handles and free all buffers.  Called from the
// destructor, so it must never throw.
4166 void RtApiAlsa :: closeStream()
4168 // We don't want an exception to be thrown here because this
4169 // function is called by our class destructor. So, do our own
4171 if ( stream_.mode == UNINITIALIZED ) {
4172 sprintf(message_, "RtApiAlsa::closeStream(): no open stream to close!");
4173 error(RtError::WARNING);
4177 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
// Drop (discard) pending audio rather than draining: fastest shutdown.
4178 if (stream_.state == STREAM_RUNNING) {
4179 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
4180 snd_pcm_drop(apiInfo->handles[0]);
4181 if (stream_.mode == INPUT || stream_.mode == DUPLEX)
4182 snd_pcm_drop(apiInfo->handles[1]);
4183 stream_.state = STREAM_STOPPED;
// Clearing usingCallback makes the callback thread's loop exit, then
// join it before freeing the resources it touches.
4186 if (stream_.callbackInfo.usingCallback) {
4187 stream_.callbackInfo.usingCallback = false;
4188 pthread_join(stream_.callbackInfo.thread, NULL);
4192 if (apiInfo->handles[0]) snd_pcm_close(apiInfo->handles[0]);
4193 if (apiInfo->handles[1]) snd_pcm_close(apiInfo->handles[1]);
4194 free(apiInfo->tempBuffer);
4196 stream_.apiHandle = 0;
4199 if (stream_.userBuffer) {
4200 free(stream_.userBuffer);
4201 stream_.userBuffer = 0;
4204 if (stream_.deviceBuffer) {
4205 free(stream_.deviceBuffer);
4206 stream_.deviceBuffer = 0;
// Mark the stream closed so a second closeStream() just warns.
4209 stream_.mode = UNINITIALIZED;
// Transition the stream to STREAM_RUNNING, preparing each PCM handle
// that is not already in the PREPARED state.
4212 void RtApiAlsa :: startStream()
4214 // This method calls snd_pcm_prepare if the device isn't already in that state.
4217 if (stream_.state == STREAM_RUNNING) return;
4219 MUTEX_LOCK(&stream_.mutex);
4222 snd_pcm_state_t state;
4223 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
4224 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
4225 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
4226 state = snd_pcm_state(handle[0]);
4227 if (state != SND_PCM_STATE_PREPARED) {
4228 err = snd_pcm_prepare(handle[0]);
4230 sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
4231 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
// Unlock before error() since DRIVER_ERROR throws.
4232 MUTEX_UNLOCK(&stream_.mutex);
4233 error(RtError::DRIVER_ERROR);
// Linked (synchronized) duplex handles are prepared together by ALSA,
// so the capture side is only prepared when the link failed.
4238 if ( (stream_.mode == INPUT || stream_.mode == DUPLEX) && !apiInfo->synchronized ) {
4239 state = snd_pcm_state(handle[1]);
4240 if (state != SND_PCM_STATE_PREPARED) {
4241 err = snd_pcm_prepare(handle[1]);
4243 sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.",
4244 devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
4245 MUTEX_UNLOCK(&stream_.mutex);
4246 error(RtError::DRIVER_ERROR);
4250 stream_.state = STREAM_RUNNING;
4252 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream gracefully: snd_pcm_drain() plays out any buffered
// output before the device halts (contrast abortStream, which drops).
4255 void RtApiAlsa :: stopStream()
4258 if (stream_.state == STREAM_STOPPED) return;
4260 // Change the state before the lock to improve shutdown response
4261 // when using a callback.
4262 stream_.state = STREAM_STOPPED;
4263 MUTEX_LOCK(&stream_.mutex);
4266 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
4267 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
4268 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
4269 err = snd_pcm_drain(handle[0]);
4271 sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
4272 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
// Unlock before error() since DRIVER_ERROR throws.
4273 MUTEX_UNLOCK(&stream_.mutex);
4274 error(RtError::DRIVER_ERROR);
// Linked duplex handles stop together; only touch handle[1] if unlinked.
4278 if ( (stream_.mode == INPUT || stream_.mode == DUPLEX) && !apiInfo->synchronized ) {
4279 err = snd_pcm_drain(handle[1]);
4281 sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
4282 devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
4283 MUTEX_UNLOCK(&stream_.mutex);
4284 error(RtError::DRIVER_ERROR);
4288 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream immediately: snd_pcm_drop() discards any pending
// frames instead of playing them out (contrast stopStream).
4291 void RtApiAlsa :: abortStream()
4294 if (stream_.state == STREAM_STOPPED) return;
4296 // Change the state before the lock to improve shutdown response
4297 // when using a callback.
4298 stream_.state = STREAM_STOPPED;
4299 MUTEX_LOCK(&stream_.mutex);
4302 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
4303 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
4304 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
4305 err = snd_pcm_drop(handle[0]);
// NOTE(review): message says "draining" but this path calls
// snd_pcm_drop() — "dropping" would be accurate.
4307 sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
4308 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
// Unlock before error() since DRIVER_ERROR throws.
4309 MUTEX_UNLOCK(&stream_.mutex);
4310 error(RtError::DRIVER_ERROR);
// Linked duplex handles stop together; only touch handle[1] if unlinked.
4314 if ( (stream_.mode == INPUT || stream_.mode == DUPLEX) && !apiInfo->synchronized ) {
4315 err = snd_pcm_drop(handle[1]);
4317 sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.",
4318 devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
4319 MUTEX_UNLOCK(&stream_.mutex);
4320 error(RtError::DRIVER_ERROR);
4324 MUTEX_UNLOCK(&stream_.mutex);
// Return the number of frames a blocking tickStream() call would have
// to wait for (0 when a full buffer can be transferred immediately).
4327 int RtApiAlsa :: streamWillBlock()
4330 if (stream_.state == STREAM_STOPPED) return 0;
4332 MUTEX_LOCK(&stream_.mutex);
4334 int err = 0, frames = 0;
4335 AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
4336 snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles;
4337 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
// snd_pcm_avail_update() returns available frames or a negative errno.
4338 err = snd_pcm_avail_update(handle[0]);
4340 sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.",
4341 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
// Unlock before error() since DRIVER_ERROR throws.
4342 MUTEX_UNLOCK(&stream_.mutex);
4343 error(RtError::DRIVER_ERROR);
4349 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
4350 err = snd_pcm_avail_update(handle[1]);
4352 sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.",
4353 devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
4354 MUTEX_UNLOCK(&stream_.mutex);
4355 error(RtError::DRIVER_ERROR);
// In duplex mode the more constrained direction governs blocking.
4357 if (frames > err) frames = err;
// Shortfall = frames still needed to transfer one full buffer.
4360 frames = stream_.bufferSize - frames;
4361 if (frames < 0) frames = 0;
4363 MUTEX_UNLOCK(&stream_.mutex);
// Transfer one buffer of audio, blocking as needed.  When a user
// callback is registered it is invoked first to produce/accept data;
// in duplex mode the capture read happens before the playback write.
// Handles -EPIPE xruns by warning and re-preparing the pcm handle.
// NOTE(review): this listing has gaps (the embedded line numbers jump),
// so some braces/else branches of this function are not visible here.
4367 void RtApiAlsa :: tickStream()
// When stopped, a callback-driven stream simply idles.
4372 if (stream_.state == STREAM_STOPPED) {
4373 if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
4376 else if (stream_.callbackInfo.usingCallback) {
4377 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
// A nonzero callback return requests that the stream be stopped below.
4378 stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
4381 MUTEX_LOCK(&stream_.mutex);
4383 // The state might change while waiting on a mutex.
4384 if (stream_.state == STREAM_STOPPED)
4390 AlsaHandle *apiInfo;
4392 RtAudioFormat format;
4393 apiInfo = (AlsaHandle *) stream_.apiHandle;
4394 handle = (snd_pcm_t **) apiInfo->handles;
4396 if ( stream_.mode == DUPLEX ) {
4397 // In duplex mode, we need to make the snd_pcm_read call before
4398 // the snd_pcm_write call in order to avoid under/over runs. So,
4399 // copy the userData to our temporary buffer.
4401 bufferBytes = stream_.bufferSize * stream_.nUserChannels[0] * formatBytes(stream_.userFormat);
4402 memcpy( apiInfo->tempBuffer, stream_.userBuffer, bufferBytes );
// ---- Capture ----
4405 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
4407 // Setup parameters.
// Read into deviceBuffer when a later format/channel conversion is
// needed; otherwise read straight into the user buffer.
4408 if (stream_.doConvertBuffer[1]) {
4409 buffer = stream_.deviceBuffer;
4410 channels = stream_.nDeviceChannels[1];
4411 format = stream_.deviceFormat[1];
4414 buffer = stream_.userBuffer;
4415 channels = stream_.nUserChannels[1];
4416 format = stream_.userFormat;
4419 // Read samples from device in interleaved/non-interleaved format.
4420 if (stream_.deInterleave[1]) {
4421 void *bufs[channels];
4422 size_t offset = stream_.bufferSize * formatBytes(format);
4423 for (int i=0; i<channels; i++)
4424 bufs[i] = (void *) (buffer + (i * offset));
4425 err = snd_pcm_readn(handle[1], bufs, stream_.bufferSize);
4428 err = snd_pcm_readi(handle[1], buffer, stream_.bufferSize);
// A short read means either an error or an xrun.
4430 if (err < stream_.bufferSize) {
4431 // Either an error or underrun occured.
// -EPIPE on capture is an overrun: warn and re-prepare the handle
// so the stream can continue rather than aborting.
4432 if (err == -EPIPE) {
4433 snd_pcm_state_t state = snd_pcm_state(handle[1]);
4434 if (state == SND_PCM_STATE_XRUN) {
4435 sprintf(message_, "RtApiAlsa: overrun detected.");
4436 error(RtError::WARNING);
4437 err = snd_pcm_prepare(handle[1]);
4439 sprintf(message_, "RtApiAlsa: error preparing handle after overrun: %s.",
4441 MUTEX_UNLOCK(&stream_.mutex);
4442 error(RtError::DRIVER_ERROR);
4446 sprintf(message_, "RtApiAlsa: tickStream() error, current state is %s.",
4447 snd_pcm_state_name(state));
4448 MUTEX_UNLOCK(&stream_.mutex);
4449 error(RtError::DRIVER_ERROR);
4454 sprintf(message_, "RtApiAlsa: audio read error for device (%s): %s.",
4455 devices_[stream_.device[1]].name.c_str(), snd_strerror(err));
4456 MUTEX_UNLOCK(&stream_.mutex);
4457 error(RtError::DRIVER_ERROR);
4461 // Do byte swapping if necessary.
4462 if (stream_.doByteSwap[1])
4463 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
4465 // Do buffer conversion if necessary.
4466 if (stream_.doConvertBuffer[1])
4467 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
// ---- Playback ----
4470 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
4472 // Setup parameters and do buffer conversion if necessary.
4473 if (stream_.doConvertBuffer[0]) {
4474 buffer = stream_.deviceBuffer;
// Duplex output converts from the tempBuffer snapshot taken above,
// since userBuffer was just overwritten by the capture read.
4475 if ( stream_.mode == DUPLEX )
4476 convertBuffer( buffer, apiInfo->tempBuffer, stream_.convertInfo[0] );
4478 convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] );
4479 channels = stream_.nDeviceChannels[0];
4480 format = stream_.deviceFormat[0];
4483 if ( stream_.mode == DUPLEX )
4484 buffer = apiInfo->tempBuffer;
4486 buffer = stream_.userBuffer;
4487 channels = stream_.nUserChannels[0];
4488 format = stream_.userFormat;
4491 // Do byte swapping if necessary.
4492 if (stream_.doByteSwap[0])
4493 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
4495 // Write samples to device in interleaved/non-interleaved format.
4496 if (stream_.deInterleave[0]) {
4497 void *bufs[channels];
4498 size_t offset = stream_.bufferSize * formatBytes(format);
4499 for (int i=0; i<channels; i++)
4500 bufs[i] = (void *) (buffer + (i * offset));
4501 err = snd_pcm_writen(handle[0], bufs, stream_.bufferSize);
4504 err = snd_pcm_writei(handle[0], buffer, stream_.bufferSize);
// A short write means either an error or an xrun.
4506 if (err < stream_.bufferSize) {
4507 // Either an error or underrun occured.
// -EPIPE on playback is an underrun: warn, re-prepare, continue.
4508 if (err == -EPIPE) {
4509 snd_pcm_state_t state = snd_pcm_state(handle[0]);
4510 if (state == SND_PCM_STATE_XRUN) {
4511 sprintf(message_, "RtApiAlsa: underrun detected.");
4512 error(RtError::WARNING);
4513 err = snd_pcm_prepare(handle[0]);
4515 sprintf(message_, "RtApiAlsa: error preparing handle after underrun: %s.",
4517 MUTEX_UNLOCK(&stream_.mutex);
4518 error(RtError::DRIVER_ERROR);
4522 sprintf(message_, "RtApiAlsa: tickStream() error, current state is %s.",
4523 snd_pcm_state_name(state));
4524 MUTEX_UNLOCK(&stream_.mutex);
4525 error(RtError::DRIVER_ERROR);
4530 sprintf(message_, "RtApiAlsa: audio write error for device (%s): %s.",
4531 devices_[stream_.device[0]].name.c_str(), snd_strerror(err));
4532 MUTEX_UNLOCK(&stream_.mutex);
4533 error(RtError::DRIVER_ERROR);
4539 MUTEX_UNLOCK(&stream_.mutex);
// Honor a stop request returned by the user callback earlier.
4541 if (stream_.callbackInfo.usingCallback && stopStream)
// Register a user callback and spawn the joinable callback thread.
// SCHED_RR priority is requested but only takes effect when the
// process has sufficient privileges (root/suid).
// NOTE(review): this listing has gaps; some lines are not visible.
4545 void RtApiAlsa :: setStreamCallback(RtAudioCallback callback, void *userData)
4549 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
// Only one callback may be registered per stream.
4550 if ( info->usingCallback ) {
4551 sprintf(message_, "RtApiAlsa: A callback is already set for this stream!");
4552 error(RtError::WARNING);
4556 info->callback = (void *) callback;
4557 info->userData = userData;
4558 info->usingCallback = true;
4559 info->object = (void *) this;
4561 // Set the thread attributes for joinable and realtime scheduling
4562 // priority. The higher priority will only take affect if the
4563 // program is run as root or suid.
4564 pthread_attr_t attr;
4565 pthread_attr_init(&attr);
4566 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
4567 pthread_attr_setschedpolicy(&attr, SCHED_RR);
4569 int err = pthread_create(&info->thread, &attr, alsaCallbackHandler, &stream_.callbackInfo);
4570 pthread_attr_destroy(&attr);
// On thread-creation failure, clear the flag so the stream reverts
// to blocking (tickStream) operation before reporting the error.
4572 info->usingCallback = false;
4573 sprintf(message_, "RtApiAlsa: error starting callback thread!");
4574 error(RtError::THREAD_ERROR);
// Tear down the callback thread: clear the usingCallback flag (which
// makes alsaCallbackHandler's loop exit), join the thread, and reset
// the callback bookkeeping.  Stops the stream first if it is running.
4578 void RtApiAlsa :: cancelStreamCallback()
4582 if (stream_.callbackInfo.usingCallback) {
4584 if (stream_.state == STREAM_RUNNING)
4587 MUTEX_LOCK(&stream_.mutex);
// Flag first, then join: the handler thread polls usingCallback.
4589 stream_.callbackInfo.usingCallback = false;
4590 pthread_join(stream_.callbackInfo.thread, NULL);
4591 stream_.callbackInfo.thread = 0;
4592 stream_.callbackInfo.callback = NULL;
4593 stream_.callbackInfo.userData = NULL;
4595 MUTEX_UNLOCK(&stream_.mutex);
// Callback thread entry point: ticks the stream repeatedly until
// usingCallback is cleared (see cancelStreamCallback).  An RtError
// from tickStream terminates only this thread, not the process.
4599 extern "C" void *alsaCallbackHandler(void *ptr)
4601 CallbackInfo *info = (CallbackInfo *) ptr;
4602 RtApiAlsa *object = (RtApiAlsa *) info->object;
4603 bool *usingCallback = &info->usingCallback;
4605 while ( *usingCallback ) {
4607 object->tickStream();
4609 catch (RtError &exception) {
4610 fprintf(stderr, "\nRtApiAlsa: callback thread error (%s) ... closing thread.\n\n",
4611 exception.getMessageString());
4619 //******************** End of __LINUX_ALSA__ *********************//
4622 #if defined(__WINDOWS_ASIO__) // ASIO API on Windows
4624 // The ASIO API is designed around a callback scheme, so this
4625 // implementation is similar to that used for OS-X CoreAudio and Linux
4626 // Jack. The primary constraint with ASIO is that it only allows
4627 // access to a single driver at a time. Thus, it is not possible to
4628 // have more than one simultaneous RtAudio stream.
4630 // This implementation also requires a number of external ASIO files
4631 // and a few global variables. The ASIO callback scheme does not
4632 // allow for the passing of user data, so we must create a global
4633 // pointer to our callbackInfo structure.
4635 // On unix systems, we make use of a pthread condition variable.
4636 // Since there is no equivalent in Windows, I hacked something based
4637 // on information found in
4638 // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html.
4640 #include "asio/asiosys.h"
4641 #include "asio/asio.h"
4642 #include "asio/asiodrivers.h"
// File-scope ASIO state.  The ASIO callback API carries no user-data
// pointer, so a global CallbackInfo pointer (asioCallbackInfo) is the
// only way to reach the stream from the callbacks; ASIO also permits
// just one loaded driver at a time, hence the single 'drivers' object.
4645 AsioDrivers drivers;
4646 ASIOCallbacks asioCallbacks;
4647 ASIODriverInfo driverInfo;
4648 CallbackInfo *asioCallbackInfo;
4652 ASIOBufferInfo *bufferInfos;
4656 :stopStream(false), bufferInfos(0) {}
// Map an ASIOError code to a human-readable message via a linear scan
// of a static table; unrecognized codes yield "Unknown error.".
// NOTE(review): this listing has gaps; the Messages struct definition
// and some braces are not visible here.
4659 static const char*GetAsioErrorString(ASIOError result)
4666 static Messages m[] =
4668 { ASE_NotPresent, "Hardware input or output is not present or available." },
4669 { ASE_HWMalfunction, "Hardware is malfunctioning." },
4670 { ASE_InvalidParameter, "Invalid input parameter." },
4671 { ASE_InvalidMode, "Invalid mode." },
4672 { ASE_SPNotAdvancing, "Sample position not advancing." },
4673 { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },
4674 { ASE_NoMemory, "Not enough memory to complete the request." }
4677 for (int i = 0; i < sizeof(m)/sizeof(m[0]); ++i)
4679 if (m[i].value == result) return m[i].message;
4681 return "Unknown error.";
// Constructor: initialize() (called between these lines per the elided
// listing — TODO confirm) enumerates drivers; throw NO_DEVICES_FOUND
// when none are present.
4684 RtApiAsio :: RtApiAsio()
4686 this->coInitialized = false;
4689 if (nDevices_ <= 0) {
4690 sprintf(message_, "RtApiAsio: no Windows ASIO audio drivers found!");
4691 error(RtError::NO_DEVICES_FOUND);
// Destructor: close any open stream; balance the CoInitialize() call
// made in initialize() when it succeeded (the CoUninitialize call is
// on a line elided from this listing).
4695 RtApiAsio :: ~RtApiAsio()
4697 if ( stream_.mode != UNINITIALIZED ) closeStream();
4698 if ( coInitialized )
// One-time setup: initialize COM (single-threaded apartment required
// by ASIO), enumerate driver names into devices_, and prime the
// driverInfo structure used by later ASIOInit() calls.
// NOTE(review): this listing has gaps; some declarations/braces are
// not visible here.
4705 void RtApiAsio :: initialize(void)
4708 // ASIO cannot run on a multi-threaded appartment. You can call CoInitialize beforehand, but it must be
4709 // for appartment threading (in which case, CoInitilialize will return S_FALSE here).
4710 coInitialized = false;
4711 HRESULT hr = CoInitialize(NULL);
4714 sprintf(message_,"RtApiAsio: ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)");
4716 coInitialized = true;
4718 nDevices_ = drivers.asioGetNumDev();
4719 if (nDevices_ <= 0) return;
4721 // Create device structures and write device driver names to each.
4724 for (int i=0; i<nDevices_; i++) {
4725 if ( drivers.asioGetDriverName( i, name, 128 ) == 0 ) {
4726 device.name.erase();
4727 device.name.append( (const char *)name, strlen(name)+1);
4728 devices_.push_back(device);
4731 sprintf(message_, "RtApiAsio: error getting driver name for device index %d!", i);
4732 error(RtError::WARNING);
// Only successfully-named drivers end up in devices_.
4736 nDevices_ = (int) devices_.size();
4738 drivers.removeCurrentDriver();
4739 driverInfo.asioVersion = 2;
4740 // See note in DirectSound implementation about GetDesktopWindow().
4741 driverInfo.sysRef = GetForegroundWindow();
// Probe one driver's capabilities: channel counts, supported sample
// rates, and native data format.  Loads the driver, queries it, fills
// *info, then unloads it.  Failures are DEBUG_WARNINGs (probe soft-
// fails); the driver is removed before each early error path.
// NOTE(review): this listing has gaps; some returns/braces are not
// visible here.
4744 void RtApiAsio :: probeDeviceInfo(RtApiDevice *info)
4746 // Don't probe if a stream is already open.
4747 if ( stream_.mode != UNINITIALIZED ) {
4748 sprintf(message_, "RtApiAsio: unable to probe driver while a stream is open.");
4749 error(RtError::DEBUG_WARNING);
4753 if ( !drivers.loadDriver( (char *)info->name.c_str() ) ) {
4754 sprintf(message_, "RtApiAsio: error loading driver (%s).", info->name.c_str());
4755 error(RtError::DEBUG_WARNING);
4759 ASIOError result = ASIOInit( &driverInfo );
4760 if ( result != ASE_OK ) {
4761 sprintf(message_, "RtApiAsio: error (%s) initializing driver (%s).",
4762 GetAsioErrorString(result), info->name.c_str());
4763 error(RtError::DEBUG_WARNING);
4767 // Determine the device channel information.
4768 long inputChannels, outputChannels;
4769 result = ASIOGetChannels( &inputChannels, &outputChannels );
4770 if ( result != ASE_OK ) {
4771 drivers.removeCurrentDriver();
4772 sprintf(message_, "RtApiAsio: error (%s) getting input/output channel count (%s).",
4773 GetAsioErrorString(result),
4774 info->name.c_str());
4775 error(RtError::DEBUG_WARNING);
4779 info->maxOutputChannels = outputChannels;
4780 if ( outputChannels > 0 ) info->minOutputChannels = 1;
4782 info->maxInputChannels = inputChannels;
4783 if ( inputChannels > 0 ) info->minInputChannels = 1;
4785 // If device opens for both playback and capture, we determine the channels.
4786 if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) {
4787 info->hasDuplexSupport = true;
// Duplex limits are the more restrictive of the two directions.
4788 info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ?
4789 info->maxInputChannels : info->maxOutputChannels;
4790 info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ?
4791 info->minInputChannels : info->minOutputChannels;
4794 // Determine the supported sample rates.
4795 info->sampleRates.clear();
4796 for (unsigned int i=0; i<MAX_SAMPLE_RATES; i++) {
4797 result = ASIOCanSampleRate( (ASIOSampleRate) SAMPLE_RATES[i] );
4798 if ( result == ASE_OK )
4799 info->sampleRates.push_back( SAMPLE_RATES[i] );
4802 if (info->sampleRates.size() == 0) {
4803 drivers.removeCurrentDriver();
4804 sprintf( message_, "RtApiAsio: No supported sample rates found for driver (%s).", info->name.c_str() );
4805 error(RtError::DEBUG_WARNING);
4809 // Determine supported data types ... just check first channel and assume rest are the same.
4810 ASIOChannelInfo channelInfo;
4811 channelInfo.channel = 0;
4812 channelInfo.isInput = true;
4813 if ( info->maxInputChannels <= 0 ) channelInfo.isInput = false;
4814 result = ASIOGetChannelInfo( &channelInfo );
4815 if ( result != ASE_OK ) {
4816 drivers.removeCurrentDriver();
4817 sprintf(message_, "RtApiAsio: error (%s) getting driver (%s) channel information.",
4818 GetAsioErrorString(result),
4819 info->name.c_str());
4820 error(RtError::DEBUG_WARNING);
// Map the ASIO sample type to the RtAudio format flags.
4824 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB )
4825 info->nativeFormats |= RTAUDIO_SINT16;
4826 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB )
4827 info->nativeFormats |= RTAUDIO_SINT32;
4828 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB )
4829 info->nativeFormats |= RTAUDIO_FLOAT32;
4830 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB )
4831 info->nativeFormats |= RTAUDIO_FLOAT64;
4833 // Check that we have at least one supported format.
4834 if (info->nativeFormats == 0) {
4835 drivers.removeCurrentDriver();
4836 sprintf(message_, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
4837 info->name.c_str());
4838 error(RtError::DEBUG_WARNING);
4842 info->probed = true;
4843 drivers.removeCurrentDriver();
// ASIO buffer-switch callback: forward to the RtApiAsio object via the
// global asioCallbackInfo (ASIO callbacks carry no user-data pointer).
// RtError exceptions are reported to stderr, never propagated into
// the driver.
4846 void bufferSwitch(long index, ASIOBool processNow)
4848 RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object;
4850 object->callbackEvent( index );
4852 catch (RtError &exception) {
4853 fprintf(stderr, "\nRtApiAsio: callback handler error (%s)!\n\n", exception.getMessageString());
// ASIO sample-rate-change callback: RtAudio cannot follow a rate
// change mid-stream, so the stream is stopped and the event reported.
4860 void sampleRateChanged(ASIOSampleRate sRate)
4862 // The ASIO documentation says that this usually only happens during
4863 // external sync. Audio processing is not stopped by the driver,
4864 // actual sample rate might not have even changed, maybe only the
4865 // sample rate status of an AES/EBU or S/PDIF digital input at the
4868 RtAudio *object = (RtAudio *) asioCallbackInfo->object;
4870 object->stopStream();
4872 catch (RtError &exception) {
4873 fprintf(stderr, "\nRtApiAsio: sampleRateChanged() error (%s)!\n\n", exception.getMessageString());
4877 fprintf(stderr, "\nRtApiAsio: driver reports sample rate changed to %d ... stream stopped!!!", (int) sRate);
// ASIO message callback: answers driver capability queries and logs
// reset/resync/latency notifications.  Return values (elided in this
// listing) signal which selectors the host supports.
4880 long asioMessages(long selector, long value, void* message, double* opt)
4884 case kAsioSelectorSupported:
4885 if(value == kAsioResetRequest
4886 || value == kAsioEngineVersion
4887 || value == kAsioResyncRequest
4888 || value == kAsioLatenciesChanged
4889 // The following three were added for ASIO 2.0, you don't
4890 // necessarily have to support them.
4891 || value == kAsioSupportsTimeInfo
4892 || value == kAsioSupportsTimeCode
4893 || value == kAsioSupportsInputMonitor)
4896 case kAsioResetRequest:
4897 // Defer the task and perform the reset of the driver during the
4898 // next "safe" situation. You cannot reset the driver right now,
4899 // as this code is called from the driver. Reset the driver is
4900 // done by completely destruct is. I.e. ASIOStop(),
4901 // ASIODisposeBuffers(), Destruction Afterwards you initialize the
4903 fprintf(stderr, "\nRtApiAsio: driver reset requested!!!");
4906 case kAsioResyncRequest:
4907 // This informs the application that the driver encountered some
4908 // non-fatal data loss. It is used for synchronization purposes
4909 // of different media. Added mainly to work around the Win16Mutex
4910 // problems in Windows 95/98 with the Windows Multimedia system,
4911 // which could lose data because the Mutex was held too long by
4912 // another thread. However a driver can issue it in other
4914 fprintf(stderr, "\nRtApiAsio: driver resync requested!!!");
4917 case kAsioLatenciesChanged:
4918 // This will inform the host application that the drivers were
4919 // latencies changed. Beware, this does not mean that the
4920 // buffer sizes have changed! You might need to update internal
4922 fprintf(stderr, "\nRtApiAsio: driver latency may have changed!!!");
4925 case kAsioEngineVersion:
4926 // Return the supported ASIO version of the host application. If
4927 // a host application does not implement this selector, ASIO 1.0
4928 // is assumed by the driver.
4931 case kAsioSupportsTimeInfo:
4932 // Informs the driver whether the
4933 // asioCallbacks.bufferSwitchTimeInfo() callback is supported.
4934 // For compatibility with ASIO 1.0 drivers the host application
4935 // should always support the "old" bufferSwitch method, too.
4938 case kAsioSupportsTimeCode:
4939 // Informs the driver whether application is interested in time
4940 // code info. If an application does not need to know about time
4941 // code, the driver has less work to do.
// Open one direction of a stream on the given driver: validate
// channels/sample rate, negotiate the data format and buffer size,
// allocate the AsioHandle + ASIO buffers, and set up the conversion
// info.  For duplex, INPUT is opened after OUTPUT on the SAME driver
// and the output buffers are re-created to cover both directions.
// NOTE(review): this listing has gaps (the embedded line numbers
// jump); returns, else branches and the error-cleanup label are
// partially elided.  The sprintf at the ASIOCreateBuffers failure
// path contains a typo ("eror") in a runtime string — left unchanged
// here since this edit is documentation-only.
4948 bool RtApiAsio :: probeDeviceOpen(int device, StreamMode mode, int channels,
4949 int sampleRate, RtAudioFormat format,
4950 int *bufferSize, int numberOfBuffers)
4952 // For ASIO, a duplex stream MUST use the same driver.
4953 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) {
4954 sprintf(message_, "RtApiAsio: duplex stream must use the same device for input and output.");
4955 error(RtError::WARNING);
4959 // Only load the driver once for duplex stream.
4961 if ( mode != INPUT || stream_.mode != OUTPUT ) {
4962 if ( !drivers.loadDriver( (char *)devices_[device].name.c_str() ) ) {
4963 sprintf(message_, "RtApiAsio: error loading driver (%s).",
4964 devices_[device].name.c_str());
4965 error(RtError::DEBUG_WARNING);
4969 result = ASIOInit( &driverInfo );
4970 if ( result != ASE_OK ) {
4971 sprintf(message_, "RtApiAsio: error (%s) initializing driver (%s).",
4972 GetAsioErrorString(result), devices_[device].name.c_str());
4973 error(RtError::DEBUG_WARNING);
4978 // Check the device channel count.
4979 long inputChannels, outputChannels;
4980 result = ASIOGetChannels( &inputChannels, &outputChannels );
4981 if ( result != ASE_OK ) {
4982 drivers.removeCurrentDriver();
4983 sprintf(message_, "RtApiAsio: error (%s) getting input/output channel count (%s).",
4984 GetAsioErrorString(result),
4985 devices_[device].name.c_str());
4986 error(RtError::DEBUG_WARNING);
4990 if ( ( mode == OUTPUT && channels > outputChannels) ||
4991 ( mode == INPUT && channels > inputChannels) ) {
4992 drivers.removeCurrentDriver();
4993 sprintf(message_, "RtApiAsio: driver (%s) does not support requested channel count (%d).",
4994 devices_[device].name.c_str(), channels);
4995 error(RtError::DEBUG_WARNING);
4998 stream_.nDeviceChannels[mode] = channels;
4999 stream_.nUserChannels[mode] = channels;
5001 // Verify the sample rate is supported.
5002 result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate );
5003 if ( result != ASE_OK ) {
5004 drivers.removeCurrentDriver();
5005 sprintf(message_, "RtApiAsio: driver (%s) does not support requested sample rate (%d).",
5006 devices_[device].name.c_str(), sampleRate);
5007 error(RtError::DEBUG_WARNING);
5011 // Set the sample rate.
5012 result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate );
5013 if ( result != ASE_OK ) {
5014 drivers.removeCurrentDriver();
5015 sprintf(message_, "RtApiAsio: driver (%s) error setting sample rate (%d).",
5016 devices_[device].name.c_str(), sampleRate);
5017 error(RtError::DEBUG_WARNING);
5021 // Determine the driver data type.
5022 ASIOChannelInfo channelInfo;
5023 channelInfo.channel = 0;
5024 if ( mode == OUTPUT ) channelInfo.isInput = false;
5025 else channelInfo.isInput = true;
5026 result = ASIOGetChannelInfo( &channelInfo );
5027 if ( result != ASE_OK ) {
5028 drivers.removeCurrentDriver();
5029 sprintf(message_, "RtApiAsio: driver (%s) error getting data format.",
5030 devices_[device].name.c_str());
5031 error(RtError::DEBUG_WARNING);
5035 // Assuming WINDOWS host is always little-endian.
5036 stream_.doByteSwap[mode] = false;
5037 stream_.userFormat = format;
5038 stream_.deviceFormat[mode] = 0;
// Map the ASIO sample type to an RtAudio format; MSB variants need
// a byte swap on this (little-endian) host.
5039 if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) {
5040 stream_.deviceFormat[mode] = RTAUDIO_SINT16;
5041 if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true;
5043 else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) {
5044 stream_.deviceFormat[mode] = RTAUDIO_SINT32;
5045 if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true;
5047 else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) {
5048 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
5049 if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true;
5051 else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) {
5052 stream_.deviceFormat[mode] = RTAUDIO_FLOAT64;
5053 if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true;
5056 if ( stream_.deviceFormat[mode] == 0 ) {
5057 drivers.removeCurrentDriver();
5058 sprintf(message_, "RtApiAsio: driver (%s) data format not supported by RtAudio.",
5059 devices_[device].name.c_str());
5060 error(RtError::DEBUG_WARNING);
5064 // Set the buffer size. For a duplex stream, this will end up
5065 // setting the buffer size based on the input constraints, which
5067 long minSize, maxSize, preferSize, granularity;
5068 result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity );
5069 if ( result != ASE_OK ) {
5070 drivers.removeCurrentDriver();
5071 sprintf(message_, "RtApiAsio: error (%s) on driver (%s) error getting buffer size.",
5072 GetAsioErrorString(result),
5073 devices_[device].name.c_str());
5074 error(RtError::DEBUG_WARNING);
// Clamp the requested size into [minSize, maxSize] and honor the
// driver's granularity (-1 means power-of-two sizes only).
5078 if ( *bufferSize < minSize ) *bufferSize = minSize;
5079 else if ( *bufferSize > maxSize ) *bufferSize = maxSize;
5080 else if ( granularity == -1 ) {
5081 // Make sure bufferSize is a power of two.
5082 double power = log10( (double) *bufferSize ) / log10( 2.0 );
5083 *bufferSize = (int) pow( 2.0, floor(power+0.5) );
5084 if ( *bufferSize < minSize ) *bufferSize = minSize;
5085 else if ( *bufferSize > maxSize ) *bufferSize = maxSize;
5086 else *bufferSize = preferSize;
5087 } else if (granularity != 0)
5089 // to an even multiple of granularity, rounding up.
5090 *bufferSize = (*bufferSize + granularity-1)/granularity*granularity;
5095 if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize )
5096 std::cerr << "Possible input/output buffersize discrepancy!" << std::endl;
5098 stream_.bufferSize = *bufferSize;
5099 stream_.nBuffers = 2;
5101 // ASIO always uses deinterleaved channels.
5102 stream_.deInterleave[mode] = true;
5104 // Allocate, if necessary, our AsioHandle structure for the stream.
5105 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5106 if ( handle == 0 ) {
5107 handle = (AsioHandle *) calloc(1, sizeof(AsioHandle));
5108 if ( handle == NULL ) {
5109 drivers.removeCurrentDriver();
5110 sprintf(message_, "RtApiAsio: error allocating AsioHandle memory (%s).",
5111 devices_[device].name.c_str());
5112 error(RtError::DEBUG_WARNING);
5115 handle->bufferInfos = 0;
5116 // Create a manual-reset event.
5117 handle->condition = CreateEvent( NULL, // no security
5118 TRUE, // manual-reset
5119 FALSE, // non-signaled initially
5121 stream_.apiHandle = (void *) handle;
5124 // Create the ASIO internal buffers. Since RtAudio sets up input
5125 // and output separately, we'll have to dispose of previously
5126 // created output buffers for a duplex stream.
5127 if ( mode == INPUT && stream_.mode == OUTPUT ) {
5128 ASIODisposeBuffers();
5129 if ( handle->bufferInfos ) free( handle->bufferInfos );
5132 // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure.
5133 int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
5134 handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) );
5135 if (handle->bufferInfos == NULL) {
5136 sprintf(message_, "RtApiAsio: error allocating bufferInfo memory (%s).",
5137 devices_[device].name.c_str());
// Output channels first, then input channels, in one bufferInfos array.
5140 ASIOBufferInfo *infos;
5141 infos = handle->bufferInfos;
5142 for ( i=0; i<stream_.nDeviceChannels[0]; i++, infos++ ) {
5143 infos->isInput = ASIOFalse;
5144 infos->channelNum = i;
5145 infos->buffers[0] = infos->buffers[1] = 0;
5147 for ( i=0; i<stream_.nDeviceChannels[1]; i++, infos++ ) {
5148 infos->isInput = ASIOTrue;
5149 infos->channelNum = i;
5150 infos->buffers[0] = infos->buffers[1] = 0;
5153 // Set up the ASIO callback structure and create the ASIO data buffers.
5154 asioCallbacks.bufferSwitch = &bufferSwitch;
5155 asioCallbacks.sampleRateDidChange = &sampleRateChanged;
5156 asioCallbacks.asioMessage = &asioMessages;
5157 asioCallbacks.bufferSwitchTimeInfo = NULL;
5158 result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks);
5159 if ( result != ASE_OK ) {
5160 sprintf(message_, "RtApiAsio: eror (%s) on driver (%s) error creating buffers.",
5161 GetAsioErrorString(result),
5162 devices_[device].name.c_str());
5166 // Set flags for buffer conversion.
5167 stream_.doConvertBuffer[mode] = false;
5168 if (stream_.userFormat != stream_.deviceFormat[mode])
5169 stream_.doConvertBuffer[mode] = true;
5170 if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
5171 stream_.doConvertBuffer[mode] = true;
5172 if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
5173 stream_.doConvertBuffer[mode] = true;
5175 // Allocate necessary internal buffers
// The user buffer is sized for the larger channel count of the two
// directions so one buffer serves both in duplex mode.
5176 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
5179 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
5180 buffer_bytes = stream_.nUserChannels[0];
5182 buffer_bytes = stream_.nUserChannels[1];
5184 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
5185 if (stream_.userBuffer) free(stream_.userBuffer);
5186 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
5187 if (stream_.userBuffer == NULL) {
5188 sprintf(message_, "RtApiAsio: error (%s) allocating user buffer memory (%s).",
5189 GetAsioErrorString(result),
5190 devices_[device].name.c_str());
5195 if ( stream_.doConvertBuffer[mode] ) {
5198 bool makeBuffer = true;
5199 if ( mode == OUTPUT )
5200 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
5201 else { // mode == INPUT
5202 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
// Reuse an output-direction device buffer if it is already big enough.
5203 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
5204 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
5205 if ( buffer_bytes < bytes_out ) makeBuffer = false;
5210 buffer_bytes *= *bufferSize;
5211 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
5212 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
5213 if (stream_.deviceBuffer == NULL) {
5214 sprintf(message_, "RtApiAsio: error (%s) allocating device buffer memory (%s).",
5215 GetAsioErrorString(result),
5216 devices_[device].name.c_str());
5222 stream_.device[mode] = device;
5223 stream_.state = STREAM_STOPPED;
5224 if ( stream_.mode == OUTPUT && mode == INPUT )
5225 // We had already set up an output stream.
5226 stream_.mode = DUPLEX;
5228 stream_.mode = mode;
5229 stream_.sampleRate = sampleRate;
// Publish the callback info through the global pointer used by the
// ASIO callbacks (which carry no user data).
5230 asioCallbackInfo = &stream_.callbackInfo;
5231 stream_.callbackInfo.object = (void *) this;
5233 // Setup the buffer conversion information structure.
5234 if ( stream_.doConvertBuffer[mode] ) {
5235 if (mode == INPUT) { // convert device to user buffer
5236 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
5237 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
5238 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
5239 stream_.convertInfo[mode].outFormat = stream_.userFormat;
5241 else { // convert user to device buffer
5242 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
5243 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
5244 stream_.convertInfo[mode].inFormat = stream_.userFormat;
5245 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
5248 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
5249 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
5251 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
5253 // Set up the interleave/deinterleave offsets.
5254 if ( mode == INPUT && stream_.deInterleave[1] ) {
5255 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
5256 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
5257 stream_.convertInfo[mode].outOffset.push_back( k );
5258 stream_.convertInfo[mode].inJump = 1;
5261 else if (mode == OUTPUT && stream_.deInterleave[0]) {
5262 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
5263 stream_.convertInfo[mode].inOffset.push_back( k );
5264 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
5265 stream_.convertInfo[mode].outJump = 1;
5269 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
5270 stream_.convertInfo[mode].inOffset.push_back( k );
5271 stream_.convertInfo[mode].outOffset.push_back( k );
// ---- Error cleanup path (label elided in this listing): undo the
// ---- partial open in reverse order of construction.
5279 ASIODisposeBuffers();
5280 drivers.removeCurrentDriver();
5283 CloseHandle( handle->condition );
5284 if ( handle->bufferInfos )
5285 free( handle->bufferInfos );
5287 stream_.apiHandle = 0;
5290 if (stream_.userBuffer) {
5291 free(stream_.userBuffer);
5292 stream_.userBuffer = 0;
5295 error(RtError::DEBUG_WARNING);
// Close the stream and release every resource acquired by
// probeDeviceOpen: ASIO buffers, driver, condition event, bufferInfos,
// and the user/device buffers.  Called from the destructor, so it must
// not throw (an unopened stream is only a WARNING).
5299 void RtApiAsio :: closeStream()
5301 // We don't want an exception to be thrown here because this
5302 // function is called by our class destructor. So, do our own
5304 if ( stream_.mode == UNINITIALIZED ) {
5305 sprintf(message_, "RtApiAsio::closeStream(): no open stream to close!");
5306 error(RtError::WARNING);
5310 if (stream_.state == STREAM_RUNNING)
5313 ASIODisposeBuffers();
5314 drivers.removeCurrentDriver();
5316 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5318 CloseHandle( handle->condition );
5319 if ( handle->bufferInfos )
5320 free( handle->bufferInfos );
5322 stream_.apiHandle = 0;
5325 if (stream_.userBuffer) {
5326 free(stream_.userBuffer);
5327 stream_.userBuffer = 0;
5330 if (stream_.deviceBuffer) {
5331 free(stream_.deviceBuffer);
5332 stream_.deviceBuffer = 0;
// Mark the stream closed so the destructor won't re-enter.
5335 stream_.mode = UNINITIALIZED;
// Register a user callback.  Unlike the ALSA backend, no thread is
// created here: the ASIO driver itself invokes bufferSwitch(), which
// calls the user callback via callbackEvent().
5338 void RtApiAsio :: setStreamCallback(RtAudioCallback callback, void *userData)
// Only one callback may be registered per stream.
5342 if ( stream_.callbackInfo.usingCallback ) {
5343 sprintf(message_, "RtApiAsio: A callback is already set for this stream!");
5344 error(RtError::WARNING);
5348 stream_.callbackInfo.callback = (void *) callback;
5349 stream_.callbackInfo.userData = userData;
5350 stream_.callbackInfo.usingCallback = true;
// Deregister the user callback: stop the stream if running, then clear
// the callback bookkeeping under the stream mutex.  No thread to join
// (the driver owns the callback thread in ASIO).
5353 void RtApiAsio :: cancelStreamCallback()
5357 if (stream_.callbackInfo.usingCallback) {
5359 if (stream_.state == STREAM_RUNNING)
5362 MUTEX_LOCK(&stream_.mutex);
5364 stream_.callbackInfo.usingCallback = false;
5365 stream_.callbackInfo.userData = NULL;
5366 stream_.state = STREAM_STOPPED;
5367 stream_.callbackInfo.callback = NULL;
5369 MUTEX_UNLOCK(&stream_.mutex);
// Start the driver (ASIOStart) and mark the stream running.  A no-op
// when already running; unlocks the mutex before raising DRIVER_ERROR.
5373 void RtApiAsio :: startStream()
5376 if (stream_.state == STREAM_RUNNING) return;
5378 MUTEX_LOCK(&stream_.mutex);
5380 ASIOError result = ASIOStart();
5381 if ( result != ASE_OK ) {
5382 sprintf(message_, "RtApiAsio: error starting device (%s).",
5383 devices_[stream_.device[0]].name.c_str());
5384 MUTEX_UNLOCK(&stream_.mutex);
5385 error(RtError::DRIVER_ERROR);
// Clear any pending stop request from a previous run.
5387 AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5388 handle->stopStream = false;
5389 stream_.state = STREAM_RUNNING;
5391 MUTEX_UNLOCK(&stream_.mutex);
// Stop the driver (ASIOStop).  The state flag is flipped before taking
// the lock so a concurrent callbackEvent() sees STOPPED promptly.
5394 void RtApiAsio :: stopStream()
5397 if (stream_.state == STREAM_STOPPED) return;
5399 // Change the state before the lock to improve shutdown response
5400 // when using a callback.
5401 stream_.state = STREAM_STOPPED;
5402 MUTEX_LOCK(&stream_.mutex);
5404 ASIOError result = ASIOStop();
5405 if ( result != ASE_OK ) {
5406 sprintf(message_, "RtApiAsio: error stopping device (%s).",
5407 devices_[stream_.device[0]].name.c_str());
5408 MUTEX_UNLOCK(&stream_.mutex);
5409 error(RtError::DRIVER_ERROR);
5412 MUTEX_UNLOCK(&stream_.mutex);
// Immediately halt the stream.  NOTE(review): the body (original lines
// 5416-5418) is elided from this chunk; presumably it delegates to
// stopStream() -- confirm against the full file.
5415 void RtApiAsio :: abortStream()
5420 void RtApiAsio :: tickStream()
// Blocking-mode advance: wait until the ASIO callback signals that one
// buffer of audio has been processed.  Must not be used in callback mode.
5424   if (stream_.state == STREAM_STOPPED)
5427   if (stream_.callbackInfo.usingCallback) {
5428     sprintf(message_, "RtApiAsio: tickStream() should not be used when a callback function is set!");
5429     error(RtError::WARNING);
5433   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5435   MUTEX_LOCK(&stream_.mutex);
5437   // Release the stream_mutex here and wait for the event
5438   // to become signaled by the callback process.
5439   MUTEX_UNLOCK(&stream_.mutex);
// Single-event wait; the event is manual-reset, hence the explicit
// ResetEvent() after the wait completes.
5440   WaitForMultipleObjects(1, &handle->condition, FALSE, INFINITE);
5441   ResetEvent( handle->condition );
5444 void RtApiAsio :: callbackEvent(long bufferIndex)
// ASIO buffer-switch handler.  bufferIndex selects which half of each
// double-buffered channel to read/write this period.  Output path: user
// buffer -> (convert/byte-swap) -> de-interleaved ASIO buffers.  Input
// path: interleave ASIO buffers -> (byte-swap/convert) -> user buffer.
// NOTE(review): several structural lines (braces, the deferred-stop
// handling around 5456-5459, bufferBytes declarations, j initializers)
// are elided in this chunk of the file.
5448   if (stream_.state == STREAM_STOPPED) return;
5450   CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
5451   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
5452   if ( info->usingCallback && handle->stopStream ) {
5453     // Check if the stream should be stopped (via the previous user
5454     // callback return value). We stop the stream here, rather than
5455     // after the function call, so that output data can first be
5461   MUTEX_LOCK(&stream_.mutex);
5463   // Invoke user callback first, to get fresh output data.
5464   if ( info->usingCallback ) {
5465     RtAudioCallback callback = (RtAudioCallback) info->callback;
// A non-zero user return requests a deferred stop, honored at the top of
// the NEXT callback so this period's output still reaches the device.
5466     if ( callback(stream_.userBuffer, stream_.bufferSize, info->userData) )
5467       handle->stopStream = true;
5471   int nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1];
5472   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
5474     bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[0]);
5475     if (stream_.doConvertBuffer[0]) {
5477       convertBuffer( stream_.deviceBuffer, stream_.userBuffer, stream_.convertInfo[0] );
5478       if ( stream_.doByteSwap[0] )
5479         byteSwapBuffer(stream_.deviceBuffer,
5480                        stream_.bufferSize * stream_.nDeviceChannels[0],
5481                        stream_.deviceFormat[0]);
5483       // Always de-interleave ASIO output data.
5485       for ( int i=0; i<nChannels; i++ ) {
// bufferInfos covers input AND output channels; skip the input entries.
5486         if ( handle->bufferInfos[i].isInput != ASIOTrue )
5487           memcpy(handle->bufferInfos[i].buffers[bufferIndex],
5488                  &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );
5491     else { // single channel only
5493       if (stream_.doByteSwap[0])
5494         byteSwapBuffer(stream_.userBuffer,
5495                        stream_.bufferSize * stream_.nUserChannels[0],
5496                        stream_.userFormat);
5498       for ( int i=0; i<nChannels; i++ ) {
5499         if ( handle->bufferInfos[i].isInput != ASIOTrue ) {
5500           memcpy(handle->bufferInfos[i].buffers[bufferIndex], stream_.userBuffer, bufferBytes );
5507   if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {
5509     bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);
5510     if (stream_.doConvertBuffer[1]) {
5512       // Always interleave ASIO input data.
5514       for ( int i=0; i<nChannels; i++ ) {
5515         if ( handle->bufferInfos[i].isInput == ASIOTrue )
5516           memcpy(&stream_.deviceBuffer[j++*bufferBytes],
5517                  handle->bufferInfos[i].buffers[bufferIndex],
// Byte-swap BEFORE converting so convertBuffer() sees host-endian samples.
5521       if ( stream_.doByteSwap[1] )
5522         byteSwapBuffer(stream_.deviceBuffer,
5523                        stream_.bufferSize * stream_.nDeviceChannels[1],
5524                        stream_.deviceFormat[1]);
5525       convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
5528     else { // single channel only
5529       for ( int i=0; i<nChannels; i++ ) {
5530         if ( handle->bufferInfos[i].isInput == ASIOTrue ) {
5531           memcpy(stream_.userBuffer,
5532                  handle->bufferInfos[i].buffers[bufferIndex],
5538       if (stream_.doByteSwap[1])
5539         byteSwapBuffer(stream_.userBuffer,
5540                        stream_.bufferSize * stream_.nUserChannels[1],
5541                        stream_.userFormat);
// Blocking mode: wake the thread parked in tickStream().
5545   if ( !info->usingCallback )
5546     SetEvent( handle->condition );
5548   // The following call was suggested by Malte Clasen. While the API
5549   // documentation indicates it should not be required, some device
5550   // drivers apparently do not function correctly without it.
5553   MUTEX_UNLOCK(&stream_.mutex);
5556 //******************** End of __WINDOWS_ASIO__ *********************//
5559 #if defined(__WINDOWS_DS__) // Windows DirectSound API
5565 #define MINIMUM_DEVICE_BUFFER_SIZE 32768
5568 #ifdef _MSC_VER // if Microsoft Visual C++
5569 #pragma comment(lib,"winmm.lib") // then, auto-link winmm.lib. Otherwise, it has to be added manually.
// Distance (in bytes) from earlierPointer forward to laterPointer within a
// circular DirectSound buffer of bufferSize bytes, accounting for wrap-around.
// NOTE(review): the else keyword/braces between the two returns are elided
// in this chunk of the file.
5573 static inline DWORD dsPointerDifference(DWORD laterPointer,DWORD earlierPointer,DWORD bufferSize)
5575   if (laterPointer > earlierPointer)
5577     return laterPointer-earlierPointer;
// Wrapped case: laterPointer has cycled past the end of the ring buffer.
5580     return laterPointer-earlierPointer+bufferSize;
// True if pointer lies in the half-open circular interval
// [earlierPointer, laterPointer) of a ring buffer of bufferSize bytes.
// The first normalization allows callers to pass a pointer that has
// already been advanced past the end of the buffer.
5584 static inline DWORD dsPointerBetween(DWORD pointer, DWORD laterPointer,DWORD earlierPointer, DWORD bufferSize)
5586   if (pointer > bufferSize) pointer -= bufferSize;
// Unwrap the interval (and pointer) so a plain range test is valid even
// when the interval crosses the buffer end.
5587   if (laterPointer < earlierPointer)
5589     laterPointer += bufferSize;
5591   if (pointer < earlierPointer)
5593     pointer += bufferSize;
5595   return pointer >= earlierPointer && pointer < laterPointer;
5599 #undef GENERATE_DEBUG_LOG // Define this to generate a debug timing log file in c:/rtaudiolog.txt
5600 #ifdef GENERATE_DEBUG_LOG
5602 #include "mmsystem.h"
5607 DWORD currentReadPointer, safeReadPointer;
5608 DWORD currentWritePointer, safeWritePointer;
5609 DWORD readTime, writeTime;
5610 DWORD nextWritePointer, nextReadPointer;
5613 int currentDebugLogEntry = 0;
5614 std::vector<TTickRecord> debugLog(2000);
5619 // A structure to hold various information related to the DirectSound
5620 // API implementation.
5626 DWORD dsPointerLeadTime; // the number of bytes ahead of the safe pointer to lead by.
// Single shared statistics instance updated by the DirectSound code paths.
5630 RtApiDs::RtDsStatistics RtApiDs::statistics;
5632 // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns.
// Returns a snapshot COPY of the statistics with the derived latency
// fields computed on the way out (bytes -> frames -> seconds).
// NOTE(review): braces and the "s.latency +=" continuation line for the
// output term are elided in this chunk, as is the final return.
5633 RtApiDs::RtDsStatistics RtApiDs::getDsStatistics()
5635   RtDsStatistics s = statistics;
5636   // update the calculated fields.
5639   if (s.inputFrameSize != 0)
5641     s.latency += s.readDeviceSafeLeadBytes*1.0/s.inputFrameSize / s.sampleRate;
5643   if (s.outputFrameSize != 0)
5646       (s.writeDeviceSafeLeadBytes+ s.writeDeviceBufferLeadBytes)*1.0/s.outputFrameSize / s.sampleRate;
5652 // Declarations for utility functions, callbacks, and structures
5653 // specific to the DirectSound implementation.
5654 static bool CALLBACK deviceCountCallback(LPGUID lpguid,
5655 LPCSTR lpcstrDescription,
5656 LPCSTR lpcstrModule,
5659 static bool CALLBACK deviceInfoCallback(LPGUID lpguid,
5660 LPCSTR lpcstrDescription,
5661 LPCSTR lpcstrModule,
5664 static bool CALLBACK defaultDeviceCallback(LPGUID lpguid,
5665 LPCSTR lpcstrDescription,
5666 LPCSTR lpcstrModule,
5669 static bool CALLBACK deviceIdCallback(LPGUID lpguid,
5670 LPCSTR lpcstrDescription,
5671 LPCSTR lpcstrModule,
5674 static char* getErrorString(int code);
5676 extern "C" unsigned __stdcall callbackHandler(void *ptr);
5685 RtApiDs :: RtApiDs()
5687   // Dsound will run both-threaded. If CoInitialize fails, then just accept whatever the mainline
5688   // chose for a threading model.
5689   coInitialized = false;
5690   HRESULT hr = CoInitialize(NULL);
// NOTE(review): the SUCCEEDED(hr) check guarding this assignment (and the
// initialize() call before the device-count test) are elided in this chunk.
5692     coInitialized = true;
5697   if (nDevices_ <= 0) {
5698     sprintf(message_, "RtApiDs: no Windows DirectSound audio devices found!");
5699     error(RtError::NO_DEVICES_FOUND);
5703 RtApiDs :: ~RtApiDs()
// NOTE(review): the coInitialized guard around CoUninitialize() is elided
// in this chunk -- the "balanced call" comment implies it only runs when
// the constructor's CoInitialize succeeded; confirm against the full file.
5707     CoUninitialize(); // balanced call.
// Tear down any open stream before the object goes away.
5709   if ( stream_.mode != UNINITIALIZED ) closeStream();
// Returns the index (into devices_) of the system default capture device,
// located by name match against the enumeration result.  NOTE(review):
// the fallback return when no match is found (original ~5730) is elided.
5712 int RtApiDs :: getDefaultInputDevice(void)
5715   info.name[0] = '\0';
5717   // Enumerate through devices to find the default input.
5718   HRESULT result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)defaultDeviceCallback, &info);
5719   if ( FAILED(result) ) {
5720     sprintf(message_, "RtApiDs: Error performing default input device enumeration: %s.",
5721             getErrorString(result));
5722     error(RtError::WARNING);
// Match by name (first 64 chars) against the probed device list.
5726   for ( int i=0; i<nDevices_; i++ ) {
5727     if ( strncmp( info.name, devices_[i].name.c_str(), 64 ) == 0 ) return i;
// Returns the index (into devices_) of the system default playback device,
// located by name match against the enumeration result.  NOTE(review):
// the fallback return when no match is found (original ~5752) is elided.
5734 int RtApiDs :: getDefaultOutputDevice(void)
5737   info.name[0] = '\0';
5739   // Enumerate through devices to find the default output.
5740   HRESULT result = DirectSoundEnumerate((LPDSENUMCALLBACK)defaultDeviceCallback, &info);
5741   if ( FAILED(result) ) {
5742     sprintf(message_, "RtApiDs: Error performing default output device enumeration: %s.",
5743             getErrorString(result));
5744     error(RtError::WARNING);
// Match by name (first 64 chars) against the probed device list.
5748   for ( int i=0; i<nDevices_; i++ )
5749     if ( strncmp( info.name, devices_[i].name.c_str(), 64 ) == 0 ) return i;
5754 void RtApiDs :: initialize(void)
// Build the devices_ list: count playback + capture devices, enumerate
// their names/validity, then keep only the valid ones.  NOTE(review):
// several lines (result declaration, count = outs + ins, braces) are
// elided in this chunk of the file.
5756   int i, ins = 0, outs = 0, count = 0;
5760   // Count DirectSound devices.
5761   result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceCountCallback, &outs);
5762   if ( FAILED(result) ) {
5763     sprintf(message_, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
5764             getErrorString(result));
5765     error(RtError::DRIVER_ERROR);
5768   // Count DirectSoundCapture devices.
5769   result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceCountCallback, &ins);
5770   if ( FAILED(result) ) {
5771     sprintf(message_, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
5772             getErrorString(result));
5773     error(RtError::DRIVER_ERROR);
5777   if (count == 0) return;
// Playback devices occupy the first `outs` slots, capture the rest; the
// info callbacks rely on this ordering.
5779   std::vector<enum_info> info(count);
5780   for (i=0; i<count; i++) {
5781     info[i].name[0] = '\0';
5782     if (i < outs) info[i].isInput = false;
5783     else info[i].isInput = true;
5786   // Get playback device info and check capabilities.
5787   result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceInfoCallback, &info[0]);
5788   if ( FAILED(result) ) {
5789     sprintf(message_, "RtApiDs: Unable to enumerate through sound playback devices: %s.",
5790             getErrorString(result));
5791     error(RtError::DRIVER_ERROR);
5794   // Get capture device info and check capabilities.
5795   result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceInfoCallback, &info[0]);
5796   if ( FAILED(result) ) {
5797     sprintf(message_, "RtApiDs: Unable to enumerate through sound capture devices: %s.",
5798             getErrorString(result));
5799     error(RtError::DRIVER_ERROR);
5802   // Create device structures for valid devices and write device names
5803   // to each. Devices are considered invalid if they cannot be
5804   // opened, they report < 1 supported channels, or they report no
5805   // supported data (capture only).
5808   for (i=0; i<count; i++) {
5809     if ( info[i].isValid ) {
5810       device.name.erase();
5811       device.name.append( (const char *)info[i].name, strlen(info[i].name)+1);
5812       devices_.push_back(device);
5816   nDevices_ = devices_.size();
5820 void RtApiDs :: probeDeviceInfo(RtApiDevice *info)
// Probe one device's capabilities: capture first (channels, formats,
// sample rates from the WAVE_FORMAT_* capability bits), then playback,
// then sanity-check and compute duplex limits.  Uses goto labels
// (playback_probe / check_parameters) to skip forward on any failure.
// NOTE(review): dsinfo/in_caps/out_caps declarations, the goto labels
// themselves, Release() calls, and several braces are elided in this
// chunk of the file.
5823   strncpy( dsinfo.name, info->name.c_str(), 64 );
5824   dsinfo.isValid = false;
5826   // Enumerate through input devices to find the id (if it exists).
5827   HRESULT result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
5828   if ( FAILED(result) ) {
5829     sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
5830             getErrorString(result));
5831     error(RtError::DEBUG_WARNING);
5835   // Do capture probe first.
5836   if ( dsinfo.isValid == false )
5837     goto playback_probe;
5839   LPDIRECTSOUNDCAPTURE input;
5840   result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
5841   if ( FAILED(result) ) {
5842     sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
5843             info->name.c_str(), getErrorString(result));
5844     error(RtError::DEBUG_WARNING);
5845     goto playback_probe;
5849   in_caps.dwSize = sizeof(in_caps);
5850   result = input->GetCaps( &in_caps );
5851   if ( FAILED(result) ) {
5853     sprintf(message_, "RtApiDs: Could not get capture capabilities (%s): %s.",
5854             info->name.c_str(), getErrorString(result));
5855     error(RtError::DEBUG_WARNING);
5856     goto playback_probe;
5859   // Get input channel information.
5860   info->minInputChannels = 1;
5861   info->maxInputChannels = in_caps.dwChannels;
5863   // Get sample rate and format information.
// The WAVE_FORMAT_{1,2,4}{S,M}{08,16} bits encode 11025/22050/44100 Hz x
// stereo/mono x 8/16-bit support; formats first, then rates for the best
// format found (16-bit preferred over 8-bit).
5864   info->sampleRates.clear();
5865   if( in_caps.dwChannels == 2 ) {
5866     if( in_caps.dwFormats & WAVE_FORMAT_1S16 ) info->nativeFormats |= RTAUDIO_SINT16;
5867     if( in_caps.dwFormats & WAVE_FORMAT_2S16 ) info->nativeFormats |= RTAUDIO_SINT16;
5868     if( in_caps.dwFormats & WAVE_FORMAT_4S16 ) info->nativeFormats |= RTAUDIO_SINT16;
5869     if( in_caps.dwFormats & WAVE_FORMAT_1S08 ) info->nativeFormats |= RTAUDIO_SINT8;
5870     if( in_caps.dwFormats & WAVE_FORMAT_2S08 ) info->nativeFormats |= RTAUDIO_SINT8;
5871     if( in_caps.dwFormats & WAVE_FORMAT_4S08 ) info->nativeFormats |= RTAUDIO_SINT8;
5873     if ( info->nativeFormats & RTAUDIO_SINT16 ) {
5874       if( in_caps.dwFormats & WAVE_FORMAT_1S16 ) info->sampleRates.push_back( 11025 );
5875       if( in_caps.dwFormats & WAVE_FORMAT_2S16 ) info->sampleRates.push_back( 22050 );
5876       if( in_caps.dwFormats & WAVE_FORMAT_4S16 ) info->sampleRates.push_back( 44100 );
5878     else if ( info->nativeFormats & RTAUDIO_SINT8 ) {
5879       if( in_caps.dwFormats & WAVE_FORMAT_1S08 ) info->sampleRates.push_back( 11025 );
5880       if( in_caps.dwFormats & WAVE_FORMAT_2S08 ) info->sampleRates.push_back( 22050 );
5881       if( in_caps.dwFormats & WAVE_FORMAT_4S08 ) info->sampleRates.push_back( 44100 );
5884   else if ( in_caps.dwChannels == 1 ) {
5885     if( in_caps.dwFormats & WAVE_FORMAT_1M16 ) info->nativeFormats |= RTAUDIO_SINT16;
5886     if( in_caps.dwFormats & WAVE_FORMAT_2M16 ) info->nativeFormats |= RTAUDIO_SINT16;
5887     if( in_caps.dwFormats & WAVE_FORMAT_4M16 ) info->nativeFormats |= RTAUDIO_SINT16;
5888     if( in_caps.dwFormats & WAVE_FORMAT_1M08 ) info->nativeFormats |= RTAUDIO_SINT8;
5889     if( in_caps.dwFormats & WAVE_FORMAT_2M08 ) info->nativeFormats |= RTAUDIO_SINT8;
5890     if( in_caps.dwFormats & WAVE_FORMAT_4M08 ) info->nativeFormats |= RTAUDIO_SINT8;
5892     if ( info->nativeFormats & RTAUDIO_SINT16 ) {
5893       if( in_caps.dwFormats & WAVE_FORMAT_1M16 ) info->sampleRates.push_back( 11025 );
5894       if( in_caps.dwFormats & WAVE_FORMAT_2M16 ) info->sampleRates.push_back( 22050 );
5895       if( in_caps.dwFormats & WAVE_FORMAT_4M16 ) info->sampleRates.push_back( 44100 );
5897     else if ( info->nativeFormats & RTAUDIO_SINT8 ) {
5898       if( in_caps.dwFormats & WAVE_FORMAT_1M08 ) info->sampleRates.push_back( 11025 );
5899       if( in_caps.dwFormats & WAVE_FORMAT_2M08 ) info->sampleRates.push_back( 22050 );
5900       if( in_caps.dwFormats & WAVE_FORMAT_4M08 ) info->sampleRates.push_back( 44100 );
5903   else info->minInputChannels = 0; // technically, this would be an error
// ---- playback probe (label elided in this chunk) ----
5909   dsinfo.isValid = false;
5911   // Enumerate through output devices to find the id (if it exists).
5912   result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
5913   if ( FAILED(result) ) {
5914     sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
5915             getErrorString(result));
5916     error(RtError::DEBUG_WARNING);
5920   // Now do playback probe.
5921   if ( dsinfo.isValid == false )
5922     goto check_parameters;
5924   LPDIRECTSOUND output;
5926   result = DirectSoundCreate( dsinfo.id, &output, NULL );
5927   if ( FAILED(result) ) {
5928     sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
5929             info->name.c_str(), getErrorString(result));
5930     error(RtError::DEBUG_WARNING);
5931     goto check_parameters;
5934   out_caps.dwSize = sizeof(out_caps);
5935   result = output->GetCaps( &out_caps );
5936   if ( FAILED(result) ) {
5938     sprintf(message_, "RtApiDs: Could not get playback capabilities (%s): %s.",
5939             info->name.c_str(), getErrorString(result));
5940     error(RtError::DEBUG_WARNING);
5941     goto check_parameters;
5944   // Get output channel information.
5945   info->minOutputChannels = 1;
5946   info->maxOutputChannels = ( out_caps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;
5948   // Get sample rate information. Use capture device rate information
// (if any was collected above) as the baseline; otherwise fall back to
// the playback min/max secondary rates.
5950   if ( info->sampleRates.size() == 0 ) {
5951     info->sampleRates.push_back( (int) out_caps.dwMinSecondarySampleRate );
5952     if ( out_caps.dwMaxSecondarySampleRate > out_caps.dwMinSecondarySampleRate )
5953       info->sampleRates.push_back( (int) out_caps.dwMaxSecondarySampleRate );
5956   // Check input rates against output rate range. If there's an
5957   // inconsistency (such as a duplex-capable device which reports a
5958   // single output rate of 48000 Hz), we'll go with the output
5959   // rate(s) since the DirectSoundCapture API is stupid and broken.
5960   // Note that the probed sample rate values are NOT used when
5961   // opening the device. Thanks to Tue Andersen for reporting this.
5962   if ( info->sampleRates.back() < (int) out_caps.dwMinSecondarySampleRate ) {
5963     info->sampleRates.clear();
5964     info->sampleRates.push_back( (int) out_caps.dwMinSecondarySampleRate );
5965     if ( out_caps.dwMaxSecondarySampleRate > out_caps.dwMinSecondarySampleRate )
5966       info->sampleRates.push_back( (int) out_caps.dwMaxSecondarySampleRate );
// Prune rates outside [min, max] secondary rate; iterate backwards so
// erase() does not skip elements.
5969     for ( int i=info->sampleRates.size()-1; i>=0; i-- ) {
5970       if ( (unsigned int) info->sampleRates[i] > out_caps.dwMaxSecondarySampleRate )
5971         info->sampleRates.erase( info->sampleRates.begin() + i );
5973     while ( info->sampleRates.size() > 0 &&
5974             ((unsigned int) info->sampleRates[0] < out_caps.dwMinSecondarySampleRate) ) {
5975       info->sampleRates.erase( info->sampleRates.begin() );
5980   // Get format information.
5981   if ( out_caps.dwFlags & DSCAPS_PRIMARY16BIT ) info->nativeFormats |= RTAUDIO_SINT16;
5982   if ( out_caps.dwFlags & DSCAPS_PRIMARY8BIT ) info->nativeFormats |= RTAUDIO_SINT8;
// ---- check_parameters (label elided in this chunk) ----
5987   if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 ) {
5988     sprintf(message_, "RtApiDs: no reported input or output channels for device (%s).",
5989             info->name.c_str());
5990     error(RtError::DEBUG_WARNING);
5993   if ( info->sampleRates.size() == 0 || info->nativeFormats == 0 ) {
5994     sprintf(message_, "RtApiDs: no reported sample rates or data formats for device (%s).",
5995             info->name.c_str());
5996     error(RtError::DEBUG_WARNING);
6000   // Determine duplex status.
// Duplex channel limits are the intersection of input and output limits.
6001   if (info->maxInputChannels < info->maxOutputChannels)
6002     info->maxDuplexChannels = info->maxInputChannels;
6004     info->maxDuplexChannels = info->maxOutputChannels;
6005   if (info->minInputChannels < info->minOutputChannels)
6006     info->minDuplexChannels = info->minInputChannels;
6008     info->minDuplexChannels = info->minOutputChannels;
6010   if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true;
6011   else info->hasDuplexSupport = false;
6013   info->probed = true;
// Open one direction (OUTPUT or INPUT) of a DirectSound stream on the
// given device: build the WAVEFORMATEX, size the device ring buffer,
// create/format/lock/zero the DS buffers, then fill in the stream_
// bookkeeping (formats, conversion flags, user/device buffers, DsHandle
// pair, interleave offsets).  Returns via the stream_ state; on any
// failure jumps to the cleanup section at the bottom (the "error:" label
// itself is elided from this chunk, as are many braces, declarations of
// buffer_size/nBuffers/dsinfo/result/audioPtr/dataLen, and goto
// statements -- original-file line numbers show the gaps).
6018 bool RtApiDs :: probeDeviceOpen( int device, StreamMode mode, int channels,
6019                                  int sampleRate, RtAudioFormat format,
6020                                  int *bufferSize, int numberOfBuffers)
6023   HWND hWnd = GetForegroundWindow();
6025   // According to a note in PortAudio, using GetDesktopWindow()
6026   // instead of GetForegroundWindow() is supposed to avoid problems
6027   // that occur when the application's window is not the foreground
6028   // window. Also, if the application window closes before the
6029   // DirectSound buffer, DirectSound can crash. However, for console
6030   // applications, no sound was produced when using GetDesktopWindow().
6036   // Check the numberOfBuffers parameter and limit the lowest value to
6037   // two. This is a judgement call and a value of two is probably too
6038   // low for capture, but it should work for playback.
6039   if (numberOfBuffers < 2)
6042     nBuffers = numberOfBuffers;
6044   // Define the wave format structure (16-bit PCM, srate, channels)
6045   WAVEFORMATEX waveFormat;
6046   ZeroMemory(&waveFormat, sizeof(WAVEFORMATEX));
6047   waveFormat.wFormatTag = WAVE_FORMAT_PCM;
6048   waveFormat.nChannels = channels;
6049   waveFormat.nSamplesPerSec = (unsigned long) sampleRate;
6051   // Determine the data format.
// Honor the requested 8/16-bit format when the device supports it,
// otherwise fall back to the other PCM width.
6052   if ( devices_[device].nativeFormats ) { // 8-bit and/or 16-bit support
6053     if ( format == RTAUDIO_SINT8 ) {
6054       if ( devices_[device].nativeFormats & RTAUDIO_SINT8 )
6055         waveFormat.wBitsPerSample = 8;
6057         waveFormat.wBitsPerSample = 16;
6060       if ( devices_[device].nativeFormats & RTAUDIO_SINT16 )
6061         waveFormat.wBitsPerSample = 16;
6063         waveFormat.wBitsPerSample = 8;
6067     sprintf(message_, "RtApiDs: no reported data formats for device (%s).",
6068             devices_[device].name.c_str());
6069     error(RtError::DEBUG_WARNING);
6073   waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
6074   waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
6076   // Determine the device buffer size. By default, 32k,
6077   // but we will grow it to make allowances for very large software buffer sizes.
6078   DWORD dsBufferSize = 0;
6079   DWORD dsPointerLeadTime = 0;
6081   buffer_size = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this
6084   // poisonously large buffer lead time? Then increase the device buffer size accordingly.
// Keep the ring buffer at least twice the lead time so the write cursor
// cannot lap the play cursor.  NOTE(review): the loop body (the growth
// statement) is elided in this chunk.
6085   while (dsPointerLeadTime *2U > (DWORD)buffer_size)
6093   void *ohandle = 0, *bhandle = 0;
6094   strncpy( dsinfo.name, devices_[device].name.c_str(), 64 );
6095   dsinfo.isValid = false;
// ---------------- OUTPUT (playback) setup ----------------
6096   if ( mode == OUTPUT ) {
// Lead time in bytes: buffers x frames x bytes/sample x channels
// (continuation lines elided here).
6097     dsPointerLeadTime = (numberOfBuffers) *
6099       (waveFormat.wBitsPerSample / 8)
6103     if ( devices_[device].maxOutputChannels < channels ) {
6104       sprintf(message_, "RtApiDs: requested channels (%d) > than supported (%d) by device (%s).",
6105               channels, devices_[device].maxOutputChannels, devices_[device].name.c_str());
6106       error(RtError::DEBUG_WARNING);
6110     // Enumerate through output devices to find the id (if it exists).
6111     result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
6112     if ( FAILED(result) ) {
6113       sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
6114               getErrorString(result));
6115       error(RtError::DEBUG_WARNING);
6119     if ( dsinfo.isValid == false ) {
6120       sprintf(message_, "RtApiDs: output device (%s) id not found!", devices_[device].name.c_str());
6121       error(RtError::DEBUG_WARNING);
6125     LPGUID id = dsinfo.id;
6126     LPDIRECTSOUND object;
6127     LPDIRECTSOUNDBUFFER buffer;
6128     DSBUFFERDESC bufferDescription;
6130     result = DirectSoundCreate( id, &object, NULL );
6131     if ( FAILED(result) ) {
6132       sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
6133               devices_[device].name.c_str(), getErrorString(result));
6134       error(RtError::DEBUG_WARNING);
6138     // Set cooperative level to DSSCL_EXCLUSIVE
6139     result = object->SetCooperativeLevel(hWnd, DSSCL_EXCLUSIVE);
6140     if ( FAILED(result) ) {
6142       sprintf(message_, "RtApiDs: Unable to set cooperative level (%s): %s.",
6143               devices_[device].name.c_str(), getErrorString(result));
6144       error(RtError::DEBUG_WARNING);
6148     // Even though we will write to the secondary buffer, we need to
6149     // access the primary buffer to set the correct output format
6150     // (since the default is 8-bit, 22 kHz!). Setup the DS primary
6151     // buffer description.
6152     ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
6153     bufferDescription.dwSize = sizeof(DSBUFFERDESC);
6154     bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
6155     // Obtain the primary buffer
6156     result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
6157     if ( FAILED(result) ) {
6159       sprintf(message_, "RtApiDs: Unable to access primary buffer (%s): %s.",
6160               devices_[device].name.c_str(), getErrorString(result));
6161       error(RtError::DEBUG_WARNING);
6165     // Set the primary DS buffer sound format.
6166     result = buffer->SetFormat(&waveFormat);
6167     if ( FAILED(result) ) {
6169       sprintf(message_, "RtApiDs: Unable to set primary buffer format (%s): %s.",
6170               devices_[device].name.c_str(), getErrorString(result));
6171       error(RtError::DEBUG_WARNING);
6175     // Setup the secondary DS buffer description.
6176     dsBufferSize = (DWORD)buffer_size;
6177     ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
6178     bufferDescription.dwSize = sizeof(DSBUFFERDESC);
6179     bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
6180                                   DSBCAPS_GETCURRENTPOSITION2 |
6181                                   DSBCAPS_LOCHARDWARE );  // Force hardware mixing
6182     bufferDescription.dwBufferBytes = buffer_size;
6183     bufferDescription.lpwfxFormat = &waveFormat;
6185     // Try to create the secondary DS buffer. If that doesn't work,
6186     // try to use software mixing. Otherwise, there's a problem.
6187     result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
6188     if ( FAILED(result) ) {
6189       bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
6190                                     DSBCAPS_GETCURRENTPOSITION2 |
6191                                     DSBCAPS_LOCSOFTWARE );  // Force software mixing
6192       result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
6193       if ( FAILED(result) ) {
6195         sprintf(message_, "RtApiDs: Unable to create secondary DS buffer (%s): %s.",
6196                 devices_[device].name.c_str(), getErrorString(result));
6197         error(RtError::DEBUG_WARNING);
6202     // Get the buffer size ... might be different from what we specified.
6204     dsbcaps.dwSize = sizeof(DSBCAPS);
6205     buffer->GetCaps(&dsbcaps);
6206     buffer_size = dsbcaps.dwBufferBytes;
6208     // Lock the DS buffer
6209     result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
6210     if ( FAILED(result) ) {
6213       sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
6214               devices_[device].name.c_str(), getErrorString(result));
6215       error(RtError::DEBUG_WARNING);
6219     // Zero the DS buffer
6220     ZeroMemory(audioPtr, dataLen);
6222     // Unlock the DS buffer
6223     result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
6224     if ( FAILED(result) ) {
6227       sprintf(message_, "RtApiDs: Unable to unlock buffer(%s): %s.",
6228               devices_[device].name.c_str(), getErrorString(result));
6229       error(RtError::DEBUG_WARNING);
6233     ohandle = (void *) object;
6234     bhandle = (void *) buffer;
6235     stream_.nDeviceChannels[0] = channels;
// ---------------- INPUT (capture) setup ----------------
6238   if ( mode == INPUT ) {
6240     if ( devices_[device].maxInputChannels < channels ) {
6241       sprintf(message_, "RtAudioDS: device (%s) does not support %d channels.", devices_[device].name.c_str(), channels);
6242       error(RtError::DEBUG_WARNING);
6246     // Enumerate through input devices to find the id (if it exists).
6247     result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
6248     if ( FAILED(result) ) {
6249       sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
6250               getErrorString(result));
6251       error(RtError::DEBUG_WARNING);
6255     if ( dsinfo.isValid == false ) {
6256       sprintf(message_, "RtAudioDS: input device (%s) id not found!", devices_[device].name.c_str());
6257       error(RtError::DEBUG_WARNING);
6261     LPGUID id = dsinfo.id;
6262     LPDIRECTSOUNDCAPTURE object;
6263     LPDIRECTSOUNDCAPTUREBUFFER buffer;
6264     DSCBUFFERDESC bufferDescription;
6266     result = DirectSoundCaptureCreate( id, &object, NULL );
6267     if ( FAILED(result) ) {
6268       sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
6269               devices_[device].name.c_str(), getErrorString(result));
6270       error(RtError::DEBUG_WARNING);
6274     // Setup the secondary DS buffer description.
6275     dsBufferSize = buffer_size;
6276     ZeroMemory(&bufferDescription, sizeof(DSCBUFFERDESC));
6277     bufferDescription.dwSize = sizeof(DSCBUFFERDESC);
6278     bufferDescription.dwFlags = 0;
6279     bufferDescription.dwReserved = 0;
6280     bufferDescription.dwBufferBytes = buffer_size;
6281     bufferDescription.lpwfxFormat = &waveFormat;
6283     // Create the capture buffer.
6284     result = object->CreateCaptureBuffer(&bufferDescription, &buffer, NULL);
6285     if ( FAILED(result) ) {
6287       sprintf(message_, "RtApiDs: Unable to create capture buffer (%s): %s.",
6288               devices_[device].name.c_str(), getErrorString(result));
6289       error(RtError::DEBUG_WARNING);
6293     // Lock the capture buffer
6294     result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
6295     if ( FAILED(result) ) {
6298       sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
6299               devices_[device].name.c_str(), getErrorString(result));
6300       error(RtError::DEBUG_WARNING);
// Zero the capture buffer so stale data is never returned.
6305     ZeroMemory(audioPtr, dataLen);
6307     // Unlock the buffer
6308     result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
6309     if ( FAILED(result) ) {
6312       sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
6313               devices_[device].name.c_str(), getErrorString(result));
6314       error(RtError::DEBUG_WARNING);
6318     ohandle = (void *) object;
6319     bhandle = (void *) buffer;
6320     stream_.nDeviceChannels[1] = channels;
// ---------------- common stream_ bookkeeping ----------------
6323   stream_.userFormat = format;
6324   if ( waveFormat.wBitsPerSample == 8 )
6325     stream_.deviceFormat[mode] = RTAUDIO_SINT8;
6327     stream_.deviceFormat[mode] = RTAUDIO_SINT16;
6328   stream_.nUserChannels[mode] = channels;
6330   stream_.bufferSize = *bufferSize;
6332   // Set flags for buffer conversion
6333   stream_.doConvertBuffer[mode] = false;
6334   if (stream_.userFormat != stream_.deviceFormat[mode])
6335     stream_.doConvertBuffer[mode] = true;
6336   if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
6337     stream_.doConvertBuffer[mode] = true;
6339   // Allocate necessary internal buffers
// User buffer is sized for the larger channel count so it can serve both
// directions of a duplex stream.
6340   if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
6343     if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
6344       buffer_bytes = stream_.nUserChannels[0];
6346       buffer_bytes = stream_.nUserChannels[1];
6348     buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
6349     if (stream_.userBuffer) free(stream_.userBuffer);
6350     stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
6351     if (stream_.userBuffer == NULL) {
6352       sprintf(message_, "RtApiDs: error allocating user buffer memory (%s).",
6353               devices_[device].name.c_str());
6358   if ( stream_.doConvertBuffer[mode] ) {
6361     bool makeBuffer = true;
6362     if ( mode == OUTPUT )
6363       buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
6364     else { // mode == INPUT
6365       buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
// In duplex, reuse the existing device buffer if it is already big enough.
6366       if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
6367         long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
6368         if ( buffer_bytes < bytes_out ) makeBuffer = false;
6373       buffer_bytes *= *bufferSize;
6374       if (stream_.deviceBuffer) free(stream_.deviceBuffer);
6375       stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
6376       if (stream_.deviceBuffer == NULL) {
6377         sprintf(message_, "RtApiDs: error allocating device buffer memory (%s).",
6378                 devices_[device].name.c_str());
6384   // Allocate our DsHandle structures for the stream.
// handles[0] = playback, handles[1] = capture.
6386   if ( stream_.apiHandle == 0 ) {
6387     handles = (DsHandle *) calloc(2, sizeof(DsHandle));
6388     if ( handles == NULL ) {
6389       sprintf(message_, "RtApiDs: Error allocating DsHandle memory (%s).",
6390               devices_[device].name.c_str());
6393     handles[0].object = 0;
6394     handles[1].object = 0;
6395     stream_.apiHandle = (void *) handles;
6398     handles = (DsHandle *) stream_.apiHandle;
6399   handles[mode].object = ohandle;
6400   handles[mode].buffer = bhandle;
6401   handles[mode].dsBufferSize = dsBufferSize;
6402   handles[mode].dsPointerLeadTime = dsPointerLeadTime;
6404   stream_.device[mode] = device;
6405   stream_.state = STREAM_STOPPED;
6406   if ( stream_.mode == OUTPUT && mode == INPUT )
6407     // We had already set up an output stream.
6408     stream_.mode = DUPLEX;
6410     stream_.mode = mode;
6411   stream_.nBuffers = nBuffers;
6412   stream_.sampleRate = sampleRate;
6414   // Setup the buffer conversion information structure.
6415   if ( stream_.doConvertBuffer[mode] ) {
6416     if (mode == INPUT) { // convert device to user buffer
6417       stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
6418       stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
6419       stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
6420       stream_.convertInfo[mode].outFormat = stream_.userFormat;
6422     else { // convert user to device buffer
6423       stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
6424       stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
6425       stream_.convertInfo[mode].inFormat = stream_.userFormat;
6426       stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
6429     if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
6430       stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
6432       stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
6434     // Set up the interleave/deinterleave offsets.
6435     if ( mode == INPUT && stream_.deInterleave[1] ) {
6436       for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
6437         stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
6438         stream_.convertInfo[mode].outOffset.push_back( k );
6439         stream_.convertInfo[mode].inJump = 1;
6442     else if (mode == OUTPUT && stream_.deInterleave[0]) {
6443       for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
6444         stream_.convertInfo[mode].inOffset.push_back( k );
6445         stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
6446         stream_.convertInfo[mode].outJump = 1;
// Interleaved on both sides: identity offsets.
6450       for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
6451         stream_.convertInfo[mode].inOffset.push_back( k );
6452         stream_.convertInfo[mode].outOffset.push_back( k );
// ---------------- error cleanup (label elided in this chunk) ----------------
// Release any COM objects and heap buffers acquired above, then report.
6461   if (handles[0].object) {
6462     LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object;
6463     LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6464     if (buffer) buffer->Release();
6467   if (handles[1].object) {
6468     LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object;
6469     LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6470     if (buffer) buffer->Release();
6474   stream_.apiHandle = 0;
6477   if (stream_.userBuffer) {
6478     free(stream_.userBuffer);
6479     stream_.userBuffer = 0;
6482   error(RtError::DEBUG_WARNING);
// Install the user callback and spawn the callback service thread.
// NOTE(review): the embedded line numbering (6487-6489, 6494-6496, ...) jumps,
// so braces/returns and the declaration of thread_id are not visible in this extract.
6486 void RtApiDs :: setStreamCallback(RtAudioCallback callback, void *userData)
6490 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
// Refuse to install a second callback; presumably the missing line after the
// warning returns early — TODO confirm against the full source.
6491 if ( info->usingCallback ) {
6492 sprintf(message_, "RtApiDs: A callback is already set for this stream!");
6493 error(RtError::WARNING);
// Record the callback, its user data, and the owning RtApiDs instance for
// the thread entry point (callbackHandler) to use.
6497 info->callback = (void *) callback;
6498 info->userData = userData;
6499 info->usingCallback = true;
6500 info->object = (void *) this;
// Start the callback thread with the CRT-safe _beginthreadex (not CreateThread).
6503 info->thread = _beginthreadex(NULL, 0, &callbackHandler,
6504 &stream_.callbackInfo, 0, &thread_id);
// A zero return means thread creation failed; roll back the usingCallback flag.
6505 if (info->thread == 0) {
6506 info->usingCallback = false;
6507 sprintf(message_, "RtApiDs: error starting callback thread!");
6508 error(RtError::THREAD_ERROR);
6511 // When spawning multiple threads in quick succession, it appears to be
6512 // necessary to wait a bit for each to initialize ... another windoism!
// Tear down the callback thread: clear the usingCallback flag so the thread's
// loop exits, wait for it to finish, then close its handle and reset state.
// NOTE(review): numbering gaps (6517-6519, 6521, 6523-6524, ...) hide braces
// and presumably a stopStream() call under the STREAM_RUNNING check.
6516 void RtApiDs :: cancelStreamCallback()
6520 if (stream_.callbackInfo.usingCallback) {
6522 if (stream_.state == STREAM_RUNNING)
6525 MUTEX_LOCK(&stream_.mutex);
// Signal the thread loop to exit, then join it before closing the handle.
6527 stream_.callbackInfo.usingCallback = false;
6528 WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE );
6529 CloseHandle( (HANDLE)stream_.callbackInfo.thread );
6530 stream_.callbackInfo.thread = 0;
6531 stream_.callbackInfo.callback = NULL;
6532 stream_.callbackInfo.userData = NULL;
6534 MUTEX_UNLOCK(&stream_.mutex);
// Close the open stream: stop the callback thread, release the DirectSound
// playback/capture objects, and free the user/device buffers. Called from the
// destructor, so errors are reported as warnings rather than thrown.
6538 void RtApiDs :: closeStream()
6540 // We don't want an exception to be thrown here because this
6541 // function is called by our class destructor. So, do our own
6543 if ( stream_.mode == UNINITIALIZED ) {
6544 sprintf(message_, "RtApiDs::closeStream(): no open stream to close!");
6545 error(RtError::WARNING);
// Stop and join the callback thread before releasing the buffers it uses.
6549 if (stream_.callbackInfo.usingCallback) {
6550 stream_.callbackInfo.usingCallback = false;
6551 WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE );
6552 CloseHandle( (HANDLE)stream_.callbackInfo.thread );
6555 DsHandle *handles = (DsHandle *) stream_.apiHandle;
// handles[0] = playback, handles[1] = capture. NOTE(review): the lines that
// Stop()/Release() the buffers and objects (6560-6566, 6570-6576) are missing
// from this extract — confirm against the full source.
6557 if (handles[0].object) {
6558 LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object;
6559 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6567 if (handles[1].object) {
6568 LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object;
6569 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6577 stream_.apiHandle = 0;
// Free the heap buffers allocated when the stream was opened.
6580 if (stream_.userBuffer) {
6581 free(stream_.userBuffer);
6582 stream_.userBuffer = 0;
6585 if (stream_.deviceBuffer) {
6586 free(stream_.deviceBuffer);
6587 stream_.deviceBuffer = 0;
6590 stream_.mode = UNINITIALIZED;
// Start playback and/or capture: reset the statistics hook, set up the DUPLEX
// pre-roll, and kick the DirectSound buffers into looping mode.
6593 void RtApiDs :: startStream()
6596 if (stream_.state == STREAM_RUNNING) return;
6599 // increase scheduler frequency on lesser windows (a side-effect of increasing timer accuracy.
6600 // on greater windows (Win2K or later), this is already in effect.
6602 MUTEX_LOCK(&stream_.mutex);
6605 DsHandle *handles = (DsHandle *) stream_.apiHandle;
// Reset the backdoor RtDsStatistics block for this run.
6610 memset(&statistics,0,sizeof(statistics));
6611 statistics.sampleRate = stream_.sampleRate;
6612 statistics.writeDeviceBufferLeadBytes = handles[0].dsPointerLeadTime ;
6614 buffersRolling = false;
6615 duplexPrerollBytes = 0;
6617 if (stream_.mode == DUPLEX)
6619 // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize.
6620 duplexPrerollBytes = (int)(0.5*stream_.sampleRate*formatBytes( stream_.deviceFormat[1])*stream_.nDeviceChannels[1]);
6623 #ifdef GENERATE_DEBUG_LOG
6624 currentDebugLogEntry = 0;
// Start the playback buffer looping (index 0 = output side).
6628 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6629 statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0])
6630 *stream_.nDeviceChannels[0];
6633 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6634 result = buffer->Play(0, 0, DSBPLAY_LOOPING );
6635 if ( FAILED(result) ) {
6636 sprintf(message_, "RtApiDs: Unable to start buffer (%s): %s.",
6637 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6638 error(RtError::DRIVER_ERROR);
// Start the capture buffer looping (index 1 = input side).
6642 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
6643 statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1])
6644 *stream_.nDeviceChannels[1];
6646 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6647 result = buffer->Start(DSCBSTART_LOOPING );
6648 if ( FAILED(result) ) {
6649 sprintf(message_, "RtApiDs: Unable to start capture buffer (%s): %s.",
6650 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6651 error(RtError::DRIVER_ERROR);
6654 stream_.state = STREAM_RUNNING;
6656 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream gracefully: drain playback by writing one bufferSize of
// zeroes per ring buffer segment (there is no DirectSound "drain" call), then
// stop and clear the capture buffer.
6659 void RtApiDs :: stopStream()
6662 if (stream_.state == STREAM_STOPPED) return;
6665 // Change the state before the lock to improve shutdown response
6666 // when using a callback.
6667 stream_.state = STREAM_STOPPED;
6668 MUTEX_LOCK(&stream_.mutex);
6671 timeEndPeriod(1); // revert to normal scheduler frequency on lesser windows.
6673 #ifdef GENERATE_DEBUG_LOG
6674 // write the timing log to a .TSV file for analysis in Excel.
6675 unlink("c:/rtaudiolog.txt");
6676 std::ofstream os("c:/rtaudiolog.txt");
6677 os << "writeTime\treadDelay\tnextWritePointer\tnextReadPointer\tcurrentWritePointer\tsafeWritePointer\tcurrentReadPointer\tsafeReadPointer" << std::endl;
6678 for (int i = 0; i < currentDebugLogEntry ; ++i)
6680 TTickRecord &r = debugLog[i];
6682 << r.writeTime-debugLog[0].writeTime << "\t" << (r.readTime-r.writeTime) << "\t"
6683 << r.nextWritePointer % BUFFER_SIZE << "\t" << r.nextReadPointer % BUFFER_SIZE
6684 << "\t" << r.currentWritePointer % BUFFER_SIZE << "\t" << r.safeWritePointer % BUFFER_SIZE
6685 << "\t" << r.currentReadPointer % BUFFER_SIZE << "\t" << r.safeReadPointer % BUFFER_SIZE << std::endl;
6689 // There is no specific DirectSound API call to "drain" a buffer
6690 // before stopping. We can hack this for playback by writing zeroes
6691 // for another bufferSize * nBuffers frames. For capture, the
6692 // concept is less clear so we'll repeat what we do in the
6693 // abortStream() case.
6696 LPVOID buffer1 = NULL;
6697 LPVOID buffer2 = NULL;
6698 DWORD bufferSize1 = 0;
6699 DWORD bufferSize2 = 0;
6700 DsHandle *handles = (DsHandle *) stream_.apiHandle;
6701 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6703 DWORD currentPos, safePos;
6704 long buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0]
6705 * formatBytes(stream_.deviceFormat[0]);
6708 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6709 long nextWritePos = handles[0].bufferPointer;
6710 dsBufferSize = handles[0].dsBufferSize;
6712 // Write zeroes for nBuffer counts.
6713 for (int i=0; i<stream_.nBuffers; i++) {
6715 // Find out where the read and "safe write" pointers are.
// NOTE(review): "¤tPos" below is mojibake for "&currentPos" (the
// "&curren" prefix was eaten by an HTML-entity pass) — restore "&currentPos".
6716 result = dsBuffer->GetCurrentPosition(¤tPos, &safePos);
6717 if ( FAILED(result) ) {
6718 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
6719 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6720 error(RtError::DRIVER_ERROR);
6722 // Chase nextWritePos.
6724 if ( currentPos < (DWORD)nextWritePos ) currentPos += dsBufferSize; // unwrap offset
6725 DWORD endWrite = nextWritePos + buffer_bytes;
6727 // Check whether the entire write region is behind the play pointer.
// Sleep roughly 90% of the time needed for the play cursor to clear the
// write region (the 900.0 factor = 1000 ms * 0.9 fudge factor).
6728 while ( currentPos < endWrite ) {
6729 double millis = (endWrite - currentPos) * 900.0;
6730 millis /= ( formatBytes(stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] *stream_.sampleRate);
6731 if ( millis < 1.0 ) millis = 1.0;
6732 Sleep( (DWORD) millis );
6734 // Wake up, find out where we are now
// NOTE(review): same "&currentPos" mojibake as above.
6735 result = dsBuffer->GetCurrentPosition( ¤tPos, &safePos );
6736 if ( FAILED(result) ) {
6737 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
6738 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6739 error(RtError::DRIVER_ERROR);
6741 if ( currentPos < (DWORD)nextWritePos ) currentPos += dsBufferSize; // unwrap offset
6744 // Lock free space in the buffer
6745 result = dsBuffer->Lock (nextWritePos, buffer_bytes, &buffer1,
6746 &bufferSize1, &buffer2, &bufferSize2, 0);
6747 if ( FAILED(result) ) {
6748 sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
6749 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6750 error(RtError::DRIVER_ERROR);
6753 // Zero the free space
6754 ZeroMemory(buffer1, bufferSize1);
6755 if (buffer2 != NULL) ZeroMemory(buffer2, bufferSize2);
6757 // Update our buffer offset and unlock sound buffer
// BUG(review): Unlock's HRESULT is discarded — the FAILED(result) test
// below re-checks the stale result from Lock. Should be
// "result = dsBuffer->Unlock(...)".
6758 dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
6759 if ( FAILED(result) ) {
6760 sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
6761 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6762 error(RtError::DRIVER_ERROR);
6764 nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize;
6765 handles[0].bufferPointer = nextWritePos;
6768 // If we play again, start at the beginning of the buffer.
6769 handles[0].bufferPointer = 0;
// Capture side: stop, then zero the whole capture ring so a restart does
// not replay stale data.
6772 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
6774 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6778 result = buffer->Stop();
6779 if ( FAILED(result) ) {
6780 sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s",
6781 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6782 error(RtError::DRIVER_ERROR);
6785 dsBufferSize = handles[1].dsBufferSize;
6787 // Lock the buffer and clear it so that if we start to play again,
6788 // we won't have old data playing.
6789 result = buffer->Lock(0, dsBufferSize, &buffer1, &bufferSize1, NULL, NULL, 0);
6790 if ( FAILED(result) ) {
6791 sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
6792 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6793 error(RtError::DRIVER_ERROR);
6796 // Zero the DS buffer
6797 ZeroMemory(buffer1, bufferSize1);
6799 // Unlock the DS buffer
6800 result = buffer->Unlock(buffer1, bufferSize1, NULL, 0);
6801 if ( FAILED(result) ) {
6802 sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
6803 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6804 error(RtError::DRIVER_ERROR);
6807 // If we start recording again, we must begin at beginning of buffer.
6808 handles[1].bufferPointer = 0;
6811 MUTEX_UNLOCK(&stream_.mutex);
// Stop the stream immediately (no drain): Stop() each DirectSound buffer,
// zero its entire ring so a restart plays silence, and reset buffer pointers.
6814 void RtApiDs :: abortStream()
6817 if (stream_.state == STREAM_STOPPED) return;
6819 // Change the state before the lock to improve shutdown response
6820 // when using a callback.
6821 stream_.state = STREAM_STOPPED;
6822 MUTEX_LOCK(&stream_.mutex);
6828 DsHandle *handles = (DsHandle *) stream_.apiHandle;
// Playback side (index 0).
6829 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6831 LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6832 result = buffer->Stop();
6833 if ( FAILED(result) ) {
6834 sprintf(message_, "RtApiDs: Unable to stop buffer (%s): %s",
6835 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6836 error(RtError::DRIVER_ERROR);
6839 dsBufferSize = handles[0].dsBufferSize;
6841 // Lock the buffer and clear it so that if we start to play again,
6842 // we won't have old data playing.
6843 result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0);
6844 if ( FAILED(result) ) {
6845 sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
6846 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6847 error(RtError::DRIVER_ERROR);
6850 // Zero the DS buffer
6851 ZeroMemory(audioPtr, dataLen);
6853 // Unlock the DS buffer
6854 result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
6855 if ( FAILED(result) ) {
6856 sprintf(message_, "RtApiDs: Unable to unlock buffer (%s): %s.",
6857 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6858 error(RtError::DRIVER_ERROR);
6861 // If we start playing again, we must begin at beginning of buffer.
6862 handles[0].bufferPointer = 0;
// Capture side (index 1) — same stop/zero/reset sequence.
6865 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
6866 LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6870 result = buffer->Stop();
6871 if ( FAILED(result) ) {
6872 sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s",
6873 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6874 error(RtError::DRIVER_ERROR);
6877 dsBufferSize = handles[1].dsBufferSize;
6879 // Lock the buffer and clear it so that if we start to play again,
6880 // we won't have old data playing.
6881 result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0);
6882 if ( FAILED(result) ) {
6883 sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
6884 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6885 error(RtError::DRIVER_ERROR);
6888 // Zero the DS buffer
6889 ZeroMemory(audioPtr, dataLen);
6891 // Unlock the DS buffer
6892 result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
6893 if ( FAILED(result) ) {
6894 sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
6895 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6896 error(RtError::DRIVER_ERROR);
6899 // If we start recording again, we must begin at beginning of buffer.
6900 handles[1].bufferPointer = 0;
6903 MUTEX_UNLOCK(&stream_.mutex);
// Estimate how many frames a tickStream() call would have to wait for,
// based on the distance between our software pointer and the device's
// safe read/write cursor. Returns 0 when the stream is stopped.
6906 int RtApiDs :: streamWillBlock()
6909 if (stream_.state == STREAM_STOPPED) return 0;
6911 MUTEX_LOCK(&stream_.mutex);
6916 DWORD currentPos, safePos;
6918 DsHandle *handles = (DsHandle *) stream_.apiHandle;
6919 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
6921 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
6922 UINT nextWritePos = handles[0].bufferPointer;
6923 channels = stream_.nDeviceChannels[0];
6924 DWORD dsBufferSize = handles[0].dsBufferSize;
6926 // Find out where the read and "safe write" pointers are.
// NOTE(review): "¤tPos" is mojibake for "&currentPos" — restore it.
6927 result = dsBuffer->GetCurrentPosition(¤tPos, &safePos);
6928 if ( FAILED(result) ) {
6929 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
6930 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
6931 error(RtError::DRIVER_ERROR);
// Advance the safe position by the configured lead time, wrapping around
// the ring buffer; the gap to nextWritePos is the free space in bytes.
6934 DWORD leadPos = safePos + handles[0].dsPointerLeadTime;
6935 if (leadPos > dsBufferSize) {
6936 leadPos -= dsBufferSize;
6938 if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
6940 frames = (leadPos - nextWritePos);
6941 frames /= channels * formatBytes(stream_.deviceFormat[0]);
6944 if (stream_.mode == INPUT ) {
6945 // note that we don't block on DUPLEX input anymore. We run lockstep with the write pointer instead.
6947 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
6948 UINT nextReadPos = handles[1].bufferPointer;
6949 channels = stream_.nDeviceChannels[1];
6950 DWORD dsBufferSize = handles[1].dsBufferSize;
6952 // Find out where the write and "safe read" pointers are.
// NOTE(review): same "&currentPos" mojibake.
6953 result = dsBuffer->GetCurrentPosition(¤tPos, &safePos);
6954 if ( FAILED(result) ) {
6955 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
6956 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
6957 error(RtError::DRIVER_ERROR);
6960 if ( safePos < (DWORD)nextReadPos ) safePos += dsBufferSize; // unwrap offset
6962 frames = (int)(safePos - nextReadPos);
6963 frames /= channels * formatBytes(stream_.deviceFormat[1]);
// Convert "frames available" into "frames we would block for".
6966 frames = stream_.bufferSize - frames;
6967 if (frames < 0) frames = 0;
6969 MUTEX_UNLOCK(&stream_.mutex);
// Process one buffer of audio: run the user callback (if any), then chase the
// DirectSound play/capture cursors — writing the output buffer behind the
// "safe write" cursor and reading capture data behind the "safe read" cursor,
// sleeping as needed and resyncing on under/overruns. This is the heart of
// the "pointer chasing" scheme mentioned in the file header.
6973 void RtApiDs :: tickStream()
6978 if (stream_.state == STREAM_STOPPED) {
6979 if (stream_.callbackInfo.usingCallback) Sleep(50); // sleep 50 milliseconds
6982 else if (stream_.callbackInfo.usingCallback) {
6983 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
6984 stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
6987 MUTEX_LOCK(&stream_.mutex);
6989 // The state might change while waiting on a mutex.
6990 if (stream_.state == STREAM_STOPPED) {
6991 MUTEX_UNLOCK(&stream_.mutex);
6996 DWORD currentWritePos, safeWritePos;
6997 DWORD currentReadPos, safeReadPos;
7001 #ifdef GENERATE_DEBUG_LOG
7002 DWORD writeTime, readTime;
7004 LPVOID buffer1 = NULL;
7005 LPVOID buffer2 = NULL;
7006 DWORD bufferSize1 = 0;
7007 DWORD bufferSize2 = 0;
7011 DsHandle *handles = (DsHandle *) stream_.apiHandle;
// DUPLEX startup: wait until BOTH device cursors have actually moved, then
// set our software pointers relative to the observed safe positions so the
// read and write sides run in lockstep.
7013 if (stream_.mode == DUPLEX && !buffersRolling)
7015 assert(handles[0].dsBufferSize == handles[1].dsBufferSize);
7017 // it takes a while for the devices to get rolling. As a result, there's
7018 // no guarantee that the capture and write device pointers will move in lockstep.
7019 // Wait here for both devices to start rolling, and then set our buffer pointers accordingly.
7020 // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600 bytes later than the write
7023 // Stub: a serious risk of having a pre-emptive scheduling round take place between
7024 // the two GetCurrentPosition calls... but I'm really not sure how to solve the problem.
7025 // Temporarily boost to Realtime priority, maybe; but I'm not sure what priority the
7026 // directsound service threads run at. We *should* be roughly within a ms or so of correct.
7028 LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
7029 LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
7032 DWORD initialWritePos, initialSafeWritePos;
7033 DWORD initialReadPos, initialSafeReadPos;;
7036 result = dsWriteBuffer->GetCurrentPosition(&initialWritePos, &initialSafeWritePos);
7037 if ( FAILED(result) ) {
7038 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
7039 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7040 error(RtError::DRIVER_ERROR);
7042 result = dsCaptureBuffer->GetCurrentPosition(&initialReadPos, &initialSafeReadPos);
7043 if ( FAILED(result) ) {
7044 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
7045 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7046 error(RtError::DRIVER_ERROR);
// Poll until both cursors have moved from their initial positions.
// NOTE(review): "¤tWritePos"/"¤tReadPos" below are mojibake for
// "&currentWritePos"/"&currentReadPos" — restore the "&current" prefix.
7050 result = dsWriteBuffer->GetCurrentPosition(¤tWritePos, &safeWritePos);
7051 if ( FAILED(result) ) {
7052 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
7053 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7054 error(RtError::DRIVER_ERROR);
7056 result = dsCaptureBuffer->GetCurrentPosition(¤tReadPos, &safeReadPos);
7057 if ( FAILED(result) ) {
7058 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
7059 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7060 error(RtError::DRIVER_ERROR);
7062 if (safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos)
7069 assert(handles[0].dsBufferSize == handles[1].dsBufferSize);
// NOTE(review): writeBufferLead is computed but not visibly used in this
// extract — possibly consumed by missing lines; confirm.
7071 UINT writeBufferLead = (safeWritePos-safeReadPos + handles[0].dsBufferSize) % handles[0].dsBufferSize;
7072 buffersRolling = true;
7073 handles[0].bufferPointer = (safeWritePos + handles[0].dsPointerLeadTime);
7074 handles[1].bufferPointer = safeReadPos;
// ---- Playback side ----
7078 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
7080 LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer;
7082 // Setup parameters and do buffer conversion if necessary.
7083 if (stream_.doConvertBuffer[0]) {
7084 buffer = stream_.deviceBuffer;
7085 convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] );
7086 buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0];
7087 buffer_bytes *= formatBytes(stream_.deviceFormat[0]);
7090 buffer = stream_.userBuffer;
7091 buffer_bytes = stream_.bufferSize * stream_.nUserChannels[0];
7092 buffer_bytes *= formatBytes(stream_.userFormat);
7095 // No byte swapping necessary in DirectSound implementation.
7097 // Ahhh ... windoze. 16-bit data is signed but 8-bit data is
7098 // unsigned. So, we need to convert our signed 8-bit data here to
7100 if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 )
7101 for ( int i=0; i<buffer_bytes; i++ ) buffer[i] = (unsigned char) (buffer[i] + 128);
7103 DWORD dsBufferSize = handles[0].dsBufferSize;
7104 nextWritePos = handles[0].bufferPointer;
7109 // Find out where the read and "safe write" pointers are.
// NOTE(review): "&currentWritePos" mojibake again.
7110 result = dsBuffer->GetCurrentPosition(¤tWritePos, &safeWritePos);
7111 if ( FAILED(result) ) {
7112 sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.",
7113 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7114 error(RtError::DRIVER_ERROR);
7117 leadPos = safeWritePos + handles[0].dsPointerLeadTime;
7118 if (leadPos > dsBufferSize) {
7119 leadPos -= dsBufferSize;
7121 if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset
7124 endWrite = nextWritePos + buffer_bytes;
7126 // Check whether the entire write region is behind the play pointer.
7128 if ( leadPos >= endWrite ) break;
7130 // If we are here, then we must wait until the play pointer gets
7131 // beyond the write region. The approach here is to use the
7132 // Sleep() function to suspend operation until safePos catches
7133 // up. Calculate number of milliseconds to wait as:
7134 // time = distance * (milliseconds/second) * fudgefactor /
7135 // ((bytes/sample) * (samples/second))
7136 // A "fudgefactor" less than 1 is used because it was found
7137 // that sleeping too long was MUCH worse than sleeping for
7138 // several shorter periods.
7139 double millis = (endWrite - leadPos) * 900.0;
7140 millis /= ( formatBytes(stream_.deviceFormat[0]) *stream_.nDeviceChannels[0]* stream_.sampleRate);
7141 if ( millis < 1.0 ) millis = 1.0;
7142 if (millis > 50.0) {
7143 static int nOverruns = 0;
7146 Sleep( (DWORD) millis );
7147 // Sleep( (DWORD) 2);
7149 #ifdef GENERATE_DEBUG_LOG
7150 writeTime = timeGetTime();
// Track the worst-case lead between the safe-write and play cursors.
7152 if (statistics.writeDeviceSafeLeadBytes < dsPointerDifference(safeWritePos,currentWritePos,handles[0].dsBufferSize))
7154 statistics.writeDeviceSafeLeadBytes = dsPointerDifference(safeWritePos,currentWritePos,handles[0].dsBufferSize);
// Underrun recovery: if the write region has drifted into the zone between
// the play and safe-write cursors, resync nextWritePos to the lead position.
7158 dsPointerBetween(nextWritePos,safeWritePos,currentWritePos,dsBufferSize)
7159 || dsPointerBetween(endWrite,safeWritePos,currentWritePos,dsBufferSize)
7162 // we've strayed into the forbidden zone.
7163 // resync the read pointer.
7164 ++statistics.numberOfWriteUnderruns;
7165 nextWritePos = safeWritePos + handles[0].dsPointerLeadTime-buffer_bytes+dsBufferSize;
7166 while (nextWritePos >= dsBufferSize) nextWritePos-= dsBufferSize;
7167 handles[0].bufferPointer = nextWritePos;
7168 endWrite = nextWritePos + buffer_bytes;
7171 // Lock free space in the buffer
7172 result = dsBuffer->Lock (nextWritePos, buffer_bytes, &buffer1,
7173 &bufferSize1, &buffer2, &bufferSize2, 0);
7174 if ( FAILED(result) ) {
7175 sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.",
7176 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7177 error(RtError::DRIVER_ERROR);
7180 // Copy our buffer into the DS buffer
7181 CopyMemory(buffer1, buffer, bufferSize1);
7182 if (buffer2 != NULL) CopyMemory(buffer2, buffer+bufferSize1, bufferSize2);
7184 // Update our buffer offset and unlock sound buffer
// BUG(review): Unlock's HRESULT is discarded; the FAILED(result) check
// below tests the stale result from Lock. Should be
// "result = dsBuffer->Unlock(...)".
7185 dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
7186 if ( FAILED(result) ) {
7187 sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.",
7188 devices_[stream_.device[0]].name.c_str(), getErrorString(result));
7189 error(RtError::DRIVER_ERROR);
7191 nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize;
7192 handles[0].bufferPointer = nextWritePos;
// ---- Capture side ----
7195 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
7197 // Setup parameters.
7198 if (stream_.doConvertBuffer[1]) {
7199 buffer = stream_.deviceBuffer;
7200 buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[1];
7201 buffer_bytes *= formatBytes(stream_.deviceFormat[1]);
7204 buffer = stream_.userBuffer;
7205 buffer_bytes = stream_.bufferSize * stream_.nUserChannels[1];
7206 buffer_bytes *= formatBytes(stream_.userFormat);
7208 LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer;
7209 long nextReadPos = handles[1].bufferPointer;
7210 DWORD dsBufferSize = handles[1].dsBufferSize;
7212 // Find out where the write and "safe read" pointers are.
// NOTE(review): "&currentReadPos" mojibake again.
7213 result = dsBuffer->GetCurrentPosition(¤tReadPos, &safeReadPos);
7214 if ( FAILED(result) ) {
7215 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
7216 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7217 error(RtError::DRIVER_ERROR);
7220 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
7221 DWORD endRead = nextReadPos + buffer_bytes;
7223 // Handling depends on whether we are INPUT or DUPLEX.
7224 // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode,
7225 // then a wait here will drag the write pointers into the forbidden zone.
7227 // In DUPLEX mode, rather than wait, we will back off the read pointer until
7228 // it's in a safe position. This causes dropouts, but it seems to be the only
7229 // practical way to sync up the read and write pointers reliably, given the
7230 // the very complex relationship between phase and increment of the read and write
7233 // In order to minimize audible dropouts in DUPLEX mode, we will provide a pre-roll
7234 // period of 0.5 seconds
7235 // in which we return zeros from the read buffer while the pointers sync up.
7237 if (stream_.mode == DUPLEX)
7239 if (safeReadPos < endRead)
7241 if (duplexPrerollBytes <= 0)
7243 // pre-roll time over. Be more agressive.
7244 int adjustment = endRead-safeReadPos;
7246 ++statistics.numberOfReadOverruns;
7248 // large adjustments: we've probably run out of CPU cycles, so just resync exactly,
7249 // and perform fine adjustments later.
7250 // small adjustments: back off by twice as much.
7251 if (adjustment >= 2*buffer_bytes)
7253 nextReadPos = safeReadPos-2*buffer_bytes;
7256 nextReadPos = safeReadPos-buffer_bytes-adjustment;
7258 statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos;
7259 if (statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize;
7261 if (nextReadPos < 0) nextReadPos += dsBufferSize;
7264 // in pre=roll time. Just do it.
7265 nextReadPos = safeReadPos-buffer_bytes;
7266 while (nextReadPos < 0) nextReadPos += dsBufferSize;
7268 endRead = nextReadPos + buffer_bytes;
// INPUT mode: sleep until the safe-read cursor clears the read region.
7271 while ( safeReadPos < endRead ) {
7272 // See comments for playback.
7273 double millis = (endRead - safeReadPos) * 900.0;
7274 millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate);
7275 if ( millis < 1.0 ) millis = 1.0;
7276 Sleep( (DWORD) millis );
7278 // Wake up, find out where we are now
// NOTE(review): "&currentReadPos" mojibake again.
7279 result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos );
7280 if ( FAILED(result) ) {
7281 sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.",
7282 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7283 error(RtError::DRIVER_ERROR);
7286 if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset
7289 #ifdef GENERATE_DEBUG_LOG
7290 readTime = timeGetTime();
7292 if (statistics.readDeviceSafeLeadBytes < dsPointerDifference(currentReadPos,nextReadPos ,dsBufferSize))
7294 statistics.readDeviceSafeLeadBytes = dsPointerDifference(currentReadPos,nextReadPos ,dsBufferSize);
7297 // Lock free space in the buffer
7298 result = dsBuffer->Lock (nextReadPos, buffer_bytes, &buffer1,
7299 &bufferSize1, &buffer2, &bufferSize2, 0);
7300 if ( FAILED(result) ) {
7301 sprintf(message_, "RtApiDs: Unable to lock buffer during capture (%s): %s.",
7302 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7303 error(RtError::DRIVER_ERROR);
// During pre-roll, hand the caller silence instead of the captured data.
7306 if (duplexPrerollBytes <= 0)
7308 // Copy our buffer into the DS buffer
7309 CopyMemory(buffer, buffer1, bufferSize1);
7310 if (buffer2 != NULL) CopyMemory(buffer+bufferSize1, buffer2, bufferSize2);
7312 memset(buffer,0,bufferSize1);
7313 if (buffer2 != NULL) memset(buffer+bufferSize1,0,bufferSize2);
7314 duplexPrerollBytes -= bufferSize1 + bufferSize2;
7317 // Update our buffer offset and unlock sound buffer
7318 nextReadPos = (nextReadPos + bufferSize1 + bufferSize2) % dsBufferSize;
// BUG(review): same discarded Unlock HRESULT as on the playback side —
// FAILED(result) below tests the stale Lock result.
7319 dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2);
7320 if ( FAILED(result) ) {
7321 sprintf(message_, "RtApiDs: Unable to unlock buffer during capture (%s): %s.",
7322 devices_[stream_.device[1]].name.c_str(), getErrorString(result));
7323 error(RtError::DRIVER_ERROR);
7325 handles[1].bufferPointer = nextReadPos;
7328 // No byte swapping necessary in DirectSound implementation.
7330 // If necessary, convert 8-bit data from unsigned to signed.
7331 if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 )
7332 for ( int j=0; j<buffer_bytes; j++ ) buffer[j] = (signed char) (buffer[j] - 128);
7334 // Do buffer conversion if necessary.
7335 if (stream_.doConvertBuffer[1])
7336 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
7338 #ifdef GENERATE_DEBUG_LOG
7339 if (currentDebugLogEntry < debugLog.size())
7341 TTickRecord &r = debugLog[currentDebugLogEntry++];
7342 r.currentReadPointer = currentReadPos;
7343 r.safeReadPointer = safeReadPos;
7344 r.currentWritePointer = currentWritePos;
7345 r.safeWritePointer = safeWritePos;
7346 r.readTime = readTime;
7347 r.writeTime = writeTime;
7348 r.nextReadPointer = handles[1].bufferPointer;
7349 r.nextWritePointer = handles[0].bufferPointer;
7354 MUTEX_UNLOCK(&stream_.mutex);
// Honor a non-zero return from the user callback by stopping the stream.
7356 if (stream_.callbackInfo.usingCallback && stopStream)
7359 // Definitions for utility functions and callbacks
7360 // specific to the DirectSound implementation.
// Thread entry point started by setStreamCallback via _beginthreadex.
// Loops calling tickStream() until usingCallback is cleared; an RtError from
// tickStream logs to stderr and ends the thread. NOTE(review): the thread
// epilogue (presumably _endthreadex/return) falls in the missing lines
// 7375-7382 of this extract — confirm against the full source.
7362 extern "C" unsigned __stdcall callbackHandler(void *ptr)
7364 CallbackInfo *info = (CallbackInfo *) ptr;
7365 RtApiDs *object = (RtApiDs *) info->object;
// Read the flag through a pointer each iteration so cancelStreamCallback's
// write to usingCallback is observed and the loop exits.
7366 bool *usingCallback = &info->usingCallback;
7368 while ( *usingCallback ) {
7370 object->tickStream();
7372 catch (RtError &exception) {
7373 fprintf(stderr, "\nRtApiDs: callback thread error (%s) ... closing thread.\n\n",
7374 exception.getMessageString());
// DirectSoundEnumerate callback: lpContext points at an int counter.
// NOTE(review): the body after the cast (lines 7386-7387, 7389-7393) is
// missing from this extract — presumably it increments the counter and
// returns true to continue enumeration; confirm against the full source.
7383 static bool CALLBACK deviceCountCallback(LPGUID lpguid,
7384 LPCSTR lpcstrDescription,
7385 LPCSTR lpcstrModule,
7388 int *pointer = ((int *) lpContext);
// Enumeration callback that fills the next free enum_info slot with the
// device description and probes the device (capture or playback, per
// info->isInput) to decide whether it is usable (info->isValid).
7394 static bool CALLBACK deviceInfoCallback(LPGUID lpguid,
7395 LPCSTR lpcstrDescription,
7396 LPCSTR lpcstrModule,
7399 enum_info *info = ((enum_info *) lpContext);
// Advance to the first slot whose name is still empty.
7400 while (strlen(info->name) > 0) info++;
7402 strncpy(info->name, lpcstrDescription, 64);
7406 info->isValid = false;
// Capture probe: device is valid if it reports channels and formats.
7407 if (info->isInput == true) {
7409 LPDIRECTSOUNDCAPTURE object;
7411 hr = DirectSoundCaptureCreate( lpguid, &object, NULL );
7412 if( hr != DS_OK ) return true;
7414 caps.dwSize = sizeof(caps);
7415 hr = object->GetCaps( &caps );
7417 if (caps.dwChannels > 0 && caps.dwFormats > 0)
7418 info->isValid = true;
// Playback probe: device is valid if its primary buffer supports mono or
// stereo. NOTE(review): the Release() of the probe objects is not visible
// (numbering gaps 7419-7423, 7433-7439) — confirm there is no COM leak.
7424 LPDIRECTSOUND object;
7425 hr = DirectSoundCreate( lpguid, &object, NULL );
7426 if( hr != DS_OK ) return true;
7428 caps.dwSize = sizeof(caps);
7429 hr = object->GetCaps( &caps );
7431 if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO )
7432 info->isValid = true;
7440 static bool CALLBACK defaultDeviceCallback(LPGUID lpguid,
7441 LPCSTR lpcstrDescription,
7442 LPCSTR lpcstrModule,
7445 enum_info *info = ((enum_info *) lpContext);
7447 if ( lpguid == NULL ) {
7448 strncpy(info->name, lpcstrDescription, 64);
7455 static bool CALLBACK deviceIdCallback(LPGUID lpguid,
7456 LPCSTR lpcstrDescription,
7457 LPCSTR lpcstrModule,
7460 enum_info *info = ((enum_info *) lpContext);
7462 if ( strncmp( info->name, lpcstrDescription, 64 ) == 0 ) {
7464 info->isValid = true;
7471 static char* getErrorString(int code)
7475 case DSERR_ALLOCATED:
7476 return "Already allocated.";
7478 case DSERR_CONTROLUNAVAIL:
7479 return "Control unavailable.";
7481 case DSERR_INVALIDPARAM:
7482 return "Invalid parameter.";
7484 case DSERR_INVALIDCALL:
7485 return "Invalid call.";
7488 return "Generic error.";
7490 case DSERR_PRIOLEVELNEEDED:
7491 return "Priority level needed";
7493 case DSERR_OUTOFMEMORY:
7494 return "Out of memory";
7496 case DSERR_BADFORMAT:
7497 return "The sample rate or the channel format is not supported.";
7499 case DSERR_UNSUPPORTED:
7500 return "Not supported.";
7502 case DSERR_NODRIVER:
7503 return "No driver.";
7505 case DSERR_ALREADYINITIALIZED:
7506 return "Already initialized.";
7508 case DSERR_NOAGGREGATION:
7509 return "No aggregation.";
7511 case DSERR_BUFFERLOST:
7512 return "Buffer lost.";
7514 case DSERR_OTHERAPPHASPRIO:
7515 return "Another application already has priority.";
7517 case DSERR_UNINITIALIZED:
7518 return "Uninitialized.";
7521 return "DirectSound unknown error";
7525 //******************** End of __WINDOWS_DS__ *********************//
7528 #if defined(__IRIX_AL__) // SGI's AL API for IRIX
7530 #include <dmedia/audio.h>
7534 extern "C" void *callbackHandler(void * ptr);
7536 RtApiAl :: RtApiAl()
7540 if (nDevices_ <= 0) {
7541 sprintf(message_, "RtApiAl: no Irix AL audio devices found!");
7542 error(RtError::NO_DEVICES_FOUND);
7546 RtApiAl :: ~RtApiAl()
7548 // The subclass destructor gets called before the base class
7549 // destructor, so close any existing streams before deallocating
7550 // apiDeviceId memory.
7551 if ( stream_.mode != UNINITIALIZED ) closeStream();
7553 // Free our allocated apiDeviceId memory.
7555 for ( unsigned int i=0; i<devices_.size(); i++ ) {
7556 id = (long *) devices_[i].apiDeviceId;
7561 void RtApiAl :: initialize(void)
7563 // Count cards and devices
7566 // Determine the total number of input and output devices.
7567 nDevices_ = alQueryValues(AL_SYSTEM, AL_DEVICES, 0, 0, 0, 0);
7568 if (nDevices_ < 0) {
7569 sprintf(message_, "RtApiAl: error counting devices: %s.",
7570 alGetErrorString(oserror()));
7571 error(RtError::DRIVER_ERROR);
7574 if (nDevices_ <= 0) return;
7576 ALvalue *vls = (ALvalue *) new ALvalue[nDevices_];
7578 // Create our list of devices and write their ascii identifiers and resource ids.
7582 pvs[0].param = AL_NAME;
7583 pvs[0].value.ptr = name;
7588 outs = alQueryValues(AL_SYSTEM, AL_DEFAULT_OUTPUT, vls, nDevices_, 0, 0);
7591 sprintf(message_, "RtApiAl: error getting output devices: %s.",
7592 alGetErrorString(oserror()));
7593 error(RtError::DRIVER_ERROR);
7596 for (i=0; i<outs; i++) {
7597 if (alGetParams(vls[i].i, pvs, 1) < 0) {
7599 sprintf(message_, "RtApiAl: error querying output devices: %s.",
7600 alGetErrorString(oserror()));
7601 error(RtError::DRIVER_ERROR);
7603 device.name.erase();
7604 device.name.append( (const char *)name, strlen(name)+1);
7605 devices_.push_back(device);
7606 id = (long *) calloc(2, sizeof(long));
7608 devices_[i].apiDeviceId = (void *) id;
7611 ins = alQueryValues(AL_SYSTEM, AL_DEFAULT_INPUT, &vls[outs], nDevices_-outs, 0, 0);
7614 sprintf(message_, "RtApiAl: error getting input devices: %s.",
7615 alGetErrorString(oserror()));
7616 error(RtError::DRIVER_ERROR);
7619 for (i=outs; i<ins+outs; i++) {
7620 if (alGetParams(vls[i].i, pvs, 1) < 0) {
7622 sprintf(message_, "RtApiAl: error querying input devices: %s.",
7623 alGetErrorString(oserror()));
7624 error(RtError::DRIVER_ERROR);
7626 device.name.erase();
7627 device.name.append( (const char *)name, strlen(name)+1);
7628 devices_.push_back(device);
7629 id = (long *) calloc(2, sizeof(long));
7631 devices_[i].apiDeviceId = (void *) id;
7637 int RtApiAl :: getDefaultInputDevice(void)
7641 int result = alQueryValues(AL_SYSTEM, AL_DEFAULT_INPUT, &value, 1, 0, 0);
7643 sprintf(message_, "RtApiAl: error getting default input device id: %s.",
7644 alGetErrorString(oserror()));
7645 error(RtError::WARNING);
7648 for ( unsigned int i=0; i<devices_.size(); i++ ) {
7649 id = (long *) devices_[i].apiDeviceId;
7650 if ( id[1] == value.i ) return i;
7657 int RtApiAl :: getDefaultOutputDevice(void)
7661 int result = alQueryValues(AL_SYSTEM, AL_DEFAULT_OUTPUT, &value, 1, 0, 0);
7663 sprintf(message_, "RtApiAl: error getting default output device id: %s.",
7664 alGetErrorString(oserror()));
7665 error(RtError::WARNING);
7668 for ( unsigned int i=0; i<devices_.size(); i++ ) {
7669 id = (long *) devices_[i].apiDeviceId;
7670 if ( id[0] == value.i ) return i;
7677 void RtApiAl :: probeDeviceInfo(RtApiDevice *info)
7684 // Get output resource ID if it exists.
7685 long *id = (long *) info->apiDeviceId;
7689 // Probe output device parameters.
7690 result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0);
7692 sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.",
7693 info->name.c_str(), alGetErrorString(oserror()));
7694 error(RtError::DEBUG_WARNING);
7697 info->maxOutputChannels = value.i;
7698 info->minOutputChannels = 1;
7701 result = alGetParamInfo(resource, AL_RATE, &pinfo);
7703 sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.",
7704 info->name.c_str(), alGetErrorString(oserror()));
7705 error(RtError::DEBUG_WARNING);
7708 info->sampleRates.clear();
7709 for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
7710 if ( SAMPLE_RATES[k] >= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i )
7711 info->sampleRates.push_back( SAMPLE_RATES[k] );
7715 // The AL library supports all our formats, except 24-bit and 32-bit ints.
7716 info->nativeFormats = (RtAudioFormat) 51;
7719 // Now get input resource ID if it exists.
7723 // Probe input device parameters.
7724 result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0);
7726 sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.",
7727 info->name.c_str(), alGetErrorString(oserror()));
7728 error(RtError::DEBUG_WARNING);
7731 info->maxInputChannels = value.i;
7732 info->minInputChannels = 1;
7735 result = alGetParamInfo(resource, AL_RATE, &pinfo);
7737 sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.",
7738 info->name.c_str(), alGetErrorString(oserror()));
7739 error(RtError::DEBUG_WARNING);
7742 // In the case of the default device, these values will
7743 // overwrite the rates determined for the output device. Since
7744 // the input device is most likely to be more limited than the
7745 // output device, this is ok.
7746 info->sampleRates.clear();
7747 for (unsigned int k=0; k<MAX_SAMPLE_RATES; k++) {
7748 if ( SAMPLE_RATES[k] >= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i )
7749 info->sampleRates.push_back( SAMPLE_RATES[k] );
7753 // The AL library supports all our formats, except 24-bit and 32-bit ints.
7754 info->nativeFormats = (RtAudioFormat) 51;
7757 if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 )
7759 if ( info->sampleRates.size() == 0 )
7762 // Determine duplex status.
7763 if (info->maxInputChannels < info->maxOutputChannels)
7764 info->maxDuplexChannels = info->maxInputChannels;
7766 info->maxDuplexChannels = info->maxOutputChannels;
7767 if (info->minInputChannels < info->minOutputChannels)
7768 info->minDuplexChannels = info->minInputChannels;
7770 info->minDuplexChannels = info->minOutputChannels;
7772 if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true;
7773 else info->hasDuplexSupport = false;
7775 info->probed = true;
7780 bool RtApiAl :: probeDeviceOpen(int device, StreamMode mode, int channels,
7781 int sampleRate, RtAudioFormat format,
7782 int *bufferSize, int numberOfBuffers)
7784 int result, nBuffers;
7789 long *id = (long *) devices_[device].apiDeviceId;
7791 // Get a new ALconfig structure.
7792 al_config = alNewConfig();
7794 sprintf(message_,"RtApiAl: can't get AL config: %s.",
7795 alGetErrorString(oserror()));
7796 error(RtError::DEBUG_WARNING);
7800 // Set the channels.
7801 result = alSetChannels(al_config, channels);
7803 alFreeConfig(al_config);
7804 sprintf(message_,"RtApiAl: can't set %d channels in AL config: %s.",
7805 channels, alGetErrorString(oserror()));
7806 error(RtError::DEBUG_WARNING);
7810 // Attempt to set the queue size. The al API doesn't provide a
7811 // means for querying the minimum/maximum buffer size of a device,
7812 // so if the specified size doesn't work, take whatever the
7813 // al_config structure returns.
7814 if ( numberOfBuffers < 1 )
7817 nBuffers = numberOfBuffers;
7818 long buffer_size = *bufferSize * nBuffers;
7819 result = alSetQueueSize(al_config, buffer_size); // in sample frames
7821 // Get the buffer size specified by the al_config and try that.
7822 buffer_size = alGetQueueSize(al_config);
7823 result = alSetQueueSize(al_config, buffer_size);
7825 alFreeConfig(al_config);
7826 sprintf(message_,"RtApiAl: can't set buffer size (%ld) in AL config: %s.",
7827 buffer_size, alGetErrorString(oserror()));
7828 error(RtError::DEBUG_WARNING);
7831 *bufferSize = buffer_size / nBuffers;
7834 // Set the data format.
7835 stream_.userFormat = format;
7836 stream_.deviceFormat[mode] = format;
7837 if (format == RTAUDIO_SINT8) {
7838 result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP);
7839 result = alSetWidth(al_config, AL_SAMPLE_8);
7841 else if (format == RTAUDIO_SINT16) {
7842 result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP);
7843 result = alSetWidth(al_config, AL_SAMPLE_16);
7845 else if (format == RTAUDIO_SINT24) {
7846 // Our 24-bit format assumes the upper 3 bytes of a 4 byte word.
7847 // The AL library uses the lower 3 bytes, so we'll need to do our
7849 result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
7850 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
7852 else if (format == RTAUDIO_SINT32) {
7853 // The AL library doesn't seem to support the 32-bit integer
7854 // format, so we'll need to do our own conversion.
7855 result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
7856 stream_.deviceFormat[mode] = RTAUDIO_FLOAT32;
7858 else if (format == RTAUDIO_FLOAT32)
7859 result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT);
7860 else if (format == RTAUDIO_FLOAT64)
7861 result = alSetSampFmt(al_config, AL_SAMPFMT_DOUBLE);
7863 if ( result == -1 ) {
7864 alFreeConfig(al_config);
7865 sprintf(message_,"RtApiAl: error setting sample format in AL config: %s.",
7866 alGetErrorString(oserror()));
7867 error(RtError::DEBUG_WARNING);
7871 if (mode == OUTPUT) {
7875 resource = AL_DEFAULT_OUTPUT;
7878 result = alSetDevice(al_config, resource);
7879 if ( result == -1 ) {
7880 alFreeConfig(al_config);
7881 sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.",
7882 devices_[device].name.c_str(), alGetErrorString(oserror()));
7883 error(RtError::DEBUG_WARNING);
7888 port = alOpenPort("RtApiAl Output Port", "w", al_config);
7890 alFreeConfig(al_config);
7891 sprintf(message_,"RtApiAl: error opening output port: %s.",
7892 alGetErrorString(oserror()));
7893 error(RtError::DEBUG_WARNING);
7897 // Set the sample rate
7898 pvs[0].param = AL_MASTER_CLOCK;
7899 pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE;
7900 pvs[1].param = AL_RATE;
7901 pvs[1].value.ll = alDoubleToFixed((double)sampleRate);
7902 result = alSetParams(resource, pvs, 2);
7905 alFreeConfig(al_config);
7906 sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
7907 sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror()));
7908 error(RtError::DEBUG_WARNING);
7912 else { // mode == INPUT
7916 resource = AL_DEFAULT_INPUT;
7919 result = alSetDevice(al_config, resource);
7920 if ( result == -1 ) {
7921 alFreeConfig(al_config);
7922 sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.",
7923 devices_[device].name.c_str(), alGetErrorString(oserror()));
7924 error(RtError::DEBUG_WARNING);
7929 port = alOpenPort("RtApiAl Input Port", "r", al_config);
7931 alFreeConfig(al_config);
7932 sprintf(message_,"RtApiAl: error opening input port: %s.",
7933 alGetErrorString(oserror()));
7934 error(RtError::DEBUG_WARNING);
7938 // Set the sample rate
7939 pvs[0].param = AL_MASTER_CLOCK;
7940 pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE;
7941 pvs[1].param = AL_RATE;
7942 pvs[1].value.ll = alDoubleToFixed((double)sampleRate);
7943 result = alSetParams(resource, pvs, 2);
7946 alFreeConfig(al_config);
7947 sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.",
7948 sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror()));
7949 error(RtError::DEBUG_WARNING);
7954 alFreeConfig(al_config);
7956 stream_.nUserChannels[mode] = channels;
7957 stream_.nDeviceChannels[mode] = channels;
7959 // Save stream handle.
7960 ALport *handle = (ALport *) stream_.apiHandle;
7961 if ( handle == 0 ) {
7962 handle = (ALport *) calloc(2, sizeof(ALport));
7963 if ( handle == NULL ) {
7964 sprintf(message_, "RtApiAl: Irix Al error allocating handle memory (%s).",
7965 devices_[device].name.c_str());
7968 stream_.apiHandle = (void *) handle;
7972 handle[mode] = port;
7974 // Set flags for buffer conversion
7975 stream_.doConvertBuffer[mode] = false;
7976 if (stream_.userFormat != stream_.deviceFormat[mode])
7977 stream_.doConvertBuffer[mode] = true;
7979 // Allocate necessary internal buffers
7980 if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {
7983 if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
7984 buffer_bytes = stream_.nUserChannels[0];
7986 buffer_bytes = stream_.nUserChannels[1];
7988 buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
7989 if (stream_.userBuffer) free(stream_.userBuffer);
7990 stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
7991 if (stream_.userBuffer == NULL) {
7992 sprintf(message_, "RtApiAl: error allocating user buffer memory (%s).",
7993 devices_[device].name.c_str());
7998 if ( stream_.doConvertBuffer[mode] ) {
8001 bool makeBuffer = true;
8002 if ( mode == OUTPUT )
8003 buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
8004 else { // mode == INPUT
8005 buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
8006 if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
8007 long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
8008 if ( buffer_bytes < bytes_out ) makeBuffer = false;
8013 buffer_bytes *= *bufferSize;
8014 if (stream_.deviceBuffer) free(stream_.deviceBuffer);
8015 stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
8016 if (stream_.deviceBuffer == NULL) {
8017 sprintf(message_, "RtApiAl: error allocating device buffer memory (%s).",
8018 devices_[device].name.c_str());
8024 stream_.device[mode] = device;
8025 stream_.state = STREAM_STOPPED;
8026 if ( stream_.mode == OUTPUT && mode == INPUT )
8027 // We had already set up an output stream.
8028 stream_.mode = DUPLEX;
8030 stream_.mode = mode;
8031 stream_.nBuffers = nBuffers;
8032 stream_.bufferSize = *bufferSize;
8033 stream_.sampleRate = sampleRate;
8035 // Setup the buffer conversion information structure.
8036 if ( stream_.doConvertBuffer[mode] ) {
8037 if (mode == INPUT) { // convert device to user buffer
8038 stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
8039 stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
8040 stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
8041 stream_.convertInfo[mode].outFormat = stream_.userFormat;
8043 else { // convert user to device buffer
8044 stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
8045 stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
8046 stream_.convertInfo[mode].inFormat = stream_.userFormat;
8047 stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
8050 if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
8051 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
8053 stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;
8055 // Set up the interleave/deinterleave offsets.
8056 if ( mode == INPUT && stream_.deInterleave[1] ) {
8057 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
8058 stream_.convertInfo[mode].inOffset.push_back( k * stream_.bufferSize );
8059 stream_.convertInfo[mode].outOffset.push_back( k );
8060 stream_.convertInfo[mode].inJump = 1;
8063 else if (mode == OUTPUT && stream_.deInterleave[0]) {
8064 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
8065 stream_.convertInfo[mode].inOffset.push_back( k );
8066 stream_.convertInfo[mode].outOffset.push_back( k * stream_.bufferSize );
8067 stream_.convertInfo[mode].outJump = 1;
8071 for (int k=0; k<stream_.convertInfo[mode].channels; k++) {
8072 stream_.convertInfo[mode].inOffset.push_back( k );
8073 stream_.convertInfo[mode].outOffset.push_back( k );
8083 alClosePort(handle[0]);
8085 alClosePort(handle[1]);
8087 stream_.apiHandle = 0;
8090 if (stream_.userBuffer) {
8091 free(stream_.userBuffer);
8092 stream_.userBuffer = 0;
8095 error(RtError::DEBUG_WARNING);
8099 void RtApiAl :: closeStream()
8101 // We don't want an exception to be thrown here because this
8102 // function is called by our class destructor. So, do our own
8104 if ( stream_.mode == UNINITIALIZED ) {
8105 sprintf(message_, "RtApiAl::closeStream(): no open stream to close!");
8106 error(RtError::WARNING);
8110 ALport *handle = (ALport *) stream_.apiHandle;
8111 if (stream_.state == STREAM_RUNNING) {
8112 int buffer_size = stream_.bufferSize * stream_.nBuffers;
8113 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
8114 alDiscardFrames(handle[0], buffer_size);
8115 if (stream_.mode == INPUT || stream_.mode == DUPLEX)
8116 alDiscardFrames(handle[1], buffer_size);
8117 stream_.state = STREAM_STOPPED;
8120 if (stream_.callbackInfo.usingCallback) {
8121 stream_.callbackInfo.usingCallback = false;
8122 pthread_join(stream_.callbackInfo.thread, NULL);
8126 if (handle[0]) alClosePort(handle[0]);
8127 if (handle[1]) alClosePort(handle[1]);
8129 stream_.apiHandle = 0;
8132 if (stream_.userBuffer) {
8133 free(stream_.userBuffer);
8134 stream_.userBuffer = 0;
8137 if (stream_.deviceBuffer) {
8138 free(stream_.deviceBuffer);
8139 stream_.deviceBuffer = 0;
8142 stream_.mode = UNINITIALIZED;
8145 void RtApiAl :: startStream()
8148 if (stream_.state == STREAM_RUNNING) return;
8150 MUTEX_LOCK(&stream_.mutex);
8152 // The AL port is ready as soon as it is opened.
8153 stream_.state = STREAM_RUNNING;
8155 MUTEX_UNLOCK(&stream_.mutex);
8158 void RtApiAl :: stopStream()
8161 if (stream_.state == STREAM_STOPPED) return;
8163 // Change the state before the lock to improve shutdown response
8164 // when using a callback.
8165 stream_.state = STREAM_STOPPED;
8166 MUTEX_LOCK(&stream_.mutex);
8168 int result, buffer_size = stream_.bufferSize * stream_.nBuffers;
8169 ALport *handle = (ALport *) stream_.apiHandle;
8171 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX)
8172 alZeroFrames(handle[0], buffer_size);
8174 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
8175 result = alDiscardFrames(handle[1], buffer_size);
8177 sprintf(message_, "RtApiAl: error draining stream device (%s): %s.",
8178 devices_[stream_.device[1]].name.c_str(), alGetErrorString(oserror()));
8179 error(RtError::DRIVER_ERROR);
8183 MUTEX_UNLOCK(&stream_.mutex);
8186 void RtApiAl :: abortStream()
8189 if (stream_.state == STREAM_STOPPED) return;
8191 // Change the state before the lock to improve shutdown response
8192 // when using a callback.
8193 stream_.state = STREAM_STOPPED;
8194 MUTEX_LOCK(&stream_.mutex);
8196 ALport *handle = (ALport *) stream_.apiHandle;
8197 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
8199 int buffer_size = stream_.bufferSize * stream_.nBuffers;
8200 int result = alDiscardFrames(handle[0], buffer_size);
8202 sprintf(message_, "RtApiAl: error aborting stream device (%s): %s.",
8203 devices_[stream_.device[0]].name.c_str(), alGetErrorString(oserror()));
8204 error(RtError::DRIVER_ERROR);
8208 // There is no clear action to take on the input stream, since the
8209 // port will continue to run in any event.
8211 MUTEX_UNLOCK(&stream_.mutex);
8214 int RtApiAl :: streamWillBlock()
8218 if (stream_.state == STREAM_STOPPED) return 0;
8220 MUTEX_LOCK(&stream_.mutex);
8224 ALport *handle = (ALport *) stream_.apiHandle;
8225 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
8226 err = alGetFillable(handle[0]);
8228 sprintf(message_, "RtApiAl: error getting available frames for stream (%s): %s.",
8229 devices_[stream_.device[0]].name.c_str(), alGetErrorString(oserror()));
8230 error(RtError::DRIVER_ERROR);
8236 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
8237 err = alGetFilled(handle[1]);
8239 sprintf(message_, "RtApiAl: error getting available frames for stream (%s): %s.",
8240 devices_[stream_.device[1]].name.c_str(), alGetErrorString(oserror()));
8241 error(RtError::DRIVER_ERROR);
8243 if (frames > err) frames = err;
8246 frames = stream_.bufferSize - frames;
8247 if (frames < 0) frames = 0;
8249 MUTEX_UNLOCK(&stream_.mutex);
8253 void RtApiAl :: tickStream()
8258 if (stream_.state == STREAM_STOPPED) {
8259 if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds
8262 else if (stream_.callbackInfo.usingCallback) {
8263 RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback;
8264 stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData);
8267 MUTEX_LOCK(&stream_.mutex);
8269 // The state might change while waiting on a mutex.
8270 if (stream_.state == STREAM_STOPPED)
8275 RtAudioFormat format;
8276 ALport *handle = (ALport *) stream_.apiHandle;
8277 if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) {
8279 // Setup parameters and do buffer conversion if necessary.
8280 if (stream_.doConvertBuffer[0]) {
8281 buffer = stream_.deviceBuffer;
8282 convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] );
8283 channels = stream_.nDeviceChannels[0];
8284 format = stream_.deviceFormat[0];
8287 buffer = stream_.userBuffer;
8288 channels = stream_.nUserChannels[0];
8289 format = stream_.userFormat;
8292 // Do byte swapping if necessary.
8293 if (stream_.doByteSwap[0])
8294 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
8296 // Write interleaved samples to device.
8297 alWriteFrames(handle[0], buffer, stream_.bufferSize);
8300 if (stream_.mode == INPUT || stream_.mode == DUPLEX) {
8302 // Setup parameters.
8303 if (stream_.doConvertBuffer[1]) {
8304 buffer = stream_.deviceBuffer;
8305 channels = stream_.nDeviceChannels[1];
8306 format = stream_.deviceFormat[1];
8309 buffer = stream_.userBuffer;
8310 channels = stream_.nUserChannels[1];
8311 format = stream_.userFormat;
8314 // Read interleaved samples from device.
8315 alReadFrames(handle[1], buffer, stream_.bufferSize);
8317 // Do byte swapping if necessary.
8318 if (stream_.doByteSwap[1])
8319 byteSwapBuffer(buffer, stream_.bufferSize * channels, format);
8321 // Do buffer conversion if necessary.
8322 if (stream_.doConvertBuffer[1])
8323 convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] );
8327 MUTEX_UNLOCK(&stream_.mutex);
8329 if (stream_.callbackInfo.usingCallback && stopStream)
8333 void RtApiAl :: setStreamCallback(RtAudioCallback callback, void *userData)
8337 CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
8338 if ( info->usingCallback ) {
8339 sprintf(message_, "RtApiAl: A callback is already set for this stream!");
8340 error(RtError::WARNING);
8344 info->callback = (void *) callback;
8345 info->userData = userData;
8346 info->usingCallback = true;
8347 info->object = (void *) this;
8349 // Set the thread attributes for joinable and realtime scheduling
8350 // priority. The higher priority will only take affect if the
8351 // program is run as root or suid.
8352 pthread_attr_t attr;
8353 pthread_attr_init(&attr);
8354 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
8355 pthread_attr_setschedpolicy(&attr, SCHED_RR);
8357 int err = pthread_create(&info->thread, &attr, callbackHandler, &stream_.callbackInfo);
8358 pthread_attr_destroy(&attr);
8360 info->usingCallback = false;
8361 sprintf(message_, "RtApiAl: error starting callback thread!");
8362 error(RtError::THREAD_ERROR);
8366 void RtApiAl :: cancelStreamCallback()
8370 if (stream_.callbackInfo.usingCallback) {
8372 if (stream_.state == STREAM_RUNNING)
8375 MUTEX_LOCK(&stream_.mutex);
8377 stream_.callbackInfo.usingCallback = false;
8378 pthread_join(stream_.callbackInfo.thread, NULL);
8379 stream_.callbackInfo.thread = 0;
8380 stream_.callbackInfo.callback = NULL;
8381 stream_.callbackInfo.userData = NULL;
8383 MUTEX_UNLOCK(&stream_.mutex);
8387 extern "C" void *callbackHandler(void *ptr)
8389 CallbackInfo *info = (CallbackInfo *) ptr;
8390 RtApiAl *object = (RtApiAl *) info->object;
8391 bool *usingCallback = &info->usingCallback;
8393 while ( *usingCallback ) {
8395 object->tickStream();
8397 catch (RtError &exception) {
8398 fprintf(stderr, "\nRtApiAl: callback thread error (%s) ... closing thread.\n\n",
8399 exception.getMessageString());
8407 //******************** End of __IRIX_AL__ *********************//
8411 // *************************************************** //
8413 // Protected common (OS-independent) RtAudio methods.
8415 // *************************************************** //
8417 // This method can be modified to control the behavior of error
8418 // message reporting and throwing.
8419 void RtApi :: error(RtError::Type type)
8421 if (type == RtError::WARNING) {
8422 fprintf(stderr, "\n%s\n\n", message_);
8424 else if (type == RtError::DEBUG_WARNING) {
8425 #if defined(__RTAUDIO_DEBUG__)
8426 fprintf(stderr, "\n%s\n\n", message_);
8430 #if defined(__RTAUDIO_DEBUG__)
8431 fprintf(stderr, "\n%s\n\n", message_);
8433 throw RtError(std::string(message_), type);
8437 void RtApi :: verifyStream()
8439 if ( stream_.mode == UNINITIALIZED ) {
8440 sprintf(message_, "RtAudio: stream is not open!");
8441 error(RtError::INVALID_STREAM);
8445 void RtApi :: clearDeviceInfo(RtApiDevice *info)
8447 // Don't clear the name or DEVICE_ID fields here ... they are
8448 // typically set prior to a call of this function.
8449 info->probed = false;
8450 info->maxOutputChannels = 0;
8451 info->maxInputChannels = 0;
8452 info->maxDuplexChannels = 0;
8453 info->minOutputChannels = 0;
8454 info->minInputChannels = 0;
8455 info->minDuplexChannels = 0;
8456 info->hasDuplexSupport = false;
8457 info->sampleRates.clear();
8458 info->nativeFormats = 0;
8461 void RtApi :: clearStreamInfo()
8463 stream_.mode = UNINITIALIZED;
8464 stream_.state = STREAM_STOPPED;
8465 stream_.sampleRate = 0;
8466 stream_.bufferSize = 0;
8467 stream_.nBuffers = 0;
8468 stream_.userFormat = 0;
8469 for ( int i=0; i<2; i++ ) {
8470 stream_.device[i] = 0;
8471 stream_.doConvertBuffer[i] = false;
8472 stream_.deInterleave[i] = false;
8473 stream_.doByteSwap[i] = false;
8474 stream_.nUserChannels[i] = 0;
8475 stream_.nDeviceChannels[i] = 0;
8476 stream_.deviceFormat[i] = 0;
8480 int RtApi :: formatBytes(RtAudioFormat format)
8482 if (format == RTAUDIO_SINT16)
8484 else if (format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 ||
8485 format == RTAUDIO_FLOAT32)
8487 else if (format == RTAUDIO_FLOAT64)
8489 else if (format == RTAUDIO_SINT8)
8492 sprintf(message_,"RtApi: undefined format in formatBytes().");
8493 error(RtError::WARNING);
8498 void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info )
8500 // This function does format conversion, input/output channel compensation, and
8501 // data interleaving/deinterleaving. 24-bit integers are assumed to occupy
8502 // the upper three bytes of a 32-bit integer.
8504 // Clear our device buffer when in/out duplex device channels are different
8505 if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX &&
8506 stream_.nDeviceChannels[0] != stream_.nDeviceChannels[1] )
8507 memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) );
8510 if (info.outFormat == RTAUDIO_FLOAT64) {
8512 Float64 *out = (Float64 *)outBuffer;
8514 if (info.inFormat == RTAUDIO_SINT8) {
8515 signed char *in = (signed char *)inBuffer;
8516 scale = 1.0 / 128.0;
8517 for (int i=0; i<stream_.bufferSize; i++) {
8518 for (j=0; j<info.channels; j++) {
8519 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
8520 out[info.outOffset[j]] *= scale;
8523 out += info.outJump;
8526 else if (info.inFormat == RTAUDIO_SINT16) {
8527 Int16 *in = (Int16 *)inBuffer;
8528 scale = 1.0 / 32768.0;
8529 for (int i=0; i<stream_.bufferSize; i++) {
8530 for (j=0; j<info.channels; j++) {
8531 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
8532 out[info.outOffset[j]] *= scale;
8535 out += info.outJump;
8538 else if (info.inFormat == RTAUDIO_SINT24) {
8539 Int32 *in = (Int32 *)inBuffer;
8540 scale = 1.0 / 2147483648.0;
8541 for (int i=0; i<stream_.bufferSize; i++) {
8542 for (j=0; j<info.channels; j++) {
8543 out[info.outOffset[j]] = (Float64) (in[info.inOffset[j]] & 0xffffff00);
8544 out[info.outOffset[j]] *= scale;
8547 out += info.outJump;
8550 else if (info.inFormat == RTAUDIO_SINT32) {
8551 Int32 *in = (Int32 *)inBuffer;
8552 scale = 1.0 / 2147483648.0;
8553 for (int i=0; i<stream_.bufferSize; i++) {
8554 for (j=0; j<info.channels; j++) {
8555 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
8556 out[info.outOffset[j]] *= scale;
8559 out += info.outJump;
8562 else if (info.inFormat == RTAUDIO_FLOAT32) {
8563 Float32 *in = (Float32 *)inBuffer;
8564 for (int i=0; i<stream_.bufferSize; i++) {
8565 for (j=0; j<info.channels; j++) {
8566 out[info.outOffset[j]] = (Float64) in[info.inOffset[j]];
8569 out += info.outJump;
8572 else if (info.inFormat == RTAUDIO_FLOAT64) {
8573 // Channel compensation and/or (de)interleaving only.
8574 Float64 *in = (Float64 *)inBuffer;
8575 for (int i=0; i<stream_.bufferSize; i++) {
8576 for (j=0; j<info.channels; j++) {
8577 out[info.outOffset[j]] = in[info.inOffset[j]];
8580 out += info.outJump;
8584 else if (info.outFormat == RTAUDIO_FLOAT32) {
8586 Float32 *out = (Float32 *)outBuffer;
8588 if (info.inFormat == RTAUDIO_SINT8) {
8589 signed char *in = (signed char *)inBuffer;
8590 scale = 1.0 / 128.0;
8591 for (int i=0; i<stream_.bufferSize; i++) {
8592 for (j=0; j<info.channels; j++) {
8593 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
8594 out[info.outOffset[j]] *= scale;
8597 out += info.outJump;
8600 else if (info.inFormat == RTAUDIO_SINT16) {
8601 Int16 *in = (Int16 *)inBuffer;
8602 scale = 1.0 / 32768.0;
8603 for (int i=0; i<stream_.bufferSize; i++) {
8604 for (j=0; j<info.channels; j++) {
8605 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
8606 out[info.outOffset[j]] *= scale;
8609 out += info.outJump;
8612 else if (info.inFormat == RTAUDIO_SINT24) {
8613 Int32 *in = (Int32 *)inBuffer;
8614 scale = 1.0 / 2147483648.0;
8615 for (int i=0; i<stream_.bufferSize; i++) {
8616 for (j=0; j<info.channels; j++) {
8617 out[info.outOffset[j]] = (Float32) (in[info.inOffset[j]] & 0xffffff00);
8618 out[info.outOffset[j]] *= scale;
8621 out += info.outJump;
8624 else if (info.inFormat == RTAUDIO_SINT32) {
8625 Int32 *in = (Int32 *)inBuffer;
8626 scale = 1.0 / 2147483648.0;
8627 for (int i=0; i<stream_.bufferSize; i++) {
8628 for (j=0; j<info.channels; j++) {
8629 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
8630 out[info.outOffset[j]] *= scale;
8633 out += info.outJump;
8636 else if (info.inFormat == RTAUDIO_FLOAT32) {
8637 // Channel compensation and/or (de)interleaving only.
8638 Float32 *in = (Float32 *)inBuffer;
8639 for (int i=0; i<stream_.bufferSize; i++) {
8640 for (j=0; j<info.channels; j++) {
8641 out[info.outOffset[j]] = in[info.inOffset[j]];
8644 out += info.outJump;
8647 else if (info.inFormat == RTAUDIO_FLOAT64) {
8648 Float64 *in = (Float64 *)inBuffer;
8649 for (int i=0; i<stream_.bufferSize; i++) {
8650 for (j=0; j<info.channels; j++) {
8651 out[info.outOffset[j]] = (Float32) in[info.inOffset[j]];
8654 out += info.outJump;
8658 else if (info.outFormat == RTAUDIO_SINT32) {
8659 Int32 *out = (Int32 *)outBuffer;
8660 if (info.inFormat == RTAUDIO_SINT8) {
8661 signed char *in = (signed char *)inBuffer;
8662 for (int i=0; i<stream_.bufferSize; i++) {
8663 for (j=0; j<info.channels; j++) {
8664 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
8665 out[info.outOffset[j]] <<= 24;
8668 out += info.outJump;
8671 else if (info.inFormat == RTAUDIO_SINT16) {
8672 Int16 *in = (Int16 *)inBuffer;
8673 for (int i=0; i<stream_.bufferSize; i++) {
8674 for (j=0; j<info.channels; j++) {
8675 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
8676 out[info.outOffset[j]] <<= 16;
8679 out += info.outJump;
8682 else if (info.inFormat == RTAUDIO_SINT24) {
8683 Int32 *in = (Int32 *)inBuffer;
8684 for (int i=0; i<stream_.bufferSize; i++) {
8685 for (j=0; j<info.channels; j++) {
8686 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
8689 out += info.outJump;
8692 else if (info.inFormat == RTAUDIO_SINT32) {
8693 // Channel compensation and/or (de)interleaving only.
8694 Int32 *in = (Int32 *)inBuffer;
8695 for (int i=0; i<stream_.bufferSize; i++) {
8696 for (j=0; j<info.channels; j++) {
8697 out[info.outOffset[j]] = in[info.inOffset[j]];
8700 out += info.outJump;
8703 else if (info.inFormat == RTAUDIO_FLOAT32) {
8704 Float32 *in = (Float32 *)inBuffer;
8705 for (int i=0; i<stream_.bufferSize; i++) {
8706 for (j=0; j<info.channels; j++) {
8707 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
8710 out += info.outJump;
8713 else if (info.inFormat == RTAUDIO_FLOAT64) {
8714 Float64 *in = (Float64 *)inBuffer;
8715 for (int i=0; i<stream_.bufferSize; i++) {
8716 for (j=0; j<info.channels; j++) {
8717 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
8720 out += info.outJump;
8724 else if (info.outFormat == RTAUDIO_SINT24) {
8725 Int32 *out = (Int32 *)outBuffer;
8726 if (info.inFormat == RTAUDIO_SINT8) {
8727 signed char *in = (signed char *)inBuffer;
8728 for (int i=0; i<stream_.bufferSize; i++) {
8729 for (j=0; j<info.channels; j++) {
8730 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
8731 out[info.outOffset[j]] <<= 24;
8734 out += info.outJump;
8737 else if (info.inFormat == RTAUDIO_SINT16) {
8738 Int16 *in = (Int16 *)inBuffer;
8739 for (int i=0; i<stream_.bufferSize; i++) {
8740 for (j=0; j<info.channels; j++) {
8741 out[info.outOffset[j]] = (Int32) in[info.inOffset[j]];
8742 out[info.outOffset[j]] <<= 16;
8745 out += info.outJump;
8748 else if (info.inFormat == RTAUDIO_SINT24) {
8749 // Channel compensation and/or (de)interleaving only.
8750 Int32 *in = (Int32 *)inBuffer;
8751 for (int i=0; i<stream_.bufferSize; i++) {
8752 for (j=0; j<info.channels; j++) {
8753 out[info.outOffset[j]] = in[info.inOffset[j]];
8756 out += info.outJump;
8759 else if (info.inFormat == RTAUDIO_SINT32) {
8760 Int32 *in = (Int32 *)inBuffer;
8761 for (int i=0; i<stream_.bufferSize; i++) {
8762 for (j=0; j<info.channels; j++) {
8763 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] & 0xffffff00);
8766 out += info.outJump;
8769 else if (info.inFormat == RTAUDIO_FLOAT32) {
8770 Float32 *in = (Float32 *)inBuffer;
8771 for (int i=0; i<stream_.bufferSize; i++) {
8772 for (j=0; j<info.channels; j++) {
8773 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
8776 out += info.outJump;
8779 else if (info.inFormat == RTAUDIO_FLOAT64) {
8780 Float64 *in = (Float64 *)inBuffer;
8781 for (int i=0; i<stream_.bufferSize; i++) {
8782 for (j=0; j<info.channels; j++) {
8783 out[info.outOffset[j]] = (Int32) (in[info.inOffset[j]] * 2147483647.0);
8786 out += info.outJump;
8790 else if (info.outFormat == RTAUDIO_SINT16) {
8791 Int16 *out = (Int16 *)outBuffer;
8792 if (info.inFormat == RTAUDIO_SINT8) {
8793 signed char *in = (signed char *)inBuffer;
8794 for (int i=0; i<stream_.bufferSize; i++) {
8795 for (j=0; j<info.channels; j++) {
8796 out[info.outOffset[j]] = (Int16) in[info.inOffset[j]];
8797 out[info.outOffset[j]] <<= 8;
8800 out += info.outJump;
8803 else if (info.inFormat == RTAUDIO_SINT16) {
8804 // Channel compensation and/or (de)interleaving only.
8805 Int16 *in = (Int16 *)inBuffer;
8806 for (int i=0; i<stream_.bufferSize; i++) {
8807 for (j=0; j<info.channels; j++) {
8808 out[info.outOffset[j]] = in[info.inOffset[j]];
8811 out += info.outJump;
8814 else if (info.inFormat == RTAUDIO_SINT24) {
8815 Int32 *in = (Int32 *)inBuffer;
8816 for (int i=0; i<stream_.bufferSize; i++) {
8817 for (j=0; j<info.channels; j++) {
8818 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
8821 out += info.outJump;
8824 else if (info.inFormat == RTAUDIO_SINT32) {
8825 Int32 *in = (Int32 *)inBuffer;
8826 for (int i=0; i<stream_.bufferSize; i++) {
8827 for (j=0; j<info.channels; j++) {
8828 out[info.outOffset[j]] = (Int16) ((in[info.inOffset[j]] >> 16) & 0x0000ffff);
8831 out += info.outJump;
8834 else if (info.inFormat == RTAUDIO_FLOAT32) {
8835 Float32 *in = (Float32 *)inBuffer;
8836 for (int i=0; i<stream_.bufferSize; i++) {
8837 for (j=0; j<info.channels; j++) {
8838 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
8841 out += info.outJump;
8844 else if (info.inFormat == RTAUDIO_FLOAT64) {
8845 Float64 *in = (Float64 *)inBuffer;
8846 for (int i=0; i<stream_.bufferSize; i++) {
8847 for (j=0; j<info.channels; j++) {
8848 out[info.outOffset[j]] = (Int16) (in[info.inOffset[j]] * 32767.0);
8851 out += info.outJump;
8855 else if (info.outFormat == RTAUDIO_SINT8) {
8856 signed char *out = (signed char *)outBuffer;
8857 if (info.inFormat == RTAUDIO_SINT8) {
8858 // Channel compensation and/or (de)interleaving only.
8859 signed char *in = (signed char *)inBuffer;
8860 for (int i=0; i<stream_.bufferSize; i++) {
8861 for (j=0; j<info.channels; j++) {
8862 out[info.outOffset[j]] = in[info.inOffset[j]];
8865 out += info.outJump;
8868 if (info.inFormat == RTAUDIO_SINT16) {
8869 Int16 *in = (Int16 *)inBuffer;
8870 for (int i=0; i<stream_.bufferSize; i++) {
8871 for (j=0; j<info.channels; j++) {
8872 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 8) & 0x00ff);
8875 out += info.outJump;
8878 else if (info.inFormat == RTAUDIO_SINT24) {
8879 Int32 *in = (Int32 *)inBuffer;
8880 for (int i=0; i<stream_.bufferSize; i++) {
8881 for (j=0; j<info.channels; j++) {
8882 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
8885 out += info.outJump;
8888 else if (info.inFormat == RTAUDIO_SINT32) {
8889 Int32 *in = (Int32 *)inBuffer;
8890 for (int i=0; i<stream_.bufferSize; i++) {
8891 for (j=0; j<info.channels; j++) {
8892 out[info.outOffset[j]] = (signed char) ((in[info.inOffset[j]] >> 24) & 0x000000ff);
8895 out += info.outJump;
8898 else if (info.inFormat == RTAUDIO_FLOAT32) {
8899 Float32 *in = (Float32 *)inBuffer;
8900 for (int i=0; i<stream_.bufferSize; i++) {
8901 for (j=0; j<info.channels; j++) {
8902 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
8905 out += info.outJump;
8908 else if (info.inFormat == RTAUDIO_FLOAT64) {
8909 Float64 *in = (Float64 *)inBuffer;
8910 for (int i=0; i<stream_.bufferSize; i++) {
8911 for (j=0; j<info.channels; j++) {
8912 out[info.outOffset[j]] = (signed char) (in[info.inOffset[j]] * 127.0);
8915 out += info.outJump;
8921 void RtApi :: byteSwapBuffer( char *buffer, int samples, RtAudioFormat format )
8927 if (format == RTAUDIO_SINT16) {
8928 for (int i=0; i<samples; i++) {
8929 // Swap 1st and 2nd bytes.
8934 // Increment 2 bytes.
8938 else if (format == RTAUDIO_SINT24 ||
8939 format == RTAUDIO_SINT32 ||
8940 format == RTAUDIO_FLOAT32) {
8941 for (int i=0; i<samples; i++) {
8942 // Swap 1st and 4th bytes.
8947 // Swap 2nd and 3rd bytes.
8953 // Increment 4 bytes.
8957 else if (format == RTAUDIO_FLOAT64) {
8958 for (int i=0; i<samples; i++) {
8959 // Swap 1st and 8th bytes
8964 // Swap 2nd and 7th bytes
8970 // Swap 3rd and 6th bytes
8976 // Swap 4th and 5th bytes
8982 // Increment 8 bytes.