+ for ( i=0, j=0; i<nChannels; i++ ) {\r
+ if ( handle->bufferInfos[i].isInput != ASIOTrue )\r
+ memset( handle->bufferInfos[i].buffers[bufferIndex], 0, bufferBytes );\r
+ }\r
+\r
+ }\r
+ else if ( stream_.doConvertBuffer[0] ) {\r
+\r
+ convertBuffer( stream_.deviceBuffer, stream_.userBuffer[0], stream_.convertInfo[0] );\r
+ if ( stream_.doByteSwap[0] )\r
+ byteSwapBuffer( stream_.deviceBuffer,\r
+ stream_.bufferSize * stream_.nDeviceChannels[0],\r
+ stream_.deviceFormat[0] );\r
+\r
+ for ( i=0, j=0; i<nChannels; i++ ) {\r
+ if ( handle->bufferInfos[i].isInput != ASIOTrue )\r
+ memcpy( handle->bufferInfos[i].buffers[bufferIndex],\r
+ &stream_.deviceBuffer[j++*bufferBytes], bufferBytes );\r
+ }\r
+\r
+ }\r
+ else {\r
+\r
+ if ( stream_.doByteSwap[0] )\r
+ byteSwapBuffer( stream_.userBuffer[0],\r
+ stream_.bufferSize * stream_.nUserChannels[0],\r
+ stream_.userFormat );\r
+\r
+ for ( i=0, j=0; i<nChannels; i++ ) {\r
+ if ( handle->bufferInfos[i].isInput != ASIOTrue )\r
+ memcpy( handle->bufferInfos[i].buffers[bufferIndex],\r
+ &stream_.userBuffer[0][bufferBytes*j++], bufferBytes );\r
+ }\r
+\r
+ }\r
+ }\r
+\r
+ // Don't bother draining input\r
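+  // (drainCounter is nonzero while the output is draining: count this
+  // callback and skip the input section; the stop logic elsewhere watches
+  // the counter to know when the final output buffers have played)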
+ if ( handle->drainCounter ) {\r
+ handle->drainCounter++;\r
+ goto unlock;\r
+ }\r
+\r
+ if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) {\r
+\r
+ bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]);\r
+\r
+ if (stream_.doConvertBuffer[1]) {\r
+\r
+ // Always interleave ASIO input data.\r
+ for ( i=0, j=0; i<nChannels; i++ ) {\r
+ if ( handle->bufferInfos[i].isInput == ASIOTrue )\r
+ memcpy( &stream_.deviceBuffer[j++*bufferBytes],\r
+ handle->bufferInfos[i].buffers[bufferIndex],\r
+ bufferBytes );\r
+ }\r
+\r
+ if ( stream_.doByteSwap[1] )\r
+ byteSwapBuffer( stream_.deviceBuffer,\r
+ stream_.bufferSize * stream_.nDeviceChannels[1],\r
+ stream_.deviceFormat[1] );\r
+ convertBuffer( stream_.userBuffer[1], stream_.deviceBuffer, stream_.convertInfo[1] );\r
+\r
+ }\r
+ else {\r
+ for ( i=0, j=0; i<nChannels; i++ ) {\r
+ if ( handle->bufferInfos[i].isInput == ASIOTrue ) {\r
+ memcpy( &stream_.userBuffer[1][bufferBytes*j++],\r
+ handle->bufferInfos[i].buffers[bufferIndex],\r
+ bufferBytes );\r
+ }\r
+ }\r
+\r
+ if ( stream_.doByteSwap[1] )\r
+ byteSwapBuffer( stream_.userBuffer[1],\r
+ stream_.bufferSize * stream_.nUserChannels[1],\r
+ stream_.userFormat );\r
+ }\r
+ }\r
+\r
+ unlock:\r
+ // The following call was suggested by Malte Clasen. While the API\r
+ // documentation indicates it should not be required, some device\r
+ // drivers apparently do not function correctly without it.\r
+ ASIOOutputReady();\r
+\r
+ RtApi::tickStreamTime();\r
+ return SUCCESS;\r
+}\r
+\r
+static void sampleRateChanged( ASIOSampleRate sRate )\r
+{\r
+  // The ASIO documentation says that this usually only happens during
+  // external sync. Audio processing is not stopped by the driver; the
+  // actual sample rate may not even have changed, perhaps only the
+  // sample rate status of an AES/EBU or S/PDIF digital input at the
+  // audio device.
+\r
+ RtApi *object = (RtApi *) asioCallbackInfo->object;\r
+ try {\r
+ object->stopStream();\r
+ }\r
+ catch ( RtAudioError &exception ) {\r
+ std::cerr << "\nRtApiAsio: sampleRateChanged() error (" << exception.getMessage() << ")!\n" << std::endl;\r
+ return;\r
+ }\r
+\r
+ std::cerr << "\nRtApiAsio: driver reports sample rate changed to " << sRate << " ... stream stopped!!!\n" << std::endl;\r
+}\r
+\r
+static long asioMessages( long selector, long value, void* /*message*/, double* /*opt*/ )\r
+{\r
+ long ret = 0;\r
+\r
+ switch( selector ) {\r
+ case kAsioSelectorSupported:\r
+ if ( value == kAsioResetRequest\r
+ || value == kAsioEngineVersion\r
+ || value == kAsioResyncRequest\r
+ || value == kAsioLatenciesChanged\r
+         // The following three were added for ASIO 2.0; you don't
+         // necessarily have to support them.
+ || value == kAsioSupportsTimeInfo\r
+ || value == kAsioSupportsTimeCode\r
+ || value == kAsioSupportsInputMonitor)\r
+ ret = 1L;\r
+ break;\r
+ case kAsioResetRequest:\r
+    // Defer the task and perform the reset of the driver during the
+    // next "safe" situation. You cannot reset the driver right now,
+    // as this code is called from the driver. Resetting the driver is
+    // done by completely destroying it, i.e. ASIOStop(),
+    // ASIODisposeBuffers(), destruction. Afterwards you initialize the
+    // driver again.
+ std::cerr << "\nRtApiAsio: driver reset requested!!!" << std::endl;\r
+ ret = 1L;\r
+ break;\r
+ case kAsioResyncRequest:\r
+    // This informs the application that the driver encountered some
+    // non-fatal data loss. It is used for synchronization purposes
+    // between different media. It was added mainly to work around the
+    // Win16Mutex problems in Windows 95/98 with the Windows Multimedia
+    // system, which could lose data because the mutex was held too long
+    // by another thread. However, a driver can issue it in other
+    // situations, too.
+ // std::cerr << "\nRtApiAsio: driver resync requested!!!" << std::endl;\r
+ asioXRun = true;\r
+ ret = 1L;\r
+ break;\r
+ case kAsioLatenciesChanged:\r
+    // This informs the host application that the driver's latencies
+    // have changed. Beware, this does not mean that the buffer sizes
+    // have changed! You might need to update internal delay data.
+ std::cerr << "\nRtApiAsio: driver latency may have changed!!!" << std::endl;\r
+ ret = 1L;\r
+ break;\r
+ case kAsioEngineVersion:\r
+ // Return the supported ASIO version of the host application. If\r
+ // a host application does not implement this selector, ASIO 1.0\r
+ // is assumed by the driver.\r
+ ret = 2L;\r
+ break;\r
+ case kAsioSupportsTimeInfo:\r
+ // Informs the driver whether the\r
+ // asioCallbacks.bufferSwitchTimeInfo() callback is supported.\r
+ // For compatibility with ASIO 1.0 drivers the host application\r
+ // should always support the "old" bufferSwitch method, too.\r
+ ret = 0;\r
+ break;\r
+ case kAsioSupportsTimeCode:\r
+ // Informs the driver whether application is interested in time\r
+ // code info. If an application does not need to know about time\r
+ // code, the driver has less work to do.\r
+ ret = 0;\r
+ break;\r
+ }\r
+ return ret;\r
+}\r
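+
+// For reference, a minimal sketch of how the callbacks above are typically
+// registered through the ASIO SDK's ASIOCallbacks struct; the actual wiring
+// is done elsewhere in this implementation:
+//
+//   ASIOCallbacks asioCallbacks;
+//   asioCallbacks.bufferSwitch = &bufferSwitch;
+//   asioCallbacks.sampleRateDidChange = &sampleRateChanged;
+//   asioCallbacks.asioMessage = &asioMessages;
+//   asioCallbacks.bufferSwitchTimeInfo = NULL;
+//   ASIOCreateBuffers( bufferInfos, nChannels, bufferSize, &asioCallbacks );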
+\r
+static const char* getAsioErrorString( ASIOError result )\r
+{\r
+  struct Messages
+  {
+    ASIOError value;
+    const char* message;
+  };
+\r
+ static const Messages m[] = \r
+ {\r
+ { ASE_NotPresent, "Hardware input or output is not present or available." },\r
+ { ASE_HWMalfunction, "Hardware is malfunctioning." },\r
+ { ASE_InvalidParameter, "Invalid input parameter." },\r
+ { ASE_InvalidMode, "Invalid mode." },\r
+ { ASE_SPNotAdvancing, "Sample position not advancing." },\r
+ { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." },\r
+ { ASE_NoMemory, "Not enough memory to complete the request." }\r
+ };\r
+\r
+ for ( unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i )\r
+ if ( m[i].value == result ) return m[i].message;\r
+\r
+ return "Unknown error.";\r
+}\r
+\r
+//******************** End of __WINDOWS_ASIO__ *********************//\r
+#endif\r
+\r
+\r
+#if defined(__WINDOWS_WASAPI__) // Windows WASAPI API\r
+\r
+// Authored by Marcus Tomlinson <themarcustomlinson@gmail.com>, April 2014\r
+// - Introduces support for the Windows WASAPI API\r
+// - Aims to deliver bit streams to and from hardware at the lowest possible latency, via the absolute minimum buffer sizes required\r
+// - Provides flexible stream configuration to an otherwise strict and inflexible WASAPI interface\r
+// - Includes automatic internal conversion of sample rate and buffer size between hardware and the user\r
+\r
+#ifndef INITGUID\r
+ #define INITGUID\r
+#endif\r
+#include <audioclient.h>\r
+#include <avrt.h>\r
+#include <mmdeviceapi.h>\r
+#include <functiondiscoverykeys_devpkey.h>\r
+\r
+//=============================================================================\r
+\r
+#define SAFE_RELEASE( objectPtr )\\r
+if ( objectPtr )\\r
+{\\r
+ objectPtr->Release();\\r
+ objectPtr = NULL;\\r
+}\r
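+
+// For example, SAFE_RELEASE( devicePtr ); expands to
+//
+//   if ( devicePtr )
+//   {
+//     devicePtr->Release();
+//     devicePtr = NULL;
+//   }
+//
+// so a pointer can safely be released more than once at the Exit labels below.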
+\r
+typedef HANDLE ( __stdcall *TAvSetMmThreadCharacteristicsPtr )( LPCWSTR TaskName, LPDWORD TaskIndex );\r
+\r
+//-----------------------------------------------------------------------------\r
+\r
+// WASAPI dictates stream sample rate, format, channel count, and in some cases, buffer size.\r
+// Therefore we must perform all necessary conversions to user buffers in order to satisfy these\r
+// requirements. WasapiBuffer ring buffers are used between HwIn->UserIn and UserOut->HwOut to\r
+// provide intermediate storage for read / write synchronization.\r
+class WasapiBuffer\r
+{\r
+public:\r
+ WasapiBuffer()\r
+ : buffer_( NULL ),\r
+ bufferSize_( 0 ),\r
+ inIndex_( 0 ),\r
+ outIndex_( 0 ) {}\r
+\r
+ ~WasapiBuffer() {\r
+ free( buffer_ );\r
+ }\r
+\r
+ // sets the length of the internal ring buffer\r
+ void setBufferSize( unsigned int bufferSize, unsigned int formatBytes ) {\r
+ free( buffer_ );\r
+\r
+ buffer_ = ( char* ) calloc( bufferSize, formatBytes );\r
+\r
+ bufferSize_ = bufferSize;\r
+ inIndex_ = 0;\r
+ outIndex_ = 0;\r
+ }\r
+\r
+ // attempt to push a buffer into the ring buffer at the current "in" index\r
+ bool pushBuffer( char* buffer, unsigned int bufferSize, RtAudioFormat format )\r
+ {\r
+ if ( !buffer || // incoming buffer is NULL\r
+ bufferSize == 0 || // incoming buffer has no data\r
+ bufferSize > bufferSize_ ) // incoming buffer too large\r
+ {\r
+ return false;\r
+ }\r
+\r
+ unsigned int relOutIndex = outIndex_;\r
+ unsigned int inIndexEnd = inIndex_ + bufferSize;\r
+ if ( relOutIndex < inIndex_ && inIndexEnd >= bufferSize_ ) {\r
+ relOutIndex += bufferSize_;\r
+ }\r
+\r
+ // "in" index can end on the "out" index but cannot begin at it\r
+ if ( inIndex_ <= relOutIndex && inIndexEnd > relOutIndex ) {\r
+ return false; // not enough space between "in" index and "out" index\r
+ }\r
+\r
+ // copy buffer from external to internal\r
+ int fromZeroSize = inIndex_ + bufferSize - bufferSize_;\r
+ fromZeroSize = fromZeroSize < 0 ? 0 : fromZeroSize;\r
+ int fromInSize = bufferSize - fromZeroSize;\r
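+    // Example of the wraparound split with hypothetical values: if
+    // bufferSize_ = 1024, inIndex_ = 900 and bufferSize = 200, then
+    // fromZeroSize = 76 and fromInSize = 124; 124 samples land at index 900
+    // and the remaining 76 wrap around to index 0. The indices count
+    // samples, so the memcpy byte counts below scale by the sample size.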
+\r
+ switch( format )\r
+ {\r
+ case RTAUDIO_SINT8:\r
+ memcpy( &( ( char* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( char ) );\r
+ memcpy( buffer_, &( ( char* ) buffer )[fromInSize], fromZeroSize * sizeof( char ) );\r
+ break;\r
+ case RTAUDIO_SINT16:\r
+ memcpy( &( ( short* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( short ) );\r
+ memcpy( buffer_, &( ( short* ) buffer )[fromInSize], fromZeroSize * sizeof( short ) );\r
+ break;\r
+ case RTAUDIO_SINT24:\r
+ memcpy( &( ( S24* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( S24 ) );\r
+ memcpy( buffer_, &( ( S24* ) buffer )[fromInSize], fromZeroSize * sizeof( S24 ) );\r
+ break;\r
+ case RTAUDIO_SINT32:\r
+ memcpy( &( ( int* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( int ) );\r
+ memcpy( buffer_, &( ( int* ) buffer )[fromInSize], fromZeroSize * sizeof( int ) );\r
+ break;\r
+ case RTAUDIO_FLOAT32:\r
+ memcpy( &( ( float* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( float ) );\r
+ memcpy( buffer_, &( ( float* ) buffer )[fromInSize], fromZeroSize * sizeof( float ) );\r
+ break;\r
+ case RTAUDIO_FLOAT64:\r
+ memcpy( &( ( double* ) buffer_ )[inIndex_], buffer, fromInSize * sizeof( double ) );\r
+ memcpy( buffer_, &( ( double* ) buffer )[fromInSize], fromZeroSize * sizeof( double ) );\r
+ break;\r
+ }\r
+\r
+ // update "in" index\r
+ inIndex_ += bufferSize;\r
+ inIndex_ %= bufferSize_;\r
+\r
+ return true;\r
+ }\r
+\r
+ // attempt to pull a buffer from the ring buffer from the current "out" index\r
+ bool pullBuffer( char* buffer, unsigned int bufferSize, RtAudioFormat format )\r
+ {\r
+ if ( !buffer || // incoming buffer is NULL\r
+ bufferSize == 0 || // incoming buffer has no data\r
+ bufferSize > bufferSize_ ) // incoming buffer too large\r
+ {\r
+ return false;\r
+ }\r
+\r
+ unsigned int relInIndex = inIndex_;\r
+ unsigned int outIndexEnd = outIndex_ + bufferSize;\r
+ if ( relInIndex < outIndex_ && outIndexEnd >= bufferSize_ ) {\r
+ relInIndex += bufferSize_;\r
+ }\r
+\r
+ // "out" index can begin at and end on the "in" index\r
+ if ( outIndex_ < relInIndex && outIndexEnd > relInIndex ) {\r
+ return false; // not enough space between "out" index and "in" index\r
+ }\r
+\r
+ // copy buffer from internal to external\r
+ int fromZeroSize = outIndex_ + bufferSize - bufferSize_;\r
+ fromZeroSize = fromZeroSize < 0 ? 0 : fromZeroSize;\r
+ int fromOutSize = bufferSize - fromZeroSize;\r
+\r
+ switch( format )\r
+ {\r
+ case RTAUDIO_SINT8:\r
+ memcpy( buffer, &( ( char* ) buffer_ )[outIndex_], fromOutSize * sizeof( char ) );\r
+ memcpy( &( ( char* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( char ) );\r
+ break;\r
+ case RTAUDIO_SINT16:\r
+ memcpy( buffer, &( ( short* ) buffer_ )[outIndex_], fromOutSize * sizeof( short ) );\r
+ memcpy( &( ( short* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( short ) );\r
+ break;\r
+ case RTAUDIO_SINT24:\r
+ memcpy( buffer, &( ( S24* ) buffer_ )[outIndex_], fromOutSize * sizeof( S24 ) );\r
+ memcpy( &( ( S24* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( S24 ) );\r
+ break;\r
+ case RTAUDIO_SINT32:\r
+ memcpy( buffer, &( ( int* ) buffer_ )[outIndex_], fromOutSize * sizeof( int ) );\r
+ memcpy( &( ( int* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( int ) );\r
+ break;\r
+ case RTAUDIO_FLOAT32:\r
+ memcpy( buffer, &( ( float* ) buffer_ )[outIndex_], fromOutSize * sizeof( float ) );\r
+ memcpy( &( ( float* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( float ) );\r
+ break;\r
+ case RTAUDIO_FLOAT64:\r
+ memcpy( buffer, &( ( double* ) buffer_ )[outIndex_], fromOutSize * sizeof( double ) );\r
+ memcpy( &( ( double* ) buffer )[fromOutSize], buffer_, fromZeroSize * sizeof( double ) );\r
+ break;\r
+ }\r
+\r
+ // update "out" index\r
+ outIndex_ += bufferSize;\r
+ outIndex_ %= bufferSize_;\r
+\r
+ return true;\r
+ }\r
+\r
+private:\r
+ char* buffer_;\r
+ unsigned int bufferSize_;\r
+ unsigned int inIndex_;\r
+ unsigned int outIndex_;\r
+};\r
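+
+// A minimal usage sketch of WasapiBuffer (hypothetical sizes). The sizes
+// count samples rather than frames, so frame counts are multiplied by the
+// channel count before pushing or pulling:
+//
+//   WasapiBuffer ring;
+//   ring.setBufferSize( 4096, formatBytes( RTAUDIO_FLOAT32 ) );
+//   bool pushed = ring.pushBuffer( inData, 512 * 2, RTAUDIO_FLOAT32 );  // false when full
+//   bool pulled = ring.pullBuffer( outData, 512 * 2, RTAUDIO_FLOAT32 ); // false when empty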
+\r
+//-----------------------------------------------------------------------------\r
+\r
+// In order to satisfy WASAPI's buffer requirements, we need a means of converting sample rate\r
+// between HW and the user. The convertBufferWasapi function is used to perform this conversion\r
+// between HwIn->UserIn and UserOut->HwOut during the stream callback loop.\r
+// This sample rate converter favors speed over quality, and works best with conversions between\r
+// one rate and its multiple.\r
+void convertBufferWasapi( char* outBuffer,\r
+ const char* inBuffer,\r
+ const unsigned int& channelCount,\r
+ const unsigned int& inSampleRate,\r
+ const unsigned int& outSampleRate,\r
+ const unsigned int& inSampleCount,\r
+ unsigned int& outSampleCount,\r
+ const RtAudioFormat& format )\r
+{\r
+ // calculate the new outSampleCount and relative sampleStep\r
+ float sampleRatio = ( float ) outSampleRate / inSampleRate;\r
+ float sampleStep = 1.0f / sampleRatio;\r
+ float inSampleFraction = 0.0f;\r
+\r
+ outSampleCount = ( unsigned int ) roundf( inSampleCount * sampleRatio );\r
+\r
+  // frame-by-frame, copy each relative input sample into its corresponding output sample
+ for ( unsigned int outSample = 0; outSample < outSampleCount; outSample++ )\r
+ {\r
+ unsigned int inSample = ( unsigned int ) inSampleFraction;\r
+\r
+ switch ( format )\r
+ {\r
+ case RTAUDIO_SINT8:\r
+ memcpy( &( ( char* ) outBuffer )[ outSample * channelCount ], &( ( char* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( char ) );\r
+ break;\r
+ case RTAUDIO_SINT16:\r
+ memcpy( &( ( short* ) outBuffer )[ outSample * channelCount ], &( ( short* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( short ) );\r
+ break;\r
+ case RTAUDIO_SINT24:\r
+ memcpy( &( ( S24* ) outBuffer )[ outSample * channelCount ], &( ( S24* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( S24 ) );\r
+ break;\r
+ case RTAUDIO_SINT32:\r
+ memcpy( &( ( int* ) outBuffer )[ outSample * channelCount ], &( ( int* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( int ) );\r
+ break;\r
+ case RTAUDIO_FLOAT32:\r
+ memcpy( &( ( float* ) outBuffer )[ outSample * channelCount ], &( ( float* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( float ) );\r
+ break;\r
+ case RTAUDIO_FLOAT64:\r
+ memcpy( &( ( double* ) outBuffer )[ outSample * channelCount ], &( ( double* ) inBuffer )[ inSample * channelCount ], channelCount * sizeof( double ) );\r
+ break;\r
+ }\r
+\r
+ // jump to next in sample\r
+ inSampleFraction += sampleStep;\r
+ }\r
+}\r
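+
+// Worked example with hypothetical rates: converting inSampleCount = 512
+// frames from 44100 Hz to 48000 Hz gives sampleRatio ~= 1.088, so
+// outSampleCount = roundf( 512 * 1.088 ) = 557 and sampleStep ~= 0.919.
+// Each output frame copies the input frame at floor( inSampleFraction ),
+// i.e. nearest-lower-neighbor resampling with no interpolation, which is
+// why this converter favors speed over quality.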
+\r
+//-----------------------------------------------------------------------------\r
+\r
+// A structure to hold various information related to the WASAPI implementation.\r
+struct WasapiHandle\r
+{\r
+ IAudioClient* captureAudioClient;\r
+ IAudioClient* renderAudioClient;\r
+ IAudioCaptureClient* captureClient;\r
+ IAudioRenderClient* renderClient;\r
+ HANDLE captureEvent;\r
+ HANDLE renderEvent;\r
+\r
+ WasapiHandle()\r
+ : captureAudioClient( NULL ),\r
+ renderAudioClient( NULL ),\r
+ captureClient( NULL ),\r
+ renderClient( NULL ),\r
+ captureEvent( NULL ),\r
+ renderEvent( NULL ) {}\r
+};\r
+\r
+//=============================================================================\r
+\r
+RtApiWasapi::RtApiWasapi()\r
+ : coInitialized_( false ), deviceEnumerator_( NULL )\r
+{\r
+  // WASAPI can run either apartment-threaded or multi-threaded
+ HRESULT hr = CoInitialize( NULL );\r
+ if ( !FAILED( hr ) )\r
+ coInitialized_ = true;\r
+\r
+ // Instantiate device enumerator\r
+ hr = CoCreateInstance( __uuidof( MMDeviceEnumerator ), NULL,\r
+ CLSCTX_ALL, __uuidof( IMMDeviceEnumerator ),\r
+ ( void** ) &deviceEnumerator_ );\r
+\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::RtApiWasapi: Unable to instantiate device enumerator";\r
+ error( RtAudioError::DRIVER_ERROR );\r
+ }\r
+}\r
+\r
+//-----------------------------------------------------------------------------\r
+\r
+RtApiWasapi::~RtApiWasapi()\r
+{\r
+ if ( stream_.state != STREAM_CLOSED )\r
+ closeStream();\r
+\r
+ SAFE_RELEASE( deviceEnumerator_ );\r
+\r
+ // If this object previously called CoInitialize()\r
+ if ( coInitialized_ )\r
+ CoUninitialize();\r
+}\r
+\r
+//=============================================================================\r
+\r
+unsigned int RtApiWasapi::getDeviceCount( void )\r
+{\r
+ unsigned int captureDeviceCount = 0;\r
+ unsigned int renderDeviceCount = 0;\r
+\r
+ IMMDeviceCollection* captureDevices = NULL;\r
+ IMMDeviceCollection* renderDevices = NULL;\r
+\r
+ // Count capture devices\r
+ errorText_.clear();\r
+ HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve capture device collection.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = captureDevices->GetCount( &captureDeviceCount );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve capture device count.";\r
+ goto Exit;\r
+ }\r
+\r
+ // Count render devices\r
+ hr = deviceEnumerator_->EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &renderDevices );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve render device collection.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = renderDevices->GetCount( &renderDeviceCount );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceCount: Unable to retrieve render device count.";\r
+ goto Exit;\r
+ }\r
+\r
+Exit:\r
+ // release all references\r
+ SAFE_RELEASE( captureDevices );\r
+ SAFE_RELEASE( renderDevices );\r
+\r
+ if ( errorText_.empty() )\r
+ return captureDeviceCount + renderDeviceCount;\r
+\r
+ error( RtAudioError::DRIVER_ERROR );\r
+ return 0;\r
+}\r
+\r
+//-----------------------------------------------------------------------------\r
+\r
+RtAudio::DeviceInfo RtApiWasapi::getDeviceInfo( unsigned int device )\r
+{\r
+ RtAudio::DeviceInfo info;\r
+ unsigned int captureDeviceCount = 0;\r
+ unsigned int renderDeviceCount = 0;\r
+ std::string defaultDeviceName;\r
+ bool isCaptureDevice = false;\r
+\r
+  PROPVARIANT deviceNameProp;
+  PROPVARIANT defaultDeviceNameProp;
+  // initialize these here so PropVariantClear() at Exit is safe on every path
+  PropVariantInit( &deviceNameProp );
+  PropVariantInit( &defaultDeviceNameProp );
+\r
+ IMMDeviceCollection* captureDevices = NULL;\r
+ IMMDeviceCollection* renderDevices = NULL;\r
+ IMMDevice* devicePtr = NULL;\r
+ IMMDevice* defaultDevicePtr = NULL;\r
+ IAudioClient* audioClient = NULL;\r
+ IPropertyStore* devicePropStore = NULL;\r
+ IPropertyStore* defaultDevicePropStore = NULL;\r
+\r
+ WAVEFORMATEX* deviceFormat = NULL;\r
+ WAVEFORMATEX* closestMatchFormat = NULL;\r
+\r
+ // probed\r
+ info.probed = false;\r
+\r
+ // Count capture devices\r
+ errorText_.clear();\r
+ RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR;\r
+ HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve capture device collection.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = captureDevices->GetCount( &captureDeviceCount );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve capture device count.";\r
+ goto Exit;\r
+ }\r
+\r
+ // Count render devices\r
+ hr = deviceEnumerator_->EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &renderDevices );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve render device collection.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = renderDevices->GetCount( &renderDeviceCount );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve render device count.";\r
+ goto Exit;\r
+ }\r
+\r
+ // validate device index\r
+ if ( device >= captureDeviceCount + renderDeviceCount ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Invalid device index.";\r
+ errorType = RtAudioError::INVALID_USE;\r
+ goto Exit;\r
+ }\r
+\r
+ // determine whether index falls within capture or render devices\r
+ if ( device >= renderDeviceCount ) {\r
+ hr = captureDevices->Item( device - renderDeviceCount, &devicePtr );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve capture device handle.";\r
+ goto Exit;\r
+ }\r
+ isCaptureDevice = true;\r
+ }\r
+ else {\r
+ hr = renderDevices->Item( device, &devicePtr );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve render device handle.";\r
+ goto Exit;\r
+ }\r
+ isCaptureDevice = false;\r
+ }\r
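+  // Note: device indices span render devices first, then capture devices.
+  // For example, with 3 render and 2 capture devices, indices 0-2 address
+  // the render devices and indices 3-4 the capture devices.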
+\r
+ // get default device name\r
+ if ( isCaptureDevice ) {\r
+ hr = deviceEnumerator_->GetDefaultAudioEndpoint( eCapture, eConsole, &defaultDevicePtr );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve default capture device handle.";\r
+ goto Exit;\r
+ }\r
+ }\r
+ else {\r
+ hr = deviceEnumerator_->GetDefaultAudioEndpoint( eRender, eConsole, &defaultDevicePtr );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve default render device handle.";\r
+ goto Exit;\r
+ }\r
+ }\r
+\r
+ hr = defaultDevicePtr->OpenPropertyStore( STGM_READ, &defaultDevicePropStore );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to open default device property store.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = defaultDevicePropStore->GetValue( PKEY_Device_FriendlyName, &defaultDeviceNameProp );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve default device property: PKEY_Device_FriendlyName.";\r
+ goto Exit;\r
+ }\r
+\r
+ defaultDeviceName = convertCharPointerToStdString(defaultDeviceNameProp.pwszVal);\r
+\r
+ // name\r
+ hr = devicePtr->OpenPropertyStore( STGM_READ, &devicePropStore );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to open device property store.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = devicePropStore->GetValue( PKEY_Device_FriendlyName, &deviceNameProp );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve device property: PKEY_Device_FriendlyName.";\r
+ goto Exit;\r
+ }\r
+\r
+  info.name = convertCharPointerToStdString(deviceNameProp.pwszVal);
+\r
+ // is default\r
+ if ( isCaptureDevice ) {\r
+ info.isDefaultInput = info.name == defaultDeviceName;\r
+ info.isDefaultOutput = false;\r
+ }\r
+ else {\r
+ info.isDefaultInput = false;\r
+ info.isDefaultOutput = info.name == defaultDeviceName;\r
+ }\r
+\r
+ // channel count\r
+ hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL, NULL, ( void** ) &audioClient );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve device audio client.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = audioClient->GetMixFormat( &deviceFormat );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::getDeviceInfo: Unable to retrieve device mix format.";\r
+ goto Exit;\r
+ }\r
+\r
+ if ( isCaptureDevice ) {\r
+ info.inputChannels = deviceFormat->nChannels;\r
+ info.outputChannels = 0;\r
+ info.duplexChannels = 0;\r
+ }\r
+ else {\r
+ info.inputChannels = 0;\r
+ info.outputChannels = deviceFormat->nChannels;\r
+ info.duplexChannels = 0;\r
+ }\r
+\r
+ // sample rates\r
+ info.sampleRates.clear();\r
+\r
+ // allow support for all sample rates as we have a built-in sample rate converter\r
+ for ( unsigned int i = 0; i < MAX_SAMPLE_RATES; i++ ) {\r
+ info.sampleRates.push_back( SAMPLE_RATES[i] );\r
+ }\r
+ info.preferredSampleRate = deviceFormat->nSamplesPerSec;\r
+\r
+ // native format\r
+ info.nativeFormats = 0;\r
+\r
+ if ( deviceFormat->wFormatTag == WAVE_FORMAT_IEEE_FLOAT ||\r
+ ( deviceFormat->wFormatTag == WAVE_FORMAT_EXTENSIBLE &&\r
+ ( ( WAVEFORMATEXTENSIBLE* ) deviceFormat )->SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT ) )\r
+ {\r
+ if ( deviceFormat->wBitsPerSample == 32 ) {\r
+ info.nativeFormats |= RTAUDIO_FLOAT32;\r
+ }\r
+ else if ( deviceFormat->wBitsPerSample == 64 ) {\r
+ info.nativeFormats |= RTAUDIO_FLOAT64;\r
+ }\r
+ }\r
+ else if ( deviceFormat->wFormatTag == WAVE_FORMAT_PCM ||\r
+ ( deviceFormat->wFormatTag == WAVE_FORMAT_EXTENSIBLE &&\r
+ ( ( WAVEFORMATEXTENSIBLE* ) deviceFormat )->SubFormat == KSDATAFORMAT_SUBTYPE_PCM ) )\r
+ {\r
+ if ( deviceFormat->wBitsPerSample == 8 ) {\r
+ info.nativeFormats |= RTAUDIO_SINT8;\r
+ }\r
+ else if ( deviceFormat->wBitsPerSample == 16 ) {\r
+ info.nativeFormats |= RTAUDIO_SINT16;\r
+ }\r
+ else if ( deviceFormat->wBitsPerSample == 24 ) {\r
+ info.nativeFormats |= RTAUDIO_SINT24;\r
+ }\r
+ else if ( deviceFormat->wBitsPerSample == 32 ) {\r
+ info.nativeFormats |= RTAUDIO_SINT32;\r
+ }\r
+ }\r
+\r
+ // probed\r
+ info.probed = true;\r
+\r
+Exit:\r
+ // release all references\r
+ PropVariantClear( &deviceNameProp );\r
+ PropVariantClear( &defaultDeviceNameProp );\r
+\r
+ SAFE_RELEASE( captureDevices );\r
+ SAFE_RELEASE( renderDevices );\r
+ SAFE_RELEASE( devicePtr );\r
+ SAFE_RELEASE( defaultDevicePtr );\r
+ SAFE_RELEASE( audioClient );\r
+ SAFE_RELEASE( devicePropStore );\r
+ SAFE_RELEASE( defaultDevicePropStore );\r
+\r
+ CoTaskMemFree( deviceFormat );\r
+ CoTaskMemFree( closestMatchFormat );\r
+\r
+ if ( !errorText_.empty() )\r
+ error( errorType );\r
+ return info;\r
+}\r
+\r
+//-----------------------------------------------------------------------------\r
+\r
+unsigned int RtApiWasapi::getDefaultOutputDevice( void )\r
+{\r
+ for ( unsigned int i = 0; i < getDeviceCount(); i++ ) {\r
+ if ( getDeviceInfo( i ).isDefaultOutput ) {\r
+ return i;\r
+ }\r
+ }\r
+\r
+ return 0;\r
+}\r
+\r
+//-----------------------------------------------------------------------------\r
+\r
+unsigned int RtApiWasapi::getDefaultInputDevice( void )\r
+{\r
+ for ( unsigned int i = 0; i < getDeviceCount(); i++ ) {\r
+ if ( getDeviceInfo( i ).isDefaultInput ) {\r
+ return i;\r
+ }\r
+ }\r
+\r
+ return 0;\r
+}\r
+\r
+//-----------------------------------------------------------------------------\r
+\r
+void RtApiWasapi::closeStream( void )\r
+{\r
+ if ( stream_.state == STREAM_CLOSED ) {\r
+ errorText_ = "RtApiWasapi::closeStream: No open stream to close.";\r
+ error( RtAudioError::WARNING );\r
+ return;\r
+ }\r
+\r
+ if ( stream_.state != STREAM_STOPPED )\r
+ stopStream();\r
+\r
+ // clean up stream memory\r
+ SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient )\r
+ SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient )\r
+\r
+ SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->captureClient )\r
+ SAFE_RELEASE( ( ( WasapiHandle* ) stream_.apiHandle )->renderClient )\r
+\r
+ if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent )\r
+ CloseHandle( ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent );\r
+\r
+ if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent )\r
+ CloseHandle( ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent );\r
+\r
+ delete ( WasapiHandle* ) stream_.apiHandle;\r
+ stream_.apiHandle = NULL;\r
+\r
+ for ( int i = 0; i < 2; i++ ) {\r
+ if ( stream_.userBuffer[i] ) {\r
+ free( stream_.userBuffer[i] );\r
+ stream_.userBuffer[i] = 0;\r
+ }\r
+ }\r
+\r
+ if ( stream_.deviceBuffer ) {\r
+ free( stream_.deviceBuffer );\r
+ stream_.deviceBuffer = 0;\r
+ }\r
+\r
+ // update stream state\r
+ stream_.state = STREAM_CLOSED;\r
+}\r
+\r
+//-----------------------------------------------------------------------------\r
+\r
+void RtApiWasapi::startStream( void )\r
+{\r
+ verifyStream();\r
+\r
+ if ( stream_.state == STREAM_RUNNING ) {\r
+ errorText_ = "RtApiWasapi::startStream: The stream is already running.";\r
+ error( RtAudioError::WARNING );\r
+ return;\r
+ }\r
+\r
+ // update stream state\r
+ stream_.state = STREAM_RUNNING;\r
+\r
+ // create WASAPI stream thread\r
+ stream_.callbackInfo.thread = ( ThreadHandle ) CreateThread( NULL, 0, runWasapiThread, this, CREATE_SUSPENDED, NULL );\r
+\r
+ if ( !stream_.callbackInfo.thread ) {\r
+ errorText_ = "RtApiWasapi::startStream: Unable to instantiate callback thread.";\r
+ error( RtAudioError::THREAD_ERROR );\r
+ }\r
+ else {\r
+ SetThreadPriority( ( void* ) stream_.callbackInfo.thread, stream_.callbackInfo.priority );\r
+ ResumeThread( ( void* ) stream_.callbackInfo.thread );\r
+ }\r
+}\r
+\r
+//-----------------------------------------------------------------------------\r
+\r
+void RtApiWasapi::stopStream( void )\r
+{\r
+ verifyStream();\r
+\r
+ if ( stream_.state == STREAM_STOPPED ) {\r
+ errorText_ = "RtApiWasapi::stopStream: The stream is already stopped.";\r
+ error( RtAudioError::WARNING );\r
+ return;\r
+ }\r
+\r
+ // inform stream thread by setting stream state to STREAM_STOPPING\r
+ stream_.state = STREAM_STOPPING;\r
+\r
+ // wait until stream thread is stopped\r
+ while( stream_.state != STREAM_STOPPED ) {\r
+ Sleep( 1 );\r
+ }\r
+\r
+ // Wait for the last buffer to play before stopping.\r
+ Sleep( 1000 * stream_.bufferSize / stream_.sampleRate );\r
+\r
+ // stop capture client if applicable\r
+ if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient ) {\r
+ HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient->Stop();\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::stopStream: Unable to stop capture stream.";\r
+ error( RtAudioError::DRIVER_ERROR );\r
+ return;\r
+ }\r
+ }\r
+\r
+ // stop render client if applicable\r
+ if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient ) {\r
+ HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient->Stop();\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::stopStream: Unable to stop render stream.";\r
+ error( RtAudioError::DRIVER_ERROR );\r
+ return;\r
+ }\r
+ }\r
+\r
+ // close thread handle\r
+ if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {\r
+ errorText_ = "RtApiWasapi::stopStream: Unable to close callback thread.";\r
+ error( RtAudioError::THREAD_ERROR );\r
+ return;\r
+ }\r
+\r
+ stream_.callbackInfo.thread = (ThreadHandle) NULL;\r
+}\r
+\r
+//-----------------------------------------------------------------------------\r
+\r
+void RtApiWasapi::abortStream( void )\r
+{\r
+ verifyStream();\r
+\r
+ if ( stream_.state == STREAM_STOPPED ) {\r
+ errorText_ = "RtApiWasapi::abortStream: The stream is already stopped.";\r
+ error( RtAudioError::WARNING );\r
+ return;\r
+ }\r
+\r
+ // inform stream thread by setting stream state to STREAM_STOPPING\r
+ stream_.state = STREAM_STOPPING;\r
+\r
+ // wait until stream thread is stopped\r
+ while ( stream_.state != STREAM_STOPPED ) {\r
+ Sleep( 1 );\r
+ }\r
+\r
+ // stop capture client if applicable\r
+ if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient ) {\r
+ HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient->Stop();\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::abortStream: Unable to stop capture stream.";\r
+ error( RtAudioError::DRIVER_ERROR );\r
+ return;\r
+ }\r
+ }\r
+\r
+ // stop render client if applicable\r
+ if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient ) {\r
+ HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient->Stop();\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::abortStream: Unable to stop render stream.";\r
+ error( RtAudioError::DRIVER_ERROR );\r
+ return;\r
+ }\r
+ }\r
+\r
+ // close thread handle\r
+ if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {\r
+ errorText_ = "RtApiWasapi::abortStream: Unable to close callback thread.";\r
+ error( RtAudioError::THREAD_ERROR );\r
+ return;\r
+ }\r
+\r
+ stream_.callbackInfo.thread = (ThreadHandle) NULL;\r
+}\r
+\r
+//-----------------------------------------------------------------------------\r
+\r
+bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigned int channels,\r
+ unsigned int firstChannel, unsigned int sampleRate,\r
+ RtAudioFormat format, unsigned int* bufferSize,\r
+ RtAudio::StreamOptions* options )\r
+{\r
+ bool methodResult = FAILURE;\r
+ unsigned int captureDeviceCount = 0;\r
+ unsigned int renderDeviceCount = 0;\r
+\r
+ IMMDeviceCollection* captureDevices = NULL;\r
+ IMMDeviceCollection* renderDevices = NULL;\r
+ IMMDevice* devicePtr = NULL;\r
+ WAVEFORMATEX* deviceFormat = NULL;\r
+ unsigned int bufferBytes;\r
+ stream_.state = STREAM_STOPPED;\r
+\r
+ // create API Handle if not already created\r
+ if ( !stream_.apiHandle )\r
+ stream_.apiHandle = ( void* ) new WasapiHandle();\r
+\r
+ // Count capture devices\r
+ errorText_.clear();\r
+ RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR;\r
+ HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device collection.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = captureDevices->GetCount( &captureDeviceCount );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device count.";\r
+ goto Exit;\r
+ }\r
+\r
+ // Count render devices\r
+ hr = deviceEnumerator_->EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &renderDevices );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device collection.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = renderDevices->GetCount( &renderDeviceCount );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device count.";\r
+ goto Exit;\r
+ }\r
+\r
+ // validate device index\r
+ if ( device >= captureDeviceCount + renderDeviceCount ) {\r
+ errorType = RtAudioError::INVALID_USE;\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Invalid device index.";\r
+ goto Exit;\r
+ }\r
+\r
+ // determine whether index falls within capture or render devices\r
+ if ( device >= renderDeviceCount ) {\r
+ if ( mode != INPUT ) {\r
+ errorType = RtAudioError::INVALID_USE;\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Capture device selected as output device.";\r
+ goto Exit;\r
+ }\r
+\r
+ // retrieve captureAudioClient from devicePtr\r
+ IAudioClient*& captureAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient;\r
+\r
+ hr = captureDevices->Item( device - renderDeviceCount, &devicePtr );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device handle.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL,\r
+ NULL, ( void** ) &captureAudioClient );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device audio client.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = captureAudioClient->GetMixFormat( &deviceFormat );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device mix format.";\r
+ goto Exit;\r
+ }\r
+\r
+ stream_.nDeviceChannels[mode] = deviceFormat->nChannels;\r
+ captureAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] );\r
+ }\r
+ else {\r
+ if ( mode != OUTPUT ) {\r
+ errorType = RtAudioError::INVALID_USE;\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Render device selected as input device.";\r
+ goto Exit;\r
+ }\r
+\r
+ // retrieve renderAudioClient from devicePtr\r
+ IAudioClient*& renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient;\r
+\r
+ hr = renderDevices->Item( device, &devicePtr );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device handle.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL,\r
+ NULL, ( void** ) &renderAudioClient );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device audio client.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = renderAudioClient->GetMixFormat( &deviceFormat );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device mix format.";\r
+ goto Exit;\r
+ }\r
+\r
+ stream_.nDeviceChannels[mode] = deviceFormat->nChannels;\r
+ renderAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] );\r
+ }\r
+\r
+ // fill stream data\r
+ if ( ( stream_.mode == OUTPUT && mode == INPUT ) ||\r
+ ( stream_.mode == INPUT && mode == OUTPUT ) ) {\r
+ stream_.mode = DUPLEX;\r
+ }\r
+ else {\r
+ stream_.mode = mode;\r
+ }\r
+\r
+ stream_.device[mode] = device;\r
+ stream_.doByteSwap[mode] = false;\r
+ stream_.sampleRate = sampleRate;\r
+ stream_.bufferSize = *bufferSize;\r
+ stream_.nBuffers = 1;\r
+ stream_.nUserChannels[mode] = channels;\r
+ stream_.channelOffset[mode] = firstChannel;\r
+ stream_.userFormat = format;\r
+ stream_.deviceFormat[mode] = getDeviceInfo( device ).nativeFormats;\r
+\r
+ if ( options && options->flags & RTAUDIO_NONINTERLEAVED )\r
+ stream_.userInterleaved = false;\r
+ else\r
+ stream_.userInterleaved = true;\r
+ stream_.deviceInterleaved[mode] = true;\r
+\r
+ // Set flags for buffer conversion.\r
+ stream_.doConvertBuffer[mode] = false;\r
+  if ( stream_.userFormat != stream_.deviceFormat[mode] ||
+       stream_.nUserChannels[mode] != stream_.nDeviceChannels[mode] )
+ stream_.doConvertBuffer[mode] = true;\r
+ else if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&\r
+ stream_.nUserChannels[mode] > 1 )\r
+ stream_.doConvertBuffer[mode] = true;\r
+\r
+ if ( stream_.doConvertBuffer[mode] )\r
+ setConvertInfo( mode, 0 );\r
+\r
+ // Allocate necessary internal buffers\r
+ bufferBytes = stream_.nUserChannels[mode] * stream_.bufferSize * formatBytes( stream_.userFormat );\r
+\r
+ stream_.userBuffer[mode] = ( char* ) calloc( bufferBytes, 1 );\r
+ if ( !stream_.userBuffer[mode] ) {\r
+ errorType = RtAudioError::MEMORY_ERROR;\r
+ errorText_ = "RtApiWasapi::probeDeviceOpen: Error allocating user buffer memory.";\r
+ goto Exit;\r
+ }\r
+\r
+ if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME )\r
+ stream_.callbackInfo.priority = 15;\r
+ else\r
+ stream_.callbackInfo.priority = 0;\r
+\r
+ ///! TODO: RTAUDIO_MINIMIZE_LATENCY // Provide stream buffers directly to callback\r
+ ///! TODO: RTAUDIO_HOG_DEVICE // Exclusive mode\r
+\r
+ methodResult = SUCCESS;\r
+\r
+Exit:\r
+ //clean up\r
+ SAFE_RELEASE( captureDevices );\r
+ SAFE_RELEASE( renderDevices );\r
+ SAFE_RELEASE( devicePtr );\r
+ CoTaskMemFree( deviceFormat );\r
+\r
+ // if method failed, close the stream\r
+ if ( methodResult == FAILURE )\r
+ closeStream();\r
+\r
+ if ( !errorText_.empty() )\r
+ error( errorType );\r
+ return methodResult;\r
+}\r
+\r
+//=============================================================================\r
+\r
+DWORD WINAPI RtApiWasapi::runWasapiThread( void* wasapiPtr )\r
+{\r
+ if ( wasapiPtr )\r
+ ( ( RtApiWasapi* ) wasapiPtr )->wasapiThread();\r
+\r
+ return 0;\r
+}\r
+\r
+DWORD WINAPI RtApiWasapi::stopWasapiThread( void* wasapiPtr )\r
+{\r
+ if ( wasapiPtr )\r
+ ( ( RtApiWasapi* ) wasapiPtr )->stopStream();\r
+\r
+ return 0;\r
+}\r
+\r
+DWORD WINAPI RtApiWasapi::abortWasapiThread( void* wasapiPtr )\r
+{\r
+ if ( wasapiPtr )\r
+ ( ( RtApiWasapi* ) wasapiPtr )->abortStream();\r
+\r
+ return 0;\r
+}\r
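+
+// Note: stopStream() and abortStream() block until the stream thread has
+// exited, so the stream thread cannot call them directly without
+// deadlocking. Instead, wasapiThread() spawns one of the helper threads
+// above when the user callback returns 1 (stop) or 2 (abort).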
+\r
+//-----------------------------------------------------------------------------\r
+\r
+void RtApiWasapi::wasapiThread()\r
+{\r
+ // as this is a new thread, we must CoInitialize it\r
+ CoInitialize( NULL );\r
+\r
+ HRESULT hr;\r
+\r
+ IAudioClient* captureAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient;\r
+ IAudioClient* renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient;\r
+ IAudioCaptureClient* captureClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureClient;\r
+ IAudioRenderClient* renderClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderClient;\r
+ HANDLE captureEvent = ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent;\r
+ HANDLE renderEvent = ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent;\r
+\r
+ WAVEFORMATEX* captureFormat = NULL;\r
+ WAVEFORMATEX* renderFormat = NULL;\r
+ float captureSrRatio = 0.0f;\r
+ float renderSrRatio = 0.0f;\r
+ WasapiBuffer captureBuffer;\r
+ WasapiBuffer renderBuffer;\r
+\r
+ // declare local stream variables\r
+ RtAudioCallback callback = ( RtAudioCallback ) stream_.callbackInfo.callback;\r
+ BYTE* streamBuffer = NULL;\r
+ unsigned long captureFlags = 0;\r
+ unsigned int bufferFrameCount = 0;\r
+ unsigned int numFramesPadding = 0;\r
+ unsigned int convBufferSize = 0;\r
+ bool callbackPushed = false;\r
+ bool callbackPulled = false;\r
+ bool callbackStopped = false;\r
+ int callbackResult = 0;\r
+\r
+ // convBuffer is used to store converted buffers between WASAPI and the user\r
+ char* convBuffer = NULL;\r
+ unsigned int convBuffSize = 0;\r
+ unsigned int deviceBuffSize = 0;\r
+\r
+ errorText_.clear();\r
+ RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR;\r
+\r
+ // Attempt to assign "Pro Audio" characteristic to thread\r
+  HMODULE AvrtDll = LoadLibraryW( L"AVRT.dll" );
+ if ( AvrtDll ) {\r
+ DWORD taskIndex = 0;\r
+ TAvSetMmThreadCharacteristicsPtr AvSetMmThreadCharacteristicsPtr = ( TAvSetMmThreadCharacteristicsPtr ) GetProcAddress( AvrtDll, "AvSetMmThreadCharacteristicsW" );\r
+ AvSetMmThreadCharacteristicsPtr( L"Pro Audio", &taskIndex );\r
+ FreeLibrary( AvrtDll );\r
+ }\r
+\r
+ // start capture stream if applicable\r
+ if ( captureAudioClient ) {\r
+ hr = captureAudioClient->GetMixFormat( &captureFormat );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format.";\r
+ goto Exit;\r
+ }\r
+\r
+ captureSrRatio = ( ( float ) captureFormat->nSamplesPerSec / stream_.sampleRate );\r
+\r
+    // initialize capture stream according to desired buffer size
+ float desiredBufferSize = stream_.bufferSize * captureSrRatio;\r
+ REFERENCE_TIME desiredBufferPeriod = ( REFERENCE_TIME ) ( ( float ) desiredBufferSize * 10000000 / captureFormat->nSamplesPerSec );\r
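+    // REFERENCE_TIME is expressed in 100-nanosecond units, hence the factor
+    // of 10000000: e.g. 512 frames at 48000 Hz ~= 106667 units (~10.7 ms)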
+\r
+ if ( !captureClient ) {\r
+ hr = captureAudioClient->Initialize( AUDCLNT_SHAREMODE_SHARED,\r
+ AUDCLNT_STREAMFLAGS_EVENTCALLBACK,\r
+ desiredBufferPeriod,\r
+ desiredBufferPeriod,\r
+ captureFormat,\r
+ NULL );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to initialize capture audio client.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = captureAudioClient->GetService( __uuidof( IAudioCaptureClient ),\r
+ ( void** ) &captureClient );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve capture client handle.";\r
+ goto Exit;\r
+ }\r
+\r
+ // configure captureEvent to trigger on every available capture buffer\r
+ captureEvent = CreateEvent( NULL, FALSE, FALSE, NULL );\r
+ if ( !captureEvent ) {\r
+ errorType = RtAudioError::SYSTEM_ERROR;\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to create capture event.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = captureAudioClient->SetEventHandle( captureEvent );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to set capture event handle.";\r
+ goto Exit;\r
+ }\r
+\r
+ ( ( WasapiHandle* ) stream_.apiHandle )->captureClient = captureClient;\r
+ ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent = captureEvent;\r
+ }\r
+\r
+ unsigned int inBufferSize = 0;\r
+ hr = captureAudioClient->GetBufferSize( &inBufferSize );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to get capture buffer size.";\r
+ goto Exit;\r
+ }\r
+\r
+ // scale outBufferSize according to stream->user sample rate ratio\r
+ unsigned int outBufferSize = ( unsigned int ) ( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT];\r
+ inBufferSize *= stream_.nDeviceChannels[INPUT];\r
+\r
+ // set captureBuffer size\r
+ captureBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[INPUT] ) );\r
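+    // The ring is sized to inBufferSize + outBufferSize so that a full WASAPI
+    // capture packet can be pushed even while a user-sized block is still
+    // waiting to be pulled.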
+\r
+ // reset the capture stream\r
+ hr = captureAudioClient->Reset();\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to reset capture stream.";\r
+ goto Exit;\r
+ }\r
+\r
+ // start the capture stream\r
+ hr = captureAudioClient->Start();\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to start capture stream.";\r
+ goto Exit;\r
+ }\r
+ }\r
+\r
+ // start render stream if applicable\r
+ if ( renderAudioClient ) {\r
+ hr = renderAudioClient->GetMixFormat( &renderFormat );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format.";\r
+ goto Exit;\r
+ }\r
+\r
+ renderSrRatio = ( ( float ) renderFormat->nSamplesPerSec / stream_.sampleRate );\r
+\r
+    // initialize render stream according to desired buffer size
+ float desiredBufferSize = stream_.bufferSize * renderSrRatio;\r
+ REFERENCE_TIME desiredBufferPeriod = ( REFERENCE_TIME ) ( ( float ) desiredBufferSize * 10000000 / renderFormat->nSamplesPerSec );\r
+\r
+ if ( !renderClient ) {\r
+ hr = renderAudioClient->Initialize( AUDCLNT_SHAREMODE_SHARED,\r
+ AUDCLNT_STREAMFLAGS_EVENTCALLBACK,\r
+ desiredBufferPeriod,\r
+ desiredBufferPeriod,\r
+ renderFormat,\r
+ NULL );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to initialize render audio client.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = renderAudioClient->GetService( __uuidof( IAudioRenderClient ),\r
+ ( void** ) &renderClient );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render client handle.";\r
+ goto Exit;\r
+ }\r
+\r
+ // configure renderEvent to trigger on every available render buffer\r
+ renderEvent = CreateEvent( NULL, FALSE, FALSE, NULL );\r
+ if ( !renderEvent ) {\r
+ errorType = RtAudioError::SYSTEM_ERROR;\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to create render event.";\r
+ goto Exit;\r
+ }\r
+\r
+ hr = renderAudioClient->SetEventHandle( renderEvent );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to set render event handle.";\r
+ goto Exit;\r
+ }\r
+\r
+ ( ( WasapiHandle* ) stream_.apiHandle )->renderClient = renderClient;\r
+ ( ( WasapiHandle* ) stream_.apiHandle )->renderEvent = renderEvent;\r
+ }\r
+\r
+ unsigned int outBufferSize = 0;\r
+ hr = renderAudioClient->GetBufferSize( &outBufferSize );\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to get render buffer size.";\r
+ goto Exit;\r
+ }\r
+\r
+ // scale inBufferSize according to user->stream sample rate ratio\r
+ unsigned int inBufferSize = ( unsigned int ) ( stream_.bufferSize * renderSrRatio ) * stream_.nDeviceChannels[OUTPUT];\r
+ outBufferSize *= stream_.nDeviceChannels[OUTPUT];\r
+\r
+ // set renderBuffer size\r
+ renderBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[OUTPUT] ) );\r
+\r
+ // reset the render stream\r
+ hr = renderAudioClient->Reset();\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to reset render stream.";\r
+ goto Exit;\r
+ }\r
+\r
+ // start the render stream\r
+ hr = renderAudioClient->Start();\r
+ if ( FAILED( hr ) ) {\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to start render stream.";\r
+ goto Exit;\r
+ }\r
+ }\r
+\r
+ if ( stream_.mode == INPUT ) {\r
+ convBuffSize = ( size_t ) ( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] );\r
+ deviceBuffSize = stream_.bufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] );\r
+ }\r
+ else if ( stream_.mode == OUTPUT ) {\r
+ convBuffSize = ( size_t ) ( stream_.bufferSize * renderSrRatio ) * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] );\r
+ deviceBuffSize = stream_.bufferSize * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] );\r
+ }\r
+ else if ( stream_.mode == DUPLEX ) {\r
+ convBuffSize = std::max( ( size_t ) ( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ),\r
+ ( size_t ) ( stream_.bufferSize * renderSrRatio ) * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] ) );\r
+ deviceBuffSize = std::max( stream_.bufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ),\r
+ stream_.bufferSize * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] ) );\r
+ }\r
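+  // Worked example with hypothetical values: bufferSize = 512, stereo
+  // FLOAT32 (4 bytes per sample) on both ends, captureSrRatio = 1.0 and
+  // renderSrRatio = 2.0 give convBuffSize = max( 4096, 8192 ) = 8192 bytes
+  // and deviceBuffSize = max( 4096, 4096 ) = 4096 bytes.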
+\r
+ convBuffer = ( char* ) malloc( convBuffSize );\r
+ stream_.deviceBuffer = ( char* ) malloc( deviceBuffSize );\r
+ if ( !convBuffer || !stream_.deviceBuffer ) {\r
+ errorType = RtAudioError::MEMORY_ERROR;\r
+ errorText_ = "RtApiWasapi::wasapiThread: Error allocating device buffer memory.";\r
+ goto Exit;\r
+ }\r
+\r
+ // stream process loop\r
+ while ( stream_.state != STREAM_STOPPING ) {\r
+ if ( !callbackPulled ) {\r
+ // Callback Input\r
+ // ==============\r
+ // 1. Pull callback buffer from inputBuffer\r
+ // 2. If 1. was successful: Convert callback buffer to user sample rate and channel count\r
+ // Convert callback buffer to user format\r
+\r
+ if ( captureAudioClient ) {\r
+ // Pull callback buffer from inputBuffer\r
+ callbackPulled = captureBuffer.pullBuffer( convBuffer,\r
+ ( unsigned int ) ( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT],\r
+ stream_.deviceFormat[INPUT] );\r
+\r
+ if ( callbackPulled ) {\r
+ // Convert callback buffer to user sample rate\r
+ convertBufferWasapi( stream_.deviceBuffer,\r
+ convBuffer,\r
+ stream_.nDeviceChannels[INPUT],\r
+ captureFormat->nSamplesPerSec,\r
+ stream_.sampleRate,\r
+ ( unsigned int ) ( stream_.bufferSize * captureSrRatio ),\r
+ convBufferSize,\r
+ stream_.deviceFormat[INPUT] );\r
+\r
+ if ( stream_.doConvertBuffer[INPUT] ) {\r
+ // Convert callback buffer to user format\r
+ convertBuffer( stream_.userBuffer[INPUT],\r
+ stream_.deviceBuffer,\r
+ stream_.convertInfo[INPUT] );\r
+ }\r
+ else {\r
+          // no further conversion; simply copy deviceBuffer to userBuffer
+ memcpy( stream_.userBuffer[INPUT],\r
+ stream_.deviceBuffer,\r
+ stream_.bufferSize * stream_.nUserChannels[INPUT] * formatBytes( stream_.userFormat ) );\r
+ }\r
+ }\r
+ }\r
+ else {\r
+ // if there is no capture stream, set callbackPulled flag\r
+ callbackPulled = true;\r
+ }\r
+\r
+ // Execute Callback\r
+ // ================\r
+ // 1. Execute user callback method\r
+ // 2. Handle return value from callback\r
+\r
+ // if callback has not requested the stream to stop\r
+ if ( callbackPulled && !callbackStopped ) {\r
+ // Execute user callback method\r
+ callbackResult = callback( stream_.userBuffer[OUTPUT],\r
+ stream_.userBuffer[INPUT],\r
+ stream_.bufferSize,\r
+ getStreamTime(),\r
+ captureFlags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY ? RTAUDIO_INPUT_OVERFLOW : 0,\r
+ stream_.callbackInfo.userData );\r
+\r
+ // Handle return value from callback\r
+ if ( callbackResult == 1 ) {\r
+ // instantiate a thread to stop this thread\r
+ HANDLE threadHandle = CreateThread( NULL, 0, stopWasapiThread, this, 0, NULL );\r
+ if ( !threadHandle ) {\r
+ errorType = RtAudioError::THREAD_ERROR;\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to instantiate stream stop thread.";\r
+ goto Exit;\r
+ }\r
+ else if ( !CloseHandle( threadHandle ) ) {\r
+ errorType = RtAudioError::THREAD_ERROR;\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to close stream stop thread handle.";\r
+ goto Exit;\r
+ }\r
+\r
+ callbackStopped = true;\r
+ }\r
+ else if ( callbackResult == 2 ) {\r
+ // instantiate a thread to stop this thread\r
+ HANDLE threadHandle = CreateThread( NULL, 0, abortWasapiThread, this, 0, NULL );\r
+ if ( !threadHandle ) {\r
+ errorType = RtAudioError::THREAD_ERROR;\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to instantiate stream abort thread.";\r
+ goto Exit;\r
+ }\r
+ else if ( !CloseHandle( threadHandle ) ) {\r
+ errorType = RtAudioError::THREAD_ERROR;\r
+ errorText_ = "RtApiWasapi::wasapiThread: Unable to close stream abort thread handle.";\r
+ goto Exit;\r
+ }\r
+\r
+ callbackStopped = true;\r
+ }\r