X-Git-Url: https://main.carlh.net/gitweb/?a=blobdiff_plain;f=RtAudio.cpp;h=8037d2a886e8645c574cea57fcf0995c15e0e492;hb=bd3db58ad06f739fecbf7afe7a5c5df5a366c700;hp=c15f9be5e3583d78c942d94a1b6afc20b16cd40a;hpb=5c442239939aaefc532a548452c74dc7d946d2a3;p=rtaudio-cdist.git

diff --git a/RtAudio.cpp b/RtAudio.cpp
index c15f9be..8037d2a 100644
--- a/RtAudio.cpp
+++ b/RtAudio.cpp
@@ -1,4 +1,4 @@
-/************************************************************************/
+/************************************************************************/
 /*!
   \class RtAudio
   \brief Realtime audio i/o C++ classes.
@@ -118,7 +118,7 @@ const unsigned int rtaudio_num_api_names =
 
 // The order here will control the order of RtAudio's API search in
 // the constructor.
-RtAudio::Api rtaudio_compiled_apis[] = {
+extern "C" const RtAudio::Api rtaudio_compiled_apis[] = {
 #if defined(__UNIX_JACK__)
   RtAudio::UNIX_JACK,
 #endif
@@ -148,16 +148,10 @@ RtAudio::Api rtaudio_compiled_apis[] = {
 #endif
   RtAudio::UNSPECIFIED,
 };
-const unsigned int rtaudio_num_compiled_apis =
+extern "C" const unsigned int rtaudio_num_compiled_apis =
   sizeof(rtaudio_compiled_apis)/sizeof(rtaudio_compiled_apis[0])-1;
 }
 
-static const std::vector<RtAudio::Api> init_compiledApis() {
-  return std::vector<RtAudio::Api>(
-    rtaudio_compiled_apis, rtaudio_compiled_apis + rtaudio_num_compiled_apis);
-}
-const std::vector<RtAudio::Api> RtAudio::compiledApis(init_compiledApis());
-
 // This is a compile-time check that rtaudio_num_api_names == RtAudio::NUM_APIS.
 // If the build breaks here, check that they match.
 template<bool b> class StaticAssert { private: StaticAssert() {} };
@@ -168,28 +162,20 @@ class StaticAssertions { StaticAssertions() {
 
 void RtAudio :: getCompiledApi( std::vector<RtAudio::Api> &apis )
 {
-  apis = compiledApis;
+  apis = std::vector<RtAudio::Api>(rtaudio_compiled_apis,
+                                   rtaudio_compiled_apis + rtaudio_num_compiled_apis);
 }
 
-const std::vector<RtAudio::Api>& RtAudio :: getCompiledApis()
+std::string RtAudio :: getApiName( RtAudio::Api api )
 {
-  return compiledApis;
-}
-
-const std::string RtAudio :: getCompiledApiName( RtAudio::Api api )
-{
-  if (api < 0 || api > RtAudio::NUM_APIS
-      || (std::find(RtAudio::compiledApis.begin(),
-                    RtAudio::compiledApis.end(), api) == RtAudio::compiledApis.end()))
+  if (api < 0 || api >= RtAudio::NUM_APIS)
     return "";
   return rtaudio_api_names[api][0];
 }
 
-const std::string RtAudio :: getCompiledApiDisplayName( RtAudio::Api api )
+std::string RtAudio :: getApiDisplayName( RtAudio::Api api )
 {
-  if (api < 0 || api > RtAudio::NUM_APIS
-      || (std::find(RtAudio::compiledApis.begin(),
-                    RtAudio::compiledApis.end(), api) == RtAudio::compiledApis.end()))
+  if (api < 0 || api >= RtAudio::NUM_APIS)
     return "Unknown";
   return rtaudio_api_names[api][1];
 }
@@ -197,10 +183,9 @@ const std::string RtAudio :: getCompiledApiDisplayName( RtAudio::Api api )
 RtAudio::Api RtAudio :: getCompiledApiByName( const std::string &name )
 {
   unsigned int i=0;
-  std::vector<RtAudio::Api>::const_iterator it;
-  for (it = compiledApis.begin(); it != compiledApis.end(); ++it, ++i)
-    if (name == rtaudio_api_names[*it][0])
-      return *it;
+  for (i = 0; i < rtaudio_num_compiled_apis; ++i)
+    if (name == rtaudio_api_names[rtaudio_compiled_apis[i]][0])
+      return rtaudio_compiled_apis[i];
   return RtAudio::UNSPECIFIED;
 }
 
@@ -1556,6 +1541,10 @@ void RtApiCore :: startStream( void )
     return;
   }
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   OSStatus result = noErr;
   CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
   if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
@@ -2044,7 +2033,7 @@ unsigned int RtApiJack
:: getDeviceCount( void ) const char **ports; std::string port, previousPort; unsigned int nChannels = 0, nDevices = 0; - ports = jack_get_ports( client, NULL, NULL, 0 ); + ports = jack_get_ports( client, NULL, JACK_DEFAULT_AUDIO_TYPE, 0 ); if ( ports ) { // Parse the port names up to the first colon (:). size_t iColon = 0; @@ -2083,7 +2072,7 @@ RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device ) const char **ports; std::string port, previousPort; unsigned int nPorts = 0, nDevices = 0; - ports = jack_get_ports( client, NULL, NULL, 0 ); + ports = jack_get_ports( client, NULL, JACK_DEFAULT_AUDIO_TYPE, 0 ); if ( ports ) { // Parse the port names up to the first colon (:). size_t iColon = 0; @@ -2118,7 +2107,7 @@ RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device ) // Count the available ports containing the client name as device // channels. Jack "input ports" equal RtAudio output channels. unsigned int nChannels = 0; - ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsInput ); + ports = jack_get_ports( client, info.name.c_str(), JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput ); if ( ports ) { while ( ports[ nChannels ] ) nChannels++; free( ports ); @@ -2127,7 +2116,7 @@ RtAudio::DeviceInfo RtApiJack :: getDeviceInfo( unsigned int device ) // Jack "output ports" equal RtAudio input channels. nChannels = 0; - ports = jack_get_ports( client, info.name.c_str(), NULL, JackPortIsOutput ); + ports = jack_get_ports( client, info.name.c_str(), JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput ); if ( ports ) { while ( ports[ nChannels ] ) nChannels++; free( ports ); @@ -2239,7 +2228,7 @@ bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigne const char **ports; std::string port, previousPort, deviceName; unsigned int nPorts = 0, nDevices = 0; - ports = jack_get_ports( client, NULL, NULL, 0 ); + ports = jack_get_ports( client, NULL, JACK_DEFAULT_AUDIO_TYPE, 0 ); if ( ports ) { // Parse the port names up to the first colon (:). size_t iColon = 0; @@ -2263,22 +2252,24 @@ bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigne return FAILURE; } - // Count the available ports containing the client name as device - // channels. Jack "input ports" equal RtAudio output channels. - unsigned int nChannels = 0; unsigned long flag = JackPortIsInput; if ( mode == INPUT ) flag = JackPortIsOutput; - ports = jack_get_ports( client, deviceName.c_str(), NULL, flag ); - if ( ports ) { - while ( ports[ nChannels ] ) nChannels++; - free( ports ); - } - // Compare the jack ports for specified client to the requested number of channels. - if ( nChannels < (channels + firstChannel) ) { - errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ")."; - errorText_ = errorStream_.str(); - return FAILURE; + if ( ! (options && (options->flags & RTAUDIO_JACK_DONT_CONNECT)) ) { + // Count the available ports containing the client name as device + // channels. Jack "input ports" equal RtAudio output channels. + unsigned int nChannels = 0; + ports = jack_get_ports( client, deviceName.c_str(), JACK_DEFAULT_AUDIO_TYPE, flag ); + if ( ports ) { + while ( ports[ nChannels ] ) nChannels++; + free( ports ); + } + // Compare the jack ports for specified client to the requested number of channels. 
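+      // (Both this check and the port count above are skipped when the
+      // RTAUDIO_JACK_DONT_CONNECT flag is set, since RtAudio then makes no
+      // port connections of its own.)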
+ if ( nChannels < (channels + firstChannel) ) { + errorStream_ << "RtApiJack::probeDeviceOpen: requested number of channels (" << channels << ") + offset (" << firstChannel << ") not found for specified device (" << device << ":" << deviceName << ")."; + errorText_ = errorStream_.str(); + return FAILURE; + } } // Check the jack server sample rate. @@ -2292,7 +2283,7 @@ bool RtApiJack :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigne stream_.sampleRate = jackRate; // Get the latency of the JACK port. - ports = jack_get_ports( client, deviceName.c_str(), NULL, flag ); + ports = jack_get_ports( client, deviceName.c_str(), JACK_DEFAULT_AUDIO_TYPE, flag ); if ( ports[ firstChannel ] ) { // Added by Ge Wang jack_latency_callback_mode_t cbmode = (mode == INPUT ? JackCaptureLatency : JackPlaybackLatency); @@ -2512,6 +2503,10 @@ void RtApiJack :: startStream( void ) return; } + #if defined( HAVE_GETTIMEOFDAY ) + gettimeofday( &stream_.lastTickTimestamp, NULL ); + #endif + JackHandle *handle = (JackHandle *) stream_.apiHandle; int result = jack_activate( handle->client ); if ( result ) { @@ -2524,7 +2519,7 @@ void RtApiJack :: startStream( void ) // Get the list of available ports. if ( shouldAutoconnect_ && (stream_.mode == OUTPUT || stream_.mode == DUPLEX) ) { result = 1; - ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), NULL, JackPortIsInput); + ports = jack_get_ports( handle->client, handle->deviceName[0].c_str(), JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput); if ( ports == NULL) { errorText_ = "RtApiJack::startStream(): error determining available JACK input ports!"; goto unlock; @@ -2548,7 +2543,7 @@ void RtApiJack :: startStream( void ) if ( shouldAutoconnect_ && (stream_.mode == INPUT || stream_.mode == DUPLEX) ) { result = 1; - ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), NULL, JackPortIsOutput ); + ports = jack_get_ports( handle->client, handle->deviceName[1].c_str(), JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput ); if ( ports == NULL) { errorText_ = "RtApiJack::startStream(): error determining available JACK output ports!"; goto unlock; @@ -3234,8 +3229,8 @@ bool RtApiAsio :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigne result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks ); if ( result != ASE_OK ) { // Standard method failed. This can happen with strict/misbehaving drivers that return valid buffer size ranges - // but only accept the preferred buffer size as parameter for ASIOCreateBuffers. eg. Creatives ASIO driver - // in that case, let's be naïve and try that instead + // but only accept the preferred buffer size as parameter for ASIOCreateBuffers (e.g. Creative's ASIO driver). + // In that case, let's be naïve and try that instead. 
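+      // (i.e. retry ASIOCreateBuffers() below with the driver's preferred buffer size.)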
      *bufferSize = preferSize;
      stream_.bufferSize = *bufferSize;
      result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks );
@@ -3391,6 +3386,10 @@ void RtApiAsio :: startStream()
     return;
   }
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
   ASIOError result = ASIOStart();
   if ( result != ASE_OK ) {
@@ -3758,11 +3757,32 @@ static const char* getAsioErrorString( ASIOError result )
 #ifndef INITGUID
   #define INITGUID
 #endif
+
+#include <mfapi.h>
+#include <mferror.h>
+#include <mfplay.h>
+#include <mftransform.h>
+#include <wmcodecdsp.h>
+
 #include <audioclient.h>
 #include <avrt.h>
 #include <mmdeviceapi.h>
 #include <functiondiscoverykeys_devpkey.h>
-#include 
+
+#ifndef MF_E_TRANSFORM_NEED_MORE_INPUT
+  #define MF_E_TRANSFORM_NEED_MORE_INPUT _HRESULT_TYPEDEF_(0xc00d6d72)
+#endif
+
+#ifndef MFSTARTUP_NOSOCKET
+  #define MFSTARTUP_NOSOCKET 0x1
+#endif
+
+#ifdef _MSC_VER
+  #pragma comment( lib, "ksuser" )
+  #pragma comment( lib, "mfplat.lib" )
+  #pragma comment( lib, "mfuuid.lib" )
+  #pragma comment( lib, "wmcodecdspuuid" )
+#endif
 
 //=============================================================================
 
@@ -3936,6 +3956,198 @@ private:
 
 //-----------------------------------------------------------------------------
 
+// In order to satisfy WASAPI's buffer requirements, we need a means of converting sample rate
+// between HW and the user. The WasapiResampler class is used to perform this conversion between
+// HwIn->UserIn and UserOut->HwOut during the stream callback loop.
+class WasapiResampler
+{
+public:
+  WasapiResampler( bool isFloat, unsigned int bitsPerSample, unsigned int channelCount,
+                   unsigned int inSampleRate, unsigned int outSampleRate )
+    : _bytesPerSample( bitsPerSample / 8 )
+    , _channelCount( channelCount )
+    , _sampleRatio( ( float ) outSampleRate / inSampleRate )
+    , _transformUnk( NULL )
+    , _transform( NULL )
+    , _mediaType( NULL )
+    , _inputMediaType( NULL )
+    , _outputMediaType( NULL )
+
+    #ifdef __IWMResamplerProps_FWD_DEFINED__
+    , _resamplerProps( NULL )
+    #endif
+  {
+    // 1. Initialization
+
+    MFStartup( MF_VERSION, MFSTARTUP_NOSOCKET );
+
+    // 2. Create Resampler Transform Object
+
+    CoCreateInstance( CLSID_CResamplerMediaObject, NULL, CLSCTX_INPROC_SERVER,
+                      IID_IUnknown, ( void** ) &_transformUnk );
+
+    _transformUnk->QueryInterface( IID_PPV_ARGS( &_transform ) );
+
+    #ifdef __IWMResamplerProps_FWD_DEFINED__
+    _transformUnk->QueryInterface( IID_PPV_ARGS( &_resamplerProps ) );
+    _resamplerProps->SetHalfFilterLength( 60 ); // best conversion quality
+    #endif
+
+    // 3. Specify input / output format
+
+    MFCreateMediaType( &_mediaType );
+    _mediaType->SetGUID( MF_MT_MAJOR_TYPE, MFMediaType_Audio );
+    _mediaType->SetGUID( MF_MT_SUBTYPE, isFloat ?
MFAudioFormat_Float : MFAudioFormat_PCM ); + _mediaType->SetUINT32( MF_MT_AUDIO_NUM_CHANNELS, channelCount ); + _mediaType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, inSampleRate ); + _mediaType->SetUINT32( MF_MT_AUDIO_BLOCK_ALIGNMENT, _bytesPerSample * channelCount ); + _mediaType->SetUINT32( MF_MT_AUDIO_AVG_BYTES_PER_SECOND, _bytesPerSample * channelCount * inSampleRate ); + _mediaType->SetUINT32( MF_MT_AUDIO_BITS_PER_SAMPLE, bitsPerSample ); + _mediaType->SetUINT32( MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE ); + + MFCreateMediaType( &_inputMediaType ); + _mediaType->CopyAllItems( _inputMediaType ); + + _transform->SetInputType( 0, _inputMediaType, 0 ); + + MFCreateMediaType( &_outputMediaType ); + _mediaType->CopyAllItems( _outputMediaType ); + + _outputMediaType->SetUINT32( MF_MT_AUDIO_SAMPLES_PER_SECOND, outSampleRate ); + _outputMediaType->SetUINT32( MF_MT_AUDIO_AVG_BYTES_PER_SECOND, _bytesPerSample * channelCount * outSampleRate ); + + _transform->SetOutputType( 0, _outputMediaType, 0 ); + + // 4. Send stream start messages to Resampler + + _transform->ProcessMessage( MFT_MESSAGE_COMMAND_FLUSH, 0 ); + _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0 ); + _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0 ); + } + + ~WasapiResampler() + { + // 8. Send stream stop messages to Resampler + + _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0 ); + _transform->ProcessMessage( MFT_MESSAGE_NOTIFY_END_STREAMING, 0 ); + + // 9. Cleanup + + MFShutdown(); + + SAFE_RELEASE( _transformUnk ); + SAFE_RELEASE( _transform ); + SAFE_RELEASE( _mediaType ); + SAFE_RELEASE( _inputMediaType ); + SAFE_RELEASE( _outputMediaType ); + + #ifdef __IWMResamplerProps_FWD_DEFINED__ + SAFE_RELEASE( _resamplerProps ); + #endif + } + + void Convert( char* outBuffer, const char* inBuffer, unsigned int inSampleCount, unsigned int& outSampleCount ) + { + unsigned int inputBufferSize = _bytesPerSample * _channelCount * inSampleCount; + if ( _sampleRatio == 1 ) + { + // no sample rate conversion required + memcpy( outBuffer, inBuffer, inputBufferSize ); + outSampleCount = inSampleCount; + return; + } + + unsigned int outputBufferSize = ( unsigned int ) ceilf( inputBufferSize * _sampleRatio ) + ( _bytesPerSample * _channelCount ); + + IMFMediaBuffer* rInBuffer; + IMFSample* rInSample; + BYTE* rInByteBuffer = NULL; + + // 5. Create Sample object from input data + + MFCreateMemoryBuffer( inputBufferSize, &rInBuffer ); + + rInBuffer->Lock( &rInByteBuffer, NULL, NULL ); + memcpy( rInByteBuffer, inBuffer, inputBufferSize ); + rInBuffer->Unlock(); + rInByteBuffer = NULL; + + rInBuffer->SetCurrentLength( inputBufferSize ); + + MFCreateSample( &rInSample ); + rInSample->AddBuffer( rInBuffer ); + + // 6. Pass input data to Resampler + + _transform->ProcessInput( 0, rInSample, 0 ); + + SAFE_RELEASE( rInBuffer ); + SAFE_RELEASE( rInSample ); + + // 7. 
Perform sample rate conversion + + IMFMediaBuffer* rOutBuffer = NULL; + BYTE* rOutByteBuffer = NULL; + + MFT_OUTPUT_DATA_BUFFER rOutDataBuffer; + DWORD rStatus; + DWORD rBytes = outputBufferSize; // maximum bytes accepted per ProcessOutput + + // 7.1 Create Sample object for output data + + memset( &rOutDataBuffer, 0, sizeof rOutDataBuffer ); + MFCreateSample( &( rOutDataBuffer.pSample ) ); + MFCreateMemoryBuffer( rBytes, &rOutBuffer ); + rOutDataBuffer.pSample->AddBuffer( rOutBuffer ); + rOutDataBuffer.dwStreamID = 0; + rOutDataBuffer.dwStatus = 0; + rOutDataBuffer.pEvents = NULL; + + // 7.2 Get output data from Resampler + + if ( _transform->ProcessOutput( 0, 1, &rOutDataBuffer, &rStatus ) == MF_E_TRANSFORM_NEED_MORE_INPUT ) + { + outSampleCount = 0; + SAFE_RELEASE( rOutBuffer ); + SAFE_RELEASE( rOutDataBuffer.pSample ); + return; + } + + // 7.3 Write output data to outBuffer + + SAFE_RELEASE( rOutBuffer ); + rOutDataBuffer.pSample->ConvertToContiguousBuffer( &rOutBuffer ); + rOutBuffer->GetCurrentLength( &rBytes ); + + rOutBuffer->Lock( &rOutByteBuffer, NULL, NULL ); + memcpy( outBuffer, rOutByteBuffer, rBytes ); + rOutBuffer->Unlock(); + rOutByteBuffer = NULL; + + outSampleCount = rBytes / _bytesPerSample / _channelCount; + SAFE_RELEASE( rOutBuffer ); + SAFE_RELEASE( rOutDataBuffer.pSample ); + } + +private: + unsigned int _bytesPerSample; + unsigned int _channelCount; + float _sampleRatio; + + IUnknown* _transformUnk; + IMFTransform* _transform; + IMFMediaType* _mediaType; + IMFMediaType* _inputMediaType; + IMFMediaType* _outputMediaType; + + #ifdef __IWMResamplerProps_FWD_DEFINED__ + IWMResamplerProps* _resamplerProps; + #endif +}; + +//----------------------------------------------------------------------------- + // A structure to hold various information related to the WASAPI implementation. struct WasapiHandle { @@ -3970,10 +4182,9 @@ RtApiWasapi::RtApiWasapi() CLSCTX_ALL, __uuidof( IMMDeviceEnumerator ), ( void** ) &deviceEnumerator_ ); - if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::RtApiWasapi: Unable to instantiate device enumerator"; - error( RtAudioError::DRIVER_ERROR ); - } + // If this runs on an old Windows, it will fail. Ignore and proceed. 
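+  // (getDeviceCount() below guards against a NULL enumerator and reports zero devices.)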
+ if ( FAILED( hr ) ) + deviceEnumerator_ = NULL; } //----------------------------------------------------------------------------- @@ -4000,6 +4211,9 @@ unsigned int RtApiWasapi::getDeviceCount( void ) IMMDeviceCollection* captureDevices = NULL; IMMDeviceCollection* renderDevices = NULL; + if ( !deviceEnumerator_ ) + return 0; + // Count capture devices errorText_.clear(); HRESULT hr = deviceEnumerator_->EnumAudioEndpoints( eCapture, DEVICE_STATE_ACTIVE, &captureDevices ); @@ -4201,11 +4415,14 @@ RtAudio::DeviceInfo RtApiWasapi::getDeviceInfo( unsigned int device ) info.duplexChannels = 0; } - // sample rates (WASAPI only supports the one native sample rate) - info.preferredSampleRate = deviceFormat->nSamplesPerSec; - + // sample rates info.sampleRates.clear(); - info.sampleRates.push_back( deviceFormat->nSamplesPerSec ); + + // allow support for all sample rates as we have a built-in sample rate converter + for ( unsigned int i = 0; i < MAX_SAMPLE_RATES; i++ ) { + info.sampleRates.push_back( SAMPLE_RATES[i] ); + } + info.preferredSampleRate = deviceFormat->nSamplesPerSec; // native format info.nativeFormats = 0; @@ -4346,6 +4563,10 @@ void RtApiWasapi::startStream( void ) return; } + #if defined( HAVE_GETTIMEOFDAY ) + gettimeofday( &stream_.lastTickTimestamp, NULL ); + #endif + // update stream state stream_.state = STREAM_RUNNING; @@ -4482,7 +4703,6 @@ bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigne WAVEFORMATEX* deviceFormat = NULL; unsigned int bufferBytes; stream_.state = STREAM_STOPPED; - RtAudio::DeviceInfo deviceInfo; // create API Handle if not already created if ( !stream_.apiHandle ) @@ -4523,21 +4743,7 @@ bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigne goto Exit; } - deviceInfo = getDeviceInfo( device ); - - // validate sample rate - if ( sampleRate != deviceInfo.preferredSampleRate ) - { - errorType = RtAudioError::INVALID_USE; - std::stringstream ss; - ss << "RtApiWasapi::probeDeviceOpen: " << sampleRate - << "Hz sample rate not supported. 
This device only supports " - << deviceInfo.preferredSampleRate << "Hz."; - errorText_ = ss.str(); - goto Exit; - } - - // determine whether index falls within capture or render devices + // if device index falls within capture devices if ( device >= renderDeviceCount ) { if ( mode != INPUT ) { errorType = RtAudioError::INVALID_USE; @@ -4557,28 +4763,66 @@ bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigne hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL, NULL, ( void** ) &captureAudioClient ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device audio client."; + errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device audio client."; goto Exit; } hr = captureAudioClient->GetMixFormat( &deviceFormat ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device mix format."; + errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve capture device mix format."; goto Exit; } stream_.nDeviceChannels[mode] = deviceFormat->nChannels; captureAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] ); } - else { - if ( mode != OUTPUT ) { - errorType = RtAudioError::INVALID_USE; - errorText_ = "RtApiWasapi::probeDeviceOpen: Render device selected as input device."; + + // if device index falls within render devices and is configured for loopback + if ( device < renderDeviceCount && mode == INPUT ) + { + // if renderAudioClient is not initialised, initialise it now + IAudioClient*& renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient; + if ( !renderAudioClient ) + { + probeDeviceOpen( device, OUTPUT, channels, firstChannel, sampleRate, format, bufferSize, options ); + } + + // retrieve captureAudioClient from devicePtr + IAudioClient*& captureAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient; + + hr = renderDevices->Item( device, &devicePtr ); + if ( FAILED( hr ) ) { + errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device handle."; + goto Exit; + } + + hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL, + NULL, ( void** ) &captureAudioClient ); + if ( FAILED( hr ) ) { + errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device audio client."; goto Exit; } - // retrieve renderAudioClient from devicePtr + hr = captureAudioClient->GetMixFormat( &deviceFormat ); + if ( FAILED( hr ) ) { + errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device mix format."; + goto Exit; + } + + stream_.nDeviceChannels[mode] = deviceFormat->nChannels; + captureAudioClient->GetStreamLatency( ( long long* ) &stream_.latency[mode] ); + } + + // if device index falls within render devices and is configured for output + if ( device < renderDeviceCount && mode == OUTPUT ) + { + // if renderAudioClient is already initialised, don't initialise it again IAudioClient*& renderAudioClient = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient; + if ( renderAudioClient ) + { + methodResult = SUCCESS; + goto Exit; + } hr = renderDevices->Item( device, &devicePtr ); if ( FAILED( hr ) ) { @@ -4589,13 +4833,13 @@ bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigne hr = devicePtr->Activate( __uuidof( IAudioClient ), CLSCTX_ALL, NULL, ( void** ) &renderAudioClient ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device audio client."; + errorText_ = "RtApiWasapi::probeDeviceOpen: 
Unable to retrieve render device audio client."; goto Exit; } hr = renderAudioClient->GetMixFormat( &deviceFormat ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve device mix format."; + errorText_ = "RtApiWasapi::probeDeviceOpen: Unable to retrieve render device mix format."; goto Exit; } @@ -4620,7 +4864,7 @@ bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigne stream_.nUserChannels[mode] = channels; stream_.channelOffset[mode] = firstChannel; stream_.userFormat = format; - stream_.deviceFormat[mode] = deviceInfo.nativeFormats; + stream_.deviceFormat[mode] = getDeviceInfo( device ).nativeFormats; if ( options && options->flags & RTAUDIO_NONINTERLEAVED ) stream_.userInterleaved = false; @@ -4631,7 +4875,8 @@ bool RtApiWasapi::probeDeviceOpen( unsigned int device, StreamMode mode, unsigne // Set flags for buffer conversion. stream_.doConvertBuffer[mode] = false; if ( stream_.userFormat != stream_.deviceFormat[mode] || - stream_.nUserChannels != stream_.nDeviceChannels ) + stream_.nUserChannels[0] != stream_.nDeviceChannels[0] || + stream_.nUserChannels[1] != stream_.nDeviceChannels[1] ) stream_.doConvertBuffer[mode] = true; else if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] && stream_.nUserChannels[mode] > 1 ) @@ -4720,8 +4965,12 @@ void RtApiWasapi::wasapiThread() WAVEFORMATEX* captureFormat = NULL; WAVEFORMATEX* renderFormat = NULL; + float captureSrRatio = 0.0f; + float renderSrRatio = 0.0f; WasapiBuffer captureBuffer; WasapiBuffer renderBuffer; + WasapiResampler* captureResampler = NULL; + WasapiResampler* renderResampler = NULL; // declare local stream variables RtAudioCallback callback = ( RtAudioCallback ) stream_.callbackInfo.callback; @@ -4729,14 +4978,19 @@ void RtApiWasapi::wasapiThread() unsigned long captureFlags = 0; unsigned int bufferFrameCount = 0; unsigned int numFramesPadding = 0; - bool callbackPushed = false; + unsigned int convBufferSize = 0; + bool loopbackEnabled = stream_.device[INPUT] == stream_.device[OUTPUT]; + bool callbackPushed = true; bool callbackPulled = false; bool callbackStopped = false; int callbackResult = 0; + // convBuffer is used to store converted buffers between WASAPI and the user + char* convBuffer = NULL; + unsigned int convBuffSize = 0; unsigned int deviceBuffSize = 0; - errorText_.clear(); + std::string errorText; RtAudioError::Type errorType = RtAudioError::DRIVER_ERROR; // Attempt to assign "Pro Audio" characteristic to thread @@ -4752,59 +5006,68 @@ void RtApiWasapi::wasapiThread() if ( captureAudioClient ) { hr = captureAudioClient->GetMixFormat( &captureFormat ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format."; + errorText = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format."; goto Exit; } - // initialize capture stream according to desire buffer size - REFERENCE_TIME desiredBufferPeriod = ( REFERENCE_TIME ) ( ( float ) stream_.bufferSize * 10000000 / captureFormat->nSamplesPerSec ); + // init captureResampler + captureResampler = new WasapiResampler( stream_.deviceFormat[INPUT] == RTAUDIO_FLOAT32 || stream_.deviceFormat[INPUT] == RTAUDIO_FLOAT64, + formatBytes( stream_.deviceFormat[INPUT] ) * 8, stream_.nDeviceChannels[INPUT], + captureFormat->nSamplesPerSec, stream_.sampleRate ); + + captureSrRatio = ( ( float ) captureFormat->nSamplesPerSec / stream_.sampleRate ); if ( !captureClient ) { hr = captureAudioClient->Initialize( AUDCLNT_SHAREMODE_SHARED, - 
AUDCLNT_STREAMFLAGS_EVENTCALLBACK, - desiredBufferPeriod, - desiredBufferPeriod, + loopbackEnabled ? AUDCLNT_STREAMFLAGS_LOOPBACK : AUDCLNT_STREAMFLAGS_EVENTCALLBACK, + 0, + 0, captureFormat, NULL ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to initialize capture audio client."; + errorText = "RtApiWasapi::wasapiThread: Unable to initialize capture audio client."; goto Exit; } hr = captureAudioClient->GetService( __uuidof( IAudioCaptureClient ), ( void** ) &captureClient ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve capture client handle."; + errorText = "RtApiWasapi::wasapiThread: Unable to retrieve capture client handle."; goto Exit; } - // configure captureEvent to trigger on every available capture buffer - captureEvent = CreateEvent( NULL, FALSE, FALSE, NULL ); - if ( !captureEvent ) { - errorType = RtAudioError::SYSTEM_ERROR; - errorText_ = "RtApiWasapi::wasapiThread: Unable to create capture event."; - goto Exit; - } + // don't configure captureEvent if in loopback mode + if ( !loopbackEnabled ) + { + // configure captureEvent to trigger on every available capture buffer + captureEvent = CreateEvent( NULL, FALSE, FALSE, NULL ); + if ( !captureEvent ) { + errorType = RtAudioError::SYSTEM_ERROR; + errorText = "RtApiWasapi::wasapiThread: Unable to create capture event."; + goto Exit; + } - hr = captureAudioClient->SetEventHandle( captureEvent ); - if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to set capture event handle."; - goto Exit; + hr = captureAudioClient->SetEventHandle( captureEvent ); + if ( FAILED( hr ) ) { + errorText = "RtApiWasapi::wasapiThread: Unable to set capture event handle."; + goto Exit; + } + + ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent = captureEvent; } ( ( WasapiHandle* ) stream_.apiHandle )->captureClient = captureClient; - ( ( WasapiHandle* ) stream_.apiHandle )->captureEvent = captureEvent; } unsigned int inBufferSize = 0; hr = captureAudioClient->GetBufferSize( &inBufferSize ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to get capture buffer size."; + errorText = "RtApiWasapi::wasapiThread: Unable to get capture buffer size."; goto Exit; } // scale outBufferSize according to stream->user sample rate ratio - unsigned int outBufferSize = ( unsigned int ) stream_.bufferSize * stream_.nDeviceChannels[INPUT]; + unsigned int outBufferSize = ( unsigned int ) ceilf( stream_.bufferSize * captureSrRatio ) * stream_.nDeviceChannels[INPUT]; inBufferSize *= stream_.nDeviceChannels[INPUT]; // set captureBuffer size @@ -4813,14 +5076,14 @@ void RtApiWasapi::wasapiThread() // reset the capture stream hr = captureAudioClient->Reset(); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to reset capture stream."; + errorText = "RtApiWasapi::wasapiThread: Unable to reset capture stream."; goto Exit; } // start the capture stream hr = captureAudioClient->Start(); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to start capture stream."; + errorText = "RtApiWasapi::wasapiThread: Unable to start capture stream."; goto Exit; } } @@ -4829,29 +5092,33 @@ void RtApiWasapi::wasapiThread() if ( renderAudioClient ) { hr = renderAudioClient->GetMixFormat( &renderFormat ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format."; + errorText = "RtApiWasapi::wasapiThread: Unable to retrieve device mix format."; goto Exit; } - // initialize render stream according to 
desire buffer size - REFERENCE_TIME desiredBufferPeriod = ( REFERENCE_TIME ) ( ( float ) stream_.bufferSize * 10000000 / renderFormat->nSamplesPerSec ); + // init renderResampler + renderResampler = new WasapiResampler( stream_.deviceFormat[OUTPUT] == RTAUDIO_FLOAT32 || stream_.deviceFormat[OUTPUT] == RTAUDIO_FLOAT64, + formatBytes( stream_.deviceFormat[OUTPUT] ) * 8, stream_.nDeviceChannels[OUTPUT], + stream_.sampleRate, renderFormat->nSamplesPerSec ); + + renderSrRatio = ( ( float ) renderFormat->nSamplesPerSec / stream_.sampleRate ); if ( !renderClient ) { hr = renderAudioClient->Initialize( AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_EVENTCALLBACK, - desiredBufferPeriod, - desiredBufferPeriod, + 0, + 0, renderFormat, NULL ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to initialize render audio client."; + errorText = "RtApiWasapi::wasapiThread: Unable to initialize render audio client."; goto Exit; } hr = renderAudioClient->GetService( __uuidof( IAudioRenderClient ), ( void** ) &renderClient ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render client handle."; + errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render client handle."; goto Exit; } @@ -4859,13 +5126,13 @@ void RtApiWasapi::wasapiThread() renderEvent = CreateEvent( NULL, FALSE, FALSE, NULL ); if ( !renderEvent ) { errorType = RtAudioError::SYSTEM_ERROR; - errorText_ = "RtApiWasapi::wasapiThread: Unable to create render event."; + errorText = "RtApiWasapi::wasapiThread: Unable to create render event."; goto Exit; } hr = renderAudioClient->SetEventHandle( renderEvent ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to set render event handle."; + errorText = "RtApiWasapi::wasapiThread: Unable to set render event handle."; goto Exit; } @@ -4876,12 +5143,12 @@ void RtApiWasapi::wasapiThread() unsigned int outBufferSize = 0; hr = renderAudioClient->GetBufferSize( &outBufferSize ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to get render buffer size."; + errorText = "RtApiWasapi::wasapiThread: Unable to get render buffer size."; goto Exit; } // scale inBufferSize according to user->stream sample rate ratio - unsigned int inBufferSize = ( unsigned int ) stream_.bufferSize * stream_.nDeviceChannels[OUTPUT]; + unsigned int inBufferSize = ( unsigned int ) ceilf( stream_.bufferSize * renderSrRatio ) * stream_.nDeviceChannels[OUTPUT]; outBufferSize *= stream_.nDeviceChannels[OUTPUT]; // set renderBuffer size @@ -4890,34 +5157,44 @@ void RtApiWasapi::wasapiThread() // reset the render stream hr = renderAudioClient->Reset(); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to reset render stream."; + errorText = "RtApiWasapi::wasapiThread: Unable to reset render stream."; goto Exit; } // start the render stream hr = renderAudioClient->Start(); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to start render stream."; + errorText = "RtApiWasapi::wasapiThread: Unable to start render stream."; goto Exit; } } - if ( stream_.mode == INPUT ) { - using namespace std; // for roundf + // malloc buffer memory + if ( stream_.mode == INPUT ) + { + using namespace std; // for ceilf + convBuffSize = ( size_t ) ( ceilf( stream_.bufferSize * captureSrRatio ) ) * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ); deviceBuffSize = stream_.bufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ); } - else if ( 
stream_.mode == OUTPUT ) { + else if ( stream_.mode == OUTPUT ) + { + convBuffSize = ( size_t ) ( ceilf( stream_.bufferSize * renderSrRatio ) ) * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] ); deviceBuffSize = stream_.bufferSize * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] ); } - else if ( stream_.mode == DUPLEX ) { + else if ( stream_.mode == DUPLEX ) + { + convBuffSize = std::max( ( size_t ) ( ceilf( stream_.bufferSize * captureSrRatio ) ) * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ), + ( size_t ) ( ceilf( stream_.bufferSize * renderSrRatio ) ) * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] ) ); deviceBuffSize = std::max( stream_.bufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ), stream_.bufferSize * stream_.nDeviceChannels[OUTPUT] * formatBytes( stream_.deviceFormat[OUTPUT] ) ); } + convBuffSize *= 2; // allow overflow for *SrRatio remainders + convBuffer = ( char* ) malloc( convBuffSize ); stream_.deviceBuffer = ( char* ) malloc( deviceBuffSize ); - if ( !stream_.deviceBuffer ) { + if ( !convBuffer || !stream_.deviceBuffer ) { errorType = RtAudioError::MEMORY_ERROR; - errorText_ = "RtApiWasapi::wasapiThread: Error allocating device buffer memory."; + errorText = "RtApiWasapi::wasapiThread: Error allocating device buffer memory."; goto Exit; } @@ -4927,15 +5204,46 @@ void RtApiWasapi::wasapiThread() // Callback Input // ============== // 1. Pull callback buffer from inputBuffer - // 2. If 1. was successful: Convert callback buffer to user format + // 2. If 1. was successful: Convert callback buffer to user sample rate and channel count + // Convert callback buffer to user format - if ( captureAudioClient ) { - // Pull callback buffer from inputBuffer - callbackPulled = captureBuffer.pullBuffer( stream_.deviceBuffer, - ( unsigned int ) stream_.bufferSize * stream_.nDeviceChannels[INPUT], - stream_.deviceFormat[INPUT] ); + if ( captureAudioClient ) + { + int samplesToPull = ( unsigned int ) floorf( stream_.bufferSize * captureSrRatio ); + if ( captureSrRatio != 1 ) + { + // account for remainders + samplesToPull--; + } + + convBufferSize = 0; + while ( convBufferSize < stream_.bufferSize ) + { + // Pull callback buffer from inputBuffer + callbackPulled = captureBuffer.pullBuffer( convBuffer, + samplesToPull * stream_.nDeviceChannels[INPUT], + stream_.deviceFormat[INPUT] ); + + if ( !callbackPulled ) + { + break; + } - if ( callbackPulled ) { + // Convert callback buffer to user sample rate + unsigned int deviceBufferOffset = convBufferSize * stream_.nDeviceChannels[INPUT] * formatBytes( stream_.deviceFormat[INPUT] ); + unsigned int convSamples = 0; + + captureResampler->Convert( stream_.deviceBuffer + deviceBufferOffset, + convBuffer, + samplesToPull, + convSamples ); + + convBufferSize += convSamples; + samplesToPull = 1; // now pull one sample at a time until we have stream_.bufferSize samples + } + + if ( callbackPulled ) + { if ( stream_.doConvertBuffer[INPUT] ) { // Convert callback buffer to user format convertBuffer( stream_.userBuffer[INPUT], @@ -4976,12 +5284,12 @@ void RtApiWasapi::wasapiThread() HANDLE threadHandle = CreateThread( NULL, 0, stopWasapiThread, this, 0, NULL ); if ( !threadHandle ) { errorType = RtAudioError::THREAD_ERROR; - errorText_ = "RtApiWasapi::wasapiThread: Unable to instantiate stream stop thread."; + errorText = "RtApiWasapi::wasapiThread: Unable to instantiate stream stop thread."; goto 
Exit; } else if ( !CloseHandle( threadHandle ) ) { errorType = RtAudioError::THREAD_ERROR; - errorText_ = "RtApiWasapi::wasapiThread: Unable to close stream stop thread handle."; + errorText = "RtApiWasapi::wasapiThread: Unable to close stream stop thread handle."; goto Exit; } @@ -4992,12 +5300,12 @@ void RtApiWasapi::wasapiThread() HANDLE threadHandle = CreateThread( NULL, 0, abortWasapiThread, this, 0, NULL ); if ( !threadHandle ) { errorType = RtAudioError::THREAD_ERROR; - errorText_ = "RtApiWasapi::wasapiThread: Unable to instantiate stream abort thread."; + errorText = "RtApiWasapi::wasapiThread: Unable to instantiate stream abort thread."; goto Exit; } else if ( !CloseHandle( threadHandle ) ) { errorType = RtAudioError::THREAD_ERROR; - errorText_ = "RtApiWasapi::wasapiThread: Unable to close stream abort thread handle."; + errorText = "RtApiWasapi::wasapiThread: Unable to close stream abort thread handle."; goto Exit; } @@ -5009,20 +5317,39 @@ void RtApiWasapi::wasapiThread() // Callback Output // =============== // 1. Convert callback buffer to stream format - // 2. Push callback buffer into outputBuffer + // 2. Convert callback buffer to stream sample rate and channel count + // 3. Push callback buffer into outputBuffer - if ( renderAudioClient && callbackPulled ) { - if ( stream_.doConvertBuffer[OUTPUT] ) { - // Convert callback buffer to stream format - convertBuffer( stream_.deviceBuffer, - stream_.userBuffer[OUTPUT], - stream_.convertInfo[OUTPUT] ); + if ( renderAudioClient && callbackPulled ) + { + // if the last call to renderBuffer.PushBuffer() was successful + if ( callbackPushed || convBufferSize == 0 ) + { + if ( stream_.doConvertBuffer[OUTPUT] ) + { + // Convert callback buffer to stream format + convertBuffer( stream_.deviceBuffer, + stream_.userBuffer[OUTPUT], + stream_.convertInfo[OUTPUT] ); + } + else { + // no further conversion, simple copy userBuffer to deviceBuffer + memcpy( stream_.deviceBuffer, + stream_.userBuffer[OUTPUT], + stream_.bufferSize * stream_.nUserChannels[OUTPUT] * formatBytes( stream_.userFormat ) ); + } + + // Convert callback buffer to stream sample rate + renderResampler->Convert( convBuffer, + stream_.deviceBuffer, + stream_.bufferSize, + convBufferSize ); } // Push callback buffer into outputBuffer - callbackPushed = renderBuffer.pushBuffer( stream_.deviceBuffer, - stream_.bufferSize * stream_.nDeviceChannels[OUTPUT], + callbackPushed = renderBuffer.pushBuffer( convBuffer, + convBufferSize * stream_.nDeviceChannels[OUTPUT], stream_.deviceFormat[OUTPUT] ); } else { @@ -5039,7 +5366,7 @@ void RtApiWasapi::wasapiThread() if ( captureAudioClient ) { // if the callback input buffer was not pulled from captureBuffer, wait for next capture event if ( !callbackPulled ) { - WaitForSingleObject( captureEvent, INFINITE ); + WaitForSingleObject( loopbackEnabled ? 
renderEvent : captureEvent, INFINITE ); } // Get capture buffer from stream @@ -5047,7 +5374,7 @@ void RtApiWasapi::wasapiThread() &bufferFrameCount, &captureFlags, NULL, NULL ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve capture buffer."; + errorText = "RtApiWasapi::wasapiThread: Unable to retrieve capture buffer."; goto Exit; } @@ -5060,7 +5387,7 @@ void RtApiWasapi::wasapiThread() // Release capture buffer hr = captureClient->ReleaseBuffer( bufferFrameCount ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to release capture buffer."; + errorText = "RtApiWasapi::wasapiThread: Unable to release capture buffer."; goto Exit; } } @@ -5069,7 +5396,7 @@ void RtApiWasapi::wasapiThread() // Inform WASAPI that capture was unsuccessful hr = captureClient->ReleaseBuffer( 0 ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to release capture buffer."; + errorText = "RtApiWasapi::wasapiThread: Unable to release capture buffer."; goto Exit; } } @@ -5079,7 +5406,7 @@ void RtApiWasapi::wasapiThread() // Inform WASAPI that capture was unsuccessful hr = captureClient->ReleaseBuffer( 0 ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to release capture buffer."; + errorText = "RtApiWasapi::wasapiThread: Unable to release capture buffer."; goto Exit; } } @@ -5101,13 +5428,13 @@ void RtApiWasapi::wasapiThread() // Get render buffer from stream hr = renderAudioClient->GetBufferSize( &bufferFrameCount ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer size."; + errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer size."; goto Exit; } hr = renderAudioClient->GetCurrentPadding( &numFramesPadding ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer padding."; + errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer padding."; goto Exit; } @@ -5116,7 +5443,7 @@ void RtApiWasapi::wasapiThread() if ( bufferFrameCount != 0 ) { hr = renderClient->GetBuffer( bufferFrameCount, &streamBuffer ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer."; + errorText = "RtApiWasapi::wasapiThread: Unable to retrieve render buffer."; goto Exit; } @@ -5129,7 +5456,7 @@ void RtApiWasapi::wasapiThread() // Release render buffer hr = renderClient->ReleaseBuffer( bufferFrameCount, 0 ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to release render buffer."; + errorText = "RtApiWasapi::wasapiThread: Unable to release render buffer."; goto Exit; } } @@ -5138,7 +5465,7 @@ void RtApiWasapi::wasapiThread() // Inform WASAPI that render was unsuccessful hr = renderClient->ReleaseBuffer( 0, 0 ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to release render buffer."; + errorText = "RtApiWasapi::wasapiThread: Unable to release render buffer."; goto Exit; } } @@ -5148,7 +5475,7 @@ void RtApiWasapi::wasapiThread() // Inform WASAPI that render was unsuccessful hr = renderClient->ReleaseBuffer( 0, 0 ); if ( FAILED( hr ) ) { - errorText_ = "RtApiWasapi::wasapiThread: Unable to release render buffer."; + errorText = "RtApiWasapi::wasapiThread: Unable to release render buffer."; goto Exit; } } @@ -5156,7 +5483,10 @@ void RtApiWasapi::wasapiThread() // if the callback buffer was pushed renderBuffer reset callbackPulled flag if ( callbackPushed ) { + // unsetting the callbackPulled flag lets the stream 
know that
+    // the audio device is ready for another callback output buffer.
     callbackPulled = false;
+    // tick stream time
     RtApi::tickStreamTime();
   }
 
@@ -5168,15 +5498,20 @@ Exit:
   CoTaskMemFree( captureFormat );
   CoTaskMemFree( renderFormat );
 
+  free ( convBuffer );
+  delete renderResampler;
+  delete captureResampler;
+
   CoUninitialize();
 
   // update stream state
   stream_.state = STREAM_STOPPED;
 
-  if ( errorText_.empty() )
-    return;
-  else
+  if ( !errorText.empty() )
+  {
+    errorText_ = errorText;
     error( errorType );
+  }
 }
 
 //******************** End of __WINDOWS_WASAPI__ *********************//
@@ -6073,6 +6408,10 @@ void RtApiDs :: startStream()
     return;
   }
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   DsHandle *handle = (DsHandle *) stream_.apiHandle;
 
   // Increase scheduler frequency on lesser windows (a side-effect of
@@ -7230,10 +7569,12 @@ bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigne
     if ( result == 0 ) {
       if ( nDevices == device ) {
         strcpy( name, "default" );
+        snd_ctl_close( chandle );
         goto foundDevice;
       }
       nDevices++;
     }
+    snd_ctl_close( chandle );
 
   if ( nDevices == 0 ) {
     // This should not happen because a check is made before this function is called.
@@ -7633,7 +7974,7 @@ bool RtApiAlsa :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigne
   pthread_attr_t attr;
   pthread_attr_init( &attr );
   pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
   if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) {
     stream_.callbackInfo.doRealtime = true;
     struct sched_param param;
@@ -7763,6 +8104,10 @@ void RtApiAlsa :: startStream()
 
   MUTEX_LOCK( &stream_.mutex );
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   int result = 0;
   snd_pcm_state_t state;
   AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
@@ -8082,7 +8427,7 @@ static void *alsaCallbackHandler( void *ptr )
   RtApiAlsa *object = (RtApiAlsa *) info->object;
   bool *isRunning = &info->isRunning;
 
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
   if ( info->doRealtime ) {
     std::cerr << "RtAudio alsa: " << (sched_getscheduler(0) == SCHED_RR ? "" : "_NOT_ ") <<
@@ -8170,7 +8515,7 @@ static void *pulseaudio_callback( void * user )
   RtApiPulse *context = static_cast<RtApiPulse *>( cbi->object );
   volatile bool *isRunning = &cbi->isRunning;
 
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
+#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread)
   if (cbi->doRealtime) {
     std::cerr << "RtAudio pulse: " << (sched_getscheduler(0) == SCHED_RR ? "" : "_NOT_ ") <<
@@ -8334,6 +8679,10 @@ void RtApiPulse::startStream( void )
 
   MUTEX_LOCK( &stream_.mutex );
 
+  #if defined( HAVE_GETTIMEOFDAY )
+  gettimeofday( &stream_.lastTickTimestamp, NULL );
+  #endif
+
   stream_.state = STREAM_RUNNING;
 
   pah->runnable = true;
@@ -8573,7 +8922,7 @@ bool RtApiPulse::probeDeviceOpen( unsigned int device, StreamMode mode,
   pthread_attr_t attr;
   pthread_attr_init( &attr );
   pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE );
-#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread)
+#ifdef SCHED_RR // Undefined with some OSes (e.g.
NetBSD 1.6.x with GNU Pthread) if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) { stream_.callbackInfo.doRealtime = true; struct sched_param param; @@ -9194,7 +9543,7 @@ bool RtApiOss :: probeDeviceOpen( unsigned int device, StreamMode mode, unsigned pthread_attr_t attr; pthread_attr_init( &attr ); pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_JOINABLE ); -#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread) +#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread) if ( options && options->flags & RTAUDIO_SCHEDULE_REALTIME ) { stream_.callbackInfo.doRealtime = true; struct sched_param param; @@ -9318,6 +9667,10 @@ void RtApiOss :: startStream() MUTEX_LOCK( &stream_.mutex ); + #if defined( HAVE_GETTIMEOFDAY ) + gettimeofday( &stream_.lastTickTimestamp, NULL ); + #endif + stream_.state = STREAM_RUNNING; // No need to do anything else here ... OSS automatically starts @@ -9585,7 +9938,7 @@ static void *ossCallbackHandler( void *ptr ) RtApiOss *object = (RtApiOss *) info->object; bool *isRunning = &info->isRunning; -#ifdef SCHED_RR // Undefined with some OSes (eg: NetBSD 1.6.x with GNU Pthread) +#ifdef SCHED_RR // Undefined with some OSes (e.g. NetBSD 1.6.x with GNU Pthread) if (info->doRealtime) { std::cerr << "RtAudio oss: " << (sched_getscheduler(0) == SCHED_RR ? "" : "_NOT_ ") <<