return;
}
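+ // record the wall-clock time of this (re)start so that getStreamTime() measures
+ // elapsed time from this point rather than from a stale pre-stop timestamp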
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
OSStatus result = noErr;
CoreHandle *handle = (CoreHandle *) stream_.apiHandle;
if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) {
unlock:
//MUTEX_UNLOCK( &stream_.mutex );
- RtApi::tickStreamTime();
+ // Make sure to only tick duplex stream time once if using two devices
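+ // (a single device fires one callback per cycle; with two devices, tick only from the output device's callback)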
+ if ( stream_.mode != DUPLEX || handle->id[0] == handle->id[1] || deviceId == handle->id[0] )
+   RtApi::tickStreamTime();
+
return SUCCESS;
}
return;
}
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
JackHandle *handle = (JackHandle *) stream_.apiHandle;
int result = jack_activate( handle->client );
if ( result ) {
return;
}
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
ASIOError result = ASIOStart();
if ( result != ASE_OK ) {
relOutIndex += bufferSize_;
}
- // "in" index can end on the "out" index but cannot begin at it
- if ( inIndex_ <= relOutIndex && inIndexEnd > relOutIndex ) {
+ // the "IN" index CAN BEGIN at the "OUT" index
+ // the "IN" index CANNOT END at the "OUT" index
+ if ( inIndex_ < relOutIndex && inIndexEnd >= relOutIndex ) {
return false; // not enough space between "in" index and "out" index
}
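+ // e.g. (hypothetical values) with bufferSize_ = 8, inIndex_ = 0 and outIndex_ = 4,
+ // a push of 4 samples is rejected because inIndexEnd would land on the "OUT" index,
+ // while a push of 3 samples is accepted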
relInIndex += bufferSize_;
}
- // "out" index can begin at and end on the "in" index
- if ( outIndex_ < relInIndex && outIndexEnd > relInIndex ) {
+ // the "OUT" index CANNOT BEGIN at the "IN" index
+ // the "OUT" index CAN END at the "IN" index
+ if ( outIndex_ <= relInIndex && outIndexEnd > relInIndex ) {
return false; // not enough space between "out" index and "in" index
}
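+ // e.g. (hypothetical values) with bufferSize_ = 8, outIndex_ = 0 and inIndex_ = 4,
+ // a pull of 4 samples is accepted because outIndexEnd may land on the "IN" index,
+ // while a pull of 5 samples is rejected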
return;
}
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
// update stream state
stream_.state = STREAM_RUNNING;
// Wait for the last buffer to play before stopping.
Sleep( 1000 * stream_.bufferSize / stream_.sampleRate );
- // stop capture client if applicable
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient ) {
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient->Stop();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::stopStream: Unable to stop capture stream.";
- error( RtAudioError::DRIVER_ERROR );
- return;
- }
- }
-
- // stop render client if applicable
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient ) {
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient->Stop();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::stopStream: Unable to stop render stream.";
- error( RtAudioError::DRIVER_ERROR );
- return;
- }
- }
-
// close thread handle
if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {
errorText_ = "RtApiWasapi::stopStream: Unable to close callback thread.";
Sleep( 1 );
}
- // stop capture client if applicable
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient ) {
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->captureAudioClient->Stop();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::abortStream: Unable to stop capture stream.";
- error( RtAudioError::DRIVER_ERROR );
- return;
- }
- }
-
- // stop render client if applicable
- if ( ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient ) {
- HRESULT hr = ( ( WasapiHandle* ) stream_.apiHandle )->renderAudioClient->Stop();
- if ( FAILED( hr ) ) {
- errorText_ = "RtApiWasapi::abortStream: Unable to stop render stream.";
- error( RtAudioError::DRIVER_ERROR );
- return;
- }
- }
-
// close thread handle
if ( stream_.callbackInfo.thread && !CloseHandle( ( void* ) stream_.callbackInfo.thread ) ) {
errorText_ = "RtApiWasapi::abortStream: Unable to close callback thread.";
stream_.doConvertBuffer[mode] = false;
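+ // note: stream_.userInterleaved is intentionally not tested here; a user/device
+ // interleaving mismatch is handled by the "else if" below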
if ( stream_.userFormat != stream_.deviceFormat[mode] ||
stream_.nUserChannels[0] != stream_.nDeviceChannels[0] ||
- stream_.nUserChannels[1] != stream_.nDeviceChannels[1] ||
- stream_.userInterleaved )
+ stream_.nUserChannels[1] != stream_.nDeviceChannels[1] )
stream_.doConvertBuffer[mode] = true;
else if ( stream_.userInterleaved != stream_.deviceInterleaved[mode] &&
stream_.nUserChannels[mode] > 1 )
HMODULE AvrtDll = LoadLibrary( (LPCTSTR) "AVRT.dll" );
if ( AvrtDll ) {
DWORD taskIndex = 0;
- TAvSetMmThreadCharacteristicsPtr AvSetMmThreadCharacteristicsPtr = ( TAvSetMmThreadCharacteristicsPtr ) GetProcAddress( AvrtDll, "AvSetMmThreadCharacteristicsW" );
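+ // cast through a generic function pointer first to avoid "incompatible function type"
+ // warnings when converting the FARPROC returned by GetProcAddress (e.g. GCC's -Wcast-function-type)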
+ TAvSetMmThreadCharacteristicsPtr AvSetMmThreadCharacteristicsPtr =
+ ( TAvSetMmThreadCharacteristicsPtr ) (void(*)()) GetProcAddress( AvrtDll, "AvSetMmThreadCharacteristicsW" );
AvSetMmThreadCharacteristicsPtr( L"Pro Audio", &taskIndex );
FreeLibrary( AvrtDll );
}
}
( ( WasapiHandle* ) stream_.apiHandle )->captureClient = captureClient;
+
+ // reset the capture stream
+ hr = captureAudioClient->Reset();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to reset capture stream.";
+ goto Exit;
+ }
+
+ // start the capture stream
+ hr = captureAudioClient->Start();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to start capture stream.";
+ goto Exit;
+ }
}
unsigned int inBufferSize = 0;
// set captureBuffer size
captureBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[INPUT] ) );
-
- // reset the capture stream
- hr = captureAudioClient->Reset();
- if ( FAILED( hr ) ) {
- errorText = "RtApiWasapi::wasapiThread: Unable to reset capture stream.";
- goto Exit;
- }
-
- // start the capture stream
- hr = captureAudioClient->Start();
- if ( FAILED( hr ) ) {
- errorText = "RtApiWasapi::wasapiThread: Unable to start capture stream.";
- goto Exit;
- }
}
// start render stream if applicable
( ( WasapiHandle* ) stream_.apiHandle )->renderClient = renderClient;
( ( WasapiHandle* ) stream_.apiHandle )->renderEvent = renderEvent;
+
+ // reset the render stream
+ hr = renderAudioClient->Reset();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to reset render stream.";
+ goto Exit;
+ }
+
+ // start the render stream
+ hr = renderAudioClient->Start();
+ if ( FAILED( hr ) ) {
+ errorText = "RtApiWasapi::wasapiThread: Unable to start render stream.";
+ goto Exit;
+ }
}
unsigned int outBufferSize = 0;
// set renderBuffer size
renderBuffer.setBufferSize( inBufferSize + outBufferSize, formatBytes( stream_.deviceFormat[OUTPUT] ) );
-
- // reset the render stream
- hr = renderAudioClient->Reset();
- if ( FAILED( hr ) ) {
- errorText = "RtApiWasapi::wasapiThread: Unable to reset render stream.";
- goto Exit;
- }
-
- // start the render stream
- hr = renderAudioClient->Start();
- if ( FAILED( hr ) ) {
- errorText = "RtApiWasapi::wasapiThread: Unable to start render stream.";
- goto Exit;
- }
}
// malloc buffer memory
}
convBuffSize *= 2; // allow overflow for *SrRatio remainders
- convBuffer = ( char* ) malloc( convBuffSize );
- stream_.deviceBuffer = ( char* ) malloc( deviceBuffSize );
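+ // calloc() zero-initializes the buffers so any samples not yet written render as
+ // silence rather than as uninitialized memory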
+ convBuffer = ( char* ) calloc( convBuffSize, 1 );
+ stream_.deviceBuffer = ( char* ) calloc( deviceBuffSize, 1 );
if ( !convBuffer || !stream_.deviceBuffer ) {
errorType = RtAudioError::MEMORY_ERROR;
errorText = "RtApiWasapi::wasapiThread: Error allocating device buffer memory.";
captureFlags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY ? RTAUDIO_INPUT_OVERFLOW : 0,
stream_.callbackInfo.userData );
+ // tick stream time
+ RtApi::tickStreamTime();
+
// Handle return value from callback
if ( callbackResult == 1 ) {
// instantiate a thread to stop this thread
stream_.convertInfo[OUTPUT] );
}
+ else {
+ // no further conversion, simply copy userBuffer to deviceBuffer
+ memcpy( stream_.deviceBuffer,
+ stream_.userBuffer[OUTPUT],
+ stream_.bufferSize * stream_.nUserChannels[OUTPUT] * formatBytes( stream_.userFormat ) );
+ }
// Convert callback buffer to stream sample rate
renderResampler->Convert( convBuffer,
// unsetting the callbackPulled flag lets the stream know that
// the audio device is ready for another callback output buffer.
callbackPulled = false;
-
- // tick stream time
- RtApi::tickStreamTime();
}
}
return;
}
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
DsHandle *handle = (DsHandle *) stream_.apiHandle;
// Increase scheduler frequency on lesser Windows (a side-effect of
unsigned nDevices = 0;
int result, subdevice, card;
char name[64];
- snd_ctl_t *handle;
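+ // start as a null handle so a failed snd_ctl_open() below is never followed by
+ // snd_ctl_close() on an unopened handle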
+ snd_ctl_t *handle = 0;
// Count cards and devices
card = -1;
sprintf( name, "hw:%d", card );
result = snd_ctl_open( &handle, name, 0 );
if ( result < 0 ) {
+ handle = 0;
errorStream_ << "RtApiAlsa::getDeviceCount: control open, card = " << card << ", " << snd_strerror( result ) << ".";
errorText_ = errorStream_.str();
error( RtAudioError::WARNING );
nDevices++;
}
nextcard:
- snd_ctl_close( handle );
+ if ( handle )
+ snd_ctl_close( handle );
snd_card_next( &card );
}
unsigned nDevices = 0;
int result, subdevice, card;
char name[64];
- snd_ctl_t *chandle;
+ snd_ctl_t *chandle = 0;
// Count cards and devices
card = -1;
sprintf( name, "hw:%d", card );
result = snd_ctl_open( &chandle, name, SND_CTL_NONBLOCK );
if ( result < 0 ) {
+ chandle = 0;
errorStream_ << "RtApiAlsa::getDeviceInfo: control open, card = " << card << ", " << snd_strerror( result ) << ".";
errorText_ = errorStream_.str();
error( RtAudioError::WARNING );
nDevices++;
}
nextcard:
- snd_ctl_close( chandle );
+ if ( chandle )
+ snd_ctl_close( chandle );
snd_card_next( &card );
}
MUTEX_LOCK( &stream_.mutex );
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
int result = 0;
snd_pcm_state_t state;
AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle;
MUTEX_LOCK( &stream_.mutex );
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
stream_.state = STREAM_RUNNING;
pah->runnable = true;
MUTEX_LOCK( &stream_.mutex );
+ #if defined( HAVE_GETTIMEOFDAY )
+ gettimeofday( &stream_.lastTickTimestamp, NULL );
+ #endif
+
stream_.state = STREAM_RUNNING;
// No need to do anything else here ... OSS automatically starts