Commit 1acbd524 authored by David Reid

WASAPI: Fix a potential deadlock with exclusive + duplex mode.

parent e3536a1c
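
The gist of the change: in exclusive-mode duplex operation the old code waited on hEventPlayback before the playback client had necessarily been started (once before querying the available frames, and again whenever no frames were available), so the event might never be signalled and the audio thread could hang. The commit removes those early waits and instead waits once at the end of the iteration, after the code path that starts the playback device. The self-contained Win32 sketch below is illustrative only; producer_thread, g_hEvent and g_started are hypothetical names, not miniaudio symbols. It simply demonstrates the start-before-wait ordering the commit enforces.

    /*
        Hypothetical sketch of the deadlock pattern this commit removes: a consumer that blocks on an
        auto-reset event which is only ever signalled once the producer has been started. The fix is to
        start first, then wait. None of these names exist in miniaudio.
    */
    #include <windows.h>
    #include <stdio.h>

    static HANDLE g_hEvent;           /* Stands in for pDevice->wasapi.hEventPlayback. */
    static volatile LONG g_started;   /* Stands in for the "playback device started" state. */

    /* Stands in for the audio engine: it only signals the event once the "device" is running. */
    static DWORD WINAPI producer_thread(LPVOID pUnused)
    {
        (void)pUnused;
        while (InterlockedCompareExchange(&g_started, 0, 0) == 0) {
            Sleep(1);                 /* Not started yet, so nothing to signal. */
        }
        SetEvent(g_hEvent);           /* Signalled only after the device has been started. */
        return 0;
    }

    int main(void)
    {
        HANDLE hThread;

        g_hEvent = CreateEventW(NULL, FALSE, FALSE, NULL);   /* Auto-reset, initially unsignalled. */
        hThread  = CreateThread(NULL, 0, producer_thread, NULL, 0, NULL);

        /*
            Old ordering (deadlock-prone): waiting here, before setting g_started, would block until the
            timeout because the producer never signals an event for a device that was never started.

            New ordering (what the commit does): start the device first, then wait on the event.
        */
        InterlockedExchange(&g_started, 1);                  /* "Start" the device. */

        if (WaitForSingleObject(g_hEvent, 5000) != WAIT_OBJECT_0) {  /* Now it is safe to wait. */
            printf("Wait failed or timed out.\n");
            return 1;
        }

        printf("Event signalled after start, as expected.\n");
        WaitForSingleObject(hThread, INFINITE);
        CloseHandle(hThread);
        CloseHandle(g_hEvent);
        return 0;
    }
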
@@ -15254,13 +15254,6 @@ static ma_result ma_device_audio_thread__wasapi(ma_device* pDevice)
                 /* The process is to map the playback buffer and fill it as quickly as possible from input data. */
                 if (pMappedDeviceBufferPlayback == NULL) {
-                    /* WASAPI is weird with exclusive mode. You need to wait on the event _before_ querying the available frames. */
-                    if (pDevice->playback.shareMode == ma_share_mode_exclusive) {
-                        if (WaitForSingleObject(pDevice->wasapi.hEventPlayback, MA_WASAPI_WAIT_TIMEOUT_MILLISECONDS) != WAIT_OBJECT_0) {
-                            return MA_ERROR;   /* Wait failed. */
-                        }
-                    }
-
                     result = ma_device__get_available_frames__wasapi(pDevice, (ma_IAudioClient*)pDevice->wasapi.pAudioClientPlayback, &framesAvailablePlayback);
                     if (result != MA_SUCCESS) {
                         return result;
@@ -15276,18 +15269,6 @@ static ma_result ma_device_audio_thread__wasapi(ma_device* pDevice)
                         }
                     }
 
-                    /* If there's no frames available in the playback device we need to wait for more. */
-                    if (framesAvailablePlayback == 0) {
-                        /* In exclusive mode we waited at the top. */
-                        if (pDevice->playback.shareMode != ma_share_mode_exclusive) {
-                            if (WaitForSingleObject(pDevice->wasapi.hEventPlayback, MA_WASAPI_WAIT_TIMEOUT_MILLISECONDS) != WAIT_OBJECT_0) {
-                                return MA_ERROR;   /* Wait failed. */
-                            }
-                        }
-
-                        continue;
-                    }
-
                     /* We're ready to map the playback device's buffer. We don't release this until it's been entirely filled. */
                     hr = ma_IAudioRenderClient_GetBuffer((ma_IAudioRenderClient*)pDevice->wasapi.pRenderClient, framesAvailablePlayback, &pMappedDeviceBufferPlayback);
                     if (FAILED(hr)) {
@@ -15300,221 +15281,223 @@ static ma_result ma_device_audio_thread__wasapi(ma_device* pDevice)
                     mappedDeviceBufferFramesRemainingPlayback = framesAvailablePlayback;
                 }
 
+                if (mappedDeviceBufferFramesRemainingPlayback > 0) {
                     /* At this point we should have a buffer available for output. We need to keep writing input samples to it. */
                     for (;;) {
                         /* Try grabbing some captured data if we haven't already got a mapped buffer. */
                         if (pMappedDeviceBufferCapture == NULL) {
                             if (pDevice->capture.shareMode == ma_share_mode_shared) {
                                 if (WaitForSingleObject(pDevice->wasapi.hEventCapture, MA_WASAPI_WAIT_TIMEOUT_MILLISECONDS) != WAIT_OBJECT_0) {
                                     return MA_ERROR;   /* Wait failed. */
                                 }
                             }
 
                             result = ma_device__get_available_frames__wasapi(pDevice, (ma_IAudioClient*)pDevice->wasapi.pAudioClientCapture, &framesAvailableCapture);
                             if (result != MA_SUCCESS) {
                                 exitLoop = MA_TRUE;
                                 break;
                             }
 
                             /*printf("TRACE 2: framesAvailableCapture=%d\n", framesAvailableCapture);*/
 
                             /* Wait for more if nothing is available. */
                             if (framesAvailableCapture == 0) {
                                 /* In exclusive mode we waited at the top. */
                                 if (pDevice->capture.shareMode != ma_share_mode_shared) {
                                     if (WaitForSingleObject(pDevice->wasapi.hEventCapture, MA_WASAPI_WAIT_TIMEOUT_MILLISECONDS) != WAIT_OBJECT_0) {
                                         return MA_ERROR;   /* Wait failed. */
                                     }
                                 }
 
                                 continue;
                             }
 
                             /* Getting here means there's data available for writing to the output device. */
                             mappedDeviceBufferSizeInFramesCapture = ma_min(framesAvailableCapture, periodSizeInFramesCapture);
                             hr = ma_IAudioCaptureClient_GetBuffer((ma_IAudioCaptureClient*)pDevice->wasapi.pCaptureClient, (BYTE**)&pMappedDeviceBufferCapture, &mappedDeviceBufferSizeInFramesCapture, &flagsCapture, NULL, NULL);
                             if (FAILED(hr)) {
                                 ma_post_error(pDevice, MA_LOG_LEVEL_ERROR, "[WASAPI] Failed to retrieve internal buffer from capture device in preparation for writing to the device.", ma_result_from_HRESULT(hr));
                                 exitLoop = MA_TRUE;
                                 break;
                             }
 
                             /* Overrun detection. */
                             if ((flagsCapture & MA_AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY) != 0) {
                                 /* Glitched. Probably due to an overrun. */
                             #ifdef MA_DEBUG_OUTPUT
                                 printf("[WASAPI] Data discontinuity (possible overrun). framesAvailableCapture=%d, mappedBufferSizeInFramesCapture=%d\n", framesAvailableCapture, mappedDeviceBufferSizeInFramesCapture);
                             #endif
 
                                 /*
                                 Exeriment: If we get an overrun it probably means we're straddling the end of the buffer. In order to prevent a never-ending sequence of glitches let's experiment
                                 by dropping every frame until we're left with only a single period. To do this we just keep retrieving and immediately releasing buffers until we're down to the
                                 last period.
                                 */
                                 if (framesAvailableCapture >= pDevice->wasapi.actualPeriodSizeInFramesCapture) {
                                 #ifdef MA_DEBUG_OUTPUT
                                     printf("[WASAPI] Synchronizing capture stream. ");
                                 #endif
                                     do
                                     {
                                         hr = ma_IAudioCaptureClient_ReleaseBuffer((ma_IAudioCaptureClient*)pDevice->wasapi.pCaptureClient, mappedDeviceBufferSizeInFramesCapture);
                                         if (FAILED(hr)) {
                                             break;
                                         }
 
                                         framesAvailableCapture -= mappedDeviceBufferSizeInFramesCapture;
 
                                         if (framesAvailableCapture > 0) {
                                             mappedDeviceBufferSizeInFramesCapture = ma_min(framesAvailableCapture, periodSizeInFramesCapture);
                                             hr = ma_IAudioCaptureClient_GetBuffer((ma_IAudioCaptureClient*)pDevice->wasapi.pCaptureClient, (BYTE**)&pMappedDeviceBufferCapture, &mappedDeviceBufferSizeInFramesCapture, &flagsCapture, NULL, NULL);
                                             if (FAILED(hr)) {
                                                 ma_post_error(pDevice, MA_LOG_LEVEL_ERROR, "[WASAPI] Failed to retrieve internal buffer from capture device in preparation for writing to the device.", ma_result_from_HRESULT(hr));
                                                 exitLoop = MA_TRUE;
                                                 break;
                                             }
                                         } else {
                                             pMappedDeviceBufferCapture = NULL;
                                             mappedDeviceBufferSizeInFramesCapture = 0;
                                         }
                                     } while (framesAvailableCapture > periodSizeInFramesCapture);
                                 #ifdef MA_DEBUG_OUTPUT
                                     printf("framesAvailableCapture=%d, mappedBufferSizeInFramesCapture=%d\n", framesAvailableCapture, mappedDeviceBufferSizeInFramesCapture);
                                 #endif
                                 }
                             } else {
                             #ifdef MA_DEBUG_OUTPUT
                                 if (flagsCapture != 0) {
                                     printf("[WASAPI] Capture Flags: %ld\n", flagsCapture);
                                 }
                             #endif
                             }
 
                             mappedDeviceBufferFramesRemainingCapture = mappedDeviceBufferSizeInFramesCapture;
                         }
 
                         /* At this point we should have both input and output data available. We now need to convert the data and post it to the client. */
                         for (;;) {
                             BYTE* pRunningDeviceBufferCapture;
                             BYTE* pRunningDeviceBufferPlayback;
                             ma_uint32 framesToProcess;
                             ma_uint32 framesProcessed;
 
                             pRunningDeviceBufferCapture  = pMappedDeviceBufferCapture  + ((mappedDeviceBufferSizeInFramesCapture  - mappedDeviceBufferFramesRemainingCapture ) * bpfCaptureDevice);
                             pRunningDeviceBufferPlayback = pMappedDeviceBufferPlayback + ((mappedDeviceBufferSizeInFramesPlayback - mappedDeviceBufferFramesRemainingPlayback) * bpfPlaybackDevice);
 
                             /* There may be some data sitting in the converter that needs to be processed first. Once this is exhaused, run the data callback again. */
                             if (!pDevice->playback.converter.isPassthrough && outputDataInClientFormatConsumed < outputDataInClientFormatCount) {
                                 ma_uint64 convertedFrameCountClient = (outputDataInClientFormatCount - outputDataInClientFormatConsumed);
                                 ma_uint64 convertedFrameCountDevice = mappedDeviceBufferFramesRemainingPlayback;
                                 void* pConvertedFramesClient = outputDataInClientFormat + (outputDataInClientFormatConsumed * bpfPlaybackClient);
                                 void* pConvertedFramesDevice = pRunningDeviceBufferPlayback;
                                 result = ma_data_converter_process_pcm_frames(&pDevice->playback.converter, pConvertedFramesClient, &convertedFrameCountClient, pConvertedFramesDevice, &convertedFrameCountDevice);
                                 if (result != MA_SUCCESS) {
                                     break;
                                 }
 
                                 outputDataInClientFormatConsumed          += (ma_uint32)convertedFrameCountClient;  /* Safe cast. */
                                 mappedDeviceBufferFramesRemainingPlayback -= (ma_uint32)convertedFrameCountDevice;  /* Safe cast. */
 
                                 if (mappedDeviceBufferFramesRemainingPlayback == 0) {
                                     break;
                                 }
                             }
 
                             /*
                             Getting here means we need to fire the callback. If format conversion is unnecessary, we can optimize this by passing the pointers to the internal
                             buffers directly to the callback.
                             */
                             if (pDevice->capture.converter.isPassthrough && pDevice->playback.converter.isPassthrough) {
                                 /* Optimal path. We can pass mapped pointers directly to the callback. */
                                 framesToProcess = ma_min(mappedDeviceBufferFramesRemainingCapture, mappedDeviceBufferFramesRemainingPlayback);
                                 framesProcessed = framesToProcess;
 
                                 ma_device__on_data(pDevice, pRunningDeviceBufferPlayback, pRunningDeviceBufferCapture, framesToProcess);
 
                                 mappedDeviceBufferFramesRemainingCapture  -= framesProcessed;
                                 mappedDeviceBufferFramesRemainingPlayback -= framesProcessed;
 
                                 if (mappedDeviceBufferFramesRemainingCapture == 0) {
                                     break;  /* Exhausted input data. */
                                 }
                                 if (mappedDeviceBufferFramesRemainingPlayback == 0) {
                                     break;  /* Exhausted output data. */
                                 }
                             } else if (pDevice->capture.converter.isPassthrough) {
                                 /* The input buffer is a passthrough, but the playback buffer requires a conversion. */
                                 framesToProcess = ma_min(mappedDeviceBufferFramesRemainingCapture, outputDataInClientFormatCap);
                                 framesProcessed = framesToProcess;
 
                                 ma_device__on_data(pDevice, outputDataInClientFormat, pRunningDeviceBufferCapture, framesToProcess);
                                 outputDataInClientFormatCount    = framesProcessed;
                                 outputDataInClientFormatConsumed = 0;
 
                                 mappedDeviceBufferFramesRemainingCapture -= framesProcessed;
                                 if (mappedDeviceBufferFramesRemainingCapture == 0) {
                                     break;  /* Exhausted input data. */
                                 }
                             } else if (pDevice->playback.converter.isPassthrough) {
                                 /* The input buffer requires conversion, the playback buffer is passthrough. */
                                 ma_uint64 capturedDeviceFramesToProcess = mappedDeviceBufferFramesRemainingCapture;
                                 ma_uint64 capturedClientFramesToProcess = ma_min(inputDataInClientFormatCap, mappedDeviceBufferFramesRemainingPlayback);
 
                                 result = ma_data_converter_process_pcm_frames(&pDevice->capture.converter, pRunningDeviceBufferCapture, &capturedDeviceFramesToProcess, inputDataInClientFormat, &capturedClientFramesToProcess);
                                 if (result != MA_SUCCESS) {
                                     break;
                                 }
 
                                 if (capturedClientFramesToProcess == 0) {
                                     break;
                                 }
 
                                 ma_device__on_data(pDevice, pRunningDeviceBufferPlayback, inputDataInClientFormat, (ma_uint32)capturedClientFramesToProcess);   /* Safe cast. */
 
                                 mappedDeviceBufferFramesRemainingCapture  -= (ma_uint32)capturedDeviceFramesToProcess;
                                 mappedDeviceBufferFramesRemainingPlayback -= (ma_uint32)capturedClientFramesToProcess;
                             } else {
                                 ma_uint64 capturedDeviceFramesToProcess = mappedDeviceBufferFramesRemainingCapture;
                                 ma_uint64 capturedClientFramesToProcess = ma_min(inputDataInClientFormatCap, outputDataInClientFormatCap);
 
                                 result = ma_data_converter_process_pcm_frames(&pDevice->capture.converter, pRunningDeviceBufferCapture, &capturedDeviceFramesToProcess, inputDataInClientFormat, &capturedClientFramesToProcess);
                                 if (result != MA_SUCCESS) {
                                     break;
                                 }
 
                                 if (capturedClientFramesToProcess == 0) {
                                     break;
                                 }
 
                                 ma_device__on_data(pDevice, outputDataInClientFormat, inputDataInClientFormat, (ma_uint32)capturedClientFramesToProcess);
 
                                 mappedDeviceBufferFramesRemainingCapture -= (ma_uint32)capturedDeviceFramesToProcess;
                                 outputDataInClientFormatCount             = (ma_uint32)capturedClientFramesToProcess;
                                 outputDataInClientFormatConsumed          = 0;
                             }
 
                             /* If at this point we've run out of capture data we need to release the buffer. */
                             if (mappedDeviceBufferFramesRemainingCapture == 0 && pMappedDeviceBufferCapture != NULL) {
                                 hr = ma_IAudioCaptureClient_ReleaseBuffer((ma_IAudioCaptureClient*)pDevice->wasapi.pCaptureClient, mappedDeviceBufferSizeInFramesCapture);
                                 if (FAILED(hr)) {
                                     ma_post_error(pDevice, MA_LOG_LEVEL_ERROR, "[WASAPI] Failed to release internal buffer from capture device after reading from the device.", ma_result_from_HRESULT(hr));
                                     exitLoop = MA_TRUE;
                                     break;
                                 }
 
                                 /*printf("TRACE: Released capture buffer\n");*/
 
                                 pMappedDeviceBufferCapture = NULL;
                                 mappedDeviceBufferFramesRemainingCapture = 0;
                                 mappedDeviceBufferSizeInFramesCapture    = 0;
                             }
 
                             /* Get out of this loop if we're run out of room in the playback buffer. */
                             if (mappedDeviceBufferFramesRemainingPlayback == 0) {
                                 break;
                             }
                         }
                     }
+                }
@@ -15554,6 +15537,11 @@ static ma_result ma_device_audio_thread__wasapi(ma_device* pDevice)
                         c89atomic_exchange_8(&pDevice->wasapi.isStartedPlayback, MA_TRUE);
                     }
                 }
+
+                /* Make sure the device has started before waiting. */
+                if (WaitForSingleObject(pDevice->wasapi.hEventPlayback, MA_WASAPI_WAIT_TIMEOUT_MILLISECONDS) != WAIT_OBJECT_0) {
+                    return MA_ERROR;   /* Wait failed. */
+                }
             } break;