Commit ac1a161c authored by ilyak

webaudio: audio context resume only for started devices

parent 5e8534cf
@@ -31291,6 +31291,7 @@ static ma_result ma_device_init_by_type__webaudio(ma_device* pDevice, const ma_d
         /* The AudioContext must be created in a suspended state. */
         device.webaudio = new (window.AudioContext || window.webkitAudioContext)({sampleRate:sampleRate});
         device.webaudio.suspend();
+        device.state = 1; /* MA_STATE_STOPPED */
 
         /*
         We need an intermediary buffer which we use for JavaScript and C interop. This buffer stores interleaved f32 PCM data. Because it's passed between
@@ -31498,13 +31499,17 @@ static ma_result ma_device_start__webaudio(ma_device* pDevice)
     if (pDevice->type == ma_device_type_capture || pDevice->type == ma_device_type_duplex) {
         EM_ASM({
-            miniaudio.get_device_by_index($0).webaudio.resume();
+            var device = miniaudio.get_device_by_index($0);
+            device.webaudio.resume();
+            device.state = 2; /* MA_STATE_STARTED */
         }, pDevice->webaudio.indexCapture);
     }
 
     if (pDevice->type == ma_device_type_playback || pDevice->type == ma_device_type_duplex) {
         EM_ASM({
-            miniaudio.get_device_by_index($0).webaudio.resume();
+            var device = miniaudio.get_device_by_index($0);
+            device.webaudio.resume();
+            device.state = 2; /* MA_STATE_STARTED */
         }, pDevice->webaudio.indexPlayback);
     }
@@ -31527,13 +31532,17 @@ static ma_result ma_device_stop__webaudio(ma_device* pDevice)
     if (pDevice->type == ma_device_type_capture || pDevice->type == ma_device_type_duplex) {
         EM_ASM({
-            miniaudio.get_device_by_index($0).webaudio.suspend();
+            var device = miniaudio.get_device_by_index($0);
+            device.webaudio.suspend();
+            device.state = 1; /* MA_STATE_STOPPED */
         }, pDevice->webaudio.indexCapture);
     }
 
     if (pDevice->type == ma_device_type_playback || pDevice->type == ma_device_type_duplex) {
         EM_ASM({
-            miniaudio.get_device_by_index($0).webaudio.suspend();
+            var device = miniaudio.get_device_by_index($0);
+            device.webaudio.suspend();
+            device.state = 1; /* MA_STATE_STOPPED */
         }, pDevice->webaudio.indexPlayback);
     }
@@ -31621,7 +31630,7 @@ static ma_result ma_context_init__webaudio(ma_context* pContext, const ma_contex
             miniaudio.unlock = function() {
                 for(var i = 0; i < miniaudio.devices.length; ++i) {
                     var device = miniaudio.devices[i];
-                    if (device != null && device.webaudio != null) {
+                    if (device != null && device.webaudio != null && device.state === 2 /* MA_STATE_STARTED */) {
                         device.webaudio.resume();
                     }
                 }
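
The net effect is that the page-level unlock handler only resumes AudioContexts belonging to devices that miniaudio has actually started, instead of resuming every registered context on the first user gesture. Below is a minimal, standalone JavaScript sketch of that gating pattern, not miniaudio code: the `devices` array, the event list, and the listener registration are assumptions for illustration; only the state values and the `state === MA_STATE_STARTED` check mirror the diff above.

    var MA_STATE_STOPPED = 1; /* values match the comments in the diff */
    var MA_STATE_STARTED = 2;

    /* Hypothetical device list; each entry holds an AudioContext and a state flag. */
    var devices = [];

    function unlock() {
        for (var i = 0; i < devices.length; ++i) {
            var device = devices[i];
            /* Resume only devices the application has started; a device that was
               deliberately stopped stays suspended even after a user gesture. */
            if (device != null && device.webaudio != null && device.state === MA_STATE_STARTED) {
                device.webaudio.resume();
            }
        }
    }

    /* Browsers only allow resuming an AudioContext from a user gesture, so the
       unlock handler is attached to common input events (assumed event list). */
    ['touchstart', 'touchend', 'click'].forEach(function (eventName) {
        document.addEventListener(eventName, unlock, true);
    });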