authorJukka Jylänki <jujjyl@gmail.com>2013-06-16 14:27:35 +0300
committerAlon Zakai <alonzakai@gmail.com>2013-09-24 16:56:44 -0700
commit7a1e760e82ccef4bc950890bb11fd1a59e88f11a (patch)
tree2d031eac943b0224cf7eb339e353647f749c847b /src/library_sdl.js
parent6df56c0c5188f2316f75c414ccea9c3f735904ba (diff)
Improve SDL_OpenAudio support to work with the newest Web Audio API spec, add better support for different SDL audio formats and sample rates. Add browser test for SDL audio beep sample.
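The new SDL_OpenAudio path boils down to one pattern: pull interleaved 8-bit or 16-bit samples from the SDL callback, convert them to planar Float32 data, and schedule AudioBufferSourceNode blocks back-to-back so playback never gaps. A minimal standalone sketch of that pattern follows; the parameters and the fillS16() callback are illustrative assumptions, not the library code shown in the diff below.

  // Sketch: feed interleaved signed 16-bit audio to the Web Audio API by
  // converting it to planar Float32 and queueing buffer sources back-to-back.
  var ctx = new (window.AudioContext || window.webkitAudioContext)();
  var channels = 2, freq = 44100, samplesPerChannel = 1024; // assumed example values
  var nextPlayTime = 0;

  function pushInterleavedS16(s16) { // s16: Int16Array of samplesPerChannel*channels samples
    var buf = ctx.createBuffer(channels, samplesPerChannel, freq);
    for (var c = 0; c < channels; ++c) {
      var out = buf.getChannelData(c);
      for (var i = 0; i < samplesPerChannel; ++i) {
        out[i] = s16[i * channels + c] / 0x8000; // normalize S16 to [-1, 1)
      }
    }
    var src = ctx.createBufferSource();
    src.buffer = buf;
    src.connect(ctx.destination);
    var playTime = Math.max(ctx.currentTime, nextPlayTime);
    src.start(playTime);
    nextPlayTime = playTime + buf.duration; // the next block starts right after this one
  }

  // Poll the application for more audio roughly once per block duration.
  setInterval(function() {
    var block = new Int16Array(samplesPerChannel * channels);
    fillS16(block); // hypothetical application callback that synthesizes audio
    pushInterleavedS16(block);
  }, 1000 * samplesPerChannel / freq);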
Diffstat (limited to 'src/library_sdl.js')
-rw-r--r--   src/library_sdl.js   280
1 file changed, 188 insertions, 92 deletions
diff --git a/src/library_sdl.js b/src/library_sdl.js
index d292f753..2860669c 100644
--- a/src/library_sdl.js
+++ b/src/library_sdl.js
@@ -704,7 +704,7 @@ var LibrarySDL = {
// since the browser engine handles that for us. Therefore, in JS we just
// maintain a list of channels and return IDs for them to the SDL consumer.
allocateChannels: function(num) { // called from Mix_AllocateChannels and init
- if (SDL.numChannels && SDL.numChannels >= num) return;
+ if (SDL.numChannels && SDL.numChannels >= num && num != 0) return;
SDL.numChannels = num;
SDL.channels = [];
for (var i = 0; i < num; i++) {
@@ -1454,105 +1454,200 @@ var LibrarySDL = {
// SDL_Audio
- // TODO fix SDL_OpenAudio, and add some tests for it. It's currently broken.
SDL_OpenAudio: function(desired, obtained) {
- SDL.allocateChannels(32);
-
- SDL.audio = {
- freq: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.freq', 'i32', 0, 1) }}},
- format: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.format', 'i16', 0, 1) }}},
- channels: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.channels', 'i8', 0, 1) }}},
- samples: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.samples', 'i16', 0, 1) }}},
- callback: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.callback', 'void*', 0, 1) }}},
- userdata: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.userdata', 'void*', 0, 1) }}},
- soundSource: new Array(),
- nextSoundSource: 0,
- lastSoundSource: -1,
- nextPlayTime: 0,
- paused: true,
- timer: null
- };
-
- if (obtained) {
- {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.freq', 'SDL.audio.freq', 'i32') }}}; // no good way for us to know if the browser can really handle this
- {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.format', 33040, 'i16') }}}; // float, signed, 16-bit
- {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.channels', 'SDL.audio.channels', 'i8') }}};
- {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.silence', makeGetValue('desired', 'SDL.structs.AudioSpec.silence', 'i8', 0, 1), 'i8') }}}; // unclear if browsers can provide this
- {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.samples', 'SDL.audio.samples', 'i16') }}};
- {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.callback', 'SDL.audio.callback', '*') }}};
- {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.userdata', 'SDL.audio.userdata', '*') }}};
- }
-
- var totalSamples = SDL.audio.samples*SDL.audio.channels;
- SDL.audio.bufferSize = totalSamples*2; // hardcoded 16-bit audio
- SDL.audio.buffer = _malloc(SDL.audio.bufferSize);
- SDL.audio.caller = function() {
- Runtime.dynCall('viii', SDL.audio.callback, [SDL.audio.userdata, SDL.audio.buffer, SDL.audio.bufferSize]);
- SDL.audio.pushAudio(SDL.audio.buffer, SDL.audio.bufferSize);
- };
- // Mozilla Audio API/WebAudioAPI
+ // On Firefox, prefer the Mozilla Audio Data API if it is available; otherwise, use the Web Audio API.
try {
+ SDL.audio = {
+ freq: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.freq', 'i32', 0, 1) }}},
+ format: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.format', 'i16', 0, 1) }}},
+ channels: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.channels', 'i8', 0, 1) }}},
+ samples: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.samples', 'i16', 0, 1) }}}, // Samples in the CB buffer per single sound channel.
+ callback: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.callback', 'void*', 0, 1) }}},
+ userdata: {{{ makeGetValue('desired', 'SDL.structs.AudioSpec.userdata', 'void*', 0, 1) }}},
+ paused: true,
+ timer: null
+ };
+ // The .silence field specifies the constant sample value that corresponds to unbiased (zero-offset) silence for the wave data format.
+ if (SDL.audio.format == 0x0008 /*AUDIO_U8*/) {
+ SDL.audio.silence = 128; // Audio ranges in [0, 255], so silence is half-way in between.
+ } else if (SDL.audio.format == 0x8010 /*AUDIO_S16LSB*/) {
+ SDL.audio.silence = 0; // Signed data in range [-32768, 32767], silence is 0.
+ } else {
+ throw 'Invalid SDL audio format ' + SDL.audio.format + '!';
+ }
+ // Round the desired audio frequency up to the next 'common' frequency value.
+ // Web Audio API spec states 'An implementation must support sample-rates in at least the range 22050 to 96000.'
+ if (SDL.audio.freq <= 0) {
+ throw 'Unsupported sound frequency ' + SDL.audio.freq + '!';
+ } else if (SDL.audio.freq <= 22050) {
+ SDL.audio.freq = 22050; // Play it safe and round everything at or below 22 kHz up to 22050.
+ } else if (SDL.audio.freq <= 32000) {
+ SDL.audio.freq = 32000;
+ } else if (SDL.audio.freq <= 44100) {
+ SDL.audio.freq = 44100;
+ } else if (SDL.audio.freq <= 48000) {
+ SDL.audio.freq = 48000;
+ } else if (SDL.audio.freq <= 96000) {
+ SDL.audio.freq = 96000;
+ } else {
+ throw 'Unsupported sound frequency ' + SDL.audio.freq + '!';
+ }
+ if (SDL.audio.channels == 0) {
+ SDL.audio.channels = 1; // In SDL both 0 and 1 mean mono.
+ } else if (SDL.audio.channels < 0 || SDL.audio.channels > 32) {
+ throw 'Unsupported number of audio channels for SDL audio: ' + SDL.audio.channels + '!';
+ } else if (SDL.audio.channels != 1 && SDL.audio.channels != 2) { // Unsure what SDL audio spec supports. Web Audio spec supports up to 32 channels.
+ console.log('Warning: Using untested number of audio channels ' + SDL.audio.channels);
+ }
+ if (SDL.audio.samples < 1024 || SDL.audio.samples > 524288 /* arbitrary cap */) {
+ throw 'Unsupported audio callback buffer size ' + SDL.audio.samples + '!';
+ } else if ((SDL.audio.samples & (SDL.audio.samples-1)) != 0) {
+ throw 'Audio callback buffer size ' + SDL.audio.samples + ' must be a power-of-two!';
+ }
+
+ var totalSamples = SDL.audio.samples*SDL.audio.channels;
+ SDL.audio.bytesPerSample = (SDL.audio.format == 0x0008 /*AUDIO_U8*/ || SDL.audio.format == 0x8008 /*AUDIO_S8*/) ? 1 : 2;
+ SDL.audio.bufferSize = totalSamples*SDL.audio.bytesPerSample;
+ SDL.audio.buffer = _malloc(SDL.audio.bufferSize);
+
+ // Create a callback function that will be routinely invoked to request more audio data from the user application.
+ SDL.audio.caller = function() {
+ if (!SDL.audio) {
+ return;
+ }
+ Runtime.dynCall('viii', SDL.audio.callback, [SDL.audio.userdata, SDL.audio.buffer, SDL.audio.bufferSize]);
+ SDL.audio.pushAudio(SDL.audio.buffer, SDL.audio.bufferSize);
+ };
+
SDL.audio.audioOutput = new Audio();
- SDL.audio.hasWebAudio = ((typeof(AudioContext) === 'function')||(typeof(webkitAudioContext) === 'function'));
- if(!SDL.audio.hasWebAudio&&(typeof(SDL.audio.audioOutput['mozSetup'])==='function')){
- SDL.audio.audioOutput['mozSetup'](SDL.audio.channels, SDL.audio.freq); // use string attributes on mozOutput for closure compiler
- SDL.audio.mozBuffer = new Float32Array(totalSamples);
- SDL.audio.pushAudio = function(ptr, size) {
- var mozBuffer = SDL.audio.mozBuffer;
+ if (typeof(SDL.audio.audioOutput['mozSetup'])==='function') { // Primarily use Mozilla Audio Data API if available.
+ SDL.audio.audioOutput['mozSetup'](SDL.audio.channels, SDL.audio.freq); // use string attributes on mozOutput for closure compiler
+ SDL.audio.mozBuffer = new Float32Array(totalSamples);
+ SDL.audio.pushAudio = function(ptr, size) {
+ var mozBuffer = SDL.audio.mozBuffer;
+ // The input audio data for SDL audio is 8-bit or 16-bit samples interleaved across channels, while the output for the
+ // Mozilla Audio Data API needs to be interleaved Float32, so perform a sample conversion.
+ if (SDL.audio.format == 0x8010 /*AUDIO_S16LSB*/) {
+ for (var i = 0; i < totalSamples; i++) {
+ mozBuffer[i] = ({{{ makeGetValue('ptr', 'i*2', 'i16', 0, 0) }}}) / 0x8000;
+ }
+ } else if (SDL.audio.format == 0x0008 /*AUDIO_U8*/) {
for (var i = 0; i < totalSamples; i++) {
- mozBuffer[i] = ({{{ makeGetValue('ptr', 'i*2', 'i16', 0, 0) }}}) / 0x8000; // hardcoded 16-bit audio, signed (TODO: reSign if not ta2?)
+ var v = ({{{ makeGetValue('ptr', 'i', 'i8', 0, 0) }}});
+ mozBuffer[i] = ((v >= 0) ? v-128 : v+128) /128;
}
- SDL.audio.audioOutput['mozWriteAudio'](mozBuffer);
}
- }else{
- if (typeof(AudioContext) === 'function') {
- SDL.audio.context = new AudioContext();
- } else if (typeof(webkitAudioContext) === 'function') {
- SDL.audio.context = new webkitAudioContext();
- } else {
- throw 'no sound!';
- }
- SDL.audio.nextSoundSource = 0;
- SDL.audio.soundSource = new Array();
- SDL.audio.nextPlayTime = 0;
- SDL.audio.pushAudio=function(ptr,size){
- if(SDL.audio.lastSoundSource>-1){
- if(SDL.audio.soundSource[SDL.audio.lastSoundSource].playbackState === 3){
- SDL.audio.soundSource = new Array();
- SDL.audio.nextPlayTime = 0;
- SDL.audio.lastSoundSource = -1;
- SDL.audio.nextSoundSource = 0;
- }
- }
- SDL.audio.soundSource[SDL.audio.nextSoundSource] = SDL.audio.context.createBufferSource();
- SDL.audio.soundSource[SDL.audio.nextSoundSource].connect(SDL.audio.context.destination);
- SDL.audio.soundSource[SDL.audio.nextSoundSource].buffer = SDL.audio.context.createBuffer(SDL.audio.channels,(size / totalSamples),SDL.audio.freq);
- for(var j = 0; j<SDL.audio.channels; j++){
- var channelData = SDL.audio.SoundSource[SDL.audio.nextSoundSource].buffer.getChannelData(j);
- var samples = SDL.audio.samples;
- for(var i = 0; i<samples; i++){
- channelData[i] = ({{{ makeGetValue('ptr', '(i+samples*j)*2', 'i16', 0, 0) }}}) / 0x8000; // hardcoded 16-bit audio, signed (TODO: reSign if not ta2?)
- }
- }
- SDL.audio.nextPlayTime = SDL.audio.context.currentTime+SDL.audio.soundSource[SDL.audio.nextSoundSource].buffer.duration;
-
-
- SDL.audio.soundSource[SDL.audio.nextSoundSource].start(SDL.audio.nextPlayTime);
-
- SDL.audio.lastSoundSource = SDL.Audio.nextSoundSource;
- SDL.Audio.nextSoundSource++;
+ SDL.audio.audioOutput['mozWriteAudio'](mozBuffer);
+ }
+ } else {
+ // Initialize the Web Audio API if we haven't done so yet. Note: only ever create the Web Audio context once per page,
+ // since creating it multiple times fails on Chrome with an 'audio resources have been exhausted' error.
+ if (!SDL.audioContext) {
+ if (typeof(AudioContext) === 'function') {
+ SDL.audioContext = new AudioContext();
+ } else if (typeof(webkitAudioContext) === 'function') {
+ SDL.audioContext = new webkitAudioContext();
+ } else {
+ throw 'Web Audio API is not available!';
+ }
+ }
+ SDL.audio.soundSource = new Array(); // Use an array of sound sources as a ring buffer to queue blocks of synthesized audio to Web Audio API.
+ SDL.audio.nextSoundSource = 0; // Index of the next sound buffer in the ring buffer queue to play.
+ SDL.audio.nextPlayTime = 0; // Time in seconds when the next audio block is due to start.
+
+ // The pushAudio function is called with a new audio buffer whenever there is new audio data to schedule for playback on the device.
+ SDL.audio.pushAudio=function(ptr,sizeBytes) {
+ try {
+ var sizeSamples = sizeBytes / SDL.audio.bytesPerSample; // How many samples fit in the callback buffer?
+ var sizeSamplesPerChannel = sizeSamples / SDL.audio.channels; // How many samples per a single channel fit in the cb buffer?
+ if (sizeSamplesPerChannel != SDL.audio.samples) {
+ throw 'Received mismatching audio buffer size!';
}
+ // Allocate new sound buffer to be played.
+ var source = SDL.audioContext.createBufferSource();
+ SDL.audio.soundSource[SDL.audio.nextSoundSource] = source;
+ source.buffer = SDL.audioContext.createBuffer(SDL.audio.channels,sizeSamplesPerChannel,SDL.audio.freq);
+ SDL.audio.soundSource[SDL.audio.nextSoundSource].connect(SDL.audioContext.destination);
+
+ // The input audio data is interleaved across the channels, i.e. [L, R, L, R, L, R, ...] and is either 8-bit or 16-bit as
+ // supported by the SDL API. The output audio wave data for Web Audio API must be in planar buffers of [-1,1]-normalized Float32 data,
+ // so perform a buffer conversion for the data.
+ var numChannels = SDL.audio.channels;
+ for(var i = 0; i < numChannels; ++i) {
+ var channelData = SDL.audio.soundSource[SDL.audio.nextSoundSource].buffer.getChannelData(i);
+ if (channelData.length != sizeSamplesPerChannel) {
+ throw 'Web Audio output buffer length mismatch! Destination size: ' + channelData.length + ' samples vs expected ' + sizeSamplesPerChannel + ' samples!';
+ }
+ if (SDL.audio.format == 0x8010 /*AUDIO_S16LSB*/) {
+ for(var j = 0; j < sizeSamplesPerChannel; ++j) {
+ channelData[j] = ({{{ makeGetValue('ptr', '(j*numChannels + i)*2', 'i16', 0, 0) }}}) / 0x8000;
+ }
+ } else if (SDL.audio.format == 0x0008 /*AUDIO_U8*/) {
+ for(var j = 0; j < sizeSamplesPerChannel; ++j) {
+ var v = ({{{ makeGetValue('ptr', 'j*numChannels + i', 'i8', 0, 0) }}});
+ channelData[j] = ((v >= 0) ? v-128 : v+128) /128;
+ }
+ }
+ }
+
+ // Schedule the generated sample buffer to be played out at the correct time right after the previously scheduled
+ // sample buffer has finished.
+ var curtime = SDL.audioContext.currentTime;
+ if (curtime > SDL.audio.nextPlayTime && SDL.audio.nextPlayTime != 0) {
+ console.log('warning: Audio callback had starved sending audio by ' + (curtime - SDL.audio.nextPlayTime) + ' seconds.');
+ // Immediately queue up an extra buffer to get the audio feeding back ahead by one sample block:
+ Browser.safeSetTimeout(SDL.audio.caller, 1);
+ }
+ var playtime = Math.max(curtime, SDL.audio.nextPlayTime);
+ SDL.audio.soundSource[SDL.audio.nextSoundSource].start(playtime);
+ SDL.audio.nextPlayTime = playtime + SDL.audio.soundSource[SDL.audio.nextSoundSource].buffer.duration;
+ SDL.audio.nextSoundSource = (SDL.audio.nextSoundSource + 1) % 4;
+ } catch(e) {
+ console.log('Web Audio API error playing back audio: ' + e.toString());
+ }
+ }
}
+
+ if (obtained) {
+ // Report back the initialized audio parameters.
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.freq', 'SDL.audio.freq', 'i32') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.format', 'SDL.audio.format', 'i16') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.channels', 'SDL.audio.channels', 'i8') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.silence', makeGetValue('desired', 'SDL.structs.AudioSpec.silence', 'i8', 0, 1), 'i8') }}}; // unclear if browsers can provide this
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.samples', 'SDL.audio.samples', 'i16') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.callback', 'SDL.audio.callback', '*') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.userdata', 'SDL.audio.userdata', '*') }}};
+ }
+ SDL.allocateChannels(32);
+
} catch(e) {
+ console.log('Initializing SDL audio threw an exception: "' + e.toString() + '"! Continuing without audio.');
SDL.audio = null;
+ SDL.allocateChannels(0);
+ if (obtained) {
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.freq', 0, 'i32') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.format', 0, 'i16') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.channels', 0, 'i8') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.silence', 0, 'i8') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.samples', 0, 'i16') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.callback', 0, '*') }}};
+ {{{ makeSetValue('obtained', 'SDL.structs.AudioSpec.userdata', 0, '*') }}};
+ }
+ }
+ if (!SDL.audio) {
+ return -1;
}
- if (!SDL.audio) return -1;
return 0;
},
SDL_PauseAudio: function(pauseOn) {
+ if (!SDL.audio) {
+ return;
+ }
if (SDL.audio.paused !== pauseOn) {
- SDL.audio.timer = pauseOn ? SDL.audio.timer && clearInterval(SDL.audio.timer) : Browser.safeSetInterval(SDL.audio.caller, 1/35);
+ SDL.audio.timer = pauseOn ? SDL.audio.timer && clearInterval(SDL.audio.timer) : Browser.safeSetInterval(SDL.audio.caller, 1000 * SDL.audio.samples / SDL.audio.freq);
+ // Immediately queue up a buffer so that the audio feeding runs one sample block ahead of playback.
+ Browser.safeSetTimeout(SDL.audio.caller, 1);
}
SDL.audio.paused = pauseOn;
},
@@ -1560,17 +1655,18 @@ var LibrarySDL = {
SDL_CloseAudio__deps: ['SDL_PauseAudio', 'free'],
SDL_CloseAudio: function() {
if (SDL.audio) {
- try{
- for(var i = 0; i<SDL.audio.soundSource.length;i++){
- if(!(typeof(SDL.audio.soundSource[i]==='undefined'))){
- SDL.audio.soundSource[i].stop(0);
- }
- }
- }catch(e){}
- SDL.audo.soundSource = null;
+ try{
+ for(var i = 0; i < SDL.audio.soundSource.length; ++i) {
+ if (typeof(SDL.audio.soundSource[i]) !== 'undefined') {
+ SDL.audio.soundSource[i].stop(0);
+ }
+ }
+ } catch(e) {}
+ SDL.audio.soundSource = null;
_SDL_PauseAudio(1);
_free(SDL.audio.buffer);
SDL.audio = null;
+ SDL.allocateChannels(0);
}
},
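For reference, the feed interval that SDL_PauseAudio now uses is simply the duration of one callback buffer in milliseconds, and unpausing also fires the callback once right away so playback starts one block ahead of the interval timer. A quick sketch of that arithmetic, using assumed example values rather than anything taken from the new browser test:

  var freq = 44100, samples = 1024;     // assumed values; 'samples' is per channel, as in SDL
  var blockMs = 1000 * samples / freq;  // ~23.2 ms of audio per callback buffer
  console.log('feed interval: ' + blockMs.toFixed(1) + ' ms');
  // The extra safeSetTimeout(caller, 1) on unpause queues one block immediately,
  // so the ring buffer stays about one block (~23 ms here) ahead of playback.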