diff --git a/webaudioapi/waa-tests.ts b/webaudioapi/waa-tests.ts
index 505f6214ef..fc92bd06a0 100644
--- a/webaudioapi/waa-tests.ts
+++ b/webaudioapi/waa-tests.ts
@@ -323,3 +323,11 @@ declare var footstepsBuffer: any;
     context.startRendering();
 }
 
+// Test automatic type inference of the audio processing event handler
+() => {
+    var context = new AudioContext();
+    var recorder = context.createScriptProcessor(2048, 1, 1);
+    recorder.onaudioprocess = function (e) {
+        e.inputBuffer;
+    };
+}
diff --git a/webaudioapi/waa.d.ts b/webaudioapi/waa.d.ts
index 128855da8d..59fab23ce5 100644
--- a/webaudioapi/waa.d.ts
+++ b/webaudioapi/waa.d.ts
@@ -601,6 +601,10 @@ interface AudioBufferSourceNode extends AudioNode {
 interface MediaElementAudioSourceNode extends AudioNode {
 }
 
+interface AudioProcessingEventHandler {
+    (e: AudioProcessingEvent): void;
+}
+
 /**
  * This interface is an AudioNode which can generate, process, or analyse audio directly using JavaScript.
  *
@@ -617,7 +621,7 @@ interface ScriptProcessorNode extends AudioNode {
     /**
      * An event listener which is called periodically for audio processing. An event of type AudioProcessingEvent will be passed to the event handler.
      */
-    onaudioprocess: EventHandler;
+    onaudioprocess: AudioProcessingEventHandler;
 
     /**
      * The size of the buffer (in sample-frames) which needs to be processed each time onprocessaudio is called. Legal values are (256, 512, 1024, 2048, 4096, 8192, 16384).
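
Note (not part of the diff): a minimal usage sketch of what the stricter typing enables. With `onaudioprocess` typed as `AudioProcessingEventHandler`, the callback parameter is inferred as `AudioProcessingEvent`, so its buffers can be used without an annotation or cast. The sketch assumes the bundled waa.d.ts also declares `AudioProcessingEvent.outputBuffer` and `AudioBuffer.getChannelData`, as in the Web Audio spec.

```ts
// Hypothetical pass-through processor; the handler parameter needs no type annotation.
var ctx = new AudioContext();
var processor = ctx.createScriptProcessor(2048, 1, 1);

processor.onaudioprocess = function (e) {
    // e is inferred as AudioProcessingEvent via AudioProcessingEventHandler.
    var input = e.inputBuffer.getChannelData(0);   // assumes getChannelData is declared on AudioBuffer
    var output = e.outputBuffer.getChannelData(0); // assumes outputBuffer is declared on AudioProcessingEvent
    for (var i = 0; i < input.length; i++) {
        output[i] = input[i]; // copy input to output unchanged
    }
};
```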