From 16407a3184fca78c6686918d71a34d9a2e9ff9cd Mon Sep 17 00:00:00 2001 From: Jason Sigal Date: Thu, 10 Jan 2019 09:07:46 -0500 Subject: [PATCH] userStartAudio method starts on user gesture (#322) A different approach to starting the audio context on a user gesture that works with Chrome's new autoplay policy and gives the user control over whether/how to handle the success callback / Promise --- Gruntfile.js | 15 +++++++++ package.json | 1 + src/audiocontext.js | 79 +++++++++++++++++++++++++++++---------------- 3 files changed, 67 insertions(+), 28 deletions(-) diff --git a/Gruntfile.js b/Gruntfile.js index 82ab664f..0e97715d 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -44,6 +44,20 @@ module.exports = function(grunt) { } } }); + } else if (path.indexOf('node_modules/startaudiocontext') > -1) { + // return '/** StartAudioContext.js by Yotam Mann, MIT License 2017 https://github.com/tambien/StartAudioContext http://opensource.org/licenses/MIT **/\n' + + return require('amdclean').clean({ + code: contents, + escodegen: { + comment: false, + format: { + indent: { + style: ' ', + adjustMultiLineComment: true + } + } + } + }); } else { return require('amdclean').clean({ 'code':contents, @@ -63,6 +77,7 @@ module.exports = function(grunt) { out: 'lib/p5.sound.js', paths: { 'Tone' : 'node_modules/tone/Tone', + 'StartAudioContext' : 'node_modules/startaudiocontext/StartAudioContext', 'automation-timeline': 'node_modules/web-audio-automation-timeline/build/automation-timeline-amd', 'panner' : 'src/panner', 'shims': 'src/shims', diff --git a/package.json b/package.json index e8e08cd7..91d39d1b 100644 --- a/package.json +++ b/package.json @@ -19,6 +19,7 @@ "grunt-open": "^0.2.3" }, "dependencies": { + "startaudiocontext": "^1.2.1", "tone": "0.10.0" }, "scripts": { diff --git a/src/audiocontext.js b/src/audiocontext.js index ec43d34c..28ac40dd 100644 --- a/src/audiocontext.js +++ b/src/audiocontext.js @@ -1,6 +1,6 @@ 'use strict'; -define(function () { 
+define(['StartAudioContext'], function (require, StartAudioContext) { // Create the Audio Context var audiocontext = new window.AudioContext(); @@ -42,34 +42,57 @@ define(function () { return audiocontext; }; - // if it is iOS, we have to have a user interaction to start Web Audio - // http://paulbakaus.com/tutorials/html5/web-audio-on-ios/ - var iOS = navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false ; - if (iOS) { - var iosStarted = false; - var startIOS = function() { - if (iosStarted) return; - // create empty buffer - var buffer = audiocontext.createBuffer(1, 1, 22050); - var source = audiocontext.createBufferSource(); - source.buffer = buffer; - - // connect to output (your speakers) - source.connect(audiocontext.destination); - // play the file - source.start(0); - console.log('start ios!'); - - if (audiocontext.state === 'running') { - iosStarted = true; - } - }; - document.addEventListener('touchend', startIOS, false); - document.addEventListener('touchstart', startIOS, false); - - // TO DO: fake touch event so that audio will just start - } + /** + *

It is a good practice to give users control over starting audio playback. + * This practice is enforced by Google Chrome's autoplay policy as of r70 + * (info), iOS Safari, and other browsers. + *

+ * + *

+ * userStartAudio() starts the Audio Context on a user gesture. It utilizes + * the StartAudioContext library by + * Yotam Mann (MIT Licence, 2016). Read more at https://github.com/tambien/StartAudioContext. *

+ * + *

Starting the audio context on a user gesture can be as simple as userStartAudio(). + * Optional parameters let you decide on a specific element that will start the audio context, + * and/or call a function once the audio context is started.

+ * @param {Element|Array} [element(s)] This argument can be an Element, + * Selector String, NodeList, p5.Element, + * jQuery Element, or an Array of any of those. + * @param {Function} [callback] Callback to invoke when the AudioContext has started + * @return {Promise} Returns a Promise which is resolved when + * the AudioContext state is 'running' + * @method userStartAudio + * @example + *
+ * function setup() { + * var myDiv = createDiv('click to start audio'); + * myDiv.position(0, 0); + * + * var mySynth = new p5.MonoSynth(); + * + * // This won't play until the context has started + * mySynth.play('A6'); + * + * // Start the audio context on a click/touch event + * userStartAudio().then(function() { + * myDiv.remove(); + * }); + * } + *
+ */ + p5.prototype.userStartAudio = function(elements, callback) { + var elt = elements; + if (elements instanceof p5.Element) { + elt = elements.elt; + } else if (elements instanceof Array && elements[0] instanceof p5.Element ) { + elt = elements.map(function(e) { return e.elt}); + } + return StartAudioContext(audiocontext, elt, callback); + }; return audiocontext; });