From 666846add65dd1cc0de9f74fdcce67571e646223 Mon Sep 17 00:00:00 2001 From: therewasaguy Date: Tue, 11 Sep 2018 00:41:30 -0400 Subject: [PATCH 1/6] userStartAudio add StartAudioContext dependency --- Gruntfile.js | 15 +++++++++++ package.json | 1 + src/audiocontext.js | 62 +++++++++++++++++++++++++-------------------- 3 files changed, 50 insertions(+), 28 deletions(-) diff --git a/Gruntfile.js b/Gruntfile.js index 82ab664f..0e97715d 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -44,6 +44,20 @@ module.exports = function(grunt) { } } }); + } else if (path.indexOf('node_modules/startaudiocontext') > -1) { + // return '/** StartAudioContext.js by Yotam Mann, MIT License 2017 https://github.com/tambien/StartAudioContext http://opensource.org/licenses/MIT **/\n' + + return require('amdclean').clean({ + code: contents, + escodegen: { + comment: false, + format: { + indent: { + style: ' ', + adjustMultiLineComment: true + } + } + } + }); } else { return require('amdclean').clean({ 'code':contents, @@ -63,6 +77,7 @@ module.exports = function(grunt) { out: 'lib/p5.sound.js', paths: { 'Tone' : 'node_modules/tone/Tone', + 'StartAudioContext' : 'node_modules/startaudiocontext/StartAudioContext', 'automation-timeline': 'node_modules/web-audio-automation-timeline/build/automation-timeline-amd', 'panner' : 'src/panner', 'shims': 'src/shims', diff --git a/package.json b/package.json index e8e08cd7..91d39d1b 100644 --- a/package.json +++ b/package.json @@ -19,6 +19,7 @@ "grunt-open": "^0.2.3" }, "dependencies": { + "startaudiocontext": "^1.2.1", "tone": "0.10.0" }, "scripts": { diff --git a/src/audiocontext.js b/src/audiocontext.js index ec43d34c..067ed148 100644 --- a/src/audiocontext.js +++ b/src/audiocontext.js @@ -1,6 +1,6 @@ 'use strict'; -define(function () { +define(['StartAudioContext'], function (require, StartAudioContext) { // Create the Audio Context var audiocontext = new window.AudioContext(); @@ -42,34 +42,40 @@ define(function () { return audiocontext; }; - // if it is iOS, we have to have a user interaction to start Web Audio - // http://paulbakaus.com/tutorials/html5/web-audio-on-ios/ - var iOS = navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false ; - if (iOS) { - var iosStarted = false; - var startIOS = function() { - if (iosStarted) return; - // create empty buffer - var buffer = audiocontext.createBuffer(1, 1, 22050); - var source = audiocontext.createBufferSource(); - source.buffer = buffer; - - // connect to output (your speakers) - source.connect(audiocontext.destination); - // play the file - source.start(0); - console.log('start ios!'); - - if (audiocontext.state === 'running') { - iosStarted = true; - } - }; - document.addEventListener('touchend', startIOS, false); - document.addEventListener('touchstart', startIOS, false); - - // TO DO: fake touch event so that audio will just start - } + /** + *

It is good practice to wait for a user gester before starting audio.
+ * This practice is enforced by Google Chrome's autoplay policy as of r70
+ * (info), iOS Safari, and other browsers.
+ *
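For readers new to the autoplay policies referenced above: browsers now create the AudioContext in a 'suspended' state and will only switch it to 'running' from inside a user gesture. A minimal sketch of that bare pattern using only the standard Web Audio API (no p5.sound; the ctx and resumeOnGesture names are placeholders):

var ctx = new (window.AudioContext || window.webkitAudioContext)();

document.addEventListener('click', function resumeOnGesture() {
  // resume() returns a Promise and only succeeds inside a user gesture
  ctx.resume().then(function() {
    console.log('AudioContext state: ' + ctx.state); // 'running'
    document.removeEventListener('click', resumeOnGesture);
  });
});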

+ * + *

+ * This method starts the audio context on a user gesture. It utilizes
+ * StartAudioContext library by Yotam Mann (MIT Licence, 2016). Read more
+ * at https://github.com/tambien/StartAudioContext.
+ *
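For reference, StartAudioContext can also be called directly, which is exactly what userStartAudio() does internally: pass it the context, an optional element or selector to listen on, and an optional callback; it returns a Promise. A rough sketch of direct usage (the require-style import and the '#start-button' selector are assumptions about a bundler setup, not part of this patch):

var StartAudioContext = require('startaudiocontext');
var ctx = new AudioContext();

// Binds tap/click listeners to the matched element and resumes ctx on the
// first gesture; the returned Promise resolves once the context is running.
StartAudioContext(ctx, '#start-button', function() {
  console.log('AudioContext started');
});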

+ * @param {Element|Array} [element(s)] This argument can be an Element,
+ * Selector String, NodeList, jQuery Element,
+ * or an Array of any of those.
+ * @param {Function} [callback] Callback to invoke when the AudioContext has started
+ * @return {Promise} Returns a Promise which is resolved when
+ * the AudioContext state is 'running'
+ * @method userStartAudio
+ * @example
+ *
+ * function setup() {
+ * var myButton = createButton('click to start audio');
+ * myButton.position(0, 0);
+ *
+ * userStartAudio(myButton, function() {
+ * alert('audio started!');
+ * });
+ * }
+ *
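Because the method also returns a Promise (see the @return tag above), the callback in this example is optional; a small variation that relies on the Promise instead (sketch only):

function setup() {
  var myButton = createButton('click to start audio');
  myButton.position(0, 0);

  userStartAudio(myButton).then(function() {
    console.log(getAudioContext().state); // 'running'
  });
}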
+ */ + p5.prototype.userStartAudio = function(elements, callback) { + return StartAudioContext(audiocontext, elements, callback); + }; return audiocontext; }); From c503f5f4cd779020f800f4c70e3fbc8d6cf62765 Mon Sep 17 00:00:00 2001 From: therewasaguy Date: Tue, 11 Sep 2018 00:52:09 -0400 Subject: [PATCH 2/6] add p5.Element support --- src/audiocontext.js | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/audiocontext.js b/src/audiocontext.js index 067ed148..10f4ba48 100644 --- a/src/audiocontext.js +++ b/src/audiocontext.js @@ -55,8 +55,8 @@ define(['StartAudioContext'], function (require, StartAudioContext) { * at https://github.com/tambien/StartAudioContext. *

* @param {Element|Array} [element(s)] This argument can be an Element, - * Selector String, NodeList, jQuery Element, - * or an Array of any of those. + * Selector String, NodeList, p5.Element, + * jQuery Element, or an Array of any of those. * @param {Function} [callback] Callback to invoke when the AudioContext has started * @return {Promise} Returns a Promise which is resolved when * the AudioContext state is 'running' @@ -74,7 +74,13 @@ define(['StartAudioContext'], function (require, StartAudioContext) { * */ p5.prototype.userStartAudio = function(elements, callback) { - return StartAudioContext(audiocontext, elements, callback); + var elt = elements; + if (elements instanceof p5.Element) { + elt = elements.elt; + } else if (elements instanceof Array && elements[0] instanceof p5.Element ) { + elt = elements.map(function(e) { return e.elt}); + } + return StartAudioContext(audiocontext, elt, callback); }; return audiocontext; From ef870e9079e87d96ae366533abdc43ad199289c4 Mon Sep 17 00:00:00 2001 From: therewasaguy Date: Sat, 15 Sep 2018 23:19:18 -0400 Subject: [PATCH 3/6] fix typo --- src/audiocontext.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/audiocontext.js b/src/audiocontext.js index 10f4ba48..ef78d101 100644 --- a/src/audiocontext.js +++ b/src/audiocontext.js @@ -44,7 +44,7 @@ define(['StartAudioContext'], function (require, StartAudioContext) { /** - *
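With the p5.Element branch added in this patch, sketch-level objects can be passed straight through and are unwrapped to their underlying DOM nodes via .elt. A usage sketch (hedged: it assumes the usual p5 behavior that createCanvas and createButton return p5.Elements):

function setup() {
  var cnv = createCanvas(200, 200);
  var btn = createButton('start audio');

  // An array of p5.Elements is mapped to plain DOM elements before being
  // handed to StartAudioContext
  userStartAudio([cnv, btn], function() {
    console.log('audio started from the canvas or the button');
  });
}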

It is good practice to wait for a user gester before starting audio. + *

It is good practice to wait for a user gesture before starting audio. * This practice is enforced by Google Chrome's autoplay policy as of r70 * (info), iOS Safari, and other browsers. *

From 1948d5ef21491161ab4a0ac33487dec97d8b7ca5 Mon Sep 17 00:00:00 2001 From: therewasaguy Date: Tue, 11 Sep 2018 00:41:30 -0400 Subject: [PATCH 4/6] userStartAudio add StartAudioContext dependency --- Gruntfile.js | 15 +++++++++++ package.json | 1 + src/audiocontext.js | 62 +++++++++++++++++++++++++-------------------- 3 files changed, 50 insertions(+), 28 deletions(-) diff --git a/Gruntfile.js b/Gruntfile.js index 82ab664f..0e97715d 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -44,6 +44,20 @@ module.exports = function(grunt) { } } }); + } else if (path.indexOf('node_modules/startaudiocontext') > -1) { + // return '/** StartAudioContext.js by Yotam Mann, MIT License 2017 https://github.com/tambien/StartAudioContext http://opensource.org/licenses/MIT **/\n' + + return require('amdclean').clean({ + code: contents, + escodegen: { + comment: false, + format: { + indent: { + style: ' ', + adjustMultiLineComment: true + } + } + } + }); } else { return require('amdclean').clean({ 'code':contents, @@ -63,6 +77,7 @@ module.exports = function(grunt) { out: 'lib/p5.sound.js', paths: { 'Tone' : 'node_modules/tone/Tone', + 'StartAudioContext' : 'node_modules/startaudiocontext/StartAudioContext', 'automation-timeline': 'node_modules/web-audio-automation-timeline/build/automation-timeline-amd', 'panner' : 'src/panner', 'shims': 'src/shims', diff --git a/package.json b/package.json index e8e08cd7..91d39d1b 100644 --- a/package.json +++ b/package.json @@ -19,6 +19,7 @@ "grunt-open": "^0.2.3" }, "dependencies": { + "startaudiocontext": "^1.2.1", "tone": "0.10.0" }, "scripts": { diff --git a/src/audiocontext.js b/src/audiocontext.js index ec43d34c..bf338eb7 100644 --- a/src/audiocontext.js +++ b/src/audiocontext.js @@ -1,6 +1,6 @@ 'use strict'; -define(function () { +define(['StartAudioContext'], function (require, StartAudioContext) { // Create the Audio Context var audiocontext = new window.AudioContext(); @@ -42,34 +42,40 @@ define(function () { return audiocontext; }; - // if it is iOS, we have to have a user interaction to start Web Audio - // http://paulbakaus.com/tutorials/html5/web-audio-on-ios/ - var iOS = navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false ; - if (iOS) { - var iosStarted = false; - var startIOS = function() { - if (iosStarted) return; - // create empty buffer - var buffer = audiocontext.createBuffer(1, 1, 22050); - var source = audiocontext.createBufferSource(); - source.buffer = buffer; - - // connect to output (your speakers) - source.connect(audiocontext.destination); - // play the file - source.start(0); - console.log('start ios!'); - - if (audiocontext.state === 'running') { - iosStarted = true; - } - }; - document.addEventListener('touchend', startIOS, false); - document.addEventListener('touchstart', startIOS, false); - - // TO DO: fake touch event so that audio will just start - } + /** + *

It is good practice to wait for a user gesture before starting audio.
+ * This practice is enforced by Google Chrome's autoplay policy as of r70
+ * (info), iOS Safari, and other browsers.
+ *

+ * + *

+ * This method starts the audio context on a user gesture. It utilizes
+ * StartAudioContext library by Yotam Mann (MIT Licence, 2016). Read more
+ * at https://github.com/tambien/StartAudioContext.
+ *

+ * @param {Element|Array} [element(s)] This argument can be an Element,
+ * Selector String, NodeList, jQuery Element,
+ * or an Array of any of those.
+ * @param {Function} [callback] Callback to invoke when the AudioContext has started
+ * @return {Promise} Returns a Promise which is resolved when
+ * the AudioContext state is 'running'
+ * @method userStartAudio
+ * @example
+ *
+ * function setup() {
+ * var myButton = createButton('click to start audio');
+ * myButton.position(0, 0);
+ *
+ * userStartAudio(myButton, function() {
+ * alert('audio started!');
+ * });
+ * }
+ *
+ */ + p5.prototype.userStartAudio = function(elements, callback) { + return StartAudioContext(audiocontext, elements, callback); + }; return audiocontext; }); From 0dfab6e3f641fb1df87e56b13bc0ea358f75c3bf Mon Sep 17 00:00:00 2001 From: therewasaguy Date: Tue, 11 Sep 2018 00:52:09 -0400 Subject: [PATCH 5/6] add p5.Element support --- src/audiocontext.js | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/audiocontext.js b/src/audiocontext.js index bf338eb7..ef78d101 100644 --- a/src/audiocontext.js +++ b/src/audiocontext.js @@ -55,8 +55,8 @@ define(['StartAudioContext'], function (require, StartAudioContext) { * at https://github.com/tambien/StartAudioContext. *

* @param {Element|Array} [element(s)] This argument can be an Element, - * Selector String, NodeList, jQuery Element, - * or an Array of any of those. + * Selector String, NodeList, p5.Element, + * jQuery Element, or an Array of any of those. * @param {Function} [callback] Callback to invoke when the AudioContext has started * @return {Promise} Returns a Promise which is resolved when * the AudioContext state is 'running' @@ -74,7 +74,13 @@ define(['StartAudioContext'], function (require, StartAudioContext) { * */ p5.prototype.userStartAudio = function(elements, callback) { - return StartAudioContext(audiocontext, elements, callback); + var elt = elements; + if (elements instanceof p5.Element) { + elt = elements.elt; + } else if (elements instanceof Array && elements[0] instanceof p5.Element ) { + elt = elements.map(function(e) { return e.elt}); + } + return StartAudioContext(audiocontext, elt, callback); }; return audiocontext; From d2981deae1a40dd45769c087b1f5f85244dc1121 Mon Sep 17 00:00:00 2001 From: therewasaguy Date: Thu, 10 Jan 2019 02:30:23 -0500 Subject: [PATCH 6/6] update example --- src/audiocontext.js | 29 ++++++++++++++++++++--------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/src/audiocontext.js b/src/audiocontext.js index ef78d101..28ac40dd 100644 --- a/src/audiocontext.js +++ b/src/audiocontext.js @@ -44,16 +44,21 @@ define(['StartAudioContext'], function (require, StartAudioContext) { /** - *

It is good practice to wait for a user gesture before starting audio. + *

It is a good practice to give users control over starting audio playback.
* This practice is enforced by Google Chrome's autoplay policy as of r70
* (info), iOS Safari, and other browsers.
*
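One way to give users that control is to tie sound generation to an explicit element and only start it once the context is running. A hedged sketch (p5.Oscillator is p5.sound's standard oscillator class; the button label and amplitude values are arbitrary):

var osc;

function setup() {
  createCanvas(200, 200);
  osc = new p5.Oscillator('sine');

  var startBtn = createButton('enable sound');
  userStartAudio(startBtn).then(function() {
    osc.start();
    osc.amp(0.2, 0.1); // fade in only after the context is 'running'
  });
}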

* *

- * This method starts the audio context on a user gesture. It utilizes - * StartAudioContext library by Yotam Mann (MIT Licence, 2016). Read more - * at https://github.com/tambien/StartAudioContext. + * userStartAudio() starts the Audio Context on a user gesture. It utilizes + * the StartAudioContext library by + * Yotam Mann (MIT Licence, 2016). Read more at https://github.com/tambien/StartAudioContext. *

+ * + *

Starting the audio context on a user gesture can be as simple as userStartAudio().
+ * Optional parameters let you decide on a specific element that will start the audio context,
+ * and/or call a function once the audio context is started.
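Read together with the parameter docs below, that means all of these call shapes should be valid; a brief hedged summary (myButton and onAudioStarted are placeholders):

// 1) No arguments: the first click/touch starts the context
userStartAudio();

// 2) A specific trigger element (p5.Element, DOM element, selector string, or an array of those)
userStartAudio(myButton);

// 3) Element plus callback, and/or the returned Promise
userStartAudio(myButton, onAudioStarted).then(function() {
  console.log(getAudioContext().state); // 'running'
});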

* @param {Element|Array} [element(s)] This argument can be an Element, * Selector String, NodeList, p5.Element, * jQuery Element, or an Array of any of those. @@ -64,12 +69,18 @@ define(['StartAudioContext'], function (require, StartAudioContext) { * @example *
* function setup() { - * var myButton = createButton('click to start audio'); - * myButton.position(0, 0); + * var myDiv = createDiv('click to start audio'); + * myDiv.position(0, 0); + * + * var mySynth = new p5.MonoSynth(); + * + * // This won't play until the context has started + * mySynth.play('A6'); * - * userStartAudio(myButton, function() { - * alert('audio started!'); - * }); + * // Start the audio context on a click/touch event + * userStartAudio().then(function() { + * myDiv.remove(); + * }); * } *
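Note the ordering in the updated example: mySynth.play('A6') is scheduled while the context is still suspended and only becomes audible once the user gesture arrives. An equivalent hedged variation triggers the synth from the resolved Promise instead:

function setup() {
  var myDiv = createDiv('click to start audio');
  var mySynth = new p5.MonoSynth();

  userStartAudio().then(function() {
    myDiv.remove();
    mySynth.play('A6'); // triggered only after the context is 'running'
  });
}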
*/