From 80c3d5424c5c5d66be33c26d3505ab2123886440 Mon Sep 17 00:00:00 2001
From: Jason Sigal
Date: Tue, 29 May 2018 10:48:28 +0200
Subject: [PATCH 1/3] rename sndcore module as audiocontext
---
Gruntfile.js | 2 +-
src/app.js | 4 ++--
src/{sndcore.js => audiocontext.js} | 0
src/gain.js | 1 -
src/reverb.js | 7 +++----
src/soundRecorder.js | 1 -
src/soundfile.js | 1 -
7 files changed, 6 insertions(+), 10 deletions(-)
rename src/{sndcore.js => audiocontext.js} (100%)
diff --git a/Gruntfile.js b/Gruntfile.js
index 4d21db54..8ab61d33 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -65,7 +65,7 @@ module.exports = function(grunt) {
'Tone' : 'node_modules/tone/Tone',
'automation-timeline': 'node_modules/web-audio-automation-timeline/build/automation-timeline-amd',
'panner' : 'src/panner',
- 'sndcore': 'src/sndcore',
+ 'audiocontext': 'src/audiocontext',
'master': 'src/master',
'helpers': 'src/helpers',
'errorHandler': 'src/errorHandler',
diff --git a/src/app.js b/src/app.js
index cc2b6585..07aea7f0 100644
--- a/src/app.js
+++ b/src/app.js
@@ -2,8 +2,8 @@
define(function (require) {
- var p5SOUND = require('sndcore');
- require('master');
+ require('audiocontext');
+ var p5SOUND = require('master');
require('helpers');
require('errorHandler');
require('panner');
diff --git a/src/sndcore.js b/src/audiocontext.js
similarity index 100%
rename from src/sndcore.js
rename to src/audiocontext.js
diff --git a/src/gain.js b/src/gain.js
index cbeb87d3..79ea33b8 100644
--- a/src/gain.js
+++ b/src/gain.js
@@ -2,7 +2,6 @@
define(function (require) {
var p5sound = require('master');
- require('sndcore');
/**
* A gain node is useful to set the relative volume of sound.
diff --git a/src/reverb.js b/src/reverb.js
index 8a7fd06c..945b17d5 100644
--- a/src/reverb.js
+++ b/src/reverb.js
@@ -3,7 +3,6 @@
define(function (require) {
var CustomError = require('errorHandler');
var Effect = require('effect');
- require('sndcore');
/**
* Reverb adds depth to a sound through a large number of decaying
@@ -14,9 +13,9 @@ define(function (require) {
* extends p5.Reverb allowing you to recreate the sound of actual physical
* spaces through convolution.
*
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
* disconnect() are available.
*
* @class p5.Reverb
diff --git a/src/soundRecorder.js b/src/soundRecorder.js
index 584e3595..fcd78133 100644
--- a/src/soundRecorder.js
+++ b/src/soundRecorder.js
@@ -4,7 +4,6 @@ define(function (require) {
// inspiration: recorder.js, Tone.js & typedarray.org
- require('sndcore');
var p5sound = require('master');
var ac = p5sound.audiocontext;
diff --git a/src/soundfile.js b/src/soundfile.js
index 7239890c..1816483d 100644
--- a/src/soundfile.js
+++ b/src/soundfile.js
@@ -2,7 +2,6 @@
define(function (require) {
- require('sndcore');
var CustomError = require('errorHandler');
var p5sound = require('master');
var ac = p5sound.audiocontext;
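
For orientation: after this rename, an individual p5.sound module no longer calls require('sndcore');
app.js loads 'audiocontext' once, and each module reaches the shared context through the 'master'
module, as gain.js and soundfile.js do above. A minimal sketch of that dependency pattern, with a
hypothetical module body (the Gruntfile paths above map the real module ids):

    // Hypothetical AMD module illustrating the post-rename dependency pattern.
    // 'master' is the real module id mapped in Gruntfile.js; everything else
    // in this body is illustrative.
    define(function (require) {
      var p5sound = require('master');   // 'audiocontext' is loaded once in app.js
      var ac = p5sound.audiocontext;     // the AudioContext shared by the library
      var gain = ac.createGain();
      gain.connect(p5sound.input);       // p5.sound's master input bus
      return gain;
    });
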
From 332db54e10ee60833da6557075d53fe9f5c6239b Mon Sep 17 00:00:00 2001
From: Jason Sigal
Date: Tue, 29 May 2018 10:50:17 +0200
Subject: [PATCH 2/3] move shims to new shims module
---
Gruntfile.js | 1 +
src/app.js | 1 +
src/audiocontext.js | 198 +-----------------------------------------
src/shims.js | 204 ++++++++++++++++++++++++++++++++++++++++++++
4 files changed, 207 insertions(+), 197 deletions(-)
create mode 100644 src/shims.js
diff --git a/Gruntfile.js b/Gruntfile.js
index 8ab61d33..82ab664f 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -65,6 +65,7 @@ module.exports = function(grunt) {
'Tone' : 'node_modules/tone/Tone',
'automation-timeline': 'node_modules/web-audio-automation-timeline/build/automation-timeline-amd',
'panner' : 'src/panner',
+ 'shims': 'src/shims',
'audiocontext': 'src/audiocontext',
'master': 'src/master',
'helpers': 'src/helpers',
diff --git a/src/app.js b/src/app.js
index 07aea7f0..c6870512 100644
--- a/src/app.js
+++ b/src/app.js
@@ -2,6 +2,7 @@
define(function (require) {
+ require('shims');
require('audiocontext');
var p5SOUND = require('master');
require('helpers');
diff --git a/src/audiocontext.js b/src/audiocontext.js
index b99590c6..38ba60e7 100644
--- a/src/audiocontext.js
+++ b/src/audiocontext.js
@@ -1,151 +1,6 @@
'use strict';
define(function () {
-
- /* AudioContext Monkeypatch
- Copyright 2013 Chris Wilson
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- */
- (function () {
- function fixSetTarget(param) {
- if (!param) // if NYI, just return
- return;
- if (!param.setTargetAtTime)
- param.setTargetAtTime = param.setTargetValueAtTime;
- }
-
- if (window.hasOwnProperty('webkitAudioContext') &&
- !window.hasOwnProperty('AudioContext')) {
- window.AudioContext = window.webkitAudioContext;
-
- if (typeof AudioContext.prototype.createGain !== 'function')
- AudioContext.prototype.createGain = AudioContext.prototype.createGainNode;
- if (typeof AudioContext.prototype.createDelay !== 'function')
- AudioContext.prototype.createDelay = AudioContext.prototype.createDelayNode;
- if (typeof AudioContext.prototype.createScriptProcessor !== 'function')
- AudioContext.prototype.createScriptProcessor = AudioContext.prototype.createJavaScriptNode;
- if (typeof AudioContext.prototype.createPeriodicWave !== 'function')
- AudioContext.prototype.createPeriodicWave = AudioContext.prototype.createWaveTable;
-
-
- AudioContext.prototype.internal_createGain = AudioContext.prototype.createGain;
- AudioContext.prototype.createGain = function() {
- var node = this.internal_createGain();
- fixSetTarget(node.gain);
- return node;
- };
-
- AudioContext.prototype.internal_createDelay = AudioContext.prototype.createDelay;
- AudioContext.prototype.createDelay = function(maxDelayTime) {
- var node = maxDelayTime ? this.internal_createDelay(maxDelayTime) : this.internal_createDelay();
- fixSetTarget(node.delayTime);
- return node;
- };
-
- AudioContext.prototype.internal_createBufferSource = AudioContext.prototype.createBufferSource;
- AudioContext.prototype.createBufferSource = function() {
- var node = this.internal_createBufferSource();
- if (!node.start) {
- node.start = function ( when, offset, duration ) {
- if ( offset || duration )
- this.noteGrainOn( when || 0, offset, duration );
- else
- this.noteOn( when || 0 );
- };
- } else {
- node.internal_start = node.start;
- node.start = function( when, offset, duration ) {
- if( typeof duration !== 'undefined' )
- node.internal_start( when || 0, offset, duration );
- else
- node.internal_start( when || 0, offset || 0 );
- };
- }
- if (!node.stop) {
- node.stop = function ( when ) {
- this.noteOff( when || 0 );
- };
- } else {
- node.internal_stop = node.stop;
- node.stop = function( when ) {
- node.internal_stop( when || 0 );
- };
- }
- fixSetTarget(node.playbackRate);
- return node;
- };
-
- AudioContext.prototype.internal_createDynamicsCompressor = AudioContext.prototype.createDynamicsCompressor;
- AudioContext.prototype.createDynamicsCompressor = function() {
- var node = this.internal_createDynamicsCompressor();
- fixSetTarget(node.threshold);
- fixSetTarget(node.knee);
- fixSetTarget(node.ratio);
- fixSetTarget(node.reduction);
- fixSetTarget(node.attack);
- fixSetTarget(node.release);
- return node;
- };
-
- AudioContext.prototype.internal_createBiquadFilter = AudioContext.prototype.createBiquadFilter;
- AudioContext.prototype.createBiquadFilter = function() {
- var node = this.internal_createBiquadFilter();
- fixSetTarget(node.frequency);
- fixSetTarget(node.detune);
- fixSetTarget(node.Q);
- fixSetTarget(node.gain);
- return node;
- };
-
- if (typeof AudioContext.prototype.createOscillator !== 'function') {
- AudioContext.prototype.internal_createOscillator = AudioContext.prototype.createOscillator;
- AudioContext.prototype.createOscillator = function() {
- var node = this.internal_createOscillator();
- if (!node.start) {
- node.start = function ( when ) {
- this.noteOn( when || 0 );
- };
- } else {
- node.internal_start = node.start;
- node.start = function ( when ) {
- node.internal_start( when || 0);
- };
- }
- if (!node.stop) {
- node.stop = function ( when ) {
- this.noteOff( when || 0 );
- };
- } else {
- node.internal_stop = node.stop;
- node.stop = function( when ) {
- node.internal_stop( when || 0 );
- };
- }
- if (!node.setPeriodicWave)
- node.setPeriodicWave = node.setWaveTable;
- fixSetTarget(node.frequency);
- fixSetTarget(node.detune);
- return node;
- };
- }
- }
-
- if (window.hasOwnProperty('webkitOfflineAudioContext') &&
- !window.hasOwnProperty('OfflineAudioContext')) {
- window.OfflineAudioContext = window.webkitOfflineAudioContext;
- }
-
- })(window);
- // <-- end MonkeyPatch.
-
// Create the Audio Context
var audiocontext = new window.AudioContext();
@@ -162,58 +17,6 @@ define(function () {
return audiocontext;
};
- // Polyfill for AudioIn, also handled by p5.dom createCapture
- navigator.getUserMedia = navigator.getUserMedia ||
- navigator.webkitGetUserMedia ||
- navigator.mozGetUserMedia ||
- navigator.msGetUserMedia;
-
-
- /**
- * Determine which filetypes are supported (inspired by buzz.js)
- * The audio element (el) will only be used to test browser support for various audio formats
- */
- var el = document.createElement('audio');
-
- p5.prototype.isSupported = function() {
- return !!el.canPlayType;
- };
- var isOGGSupported = function() {
- return !!el.canPlayType && el.canPlayType('audio/ogg; codecs="vorbis"');
- };
- var isMP3Supported = function() {
- return !!el.canPlayType && el.canPlayType('audio/mpeg;');
- };
- var isWAVSupported = function() {
- return !!el.canPlayType && el.canPlayType('audio/wav; codecs="1"');
- };
- var isAACSupported = function() {
- return !!el.canPlayType && (el.canPlayType('audio/x-m4a;') || el.canPlayType('audio/aac;'));
- };
- var isAIFSupported = function() {
- return !!el.canPlayType && el.canPlayType('audio/x-aiff;');
- };
- p5.prototype.isFileSupported = function(extension) {
- switch(extension.toLowerCase())
- {
- case 'mp3':
- return isMP3Supported();
- case 'wav':
- return isWAVSupported();
- case 'ogg':
- return isOGGSupported();
- case 'aac':
- case 'm4a':
- case 'mp4':
- return isAACSupported();
- case 'aif':
- case 'aiff':
- return isAIFSupported();
- default:
- return false;
- }
- };
-
// if it is iOS, we have to have a user interaction to start Web Audio
// http://paulbakaus.com/tutorials/html5/web-audio-on-ios/
var iOS = navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false ;
@@ -243,4 +46,5 @@ define(function () {
// TO DO: fake touch event so that audio will just start
}
+ return audiocontext;
});
diff --git a/src/shims.js b/src/shims.js
new file mode 100644
index 00000000..3de3f62f
--- /dev/null
+++ b/src/shims.js
@@ -0,0 +1,204 @@
+'use strict';
+
+/**
+ * Cross-browser shims: AudioContext monkeypatch, getUserMedia polyfill, and audio format detection.
+ */
+
+define(function () {
+
+ /* AudioContext Monkeypatch
+ Copyright 2013 Chris Wilson
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+ (function () {
+ function fixSetTarget(param) {
+ if (!param) // if NYI, just return
+ return;
+ if (!param.setTargetAtTime)
+ param.setTargetAtTime = param.setTargetValueAtTime;
+ }
+
+ if (window.hasOwnProperty('webkitAudioContext') &&
+ !window.hasOwnProperty('AudioContext')) {
+ window.AudioContext = window.webkitAudioContext;
+
+ if (typeof AudioContext.prototype.createGain !== 'function')
+ AudioContext.prototype.createGain = AudioContext.prototype.createGainNode;
+ if (typeof AudioContext.prototype.createDelay !== 'function')
+ AudioContext.prototype.createDelay = AudioContext.prototype.createDelayNode;
+ if (typeof AudioContext.prototype.createScriptProcessor !== 'function')
+ AudioContext.prototype.createScriptProcessor = AudioContext.prototype.createJavaScriptNode;
+ if (typeof AudioContext.prototype.createPeriodicWave !== 'function')
+ AudioContext.prototype.createPeriodicWave = AudioContext.prototype.createWaveTable;
+
+
+ AudioContext.prototype.internal_createGain = AudioContext.prototype.createGain;
+ AudioContext.prototype.createGain = function() {
+ var node = this.internal_createGain();
+ fixSetTarget(node.gain);
+ return node;
+ };
+
+ AudioContext.prototype.internal_createDelay = AudioContext.prototype.createDelay;
+ AudioContext.prototype.createDelay = function(maxDelayTime) {
+ var node = maxDelayTime ? this.internal_createDelay(maxDelayTime) : this.internal_createDelay();
+ fixSetTarget(node.delayTime);
+ return node;
+ };
+
+ AudioContext.prototype.internal_createBufferSource = AudioContext.prototype.createBufferSource;
+ AudioContext.prototype.createBufferSource = function() {
+ var node = this.internal_createBufferSource();
+ if (!node.start) {
+ node.start = function ( when, offset, duration ) {
+ if ( offset || duration )
+ this.noteGrainOn( when || 0, offset, duration );
+ else
+ this.noteOn( when || 0 );
+ };
+ } else {
+ node.internal_start = node.start;
+ node.start = function( when, offset, duration ) {
+ if( typeof duration !== 'undefined' )
+ node.internal_start( when || 0, offset, duration );
+ else
+ node.internal_start( when || 0, offset || 0 );
+ };
+ }
+ if (!node.stop) {
+ node.stop = function ( when ) {
+ this.noteOff( when || 0 );
+ };
+ } else {
+ node.internal_stop = node.stop;
+ node.stop = function( when ) {
+ node.internal_stop( when || 0 );
+ };
+ }
+ fixSetTarget(node.playbackRate);
+ return node;
+ };
+
+ AudioContext.prototype.internal_createDynamicsCompressor = AudioContext.prototype.createDynamicsCompressor;
+ AudioContext.prototype.createDynamicsCompressor = function() {
+ var node = this.internal_createDynamicsCompressor();
+ fixSetTarget(node.threshold);
+ fixSetTarget(node.knee);
+ fixSetTarget(node.ratio);
+ fixSetTarget(node.reduction);
+ fixSetTarget(node.attack);
+ fixSetTarget(node.release);
+ return node;
+ };
+
+ AudioContext.prototype.internal_createBiquadFilter = AudioContext.prototype.createBiquadFilter;
+ AudioContext.prototype.createBiquadFilter = function() {
+ var node = this.internal_createBiquadFilter();
+ fixSetTarget(node.frequency);
+ fixSetTarget(node.detune);
+ fixSetTarget(node.Q);
+ fixSetTarget(node.gain);
+ return node;
+ };
+
+ if (typeof AudioContext.prototype.createOscillator !== 'function') {
+ AudioContext.prototype.internal_createOscillator = AudioContext.prototype.createOscillator;
+ AudioContext.prototype.createOscillator = function() {
+ var node = this.internal_createOscillator();
+ if (!node.start) {
+ node.start = function ( when ) {
+ this.noteOn( when || 0 );
+ };
+ } else {
+ node.internal_start = node.start;
+ node.start = function ( when ) {
+ node.internal_start( when || 0);
+ };
+ }
+ if (!node.stop) {
+ node.stop = function ( when ) {
+ this.noteOff( when || 0 );
+ };
+ } else {
+ node.internal_stop = node.stop;
+ node.stop = function( when ) {
+ node.internal_stop( when || 0 );
+ };
+ }
+ if (!node.setPeriodicWave)
+ node.setPeriodicWave = node.setWaveTable;
+ fixSetTarget(node.frequency);
+ fixSetTarget(node.detune);
+ return node;
+ };
+ }
+ }
+
+ if (window.hasOwnProperty('webkitOfflineAudioContext') &&
+ !window.hasOwnProperty('OfflineAudioContext')) {
+ window.OfflineAudioContext = window.webkitOfflineAudioContext;
+ }
+
+ })(window);
+ // <-- end MonkeyPatch.
+
+ // Polyfill for AudioIn, also handled by p5.dom createCapture
+ navigator.getUserMedia = navigator.getUserMedia ||
+ navigator.webkitGetUserMedia ||
+ navigator.mozGetUserMedia ||
+ navigator.msGetUserMedia;
+
+
+ /**
+ * Determine which filetypes are supported (inspired by buzz.js)
+ * The audio element (el) will only be used to test browser support for various audio formats
+ */
+ var el = document.createElement('audio');
+
+ p5.prototype.isSupported = function() {
+ return !!el.canPlayType;
+ };
+ var isOGGSupported = function() {
+ return !!el.canPlayType && el.canPlayType('audio/ogg; codecs="vorbis"');
+ };
+ var isMP3Supported = function() {
+ return !!el.canPlayType && el.canPlayType('audio/mpeg;');
+ };
+ var isWAVSupported = function() {
+ return !!el.canPlayType && el.canPlayType('audio/wav; codecs="1"');
+ };
+ var isAACSupported = function() {
+ return !!el.canPlayType && (el.canPlayType('audio/x-m4a;') || el.canPlayType('audio/aac;'));
+ };
+ var isAIFSupported = function() {
+ return !!el.canPlayType && el.canPlayType('audio/x-aiff;');
+ };
+ p5.prototype.isFileSupported = function(extension) {
+ switch(extension.toLowerCase())
+ {
+ case 'mp3':
+ return isMP3Supported();
+ case 'wav':
+ return isWAVSupported();
+ case 'ogg':
+ return isOGGSupported();
+ case 'aac':
+ case 'm4a':
+ case 'mp4':
+ return isAACSupported();
+ case 'aif':
+ case 'aiff':
+ return isAIFSupported();
+ default:
+ return false;
+ }
+ };
+});
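
With the shims split out, requiring 'shims' in app.js applies the AudioContext monkeypatch, the
getUserMedia polyfill, and the format-detection helpers as a side effect, while 'audiocontext' now
simply creates and returns the shared context. A usage sketch for the relocated
p5.prototype.isFileSupported helper, assuming p5 global mode and a hypothetical asset path:

    // Pick a playable extension before loading a sound file.
    // 'assets/beat' is a made-up path; isFileSupported() and loadSound()
    // are existing p5.sound functions.
    var song;

    function preload() {
      var ext = isFileSupported('ogg') ? 'ogg' : 'mp3';
      song = loadSound('assets/beat.' + ext);
    }

    function setup() {
      createCanvas(100, 100);
    }

    function mousePressed() {
      song.play();
    }
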
From f6aea92d60025fa63c1cee915a4a6b2cd48660dc Mon Sep 17 00:00:00 2001
From: Jason Sigal
Date: Fri, 15 Jun 2018 10:44:10 -0400
Subject: [PATCH 3/3] add getAudioContext example
---
src/audiocontext.js | 25 +++++++++++++++++++++++++
1 file changed, 25 insertions(+)
diff --git a/src/audiocontext.js b/src/audiocontext.js
index 38ba60e7..ec43d34c 100644
--- a/src/audiocontext.js
+++ b/src/audiocontext.js
@@ -10,8 +10,33 @@ define(function () {
* 'http://webaudio.github.io/web-audio-api/'>Web Audio API
* </a>.
*
+ * Some browsers require users to start the AudioContext
+ * with a user gesture, such as touchStarted in the example below.
+ *
* @method getAudioContext
* @return {Object} AudioContext for this sketch
+ * @example
+ *
+ * function draw() {
+ * background(255);
+ * textAlign(CENTER);
+ *
+ * if (getAudioContext().state !== 'running') {
+ * text('click to start audio', width/2, height/2);
+ * } else {
+ * text('audio is enabled', width/2, height/2);
+ * }
+ * }
+ *
+ * function touchStarted() {
+ * if (getAudioContext().state !== 'running') {
+ * getAudioContext().resume();
+ * }
+ * var synth = new p5.MonoSynth();
+ * synth.play('A4', 0.5, 0, 0.2);
+ * }
+ *
+ *
*/
p5.prototype.getAudioContext = function() {
return audiocontext;
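
The example above resumes the context directly inside touchStarted. Since AudioContext.resume()
returns a Promise, the same unlock pattern can also defer playback until the context is actually
running; a minimal standalone sketch along those lines (the MonoSynth note mirrors the example and
is purely illustrative):

    function setup() {
      createCanvas(200, 100);
      textAlign(CENTER, CENTER);
    }

    function draw() {
      background(255);
      var msg = getAudioContext().state === 'running' ?
        'audio is enabled' : 'tap to start audio';
      text(msg, width / 2, height / 2);
    }

    function touchStarted() {
      // resume() resolves once the AudioContext has left the 'suspended' state
      getAudioContext().resume().then(function () {
        var synth = new p5.MonoSynth();
        synth.play('A4', 0.5, 0, 0.2);
      });
    }
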