From 61ac195f9467d9fa554b523bb5fe6ca5e662446f Mon Sep 17 00:00:00 2001
From: Jason Sigal
- * p5.SoundFile: Load and play sound files.
- * p5.Amplitude: Get the current volume of a sound.
- * p5.AudioIn: Get sound from an input source, typically
- * a computer microphone.
- * p5.FFT: Analyze the frequency of sound. Returns
- * results from the frequency spectrum or time domain (waveform).
- * p5.Oscillator: Generate Sine,
- * Triangle, Square and Sawtooth waveforms.
+ *
 * p5.sound is on GitHub. Download the latest version
- * here.
+ * here.
 *
 * @module p5.sound
 * @submodule p5.sound
@@ -64,8768 +70,7954 @@
 * Web Audio API: http://w3.org/TR/webaudio/
 */
-(function (root, factory) {
-  if (typeof define === 'function' && define.amd)
-    define('p5.sound', ['p5'], function (p5) { (factory(p5)); });
-  else if (typeof exports === 'object')
-    factory(require('../p5'));
-  else
-    factory(root['p5']);
-}(this, function (p5) {
-
-var shims;
-'use strict';
-/**
- * This module has shims
- */
-shims = function () {
-  /* AudioContext Monkeypatch
-     Copyright 2013 Chris Wilson
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-     http://www.apache.org/licenses/LICENSE-2.0
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
-  */
-  (function () {
-    function fixSetTarget(param) {
-      if (!param)
-        // if NYI, just return
-        return;
-      if (!param.setTargetAtTime)
-        param.setTargetAtTime = param.setTargetValueAtTime;
-    }
-    if (window.hasOwnProperty('webkitAudioContext') && !window.hasOwnProperty('AudioContext')) {
-      window.AudioContext = window.webkitAudioContext;
-      if (typeof AudioContext.prototype.createGain !== 'function')
-        AudioContext.prototype.createGain = AudioContext.prototype.createGainNode;
-      if (typeof AudioContext.prototype.createDelay !== 'function')
-        AudioContext.prototype.createDelay = AudioContext.prototype.createDelayNode;
-      if (typeof AudioContext.prototype.createScriptProcessor !== 'function')
-        AudioContext.prototype.createScriptProcessor = AudioContext.prototype.createJavaScriptNode;
-      if (typeof AudioContext.prototype.createPeriodicWave !== 'function')
-        AudioContext.prototype.createPeriodicWave = AudioContext.prototype.createWaveTable;
-      AudioContext.prototype.internal_createGain = AudioContext.prototype.createGain;
-      AudioContext.prototype.createGain = function () {
-        var node = this.internal_createGain();
-        fixSetTarget(node.gain);
-        return node;
-      };
-      AudioContext.prototype.internal_createDelay = AudioContext.prototype.createDelay;
-      AudioContext.prototype.createDelay = function (maxDelayTime) {
-        var node = maxDelayTime ? this.internal_createDelay(maxDelayTime) : this.internal_createDelay();
-        fixSetTarget(node.delayTime);
-        return node;
-      };
-      AudioContext.prototype.internal_createBufferSource = AudioContext.prototype.createBufferSource;
-      AudioContext.prototype.createBufferSource = function () {
-        var node = this.internal_createBufferSource();
-        if (!node.start) {
-          node.start = function (when, offset, duration) {
-            if (offset || duration)
-              this.noteGrainOn(when || 0, offset, duration);
-            else
-              this.noteOn(when || 0);
-          };
-        } else {
-          node.internal_start = node.start;
-          node.start = function (when, offset, duration) {
-            if (typeof duration !== 'undefined')
-              node.internal_start(when || 0, offset, duration);
-            else
-              node.internal_start(when || 0, offset || 0);
-          };
-        }
-        if (!node.stop) {
-          node.stop = function (when) {
-            this.noteOff(when || 0);
-          };
-        } else {
-          node.internal_stop = node.stop;
-          node.stop = function (when) {
-            node.internal_stop(when || 0);
-          };
-        }
-        fixSetTarget(node.playbackRate);
-        return node;
-      };
-      AudioContext.prototype.internal_createDynamicsCompressor = AudioContext.prototype.createDynamicsCompressor;
-      AudioContext.prototype.createDynamicsCompressor = function () {
-        var node = this.internal_createDynamicsCompressor();
-        fixSetTarget(node.threshold);
-        fixSetTarget(node.knee);
-        fixSetTarget(node.ratio);
-        fixSetTarget(node.reduction);
-        fixSetTarget(node.attack);
-        fixSetTarget(node.release);
-        return node;
-      };
-      AudioContext.prototype.internal_createBiquadFilter = AudioContext.prototype.createBiquadFilter;
-      AudioContext.prototype.createBiquadFilter = function () {
-        var node = this.internal_createBiquadFilter();
-        fixSetTarget(node.frequency);
-        fixSetTarget(node.detune);
-        fixSetTarget(node.Q);
-        fixSetTarget(node.gain);
-        return node;
-      };
-      if (typeof AudioContext.prototype.createOscillator !== 'function') {
-        AudioContext.prototype.internal_createOscillator = AudioContext.prototype.createOscillator;
-        AudioContext.prototype.createOscillator = function () {
-          var node = this.internal_createOscillator();
-          if (!node.start) {
-            node.start = function (when) {
-              this.noteOn(when || 0);
-            };
-          } else {
-            node.internal_start = node.start;
-            node.start = function (when) {
-              node.internal_start(when || 0);
-            };
-          }
-          if (!node.stop) {
-            node.stop = function (when) {
-              this.noteOff(when || 0);
-            };
-          } else {
-            node.internal_stop = node.stop;
-            node.stop = function (when) {
-              node.internal_stop(when || 0);
-            };
-          }
-          if (!node.setPeriodicWave)
-            node.setPeriodicWave = node.setWaveTable;
-          fixSetTarget(node.frequency);
-          fixSetTarget(node.detune);
-          return node;
-        };
-      }
-    }
-    if (window.hasOwnProperty('webkitOfflineAudioContext') && !window.hasOwnProperty('OfflineAudioContext')) {
-      window.OfflineAudioContext = window.webkitOfflineAudioContext;
-    }
-  }(window));
-  // <-- end MonkeyPatch.
-  // Polyfill for AudioIn, also handled by p5.dom createCapture
-  navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
+(function(modules) {
+  var installedModules = {};
+  function __webpack_require__(moduleId) {
+    if(installedModules[moduleId]) {
+      return installedModules[moduleId].exports;
+    }
+    var module = installedModules[moduleId] = {
+      i: moduleId,
+      l: false,
+      exports: {}
+    };
+    modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
+    module.l = true;
+    return module.exports;
+  }
+  __webpack_require__.m = modules;
+  __webpack_require__.c = installedModules;
+  __webpack_require__.d = function(exports, name, getter) {
+    if(!__webpack_require__.o(exports, name)) {
+      Object.defineProperty(exports, name, { enumerable: true, get: getter });
+    }
+  };
+  __webpack_require__.r = function(exports) {
+    if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
+      Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
+    }
+    Object.defineProperty(exports, '__esModule', { value: true });
+  };
+  __webpack_require__.t = function(value, mode) {
+    if(mode & 1) value = __webpack_require__(value);
+    if(mode & 8) return value;
+    if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
+    var ns = Object.create(null);
+    __webpack_require__.r(ns);
+    Object.defineProperty(ns, 'default', { enumerable: true, value: value });
+    if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
+    return ns;
+  };
+  __webpack_require__.n = function(module) {
+    var getter = module && module.__esModule ?
+      function getDefault() { return module['default']; } :
+      function getModuleExports() { return module; };
+    __webpack_require__.d(getter, 'a', getter);
+    return getter;
+  };
+  __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
+  __webpack_require__.p = "";
+  return __webpack_require__(__webpack_require__.s = 31);
+})
+([
+(function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_RESULT__ = (function(){"use strict";function a(t,e){this.isUndef(t)||1===t?this.input=this.context.createGain():1
+ * Scale the output of all sound in this sketch
+ * Scaled between 0.0 (silence) and 1.0 (full volume).
+ * 1.0 is the maximum amplitude of a digital sound, so multiplying
+ * by greater than 1.0 may cause digital distortion. To
+ * fade, provide a <code>rampTime</code> parameter. For more
+ * complex fades, see the Envelope class.
+ *
+ * Alternately, you can pass in a signal source such as an
+ * oscillator to modulate the amplitude with an audio signal.
+ *
+ * How This Works: When you load the p5.sound module, it
+ * creates a single instance of p5sound. All sound objects in this
+ * module output to p5sound before reaching your computer's output.
+ * So if you change the amplitude of p5sound, it impacts all of the
+ * sound in this module.
+ *
+ * If no value is provided, returns a Web Audio API Gain Node
+ *
+ * @method masterVolume
+ * @param {Number|Object} volume  Volume (amplitude) between 0.0
+ *                                and 1.0 or modulating signal/oscillator
+ * @param {Number} [rampTime]     Fade for t seconds
+ * @param {Number} [timeFromNow]  Schedule this event to happen at
+ *                                t seconds in the future
+ */
+
+
+  p5.prototype.masterVolume = function (vol, rampTime, tFromNow) {
+    if (typeof vol === 'number') {
+      var rampTime = rampTime || 0;
+      var tFromNow = tFromNow || 0;
+      var now = p5sound.audiocontext.currentTime;
+      var currentVol = p5sound.output.gain.value;
+      p5sound.output.gain.cancelScheduledValues(now + tFromNow);
+      p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
+      p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
+    } else if (vol) {
+      vol.connect(p5sound.output.gain);
-    } else {
-      checkLoop();
-    }
-  }
-  function bindTapListener(element, tapListeners, context) {
-    if (Array.isArray(element) || NodeList && element instanceof NodeList) {
-      for (var i = 0; i < element.length; i++) {
-        bindTapListener(element[i], tapListeners, context);
-      }
-    } else if (typeof element === 'string') {
-      bindTapListener(document.querySelectorAll(element), tapListeners, context);
-    } else if (element.jquery && typeof element.toArray === 'function') {
-      bindTapListener(element.toArray(), tapListeners, context);
-    } else if (Element && element instanceof Element) {
-      var tap = new TapListener(element, context);
-      tapListeners.push(tap);
-    }
-  }
-  function StartAudioContext(context, elements, callback) {
-    var promise = new Promise(function (success) {
-      onStarted(context, success);
-    });
-    var tapListeners = [];
-    if (!elements) {
-      elements = document.body;
-    }
-    bindTapListener(elements, tapListeners, context);
-    promise.then(function () {
-      for (var i = 0; i < tapListeners.length; i++) {
-        tapListeners[i].dispose();
-      }
-      tapListeners = null;
-      if (callback) {
-        callback();
-      }
-    });
-    return promise;
-  }
-  return StartAudioContext;
-}));
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_core_Tone;
-Tone_core_Tone = function () {
-  'use strict';
-  var Tone = function (inputs, outputs) {
-    if (this.isUndef(inputs) || inputs === 1) {
-      this.input = this.context.createGain();
-    } else if (inputs > 1) {
-      this.input = new Array(inputs);
-    }
-    if (this.isUndef(outputs) || outputs === 1) {
-      this.output = this.context.createGain();
-    } else if (outputs > 1) {
-      this.output = new Array(inputs);
-    }
-  };
-  Tone.prototype.set = function (params, value, rampTime) {
-    if (this.isObject(params)) {
-      rampTime = value;
-    } else if (this.isString(params)) {
-      var tmpObj = {};
-      tmpObj[params] = value;
-      params = tmpObj;
-    }
-    paramLoop:
-      for (var attr in params) {
-        value = params[attr];
-        var parent = this;
-        if (attr.indexOf('.') !== -1) {
-          var attrSplit = attr.split('.');
-          for (var i = 0; i < attrSplit.length - 1; i++) {
-            parent = parent[attrSplit[i]];
-            if (parent instanceof Tone) {
-              attrSplit.splice(0, i + 1);
-              var innerParam = attrSplit.join('.');
-              parent.set(innerParam, value);
-              continue paramLoop;
-            }
-          }
-          attr = attrSplit[attrSplit.length - 1];
-        }
-        var param = parent[attr];
-        if (this.isUndef(param)) {
-          continue;
-        }
-        if (Tone.Signal && param instanceof Tone.Signal || Tone.Param && param instanceof Tone.Param) {
-          if (param.value !== value) {
-            if (this.isUndef(rampTime)) {
-              param.value = value;
-            } else {
-              param.rampTo(value, rampTime);
-            }
-          }
-        } else if (param instanceof AudioParam) {
-          if (param.value !== value) {
-            param.value = value;
-          }
-        } else if (param instanceof Tone) {
-          param.set(value);
-        } else if (param !== value) {
-          parent[attr] = value;
-        }
-      }
-    return this;
-  };
-  Tone.prototype.get = function (params) {
-    if (this.isUndef(params)) {
-      params = this._collectDefaults(this.constructor);
-    } else if (this.isString(params)) {
-      params = [params];
-    }
-    var ret = {};
-    for (var i = 0; i < params.length; i++) {
-      var attr = params[i];
-      var parent = this;
-      var subRet = ret;
-      if (attr.indexOf('.') !== -1) {
-        var attrSplit = attr.split('.');
-        for (var j = 0; j < attrSplit.length - 1; j++) {
-          var subAttr = attrSplit[j];
-          subRet[subAttr] = subRet[subAttr] || {};
-          subRet = subRet[subAttr];
-          parent = parent[subAttr];
-        }
-        attr = attrSplit[attrSplit.length - 1];
-      }
-      var param = parent[attr];
-      if (this.isObject(params[attr])) {
-        subRet[attr] = param.get();
-      } else if (Tone.Signal && param instanceof Tone.Signal) {
-        subRet[attr] = param.value;
-      } else if (Tone.Param && param instanceof Tone.Param) {
-        subRet[attr] = param.value;
-      } else if (param instanceof AudioParam) {
-        subRet[attr] = param.value;
-      } else if (param instanceof Tone) {
-        subRet[attr] = param.get();
-      } else if (!this.isFunction(param) && !this.isUndef(param)) {
-        subRet[attr] = param;
-      }
-    }
-    return ret;
-  };
-  Tone.prototype._collectDefaults = function (constr) {
-    var ret = [];
-    if (!this.isUndef(constr.defaults)) {
-      ret = Object.keys(constr.defaults);
-    }
-    if (!this.isUndef(constr._super)) {
-      var superDefs = this._collectDefaults(constr._super);
-      for (var i = 0; i < superDefs.length; i++) {
-        if (ret.indexOf(superDefs[i]) === -1) {
-          ret.push(superDefs[i]);
-        }
-      }
-    }
-    return ret;
-  };
-  Tone.prototype.toString = function () {
-    for (var className in Tone) {
-      var isLetter = className[0].match(/^[A-Z]$/);
-      var sameConstructor = Tone[className] === this.constructor;
-      if (this.isFunction(Tone[className]) && isLetter && sameConstructor) {
-        return className;
-      }
-    }
-    return 'Tone';
-  };
-  Object.defineProperty(Tone.prototype, 'numberOfInputs', {
-    get: function () {
-      if (this.input) {
-        if (this.isArray(this.input)) {
-          return this.input.length;
-        } else {
-          return 1;
-        }
-      } else {
-        return 0;
-      }
-    }
-  });
-  Object.defineProperty(Tone.prototype, 'numberOfOutputs', {
-    get: function () {
-      if (this.output) {
-        if (this.isArray(this.output)) {
-          return this.output.length;
-        } else {
-          return 1;
-        }
-      } else {
-        return 0;
-      }
-    }
-  });
-  Tone.prototype.dispose = function () {
-    if (!this.isUndef(this.input)) {
-      if (this.input instanceof AudioNode) {
-        this.input.disconnect();
-      }
-      this.input = null;
-    }
-    if (!this.isUndef(this.output)) {
-      if (this.output instanceof AudioNode) {
-        this.output.disconnect();
-      }
-      this.output = null;
-    }
-    return this;
-  };
-  Tone.prototype.connect = function (unit, outputNum, inputNum) {
-    if (Array.isArray(this.output)) {
-      outputNum = this.defaultArg(outputNum, 0);
-      this.output[outputNum].connect(unit, 0, inputNum);
-    } else {
-      this.output.connect(unit, outputNum, inputNum);
-    }
-    return this;
-  };
-  Tone.prototype.disconnect = function (destination, outputNum, inputNum) {
-    if (this.isArray(this.output)) {
-      if (this.isNumber(destination)) {
-        this.output[destination].disconnect();
-      } else {
-        outputNum = this.defaultArg(outputNum, 0);
-        this.output[outputNum].disconnect(destination, 0, inputNum);
-      }
-    } else {
-      this.output.disconnect.apply(this.output, arguments);
-    }
+    } else {
+      return p5sound.output.gain;
+    }
+  };
+  /**
+ * `p5.soundOut` is the p5.sound master output.
+ * It sends output to
+ * the destination of this window's web audio context. It contains
+ * Web Audio API nodes including a dynamicsCompressor (<code>.limiter</code>),
+ * and Gain Nodes for <code>.input</code> and <code>.output</code>.
+ *
+ * @property {Object} soundOut
+ */
+
+
+ p5.prototype.soundOut = p5.soundOut = p5sound;
+
+ p5.soundOut._silentNode = p5sound.audiocontext.createGain();
+ p5.soundOut._silentNode.gain.value = 0;
+
+ p5.soundOut._silentNode.connect(p5sound.audiocontext.destination);
+
+ return p5sound;
+}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
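A minimal sketch of the masterVolume() behavior documented above. This is an illustrative usage example, not library code; the sound file path is hypothetical.

    // Fade the whole sketch's output with masterVolume().
    // Assumes p5.js + p5.sound are loaded; 'assets/beat.mp3' is a hypothetical file.
    let mySound;

    function preload() {
      mySound = loadSound('assets/beat.mp3');
    }

    function setup() {
      createCanvas(100, 100);
      mySound.loop();
      masterVolume(0.5); // set the global amplitude immediately
    }

    function mousePressed() {
      userStartAudio();
      // ramp global volume to 0 over 2 seconds, starting 0.5 s from now
      masterVolume(0, 2, 0.5);
    }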
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(5),__webpack_require__(8),__webpack_require__(22),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(n){"use strict";return n.Signal=function(){var t=this.optionsObject(arguments,["value","units"],n.Signal.defaults);this.output=this._gain=this.context.createGain(),t.param=this._gain.gain,n.Param.call(this,t),this.input=this._param=this._gain.gain,this.context.getConstant(1).chain(this._gain)},n.extend(n.Signal,n.Param),n.Signal.defaults={value:0,units:n.Type.Default,convert:!0},n.Signal.prototype.connect=n.SignalBase.prototype.connect,n.Signal.prototype.dispose=function(){return n.Param.prototype.dispose.call(this),this._param=null,this._gain.disconnect(),this._gain=null,this},n.Signal}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Multiply=function(t){this.createInsOuts(2,0),this._mult=this.input[0]=this.output=new i.Gain,this._param=this.input[1]=this.output.gain,this._param.value=this.defaultArg(t,0)},i.extend(i.Multiply,i.Signal),i.Multiply.prototype.dispose=function(){return i.prototype.dispose.call(this),this._mult.dispose(),this._mult=null,this._param=null,this},i.Multiply}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var CrossFade = __webpack_require__(51);
+ /**
+ * Effect is a base class for audio effects in p5.
- * Some browsers require users to startAudioContext
- * with a user gesture, such as touchStarted in the example below.
+ * Returns the frequency value of a MIDI note value.
+ * General MIDI treats notes as integers where middle C
+ * is 60, C# is 61, D is 62 etc. Useful for generating
+ * musical frequencies with oscillators.
 *
- * @method getAudioContext
- * @return {Object} AudioContext for this sketch
- * @example
- *
- * function draw() {
- * background(255);
- * textAlign(CENTER);
+ * @method midiToFreq
+ * @param {Number} midiNote The number of a MIDI note
+ * @return {Number} Frequency value of the given MIDI note
+ * @example
+ *
+ * let midiNotes = [60, 64, 67, 72];
+ * let noteIndex = 0;
+ * let midiVal, freq;
*
- * if (getAudioContext().state !== 'running') {
- * text('click to start audio', width/2, height/2);
- * } else {
- * text('audio is enabled', width/2, height/2);
- * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(startSound);
+ * osc = new p5.TriOsc();
+ * env = new p5.Envelope();
* }
*
- * function touchStarted() {
- * if (getAudioContext().state !== 'running') {
- * getAudioContext().resume();
+ * function draw() {
+ * background(220);
+ * text('tap to play', 10, 20);
+ * if (midiVal) {
+ * text('MIDI: ' + midiVal, 10, 40);
+ * text('Freq: ' + freq, 10, 60);
* }
- * let synth = new p5.MonoSynth();
- * synth.play('A4', 0.5, 0, 0.2);
* }
*
- *
- */
- p5.prototype.getAudioContext = function () {
- return audiocontext;
- };
- /**
- * It is a good practice to give users control over starting audio playback.
- * This practice is enforced by Google Chrome's autoplay policy as of r70
- * (info), iOS Safari, and other browsers.
- *
- *
- * userStartAudio() starts the Audio Context on a user gesture. It utilizes
- * the StartAudioContext library by
- * Yotam Mann (MIT Licence, 2016). Read more at https://github.com/tambien/StartAudioContext.
- *
- * Starting the audio context on a user gesture can be as simple as <code>userStartAudio()</code>.
- * Optional parameters let you decide on a specific element that will start the audio context,
- * and/or call a function once the audio context is started.
- * function setup() {
- * let myDiv = createDiv('click to start audio');
- * myDiv.position(0, 0);
- *
- * let mySynth = new p5.MonoSynth();
+ * function startSound() {
+ * // see also: userStartAudio();
+ * osc.start();
*
- * // This won't play until the context has started
- * mySynth.play('A6');
+ * midiVal = midiNotes[noteIndex % midiNotes.length];
+ * freq = midiToFreq(midiVal);
+ * osc.freq(freq);
+ * env.ramp(osc, 0, 1.0, 0);
*
- * // Start the audio context on a click/touch event
- * userStartAudio().then(function() {
- * myDiv.remove();
- * });
+ * noteIndex++;
* }
*
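For reference, midiToFreq() is the standard equal-temperament conversion, where the constant 440 is concert-pitch A4 (MIDI note 69). A minimal check of the formula (mirroring the documented behavior; the helper name is hypothetical):

    // f = 440 * 2^((m - 69) / 12), the equal-temperament mapping
    function midiToFreqManual(m) {
      return 440 * Math.pow(2, (m - 69) / 12);
    }
    // midiToFreqManual(60) -> ~261.63 Hz (middle C), matching midiToFreq(60)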
- * Scale the output of all sound in this sketch
- * Scaled between 0.0 (silence) and 1.0 (full volume).
- * 1.0 is the maximum amplitude of a digital sound, so multiplying
- * by greater than 1.0 may cause digital distortion. To
- * fade, provide a <code>rampTime</code> parameter. For more
- * complex fades, see the Envelope class.
- *
- * Alternately, you can pass in a signal source such as an
- * oscillator to modulate the amplitude with an audio signal.
+ * List the SoundFile formats that you will include. LoadSound
+ * will search your directory for these extensions, and will pick
+ * a format that is compatible with the client's web browser.
+ * Here is a free online file
+ * converter.
*
- * How This Works: When you load the p5.sound module, it
- * creates a single instance of p5sound. All sound objects in this
- * module output to p5sound before reaching your computer's output.
- * So if you change the amplitude of p5sound, it impacts all of the
- * sound in this module.
+ * @method soundFormats
+ * @param {String} [...formats] i.e. 'mp3', 'wav', 'ogg'
+ * @example
+ *
+ * function preload() {
+ * // set the global sound formats
+ * soundFormats('mp3', 'ogg');
*
- * If no value is provided, returns a Web Audio API Gain Node
+ * // load either beatbox.mp3, or .ogg, depending on browser
+ * mySound = loadSound('assets/beatbox.mp3');
+ * }
*
- * @method masterVolume
- * @param {Number|Object} volume Volume (amplitude) between 0.0
- * and 1.0 or modulating signal/oscillator
- * @param {Number} [rampTime] Fade for t seconds
- * @param {Number} [timeFromNow] Schedule this event to happen at
- * t seconds in the future
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * background(220);
+ * text('sound loaded! tap to play', 10, 20, width - 20);
+ * cnv.mousePressed(function() {
+ * mySound.play();
+ * });
+ * }
+ *
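The extension swap that soundFormats() enables can be pictured like this. This is a rough sketch of the idea only, not the library's actual resolution code; the function name is hypothetical.

    // Try the globally allowed formats and keep the first one the browser can play.
    function pickPlayablePath(basePath, allowedFormats) {
      var probe = document.createElement('audio');
      for (var i = 0; i < allowedFormats.length; i++) {
        var type = 'audio/' + (allowedFormats[i] === 'mp3' ? 'mpeg' : allowedFormats[i]);
        if (probe.canPlayType(type)) {
          return basePath.replace(/\.[^.]+$/, '.' + allowedFormats[i]);
        }
      }
      return basePath; // fall back to the original request
    }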
- * Web Audio API nodes including a dynamicsCompressor (<code>.limiter</code>),
- * and Gain Nodes for <code>.input</code> and <code>.output</code>.
- *
- * @property {Object} soundOut
- */
- p5.prototype.soundOut = p5.soundOut = p5sound;
- /**
- * a silent connection to the DestinationNode
- * which will ensure that anything connected to it
- * will not be garbage collected
- *
- * @private
- */
- p5.soundOut._silentNode = p5sound.audiocontext.createGain();
- p5.soundOut._silentNode.gain.value = 0;
- p5.soundOut._silentNode.connect(p5sound.audiocontext.destination);
- return p5sound;
-}(audiocontext);
-var helpers;
-'use strict';
-helpers = function () {
- var p5sound = master;
- /**
- * @for p5
- */
- /**
- * Returns a number representing the sample rate, in samples per second,
- * of all sound objects in this audio context. It is determined by the
- * sampling rate of your operating system's sound card, and it is not
- * currently possible to change.
- * It is often 44100, or twice the range of human hearing.
- *
- * @method sampleRate
- * @return {Number} samplerate samples per second
- */
- p5.prototype.sampleRate = function () {
- return p5sound.audiocontext.sampleRate;
- };
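A quick consequence of the fixed sample rate: durations convert to sample counts by simple division. A sketch assuming the usual 44100 Hz value:

    // samples <-> seconds at the context's sample rate
    var sr = 44100;                  // typical value returned by sampleRate()
    var samplesPerFrameAt60fps = sr / 60;  // = 735 samples of audio per visual frame
    var secondsFor1024 = 1024 / sr;  // ~0.023 s, the span of a 1024-sample buffer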
- /**
- * Returns the closest MIDI note value for
- * a given frequency.
- *
- * @method freqToMidi
- * @param {Number} frequency A frequency, for example, the "A"
- * above Middle C is 440Hz
- * @return {Number} MIDI note value
- */
- p5.prototype.freqToMidi = function (f) {
- var mathlog2 = Math.log(f / 440) / Math.log(2);
- var m = Math.round(12 * mathlog2) + 69;
- return m;
- };
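freqToMidi() above is the inverse mapping of midiToFreq(), up to rounding. A round-trip check of the formula:

    // m = round(12 * log2(f / 440)) + 69  -- inverse of midiToFreq, up to rounding
    var a4 = 440;
    console.log(Math.round(12 * (Math.log(a4 / 440) / Math.log(2))) + 69); // 69
    // freqToMidi(midiToFreq(n)) === n for integer MIDI notes n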
- /**
- * Returns the frequency value of a MIDI note value.
- * General MIDI treats notes as integers where middle C
- * is 60, C# is 61, D is 62 etc. Useful for generating
- * musical frequencies with oscillators.
- *
- * @method midiToFreq
- * @param {Number} midiNote The number of a MIDI note
- * @return {Number} Frequency value of the given MIDI note
- * @example
- *
- * let notes = [60, 64, 67, 72];
- * let i = 0;
- *
- * function setup() {
- * osc = new p5.Oscillator('Triangle');
- * osc.start();
- * frameRate(1);
- * }
- *
- * function draw() {
- * let freq = midiToFreq(notes[i]);
- * osc.freq(freq);
- * i++;
- * if (i >= notes.length){
- * i = 0;
- * }
- * }
- *
- * function preload() {
- * // set the global sound formats
- * soundFormats('mp3', 'ogg');
- *
- * // load either beatbox.mp3, or .ogg, depending on browser
- * mySound = loadSound('assets/beatbox.mp3');
- * }
- *
- * function setup() {
- * mySound.play();
- * }
- *
- * Create a SoundFile object with a path to a file.
- *
- * The p5.SoundFile may not be available immediately because
- * it loads the file information asynchronously.
+ * A p5.Filter uses a Web Audio Biquad Filter to filter
+ * the frequency response of an input source. Subclasses
+ * include:
+ * <code>p5.LowPass</code>:
+ * Allows frequencies below the cutoff frequency to pass through,
+ * and attenuates frequencies above the cutoff.
+ * <code>p5.HighPass</code>:
+ * The opposite of a lowpass filter.
+ * <code>p5.BandPass</code>:
+ * Allows a range of frequencies to pass through and attenuates
+ * the frequencies below and above this frequency range.
- * To do something with the sound as soon as it loads
- * pass the name of a function as the second parameter.
+ * The <code>.res()</code> method controls either width of the
+ * bandpass, or resonance of the low/highpass cutoff frequency.
 *
- * Only one file path is required. However, audio file formats
- * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all
- * web browsers. If you want to ensure compatibility, instead of a single
- * file path, you may include an Array of filepaths, and the browser will
- * choose a format that works.
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
 *
- * @class p5.SoundFile
+ * @class p5.Filter
+ * @extends p5.Effect
 * @constructor
- * @param {String|Array} path   path to a sound file (String). Optionally,
- *                              you may include multiple file formats in
- *                              an array. Alternately, accepts an object
- *                              from the HTML5 File API, or a p5.File.
- * @param {Function} [successCallback]   Name of a function to call once file loads
- * @param {Function} [errorCallback]   Name of a function to call if file fails to
- *                                     load. This function will receive an error or
- *                                     XMLHttpRequest object with information
- *                                     about what went wrong.
- * @param {Function} [whileLoadingCallback]   Name of a function to call while file
- *                                            is loading. That function will
- *                                            receive progress of the request to
- *                                            load the sound file
- *                                            (between 0 and 1) as its first
- *                                            parameter. This progress
- *                                            does not account for the additional
- *                                            time needed to decode the audio data.
- *
+ * @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass'
 * @example
 *
+ * let fft, noise, filter;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100,100);
+ * cnv.mousePressed(makeNoise);
+ * fill(255, 0, 255);
+ *
+ * filter = new p5.BandPass();
+ * noise = new p5.Noise();
+ * noise.disconnect();
+ * noise.connect(filter);
+ *
+ * fft = new p5.FFT();
+ * }
+ *
+ * function draw() {
+ * background(220);
+ *
+ * // set the BandPass frequency based on mouseX
+ * let freq = map(mouseX, 0, width, 20, 10000);
+ * freq = constrain(freq, 0, 22050);
+ * filter.freq(freq);
+ * // give the filter a narrow band (lower res = wider bandpass)
+ * filter.res(50);
+ *
+ * // draw filtered spectrum
+ * let spectrum = fft.analyze();
+ * noStroke();
+ * for (let i = 0; i < spectrum.length; i++) {
+ * let x = map(i, 0, spectrum.length, 0, width);
+ * let h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width/spectrum.length, h);
+ * }
+ * if (!noise.started) {
+ * text('tap here and drag to change frequency', 10, 20, width - 20);
+ * } else {
+ * text('Frequency: ' + round(freq)+'Hz', 20, 20, width - 20);
+ * }
+ * }
+ *
+ * function makeNoise() {
+ * // see also: `userStartAudio()`
+ * noise.start();
+ * noise.amp(0.5, 0.2);
+ * }
+ *
+ * function mouseReleased() {
+ * noise.amp(0, 0.2);
+ * }
+ *
+ *
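The disconnect()/connect() pattern in the example above is the general way to route a source through any p5.Filter. A condensed sketch (any p5 sound source works in place of p5.Noise):

    // Route a source through a filter instead of straight to the master output.
    var noise = new p5.Noise();
    var filter = new p5.BandPass();
    noise.disconnect();      // detach from the master output
    noise.connect(filter);   // source -> filter -> master output
    filter.freq(1000);       // center frequency in Hz
    filter.res(20);          // narrower band as res increases
    noise.start();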
- * function preload() {
- * mySound = loadSound('assets/doorbell.mp3');
- * }
+ * Set the filter frequency, in Hz, from 10 to 22050 (the range of
+ * human hearing, although in reality most people hear in a narrower
+ * range).
*
- * function setup() {
- * mySound.setVolume(0.1);
- * mySound.play();
- * }
- *
- * <code>restart</code> and
- * <code>sustain</code>. Play Mode determines what happens to a
- * p5.SoundFile if it is triggered while in the middle of playback.
- * In sustain mode, playback will continue simultaneous to the
- * new playback. In restart mode, play() will stop playback
- * and start over. With untilDone, a sound will play only if it's
- * not already playing. Sustain is the default mode.
- *
- * @method playMode
- * @param {String} str 'restart' or 'sustain' or 'untilDone'
- * @example
- *
- * let mySound;
- * function preload(){
- * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- * function mouseClicked() {
- * mySound.playMode('sustain');
- * mySound.play();
- * }
- * function keyPressed() {
- * mySound.playMode('restart');
- * mySound.play();
- * }
+ * Set the type of a p5.Filter. Possible types include:
+ * "lowpass" (default), "highpass", "bandpass",
+ * "lowshelf", "highshelf", "peaking", "notch",
+ * "allpass".
*
- *
- * let soundFile;
- *
- * function preload() {
- * soundFormats('ogg', 'mp3');
- * soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3');
- * }
- * function setup() {
- * background(0, 255, 0);
- * soundFile.setVolume(0.1);
- * soundFile.loop();
- * }
- * function keyTyped() {
- * if (key == 'p') {
- * soundFile.pause();
- * background(255, 0, 0);
- * }
- * }
- *
- * function keyReleased() {
- * if (key == 'p') {
- * soundFile.play();
- * background(0, 255, 0);
- * }
- * }
- *
- * Constructor: <code>new p5.LowPass()</code> Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method <code>setType('lowpass')</code>.
+ * See p5.Filter for methods.
*
- * @method loop
- * @param {Number} [startTime] (optional) schedule event to occur
- * seconds from now
- * @param {Number} [rate] (optional) playback rate
- * @param {Number} [amp] (optional) playback volume
- * @param {Number} [cueLoopStart] (optional) startTime in seconds
- * @param {Number} [duration] (optional) loop duration in seconds
+ * @class p5.LowPass
+ * @constructor
+ * @extends p5.Filter
*/
- p5.SoundFile.prototype.loop = function (startTime, rate, amp, loopStart, duration) {
- this._looping = true;
- this.play(startTime, rate, amp, loopStart, duration);
+
+
+ p5.LowPass = function () {
+ p5.Filter.call(this, 'lowpass');
};
+
+ p5.LowPass.prototype = Object.create(p5.Filter.prototype);
/**
- * Set a p5.SoundFile's looping flag to true or false. If the sound
- * is currently playing, this change will take effect when it
- * reaches the end of the current playback.
+ * Constructor: <code>new p5.HighPass()</code> Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method <code>setType('highpass')</code>.
+ * See p5.Filter for methods.
*
- * @method setLoop
- * @param {Boolean} Boolean set looping to true or false
+ * @class p5.HighPass
+ * @constructor
+ * @extends p5.Filter
*/
- p5.SoundFile.prototype.setLoop = function (bool) {
- if (bool === true) {
- this._looping = true;
- } else if (bool === false) {
- this._looping = false;
- } else {
- throw 'Error: setLoop accepts either true or false';
- }
- if (this.bufferSourceNode) {
- this.bufferSourceNode.loop = this._looping;
- this._counterNode.loop = this._looping;
- }
+
+ p5.HighPass = function () {
+ p5.Filter.call(this, 'highpass');
};
+
+ p5.HighPass.prototype = Object.create(p5.Filter.prototype);
/**
- * Returns 'true' if a p5.SoundFile is currently looping and playing, 'false' if not.
+ * Constructor: <code>new p5.BandPass()</code> Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method <code>setType('bandpass')</code>.
+ * See p5.Filter for methods.
*
- * @method isLooping
- * @return {Boolean}
+ * @class p5.BandPass
+ * @constructor
+ * @extends p5.Filter
*/
- p5.SoundFile.prototype.isLooping = function () {
- if (!this.bufferSourceNode) {
- return false;
- }
- if (this._looping === true && this.isPlaying() === true) {
- return true;
- }
- return false;
+
+ p5.BandPass = function () {
+ p5.Filter.call(this, 'bandpass');
};
+
+ p5.BandPass.prototype = Object.create(p5.Filter.prototype);
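Since all three constructors above just preset the Biquad type, they are interchangeable with a configured p5.Filter. For instance:

    // Equivalent ways to get a lowpass, per the constructors above
    var lp1 = new p5.LowPass();
    var lp2 = new p5.Filter('lowpass');
    var lp3 = new p5.Filter();  // 'lowpass' is the default type
    lp3.setType('lowpass');     // explicit, same result
    lp1.freq(800);              // all share p5.Filter's methods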
+ return p5.Filter;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(7),__webpack_require__(25),__webpack_require__(2),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(n){"use strict";return n.Subtract=function(t){this.createInsOuts(2,0),this._sum=this.input[0]=this.output=new n.Gain,this._neg=new n.Negate,this._param=this.input[1]=new n.Signal(t),this._param.chain(this._neg,this._sum)},n.extend(n.Subtract,n.Signal),n.Subtract.prototype.dispose=function(){return n.prototype.dispose.call(this),this._neg.dispose(),this._neg=null,this._sum.disconnect(),this._sum=null,this._param.dispose(),this._param=null,this},n.Subtract}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+(function(global) {var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;
+
+global.TONE_SILENCE_VERSION_LOGGING = true;
+!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(35), __webpack_require__(12), __webpack_require__(0)], __WEBPACK_AMD_DEFINE_RESULT__ = (function (StartAudioContext, Context, Tone) {
+ var audiocontext = new window.AudioContext();
+
+ Tone.context.dispose();
+ Tone.setContext(audiocontext);
/**
- * Returns true if a p5.SoundFile is playing, false if not (i.e.
- * paused or stopped).
+ * Returns the Audio Context for this sketch. Useful for users
+ * who would like to dig deeper into the Web Audio API.
 *
- * @method isPlaying
- * @return {Boolean}
- */
- p5.SoundFile.prototype.isPlaying = function () {
-   return this._playing;
- };
- /**
- * Returns true if a p5.SoundFile is paused, false if not (i.e.
- * playing or stopped).
+ * Some browsers require users to startAudioContext
+ * with a user gesture, such as touchStarted in the example below.
 *
- * @method isPaused
- * @return {Boolean}
+ * @for p5
+ * @method getAudioContext
+ * @return {Object} AudioContext for this sketch
+ * @example
+ *
+ * function draw() {
+ * background(255);
+ * textAlign(CENTER);
+ *
+ * if (getAudioContext().state !== 'running') {
+ * text('click to start audio', width/2, height/2);
+ * } else {
+ * text('audio is enabled', width/2, height/2);
+ * }
+ * }
+ *
+ * function touchStarted() {
+ * if (getAudioContext().state !== 'running') {
+ * getAudioContext().resume();
+ * }
+ * var synth = new p5.MonoSynth();
+ * synth.play('A4', 0.5, 0, 0.2);
+ * }
+ *
+ *
+ * It is not only a good practice to give users control over starting
+ * audio. This policy is enforced by many web browsers, including iOS and
+ * Google Chrome, which create the Web Audio API's
+ * Audio Context
+ * in a suspended state.
+ *
+ * In these browser-specific policies, sound will not play until a user
+ * interaction event (i.e. <code>mousePressed()</code>) explicitly resumes
+ * the AudioContext, or starts an audio node. This can be accomplished by
+ * calling <code>start()</code> on a <code>p5.Oscillator</code>,
+ * <code>play()</code> on a <code>p5.SoundFile</code>, or simply
+ * <code>userStartAudio()</code>.
+ * <code>userStartAudio()</code> starts the AudioContext on a user
+ * gesture. The default behavior will enable audio on any
+ * mouseUp or touchEnd event. It can also be placed in a specific
+ * interaction function, such as <code>mousePressed()</code> as in the
+ * example below. This method utilizes
+ * StartAudioContext
+ * , a library by Yotam Mann (MIT Licence, 2016).
+ * function setup() {
+ * // mimics the autoplay policy
+ * getAudioContext().suspend();
*
- * @method stop
- * @param {Number} [startTime] (optional) schedule event to occur
- * in seconds from now
+ * let mySynth = new p5.MonoSynth();
+ *
+ * // This won't play until the context has resumed
+ * mySynth.play('A6');
+ * }
+ * function draw() {
+ * background(220);
+ * textAlign(CENTER, CENTER);
+ * text(getAudioContext().state, width/2, height/2);
+ * }
+ * function mousePressed() {
+ * userStartAudio();
+ * }
+ *
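userStartAudio() also returns a Promise (see the removed example earlier in this diff), so setup code can wait for the context:

    // userStartAudio() resolves once the AudioContext is running
    function mousePressed() {
      userStartAudio().then(function () {
        // safe to start sources here
        console.log(getAudioContext().state); // 'running'
      });
    }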
+ * Creates a signal that oscillates between -1.0 and 1.0.
+ * By default, the oscillation takes the form of a sinusoidal
+ * shape ('sine'). Additional types include 'triangle',
+ * 'sawtooth' and 'square'. The frequency defaults to
+ * 440 oscillations per second (440Hz, equal to the pitch of an
+ * 'A' note).
* - * @method pan - * @param {Number} [panValue] Set the stereo panner - * @param {Number} [timeFromNow] schedule this event to happen - * seconds from now - * @example - *
+ * Set the type of oscillation with setType(), or by instantiating a
+ * specific oscillator: p5.SinOsc, p5.TriOsc, p5.SqrOsc, or p5.SawOsc.
+ *
*
- * let ball = {};
- * let soundFile;
+ * @class p5.Oscillator
+ * @constructor
+ * @param {Number} [freq] frequency defaults to 440Hz
+ * @param {String} [type] type of oscillator. Options:
+ * 'sine' (default), 'triangle',
+ * 'sawtooth', 'square'
+ * @example
+ *
+ * let osc, playing, freq, amp;
*
- * function preload() {
- * soundFormats('ogg', 'mp3');
- * soundFile = loadSound('assets/beatbox.mp3');
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playOscillator);
+ * osc = new p5.Oscillator('sine');
* }
*
* function draw() {
- * background(0);
- * ball.x = constrain(mouseX, 0, width);
- * ellipse(ball.x, height/2, 20, 20)
+ * background(220)
+ * freq = constrain(map(mouseX, 0, width, 100, 500), 100, 500);
+ * amp = constrain(map(mouseY, height, 0, 0, 1), 0, 1);
+ *
+ * text('tap to play', 20, 20);
+ * text('freq: ' + freq, 20, 40);
+ * text('amp: ' + amp, 20, 60);
+ *
+ * if (playing) {
+ * // smooth the transitions by 0.1 seconds
+ * osc.freq(freq, 0.1);
+ * osc.amp(amp, 0.1);
+ * }
* }
*
- * function mousePressed(){
- * // map the ball's x location to a panning degree
- * // between -1.0 (left) and 1.0 (right)
- * let panning = map(ball.x, 0., width,-1.0, 1.0);
- * soundFile.pan(panning);
- * soundFile.play();
+ * function playOscillator() {
+ * // starting an oscillator on a user gesture will enable audio
+ * // in browsers that have a strict autoplay policy.
+ * // See also: userStartAudio();
+ * osc.start();
+ * playing = true;
* }
- *
- */
- p5.SoundFile.prototype.pan = function (pval, tFromNow) {
- this.panPosition = pval;
- this.panner.pan(pval, tFromNow);
- };
- /**
- * Returns the current stereo pan position (-1.0 to 1.0)
*
- * @method getPan
- * @return {Number} Returns the stereo pan setting of the Oscillator
- * as a number between -1.0 (left) and 1.0 (right).
- * 0.0 is center and default.
+ * function mouseReleased() {
+ * // ramp amplitude to 0 over 0.5 seconds
+ * osc.amp(0, 0.5);
+ * playing = false;
+ * }
+ *
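Besides numbers, osc.amp() also accepts a signal source (as noted in the masterVolume docs above). A hedged sketch of LFO-style amplitude modulation:

    // Amplitude modulation: a slow oscillator shapes the volume of another.
    var carrier = new p5.Oscillator('sine'); // audible tone, 440 Hz by default
    var lfo = new p5.Oscillator(2);          // 2 Hz modulator
    lfo.disconnect();                        // keep the LFO out of the speakers
    carrier.start();
    lfo.start();
    carrier.amp(lfo);                        // volume now follows the LFO signal
    // an unscaled oscillator swings -1..1; lfo.scale() can remap that range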
- * let song;
- *
- * function preload() {
- * song = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * song.loop();
- * }
- *
- * function draw() {
- * background(200);
- *
- * // Set the rate to a range between 0.1 and 4
- * // Changing the rate also alters the pitch
- * let speed = map(mouseY, 0.1, height, 0, 2);
- * speed = constrain(speed, 0.01, 4);
- * song.rate(speed);
+ * Start an oscillator.
*
- * // Draw a circle to show what is going on
- * stroke(0);
- * fill(51, 100);
- * ellipse(mouseX, 100, 48, 48);
- * }
- *
- *
- *
- * let drum;
- *
- * function preload() {
- * drum = loadSound('assets/drum.mp3');
- * }
+ * let osc;
*
* function setup() {
- * drum.reverseBuffer();
- * drum.play();
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playOscillator);
+ * osc = new p5.Oscillator(300);
+ * background(220);
+ * text('tap to play', 20, 20);
* }
*
- *
- * Constructor: <code>new p5.SinOsc()</code>.
+ * This creates a Sine Wave Oscillator and is
+ * equivalent to <code>new p5.Oscillator('sine')</code>
+ * or creating a p5.Oscillator and then calling
+ * its method <code>setType('sine')</code>.
+ * See p5.Oscillator for methods.
*
- * The process involves running the soundfile through a lowpass filter, and finding all of the
- * peaks above the initial threshold. If the total number of peaks are below the minimum number of peaks,
- * it decreases the threshold and re-runs the analysis until either minPeaks or minThreshold are reached.
+ * @class p5.SinOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+
+ p5.SinOsc = function (freq) {
+ p5.Oscillator.call(this, freq, 'sine');
+ };
+
+ p5.SinOsc.prototype = Object.create(p5.Oscillator.prototype);
+ /**
+ * Constructor: <code>new p5.TriOsc()</code>.
+ * This creates a Triangle Wave Oscillator and is
+ * equivalent to <code>new p5.Oscillator('triangle')</code>
+ * or creating a p5.Oscillator and then calling
+ * its method <code>setType('triangle')</code>.
+ * See p5.Oscillator for methods.
*
- * @method processPeaks
- * @param {Function} callback a function to call once this data is returned
- * @param {Number} [initThreshold] initial threshold defaults to 0.9
- * @param {Number} [minThreshold] minimum threshold defaults to 0.22
- * @param {Number} [minPeaks] minimum number of peaks defaults to 200
- * @return {Array} Array of timestamped peaks
+ * @class p5.TriOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
*/
- p5.SoundFile.prototype.processPeaks = function (callback, _initThreshold, _minThreshold, _minPeaks) {
- var bufLen = this.buffer.length;
- var sampleRate = this.buffer.sampleRate;
- var buffer = this.buffer;
- var allPeaks = [];
- var initialThreshold = _initThreshold || 0.9, threshold = initialThreshold, minThreshold = _minThreshold || 0.22, minPeaks = _minPeaks || 200;
- // Create offline context
- var offlineContext = new window.OfflineAudioContext(1, bufLen, sampleRate);
- // create buffer source
- var source = offlineContext.createBufferSource();
- source.buffer = buffer;
- // Create filter. TO DO: allow custom setting of filter
- var filter = offlineContext.createBiquadFilter();
- filter.type = 'lowpass';
- source.connect(filter);
- filter.connect(offlineContext.destination);
- // start playing at time:0
- source.start(0);
- offlineContext.startRendering();
- // Render the song
- // act on the result
- offlineContext.oncomplete = function (e) {
- if (!self.panner)
- return;
- var filteredBuffer = e.renderedBuffer;
- var bufferData = filteredBuffer.getChannelData(0);
- // step 1:
- // create Peak instances, add them to array, with strength and sampleIndex
- do {
- allPeaks = getPeaksAtThreshold(bufferData, threshold);
- threshold -= 0.005;
- } while (Object.keys(allPeaks).length < minPeaks && threshold >= minThreshold);
- // step 2:
- // find intervals for each peak in the sampleIndex, add tempos array
- var intervalCounts = countIntervalsBetweenNearbyPeaks(allPeaks);
- // step 3: find top tempos
- var groups = groupNeighborsByTempo(intervalCounts, filteredBuffer.sampleRate);
- // sort top intervals
- var topTempos = groups.sort(function (intA, intB) {
- return intB.count - intA.count;
- }).splice(0, 5);
- // set this SoundFile's tempo to the top tempo ??
- this.tempo = topTempos[0].tempo;
- // step 4:
- // new array of peaks at top tempo within a bpmVariance
- var bpmVariance = 5;
- var tempoPeaks = getPeaksAtTopTempo(allPeaks, topTempos[0].tempo, filteredBuffer.sampleRate, bpmVariance);
- callback(tempoPeaks);
- };
+
+ p5.TriOsc = function (freq) {
+ p5.Oscillator.call(this, freq, 'triangle');
};
- // process peaks
- var Peak = function (amp, i) {
- this.sampleIndex = i;
- this.amplitude = amp;
- this.tempos = [];
- this.intervals = [];
+
+ p5.TriOsc.prototype = Object.create(p5.Oscillator.prototype);
+ /**
+ * Constructor: <code>new p5.SawOsc()</code>.
+ * This creates a SawTooth Wave Oscillator and is
+ * equivalent to <code>new p5.Oscillator('sawtooth')</code>
+ * or creating a p5.Oscillator and then calling
+ * its method <code>setType('sawtooth')</code>.
+ * See p5.Oscillator for methods.
+ *
+ * @class p5.SawOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+ p5.SawOsc = function (freq) {
+ p5.Oscillator.call(this, freq, 'sawtooth');
};
- // 1. for processPeaks() Function to identify peaks above a threshold
- // returns an array of peak indexes as frames (samples) of the original soundfile
- function getPeaksAtThreshold(data, threshold) {
- var peaksObj = {};
- var length = data.length;
- for (var i = 0; i < length; i++) {
- if (data[i] > threshold) {
- var amp = data[i];
- var peak = new Peak(amp, i);
- peaksObj[i] = peak;
- // Skip forward ~ 1/8s to get past this peak.
- i += 6000;
+
+ p5.SawOsc.prototype = Object.create(p5.Oscillator.prototype);
+ /**
+ * Constructor: <code>new p5.SqrOsc()</code>.
+ * This creates a Square Wave Oscillator and is
+ * equivalent to <code>new p5.Oscillator('square')</code>
+ * or creating a p5.Oscillator and then calling
+ * its method <code>setType('square')</code>.
+ * See p5.Oscillator for methods.
+ *
+ * @class p5.SqrOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+ p5.SqrOsc = function (freq) {
+ p5.Oscillator.call(this, freq, 'square');
+ };
+
+ p5.SqrOsc.prototype = Object.create(p5.Oscillator.prototype);
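All four subclasses share p5.Oscillator's setType(), so a single oscillator can cycle through the same waveforms. A short usage sketch:

    // Cycle one oscillator through the four basic waveforms on each click
    var osc = new p5.Oscillator(220);
    var types = ['sine', 'triangle', 'sawtooth', 'square'];
    var idx = 0;

    function mousePressed() {
      userStartAudio();
      osc.setType(types[idx % types.length]);
      idx++;
      osc.start();
    }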
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(8)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Timeline=function(){var e=this.optionsObject(arguments,["memory"],i.Timeline.defaults);this._timeline=[],this._toRemove=[],this._iterating=!1,this.memory=e.memory},i.extend(i.Timeline),i.Timeline.defaults={memory:1/0},Object.defineProperty(i.Timeline.prototype,"length",{get:function(){return this._timeline.length}}),i.Timeline.prototype.add=function(e){if(this.isUndef(e.time))throw new Error("Tone.Timeline: events must have a time attribute");if(this._timeline.length){var i=this._search(e.time);this._timeline.splice(i+1,0,e)}else this._timeline.push(e);if(this.length>this.memory){var t=this.length-this.memory;this._timeline.splice(0,t)}return this},i.Timeline.prototype.remove=function(e){if(this._iterating)this._toRemove.push(e);else{var i=this._timeline.indexOf(e);-1!==i&&this._timeline.splice(i,1)}return this},i.Timeline.prototype.get=function(e){var i=this._search(e);return-1!==i?this._timeline[i]:null},i.Timeline.prototype.peek=function(){return this._timeline[0]},i.Timeline.prototype.shift=function(){return this._timeline.shift()},i.Timeline.prototype.getAfter=function(e){var i=this._search(e);return i+1
+ * let monoSynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ * background(220);
+ * textAlign(CENTER);
+ * text('tap to play', width/2, height/2);
+ *
+ * monoSynth = new p5.MonoSynth();
+ * }
+ *
+ * function playSynth() {
+ * userStartAudio();
+ *
+ * let note = random(['Fb4', 'G4']);
+ * // note velocity (volume, from 0 to 1)
+ * let velocity = random();
+ * // time from now (in seconds)
+ * let time = 0;
+ * // note duration (in seconds)
+ * let dur = 1/6;
+ *
+ * monoSynth.play(note, velocity, time, dur);
+ * }
+ *
+ * let monoSynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ * background(220);
+ * textAlign(CENTER);
+ * text('tap to play', width/2, height/2);
+ *
+ * monoSynth = new p5.MonoSynth();
+ * }
+ *
+ * function playSynth() {
+ * userStartAudio();
+ *
+ * let note = random(['Fb4', 'G4']);
+ * // note velocity (volume, from 0 to 1)
+ * let velocity = random();
+ * // time from now (in seconds)
+ * let time = 0;
+ * // note duration (in seconds)
+ * let dur = 1/6;
+ *
+ * monoSynth.play(note, velocity, time, dur);
+ * }
+ *
+ * let monoSynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(triggerAttack);
+ * background(220);
+ * text('tap here for attack, let go to release', 5, 20, width - 20);
+ * monoSynth = new p5.MonoSynth();
+ * }
+ *
+ * function triggerAttack() {
+ * userStartAudio();
+ *
+ * monoSynth.triggerAttack("E3");
+ * }
+ *
+ * function mouseReleased() {
+ * monoSynth.triggerRelease();
+ * }
+ *
+ * let monoSynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(triggerAttack);
+ * background(220);
+ * text('tap here for attack, let go to release', 5, 20, width - 20);
+ * monoSynth = new p5.MonoSynth();
+ * }
+ *
+ * function triggerAttack() {
+ * userStartAudio();
+ *
+ * monoSynth.triggerAttack("E3");
+ * }
+ *
+ * function mouseReleased() {
+ * monoSynth.triggerRelease();
+ * }
+ *
+ * increased to 1.0 (using <code>setRange</code>),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ */
+
+
+ p5.MonoSynth.prototype.setADSR = function (attack, decay, sustain, release) {
+ this.env.setADSR(attack, decay, sustain, release);
+ };
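A short sketch of the setADSR() parameters documented above (the envelope values are arbitrary examples):

    // Pluck-like envelope: fast attack, quick decay, low sustain, short release
    var monoSynth = new p5.MonoSynth();

    function mousePressed() {
      userStartAudio();
      monoSynth.setADSR(0.01, 0.2, 0.1, 0.3); // attack, decay, susRatio, release
      monoSynth.play('C4', 0.8, 0, 0.5);      // note, velocity, time, duration
    }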
+ /**
+ * Getters and Setters
+ * @property {Number} attack
+ * @for p5.MonoSynth
+ */
+
+ /**
+ * @property {Number} decay
+ * @for p5.MonoSynth
+ */
+
+ /**
+ * @property {Number} sustain
+ * @for p5.MonoSynth
+ */
+
+ /**
+ * @property {Number} release
+ * @for p5.MonoSynth
+ */
+
+
+ Object.defineProperties(p5.MonoSynth.prototype, {
+ 'attack': {
+ get: function get() {
+ return this.env.aTime;
+ },
+ set: function set(attack) {
+ this.env.setADSR(attack, this.env.dTime, this.env.sPercent, this.env.rTime);
+ }
+ },
+ 'decay': {
+ get: function get() {
+ return this.env.dTime;
+ },
+ set: function set(decay) {
+ this.env.setADSR(this.env.aTime, decay, this.env.sPercent, this.env.rTime);
+ }
+ },
+ 'sustain': {
+ get: function get() {
+ return this.env.sPercent;
+ },
+ set: function set(sustain) {
+ this.env.setADSR(this.env.aTime, this.env.dTime, sustain, this.env.rTime);
+ }
+ },
+ 'release': {
+ get: function get() {
+ return this.env.rTime;
+ },
+ set: function set(release) {
+ this.env.setADSR(this.env.aTime, this.env.dTime, this.env.sPercent, release);
}
- i++;
}
- return peaksObj;
- }
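The removed processPeaks() helper above scans for samples over a threshold and skips ahead past each hit. The core idea in isolation, as a standalone sketch (the original hard-codes i += 6000, roughly 1/8 s at 44.1 kHz):

    // Record a peak when a sample exceeds the threshold, then jump ~1/8 s ahead
    function peaksAtThreshold(data, threshold, sampleRate) {
      var peaks = [];
      var skip = Math.round(sampleRate / 8);
      for (var i = 0; i < data.length; i++) {
        if (data[i] > threshold) {
          peaks.push({ sampleIndex: i, amplitude: data[i] });
          i += skip;
        }
      }
      return peaks;
    }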
- // 2. for processPeaks()
- function countIntervalsBetweenNearbyPeaks(peaksObj) {
- var intervalCounts = [];
- var peaksArray = Object.keys(peaksObj).sort();
- for (var index = 0; index < peaksArray.length; index++) {
- // find intervals in comparison to nearby peaks
- for (var i = 0; i < 10; i++) {
- var startPeak = peaksObj[peaksArray[index]];
- var endPeak = peaksObj[peaksArray[index + i]];
- if (startPeak && endPeak) {
- var startPos = startPeak.sampleIndex;
- var endPos = endPeak.sampleIndex;
- var interval = endPos - startPos;
- // add a sample interval to the startPeak in the allPeaks array
- if (interval > 0) {
- startPeak.intervals.push(interval);
+ });
+ /**
+ * MonoSynth amp
+ * @method amp
+ * @for p5.MonoSynth
+ * @param {Number} vol desired volume
+ * @param {Number} [rampTime] Time to reach new volume
+ * @return {Number} new volume value
+ */
+
+ p5.MonoSynth.prototype.amp = function (vol, rampTime) {
+ var t = rampTime || 0;
+
+ if (typeof vol !== 'undefined') {
+ this.oscillator.amp(vol, t);
+ }
+
+ return this.oscillator.amp().value;
+ };
+ /**
+ * Connect to a p5.sound / Web Audio object.
+ *
+ * @method connect
+ * @for p5.MonoSynth
+ * @param {Object} unit A p5.sound or Web Audio object
+ */
+
+
+ p5.MonoSynth.prototype.connect = function (unit) {
+ var u = unit || p5sound.input;
+ this.output.connect(u.input ? u.input : u);
+ };
+ /**
+ * Disconnect all outputs
+ *
+ * @method disconnect
+ * @for p5.MonoSynth
+ */
+
+
+ p5.MonoSynth.prototype.disconnect = function () {
+ if (this.output) {
+ this.output.disconnect();
+ }
+ };
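connect()/disconnect() let a MonoSynth feed any effect instead of the master bus. For example, assuming p5.Reverb (one of the library's effects, which exposes an input like other p5.Effect subclasses):

    // Route a MonoSynth through a reverb rather than straight to soundOut
    var synth = new p5.MonoSynth();
    var reverb = new p5.Reverb();
    synth.disconnect();     // detach from the default master input
    synth.connect(reverb);  // synth -> reverb -> master output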
+ /**
+ * Get rid of the MonoSynth and free up its resources / memory.
+ *
+ * @method dispose
+ * @for p5.MonoSynth
+ */
+
+
+ p5.MonoSynth.prototype.dispose = function () {
+ AudioVoice.prototype.dispose.apply(this);
+
+ if (this.env) {
+ this.env.dispose();
+ }
+
+ if (this.oscillator) {
+ this.oscillator.dispose();
+ }
+ };
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function () {
+ var p5sound = __webpack_require__(1);
+ /**
+ * Base class for monophonic synthesizers. Any extensions of this class
+ * should follow the API and implement the methods below in order to
+ * remain compatible with p5.PolySynth();
+ *
+ * @class p5.AudioVoice
+ * @constructor
+ */
+
+
+ p5.AudioVoice = function () {
+ this.ac = p5sound.audiocontext;
+ this.output = this.ac.createGain();
+ this.connect();
+ p5sound.soundArray.push(this);
+ };
+
+ p5.AudioVoice.prototype.play = function (note, velocity, secondsFromNow, sustime) {};
+
+ p5.AudioVoice.prototype.triggerAttack = function (note, velocity, secondsFromNow) {};
+
+ p5.AudioVoice.prototype.triggerRelease = function (secondsFromNow) {};
+
+ p5.AudioVoice.prototype.amp = function (vol, rampTime) {};
+ /**
+ * Connect to p5 objects or Web Audio Nodes
+ * @method connect
+ * @for p5.AudioVoice
+ * @param {Object} unit
+ */
+
+
+ p5.AudioVoice.prototype.connect = function (unit) {
+ var u = unit || p5sound.input;
+ this.output.connect(u.input ? u.input : u);
+ };
+ /**
+ * Disconnect from soundOut
+ * @method disconnect
+ * @for p5.AudioVoice
+ */
+
+
+ p5.AudioVoice.prototype.disconnect = function () {
+ this.output.disconnect();
+ };
+
+ p5.AudioVoice.prototype.dispose = function () {
+ if (this.output) {
+ this.output.disconnect();
+ delete this.output;
+ }
+ };
+
+ return p5.AudioVoice;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
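Per the p5.AudioVoice contract above, a custom voice only needs to implement those methods to stay PolySynth-compatible. A bare-bones sketch (not library code; assumes notes arrive as MIDI numbers):

    // Minimal custom voice honoring the p5.AudioVoice API
    function MyVoice() {
      p5.AudioVoice.call(this);                 // sets up this.ac and this.output
      this.osc = new p5.Oscillator('sawtooth');
      this.osc.disconnect();
      this.osc.connect(this.output);
    }
    MyVoice.prototype = Object.create(p5.AudioVoice.prototype);

    MyVoice.prototype.triggerAttack = function (note, velocity, secondsFromNow) {
      this.osc.freq(midiToFreq(note));
      this.osc.start(secondsFromNow);
    };
    MyVoice.prototype.triggerRelease = function (secondsFromNow) {
      this.osc.stop(secondsFromNow);
    };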
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var TimelineSignal = __webpack_require__(14);
+
+ var noteToFreq = __webpack_require__(6).noteToFreq;
+ /**
+ * An AudioVoice is used as a single voice for sound synthesis.
+ * The PolySynth class holds an array of AudioVoice, and handles
+ * voice allocation, scheduling the notes to be played, and
+ * setting the parameters of each voice.
+ *
+ * @class p5.PolySynth
+ * @constructor
+ *
+ * @param {Number} [synthVoice] A monophonic synth voice inheriting
+ * the AudioVoice class. Defaults to p5.MonoSynth
+ * @param {Number} [maxVoices] Number of voices, defaults to 8;
+ * @example
+ *
+ * let polySynth;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ * background(220);
+ * text('click to play', 20, 20);
+ *
+ * polySynth = new p5.PolySynth();
+ * }
+ *
+ * function playSynth() {
+ * userStartAudio();
+ *
+ * // note duration (in seconds)
+ * let dur = 1.5;
+ *
+ * // time from now (in seconds)
+ * let time = 0;
+ *
+ * // velocity (volume, from 0 to 1)
+ * let vel = 0.1;
+ *
+ * // notes can overlap with each other
+ * polySynth.play('G2', vel, 0, dur);
+ * polySynth.play('C3', vel, time += 1/3, dur);
+ * polySynth.play('G3', vel, time += 1/3, dur);
+ * }
+ *
+ * increased to 1.0 (using setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ **/
+
+
+ p5.PolySynth.prototype.noteADSR = function (note, a, d, s, r, timeFromNow) {
+ var now = p5sound.audiocontext.currentTime;
+ var timeFromNow = timeFromNow || 0;
+ var t = now + timeFromNow;
+ this.audiovoices[this.notes[note].getValueAtTime(t)].setADSR(a, d, s, r);
+ };
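(A short hedged sketch of per-note envelopes: assumes polySynth is a
p5.PolySynth and audio was already started by a user gesture.)

polySynth.noteAttack('A4', 0.5);            // hold the note
polySynth.noteADSR('A4', 0.2, 0.1, 0.5, 1); // attack, decay, susRatio, release
polySynth.noteRelease('A4');                // release it with that envelope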
+ /**
+ * Set the PolySynth's global envelope. This method modifies the envelopes of each
+ * monosynth so that all notes are played with this envelope.
+ *
+ * @method setADSR
+ * @for p5.PolySynth
+ * @param {Number} [attackTime] Time (in seconds) before envelope
+ * reaches Attack Level
+ * @param {Number} [decayTime] Time (in seconds) before envelope
+ * reaches Decay/Sustain Level
+ * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
+ * where 1.0 = attackLevel, 0.0 = releaseLevel.
+ * The susRatio determines the decayLevel and the level at which the
+ * sustain portion of the envelope will sustain.
+ * For example, if attackLevel is 0.4, releaseLevel is 0,
+ * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
+ * increased to 1.0 (using setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ **/
+
+
+ p5.PolySynth.prototype.setADSR = function (a, d, s, r) {
+ this.audiovoices.forEach(function (voice) {
+ voice.setADSR(a, d, s, r);
+ });
+ };
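(A minimal sketch, assuming polySynth = new p5.PolySynth() and a prior user
gesture to start audio: after setADSR, every voice shares one envelope.)

polySynth.setADSR(0.01, 0.1, 0.3, 0.5); // attack, decay, susRatio, release
polySynth.play('C4', 0.2, 0, 1);        // this note is shaped by that envelope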
+ /**
+ * Trigger the Attack and Decay portions of a MonoSynth.
+ * Similar to holding down a key on a piano, but it will
+ * hold the sustain level until you let go.
+ *
+ * @method noteAttack
+ * @for p5.PolySynth
+ * @param {Number} [note] midi note on which attack should be triggered.
+ * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1).
+ * @param {Number} [secondsFromNow] time from now (in seconds)
+ * @example
+ *
+ * let polySynth = new p5.PolySynth();
+ * let pitches = ['G', 'D', 'G', 'C'];
+ * let octaves = [2, 3, 4];
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playChord);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * }
+ *
+ * function playChord() {
+ * userStartAudio();
+ *
+ * // play a chord: multiple notes at the same time
+ * for (let i = 0; i < 4; i++) {
+ * let note = random(pitches) + random(octaves);
+ * polySynth.noteAttack(note, 0.1);
+ * }
+ * }
+ *
+ * function mouseReleased() {
+ * // release all voices
+ * polySynth.noteRelease();
+ * }
+ *
+ * SoundFile object with a path to a file.
 *
- * Accepts a callback function, a time (in seconds) at which to trigger
- * the callback, and an optional parameter for the callback.
+ * The p5.SoundFile may not be available immediately because
+ * it loads the file information asynchronously.
 *
- * Time will be passed as the first parameter to the callback function,
- * and param will be the second parameter.
+ * To do something with the sound as soon as it loads
+ * pass the name of a function as the second parameter.
+ *
+ * Only one file path is required. However, audio file formats
+ * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all
+ * web browsers. If you want to ensure compatibility, instead of a single
+ * file path, you may include an Array of filepaths, and the browser will
+ * choose a format that works.
 *
+ * @class p5.SoundFile
+ * @constructor
+ * @param {String|Array} path path to a sound file (String). Optionally,
+ *                            you may include multiple file formats in
+ *                            an array. Alternately, accepts an object
+ *                            from the HTML5 File API, or a p5.File.
+ * @param {Function} [successCallback] Name of a function to call once file loads
+ * @param {Function} [errorCallback] Name of a function to call if file fails to
+ *                                   load. This function will receive an error or
+ *                                   XMLHttpRequest object with information
+ *                                   about what went wrong.
+ * @param {Function} [whileLoadingCallback] Name of a function to call while file
+ *                                          is loading. That function will
+ *                                          receive progress of the request to
+ *                                          load the sound file
+ *                                          (between 0 and 1) as its first
+ *                                          parameter. This progress
+ *                                          does not account for the additional
+ *                                          time needed to decode the audio data.
 *
- * @method addCue
- * @param {Number} time Time in seconds, relative to this media
- *                      element's playback. For example, to trigger
- *                      an event every time playback reaches two
- *                      seconds, pass in the number 2. This will be
- *                      passed as the first parameter to
- *                      the callback function.
- * @param {Function} callback Name of a function that will be
- *                            called at the given time. The callback will
- *                            receive time and (optionally) param as its
- *                            two parameters.
- * @param {Object} [value] An object to be passed as the
- *                         second parameter to the
- *                         callback function.
- * @return {Number} id ID of this cue,
- *                     useful for removeCue(id)
 * @example
 *
* let mySound;
* function preload() {
- * mySound = loadSound('assets/beat.mp3');
+ * soundFormats('mp3', 'ogg');
+ * mySound = loadSound('assets/doorbell');
* }
*
* function setup() {
- * background(0);
- * noStroke();
- * fill(255);
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap here to play', 10, 20);
+ * }
*
- * // schedule calls to changeText
- * mySound.addCue(0.50, changeText, "hello" );
- * mySound.addCue(1.00, changeText, "p5" );
- * mySound.addCue(1.50, changeText, "what" );
- * mySound.addCue(2.00, changeText, "do" );
- * mySound.addCue(2.50, changeText, "you" );
- * mySound.addCue(3.00, changeText, "want" );
- * mySound.addCue(4.00, changeText, "to" );
- * mySound.addCue(5.00, changeText, "make" );
- * mySound.addCue(6.00, changeText, "?" );
+ * function canvasPressed() {
+ * // playing a sound file on a user gesture
+ * // is equivalent to `userStartAudio()`
+ * mySound.play();
* }
+ *
+ * let mySound;
+ * function preload() {
+ * soundFormats('mp3', 'ogg');
+ * mySound = loadSound('assets/doorbell');
* }
*
- * function mouseClicked() {
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * if (mySound.isPlaying() ) {
- * mySound.stop();
- * } else {
- * mySound.play();
- * }
- * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap here to play', 10, 20);
+ * }
+ *
+ * function canvasPressed() {
+ * // playing a sound file on a user gesture
+ * // is equivalent to `userStartAudio()`
+ * mySound.play();
* }
*
+ * p5.SoundFile has two play modes: restart and
+ * sustain. Play Mode determines what happens to a
+ * p5.SoundFile if it is triggered while in the middle of playback.
+ * In sustain mode, playback will continue simultaneous to the
+ * new playback. In restart mode, play() will stop playback
+ * and start over. With untilDone, a sound will play only if it's
+ * not already playing. Sustain is the default mode.
*
- * @method save
- * @param {String} [fileName] name of the resulting .wav file.
- * @example
+ * @method playMode
+ * @for p5.SoundFile
+ * @param {String} str 'restart' or 'sustain' or 'untilDone'
+ * @example
*
- * let inp, button, mySound;
- * let fileName = 'cool';
- * function preload() {
- * mySound = loadSound('assets/doorbell.mp3');
+ * let mySound;
+ * function preload(){
+ * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
* }
* function setup() {
- * btn = createButton('click to save file');
- * btn.position(0, 0);
- * btn.mouseClicked(handleMouseClick);
- * }
- *
- * function handleMouseClick() {
- * mySound.save(fileName);
- * }
- *
- *
- * function preload() {
- * mySound = loadSound('assets/doorbell.mp3');
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * noFill();
+ * rect(0, height/2, width - 1, height/2 - 1);
+ * rect(0, 0, width - 1, height/2);
+ * textAlign(CENTER, CENTER);
+ * fill(20);
+ * text('restart', width/2, 1 * height/4);
+ * text('sustain', width/2, 3 * height/4);
* }
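(A hedged sketch of switching modes at runtime, assuming mySound was loaded
as in the example above.)

mySound.playMode('restart');   // retriggering now stops the old playback first
mySound.play();
mySound.playMode('untilDone'); // play() is ignored while the sound is playing
mySound.play();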
- *
- * function setup() {
- * noCanvas();
- * let soundBlob = mySound.getBlob();
- *
- * // Now we can send the blob to a server...
- * let serverUrl = 'https://jsonplaceholder.typicode.com/posts';
- * let httpRequestOptions = {
- * method: 'POST',
- * body: new FormData().append('soundBlob', soundBlob),
- * headers: new Headers({
- * 'Content-Type': 'multipart/form-data'
- * })
- * };
- * httpDo(serverUrl, httpRequestOptions);
- *
- * // We can also create an `ObjectURL` pointing to the Blob
- * let blobUrl = URL.createObjectURL(soundBlob);
- *
- * // The `
- * let sound, amplitude, cnv;
- *
- * function preload(){
- * sound = loadSound('assets/beat.mp3');
+ * let soundFile;
+ * function preload() {
+ * soundFormats('ogg', 'mp3');
+ * soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3');
* }
* function setup() {
- * cnv = createCanvas(100,100);
- * amplitude = new p5.Amplitude();
- *
- * // start / stop the sound when canvas is clicked
- * cnv.mouseClicked(function() {
- * if (sound.isPlaying() ){
- * sound.stop();
- * } else {
- * sound.play();
- * }
- * });
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to play, release to pause', 10, 20, width - 20);
* }
- * function draw() {
- * background(0);
- * fill(255);
- * let level = amplitude.getLevel();
- * let size = map(level, 0, 1, 0, 200);
- * ellipse(width/2, height/2, size, size);
+ * function canvasPressed() {
+ * soundFile.loop();
+ * background(0, 200, 50);
* }
- *
- *
- * function preload(){
- * sound1 = loadSound('assets/beat.mp3');
- * sound2 = loadSound('assets/drum.mp3');
+ * let soundFile;
+ * let loopStart = 0.5;
+ * let loopDuration = 0.2;
+ * function preload() {
+ * soundFormats('ogg', 'mp3');
+ * soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3');
* }
- * function setup(){
- * amplitude = new p5.Amplitude();
- * sound1.play();
- * sound2.play();
- * amplitude.setInput(sound2);
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to play, release to pause', 10, 20, width - 20);
* }
- * function draw() {
- * background(0);
- * fill(255);
- * let level = amplitude.getLevel();
- * let size = map(level, 0, 1, 0, 200);
- * ellipse(width/2, height/2, size, size);
+ * function canvasPressed() {
+ * soundFile.loop();
+ * background(0, 200, 50);
* }
- * function mouseClicked(){
- * sound1.stop();
- * sound2.stop();
+ * function mouseReleased() {
+ * soundFile.pause();
+ * background(220);
* }
- *
+ * To fade, provide a rampTime parameter. For more
+ * complex fades, see the Envelope class.
+ *
+ * Alternately, you can pass in a signal source such as an
+ * oscillator to modulate the amplitude with an audio signal.
+ *
+ * @method setVolume
+ * @for p5.SoundFile
+ * @param {Number|Object} volume Volume (amplitude) between 0.0
+ * and 1.0 or modulating signal/oscillator
+ * @param {Number} [rampTime] Fade for t seconds
+ * @param {Number} [timeFromNow] Schedule this event to happen at
+ * t seconds in the future
+ */
+
+
+ p5.SoundFile.prototype.setVolume = function (vol, _rampTime, _tFromNow) {
+ if (typeof vol === 'number') {
+ var rampTime = _rampTime || 0;
+ var tFromNow = _tFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+ var currentVol = this.output.gain.value;
+ this.output.gain.cancelScheduledValues(now + tFromNow);
+ this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
+ this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
+ } else if (vol) {
+ vol.connect(this.output.gain);
} else {
- this.output.connect(this.panner.connect(p5sound.input));
- }
- };
- p5.Amplitude.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
+ return this.output.gain;
}
- };
- // TO DO make this stereo / dependent on # of audio channels
- p5.Amplitude.prototype._audioProcess = function (event) {
- for (var channel = 0; channel < event.inputBuffer.numberOfChannels; channel++) {
- var inputBuffer = event.inputBuffer.getChannelData(channel);
- var bufLength = inputBuffer.length;
- var total = 0;
- var sum = 0;
- var x;
- for (var i = 0; i < bufLength; i++) {
- x = inputBuffer[i];
- if (this.normalize) {
- total += Math.max(Math.min(x / this.volMax, 1), -1);
- sum += Math.max(Math.min(x / this.volMax, 1), -1) * Math.max(Math.min(x / this.volMax, 1), -1);
- } else {
- total += x;
- sum += x * x;
- }
- }
- var average = total / bufLength;
- // ... then take the square root of the sum.
- var rms = Math.sqrt(sum / bufLength);
- this.stereoVol[channel] = Math.max(rms, this.stereoVol[channel] * this.smoothing);
- this.stereoAvg[channel] = Math.max(average, this.stereoVol[channel] * this.smoothing);
- this.volMax = Math.max(this.stereoVol[channel], this.volMax);
- }
- // add volume from all channels together
- var self = this;
- var volSum = this.stereoVol.reduce(function (previousValue, currentValue, index) {
- self.stereoVolNorm[index - 1] = Math.max(Math.min(self.stereoVol[index - 1] / self.volMax, 1), 0);
- self.stereoVolNorm[index] = Math.max(Math.min(self.stereoVol[index] / self.volMax, 1), 0);
- return previousValue + currentValue;
- });
- // volume is average of channels
- this.volume = volSum / this.stereoVol.length;
- // normalized value
- this.volNorm = Math.max(Math.min(this.volume / this.volMax, 1), 0);
+ };
+
+
+ p5.SoundFile.prototype.amp = p5.SoundFile.prototype.setVolume;
+
+ p5.SoundFile.prototype.fade = p5.SoundFile.prototype.setVolume;
+
+ p5.SoundFile.prototype.getVolume = function () {
+ return this.output.gain.value;
};
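(A brief usage sketch for the ramp and scheduling parameters, assuming
mySound is a loaded p5.SoundFile.)

mySound.setVolume(0.3);     // jump to 30% volume immediately
mySound.setVolume(1, 2);    // fade to full volume over 2 seconds
mySound.setVolume(0, 1, 3); // 3 seconds from now, fade to silence over 1 second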
/**
- * Returns a single Amplitude reading at the moment it is called.
- * For continuous readings, run in the draw loop.
+ * Set the stereo panning of a p5.sound object to
+ * a floating point number between -1.0 (left) and 1.0 (right).
+ * Default is 0.0 (center).
*
- * @method getLevel
- * @param {Number} [channel] Optionally return only channel 0 (left) or 1 (right)
- * @return {Number} Amplitude as a number between 0.0 and 1.0
- * @example
- *
- * function preload(){
- * sound = loadSound('assets/beat.mp3');
- * }
- * function setup() {
- * amplitude = new p5.Amplitude();
- * sound.play();
+ * @method pan
+ * @for p5.SoundFile
+ * @param {Number} [panValue] Set the stereo panner
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ * @example
+ *
+ * let ballX = 0;
+ * let soundFile;
+ *
+ * function preload() {
+ * soundFormats('ogg', 'mp3');
+ * soundFile = loadSound('assets/beatbox.mp3');
* }
+ *
* function draw() {
- * background(0);
- * fill(255);
- * let level = amplitude.getLevel();
- * let size = map(level, 0, 1, 0, 200);
- * ellipse(width/2, height/2, size, size);
- * }
- * function mouseClicked(){
- * sound.stop();
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * ballX = constrain(mouseX, 0, width);
+ * ellipse(ballX, height/2, 20, 20);
* }
- *
- */
- p5.Amplitude.prototype.getLevel = function (channel) {
- if (typeof channel !== 'undefined') {
- if (this.normalize) {
- return this.stereoVolNorm[channel];
- } else {
- return this.stereoVol[channel];
- }
- } else if (this.normalize) {
- return this.volNorm;
- } else {
- return this.volume;
- }
- };
- /**
- * Determines whether the results of Amplitude.process() will be
- * Normalized. To normalize, Amplitude finds the difference the
- * loudest reading it has processed and the maximum amplitude of
- * 1.0. Amplitude adds this difference to all values to produce
- * results that will reliably map between 0.0 and 1.0. However,
- * if a louder moment occurs, the amount that Normalize adds to
- * all the values will change. Accepts an optional boolean parameter
- * (true or false). Normalizing is off by default.
*
- * @method toggleNormalize
- * @param {boolean} [boolean] set normalize to true (1) or false (0)
+ * function canvasPressed(){
+ * // map the ball's x location to a panning degree
+ * // between -1.0 (left) and 1.0 (right)
+ * let panning = map(ballX, 0., width,-1.0, 1.0);
+ * soundFile.pan(panning);
+ * soundFile.play();
+ * }
+ *
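(The optional second parameter schedules the change; a short sketch assuming
soundFile is loaded and playing.)

soundFile.pan(-1);     // hard left immediately
soundFile.pan(1, 0.5); // hard right, 0.5 seconds from now
soundFile.pan(0, 1);   // back to center, 1 second from now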
- * FFT (Fast Fourier Transform) is an analysis algorithm that
- * isolates individual
- * audio frequencies within a waveform.
- *
- * Once instantiated, a p5.FFT object can return an array based on
- * two types of analyses: • FFT.waveform() computes
- * amplitude values along the time domain. The array indices correspond
- * to samples across a brief moment in time. Each value represents
- * amplitude of the waveform at that sample of time.
- * • FFT.analyze() computes amplitude values along the
- * frequency domain. The array indices correspond to frequencies (i.e.
- * pitches), from the lowest to the highest that humans can hear. Each
- * value represents amplitude at that slice of the frequency spectrum.
- * Use with getEnergy() to measure amplitude at specific
- * frequencies, or within a range of frequencies.
- *
- * FFT analyzes a very short snapshot of sound called a sample
- * buffer. It returns an array of amplitude measurements, referred
- * to as bins. The array is 1024 bins long by default.
- * You can change the bin array length, but it must be a power of 2
- * between 16 and 1024 in order for the FFT algorithm to function
- * correctly. The actual size of the FFT buffer is twice the
- * number of bins, so given a standard sample rate, the buffer is
- * 2048/44100 seconds long.
- * function preload(){
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
+ * let mySound;
*
- * function setup(){
- * let cnv = createCanvas(100,100);
- * cnv.mouseClicked(togglePlay);
- * fft = new p5.FFT();
- * sound.amp(0.2);
+ * function preload() {
+ * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
- * function draw(){
- * background(0);
- *
- * let spectrum = fft.analyze();
- * noStroke();
- * fill(0,255,0); // spectrum is green
- * for (var i = 0; i< spectrum.length; i++){
- * let x = map(i, 0, spectrum.length, 0, width);
- * let h = -height + map(spectrum[i], 0, 255, height, 0);
- * rect(x, height, width / spectrum.length, h )
- * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * }
+ * function canvasPressed() {
+ * mySound.loop();
+ * }
+ * function mouseReleased() {
+ * mySound.pause();
+ * }
+ * function draw() {
+ * background(220);
*
- * let waveform = fft.waveform();
- * noFill();
- * beginShape();
- * stroke(255,0,0); // waveform is red
- * strokeWeight(1);
- * for (var i = 0; i< waveform.length; i++){
- * let x = map(i, 0, waveform.length, 0, width);
- * let y = map( waveform[i], -1, 1, 0, height);
- * vertex(x,y);
- * }
- * endShape();
+ * // Set the rate to a range between 0.1 and 4
+ * // Changing the rate also alters the pitch
+ * let playbackRate = map(mouseY, 0.1, height, 2, 0);
+ * playbackRate = constrain(playbackRate, 0.01, 4);
+ * mySound.rate(playbackRate);
*
- * text('click to play/pause', 4, 10);
+ * line(0, mouseY, width, mouseY);
+ * text('rate: ' + round(playbackRate * 100) + '%', 10, 20);
* }
*
- * // fade sound if mouse is over canvas
- * function togglePlay() {
- * if (sound.isPlaying()) {
- * sound.pause();
- * } else {
- * sound.loop();
- * }
- * }
- *
- * Must be called prior to using
- * getEnergy().
- *
- * @method analyze
- * @param {Number} [bins] Must be a power of two between
- * 16 and 1024. Defaults to 1024.
- * @param {Number} [scale] If "dB," returns decibel
- * float measurements between
- * -140 and 0 (max).
- * Otherwise returns integers from 0-255.
- * @return {Array} spectrum Array of energy (amplitude/volume)
- * values across the frequency spectrum.
- * Lowest energy (silence) = 0, highest
- * possible is 255.
- * @example
- *
- * let osc;
- * let fft;
- *
- * function setup(){
- * createCanvas(100,100);
- * osc = new p5.Oscillator();
- * osc.amp(0);
- * osc.start();
- * fft = new p5.FFT();
- * }
- *
- * function draw(){
- * background(0);
- *
- * let freq = map(mouseX, 0, 800, 20, 15000);
- * freq = constrain(freq, 1, 20000);
- * osc.freq(freq);
- *
- * let spectrum = fft.analyze();
- * noStroke();
- * fill(0,255,0); // spectrum is green
- * for (var i = 0; i< spectrum.length; i++){
- * let x = map(i, 0, spectrum.length, 0, width);
- * let h = -height + map(spectrum[i], 0, 255, height, 0);
- * rect(x, height, width / spectrum.length, h );
- * }
- *
- * stroke(255);
- * text('Freq: ' + round(freq)+'Hz', 10, 10);
- *
- * isMouseOverCanvas();
- * }
- *
- * // only play sound when mouse is over canvas
- * function isMouseOverCanvas() {
- * let mX = mouseX, mY = mouseY;
- * if (mX > 0 && mX < width && mY < height && mY > 0) {
- * osc.amp(0.5, 0.2);
- * } else {
- * osc.amp(0, 0.2);
- * }
- * }
- *
+ * let drum;
+ * function preload() {
+ * drum = loadSound('assets/drum.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * }
*
+ * function canvasPressed() {
+ * drum.stop();
+ * drum.reverseBuffer();
+ * drum.play();
+ * }
+ *
+ *
- *
- *
- *function setup(){
- * cnv = createCanvas(100,100);
- * sound = new p5.AudioIn();
- * sound.start();
- * fft = new p5.FFT();
- * sound.connect(fft);
- *}
- *
- *
- *function draw(){
- *
- * var centroidplot = 0.0;
- * var spectralCentroid = 0;
- *
- *
- * background(0);
- * stroke(0,255,0);
- * var spectrum = fft.analyze();
- * fill(0,255,0); // spectrum is green
- *
- * //draw the spectrum
- * for (var i = 0; i< spectrum.length; i++){
- * var x = map(log(i), 0, log(spectrum.length), 0, width);
- * var h = map(spectrum[i], 0, 255, 0, height);
- * var rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length));
- * rect(x, height, rectangle_width, -h )
- * }
-
- * var nyquist = 22050;
- *
- * // get the centroid
- * spectralCentroid = fft.getCentroid();
- *
- * // the mean_freq_index calculation is for the display.
- * var mean_freq_index = spectralCentroid/(nyquist/spectrum.length);
- *
- * centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width);
- *
- *
- * stroke(255,0,0); // the line showing where the centroid is will be red
- *
- * rect(centroidplot, 0, width / spectrum.length, height)
- * noStroke();
- * fill(255,255,255); // text is white
- * text("centroid: ", 10, 20);
- * text(round(spectralCentroid)+" Hz", 10, 40);
- *}
- *
- * p5.Signal is a constant audio-rate signal used by p5.Oscillator
- * and p5.Envelope for modulation math.
+ * Schedule events to trigger every time a MediaElement
+ * (audio/video) reaches a playback cue point.
 *
- * This is necessary because Web Audio is processed on a seprate clock.
- * For example, the p5 draw loop runs about 60 times per second. But
- * the audio clock must process samples 44100 times per second. If we
- * want to add a value to each of those samples, we can't do it in the
- * draw loop, but we can do it by adding a constant-rate audio signal.
- *
- * This class mostly functions behind the scenes in p5.sound, and returns
- * a Tone.Signal from the Tone.js library by Yotam Mann.
- * If you want to work directly with audio signals for modular
- * synthesis, check out
- * tone.js.
+ * Time will be passed as the first parameter to the callback function,
+ * and param will be the second parameter.
 *
- * @class p5.Signal
- * @constructor
- * @return {Tone.Signal} A Signal object from the Tone.js library
+ *
+ * @method addCue
+ * @for p5.SoundFile
+ * @param {Number} time Time in seconds, relative to this media
+ *                      element's playback. For example, to trigger
+ *                      an event every time playback reaches two
+ *                      seconds, pass in the number 2. This will be
+ *                      passed as the first parameter to
+ *                      the callback function.
+ * @param {Function} callback Name of a function that will be
+ *                            called at the given time. The callback will
+ *                            receive time and (optionally) param as its
+ *                            two parameters.
+ * @param {Object} [value] An object to be passed as the
+ *                         second parameter to the
+ *                         callback function.
+ * @return {Number} id ID of this cue,
+ *                     useful for removeCue(id)
 * @example
 *
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
* function setup() {
- * carrier = new p5.Oscillator('sine');
- * carrier.amp(1); // set amplitude
- * carrier.freq(220); // set frequency
- * carrier.start(); // start oscillating
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to play', 10, 20);
*
- * modulator = new p5.Oscillator('sawtooth');
- * modulator.disconnect();
- * modulator.amp(1);
- * modulator.freq(4);
- * modulator.start();
+ * // schedule calls to changeText
+ * mySound.addCue(0, changeText, "hello" );
+ * mySound.addCue(0.5, changeText, "hello," );
+ * mySound.addCue(1, changeText, "hello, p5!");
+ * mySound.addCue(1.5, changeText, "hello, p5!!");
+ * mySound.addCue(2, changeText, "hello, p5!!!!!");
+ * }
*
- * // Modulator's default amplitude range is -1 to 1.
- * // Multiply it by -200, so the range is -200 to 200
- * // then add 220 so the range is 20 to 420
- * carrier.freq( modulator.mult(-200).add(220) );
+ * function changeText(val) {
+ * background(220);
+ * text(val, 10, 20);
+ * }
+ *
+ * function canvasPressed() {
+ * mySound.play();
* }
*
+ * let mySound;
+ * function preload() {
+ * mySound = loadSound('assets/doorbell.mp3');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to download', 10, 20);
+ * }
+ *
+ * function canvasPressed() {
+ * mySound.save('my cool filename');
+ * }
+ *
+ * function preload() {
+ * mySound = loadSound('assets/doorbell.mp3');
+ * }
+ *
+ * function setup() {
+ * noCanvas();
+ * let soundBlob = mySound.getBlob();
+ *
+ * // Now we can send the blob to a server...
+ * let serverUrl = 'https://jsonplaceholder.typicode.com/posts';
+ * let httpRequestOptions = {
+ * method: 'POST',
+ * body: new FormData().append('soundBlob', soundBlob),
+ * headers: new Headers({
+ * 'Content-Type': 'multipart/form-data'
+ * })
+ * };
+ * httpDo(serverUrl, httpRequestOptions);
+ *
+ * // We can also create an `ObjectURL` pointing to the Blob
+ * let blobUrl = URL.createObjectURL(soundBlob);
+ *
+ * // The `
- * Creates a signal that oscillates between -1.0 and 1.0.
- * By default, the oscillation takes the form of a sinusoidal
- * shape ('sine'). Additional types include 'triangle',
- * 'sawtooth' and 'square'. The frequency defaults to
- * 440 oscillations per second (440Hz, equal to the pitch of an
- * 'A' note).
- *
- * Set the type of oscillation with setType(), or by instantiating a
- * specific oscillator: p5.SinOsc, p5.TriOsc, p5.SqrOsc, or p5.SawOsc.
- *
+ * Amplitude measures volume between 0.0 and 1.0.
+ * Listens to all p5sound by default, or use setInput()
+ * to listen to a specific sound source. Accepts an optional
+ * smoothing value, which defaults to 0.
 *
- * @class p5.Oscillator
+ * @class p5.Amplitude
 * @constructor
- * @param {Number} [freq] frequency defaults to 440Hz
- * @param {String} [type] type of oscillator. Options:
- *                        'sine' (default), 'triangle',
- *                        'sawtooth', 'square'
+ * @param {Number} [smoothing] between 0.0 and .999 to smooth
+ *                             amplitude readings (defaults to 0)
 * @example
 *
- * let osc;
- * let playing = false;
+ * let sound, amplitude;
*
+ * function preload(){
+ * sound = loadSound('assets/beat.mp3');
+ * }
* function setup() {
- * backgroundColor = color(255,0,255);
- * textAlign(CENTER);
- *
- * osc = new p5.Oscillator();
- * osc.setType('sine');
- * osc.freq(240);
- * osc.amp(0);
- * osc.start();
+ * let cnv = createCanvas(100,100);
+ * cnv.mouseClicked(toggleSound);
+ * amplitude = new p5.Amplitude();
* }
*
* function draw() {
- * background(backgroundColor)
- * text('click to play', width/2, height/2);
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * let level = amplitude.getLevel();
+ * let size = map(level, 0, 1, 0, 200);
+ * ellipse(width/2, height/2, size, size);
* }
*
- * function mouseClicked() {
- * if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) {
- * if (!playing) {
- * // ramp amplitude to 0.5 over 0.05 seconds
- * osc.amp(0.5, 0.05);
- * playing = true;
- * backgroundColor = color(0,255,255);
- * } else {
- * // ramp amplitude to 0 over 0.5 seconds
- * osc.amp(0, 0.5);
- * playing = false;
- * backgroundColor = color(255,0,255);
- * }
+ * function toggleSound() {
+ * if (sound.isPlaying() ){
+ * sound.stop();
+ * } else {
+ * sound.play();
* }
* }
- *
- * let osc = new p5.Oscillator(300);
- * osc.start();
- * osc.freq(40, 10);
+ * function preload(){
+ * sound1 = loadSound('assets/beat.mp3');
+ * sound2 = loadSound('assets/drum.mp3');
+ * }
+ * function setup(){
+ * cnv = createCanvas(100, 100);
+ * cnv.mouseClicked(toggleSound);
+ *
+ * amplitude = new p5.Amplitude();
+ * amplitude.setInput(sound2);
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * let level = amplitude.getLevel();
+ * let size = map(level, 0, 1, 0, 200);
+ * ellipse(width/2, height/2, size, size);
+ * }
+ *
+ * function toggleSound(){
+ * if (sound1.isPlaying() && sound2.isPlaying()) {
+ * sound1.stop();
+ * sound2.stop();
+ * } else {
+ * sound1.play();
+ * sound2.play();
+ * }
+ * }
*
+ * function preload(){
+ * sound = loadSound('assets/beat.mp3');
+ * }
*
- * @method connect
- * @param {Object} unit A p5.sound or Web Audio object
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mouseClicked(toggleSound);
+ * amplitude = new p5.Amplitude();
+ * }
+ *
+ * function draw() {
+ * background(220, 150);
+ * textAlign(CENTER);
+ * text('tap to play', width/2, 20);
+ *
+ * let level = amplitude.getLevel();
+ * let size = map(level, 0, 1, 0, 200);
+ * ellipse(width/2, height/2, size, size);
+ * }
+ *
+ * function toggleSound(){
+ * if (sound.isPlaying()) {
+ * sound.stop();
+ * } else {
+ * sound.play();
+ * }
+ * }
+ *
+ * FFT (Fast Fourier Transform) is an analysis algorithm that
+ * isolates individual
+ * audio frequencies within a waveform.
 *
- * @method phase
- * @param {Number} phase float between 0.0 and 1.0
- */
- p5.Oscillator.prototype.phase = function (p) {
- var delayAmt = p5.prototype.map(p, 0, 1, 0, 1 / this.f);
- var now = p5sound.audiocontext.currentTime;
- this.phaseAmount = p;
- if (!this.dNode) {
- // create a delay node
- this.dNode = p5sound.audiocontext.createDelay();
- // put the delay node in between output and panner
- this.oscillator.disconnect();
- this.oscillator.connect(this.dNode);
- this.dNode.connect(this.output);
- }
- // set delay time to match phase:
- this.dNode.delayTime.setValueAtTime(delayAmt, now);
- };
- // ========================== //
- // SIGNAL MATH FOR MODULATION //
- // ========================== //
- // return sigChain(this, scale, thisChain, nextChain, Scale);
- var sigChain = function (o, mathObj, thisChain, nextChain, type) {
- var chainSource = o.oscillator;
- // if this type of math already exists in the chain, replace it
- for (var i in o.mathOps) {
- if (o.mathOps[i] instanceof type) {
- chainSource.disconnect();
- o.mathOps[i].dispose();
- thisChain = i;
- // assume nextChain is output gain node unless...
- if (thisChain < o.mathOps.length - 2) {
- nextChain = o.mathOps[i + 1];
- }
- }
- }
- if (thisChain === o.mathOps.length - 1) {
- o.mathOps.push(nextChain);
- }
- // assume source is the oscillator unless i > 0
- if (i > 0) {
- chainSource = o.mathOps[i - 1];
- }
- chainSource.disconnect();
- chainSource.connect(mathObj);
- mathObj.connect(nextChain);
- o.mathOps[thisChain] = mathObj;
- return o;
- };
- /**
- * Add a value to the p5.Oscillator's output amplitude,
- * and return the oscillator. Calling this method again
- * will override the initial add() with a new value.
+ * Once instantiated, a p5.FFT object can return an array based on
+ * two types of analyses: • FFT.waveform() computes
+ * amplitude values along the time domain. The array indices correspond
+ * to samples across a brief moment in time. Each value represents
+ * amplitude of the waveform at that sample of time.
+ * • FFT.analyze() computes amplitude values along the
+ * frequency domain. The array indices correspond to frequencies (i.e.
+ * pitches), from the lowest to the highest that humans can hear. Each
+ * value represents amplitude at that slice of the frequency spectrum.
+ * Use with getEnergy() to measure amplitude at specific
+ * frequencies, or within a range of frequencies.
+ *
+ * FFT analyzes a very short snapshot of sound called a sample
+ * buffer. It returns an array of amplitude measurements, referred
+ * to as bins. The array is 1024 bins long by default.
+ * You can change the bin array length, but it must be a power of 2
+ * between 16 and 1024 in order for the FFT algorithm to function
+ * correctly. The actual size of the FFT buffer is twice the
+ * number of bins, so given a standard sample rate, the buffer is
+ * 2048/44100 seconds long.
+ * function preload(){
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup(){
+ * let cnv = createCanvas(100,100);
+ * cnv.mouseClicked(togglePlay);
+ * fft = new p5.FFT();
+ * sound.amp(0.2);
+ * }
+ *
+ * function draw(){
+ * background(220);
+ *
+ * let spectrum = fft.analyze();
+ * noStroke();
+ * fill(255, 0, 255);
+ * for (let i = 0; i< spectrum.length; i++){
+ * let x = map(i, 0, spectrum.length, 0, width);
+ * let h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width / spectrum.length, h )
+ * }
+ *
+ * let waveform = fft.waveform();
+ * noFill();
+ * beginShape();
+ * stroke(20);
+ * for (let i = 0; i < waveform.length; i++){
+ * let x = map(i, 0, waveform.length, 0, width);
+ * let y = map( waveform[i], -1, 1, 0, height);
+ * vertex(x,y);
+ * }
+ * endShape();
*
- */
- p5.Oscillator.prototype.add = function (num) {
- var add = new Add(num);
- var thisChain = this.mathOps.length - 1;
- var nextChain = this.output;
- return sigChain(this, add, thisChain, nextChain, Add);
- };
- /**
- * Multiply the p5.Oscillator's output amplitude
- * by a fixed value (i.e. turn it up!). Calling this method
- * again will override the initial mult() with a new value.
+ * text('tap to play', 20, 20);
+ * }
*
- * @method mult
- * @param {Number} number Constant number to multiply
- * @return {p5.Oscillator} Oscillator Returns this oscillator
- * with multiplied output
+ * function togglePlay() {
+ * if (sound.isPlaying()) {
+ * sound.pause();
+ * } else {
+ * sound.loop();
+ * }
+ * }
+ *
- * Constructor: new p5.SinOsc().
- * This creates a Sine Wave Oscillator and is
- * equivalent to new p5.Oscillator('sine')
- * or creating a p5.Oscillator and then calling
- * its method setType('sine').
- * See p5.Oscillator for methods.
- *
- * @class p5.SinOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
- */
- p5.SinOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'sine');
- };
- p5.SinOsc.prototype = Object.create(p5.Oscillator.prototype);
- /**
- * Constructor: new p5.TriOsc().
- * This creates a Triangle Wave Oscillator and is
- * equivalent to new p5.Oscillator('triangle')
- * or creating a p5.Oscillator and then calling
- * its method setType('triangle').
- * See p5.Oscillator for methods.
- *
- * @class p5.TriOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
- */
- p5.TriOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'triangle');
};
- p5.TriOsc.prototype = Object.create(p5.Oscillator.prototype);
/**
- * Constructor: new p5.SawOsc().
- * This creates a SawTooth Wave Oscillator and is
- * equivalent to new p5.Oscillator('sawtooth')
- * or creating a p5.Oscillator and then calling
- * its method setType('sawtooth').
- * See p5.Oscillator for methods.
+ * Returns an array of amplitude values (between -1.0 and +1.0) that represent
+ * a snapshot of amplitude readings in a single buffer. Length will be
+ * equal to bins (defaults to 1024). Can be used to draw the waveform
+ * of a sound.
*
- * @class p5.SawOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
- */
- p5.SawOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'sawtooth');
- };
- p5.SawOsc.prototype = Object.create(p5.Oscillator.prototype);
- /**
- * Constructor: new p5.SqrOsc().
- * This creates a Square Wave Oscillator and is
- * equivalent to new p5.Oscillator('square')
- * or creating a p5.Oscillator and then calling
- * its method setType('square').
- * See p5.Oscillator for methods.
+ * @method waveform
+ * @for p5.FFT
+ * @param {Number} [bins] Must be a power of two between
+ * 16 and 1024. Defaults to 1024.
+ * @param {String} [precision] If any value is provided, will return results
+ * in a Float32 Array which is more precise
+ * than a regular array.
+ * @return {Array} Array Array of amplitude values (-1 to 1)
+ * over time. Array length = bins.
*
- * @class p5.SqrOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
*/
- p5.SqrOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'square');
- };
- p5.SqrOsc.prototype = Object.create(p5.Oscillator.prototype);
-}(master, Tone_signal_Add, Tone_signal_Multiply, Tone_signal_Scale);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_core_Timeline;
-Tone_core_Timeline = function (Tone) {
- 'use strict';
- Tone.Timeline = function () {
- var options = this.optionsObject(arguments, ['memory'], Tone.Timeline.defaults);
- this._timeline = [];
- this._toRemove = [];
- this._iterating = false;
- this.memory = options.memory;
- };
- Tone.extend(Tone.Timeline);
- Tone.Timeline.defaults = { 'memory': Infinity };
- Object.defineProperty(Tone.Timeline.prototype, 'length', {
- get: function () {
- return this._timeline.length;
- }
- });
- Tone.Timeline.prototype.add = function (event) {
- if (this.isUndef(event.time)) {
- throw new Error('Tone.Timeline: events must have a time attribute');
- }
- if (this._timeline.length) {
- var index = this._search(event.time);
- this._timeline.splice(index + 1, 0, event);
- } else {
- this._timeline.push(event);
- }
- if (this.length > this.memory) {
- var diff = this.length - this.memory;
- this._timeline.splice(0, diff);
- }
- return this;
- };
- Tone.Timeline.prototype.remove = function (event) {
- if (this._iterating) {
- this._toRemove.push(event);
- } else {
- var index = this._timeline.indexOf(event);
- if (index !== -1) {
- this._timeline.splice(index, 1);
+
+
+ p5.FFT.prototype.waveform = function () {
+ var bins, mode, normalArray;
+
+ for (var i = 0; i < arguments.length; i++) {
+ if (typeof arguments[i] === 'number') {
+ bins = arguments[i];
+ this.analyser.fftSize = bins * 2;
}
- }
- return this;
- };
- Tone.Timeline.prototype.get = function (time) {
- var index = this._search(time);
- if (index !== -1) {
- return this._timeline[index];
- } else {
- return null;
- }
- };
- Tone.Timeline.prototype.peek = function () {
- return this._timeline[0];
- };
- Tone.Timeline.prototype.shift = function () {
- return this._timeline.shift();
- };
- Tone.Timeline.prototype.getAfter = function (time) {
- var index = this._search(time);
- if (index + 1 < this._timeline.length) {
- return this._timeline[index + 1];
- } else {
- return null;
- }
- };
- Tone.Timeline.prototype.getBefore = function (time) {
- var len = this._timeline.length;
- if (len > 0 && this._timeline[len - 1].time < time) {
- return this._timeline[len - 1];
- }
- var index = this._search(time);
- if (index - 1 >= 0) {
- return this._timeline[index - 1];
+
+ if (typeof arguments[i] === 'string') {
+ mode = arguments[i];
+ }
+ }
+
+
+ if (mode && !p5.prototype._isSafari()) {
+ timeToFloat(this, this.timeDomain);
+ this.analyser.getFloatTimeDomainData(this.timeDomain);
+ return this.timeDomain;
} else {
- return null;
+ timeToInt(this, this.timeDomain);
+ this.analyser.getByteTimeDomainData(this.timeDomain);
+ normalArray = [];
+
+ for (var j = 0; j < this.timeDomain.length; j++) {
+ var scaled = p5.prototype.map(this.timeDomain[j], 0, 255, -1, 1);
+ normalArray.push(scaled);
+ }
+
+ return normalArray;
}
};
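(A sketch of drawing the returned buffer each frame, assuming fft = new
p5.FFT() and that some source is playing.)

function draw() {
  background(220);
  // fft.waveform(512, 'float') would return a more precise Float32Array
  let wave = fft.waveform();
  noFill();
  beginShape();
  for (let i = 0; i < wave.length; i++) {
    vertex(map(i, 0, wave.length, 0, width),
           map(wave[i], -1, 1, height, 0));
  }
  endShape();
}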
- Tone.Timeline.prototype.cancel = function (after) {
- if (this._timeline.length > 1) {
- var index = this._search(after);
- if (index >= 0) {
- if (this._timeline[index].time === after) {
- for (var i = index; i >= 0; i--) {
- if (this._timeline[i].time === after) {
- index = i;
- } else {
- break;
- }
- }
- this._timeline = this._timeline.slice(0, index);
- } else {
- this._timeline = this._timeline.slice(0, index + 1);
- }
- } else {
- this._timeline = [];
+ /**
+ * Returns an array of amplitude values (between 0 and 255)
+ * across the frequency spectrum. Length is equal to FFT bins
+ * (1024 by default). The array indices correspond to frequencies
+ * (i.e. pitches), from the lowest to the highest that humans can
+ * hear. Each value represents amplitude at that slice of the
+ * frequency spectrum. Must be called prior to using
+ * getEnergy().
+ *
+ * @method analyze
+ * @for p5.FFT
+ * @param {Number} [bins] Must be a power of two between
+ * 16 and 1024. Defaults to 1024.
+ * @param {Number} [scale] If "dB," returns decibel
+ * float measurements between
+ * -140 and 0 (max).
+ * Otherwise returns integers from 0-255.
+ * @return {Array} spectrum Array of energy (amplitude/volume)
+ * values across the frequency spectrum.
+ * Lowest energy (silence) = 0, highest
+ * possible is 255.
+ * @example
+ *
+ * let osc, fft;
+ *
+ * function setup(){
+ * let cnv = createCanvas(100,100);
+ * cnv.mousePressed(startSound);
+ * osc = new p5.Oscillator();
+ * osc.amp(0);
+ * fft = new p5.FFT();
+ * }
+ *
+ * function draw(){
+ * background(220);
+ *
+ * let freq = map(mouseX, 0, windowWidth, 20, 10000);
+ * freq = constrain(freq, 1, 20000);
+ * osc.freq(freq);
+ *
+ * let spectrum = fft.analyze();
+ * noStroke();
+ * fill(255, 0, 255);
+ * for (let i = 0; i< spectrum.length; i++){
+ * let x = map(i, 0, spectrum.length, 0, width);
+ * let h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width / spectrum.length, h );
+ * }
+ *
+ * stroke(255);
+ * if (!osc.started) {
+ * text('tap here and drag to change frequency', 10, 20, width - 20);
+ * } else {
+ * text(round(freq)+'Hz', 10, 20);
+ * }
+ * }
+ *
+ * function startSound() {
+ * osc.start();
+ * osc.amp(0.5, 0.2);
+ * }
+ *
+ * function mouseReleased() {
+ * osc.amp(0, 0.2);
+ * }
+ *
+ * function setup(){
+ * cnv = createCanvas(100,100);
+ * cnv.mousePressed(userStartAudio);
+ * sound = new p5.AudioIn();
+ * sound.start();
+ * fft = new p5.FFT();
+ * sound.connect(fft);
+ *}
+ *
+ *function draw() {
+ * if (getAudioContext().state !== 'running') {
+ * background(220);
+ * text('tap here and enable mic to begin', 10, 20, width - 20);
+ * return;
+ * }
+ * let centroidplot = 0.0;
+ * let spectralCentroid = 0;
+ *
+ * background(0);
+ * stroke(0,255,0);
+ * let spectrum = fft.analyze();
+ * fill(0,255,0); // spectrum is green
+ *
+ * //draw the spectrum
+ * for (let i = 0; i < spectrum.length; i++){
+ * let x = map(log(i), 0, log(spectrum.length), 0, width);
+ * let h = map(spectrum[i], 0, 255, 0, height);
+ * let rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length));
+ * rect(x, height, rectangle_width, -h )
+ * }
+ * let nyquist = 22050;
+ *
+ * // get the centroid
+ * spectralCentroid = fft.getCentroid();
+ *
+ * // the mean_freq_index calculation is for the display.
+ * let mean_freq_index = spectralCentroid/(nyquist/spectrum.length);
+ *
+ * centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width);
+ *
+ * stroke(255,0,0); // the line showing where the centroid is will be red
+ *
+ * rect(centroidplot, 0, width / spectrum.length, height)
+ * noStroke();
+ * fill(255,255,255); // text is white
+ * text('centroid: ', 10, 20);
+ * text(round(spectralCentroid)+' Hz', 10, 40);
+ *}
+ *
- * Envelopes are pre-defined amplitude distribution over time.
- * Typically, envelopes are used to control the output volume
- * of an object, a series of fades referred to as Attack, Decay,
- * Sustain and Release (ADSR). Envelopes can also control other
- * Web Audio Parameters—for example, a p5.Envelope can
- * control an Oscillator's frequency like this: osc.freq(env).
- *
- * Use setRange to change the attack/release level.
- * Use setADSR to change attackTime, decayTime, sustainPercent and releaseTime.
- *
- * Use the play method to play the entire envelope,
- * the ramp method for a pingable trigger,
- * or triggerAttack/triggerRelease to trigger noteOn/noteOff.
- * let attackLevel = 1.0;
- * let releaseLevel = 0;
- *
- * let attackTime = 0.001;
- * let decayTime = 0.2;
- * let susPercent = 0.2;
- * let releaseTime = 0.5;
- *
- * let env, triOsc;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- *
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
- *
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
- *
- * cnv.mousePressed(playEnv);
- * }
+ * Calculates and returns the 1/N
+ * Octave Bands.
+ * N defaults to 3 and the minimum central frequency to 15.625Hz.
+ * (1/3 Octave Bands ~= 31 Frequency Bands)
+ * Setting fCtr0 to the central value of a higher octave will ignore the lower bands
+ * and produce fewer frequency groups.
*
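(A hedged sketch, assuming fft = new p5.FFT() and that fft.analyze() has
already been called this frame.)

let bands = fft.getOctaveBands(3);     // ~31 groups at 1/3-octave resolution
let octaves = fft.getOctaveBands(1);   // one group per octave instead
let averages = fft.logAverages(bands); // average energy per group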
- * function playEnv() {
- * env.play();
- * }
- *
- * let t1 = 0.1; // attack time in seconds
- * let l1 = 0.7; // attack level 0.0 to 1.0
- * let t2 = 0.3; // decay time in seconds
- * let l2 = 0.1; // decay level 0.0 to 1.0
- * let t3 = 0.2; // sustain time in seconds
- * let l3 = 0.5; // sustain level 0.0 to 1.0
- * // release level defaults to zero
- *
- * let env;
- * let triOsc;
- *
- * function setup() {
- * background(0);
- * noStroke();
- * fill(255);
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- *
- * env = new p5.Envelope(t1, l1, t2, l2, t3, l3);
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env); // give the env control of the triOsc's amp
- * triOsc.start();
- * }
+ * p5.Signal is a constant audio-rate signal used by p5.Oscillator
+ * and p5.Envelope for modulation math.
*
- * // mouseClick triggers envelope if over canvas
- * function mouseClicked() {
- * // is mouse over canvas?
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * env.play(triOsc);
- * }
- * }
- *
+ * This is necessary because Web Audio is processed on a separate clock.
+ * For example, the p5 draw loop runs about 60 times per second. But
+ * the audio clock must process samples 44100 times per second. If we
+ * want to add a value to each of those samples, we can't do it in the
+ * draw loop, but we can do it by adding a constant-rate audio signal.
- * Set values like a traditional
- * ADSR envelope.
+ *
+ * This class mostly functions behind the scenes in p5.sound, and returns
+ * a Tone.Signal from the Tone.js library by Yotam Mann.
+ * If you want to work directly with audio signals for modular
+ * synthesis, check out
+ * tone.js.
 *
- * @method setADSR
- * @param {Number} attackTime Time (in seconds before envelope
- *                            reaches Attack Level
- * @param {Number} [decayTime] Time (in seconds) before envelope
- *                             reaches Decay/Sustain Level
- * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
- *                            where 1.0 = attackLevel, 0.0 = releaseLevel.
- *                            The susRatio determines the decayLevel and the level at which the
- *                            sustain portion of the envelope will sustain.
- *                            For example, if attackLevel is 0.4, releaseLevel is 0,
- *                            and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
- *                            increased to 1.0 (using setRange),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ * @class p5.Signal
+ * @constructor
+ * @return {Tone.Signal} A Signal object from the Tone.js library
* @example
*
- * let attackLevel = 1.0;
- * let releaseLevel = 0;
- *
- * let attackTime = 0.001;
- * let decayTime = 0.2;
- * let susPercent = 0.2;
- * let releaseTime = 0.5;
- *
- * let env, triOsc;
+ * let carrier, modulator;
*
* function setup() {
* let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * text('tap to play', 20, 20);
*
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
+ * carrier = new p5.Oscillator('sine');
+ * carrier.start();
+ * carrier.amp(1); // set amplitude
+ * carrier.freq(220); // set frequency
*
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
+ * modulator = new p5.Oscillator('sawtooth');
+ * modulator.disconnect();
+ * modulator.start();
+ * modulator.amp(1);
+ * modulator.freq(4);
*
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
+ * // Modulator's default amplitude range is -1 to 1.
+ * // Multiply it by -400, so the range is -400 to 400,
+ * // then add 220 so the range is -180 to 620
+ * carrier.freq( modulator.mult(-400).add(220) );
+ * }
*
- * cnv.mousePressed(playEnv);
+ * function canvasPressed() {
+ * userStartAudio();
+ * carrier.amp(1.0);
* }
*
- * function playEnv() {
- * env.play();
+ * function mouseReleased() {
+ * carrier.amp(0);
* }
*
- * let attackLevel = 1.0;
- * let releaseLevel = 0;
- *
- * let attackTime = 0.001;
- * let decayTime = 0.2;
- * let susPercent = 0.2;
- * let releaseTime = 0.5;
- *
- * let env, triOsc;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- *
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
- *
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
+ * Fade to value, for smooth transitions
*
- * cnv.mousePressed(playEnv);
- * }
+ * @method fade
+ * @for p5.Signal
+ * @param {Number} value Value to set this signal
+ * @param {Number} [secondsFromNow] Length of fade, in seconds from now
+ */
+
+
+ Signal.prototype.fade = Signal.prototype.linearRampToValueAtTime;
+ Mult.prototype.fade = Signal.prototype.fade;
+ Add.prototype.fade = Signal.prototype.fade;
+ Scale.prototype.fade = Signal.prototype.fade;
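(A small sketch of fading a bare signal, assuming osc is a started
p5.Oscillator and audio is running; semantics follow the fade doc above.)

let sig = new p5.Signal(0); // constant signal holding 0
osc.amp(sig);               // the signal now controls the oscillator's amplitude
sig.fade(0.8, 2);           // ramp the signal to 0.8 about 2 seconds from now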
+ /**
+ * Connect a p5.sound object or Web Audio node to this
+ * p5.Signal so that its amplitude values can be scaled.
*
- * function playEnv() {
- * env.play();
- * }
- *
- * let attackLevel = 1.0;
- * let releaseLevel = 0;
- *
- * let attackTime = 0.001;
- * let decayTime = 0.2;
- * let susPercent = 0.2;
- * let releaseTime = 0.5;
- *
- * let env, triOsc;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- *
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
- *
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
- *
- * cnv.mousePressed(playEnv);
- * }
+ * Scale this signal value to a given range,
+ * and return the result as an audio signal. Does
+ * not change the value of the original signal,
+ * instead it returns a new p5.SignalScale.
*
- * function playEnv() {
- * // trigger env on triOsc, 0 seconds from now
- * // After decay, sustain for 0.2 seconds before release
- * env.play(triOsc, 0, 0.2);
- * }
- *
+ * Envelopes are pre-defined amplitude distributions over time.
+ * Typically, envelopes are used to control the output volume
+ * of an object, a series of fades referred to as Attack, Decay,
+ * Sustain and Release (ADSR). Envelopes can also control other
+ * Web Audio Parameters—for example, a p5.Envelope can
+ * control an Oscillator's frequency like this: osc.freq(env).
+ *
+ * Use setRange to change the attack/release level.
+ * Use setADSR to change attackTime, decayTime, sustainPercent and releaseTime.
+ *
+ * Use the play method to play the entire envelope,
+ * the ramp method for a pingable trigger,
+ * or triggerAttack/triggerRelease to trigger noteOn/noteOff.
+ * let t1 = 0.1; // attack time in seconds
+ * let l1 = 0.7; // attack level 0.0 to 1.0
+ * let t2 = 0.3; // decay time in seconds
+ * let l2 = 0.1; // decay level 0.0 to 1.0
*
- * let attackLevel = 1.0;
- * let releaseLevel = 0;
- *
- * let attackTime = 0.001;
- * let decayTime = 0.3;
- * let susPercent = 0.4;
- * let releaseTime = 0.5;
- *
- * let env, triOsc;
+ * let env;
+ * let triOsc;
*
* function setup() {
* let cnv = createCanvas(100, 100);
- * background(200);
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- *
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
+ * background(220);
+ * text('tap to play', 20, 20);
+ * cnv.mousePressed(playSound);
*
+ * env = new p5.Envelope(t1, l1, t2, l2);
* triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
- *
- * cnv.mousePressed(envAttack);
- * }
- *
- * function envAttack() {
- * console.log('trigger attack');
- * env.triggerAttack();
- *
- * background(0,255,0);
- * text('attack!', width/2, height/2);
* }
*
- * function mouseReleased() {
- * env.triggerRelease();
- *
- * background(200);
- * text('click to play', width/2, height/2);
+ * function playSound() {
+ * // starting the oscillator ensures that audio is enabled.
+ * triOsc.start();
+ * env.play(triOsc);
* }
*
- *
- * let attackLevel = 1.0;
- * let releaseLevel = 0;
- *
- * let attackTime = 0.001;
- * let decayTime = 0.3;
- * let susPercent = 0.4;
- * let releaseTime = 0.5;
+ * let attackTime;
+ * let l1 = 0.7; // attack level 0.0 to 1.0
+ * let t2 = 0.3; // decay time in seconds
+ * let l2 = 0.1; // decay level 0.0 to 1.0
+ * let l3 = 0.2; // release time in seconds
*
* let env, triOsc;
*
* function setup() {
* let cnv = createCanvas(100, 100);
- * background(200);
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
+ * cnv.mousePressed(playSound);
*
* env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
- *
* triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
- *
- * cnv.mousePressed(envAttack);
* }
*
- * function envAttack() {
- * console.log('trigger attack');
- * env.triggerAttack();
+ * function draw() {
+ * background(220);
+ * text('tap here to play', 5, 20);
*
- * background(0,255,0);
- * text('attack!', width/2, height/2);
+ * attackTime = map(mouseX, 0, width, 0.0, 1.0);
+ * text('attack time: ' + attackTime, 5, height - 20);
* }
*
- * function mouseReleased() {
- * env.triggerRelease();
+ * // mouseClick triggers envelope if over canvas
+ * function playSound() {
+ * env.set(attackTime, l1, t2, l2, l3);
*
- * background(200);
- * text('click to play', width/2, height/2);
+ * triOsc.start();
+ * env.play(triOsc);
* }
*
- * Exponentially ramp to a value using the first two
- * values from setADSR(attackTime, decayTime) as
- * time constants for simple exponential ramps.
- * If the value is higher than current value, it uses attackTime,
- * while a decrease uses decayTime.
+ * Set values like a traditional
+ * ADSR envelope.
*
- * @method ramp
- * @param {Object} unit p5.sound Object or Web Audio Param
- * @param {Number} secondsFromNow When to trigger the ramp
- * @param {Number} v Target value
- * @param {Number} [v2] Second target value (optional)
+ * @method setADSR
+ * @for p5.Envelope
+ * @param {Number} attackTime Time (in seconds) before envelope
+ * reaches Attack Level
+ * @param {Number} [decayTime] Time (in seconds) before envelope
+ * reaches Decay/Sustain Level
+ * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
+ * where 1.0 = attackLevel, 0.0 = releaseLevel.
+ * The susRatio determines the decayLevel and the level at which the
+ * sustain portion of the envelope will sustain.
+ * For example, if attackLevel is 0.4, releaseLevel is 0,
+ * and susRatio is 0.5, the decayLevel would be 0.2. If attackLevel is
+ * increased to 1.0 (using setRange),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
* @example
*
- * let env, osc, amp, cnv;
+ * let attackLevel = 1.0;
+ * let releaseLevel = 0;
*
* let attackTime = 0.001;
* let decayTime = 0.2;
- * let attackLevel = 1;
- * let decayLevel = 0;
+ * let susPercent = 0.2;
+ * let releaseTime = 0.5;
+ *
+ * let env, triOsc;
*
* function setup() {
- * cnv = createCanvas(100, 100);
- * fill(0,255,0);
- * noStroke();
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playEnv);
*
* env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime);
- *
- * osc = new p5.Oscillator();
- * osc.amp(env);
- * osc.start();
- *
- * amp = new p5.Amplitude();
- *
- * cnv.mousePressed(triggerRamp);
- * }
- *
- * function triggerRamp() {
- * env.ramp(osc, 0, attackLevel, decayLevel);
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.freq(220);
* }
*
* function draw() {
- * background(20,20,20);
- * text('click me', 10, 20);
- * let h = map(amp.getLevel(), 0, 0.4, 0, height);;
+ * background(220);
+ * text('tap here to play', 5, 20);
+ * attackTime = map(mouseX, 0, width, 0, 1.0);
+ * text('attack time: ' + attackTime, 5, height - 40);
+ * }
*
- * rect(0, height, width, -h);
+ * function playEnv() {
+ * triOsc.start();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.play();
* }
*
+ * let attackLevel = 1.0;
+ * let releaseLevel = 0;
*
- * @method mult
- * @param {Number} number Constant number to multiply
- * @return {p5.Envelope} Envelope Returns this envelope
- * with scaled output
- */
- p5.Envelope.prototype.mult = function (num) {
- var mult = new Mult(num);
- var thisChain = this.mathOps.length;
- var nextChain = this.output;
- return p5.prototype._mathChain(this, mult, thisChain, nextChain, Mult);
- };
- /**
- * Scale this envelope's amplitude values to a given
- * range, and return the envelope. Calling this method
- * again will override the initial scale() with new values.
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.2;
+ * let releaseTime = 0.5;
*
- * @method scale
- * @param {Number} inMin input range minumum
- * @param {Number} inMax input range maximum
- * @param {Number} outMin input range minumum
- * @param {Number} outMax input range maximum
- * @return {p5.Envelope} Envelope Returns this envelope
- * with scaled output
- */
- p5.Envelope.prototype.scale = function (inMin, inMax, outMin, outMax) {
- var scale = new Scale(inMin, inMax, outMin, outMax);
- var thisChain = this.mathOps.length;
- var nextChain = this.output;
- return p5.prototype._mathChain(this, scale, thisChain, nextChain, Scale);
- };
- // get rid of the oscillator
- p5.Envelope.prototype.dispose = function () {
- // remove reference from soundArray
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
- this.disconnect();
- if (this.control) {
- this.control.dispose();
- this.control = null;
- }
- for (var i = 1; i < this.mathOps.length; i++) {
- this.mathOps[i].dispose();
- }
- };
- // Different name for backwards compatibility, replicates p5.Envelope class
- p5.Env = function (t1, l1, t2, l2, t3, l3) {
- console.warn('WARNING: p5.Env is now deprecated and may be removed in future versions. ' + 'Please use the new p5.Envelope instead.');
- p5.Envelope.call(this, t1, l1, t2, l2, t3, l3);
- };
- p5.Env.prototype = Object.create(p5.Envelope.prototype);
-}(master, Tone_signal_Add, Tone_signal_Multiply, Tone_signal_Scale, Tone_signal_TimelineSignal);
-var pulse;
-'use strict';
-pulse = function () {
- var p5sound = master;
- /**
- * Creates a Pulse object, an oscillator that implements
- * Pulse Width Modulation.
- * The pulse is created with two oscillators.
- * Accepts a parameter for frequency, and to set the
- * width between the pulses. See
- * p5.Oscillator for a full list of methods.
+ * let env, triOsc;
*
- * @class p5.Pulse
- * @extends p5.Oscillator
- * @constructor
- * @param {Number} [freq] Frequency in oscillations per second (Hz)
- * @param {Number} [w] Width between the pulses (0 to 1.0,
- * defaults to 0)
- * @example
- *
- * let pulse;
* function setup() {
- * background(0);
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playEnv);
*
- * // Create and start the pulse wave oscillator
- * pulse = new p5.Pulse();
- * pulse.amp(0.5);
- * pulse.freq(220);
- * pulse.start();
+ * env = new p5.Envelope();
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.freq(220);
* }
*
* function draw() {
- * let w = map(mouseX, 0, width, 0, 1);
- * w = constrain(w, 0, 1);
- * pulse.width(w)
+ * background(220);
+ * text('tap here to play', 5, 20);
+ * attackLevel = map(mouseY, height, 0, 0, 1.0);
+ * text('attack level: ' + attackLevel, 5, height - 20);
+ * }
+ *
+ * function playEnv() {
+ * triOsc.start();
+ * env.setRange(attackLevel, releaseLevel);
+ * env.play();
* }
*
*/
- p5.Pulse = function (freq, w) {
- p5.Oscillator.call(this, freq, 'sawtooth');
- // width of PWM, should be betw 0 to 1.0
- this.w = w || 0;
- // create a second oscillator with inverse frequency
- this.osc2 = new p5.SawOsc(freq);
- // create a delay node
- this.dNode = p5sound.audiocontext.createDelay();
- // dc offset
- this.dcOffset = createDCOffset();
- this.dcGain = p5sound.audiocontext.createGain();
- this.dcOffset.connect(this.dcGain);
- this.dcGain.connect(this.output);
- // set delay time based on PWM width
- this.f = freq || 440;
- var mW = this.w / this.oscillator.frequency.value;
- this.dNode.delayTime.value = mW;
- this.dcGain.gain.value = 1.7 * (0.5 - this.w);
- // disconnect osc2 and connect it to delay, which is connected to output
- this.osc2.disconnect();
- this.osc2.panner.disconnect();
- this.osc2.amp(-1);
- // inverted amplitude
- this.osc2.output.connect(this.dNode);
- this.dNode.connect(this.output);
- this.output.gain.value = 1;
- this.output.connect(this.panner);
+
+
+ p5.Envelope.prototype.setRange = function (aLevel, rLevel) {
+ this.aLevel = aLevel || 1;
+ this.rLevel = rLevel || 0;
+ };
+
+
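+ // A setTargetAtTime ramp approaches its target exponentially: after
+ // time t it has covered 1 - e^(-t/TC) of the distance. Solving
+ // 1 - e^(-t/TC) = p for TC gives TC = t / ln(1/(1-p)), so the attack
+ // reaches _rampHighPercentage of its target in t1 seconds.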
+ p5.Envelope.prototype._setRampAD = function (t1, t2) {
+ this._rampAttackTime = this.checkExpInput(t1);
+ this._rampDecayTime = this.checkExpInput(t2);
+ var TCDenominator = 1.0;
+
+ TCDenominator = Math.log(1.0 / this.checkExpInput(1.0 - this._rampHighPercentage));
+ this._rampAttackTC = t1 / this.checkExpInput(TCDenominator);
+ TCDenominator = Math.log(1.0 / this._rampLowPercentage);
+ this._rampDecayTC = t2 / this.checkExpInput(TCDenominator);
+ };
+
+
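+ // Re-derive both time constants from the stored ramp times whenever
+ // the target percentages change, using the same relation as above.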
+ p5.Envelope.prototype.setRampPercentages = function (p1, p2) {
+ this._rampHighPercentage = this.checkExpInput(p1);
+ this._rampLowPercentage = this.checkExpInput(p2);
+ var TCDenominator = 1.0;
+
+ TCDenominator = Math.log(1.0 / this.checkExpInput(1.0 - this._rampHighPercentage));
+ this._rampAttackTC = this._rampAttackTime / this.checkExpInput(TCDenominator);
+ TCDenominator = Math.log(1.0 / this._rampLowPercentage);
+ this._rampDecayTC = this._rampDecayTime / this.checkExpInput(TCDenominator);
};
- p5.Pulse.prototype = Object.create(p5.Oscillator.prototype);
/**
- * Set the width of a Pulse object (an oscillator that implements
- * Pulse Width Modulation).
+ * Assign a parameter to be controlled by this envelope.
+ * If a p5.Sound object is given, then the p5.Envelope will control its
+ * output gain. If multiple inputs are provided, the env will
+ * control all of them.
*
- * @method width
- * @param {Number} [width] Width between the pulses (0 to 1.0,
- * defaults to 0)
+ * @method setInput
+ * @for p5.Envelope
+ * @param {Object} [...inputs] A p5.sound object or
+ * Web Audio Param.
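+ * For instance (a minimal sketch; the oscillator names and envelope
+ * values are illustrative), one envelope can control several
+ * sources at once:
+ * @example
+ *
+ * let env = new p5.Envelope(0.01, 1.0, 0.2, 0.0);
+ * let osc1 = new p5.Oscillator('sine');
+ * let osc2 = new p5.Oscillator('triangle');
+ * // both oscillators' output gain will follow the same envelope
+ * env.setInput(osc1, osc2);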
*/
- p5.Pulse.prototype.width = function (w) {
- if (typeof w === 'number') {
- if (w <= 1 && w >= 0) {
- this.w = w;
- // set delay time based on PWM width
- // var mW = map(this.w, 0, 1.0, 0, 1/this.f);
- var mW = this.w / this.oscillator.frequency.value;
- this.dNode.delayTime.value = mW;
- }
- this.dcGain.gain.value = 1.7 * (0.5 - this.w);
- } else {
- w.connect(this.dNode.delayTime);
- var sig = new p5.SignalAdd(-0.5);
- sig.setInput(w);
- sig = sig.mult(-1);
- sig = sig.mult(1.7);
- sig.connect(this.dcGain.gain);
- }
- };
- p5.Pulse.prototype.start = function (f, time) {
- var now = p5sound.audiocontext.currentTime;
- var t = time || 0;
- if (!this.started) {
- var freq = f || this.f;
- var type = this.oscillator.type;
- this.oscillator = p5sound.audiocontext.createOscillator();
- this.oscillator.frequency.setValueAtTime(freq, now);
- this.oscillator.type = type;
- this.oscillator.connect(this.output);
- this.oscillator.start(t + now);
- // set up osc2
- this.osc2.oscillator = p5sound.audiocontext.createOscillator();
- this.osc2.oscillator.frequency.setValueAtTime(freq, t + now);
- this.osc2.oscillator.type = type;
- this.osc2.oscillator.connect(this.osc2.output);
- this.osc2.start(t + now);
- this.freqNode = [
- this.oscillator.frequency,
- this.osc2.oscillator.frequency
- ];
- // start dcOffset, too
- this.dcOffset = createDCOffset();
- this.dcOffset.connect(this.dcGain);
- this.dcOffset.start(t + now);
- // if LFO connections depend on these oscillators
- if (this.mods !== undefined && this.mods.frequency !== undefined) {
- this.mods.frequency.connect(this.freqNode[0]);
- this.mods.frequency.connect(this.freqNode[1]);
- }
- this.started = true;
- this.osc2.started = true;
- }
- };
- p5.Pulse.prototype.stop = function (time) {
- if (this.started) {
- var t = time || 0;
- var now = p5sound.audiocontext.currentTime;
- this.oscillator.stop(t + now);
- if (this.osc2.oscillator) {
- this.osc2.oscillator.stop(t + now);
- }
- this.dcOffset.stop(t + now);
- this.started = false;
- this.osc2.started = false;
- }
- };
- p5.Pulse.prototype.freq = function (val, rampTime, tFromNow) {
- if (typeof val === 'number') {
- this.f = val;
- var now = p5sound.audiocontext.currentTime;
- var rampTime = rampTime || 0;
- var tFromNow = tFromNow || 0;
- var currentFreq = this.oscillator.frequency.value;
- this.oscillator.frequency.cancelScheduledValues(now);
- this.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow);
- this.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
- this.osc2.oscillator.frequency.cancelScheduledValues(now);
- this.osc2.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow);
- this.osc2.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
- if (this.freqMod) {
- this.freqMod.output.disconnect();
- this.freqMod = null;
- }
- } else if (val.output) {
- val.output.disconnect();
- val.output.connect(this.oscillator.frequency);
- val.output.connect(this.osc2.oscillator.frequency);
- this.freqMod = val;
+
+
+ p5.Envelope.prototype.setInput = function () {
+ for (var i = 0; i < arguments.length; i++) {
+ this.connect(arguments[i]);
}
};
- // inspiration: http://webaudiodemos.appspot.com/oscilloscope/
- function createDCOffset() {
- var ac = p5sound.audiocontext;
- var buffer = ac.createBuffer(1, 2048, ac.sampleRate);
- var data = buffer.getChannelData(0);
- for (var i = 0; i < 2048; i++)
- data[i] = 1;
- var bufferSource = ac.createBufferSource();
- bufferSource.buffer = buffer;
- bufferSource.loop = true;
- return bufferSource;
- }
-}(master, oscillator);
-var noise;
-'use strict';
-noise = function () {
- var p5sound = master;
/**
- * Noise is a type of oscillator that generates a buffer with random values.
+ * Set whether the envelope ramp is linear (default) or exponential.
+ * Exponential ramps can be useful because we perceive amplitude
+ * and frequency logarithmically.
*
- * @class p5.Noise
- * @extends p5.Oscillator
- * @constructor
- * @param {String} type Type of noise can be 'white' (default),
- * 'brown' or 'pink'.
+ * @method setExp
+ * @for p5.Envelope
+ * @param {Boolean} isExp true for exponential, false for linear
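+ * For instance, a minimal sketch (the ADSR values here are
+ * illustrative):
+ * @example
+ *
+ * let env = new p5.Envelope();
+ * env.setADSR(0.02, 0.2, 0.5, 0.4);
+ * // exponential ramps often sound smoother for amplitude,
+ * // since loudness perception is roughly logarithmic
+ * env.setExp(true);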
*/
- p5.Noise = function (type) {
- var assignType;
- p5.Oscillator.call(this);
- delete this.f;
- delete this.freq;
- delete this.oscillator;
- if (type === 'brown') {
- assignType = _brownNoise;
- } else if (type === 'pink') {
- assignType = _pinkNoise;
- } else {
- assignType = _whiteNoise;
+
+
+ p5.Envelope.prototype.setExp = function (isExp) {
+ this.isExponential = isExp;
+ };
+
+
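+ // exponential ramps cannot target zero or negative values,
+ // so clamp inputs to a tiny positive number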
+ p5.Envelope.prototype.checkExpInput = function (value) {
+ if (value <= 0) {
+ value = 0.00000001;
}
- this.buffer = assignType;
+
+ return value;
};
- p5.Noise.prototype = Object.create(p5.Oscillator.prototype);
- // generate noise buffers
- var _whiteNoise = function () {
- var bufferSize = 2 * p5sound.audiocontext.sampleRate;
- var whiteBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
- var noiseData = whiteBuffer.getChannelData(0);
- for (var i = 0; i < bufferSize; i++) {
- noiseData[i] = Math.random() * 2 - 1;
- }
- whiteBuffer.type = 'white';
- return whiteBuffer;
- }();
- var _pinkNoise = function () {
- var bufferSize = 2 * p5sound.audiocontext.sampleRate;
- var pinkBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
- var noiseData = pinkBuffer.getChannelData(0);
- var b0, b1, b2, b3, b4, b5, b6;
- b0 = b1 = b2 = b3 = b4 = b5 = b6 = 0;
- for (var i = 0; i < bufferSize; i++) {
- var white = Math.random() * 2 - 1;
- b0 = 0.99886 * b0 + white * 0.0555179;
- b1 = 0.99332 * b1 + white * 0.0750759;
- b2 = 0.969 * b2 + white * 0.153852;
- b3 = 0.8665 * b3 + white * 0.3104856;
- b4 = 0.55 * b4 + white * 0.5329522;
- b5 = -0.7616 * b5 - white * 0.016898;
- noiseData[i] = b0 + b1 + b2 + b3 + b4 + b5 + b6 + white * 0.5362;
- noiseData[i] *= 0.11;
- // (roughly) compensate for gain
- b6 = white * 0.115926;
- }
- pinkBuffer.type = 'pink';
- return pinkBuffer;
- }();
- var _brownNoise = function () {
- var bufferSize = 2 * p5sound.audiocontext.sampleRate;
- var brownBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
- var noiseData = brownBuffer.getChannelData(0);
- var lastOut = 0;
- for (var i = 0; i < bufferSize; i++) {
- var white = Math.random() * 2 - 1;
- noiseData[i] = (lastOut + 0.02 * white) / 1.02;
- lastOut = noiseData[i];
- noiseData[i] *= 3.5;
- }
- brownBuffer.type = 'brown';
- return brownBuffer;
- }();
/**
- * Set type of noise to 'white', 'pink' or 'brown'.
- * White is the default.
+ * Play tells the envelope to start acting on a given input.
+ * If the input is a p5.sound object (i.e. AudioIn, Oscillator,
+ * SoundFile), then Envelope will control its output volume.
+ * Envelopes can also be used to control any
+ * Web Audio Param.
*
- * @method setType
- * @param {String} [type] 'white', 'pink' or 'brown'
- */
- p5.Noise.prototype.setType = function (type) {
- switch (type) {
- case 'white':
- this.buffer = _whiteNoise;
- break;
- case 'pink':
- this.buffer = _pinkNoise;
- break;
- case 'brown':
- this.buffer = _brownNoise;
- break;
- default:
- this.buffer = _whiteNoise;
- }
- if (this.started) {
- var now = p5sound.audiocontext.currentTime;
- this.stop(now);
- this.start(now + 0.01);
- }
- };
- p5.Noise.prototype.getType = function () {
- return this.buffer.type;
- };
- p5.Noise.prototype.start = function () {
- if (this.started) {
- this.stop();
- }
- this.noise = p5sound.audiocontext.createBufferSource();
- this.noise.buffer = this.buffer;
- this.noise.loop = true;
- this.noise.connect(this.output);
- var now = p5sound.audiocontext.currentTime;
- this.noise.start(now);
- this.started = true;
- };
- p5.Noise.prototype.stop = function () {
- var now = p5sound.audiocontext.currentTime;
- if (this.noise) {
- this.noise.stop(now);
- this.started = false;
- }
- };
- p5.Noise.prototype.dispose = function () {
- var now = p5sound.audiocontext.currentTime;
- // remove reference from soundArray
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
- if (this.noise) {
- this.noise.disconnect();
- this.stop(now);
- }
- if (this.output) {
- this.output.disconnect();
- }
- if (this.panner) {
- this.panner.disconnect();
+ * @method play
+ * @for p5.Envelope
+ * @param {Object} unit A p5.sound object or
+ * Web Audio Param.
+ * @param {Number} [startTime] time from now (in seconds) at which to play
+ * @param {Number} [sustainTime] time to sustain before releasing the envelope
+ * @example
+ *
+ * let attackLevel = 1.0;
+ * let releaseLevel = 0;
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.2;
+ * let releaseTime = 0.5;
+ *
+ * let env, triOsc;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playEnv);
+ *
+ * env = new p5.Envelope();
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.freq(220);
+ * triOsc.start();
+ * }
+ *
+ * function draw() {
+ * background(220);
+ * text('tap here to play', 5, 20);
+ * attackTime = map(mouseX, 0, width, 0, 1.0);
+ * attackLevel = map(mouseY, height, 0, 0, 1.0);
+ * text('attack time: ' + attackTime, 5, height - 40);
+ * text('attack level: ' + attackLevel, 5, height - 20);
+ * }
+ *
+ * function playEnv() {
+ * // ensure that audio is enabled
+ * userStartAudio();
+ *
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(attackLevel, releaseLevel);
+ * env.play();
+ * }
+ *
+ */
+
+
+ p5.Envelope.prototype.play = function (unit, secondsFromNow, susTime) {
+ var tFromNow = secondsFromNow || 0;
+ var susTime = susTime || 0;
+
+ if (unit) {
+ if (this.connection !== unit) {
+ this.connect(unit);
+ }
}
- this.output = null;
- this.panner = null;
- this.buffer = null;
- this.noise = null;
+
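+ // trigger the attack/decay now, then schedule the release to fire
+ // once the attack, decay, and sustain portions have elapsed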
+ this.triggerAttack(unit, tFromNow);
+ this.triggerRelease(unit, tFromNow + this.aTime + this.dTime + susTime);
};
-}(master);
-var audioin;
-'use strict';
-audioin = function () {
- var p5sound = master;
- // an array of input sources
- p5sound.inputSources = [];
/**
- * Get audio from an input, i.e. your computer's microphone.
+ * Trigger the Attack, and Decay portion of the Envelope.
+ * Similar to holding down a key on a piano, but it will
+ * hold the sustain level until you let go. Input can be
+ * any p5.sound object, or a
+ * Web Audio Param.
*
- * Turn the mic on/off with the start() and stop() methods. When the mic
- * is on, its volume can be measured with getLevel or by connecting an
- * FFT object.
+ * @method triggerAttack
+ * @for p5.Envelope
+ * @param {Object} unit p5.sound Object or Web Audio Param
+ * @param {Number} secondsFromNow time from now (in seconds)
+ * @example
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.3;
+ * let releaseTime = 0.4;
+ * let env, triOsc;
*
- * If you want to hear the AudioIn, use the .connect() method.
- * AudioIn does not connect to p5.sound output by default to prevent
- * feedback.
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * background(220);
+ * textAlign(CENTER);
+ * textSize(10);
+ * text('tap to triggerAttack', width/2, height/2);
*
- * Note: This uses the getUserMedia/Stream API, which is not
- * supported by certain browsers. Access in Chrome browser
- * is limited to localhost and https, but access over http may be limited.
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(1.0, 0.0);
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.freq(220);
*
- * @class p5.AudioIn
- * @constructor
- * @param {Function} [errorCallback] A function to call if there is an error
- * accessing the AudioIn. For example,
- * Safari and iOS devices do not
- * currently allow microphone access.
- * @example
- *
- * let mic;
- * function setup(){
- * mic = new p5.AudioIn()
- * mic.start();
+ * cnv.mousePressed(envAttack);
* }
- * function draw(){
- * background(0);
- * micLevel = mic.getLevel();
- * ellipse(width/2, constrain(height-micLevel*height*5, 0, height), 10, 10);
+ *
+ * function envAttack() {
+ * background(0, 255, 255);
+ * text('release to release', width/2, height/2);
+ *
+ * // ensures audio is enabled. See also: `userStartAudio`
+ * triOsc.start();
+ *
+ * env.triggerAttack(triOsc);
+ * }
+ *
+ * function mouseReleased() {
+ * background(220);
+ * text('tap to triggerAttack', width/2, height/2);
+ *
+ * env.triggerRelease(triOsc);
* }
*
*/
- p5.AudioIn = function (errorCallback) {
- // set up audio input
- /**
- * @property {GainNode} input
- */
- this.input = p5sound.audiocontext.createGain();
- /**
- * @property {GainNode} output
- */
- this.output = p5sound.audiocontext.createGain();
- /**
- * @property {MediaStream|null} stream
- */
- this.stream = null;
- /**
- * @property {MediaStreamAudioSourceNode|null} mediaStream
- */
- this.mediaStream = null;
- /**
- * @property {Number|null} currentSource
- */
- this.currentSource = null;
- /**
- * Client must allow browser to access their microphone / audioin source.
- * Default: false. Will become true when the client enables access.
- *
- * @property {Boolean} enabled
- */
- this.enabled = false;
- /**
- * Input amplitude, connect to it by default but not to master out
- *
- * @property {p5.Amplitude} amplitude
- */
- this.amplitude = new p5.Amplitude();
- this.output.connect(this.amplitude.input);
- if (!window.MediaStreamTrack || !window.navigator.mediaDevices || !window.navigator.mediaDevices.getUserMedia) {
- errorCallback ? errorCallback() : window.alert('This browser does not support MediaStreamTrack and mediaDevices');
+
+
+ p5.Envelope.prototype.triggerAttack = function (unit, secondsFromNow) {
+ var now = p5sound.audiocontext.currentTime;
+ var tFromNow = secondsFromNow || 0;
+ var t = now + tFromNow;
+ this.lastAttack = t;
+ this.wasTriggered = true;
+
+ if (unit) {
+ if (this.connection !== unit) {
+ this.connect(unit);
+ }
+ }
+
+
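+ // anchor the automation at the control's current value
+ // so the ramp starts from where it actually is (avoids clicks)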
+ var valToSet = this.control.getValueAtTime(t);
+
+ if (this.isExponential === true) {
+ this.control.exponentialRampToValueAtTime(this.checkExpInput(valToSet), t);
+ } else {
+ this.control.linearRampToValueAtTime(valToSet, t);
+ }
+
+
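+ // attack portion: ramp to the attack level over aTime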
+ t += this.aTime;
+
+ if (this.isExponential === true) {
+ this.control.exponentialRampToValueAtTime(this.checkExpInput(this.aLevel), t);
+ valToSet = this.checkExpInput(this.control.getValueAtTime(t));
+ this.control.cancelScheduledValues(t);
+ this.control.exponentialRampToValueAtTime(valToSet, t);
+ } else {
+ this.control.linearRampToValueAtTime(this.aLevel, t);
+ valToSet = this.control.getValueAtTime(t);
+ this.control.cancelScheduledValues(t);
+ this.control.linearRampToValueAtTime(valToSet, t);
+ }
+
+
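+ // decay portion: ramp to the sustain (decay) level over dTime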
+ t += this.dTime;
+
+ if (this.isExponential === true) {
+ this.control.exponentialRampToValueAtTime(this.checkExpInput(this.dLevel), t);
+ valToSet = this.checkExpInput(this.control.getValueAtTime(t));
+ this.control.cancelScheduledValues(t);
+ this.control.exponentialRampToValueAtTime(valToSet, t);
+ } else {
+ this.control.linearRampToValueAtTime(this.dLevel, t);
+ valToSet = this.control.getValueAtTime(t);
+ this.control.cancelScheduledValues(t);
+ this.control.linearRampToValueAtTime(valToSet, t);
}
- // add to soundArray so we can dispose on close
- p5sound.soundArray.push(this);
};
/**
- * Start processing audio input. This enables the use of other
- * AudioIn methods like getLevel(). Note that by default, AudioIn
- * is not connected to p5.sound's output. So you won't hear
- * anything unless you use the connect() method.
+ * Trigger the Release of the Envelope. This is similar to releasing
+ * the key on a piano and letting the sound fade according to the
+ * release level and release time.
*
- * Certain browsers limit access to the user's microphone. For example,
- * Chrome only allows access from localhost and over https. For this reason,
- * you may want to include an errorCallback—a function that is called in case
- * the browser won't provide mic access.
+ * @method triggerRelease
+ * @for p5.Envelope
+ * @param {Object} unit p5.sound Object or Web Audio Param
+ * @param {Number} secondsFromNow time to trigger the release
+ * @example
+ *
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let susPercent = 0.3;
+ * let releaseTime = 0.4;
+ * let env, triOsc;
*
- * @method start
- * @param {Function} [successCallback] Name of a function to call on
- * success.
- * @param {Function} [errorCallback] Name of a function to call if
- * there was an error. For example,
- * some browsers do not support
- * getUserMedia.
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * background(220);
+ * textAlign(CENTER);
+ * textSize(10);
+ * text('tap to triggerAttack', width/2, height/2);
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(1.0, 0.0);
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.freq(220);
+ *
+ * cnv.mousePressed(envAttack);
+ * }
+ *
+ * function envAttack() {
+ * background(0, 255, 255);
+ * text('release to release', width/2, height/2);
+ *
+ * // ensures audio is enabled. See also: `userStartAudio`
+ * triOsc.start();
+ *
+ * env.triggerAttack(triOsc);
+ * }
+ *
+ * function mouseReleased() {
+ * background(220);
+ * text('tap to triggerAttack', width/2, height/2);
+ *
+ * env.triggerRelease(triOsc);
+ * }
+ *
*/
- p5.AudioIn.prototype.start = function (successCallback, errorCallback) {
- var self = this;
- if (this.stream) {
- this.stop();
+
+
+ p5.Envelope.prototype.triggerRelease = function (unit, secondsFromNow) {
+ if (!this.wasTriggered) {
+ return;
}
- // set the audio source
- var audioSource = p5sound.inputSources[self.currentSource];
- var constraints = {
- audio: {
- sampleRate: p5sound.audiocontext.sampleRate,
- echoCancellation: false
+
+ var now = p5sound.audiocontext.currentTime;
+ var tFromNow = secondsFromNow || 0;
+ var t = now + tFromNow;
+
+ if (unit) {
+ if (this.connection !== unit) {
+ this.connect(unit);
}
- };
- // if developers determine which source to use
- if (p5sound.inputSources[this.currentSource]) {
- constraints.audio.deviceId = audioSource.deviceId;
+ }
+
+
+ var valToSet = this.control.getValueAtTime(t);
+
+ if (this.isExponential === true) {
+ this.control.exponentialRampToValueAtTime(this.checkExpInput(valToSet), t);
+ } else {
+ this.control.linearRampToValueAtTime(valToSet, t);
+ }
+
+
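+ // release portion: ramp to the release level over rTime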
+ t += this.rTime;
+
+ if (this.isExponential === true) {
+ this.control.exponentialRampToValueAtTime(this.checkExpInput(this.rLevel), t);
+ valToSet = this.checkExpInput(this.control.getValueAtTime(t));
+ this.control.cancelScheduledValues(t);
+ this.control.exponentialRampToValueAtTime(valToSet, t);
+ } else {
+ this.control.linearRampToValueAtTime(this.rLevel, t);
+ valToSet = this.control.getValueAtTime(t);
+ this.control.cancelScheduledValues(t);
+ this.control.linearRampToValueAtTime(valToSet, t);
}
- window.navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
- self.stream = stream;
- self.enabled = true;
- // Wrap a MediaStreamSourceNode around the live input
- self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream);
- self.mediaStream.connect(self.output);
- // only send to the Amplitude reader, so we can see it but not hear it.
- self.amplitude.setInput(self.output);
- if (successCallback)
- successCallback();
- }).catch(function (err) {
- if (errorCallback)
- errorCallback(err);
- else
- console.error(err);
- });
+
+ this.wasTriggered = false;
};
/**
- * Turn the AudioIn off. If the AudioIn is stopped, it cannot getLevel().
- * If re-starting, the user may be prompted for permission access.
+ * Exponentially ramp to a value using the first two
+ * values from setADSR(attackTime, decayTime) as
+ * time constants for simple exponential ramps.
+ * If the value is higher than current value, it uses attackTime,
+ * while a decrease uses decayTime.
*
- * @method stop
- */
- p5.AudioIn.prototype.stop = function () {
- if (this.stream) {
- this.stream.getTracks().forEach(function (track) {
- track.stop();
- });
- this.mediaStream.disconnect();
- delete this.mediaStream;
- delete this.stream;
- }
- };
- /**
- * Connect to an audio unit. If no parameter is provided, will
- * connect to the master output (i.e. your speakers).
+ * @method ramp
+ * @for p5.Envelope
+ * @param {Object} unit p5.sound Object or Web Audio Param
+ * @param {Number} secondsFromNow When to trigger the ramp
+ * @param {Number} v Target value
+ * @param {Number} [v2] Second target value (optional)
+ * @example
+ *
+ * let env, osc, amp;
*
- * @method connect
- * @param {Object} [unit] An object that accepts audio input,
- * such as an FFT
+ * let attackTime = 0.001;
+ * let decayTime = 0.2;
+ * let attackLevel = 1;
+ * let decayLevel = 0;
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * fill(0,255,0);
+ * noStroke();
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime);
+ * osc = new p5.Oscillator();
+ * osc.amp(env);
+ * amp = new p5.Amplitude();
+ *
+ * cnv.mousePressed(triggerRamp);
+ * }
+ *
+ * function triggerRamp() {
+ * // ensures audio is enabled. See also: `userStartAudio`
+ * osc.start();
+ *
+ * env.ramp(osc, 0, attackLevel, decayLevel);
+ * }
+ *
+ * function draw() {
+ * background(20);
+ * text('tap to play', 10, 20);
+ * let h = map(amp.getLevel(), 0, 0.4, 0, height);
+ * rect(0, height, width, -h);
+ * }
+ *
*/
- p5.AudioIn.prototype.connect = function (unit) {
+
+
+ p5.Envelope.prototype.ramp = function (unit, secondsFromNow, v1, v2) {
+ var now = p5sound.audiocontext.currentTime;
+ var tFromNow = secondsFromNow || 0;
+ var t = now + tFromNow;
+ var destination1 = this.checkExpInput(v1);
+ var destination2 = typeof v2 !== 'undefined' ? this.checkExpInput(v2) : undefined;
+
if (unit) {
- if (unit.hasOwnProperty('input')) {
- this.output.connect(unit.input);
- } else if (unit.hasOwnProperty('analyser')) {
- this.output.connect(unit.analyser);
- } else {
- this.output.connect(unit);
+ if (this.connection !== unit) {
+ this.connect(unit);
}
- } else {
- this.output.connect(p5sound.input);
+ }
+
+
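+ // a rising target ramps with the attack time constant,
+ // a falling target with the decay time constant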
+ var currentVal = this.checkExpInput(this.control.getValueAtTime(t));
+
+ if (destination1 > currentVal) {
+ this.control.setTargetAtTime(destination1, t, this._rampAttackTC);
+ t += this._rampAttackTime;
+ }
+ else if (destination1 < currentVal) {
+ this.control.setTargetAtTime(destination1, t, this._rampDecayTC);
+ t += this._rampDecayTime;
+ }
+
+
+ if (destination2 === undefined) return;
+
+ if (destination2 > destination1) {
+ this.control.setTargetAtTime(destination2, t, this._rampAttackTC);
+ }
+ else if (destination2 < destination1) {
+ this.control.setTargetAtTime(destination2, t, this._rampDecayTC);
+ }
+ };
+
+ p5.Envelope.prototype.connect = function (unit) {
+ this.connection = unit;
+
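+ // for p5.sound objects, the envelope takes over the output gain param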
+ if (unit instanceof p5.Oscillator || unit instanceof p5.SoundFile || unit instanceof p5.AudioIn || unit instanceof p5.Reverb || unit instanceof p5.Noise || unit instanceof p5.Filter || unit instanceof p5.Delay) {
+ unit = unit.output.gain;
+ }
+
+ if (unit instanceof AudioParam) {
+ unit.setValueAtTime(0, p5sound.audiocontext.currentTime);
}
+
+ if (unit instanceof p5.Signal) {
+ unit.setValue(0);
+ }
+
+ this.output.connect(unit);
};
- /**
- * Disconnect the AudioIn from all audio units. For example, if
- * connect() had been called, disconnect() will stop sending
- * signal to your speakers.
- *
- * @method disconnect
- */
- p5.AudioIn.prototype.disconnect = function () {
+
+ p5.Envelope.prototype.disconnect = function () {
if (this.output) {
this.output.disconnect();
- // stay connected to amplitude even if not outputting to p5
- this.output.connect(this.amplitude.input);
}
+ };
+
+ /**
+ * Add a value to the p5.Envelope's output amplitude,
+ * and return the envelope. Calling this method
+ * again will override the initial add() with new values.
+ *
+ * @method add
+ * @for p5.Envelope
+ * @param {Number} number Constant number to add
+ * @return {p5.Envelope} Envelope Returns this envelope
+ * with scaled output
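+ * For instance (a minimal sketch):
+ * @example
+ * // shift the envelope's output up by a constant 0.1
+ * let env = new p5.Envelope();
+ * env.add(0.1);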
+ */
+
+
+ p5.Envelope.prototype.add = function (num) {
+ var add = new Add(num);
+ var thisChain = this.mathOps.length;
+ var nextChain = this.output;
+ return p5.prototype._mathChain(this, add, thisChain, nextChain, Add);
};
/**
- * Read the Amplitude (volume level) of an AudioIn. The AudioIn
- * class contains its own instance of the Amplitude class to help
- * make it easy to get a microphone's volume level. Accepts an
- * optional smoothing value (0.0 < 1.0). NOTE: AudioIn must
- * .start() before using .getLevel().
+ * Multiply the p5.Envelope's output amplitude
+ * by a fixed value. Calling this method
+ * again will override the initial mult() with new values.
*
- * @method getLevel
- * @param {Number} [smoothing] Smoothing is 0.0 by default.
- * Smooths values based on previous values.
- * @return {Number} Volume level (between 0.0 and 1.0)
+ * @method mult
+ * @for p5.Envelope
+ * @param {Number} number Constant number to multiply
+ * @return {p5.Envelope} Envelope Returns this envelope
+ * with scaled output
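+ * For instance (a minimal sketch):
+ * @example
+ * // halve the envelope's overall output level
+ * let env = new p5.Envelope();
+ * env.mult(0.5);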
*/
- p5.AudioIn.prototype.getLevel = function (smoothing) {
- if (smoothing) {
- this.amplitude.smoothing = smoothing;
- }
- return this.amplitude.getLevel();
+
+
+ p5.Envelope.prototype.mult = function (num) {
+ var mult = new Mult(num);
+ var thisChain = this.mathOps.length;
+ var nextChain = this.output;
+ return p5.prototype._mathChain(this, mult, thisChain, nextChain, Mult);
};
/**
- * Set amplitude (volume) of a mic input between 0 and 1.0.
+ * Scale this envelope's amplitude values to a given
+ * range, and return the envelope. Calling this method
+ * again will override the initial scale() with new values.
*
- * @method amp
- * @param {Number} vol between 0 and 1.0
- * @param {Number} [time] ramp time (optional)
+ * @method scale
+ * @for p5.Envelope
+ * @param {Number} inMin input range minimum
+ * @param {Number} inMax input range maximum
+ * @param {Number} outMin output range minimum
+ * @param {Number} outMax output range maximum
+ * @return {p5.Envelope} Envelope Returns this envelope
+ * with scaled output
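+ * For instance (a minimal sketch; the frequency range is
+ * illustrative):
+ * @example
+ * // remap a 0-1 envelope so it could sweep a filter
+ * // frequency between 100 Hz and 2000 Hz
+ * let env = new p5.Envelope();
+ * env.scale(0, 1, 100, 2000);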
*/
- p5.AudioIn.prototype.amp = function (vol, t) {
- if (t) {
- var rampTime = t || 0;
- var currentVol = this.output.gain.value;
- this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
- this.output.gain.setValueAtTime(currentVol, p5sound.audiocontext.currentTime);
- this.output.gain.linearRampToValueAtTime(vol, rampTime + p5sound.audiocontext.currentTime);
- } else {
- this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
- this.output.gain.setValueAtTime(vol, p5sound.audiocontext.currentTime);
+
+
+ p5.Envelope.prototype.scale = function (inMin, inMax, outMin, outMax) {
+ var scale = new Scale(inMin, inMax, outMin, outMax);
+ var thisChain = this.mathOps.length;
+ var nextChain = this.output;
+ return p5.prototype._mathChain(this, scale, thisChain, nextChain, Scale);
+ };
+
+
+ p5.Envelope.prototype.dispose = function () {
+ var index = p5sound.soundArray.indexOf(this);
+ p5sound.soundArray.splice(index, 1);
+ this.disconnect();
+
+ if (this.control) {
+ this.control.dispose();
+ this.control = null;
}
+
+ for (var i = 1; i < this.mathOps.length; i++) {
+ this.mathOps[i].dispose();
+ }
+ };
+
+
+ p5.Env = function (t1, l1, t2, l2, t3, l3) {
+ console.warn('WARNING: p5.Env is now deprecated and may be removed in future versions. ' + 'Please use the new p5.Envelope instead.');
+ p5.Envelope.call(this, t1, l1, t2, l2, t3, l3);
};
+
+ p5.Env.prototype = Object.create(p5.Envelope.prototype);
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ __webpack_require__(23);
/**
- * Returns a list of available input sources. This is a wrapper
- * for MediaDevices.enumerateDevices() and it returns a Promise.
- *
- * @method getSources
- * @param {Function} [successCallback] This callback function handles the sources when they
- * have been enumerated. The callback function
- * receives the deviceList array as its only argument
- * @param {Function} [errorCallback] This optional callback receives the error
- * message as its argument.
- * @returns {Promise} Returns a Promise that can be used in place of the callbacks, similar
- * to the enumerateDevices() method
- * @example
- *
- * let audiograb;
+ * Creates a Pulse object, an oscillator that implements
+ * Pulse Width Modulation.
+ * The pulse is created with two oscillators.
+ * Accepts a parameter for frequency, and to set the
+ * width between the pulses. See
+ * p5.Oscillator for a full list of methods.
*
- * function setup(){
- * //new audioIn
- * audioGrab = new p5.AudioIn();
+ * @class p5.Pulse
+ * @extends p5.Oscillator
+ * @constructor
+ * @param {Number} [freq] Frequency in oscillations per second (Hz)
+ * @param {Number} [w] Width between the pulses (0 to 1.0,
+ * defaults to 0)
+ * @example
+ *
+ * let pulse;
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(startPulse);
+ * background(220);
*
- * audioGrab.getSources(function(deviceList) {
- * //print out the array of available sources
- * console.log(deviceList);
- * //set the source to the first item in the deviceList array
- * audioGrab.setSource(0);
- * });
+ * pulse = new p5.Pulse();
+ * pulse.amp(0.5);
+ * pulse.freq(220);
+ * }
+ * function startPulse() {
+ * pulse.start();
+ * pulse.amp(0.5, 0.02);
+ * }
+ * function mouseReleased() {
+ * pulse.amp(0, 0.2);
+ * }
+ * function draw() {
+ * background(220);
+ * text('tap to play', 5, 20, width - 20);
+ * let w = map(mouseX, 0, width, 0, 1);
+ * w = constrain(w, 0, 1);
+ * pulse.width(w);
+ * text('pulse width: ' + w, 5, height - 20);
* }
*
*/
- p5.AudioIn.prototype.getSources = function (onSuccess, onError) {
- return new Promise(function (resolve, reject) {
- window.navigator.mediaDevices.enumerateDevices().then(function (devices) {
- p5sound.inputSources = devices.filter(function (device) {
- return device.kind === 'audioinput';
- });
- resolve(p5sound.inputSources);
- if (onSuccess) {
- onSuccess(p5sound.inputSources);
- }
- }).catch(function (error) {
- reject(error);
- if (onError) {
- onError(error);
- } else {
- console.error('This browser does not support MediaStreamTrack.getSources()');
- }
- });
- });
+
+
+ p5.Pulse = function (freq, w) {
+ p5.Oscillator.call(this, freq, 'sawtooth');
+
+ // width of PWM, should be between 0 and 1.0
+ this.w = w || 0;
+
+ // create a second sawtooth, to be inverted and delayed
+ this.osc2 = new p5.SawOsc(freq);
+
+ // create a delay node
+ this.dNode = p5sound.audiocontext.createDelay();
+
+ // dc offset compensates for the duty-cycle-dependent
+ // DC component of the resulting pulse
+ this.dcOffset = createDCOffset();
+ this.dcGain = p5sound.audiocontext.createGain();
+ this.dcOffset.connect(this.dcGain);
+ this.dcGain.connect(this.output);
+
+ // set delay time based on PWM width
+ this.f = freq || 440;
+ var mW = this.w / this.oscillator.frequency.value;
+ this.dNode.delayTime.value = mW;
+ this.dcGain.gain.value = 1.7 * (0.5 - this.w);
+
+ // disconnect osc2 and connect it (inverted) to the delay,
+ // which is connected to the output
+ this.osc2.disconnect();
+ this.osc2.panner.disconnect();
+ this.osc2.amp(-1);
+ this.osc2.output.connect(this.dNode);
+ this.dNode.connect(this.output);
+ this.output.gain.value = 1;
+ this.output.connect(this.panner);
+ };
+
+ p5.Pulse.prototype = Object.create(p5.Oscillator.prototype);
/**
- * Set the input source. Accepts a number representing a
- * position in the array returned by getSources().
- * This is only available in browsers that support
- * navigator.mediaDevices.enumerateDevices().
+ * Set the width of a Pulse object (an oscillator that implements
+ * Pulse Width Modulation).
*
- * @method setSource
- * @param {number} num position of input source in the array
+ * @method width
+ * @param {Number} [width] Width between the pulses (0 to 1.0,
+ * defaults to 0)
*/
- p5.AudioIn.prototype.setSource = function (num) {
- if (p5sound.inputSources.length > 0 && num < p5sound.inputSources.length) {
- // set the current source
- this.currentSource = num;
- console.log('set source to ', p5sound.inputSources[this.currentSource]);
- } else {
- console.log('unable to set input source');
- }
- // restart stream if currently active
- if (this.stream && this.stream.active) {
- this.start();
- }
- };
- // private method
- p5.AudioIn.prototype.dispose = function () {
- // remove reference from soundArray
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
- this.stop();
- if (this.output) {
- this.output.disconnect();
- }
- if (this.amplitude) {
- this.amplitude.disconnect();
- }
- delete this.amplitude;
- delete this.output;
- };
-}(master);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Negate;
-Tone_signal_Negate = function (Tone) {
- 'use strict';
- Tone.Negate = function () {
- this._multiply = this.input = this.output = new Tone.Multiply(-1);
- };
- Tone.extend(Tone.Negate, Tone.SignalBase);
- Tone.Negate.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._multiply.dispose();
- this._multiply = null;
- return this;
- };
- return Tone.Negate;
-}(Tone_core_Tone, Tone_signal_Multiply);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Subtract;
-Tone_signal_Subtract = function (Tone) {
- 'use strict';
- Tone.Subtract = function (value) {
- this.createInsOuts(2, 0);
- this._sum = this.input[0] = this.output = new Tone.Gain();
- this._neg = new Tone.Negate();
- this._param = this.input[1] = new Tone.Signal(value);
- this._param.chain(this._neg, this._sum);
- };
- Tone.extend(Tone.Subtract, Tone.Signal);
- Tone.Subtract.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._neg.dispose();
- this._neg = null;
- this._sum.disconnect();
- this._sum = null;
- this._param.dispose();
- this._param = null;
- return this;
- };
- return Tone.Subtract;
-}(Tone_core_Tone, Tone_signal_Add, Tone_signal_Negate, Tone_signal_Signal);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_GreaterThanZero;
-Tone_signal_GreaterThanZero = function (Tone) {
- 'use strict';
- Tone.GreaterThanZero = function () {
- this._thresh = this.output = new Tone.WaveShaper(function (val) {
- if (val <= 0) {
- return 0;
- } else {
- return 1;
- }
- }, 127);
- this._scale = this.input = new Tone.Multiply(10000);
- this._scale.connect(this._thresh);
- };
- Tone.extend(Tone.GreaterThanZero, Tone.SignalBase);
- Tone.GreaterThanZero.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._scale.dispose();
- this._scale = null;
- this._thresh.dispose();
- this._thresh = null;
- return this;
- };
- return Tone.GreaterThanZero;
-}(Tone_core_Tone, Tone_signal_Signal, Tone_signal_Multiply);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_GreaterThan;
-Tone_signal_GreaterThan = function (Tone) {
- 'use strict';
- Tone.GreaterThan = function (value) {
- this.createInsOuts(2, 0);
- this._param = this.input[0] = new Tone.Subtract(value);
- this.input[1] = this._param.input[1];
- this._gtz = this.output = new Tone.GreaterThanZero();
- this._param.connect(this._gtz);
- };
- Tone.extend(Tone.GreaterThan, Tone.Signal);
- Tone.GreaterThan.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._param.dispose();
- this._param = null;
- this._gtz.dispose();
- this._gtz = null;
- return this;
- };
- return Tone.GreaterThan;
-}(Tone_core_Tone, Tone_signal_GreaterThanZero, Tone_signal_Subtract);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Abs;
-Tone_signal_Abs = function (Tone) {
- 'use strict';
- Tone.Abs = function () {
- this._abs = this.input = this.output = new Tone.WaveShaper(function (val) {
- if (val === 0) {
- return 0;
- } else {
- return Math.abs(val);
- }
- }, 127);
- };
- Tone.extend(Tone.Abs, Tone.SignalBase);
- Tone.Abs.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._abs.dispose();
- this._abs = null;
- return this;
- };
- return Tone.Abs;
-}(Tone_core_Tone, Tone_signal_WaveShaper);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Modulo;
-Tone_signal_Modulo = function (Tone) {
- 'use strict';
- Tone.Modulo = function (modulus) {
- this.createInsOuts(1, 0);
- this._shaper = new Tone.WaveShaper(Math.pow(2, 16));
- this._multiply = new Tone.Multiply();
- this._subtract = this.output = new Tone.Subtract();
- this._modSignal = new Tone.Signal(modulus);
- this.input.fan(this._shaper, this._subtract);
- this._modSignal.connect(this._multiply, 0, 0);
- this._shaper.connect(this._multiply, 0, 1);
- this._multiply.connect(this._subtract, 0, 1);
- this._setWaveShaper(modulus);
- };
- Tone.extend(Tone.Modulo, Tone.SignalBase);
- Tone.Modulo.prototype._setWaveShaper = function (mod) {
- this._shaper.setMap(function (val) {
- var multiple = Math.floor((val + 0.0001) / mod);
- return multiple;
- });
- };
- Object.defineProperty(Tone.Modulo.prototype, 'value', {
- get: function () {
- return this._modSignal.value;
- },
- set: function (mod) {
- this._modSignal.value = mod;
- this._setWaveShaper(mod);
- }
- });
- Tone.Modulo.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._shaper.dispose();
- this._shaper = null;
- this._multiply.dispose();
- this._multiply = null;
- this._subtract.dispose();
- this._subtract = null;
- this._modSignal.dispose();
- this._modSignal = null;
- return this;
- };
- return Tone.Modulo;
-}(Tone_core_Tone, Tone_signal_WaveShaper, Tone_signal_Multiply);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Pow;
-Tone_signal_Pow = function (Tone) {
- 'use strict';
- Tone.Pow = function (exp) {
- this._exp = this.defaultArg(exp, 1);
- this._expScaler = this.input = this.output = new Tone.WaveShaper(this._expFunc(this._exp), 8192);
- };
- Tone.extend(Tone.Pow, Tone.SignalBase);
- Object.defineProperty(Tone.Pow.prototype, 'value', {
- get: function () {
- return this._exp;
- },
- set: function (exp) {
- this._exp = exp;
- this._expScaler.setMap(this._expFunc(this._exp));
- }
- });
- Tone.Pow.prototype._expFunc = function (exp) {
- return function (val) {
- return Math.pow(Math.abs(val), exp);
- };
- };
- Tone.Pow.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._expScaler.dispose();
- this._expScaler = null;
- return this;
- };
- return Tone.Pow;
-}(Tone_core_Tone);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_AudioToGain;
-Tone_signal_AudioToGain = function (Tone) {
- 'use strict';
- Tone.AudioToGain = function () {
- this._norm = this.input = this.output = new Tone.WaveShaper(function (x) {
- return (x + 1) / 2;
- });
- };
- Tone.extend(Tone.AudioToGain, Tone.SignalBase);
- Tone.AudioToGain.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._norm.dispose();
- this._norm = null;
- return this;
- };
- return Tone.AudioToGain;
-}(Tone_core_Tone, Tone_signal_WaveShaper);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Expr;
-Tone_signal_Expr = function (Tone) {
- 'use strict';
- Tone.Expr = function () {
- var expr = this._replacements(Array.prototype.slice.call(arguments));
- var inputCount = this._parseInputs(expr);
- this._nodes = [];
- this.input = new Array(inputCount);
- for (var i = 0; i < inputCount; i++) {
- this.input[i] = this.context.createGain();
- }
- var tree = this._parseTree(expr);
- var result;
- try {
- result = this._eval(tree);
- } catch (e) {
- this._disposeNodes();
- throw new Error('Tone.Expr: Could evaluate expression: ' + expr);
- }
- this.output = result;
- };
- Tone.extend(Tone.Expr, Tone.SignalBase);
- function applyBinary(Constructor, args, self) {
- var op = new Constructor();
- self._eval(args[0]).connect(op, 0, 0);
- self._eval(args[1]).connect(op, 0, 1);
- return op;
- }
- function applyUnary(Constructor, args, self) {
- var op = new Constructor();
- self._eval(args[0]).connect(op, 0, 0);
- return op;
- }
- function getNumber(arg) {
- return arg ? parseFloat(arg) : undefined;
- }
- function literalNumber(arg) {
- return arg && arg.args ? parseFloat(arg.args) : undefined;
- }
- Tone.Expr._Expressions = {
- 'value': {
- 'signal': {
- regexp: /^\d+\.\d+|^\d+/,
- method: function (arg) {
- var sig = new Tone.Signal(getNumber(arg));
- return sig;
- }
- },
- 'input': {
- regexp: /^\$\d/,
- method: function (arg, self) {
- return self.input[getNumber(arg.substr(1))];
- }
- }
- },
- 'glue': {
- '(': { regexp: /^\(/ },
- ')': { regexp: /^\)/ },
- ',': { regexp: /^,/ }
- },
- 'func': {
- 'abs': {
- regexp: /^abs/,
- method: applyUnary.bind(this, Tone.Abs)
- },
- 'mod': {
- regexp: /^mod/,
- method: function (args, self) {
- var modulus = literalNumber(args[1]);
- var op = new Tone.Modulo(modulus);
- self._eval(args[0]).connect(op);
- return op;
- }
- },
- 'pow': {
- regexp: /^pow/,
- method: function (args, self) {
- var exp = literalNumber(args[1]);
- var op = new Tone.Pow(exp);
- self._eval(args[0]).connect(op);
- return op;
- }
- },
- 'a2g': {
- regexp: /^a2g/,
- method: function (args, self) {
- var op = new Tone.AudioToGain();
- self._eval(args[0]).connect(op);
- return op;
- }
- }
- },
- 'binary': {
- '+': {
- regexp: /^\+/,
- precedence: 1,
- method: applyBinary.bind(this, Tone.Add)
- },
- '-': {
- regexp: /^\-/,
- precedence: 1,
- method: function (args, self) {
- if (args.length === 1) {
- return applyUnary(Tone.Negate, args, self);
- } else {
- return applyBinary(Tone.Subtract, args, self);
- }
- }
- },
- '*': {
- regexp: /^\*/,
- precedence: 0,
- method: applyBinary.bind(this, Tone.Multiply)
- }
- },
- 'unary': {
- '-': {
- regexp: /^\-/,
- method: applyUnary.bind(this, Tone.Negate)
- },
- '!': {
- regexp: /^\!/,
- method: applyUnary.bind(this, Tone.NOT)
- }
- }
- };
- Tone.Expr.prototype._parseInputs = function (expr) {
- var inputArray = expr.match(/\$\d/g);
- var inputMax = 0;
- if (inputArray !== null) {
- for (var i = 0; i < inputArray.length; i++) {
- var inputNum = parseInt(inputArray[i].substr(1)) + 1;
- inputMax = Math.max(inputMax, inputNum);
- }
- }
- return inputMax;
- };
- Tone.Expr.prototype._replacements = function (args) {
- var expr = args.shift();
- for (var i = 0; i < args.length; i++) {
- expr = expr.replace(/\%/i, args[i]);
- }
- return expr;
- };
- Tone.Expr.prototype._tokenize = function (expr) {
- var position = -1;
- var tokens = [];
- while (expr.length > 0) {
- expr = expr.trim();
- var token = getNextToken(expr);
- tokens.push(token);
- expr = expr.substr(token.value.length);
- }
- function getNextToken(expr) {
- for (var type in Tone.Expr._Expressions) {
- var group = Tone.Expr._Expressions[type];
- for (var opName in group) {
- var op = group[opName];
- var reg = op.regexp;
- var match = expr.match(reg);
- if (match !== null) {
- return {
- type: type,
- value: match[0],
- method: op.method
- };
- }
- }
+
+ p5.Pulse.prototype.width = function (w) {
+ if (typeof w === 'number') {
+ if (w <= 1.0 && w >= 0.0) {
+ this.w = w;
+
+ var mW = this.w / this.oscillator.frequency.value;
+ this.dNode.delayTime.value = mW;
}
- throw new SyntaxError('Tone.Expr: Unexpected token ' + expr);
+
+ this.dcGain.gain.value = 1.7 * (0.5 - this.w);
+ } else {
+ w.connect(this.dNode.delayTime);
+ var sig = new p5.SignalAdd(-0.5);
+ sig.setInput(w);
+ sig = sig.mult(-1);
+ sig = sig.mult(1.7);
+ sig.connect(this.dcGain.gain);
}
- return {
- next: function () {
- return tokens[++position];
- },
- peek: function () {
- return tokens[position + 1];
- }
- };
};
- Tone.Expr.prototype._parseTree = function (expr) {
- var lexer = this._tokenize(expr);
- var isUndef = this.isUndef.bind(this);
- function matchSyntax(token, syn) {
- return !isUndef(token) && token.type === 'glue' && token.value === syn;
- }
- function matchGroup(token, groupName, prec) {
- var ret = false;
- var group = Tone.Expr._Expressions[groupName];
- if (!isUndef(token)) {
- for (var opName in group) {
- var op = group[opName];
- if (op.regexp.test(token.value)) {
- if (!isUndef(prec)) {
- if (op.precedence === prec) {
- return true;
- }
- } else {
- return true;
- }
- }
- }
- }
- return ret;
- }
- function parseExpression(precedence) {
- if (isUndef(precedence)) {
- precedence = 5;
- }
- var expr;
- if (precedence < 0) {
- expr = parseUnary();
- } else {
- expr = parseExpression(precedence - 1);
- }
- var token = lexer.peek();
- while (matchGroup(token, 'binary', precedence)) {
- token = lexer.next();
- expr = {
- operator: token.value,
- method: token.method,
- args: [
- expr,
- parseExpression(precedence - 1)
- ]
- };
- token = lexer.peek();
- }
- return expr;
- }
- function parseUnary() {
- var token, expr;
- token = lexer.peek();
- if (matchGroup(token, 'unary')) {
- token = lexer.next();
- expr = parseUnary();
- return {
- operator: token.value,
- method: token.method,
- args: [expr]
- };
- }
- return parsePrimary();
- }
- function parsePrimary() {
- var token, expr;
- token = lexer.peek();
- if (isUndef(token)) {
- throw new SyntaxError('Tone.Expr: Unexpected termination of expression');
- }
- if (token.type === 'func') {
- token = lexer.next();
- return parseFunctionCall(token);
- }
- if (token.type === 'value') {
- token = lexer.next();
- return {
- method: token.method,
- args: token.value
- };
- }
- if (matchSyntax(token, '(')) {
- lexer.next();
- expr = parseExpression();
- token = lexer.next();
- if (!matchSyntax(token, ')')) {
- throw new SyntaxError('Expected )');
- }
- return expr;
- }
- throw new SyntaxError('Tone.Expr: Parse error, cannot process token ' + token.value);
- }
- function parseFunctionCall(func) {
- var token, args = [];
- token = lexer.next();
- if (!matchSyntax(token, '(')) {
- throw new SyntaxError('Tone.Expr: Expected ( in a function call "' + func.value + '"');
- }
- token = lexer.peek();
- if (!matchSyntax(token, ')')) {
- args = parseArgumentList();
- }
- token = lexer.next();
- if (!matchSyntax(token, ')')) {
- throw new SyntaxError('Tone.Expr: Expected ) in a function call "' + func.value + '"');
- }
- return {
- method: func.method,
- args: args,
- name: name
- };
- }
- function parseArgumentList() {
- var token, expr, args = [];
- while (true) {
- expr = parseExpression();
- if (isUndef(expr)) {
- break;
- }
- args.push(expr);
- token = lexer.peek();
- if (!matchSyntax(token, ',')) {
- break;
- }
- lexer.next();
+
+ p5.Pulse.prototype.start = function (f, time) {
+ var now = p5sound.audiocontext.currentTime;
+ var t = time || 0;
+
+ if (!this.started) {
+ var freq = f || this.f;
+ var type = this.oscillator.type;
+ this.oscillator = p5sound.audiocontext.createOscillator();
+ this.oscillator.frequency.setValueAtTime(freq, now);
+ this.oscillator.type = type;
+ this.oscillator.connect(this.output);
+ this.oscillator.start(t + now);
+
+ this.osc2.oscillator = p5sound.audiocontext.createOscillator();
+ this.osc2.oscillator.frequency.setValueAtTime(freq, t + now);
+ this.osc2.oscillator.type = type;
+ this.osc2.oscillator.connect(this.osc2.output);
+ this.osc2.start(t + now);
+ this.freqNode = [this.oscillator.frequency, this.osc2.oscillator.frequency];
+
+ this.dcOffset = createDCOffset();
+ this.dcOffset.connect(this.dcGain);
+ this.dcOffset.start(t + now);
+
+ if (this.mods !== undefined && this.mods.frequency !== undefined) {
+ this.mods.frequency.connect(this.freqNode[0]);
+ this.mods.frequency.connect(this.freqNode[1]);
}
- return args;
- }
- return parseExpression();
- };
- Tone.Expr.prototype._eval = function (tree) {
- if (!this.isUndef(tree)) {
- var node = tree.method(tree.args, this);
- this._nodes.push(node);
- return node;
+
+ this.started = true;
+ this.osc2.started = true;
}
};
- Tone.Expr.prototype._disposeNodes = function () {
- for (var i = 0; i < this._nodes.length; i++) {
- var node = this._nodes[i];
- if (this.isFunction(node.dispose)) {
- node.dispose();
- } else if (this.isFunction(node.disconnect)) {
- node.disconnect();
+
+ p5.Pulse.prototype.stop = function (time) {
+ if (this.started) {
+ var t = time || 0;
+ var now = p5sound.audiocontext.currentTime;
+ this.oscillator.stop(t + now);
+
+ if (this.osc2.oscillator) {
+ this.osc2.oscillator.stop(t + now);
}
- node = null;
- this._nodes[i] = null;
+
+ this.dcOffset.stop(t + now);
+ this.started = false;
+ this.osc2.started = false;
}
- this._nodes = null;
- };
- Tone.Expr.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._disposeNodes();
};
- return Tone.Expr;
-}(Tone_core_Tone, Tone_signal_Add, Tone_signal_Subtract, Tone_signal_Multiply, Tone_signal_GreaterThan, Tone_signal_GreaterThanZero, Tone_signal_Abs, Tone_signal_Negate, Tone_signal_Modulo, Tone_signal_Pow);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_EqualPowerGain;
-Tone_signal_EqualPowerGain = function (Tone) {
- 'use strict';
- Tone.EqualPowerGain = function () {
- this._eqPower = this.input = this.output = new Tone.WaveShaper(function (val) {
- if (Math.abs(val) < 0.001) {
- return 0;
- } else {
- return this.equalPowerScale(val);
+
+ p5.Pulse.prototype.freq = function (val, rampTime, tFromNow) {
+ if (typeof val === 'number') {
+ this.f = val;
+ var now = p5sound.audiocontext.currentTime;
+ var rampTime = rampTime || 0;
+ var tFromNow = tFromNow || 0;
+ var currentFreq = this.oscillator.frequency.value;
+ this.oscillator.frequency.cancelScheduledValues(now);
+ this.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow);
+ this.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
+ this.osc2.oscillator.frequency.cancelScheduledValues(now);
+ this.osc2.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow);
+ this.osc2.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
+
+ if (this.freqMod) {
+ this.freqMod.output.disconnect();
+ this.freqMod = null;
}
- }.bind(this), 4096);
- };
- Tone.extend(Tone.EqualPowerGain, Tone.SignalBase);
- Tone.EqualPowerGain.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._eqPower.dispose();
- this._eqPower = null;
- return this;
- };
- return Tone.EqualPowerGain;
-}(Tone_core_Tone);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_component_CrossFade;
-Tone_component_CrossFade = function (Tone) {
- 'use strict';
- Tone.CrossFade = function (initialFade) {
- this.createInsOuts(2, 1);
- this.a = this.input[0] = new Tone.Gain();
- this.b = this.input[1] = new Tone.Gain();
- this.fade = new Tone.Signal(this.defaultArg(initialFade, 0.5), Tone.Type.NormalRange);
- this._equalPowerA = new Tone.EqualPowerGain();
- this._equalPowerB = new Tone.EqualPowerGain();
- this._invert = new Tone.Expr('1 - $0');
- this.a.connect(this.output);
- this.b.connect(this.output);
- this.fade.chain(this._equalPowerB, this.b.gain);
- this.fade.chain(this._invert, this._equalPowerA, this.a.gain);
- this._readOnly('fade');
- };
- Tone.extend(Tone.CrossFade);
- Tone.CrossFade.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._writable('fade');
- this._equalPowerA.dispose();
- this._equalPowerA = null;
- this._equalPowerB.dispose();
- this._equalPowerB = null;
- this.fade.dispose();
- this.fade = null;
- this._invert.dispose();
- this._invert = null;
- this.a.dispose();
- this.a = null;
- this.b.dispose();
- this.b = null;
- return this;
- };
- return Tone.CrossFade;
-}(Tone_core_Tone, Tone_signal_Signal, Tone_signal_Expr, Tone_signal_EqualPowerGain);
-var effect;
-'use strict';
-effect = function () {
- var p5sound = master;
- var CrossFade = Tone_component_CrossFade;
+ } else if (val.output) {
+ val.output.disconnect();
+ val.output.connect(this.oscillator.frequency);
+ val.output.connect(this.osc2.oscillator.frequency);
+ this.freqMod = val;
+ }
+ };
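+
+  // Usage sketch (illustrative, with assumed parameter values): ramping a
+  // p5.Pulse with freq(val, rampTime, tFromNow) as implemented above.
+  //
+  //   var pulse = new p5.Pulse(220, 0.2); // 220 Hz, width 0.2
+  //   pulse.amp(0.5);
+  //   pulse.start();
+  //   pulse.freq(880, 2, 0.5); // ramp to 880 Hz over 2s, starting 0.5s from now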
+
+
+ function createDCOffset() {
+ var ac = p5sound.audiocontext;
+ var buffer = ac.createBuffer(1, 2048, ac.sampleRate);
+ var data = buffer.getChannelData(0);
+
+ for (var i = 0; i < 2048; i++) {
+ data[i] = 1.0;
+ }
+
+ var bufferSource = ac.createBufferSource();
+ bufferSource.buffer = buffer;
+ bufferSource.loop = true;
+ return bufferSource;
+ }
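+
+  // createDCOffset() loops a buffer filled with the constant 1.0 so it can be
+  // scaled by dcGain and mixed into the output. A sketch of an alternative,
+  // assuming ConstantSourceNode support in the target browsers:
+  //
+  //   var dc = ac.createConstantSource(); // offset defaults to 1.0
+  //   dc.connect(this.dcGain);
+  //   dc.start();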
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
/**
- * Effect is a base class for audio effects in p5.
- * This module handles the nodes and methods that are
- * common and useful for current and future effects.
- *
- *
- * This class is extended by p5.Distortion,
- * p5.Compressor,
- * p5.Delay,
- * p5.Filter,
- * p5.Reverb.
+ * Noise is a type of oscillator that generates a buffer with random values.
*
- * @class p5.Effect
- * @constructor
- *
- * @param {Object} [ac] Reference to the audio context of the p5 object
- * @param {AudioNode} [input] Gain Node effect wrapper
- * @param {AudioNode} [output] Gain Node effect wrapper
- * @param {Object} [_drywet] Tone.JS CrossFade node (defaults to value: 1)
- * @param {AudioNode} [wet] Effects that extend this class should connect
- * to the wet signal to this gain node, so that dry and wet
- * signals are mixed properly.
- */
- p5.Effect = function () {
- this.ac = p5sound.audiocontext;
- this.input = this.ac.createGain();
- this.output = this.ac.createGain();
- /**
- * The p5.Effect class is built
- * using Tone.js CrossFade
- * @private
- */
- this._drywet = new CrossFade(1);
- /**
- * In classes that extend
- * p5.Effect, connect effect nodes
- * to the wet parameter
- */
- this.wet = this.ac.createGain();
- this.input.connect(this._drywet.a);
- this.wet.connect(this._drywet.b);
- this._drywet.connect(this.output);
- this.connect();
- //Add to the soundArray
- p5sound.soundArray.push(this);
- };
- /**
- * Set the output volume of the filter.
- *
- * @method amp
- * @param {Number} [vol] amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts until rampTime
- * @param {Number} [tFromNow] schedule this event to happen in tFromNow seconds
+ * @class p5.Noise
+ * @extends p5.Oscillator
+ * @constructor
+ * @param {String} type Type of noise can be 'white' (default),
+ * 'brown' or 'pink'.
*/
- p5.Effect.prototype.amp = function (vol, rampTime, tFromNow) {
- var rampTime = rampTime || 0;
- var tFromNow = tFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- var currentVol = this.output.gain.value;
- this.output.gain.cancelScheduledValues(now);
- this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow + 0.001);
- this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime + 0.001);
+
+
+ p5.Noise = function (type) {
+ var assignType;
+ p5.Oscillator.call(this);
+ delete this.f;
+ delete this.freq;
+ delete this.oscillator;
+
+ if (type === 'brown') {
+ assignType = _brownNoise;
+ } else if (type === 'pink') {
+ assignType = _pinkNoise;
+ } else {
+ assignType = _whiteNoise;
+ }
+
+ this.buffer = assignType;
};
+
+ p5.Noise.prototype = Object.create(p5.Oscillator.prototype);
+
+ var _whiteNoise = function () {
+ var bufferSize = 2 * p5sound.audiocontext.sampleRate;
+ var whiteBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
+ var noiseData = whiteBuffer.getChannelData(0);
+
+ for (var i = 0; i < bufferSize; i++) {
+ noiseData[i] = Math.random() * 2 - 1;
+ }
+
+ whiteBuffer.type = 'white';
+ return whiteBuffer;
+ }();
+
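+  // The b0..b6 recurrence below filters white noise toward a ~-3 dB/octave
+  // pink spectrum (the coefficients appear to be Paul Kellet's refined
+  // approximation; attribution assumed).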
+ var _pinkNoise = function () {
+ var bufferSize = 2 * p5sound.audiocontext.sampleRate;
+ var pinkBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
+ var noiseData = pinkBuffer.getChannelData(0);
+ var b0, b1, b2, b3, b4, b5, b6;
+ b0 = b1 = b2 = b3 = b4 = b5 = b6 = 0.0;
+
+ for (var i = 0; i < bufferSize; i++) {
+ var white = Math.random() * 2 - 1;
+ b0 = 0.99886 * b0 + white * 0.0555179;
+ b1 = 0.99332 * b1 + white * 0.0750759;
+ b2 = 0.96900 * b2 + white * 0.1538520;
+ b3 = 0.86650 * b3 + white * 0.3104856;
+ b4 = 0.55000 * b4 + white * 0.5329522;
+ b5 = -0.7616 * b5 - white * 0.0168980;
+ noiseData[i] = b0 + b1 + b2 + b3 + b4 + b5 + b6 + white * 0.5362;
+ noiseData[i] *= 0.11;
+
+ b6 = white * 0.115926;
+ }
+
+ pinkBuffer.type = 'pink';
+ return pinkBuffer;
+ }();
+
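+  // Brown noise below is a leaky integrator of white noise: each sample keeps
+  // roughly 98% of the previous output, and the 3.5 gain compensates for the
+  // level lost in the averaging.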
+ var _brownNoise = function () {
+ var bufferSize = 2 * p5sound.audiocontext.sampleRate;
+ var brownBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
+ var noiseData = brownBuffer.getChannelData(0);
+ var lastOut = 0.0;
+
+ for (var i = 0; i < bufferSize; i++) {
+ var white = Math.random() * 2 - 1;
+ noiseData[i] = (lastOut + 0.02 * white) / 1.02;
+ lastOut = noiseData[i];
+ noiseData[i] *= 3.5;
+ }
+
+ brownBuffer.type = 'brown';
+ return brownBuffer;
+ }();
/**
- * Link effects together in a chain
- * Example usage: filter.chain(reverb, delay, panner);
- * May be used with an open-ended number of arguments
+ * Set type of noise to 'white', 'pink' or 'brown'.
+ * White is the default.
*
- * @method chain
- * @param {Object} [arguments] Chain together multiple sound objects
+ * @method setType
+ * @param {String} [type] 'white', 'pink' or 'brown'
*/
- p5.Effect.prototype.chain = function () {
- if (arguments.length > 0) {
- this.connect(arguments[0]);
- for (var i = 1; i < arguments.length; i += 1) {
- arguments[i - 1].connect(arguments[i]);
- }
+
+
+ p5.Noise.prototype.setType = function (type) {
+ switch (type) {
+ case 'white':
+ this.buffer = _whiteNoise;
+ break;
+
+ case 'pink':
+ this.buffer = _pinkNoise;
+ break;
+
+ case 'brown':
+ this.buffer = _brownNoise;
+ break;
+
+ default:
+ this.buffer = _whiteNoise;
}
- return this;
- };
- /**
- * Adjust the dry/wet value.
- *
- * @method drywet
- * @param {Number} [fade] The desired drywet value (0 - 1.0)
- */
- p5.Effect.prototype.drywet = function (fade) {
- if (typeof fade !== 'undefined') {
- this._drywet.fade.value = fade;
+
+ if (this.started) {
+ var now = p5sound.audiocontext.currentTime;
+ this.stop(now);
+ this.start(now + .01);
}
- return this._drywet.fade.value;
};
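+
+  // Usage sketch (illustrative): switching noise color while it plays; per the
+  // implementation above, a started Noise restarts ~10 ms after setType().
+  //
+  //   var noise = new p5.Noise(); // defaults to 'white'
+  //   noise.start();
+  //   noise.setType('pink');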
- /**
- * Send output to a p5.js-sound, Web Audio Node, or use signal to
- * control an AudioParam
- *
- * @method connect
- * @param {Object} unit
- */
- p5.Effect.prototype.connect = function (unit) {
- var u = unit || p5.soundOut.input;
- this.output.connect(u.input ? u.input : u);
+
+ p5.Noise.prototype.getType = function () {
+ return this.buffer.type;
};
- /**
- * Disconnect all output.
- *
- * @method disconnect
- */
- p5.Effect.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
+
+ p5.Noise.prototype.start = function () {
+ if (this.started) {
+ this.stop();
+ }
+
+ this.noise = p5sound.audiocontext.createBufferSource();
+ this.noise.buffer = this.buffer;
+ this.noise.loop = true;
+ this.noise.connect(this.output);
+ var now = p5sound.audiocontext.currentTime;
+ this.noise.start(now);
+ this.started = true;
+ };
+
+ p5.Noise.prototype.stop = function () {
+ var now = p5sound.audiocontext.currentTime;
+
+ if (this.noise) {
+ this.noise.stop(now);
+ this.started = false;
}
};
- p5.Effect.prototype.dispose = function () {
- // remove refernce form soundArray
+
+ p5.Noise.prototype.dispose = function () {
+ var now = p5sound.audiocontext.currentTime;
+
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
- if (this.input) {
- this.input.disconnect();
- delete this.input;
+
+ if (this.noise) {
+ this.noise.disconnect();
+ this.stop(now);
}
+
if (this.output) {
this.output.disconnect();
- delete this.output;
- }
- if (this._drywet) {
- this._drywet.disconnect();
- delete this._drywet;
}
- if (this.wet) {
- this.wet.disconnect();
- delete this.wet;
+
+ if (this.panner) {
+ this.panner.disconnect();
}
- this.ac = undefined;
+
+ this.output = null;
+ this.panner = null;
+ this.buffer = null;
+ this.noise = null;
};
- return p5.Effect;
-}(master, Tone_component_CrossFade);
-var filter;
-'use strict';
-filter = function () {
- var p5sound = master;
- var Effect = effect;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+
+ p5sound.inputSources = [];
/**
- * A p5.Filter uses a Web Audio Biquad Filter to filter
- * the frequency response of an input source. Subclasses
- * include:
- * * p5.LowPass:
- * Allows frequencies below the cutoff frequency to pass through,
- * and attenuates frequencies above the cutoff.
- * * p5.HighPass:
- * The opposite of a lowpass filter.
- * * p5.BandPass:
- * Allows a range of frequencies to pass through and attenuates
- * the frequencies below and above this frequency range.
+ * Get audio from an input, i.e. your computer's microphone.
*
- * The .res() method controls either width of the
- * bandpass, or resonance of the low/highpass cutoff frequency.
+ * Turn the mic on/off with the start() and stop() methods. When the mic
+ * is on, its volume can be measured with getLevel or by connecting an
+ * FFT object.
*
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
- * disconnect() are available.
+ * If you want to hear the AudioIn, use the .connect() method.
+ * AudioIn does not connect to p5.sound output by default to prevent
+ * feedback.
*
- * @class p5.Filter
- * @extends p5.Effect
+ * Note: This uses the getUserMedia/
+ * Stream API, which is not supported by certain browsers. Access in the
+ * Chrome browser is limited to localhost and https; over plain http, mic access may be blocked.
+ *
+ * @class p5.AudioIn
* @constructor
- * @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass'
+ * @param {Function} [errorCallback] A function to call if there is an error
+ * accessing the AudioIn. For example,
+ * Safari and iOS devices do not
+ * currently allow microphone access.
* @example
*
- * let fft, noise, filter;
- *
- * function setup() {
- * fill(255, 40, 255);
- *
- * filter = new p5.BandPass();
- *
- * noise = new p5.Noise();
- * // disconnect unfiltered noise,
- * // and connect to filter
- * noise.disconnect();
- * noise.connect(filter);
- * noise.start();
+ * let mic;
*
- * fft = new p5.FFT();
+ * function setup(){
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(userStartAudio);
+ * textAlign(CENTER);
+ * mic = new p5.AudioIn();
+ * mic.start();
* }
*
- * function draw() {
- * background(30);
- *
- * // set the BandPass frequency based on mouseX
- * let freq = map(mouseX, 0, width, 20, 10000);
- * filter.freq(freq);
- * // give the filter a narrow band (lower res = wider bandpass)
- * filter.res(50);
- *
- * // draw filtered spectrum
- * let spectrum = fft.analyze();
- * noStroke();
- * for (let i = 0; i < spectrum.length; i++) {
- * let x = map(i, 0, spectrum.length, 0, width);
- * let h = -height + map(spectrum[i], 0, 255, height, 0);
- * rect(x, height, width/spectrum.length, h);
- * }
- *
- * isMouseOverCanvas();
- * }
+ * function draw(){
+ * background(0);
+ * fill(255);
+ * text('tap to start', width/2, 20);
*
- * function isMouseOverCanvas() {
- * let mX = mouseX, mY = mouseY;
- * if (mX > 0 && mX < width && mY < height && mY > 0) {
- * noise.amp(0.5, 0.2);
- * } else {
- * noise.amp(0, 0.2);
- * }
+ * micLevel = mic.getLevel();
+ * let y = height - micLevel * height;
+ * ellipse(width/2, y, 10, 10);
* }
*
*/
- //constructor with inheritance
- p5.Filter = function (type) {
- Effect.call(this);
- //add extend Effect by adding a Biquad Filter
+
+ p5.AudioIn = function (errorCallback) {
+
/**
- * The p5.Filter is built with a
- *
- * Web Audio BiquadFilter Node.
- *
- * @property {DelayNode} biquadFilter
+ * @property {GainNode} input
*/
- this.biquad = this.ac.createBiquadFilter();
- this.input.connect(this.biquad);
- this.biquad.connect(this.wet);
- if (type) {
- this.setType(type);
- }
- //Properties useful for the toggle method.
- this._on = true;
- this._untoggledType = this.biquad.type;
- };
- p5.Filter.prototype = Object.create(Effect.prototype);
- /**
- * Filter an audio signal according to a set
- * of filter parameters.
- *
- * @method process
- * @param {Object} Signal An object that outputs audio
- * @param {Number} [freq] Frequency in Hz, from 10 to 22050
- * @param {Number} [res] Resonance/Width of the filter frequency
- * from 0.001 to 1000
- */
- p5.Filter.prototype.process = function (src, freq, res, time) {
- src.connect(this.input);
- this.set(freq, res, time);
- };
- /**
- * Set the frequency and the resonance of the filter.
- *
- * @method set
- * @param {Number} [freq] Frequency in Hz, from 10 to 22050
- * @param {Number} [res] Resonance (Q) from 0.001 to 1000
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- */
- p5.Filter.prototype.set = function (freq, res, time) {
- if (freq) {
- this.freq(freq, time);
- }
- if (res) {
- this.res(res, time);
- }
+ this.input = p5sound.audiocontext.createGain();
+ /**
+ * @property {GainNode} output
+ */
+
+ this.output = p5sound.audiocontext.createGain();
+ /**
+ * @property {MediaStream|null} stream
+ */
+
+ this.stream = null;
+ /**
+ * @property {MediaStreamAudioSourceNode|null} mediaStream
+ */
+
+ this.mediaStream = null;
+ /**
+ * @property {Number|null} currentSource
+ */
+
+ this.currentSource = null;
+ /**
+ * The client must grant the browser access to their microphone / audio input source.
+ * Default: false. Will become true when the client enables access.
+ *
+ * @property {Boolean} enabled
+ */
+
+ this.enabled = false;
+ /**
+ * Input amplitude. The AudioIn's output connects to it by default, but not to the master output.
+ *
+ * @property {p5.Amplitude} amplitude
+ */
+
+ this.amplitude = new p5.Amplitude();
+ this.output.connect(this.amplitude.input);
+
+ if (!window.MediaStreamTrack || !window.navigator.mediaDevices || !window.navigator.mediaDevices.getUserMedia) {
+ errorCallback ? errorCallback() : window.alert('This browser does not support MediaStreamTrack and mediaDevices');
+ }
+
+
+ p5sound.soundArray.push(this);
};
/**
- * Set the filter frequency, in Hz, from 10 to 22050 (the range of
- * human hearing, although in reality most people hear in a narrower
- * range).
+ * Start processing audio input. This enables the use of other
+ * AudioIn methods like getLevel(). Note that by default, AudioIn
+ * is not connected to p5.sound's output. So you won't hear
+ * anything unless you use the connect() method.
*
- * @method freq
- * @param {Number} freq Filter Frequency
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- * @return {Number} value Returns the current frequency value
- */
- p5.Filter.prototype.freq = function (freq, time) {
- var t = time || 0;
- if (freq <= 0) {
- freq = 1;
- }
- if (typeof freq === 'number') {
- this.biquad.frequency.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.biquad.frequency.exponentialRampToValueAtTime(freq, this.ac.currentTime + 0.02 + t);
- } else if (freq) {
- freq.connect(this.biquad.frequency);
- }
- return this.biquad.frequency.value;
- };
- /**
- * Controls either width of a bandpass frequency,
- * or the resonance of a low/highpass cutoff frequency.
+ * Certain browsers limit access to the user's microphone. For example,
+ * Chrome only allows access from localhost and over https. For this reason,
+ * you may want to include an errorCallback—a function that is called in case
+ * the browser won't provide mic access.
*
- * @method res
- * @param {Number} res Resonance/Width of filter freq
- * from 0.001 to 1000
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- * @return {Number} value Returns the current res value
+ * @method start
+ * @for p5.AudioIn
+ * @param {Function} [successCallback] Name of a function to call on
+ * success.
+ * @param {Function} [errorCallback] Name of a function to call if
+ * there was an error. For example,
+ * some browsers do not support
+ * getUserMedia.
*/
- p5.Filter.prototype.res = function (res, time) {
- var t = time || 0;
- if (typeof res === 'number') {
- this.biquad.Q.value = res;
- this.biquad.Q.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.biquad.Q.linearRampToValueAtTime(res, this.ac.currentTime + 0.02 + t);
- } else if (res) {
- res.connect(this.biquad.Q);
+
+
+ p5.AudioIn.prototype.start = function (successCallback, errorCallback) {
+ var self = this;
+
+ if (this.stream) {
+ this.stop();
+ }
+
+
+ var audioSource = p5sound.inputSources[self.currentSource];
+ var constraints = {
+ audio: {
+ sampleRate: p5sound.audiocontext.sampleRate,
+ echoCancellation: false
+ }
+ };
+
+ if (p5sound.inputSources[this.currentSource]) {
+ constraints.audio.deviceId = audioSource.deviceId;
}
- return this.biquad.Q.value;
+
+ window.navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
+ self.stream = stream;
+ self.enabled = true;
+
+ self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream);
+ self.mediaStream.connect(self.output);
+
+ self.amplitude.setInput(self.output);
+ if (successCallback) successCallback();
+ })["catch"](function (err) {
+ if (errorCallback) errorCallback(err);else console.error(err);
+ });
};
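+
+  // Usage sketch (illustrative): starting the mic with explicit callbacks.
+  //
+  //   var mic = new p5.AudioIn();
+  //   mic.start(function () {
+  //     console.log('mic is on');
+  //   }, function (err) {
+  //     console.error('mic access was denied or failed', err);
+  //   });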
/**
- * Controls the gain attribute of a Biquad Filter.
- * This is distinctly different from .amp() which is inherited from p5.Effect
- * .amp() controls the volume via the output gain node
- * p5.Filter.gain() controls the gain parameter of a Biquad Filter node.
+ * Turn the AudioIn off. If the AudioIn is stopped, it cannot getLevel().
+ * If re-started, the user may be prompted to grant permission again.
*
- * @method gain
- * @param {Number} gain
- * @return {Number} Returns the current or updated gain value
+ * @method stop
+ * @for p5.AudioIn
*/
- p5.Filter.prototype.gain = function (gain, time) {
- var t = time || 0;
- if (typeof gain === 'number') {
- this.biquad.gain.value = gain;
- this.biquad.gain.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.biquad.gain.linearRampToValueAtTime(gain, this.ac.currentTime + 0.02 + t);
- } else if (gain) {
- gain.connect(this.biquad.gain);
+
+
+ p5.AudioIn.prototype.stop = function () {
+ if (this.stream) {
+ this.stream.getTracks().forEach(function (track) {
+ track.stop();
+ });
+ this.mediaStream.disconnect();
+ delete this.mediaStream;
+ delete this.stream;
}
- return this.biquad.gain.value;
};
/**
- * Toggle function. Switches between the specified type and allpass
+ * Connect to an audio unit. If no parameter is provided, it will
+ * connect to the master output (i.e. your speakers).
*
- * @method toggle
- * @return {boolean} [Toggle value]
+ * @method connect
+ * @for p5.AudioIn
+ * @param {Object} [unit] An object that accepts audio input,
+ * such as an FFT
*/
- p5.Filter.prototype.toggle = function () {
- this._on = !this._on;
- if (this._on === true) {
- this.biquad.type = this._untoggledType;
- } else if (this._on === false) {
- this.biquad.type = 'allpass';
+
+
+ p5.AudioIn.prototype.connect = function (unit) {
+ if (unit) {
+ if (unit.hasOwnProperty('input')) {
+ this.output.connect(unit.input);
+ } else if (unit.hasOwnProperty('analyser')) {
+ this.output.connect(unit.analyser);
+ } else {
+ this.output.connect(unit);
+ }
+ } else {
+ this.output.connect(p5sound.input);
}
- return this._on;
};
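+
+  // Usage sketch (illustrative): per the branches above, a unit with an
+  // `analyser` property (such as a p5.FFT) receives the mic signal directly,
+  // without routing it to the speakers.
+  //
+  //   var mic = new p5.AudioIn();
+  //   var fft = new p5.FFT();
+  //   mic.start();
+  //   mic.connect(fft); // reaches fft.analyser, not the master output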
/**
- * Set the type of a p5.Filter. Possible types include:
- * "lowpass" (default), "highpass", "bandpass",
- * "lowshelf", "highshelf", "peaking", "notch",
- * "allpass".
+ * Disconnect the AudioIn from all audio units. For example, if
+ * connect() had been called, disconnect() will stop sending
+ * signal to your speakers.
*
- * @method setType
- * @param {String} t
+ * @method disconnect
+ * @for p5.AudioIn
*/
- p5.Filter.prototype.setType = function (t) {
- this.biquad.type = t;
- this._untoggledType = this.biquad.type;
- };
- p5.Filter.prototype.dispose = function () {
- // remove reference from soundArray
- Effect.prototype.dispose.apply(this);
- if (this.biquad) {
- this.biquad.disconnect();
- delete this.biquad;
+
+
+ p5.AudioIn.prototype.disconnect = function () {
+ if (this.output) {
+ this.output.disconnect();
+
+ this.output.connect(this.amplitude.input);
}
};
/**
- * Constructor: new p5.LowPass() Filter.
- * This is the same as creating a p5.Filter and then calling
- * its method setType('lowpass').
- * See p5.Filter for methods.
+ * Read the Amplitude (volume level) of an AudioIn. The AudioIn
+ * class contains its own instance of the Amplitude class to help
+ * make it easy to get a microphone's volume level. Accepts an
+ * optional smoothing value (between 0.0 and 1.0). NOTE: AudioIn must
+ * .start() before using .getLevel().
*
- * @class p5.LowPass
- * @constructor
- * @extends p5.Filter
+ * @method getLevel
+ * @for p5.AudioIn
+ * @param {Number} [smoothing] Smoothing is 0.0 by default.
+ * Smooths values based on previous values.
+ * @return {Number} Volume level (between 0.0 and 1.0)
*/
- p5.LowPass = function () {
- p5.Filter.call(this, 'lowpass');
+
+
+ p5.AudioIn.prototype.getLevel = function (smoothing) {
+ if (smoothing) {
+ this.amplitude.smoothing = smoothing;
+ }
+
+ return this.amplitude.getLevel();
};
- p5.LowPass.prototype = Object.create(p5.Filter.prototype);
/**
- * Constructor: new p5.HighPass() Filter.
- * This is the same as creating a p5.Filter and then calling
- * its method setType('highpass').
- * See p5.Filter for methods.
+ * Set amplitude (volume) of a mic input between 0 and 1.0.
*
- * @class p5.HighPass
- * @constructor
- * @extends p5.Filter
+ * @method amp
+ * @for p5.AudioIn
+ * @param {Number} vol between 0 and 1.0
+ * @param {Number} [time] ramp time (optional)
*/
- p5.HighPass = function () {
- p5.Filter.call(this, 'highpass');
+
+
+ p5.AudioIn.prototype.amp = function (vol, t) {
+ if (t) {
+ var rampTime = t || 0;
+ var currentVol = this.output.gain.value;
+ this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
+ this.output.gain.setValueAtTime(currentVol, p5sound.audiocontext.currentTime);
+ this.output.gain.linearRampToValueAtTime(vol, rampTime + p5sound.audiocontext.currentTime);
+ } else {
+ this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
+ this.output.gain.setValueAtTime(vol, p5sound.audiocontext.currentTime);
+ }
};
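+
+  // Usage sketch (illustrative): mic.amp(0.2, 1) ramps the monitor level to
+  // 0.2 over one second, while mic.amp(0.2) sets it immediately.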
- p5.HighPass.prototype = Object.create(p5.Filter.prototype);
/**
- * Constructor: new p5.BandPass() Filter.
- * This is the same as creating a p5.Filter and then calling
- * its method setType('bandpass').
- * See p5.Filter for methods.
+ * Returns a list of available input sources. This is a wrapper
+ * for navigator.mediaDevices.enumerateDevices() and it returns a Promise.
*
- * @class p5.BandPass
- * @constructor
- * @extends p5.Filter
+ * @method getSources
+ * @for p5.AudioIn
+ * @param {Function} [successCallback] This callback function handles the sources when they
+ * have been enumerated. The callback function
+ * receives the deviceList array as its only argument
+ * @param {Function} [errorCallback] This optional callback receives the error
+ * message as its argument.
+ * @returns {Promise} Returns a Promise that can be used in place of the callbacks, similar
+ * to the enumerateDevices() method
+ * @example
+ *
+ * let audioIn;
+ *
+ * function setup(){
+ * text('getting sources...', 0, 20);
+ * audioIn = new p5.AudioIn();
+ * audioIn.getSources(gotSources);
+ * }
+ *
+ * function gotSources(deviceList) {
+ * if (deviceList.length > 0) {
+ * //set the source to the first item in the deviceList array
+ * audioIn.setSource(0);
+ * let currentSource = deviceList[audioIn.currentSource];
+ * text('set source to: ' + currentSource.deviceId, 5, 20, width);
+ * }
+ * }
+ *
*/
- p5.BandPass = function () {
- p5.Filter.call(this, 'bandpass');
+
+
+ p5.AudioIn.prototype.getSources = function (onSuccess, onError) {
+ return new Promise(function (resolve, reject) {
+ window.navigator.mediaDevices.enumerateDevices().then(function (devices) {
+ p5sound.inputSources = devices.filter(function (device) {
+ return device.kind === 'audioinput';
+ });
+ resolve(p5sound.inputSources);
+
+ if (onSuccess) {
+ onSuccess(p5sound.inputSources);
+ }
+ })["catch"](function (error) {
+ reject(error);
+
+ if (onError) {
+ onError(error);
+ } else {
+ console.error('This browser does not support MediaStreamTrack.getSources()');
+ }
+ });
+ });
};
- p5.BandPass.prototype = Object.create(p5.Filter.prototype);
- return p5.Filter;
-}(master, effect);
-var src_eqFilter;
-'use strict';
-src_eqFilter = function () {
- var Filter = filter;
- var p5sound = master;
/**
- * EQFilter extends p5.Filter with constraints
- * necessary for the p5.EQ
+ * Set the input source. Accepts a number representing a
+ * position in the array returned by getSources().
+ * This is only available in browsers that support
+ * navigator.mediaDevices.enumerateDevices().
*
- * @private
+ * @method setSource
+ * @for p5.AudioIn
+ * @param {number} num position of input source in the array
+ * @example
+ *
+ * let audioIn;
+ *
+ * function setup(){
+ * text('getting sources...', 0, 20);
+ * audioIn = new p5.AudioIn();
+ * audioIn.getSources(gotSources);
+ * }
+ *
+ * function gotSources(deviceList) {
+ * if (deviceList.length > 0) {
+ * //set the source to the first item in the deviceList array
+ * audioIn.setSource(0);
+ * let currentSource = deviceList[audioIn.currentSource];
+ * text('set source to: ' + currentSource.deviceId, 5, 20, width);
+ * }
+ * }
+ *
*/
- var EQFilter = function (freq, res) {
- Filter.call(this, 'peaking');
- this.disconnect();
- this.set(freq, res);
- this.biquad.gain.value = 0;
- delete this.input;
- delete this.output;
- delete this._drywet;
- delete this.wet;
- };
- EQFilter.prototype = Object.create(Filter.prototype);
- EQFilter.prototype.amp = function () {
- console.warn('`amp()` is not available for p5.EQ bands. Use `.gain()`');
- };
- EQFilter.prototype.drywet = function () {
- console.warn('`drywet()` is not available for p5.EQ bands.');
- };
- EQFilter.prototype.connect = function (unit) {
- var u = unit || p5.soundOut.input;
- if (this.biquad) {
- this.biquad.connect(u.input ? u.input : u);
+
+
+ p5.AudioIn.prototype.setSource = function (num) {
+ if (p5sound.inputSources.length > 0 && num < p5sound.inputSources.length) {
+ this.currentSource = num;
+ console.log('set source to ', p5sound.inputSources[this.currentSource]);
} else {
- this.output.connect(u.input ? u.input : u);
- }
- };
- EQFilter.prototype.disconnect = function () {
- if (this.biquad) {
- this.biquad.disconnect();
+ console.log('unable to set input source');
+ }
+
+
+ if (this.stream && this.stream.active) {
+ this.start();
}
- };
- EQFilter.prototype.dispose = function () {
- // remove reference form soundArray
+ };
+
+
+ p5.AudioIn.prototype.dispose = function () {
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
- this.disconnect();
- delete this.biquad;
+ this.stop();
+
+ if (this.output) {
+ this.output.disconnect();
+ }
+
+ if (this.amplitude) {
+ this.amplitude.disconnect();
+ }
+
+ delete this.amplitude;
+ delete this.output;
};
- return EQFilter;
-}(filter, master);
-var eq;
-'use strict';
-eq = function () {
- var Effect = effect;
- var EQFilter = src_eqFilter;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(52),__webpack_require__(58),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.CrossFade=function(e){this.createInsOuts(2,1),this.a=this.input[0]=new i.Gain,this.b=this.input[1]=new i.Gain,this.fade=new i.Signal(this.defaultArg(e,.5),i.Type.NormalRange),this._equalPowerA=new i.EqualPowerGain,this._equalPowerB=new i.EqualPowerGain,this._invert=new i.Expr("1 - $0"),this.a.connect(this.output),this.b.connect(this.output),this.fade.chain(this._equalPowerB,this.b.gain),this.fade.chain(this._invert,this._equalPowerA,this.a.gain),this._readOnly("fade")},i.extend(i.CrossFade),i.CrossFade.prototype.dispose=function(){return i.prototype.dispose.call(this),this._writable("fade"),this._equalPowerA.dispose(),this._equalPowerA=null,this._equalPowerB.dispose(),this._equalPowerB=null,this.fade.dispose(),this.fade=null,this._invert.dispose(),this._invert=null,this.a.dispose(),this.a=null,this.b.dispose(),this.b=null,this},i.CrossFade}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(7),__webpack_require__(16),__webpack_require__(3),__webpack_require__(53),__webpack_require__(26),__webpack_require__(54),__webpack_require__(25),__webpack_require__(55),__webpack_require__(56),__webpack_require__(57)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(p){"use strict";function r(e,n,r){var t=new e;return r._eval(n[0]).connect(t,0,0),r._eval(n[1]).connect(t,0,1),t}function t(e,n,r){var t=new e;return r._eval(n[0]).connect(t,0,0),t}function o(e){return e?parseFloat(e):void 0}function i(e){return e&&e.args?parseFloat(e.args):void 0}return p.Expr=function(){var n=this._replacements(Array.prototype.slice.call(arguments)),e=this._parseInputs(n);this._nodes=[],this.input=new Array(e);for(var r=0;r
- * let eq;
- * let band_names;
- * let band_index;
- *
- * let soundFile, play;
+ * let eq, soundFile;
+ * let eqBandIndex = 0;
+ * let eqBandNames = ['lows', 'mids', 'highs'];
*
* function preload() {
* soundFormats('mp3', 'ogg');
@@ -8862,75 +8052,78 @@ eq = function () {
* }
*
* function setup() {
- * eq = new p5.EQ(3);
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(toggleSound);
+ *
+ * eq = new p5.EQ(eqBandNames.length);
* soundFile.disconnect();
* eq.process(soundFile);
- *
- * band_names = ['lows','mids','highs'];
- * band_index = 0;
- * play = false;
- * textAlign(CENTER);
* }
*
* function draw() {
* background(30);
* noStroke();
* fill(255);
- * text('click to kill',50,25);
+ * textAlign(CENTER);
+ * text('filtering ', 50, 25);
*
* fill(255, 40, 255);
* textSize(26);
- * text(band_names[band_index],50,55);
+ * text(eqBandNames[eqBandIndex], 50, 55);
*
* fill(255);
* textSize(9);
- * text('space = play/pause',50,80);
+ *
+ * if (!soundFile.isPlaying()) {
+ * text('tap to play', 50, 80);
+ * } else {
+ * text('tap to filter next band', 50, 80);
+ * }
* }
*
- * //If mouse is over canvas, cycle to the next band and kill the frequency
- * function mouseClicked() {
+ * function toggleSound() {
+ * if (!soundFile.isPlaying()) {
+ * soundFile.play();
+ * } else {
+ * eqBandIndex = (eqBandIndex + 1) % eq.bands.length;
+ * }
+ *
* for (let i = 0; i < eq.bands.length; i++) {
* eq.bands[i].gain(0);
* }
- * eq.bands[band_index].gain(-40);
- * if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) {
- * band_index === 2 ? band_index = 0 : band_index++;
- * }
- * }
- *
- * //use space bar to trigger play / pause
- * function keyPressed() {
- * if (key===' ') {
- * play = !play
- * play ? soundFile.loop() : soundFile.pause();
- * }
+ * // filter the band we want to filter
+ * eq.bands[eqBandIndex].gain(-40);
* }
*
*/
+
+
p5.EQ = function (_eqsize) {
- Effect.call(this);
- //p5.EQ can be of size (3) or (8), defaults to 3
+ Effect.call(this);
+
_eqsize = _eqsize === 3 || _eqsize === 8 ? _eqsize : 3;
var factor;
_eqsize === 3 ? factor = Math.pow(2, 3) : factor = 2;
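+  // Band spacing: each middle band's center frequency is the previous band's
+  // frequency times `factor`. A 3-band EQ lands near 100 Hz, 2880 Hz
+  // (360 * 8), and 21 kHz; an 8-band EQ doubles from 360 Hz up to 21 kHz.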
/**
* The p5.EQ is built with abstracted p5.Filter objects.
 * To modify any bands, use methods of the
* p5.Filter API, especially `gain` and `freq`.
* Bands are stored in an array, with indices 0 - 3, or 0 - 7
* @property {Array} bands
*
*/
+
this.bands = [];
var freq, res;
+
for (var i = 0; i < _eqsize; i++) {
if (i === _eqsize - 1) {
freq = 21000;
- res = 0.01;
+ res = .01;
} else if (i === 0) {
freq = 100;
- res = 0.1;
+ res = .1;
} else if (i === 1) {
freq = _eqsize === 3 ? 360 * factor : 360;
res = 1;
@@ -8938,30 +8131,35 @@ eq = function () {
freq = this.bands[i - 1].freq() * factor;
res = 1;
}
+
this.bands[i] = this._newBand(freq, res);
+
if (i > 0) {
this.bands[i - 1].connect(this.bands[i].biquad);
} else {
this.input.connect(this.bands[i].biquad);
}
}
+
this.bands[_eqsize - 1].connect(this.output);
};
+
p5.EQ.prototype = Object.create(Effect.prototype);
/**
* Process an input by connecting it to the EQ
* @method process
* @param {Object} src Audio source
*/
+
p5.EQ.prototype.process = function (src) {
src.connect(this.input);
- };
- // /**
+ };
// * Set the frequency and gain of each band in the EQ. This method should be
// * called with 3 or 8 frequency and gain pairs, depending on the size of the EQ.
// * ex. eq.set(freq0, gain0, freq1, gain1, freq2, gain2);
// *
// * @method set
+ // * @for p5.EQ
// * @param {Number} [freq0] Frequency value for band with index 0
// * @param {Number} [gain0] Gain value for band with index 0
// * @param {Number} [freq1] Frequency value for band with index 1
@@ -8979,6 +8177,8 @@ eq = function () {
// * @param {Number} [freq7] Frequency value for band with index 7
// * @param {Number} [gain7] Gain value for band with index 7
// */
+
+
p5.EQ.prototype.set = function () {
if (arguments.length === this.bands.length * 2) {
for (var i = 0; i < arguments.length; i += 2) {
@@ -8994,30 +8194,110 @@ eq = function () {
* the raw biquad filter. This method returns an abstracted p5.Filter,
* which can be added to p5.EQ.bands, in order to create new EQ bands.
* @private
+ * @for p5.EQ
* @method _newBand
* @param {Number} freq
* @param {Number} res
* @return {Object} Abstracted Filter
*/
+
+
p5.EQ.prototype._newBand = function (freq, res) {
return new EQFilter(freq, res);
};
+
p5.EQ.prototype.dispose = function () {
Effect.prototype.dispose.apply(this);
+
if (this.bands) {
while (this.bands.length > 0) {
delete this.bands.pop().dispose();
}
+
delete this.bands;
}
};
+
return p5.EQ;
-}(effect, src_eqFilter);
-var panner3d;
-'use strict';
-panner3d = function () {
- var p5sound = master;
- var Effect = effect;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var Filter = __webpack_require__(15);
+
+ var p5sound = __webpack_require__(1);
+ /**
+ * EQFilter extends p5.Filter with constraints
+ * necessary for the p5.EQ
+ *
+ * @private
+ */
+
+
+ var EQFilter = function EQFilter(freq, res) {
+ Filter.call(this, 'peaking');
+ this.disconnect();
+ this.set(freq, res);
+ this.biquad.gain.value = 0;
+ delete this.input;
+ delete this.output;
+ delete this._drywet;
+ delete this.wet;
+ };
+
+ EQFilter.prototype = Object.create(Filter.prototype);
+
+ EQFilter.prototype.amp = function () {
+ console.warn('`amp()` is not available for p5.EQ bands. Use `.gain()`');
+ };
+
+ EQFilter.prototype.drywet = function () {
+ console.warn('`drywet()` is not available for p5.EQ bands.');
+ };
+
+ EQFilter.prototype.connect = function (unit) {
+ var u = unit || p5.soundOut.input;
+
+ if (this.biquad) {
+ this.biquad.connect(u.input ? u.input : u);
+ } else {
+ this.output.connect(u.input ? u.input : u);
+ }
+ };
+
+ EQFilter.prototype.disconnect = function () {
+ if (this.biquad) {
+ this.biquad.disconnect();
+ }
+ };
+
+ EQFilter.prototype.dispose = function () {
+ var index = p5sound.soundArray.indexOf(this);
+ p5sound.soundArray.splice(index, 1);
+ this.disconnect();
+ delete this.biquad;
+ };
+
+ return EQFilter;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var Effect = __webpack_require__(4);
/**
* Panner3D is based on the
@@ -9034,6 +8314,8 @@ panner3d = function () {
* @class p5.Panner3D
* @constructor
*/
+
+
p5.Panner3D = function () {
Effect.call(this);
/**
@@ -9052,58 +8334,69 @@ panner3d = function () {
* @property {AudioNode} panner
*
*/
+
this.panner = this.ac.createPanner();
this.panner.panningModel = 'HRTF';
this.panner.distanceModel = 'linear';
this.panner.connect(this.output);
this.input.connect(this.panner);
};
+
p5.Panner3D.prototype = Object.create(Effect.prototype);
/**
* Connect an audio source
*
* @method process
+ * @for p5.Panner3D
* @param {Object} src Input source
*/
+
p5.Panner3D.prototype.process = function (src) {
src.connect(this.input);
};
/**
* Set the X,Y,Z position of the Panner
* @method set
+ * @for p5.Panner3D
* @param {Number} xVal
* @param {Number} yVal
* @param {Number} zVal
* @param {Number} time
* @return {Array} Updated x, y, z values as an array
*/
+
+
p5.Panner3D.prototype.set = function (xVal, yVal, zVal, time) {
this.positionX(xVal, time);
this.positionY(yVal, time);
this.positionZ(zVal, time);
- return [
- this.panner.positionX.value,
- this.panner.positionY.value,
- this.panner.positionZ.value
- ];
+ return [this.panner.positionX.value, this.panner.positionY.value, this.panner.positionZ.value];
};
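+
+  // Usage sketch (illustrative; assumes an existing source such as a
+  // p5.SoundFile): position a source relative to the listener at the origin.
+  //
+  //   var panner = new p5.Panner3D();
+  //   panner.process(soundFile);
+  //   panner.set(mouseX - width / 2, 0, 0); // move along the X axis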
/**
* Getter and setter methods for position coordinates
* @method positionX
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
/**
* Getter and setter methods for position coordinates
* @method positionY
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
/**
* Getter and setter methods for position coordinates
* @method positionZ
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
+
p5.Panner3D.prototype.positionX = function (xVal, time) {
var t = time || 0;
+
if (typeof xVal === 'number') {
this.panner.positionX.value = xVal;
this.panner.positionX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9111,10 +8404,13 @@ panner3d = function () {
} else if (xVal) {
xVal.connect(this.panner.positionX);
}
+
return this.panner.positionX.value;
};
+
p5.Panner3D.prototype.positionY = function (yVal, time) {
var t = time || 0;
+
if (typeof yVal === 'number') {
this.panner.positionY.value = yVal;
this.panner.positionY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9122,10 +8418,13 @@ panner3d = function () {
} else if (yVal) {
yVal.connect(this.panner.positionY);
}
+
return this.panner.positionY.value;
};
+
p5.Panner3D.prototype.positionZ = function (zVal, time) {
var t = time || 0;
+
if (typeof zVal === 'number') {
this.panner.positionZ.value = zVal;
this.panner.positionZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9133,44 +8432,52 @@ panner3d = function () {
} else if (zVal) {
zVal.connect(this.panner.positionZ);
}
+
return this.panner.positionZ.value;
};
/**
* Set the X,Y,Z position of the Panner
* @method orient
+ * @for p5.Panner3D
* @param {Number} xVal
* @param {Number} yVal
* @param {Number} zVal
* @param {Number} time
* @return {Array} Updated x, y, z values as an array
*/
+
+
p5.Panner3D.prototype.orient = function (xVal, yVal, zVal, time) {
this.orientX(xVal, time);
this.orientY(yVal, time);
this.orientZ(zVal, time);
- return [
- this.panner.orientationX.value,
- this.panner.orientationY.value,
- this.panner.orientationZ.value
- ];
+ return [this.panner.orientationX.value, this.panner.orientationY.value, this.panner.orientationZ.value];
};
/**
* Getter and setter methods for orient coordinates
* @method orientX
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
/**
* Getter and setter methods for orient coordinates
* @method orientY
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
/**
* Getter and setter methods for orient coordinates
* @method orientZ
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
+
p5.Panner3D.prototype.orientX = function (xVal, time) {
var t = time || 0;
+
if (typeof xVal === 'number') {
this.panner.orientationX.value = xVal;
this.panner.orientationX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9178,10 +8485,13 @@ panner3d = function () {
} else if (xVal) {
xVal.connect(this.panner.orientationX);
}
+
return this.panner.orientationX.value;
};
+
p5.Panner3D.prototype.orientY = function (yVal, time) {
var t = time || 0;
+
if (typeof yVal === 'number') {
this.panner.orientationY.value = yVal;
this.panner.orientationY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9189,10 +8499,13 @@ panner3d = function () {
} else if (yVal) {
yVal.connect(this.panner.orientationY);
}
+
return this.panner.orientationY.value;
};
+
p5.Panner3D.prototype.orientZ = function (zVal, time) {
var t = time || 0;
+
if (typeof zVal === 'number') {
this.panner.orientationZ.value = zVal;
this.panner.orientationZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9200,14 +8513,18 @@ panner3d = function () {
} else if (zVal) {
zVal.connect(this.panner.orientationZ);
}
+
return this.panner.orientationZ.value;
};
/**
* Set the rolloff factor and max distance
* @method setFalloff
+ * @for p5.Panner3D
* @param {Number} [maxDistance]
* @param {Number} [rolloffFactor]
*/
+
+
p5.Panner3D.prototype.setFalloff = function (maxDistance, rolloffFactor) {
this.maxDist(maxDistance);
this.rolloff(rolloffFactor);
@@ -9215,42 +8532,59 @@ panner3d = function () {
/**
* Maximum distance between the source and the listener
* @method maxDist
+ * @for p5.Panner3D
* @param {Number} maxDistance
* @return {Number} updated value
*/
+
+
p5.Panner3D.prototype.maxDist = function (maxDistance) {
if (typeof maxDistance === 'number') {
this.panner.maxDistance = maxDistance;
}
+
return this.panner.maxDistance;
};
/**
* How quickly the volume is reduced as the source moves away from the listener
* @method rolloff
+ * @for p5.Panner3D
* @param {Number} rolloffFactor
* @return {Number} updated value
*/
+
+
p5.Panner3D.prototype.rolloff = function (rolloffFactor) {
if (typeof rolloffFactor === 'number') {
this.panner.rolloffFactor = rolloffFactor;
}
+
return this.panner.rolloffFactor;
};
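+
+  // Usage sketch (illustrative): with the 'linear' distance model set in the
+  // constructor, volume falls off toward silence as the source nears
+  // maxDistance.
+  //
+  //   panner.setFalloff(1000, 2); // maxDistance 1000, rolloffFactor 2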
+
p5.Panner3D.dispose = function () {
Effect.prototype.dispose.apply(this);
+
if (this.panner) {
this.panner.disconnect();
delete this.panner;
}
};
+
return p5.Panner3D;
-}(master, effect);
-var listener3d;
-'use strict';
-listener3d = function () {
- var p5sound = master;
- var Effect = effect;
- // /**
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var Effect = __webpack_require__(4);
// * listener is a class that can construct both a Spatial Panner
// * and a Spatial Listener. The panner is based on the
// * Web Audio Spatial Panner Node
@@ -9273,18 +8607,20 @@ listener3d = function () {
// * @param {AudioParam} listener.distanceModel "linear", "inverse", or "exponential"
// * @param {String} [type] [Specify construction of a spatial panner or listener]
// */
+
+
p5.Listener3D = function (type) {
this.ac = p5sound.audiocontext;
this.listener = this.ac.listener;
- };
- // /**
+ };
// * Connect an audio source
// * @param {Object} src Input source
// */
+
+
p5.Listener3D.prototype.process = function (src) {
src.connect(this.input);
- };
- // /**
+ };
// * Set the X,Y,Z position of the Panner
// * @param {[Number]} xVal
// * @param {[Number]} yVal
@@ -9292,22 +8628,22 @@ listener3d = function () {
// * @param {[Number]} time
// * @return {[Array]} [Updated x, y, z values as an array]
// */
+
+
p5.Listener3D.prototype.position = function (xVal, yVal, zVal, time) {
this.positionX(xVal, time);
this.positionY(yVal, time);
this.positionZ(zVal, time);
- return [
- this.listener.positionX.value,
- this.listener.positionY.value,
- this.listener.positionZ.value
- ];
- };
- // /**
+ return [this.listener.positionX.value, this.listener.positionY.value, this.listener.positionZ.value];
+ };
// * Getter and setter methods for position coordinates
// * @return {Number} [updated coordinate value]
// */
+
+
p5.Listener3D.prototype.positionX = function (xVal, time) {
var t = time || 0;
+
if (typeof xVal === 'number') {
this.listener.positionX.value = xVal;
this.listener.positionX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9315,10 +8651,13 @@ listener3d = function () {
} else if (xVal) {
xVal.connect(this.listener.positionX);
}
+
return this.listener.positionX.value;
};
+
p5.Listener3D.prototype.positionY = function (yVal, time) {
var t = time || 0;
+
if (typeof yVal === 'number') {
this.listener.positionY.value = yVal;
this.listener.positionY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9326,10 +8665,13 @@ listener3d = function () {
} else if (yVal) {
yVal.connect(this.listener.positionY);
}
+
return this.listener.positionY.value;
};
+
p5.Listener3D.prototype.positionZ = function (zVal, time) {
var t = time || 0;
+
if (typeof zVal === 'number') {
this.listener.positionZ.value = zVal;
this.listener.positionZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9337,10 +8679,9 @@ listener3d = function () {
} else if (zVal) {
zVal.connect(this.listener.positionZ);
}
+
return this.listener.positionZ.value;
- };
- // cannot define method when class definition is commented
- // /**
+ };
// * Overrides the listener orient() method because Listener has slightly
// * different params. In human terms, Forward vectors are the direction the
// * nose is pointing. Up vectors are the direction of the top of the head.
@@ -9355,6 +8696,8 @@ listener3d = function () {
// * @param {Number} time
// * @return {Array} All orienation params
// */
+
+
p5.Listener3D.prototype.orient = function (xValF, yValF, zValF, xValU, yValU, zValU, time) {
if (arguments.length === 3 || arguments.length === 4) {
time = arguments[3];
@@ -9363,41 +8706,31 @@ listener3d = function () {
this.orientForward(xValF, yValF, zValF);
this.orientUp(xValU, yValU, zValU, time);
}
- return [
- this.listener.forwardX.value,
- this.listener.forwardY.value,
- this.listener.forwardZ.value,
- this.listener.upX.value,
- this.listener.upY.value,
- this.listener.upZ.value
- ];
+
+ return [this.listener.forwardX.value, this.listener.forwardY.value, this.listener.forwardZ.value, this.listener.upX.value, this.listener.upY.value, this.listener.upZ.value];
};
+
p5.Listener3D.prototype.orientForward = function (xValF, yValF, zValF, time) {
this.forwardX(xValF, time);
this.forwardY(yValF, time);
this.forwardZ(zValF, time);
- return [
- this.listener.forwardX,
- this.listener.forwardY,
- this.listener.forwardZ
- ];
+ return [this.listener.forwardX, this.listener.forwardY, this.listener.forwardZ];
};
+
p5.Listener3D.prototype.orientUp = function (xValU, yValU, zValU, time) {
this.upX(xValU, time);
this.upY(yValU, time);
this.upZ(zValU, time);
- return [
- this.listener.upX,
- this.listener.upY,
- this.listener.upZ
- ];
- };
- // /**
+ return [this.listener.upX, this.listener.upY, this.listener.upZ];
+ };
// * Getter and setter methods for orient coordinates
// * @return {Number} [updated coordinate value]
// */
+
+
p5.Listener3D.prototype.forwardX = function (xVal, time) {
var t = time || 0;
+
if (typeof xVal === 'number') {
this.listener.forwardX.value = xVal;
this.listener.forwardX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9405,10 +8738,13 @@ listener3d = function () {
} else if (xVal) {
xVal.connect(this.listener.forwardX);
}
+
return this.listener.forwardX.value;
};
+
p5.Listener3D.prototype.forwardY = function (yVal, time) {
var t = time || 0;
+
if (typeof yVal === 'number') {
this.listener.forwardY.value = yVal;
this.listener.forwardY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9416,10 +8752,13 @@ listener3d = function () {
} else if (yVal) {
yVal.connect(this.listener.forwardY);
}
+
return this.listener.forwardY.value;
};
+
p5.Listener3D.prototype.forwardZ = function (zVal, time) {
var t = time || 0;
+
if (typeof zVal === 'number') {
this.listener.forwardZ.value = zVal;
this.listener.forwardZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9427,10 +8766,13 @@ listener3d = function () {
} else if (zVal) {
zVal.connect(this.listener.forwardZ);
}
+
return this.listener.forwardZ.value;
};
+
p5.Listener3D.prototype.upX = function (xVal, time) {
var t = time || 0;
+
if (typeof xVal === 'number') {
this.listener.upX.value = xVal;
this.listener.upX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9438,10 +8780,13 @@ listener3d = function () {
} else if (xVal) {
xVal.connect(this.listener.upX);
}
+
return this.listener.upX.value;
};
+
p5.Listener3D.prototype.upY = function (yVal, time) {
var t = time || 0;
+
if (typeof yVal === 'number') {
this.listener.upY.value = yVal;
this.listener.upY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9449,10 +8794,13 @@ listener3d = function () {
} else if (yVal) {
yVal.connect(this.listener.upY);
}
+
return this.listener.upY.value;
};
+
p5.Listener3D.prototype.upZ = function (zVal, time) {
var t = time || 0;
+
if (typeof zVal === 'number') {
this.listener.upZ.value = zVal;
this.listener.upZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9460,21 +8808,30 @@ listener3d = function () {
} else if (zVal) {
zVal.connect(this.listener.upZ);
}
+
return this.listener.upZ.value;
};
+
return p5.Listener3D;
-}(master, effect);
-var delay;
-'use strict';
-delay = function () {
- var Filter = filter;
- var Effect = effect;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var Filter = __webpack_require__(15);
+
+ var Effect = __webpack_require__(4);
/**
* Delay is an echo effect. It processes an existing sound source,
* and outputs a delayed version of that sound. The p5.Delay can
* produce different effects depending on the delayTime, feedback,
* filter, and type. In the example below, a feedback of 0.5 (the
- * defaul value) will produce a looping delay that decreases in
+ * default value) will produce a looping delay that decreases in
* volume by 50% each repeat. A filter will cut out the high
* frequencies so that the delay does not sound as piercing as the
* original source.
@@ -9489,40 +8846,36 @@ delay = function () {
* @constructor
* @example
*
- * let noise, env, delay;
+ * let osc, delay;
*
* function setup() {
- * background(0);
- * noStroke();
- * fill(255);
+ * let cnv = createCanvas(100, 100);
+ * background(220);
* textAlign(CENTER);
- * text('click to play', width/2, height/2);
- *
- * noise = new p5.Noise('brown');
- * noise.amp(0);
- * noise.start();
+ * text('tap to play', width/2, height/2);
*
+ * osc = new p5.Oscillator('square');
+ * osc.amp(0.5);
* delay = new p5.Delay();
*
* // delay.process() accepts 4 parameters:
- * // source, delayTime, feedback, filter frequency
- * // play with these numbers!!
- * delay.process(noise, .12, .7, 2300);
+ * // source, delayTime (in seconds), feedback, filter frequency
+ * delay.process(osc, 0.12, .7, 2300);
*
- * // play the noise with an envelope,
- * // a series of fades ( time / value pairs )
- * env = new p5.Envelope(.01, 0.2, .2, .1);
+ * cnv.mousePressed(oscStart);
* }
*
- * // mouseClick triggers envelope
- * function mouseClicked() {
- * // is mouse over canvas?
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * env.play(noise);
- * }
+ * function oscStart() {
+ * osc.start();
+ * }
+ *
+ * function mouseReleased() {
+ * osc.stop();
* }
*
*/
+
+
p5.Delay = function () {
Effect.call(this);
this._split = this.ac.createChannelSplitter(2);
@@ -9534,46 +8887,64 @@ delay = function () {
*
* Web Audio Delay Nodes, one for each stereo channel.
*
+ * @for p5.Delay
* @property {DelayNode} leftDelay
*/
+
this.leftDelay = this.ac.createDelay();
/**
* The p5.Delay is built with two
*
* Web Audio Delay Nodes, one for each stereo channel.
- *
+ * @for p5.Delay
* @property {DelayNode} rightDelay
*/
+
this.rightDelay = this.ac.createDelay();
this._leftFilter = new Filter();
this._rightFilter = new Filter();
+
this._leftFilter.disconnect();
+
this._rightFilter.disconnect();
+
this._leftFilter.biquad.frequency.setValueAtTime(1200, this.ac.currentTime);
+
this._rightFilter.biquad.frequency.setValueAtTime(1200, this.ac.currentTime);
+
this._leftFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime);
- this._rightFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime);
- // graph routing
+
+ this._rightFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime);
+
+
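+ // audio graph: input -> channel splitter -> (delay -> feedback gain ->
+ // lowpass filter) per stereo channel -> channel merger -> wet output;
+ // the splitter and merger hookups are completed below in setType()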
this.input.connect(this._split);
this.leftDelay.connect(this._leftGain);
this.rightDelay.connect(this._rightGain);
+
this._leftGain.connect(this._leftFilter.input);
+
this._rightGain.connect(this._rightFilter.input);
+
this._merge.connect(this.wet);
+
this._leftFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime);
- this._rightFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime);
- // default routing
+
+ this._rightFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime);
+
+
this.setType(0);
- this._maxDelay = this.leftDelay.delayTime.maxValue;
- // set initial feedback to 0.5
+ this._maxDelay = this.leftDelay.delayTime.maxValue;
+
this.feedback(0.5);
};
+
p5.Delay.prototype = Object.create(Effect.prototype);
/**
* Add delay to an audio signal according to a set
* of delay parameters.
*
* @method process
+ * @for p5.Delay
 * @param {Object} src An object that outputs audio
* @param {Number} [delayTime] Time (in seconds) of the delay/echo.
* Some browsers limit delayTime to
@@ -9585,3172 +8956,2735 @@ delay = function () {
* below the lowPass will be part of the
* delay.
*/
+
p5.Delay.prototype.process = function (src, _delayTime, _feedback, _filter) {
var feedback = _feedback || 0;
var delayTime = _delayTime || 0;
- if (feedback >= 1) {
+
+ if (feedback >= 1.0) {
throw new Error('Feedback value will force a positive feedback loop.');
}
+
if (delayTime >= this._maxDelay) {
throw new Error('Delay Time exceeds maximum delay time of ' + this._maxDelay + ' second.');
}
+
src.connect(this.input);
this.leftDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime);
this.rightDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime);
this._leftGain.gain.value = feedback;
this._rightGain.gain.value = feedback;
+
if (_filter) {
this._leftFilter.freq(_filter);
+
this._rightFilter.freq(_filter);
}
};
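+
+ // A minimal usage sketch (an illustration, not library code): the names
+ // `osc` and `delay` are assumptions local to this example.
+ //
+ //   let osc = new p5.Oscillator('triangle');
+ //   let delay = new p5.Delay();
+ //   osc.disconnect();                    // hear only the delayed signal
+ //   delay.process(osc, 0.2, 0.6, 1800);  // 200 ms echo, 60% feedback,
+ //   osc.start();                         // lowpass filter at 1800 Hz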
/**
- * Set the delay (echo) time, in seconds. Usually this value will be
- * a floating point number between 0.0 and 1.0.
- *
- * @method delayTime
- * @param {Number} delayTime Time (in seconds) of the delay
- */
- p5.Delay.prototype.delayTime = function (t) {
- // if t is an audio node...
- if (typeof t !== 'number') {
- t.connect(this.leftDelay.delayTime);
- t.connect(this.rightDelay.delayTime);
- } else {
- this.leftDelay.delayTime.cancelScheduledValues(this.ac.currentTime);
- this.rightDelay.delayTime.cancelScheduledValues(this.ac.currentTime);
- this.leftDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime);
- this.rightDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime);
- }
- };
- /**
- * Feedback occurs when Delay sends its signal back through its input
- * in a loop. The feedback amount determines how much signal to send each
- * time through the loop. A feedback greater than 1.0 is not desirable because
- * it will increase the overall output each time through the loop,
- * creating an infinite feedback loop. The default value is 0.5
- *
- * @method feedback
- * @param {Number|Object} feedback 0.0 to 1.0, or an object such as an
- * Oscillator that can be used to
- * modulate this param
- * @returns {Number} Feedback value
- *
- */
- p5.Delay.prototype.feedback = function (f) {
- // if f is an audio node...
- if (f && typeof f !== 'number') {
- f.connect(this._leftGain.gain);
- f.connect(this._rightGain.gain);
- } else if (f >= 1) {
- throw new Error('Feedback value will force a positive feedback loop.');
- } else if (typeof f === 'number') {
- this._leftGain.gain.value = f;
- this._rightGain.gain.value = f;
- }
- // return value of feedback
- return this._leftGain.gain.value;
- };
- /**
- * Set a lowpass filter frequency for the delay. A lowpass filter
- * will cut off any frequencies higher than the filter frequency.
- *
- * @method filter
- * @param {Number|Object} cutoffFreq A lowpass filter will cut off any
- * frequencies higher than the filter frequency.
- * @param {Number|Object} res Resonance of the filter frequency
- * cutoff, or an object (i.e. a p5.Oscillator)
- * that can be used to modulate this parameter.
- * High numbers (i.e. 15) will produce a resonance,
- * low numbers (i.e. .2) will produce a slope.
- */
- p5.Delay.prototype.filter = function (freq, q) {
- this._leftFilter.set(freq, q);
- this._rightFilter.set(freq, q);
- };
- /**
- * Choose a preset type of delay. 'pingPong' bounces the signal
- * from the left to the right channel to produce a stereo effect.
- * Any other parameter will revert to the default delay setting.
- *
- * @method setType
- * @param {String|Number} type 'pingPong' (1) or 'default' (0)
- */
- p5.Delay.prototype.setType = function (t) {
- if (t === 1) {
- t = 'pingPong';
- }
- this._split.disconnect();
- this._leftFilter.disconnect();
- this._rightFilter.disconnect();
- this._split.connect(this.leftDelay, 0);
- this._split.connect(this.rightDelay, 1);
- switch (t) {
- case 'pingPong':
- this._rightFilter.setType(this._leftFilter.biquad.type);
- this._leftFilter.output.connect(this._merge, 0, 0);
- this._rightFilter.output.connect(this._merge, 0, 1);
- this._leftFilter.output.connect(this.rightDelay);
- this._rightFilter.output.connect(this.leftDelay);
- break;
- default:
- this._leftFilter.output.connect(this._merge, 0, 0);
- this._rightFilter.output.connect(this._merge, 0, 1);
- this._leftFilter.output.connect(this.leftDelay);
- this._rightFilter.output.connect(this.rightDelay);
- }
- };
- // DocBlocks for methods inherited from p5.Effect
- /**
- * Set the output level of the delay effect.
- *
- * @method amp
- * @param {Number} volume amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts rampTime
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- */
- /**
- * Send output to a p5.sound or web audio object
- *
- * @method connect
- * @param {Object} unit
- */
- /**
- * Disconnect all output.
- *
- * @method disconnect
- */
- p5.Delay.prototype.dispose = function () {
- Effect.prototype.dispose.apply(this);
- this._split.disconnect();
- this._leftFilter.dispose();
- this._rightFilter.dispose();
- this._merge.disconnect();
- this._leftGain.disconnect();
- this._rightGain.disconnect();
- this.leftDelay.disconnect();
- this.rightDelay.disconnect();
- this._split = undefined;
- this._leftFilter = undefined;
- this._rightFilter = undefined;
- this._merge = undefined;
- this._leftGain = undefined;
- this._rightGain = undefined;
- this.leftDelay = undefined;
- this.rightDelay = undefined;
- };
-}(filter, effect);
-var reverb;
-'use strict';
-reverb = function () {
- var CustomError = errorHandler;
- var Effect = effect;
- /**
- * Reverb adds depth to a sound through a large number of decaying
- * echoes. It creates the perception that sound is occurring in a
- * physical space. The p5.Reverb has paramters for Time (how long does the
- * reverb last) and decayRate (how much the sound decays with each echo)
- * that can be set with the .set() or .process() methods. The p5.Convolver
- * extends p5.Reverb allowing you to recreate the sound of actual physical
- * spaces through convolution.
- *
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
- * disconnect() are available.
- *
- * @class p5.Reverb
- * @extends p5.Effect
- * @constructor
- * @example
- *
- * let soundFile, reverb;
- * function preload() {
- * soundFile = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * reverb = new p5.Reverb();
- * soundFile.disconnect(); // so we'll only hear reverb...
- *
- * // connect soundFile to reverb, process w/
- * // 3 second reverbTime, decayRate of 2%
- * reverb.process(soundFile, 3, 2);
- * soundFile.play();
- * }
- *
- */
- p5.Reverb = function () {
- Effect.call(this);
- this._initConvolverNode();
- // otherwise, Safari distorts
- this.input.gain.value = 0.5;
- // default params
- this._seconds = 3;
- this._decay = 2;
- this._reverse = false;
- this._buildImpulse();
- };
- p5.Reverb.prototype = Object.create(Effect.prototype);
- p5.Reverb.prototype._initConvolverNode = function () {
- this.convolverNode = this.ac.createConvolver();
- this.input.connect(this.convolverNode);
- this.convolverNode.connect(this.wet);
- };
- p5.Reverb.prototype._teardownConvolverNode = function () {
- if (this.convolverNode) {
- this.convolverNode.disconnect();
- delete this.convolverNode;
- }
- };
- p5.Reverb.prototype._setBuffer = function (audioBuffer) {
- this._teardownConvolverNode();
- this._initConvolverNode();
- this.convolverNode.buffer = audioBuffer;
- };
- /**
- * Connect a source to the reverb, and assign reverb parameters.
- *
- * @method process
- * @param {Object} src p5.sound / Web Audio object with a sound
- * output.
- * @param {Number} [seconds] Duration of the reverb, in seconds.
- * Min: 0, Max: 10. Defaults to 3.
- * @param {Number} [decayRate] Percentage of decay with each echo.
- * Min: 0, Max: 100. Defaults to 2.
- * @param {Boolean} [reverse] Play the reverb backwards or forwards.
- */
- p5.Reverb.prototype.process = function (src, seconds, decayRate, reverse) {
- src.connect(this.input);
- var rebuild = false;
- if (seconds) {
- this._seconds = seconds;
- rebuild = true;
- }
- if (decayRate) {
- this._decay = decayRate;
- }
- if (reverse) {
- this._reverse = reverse;
- }
- if (rebuild) {
- this._buildImpulse();
- }
- };
- /**
- * Set the reverb settings. Similar to .process(), but without
- * assigning a new input.
- *
- * @method set
- * @param {Number} [seconds] Duration of the reverb, in seconds.
- * Min: 0, Max: 10. Defaults to 3.
- * @param {Number} [decayRate] Percentage of decay with each echo.
- * Min: 0, Max: 100. Defaults to 2.
- * @param {Boolean} [reverse] Play the reverb backwards or forwards.
- */
- p5.Reverb.prototype.set = function (seconds, decayRate, reverse) {
- var rebuild = false;
- if (seconds) {
- this._seconds = seconds;
- rebuild = true;
- }
- if (decayRate) {
- this._decay = decayRate;
- }
- if (reverse) {
- this._reverse = reverse;
- }
- if (rebuild) {
- this._buildImpulse();
- }
- };
- // DocBlocks for methods inherited from p5.Effect
- /**
- * Set the output level of the reverb effect.
- *
- * @method amp
- * @param {Number} volume amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts rampTime
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- */
- /**
- * Send output to a p5.sound or web audio object
+ * Set the delay (echo) time, in seconds. Usually this value will be
+ * a floating point number between 0.0 and 1.0.
*
- * @method connect
- * @param {Object} unit
+ * @method delayTime
+ * @for p5.Delay
+ * @param {Number} delayTime Time (in seconds) of the delay
*/
+
+
+ p5.Delay.prototype.delayTime = function (t) {
+ if (typeof t !== 'number') {
+ t.connect(this.leftDelay.delayTime);
+ t.connect(this.rightDelay.delayTime);
+ } else {
+ this.leftDelay.delayTime.cancelScheduledValues(this.ac.currentTime);
+ this.rightDelay.delayTime.cancelScheduledValues(this.ac.currentTime);
+ this.leftDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime);
+ this.rightDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime);
+ }
+ };
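+
+ // A minimal sketch of both call styles, assuming `delay` is a p5.Delay
+ // that is already processing a source (illustrative names only):
+ //
+ //   delay.delayTime(0.25);             // ramp both channels to 250 ms
+ //
+ //   let lfo = new p5.Oscillator(0.5);  // ...or connect a modulator
+ //   lfo.disconnect();
+ //   lfo.start();
+ //   delay.delayTime(lfo.scale(-1, 1, 0.01, 0.2));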
/**
- * Disconnect all output.
+ * Feedback occurs when Delay sends its signal back through its input
+ * in a loop. The feedback amount determines how much signal to send each
+ * time through the loop. A feedback greater than 1.0 is not desirable because
+ * it will increase the overall output each time through the loop,
+ * creating an infinite feedback loop. The default value is 0.5
+ *
+ * @method feedback
+ * @for p5.Delay
+ * @param {Number|Object} feedback 0.0 to 1.0, or an object such as an
+ * Oscillator that can be used to
+ * modulate this param
+ * @returns {Number} Feedback value
*
- * @method disconnect
*/
+
+
+ p5.Delay.prototype.feedback = function (f) {
+ if (f && typeof f !== 'number') {
+ f.connect(this._leftGain.gain);
+ f.connect(this._rightGain.gain);
+ } else if (f >= 1.0) {
+ throw new Error('Feedback value will force a positive feedback loop.');
+ } else if (typeof f === 'number') {
+ this._leftGain.gain.value = f;
+ this._rightGain.gain.value = f;
+ }
+
+
+ return this._leftGain.gain.value;
+ };
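+
+ // A minimal sketch, assuming `delay` is an active p5.Delay
+ // (illustrative, not part of the library source):
+ //
+ //   delay.feedback(0.8);         // long, slowly decaying echo tail
+ //   let amt = delay.feedback();  // call with no argument to read it: 0.8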
/**
- * Inspired by Simple Reverb by Jordan Santell
- * https://github.com/web-audio-components/simple-reverb/blob/master/index.js
- *
- * Utility function for building an impulse response
- * based on the module parameters.
+ * Set a lowpass filter frequency for the delay. A lowpass filter
+ * will cut off any frequencies higher than the filter frequency.
*
- * @private
+ * @method filter
+ * @for p5.Delay
+ * @param {Number|Object} cutoffFreq A lowpass filter will cut off any
+ * frequencies higher than the filter frequency.
+ * @param {Number|Object} res Resonance of the filter frequency
+ * cutoff, or an object (e.g. a p5.Oscillator)
+ * that can be used to modulate this parameter.
+ * High numbers (e.g. 15) will produce a resonance,
+ * low numbers (e.g. 0.2) will produce a slope.
*/
- p5.Reverb.prototype._buildImpulse = function () {
- var rate = this.ac.sampleRate;
- var length = rate * this._seconds;
- var decay = this._decay;
- var impulse = this.ac.createBuffer(2, length, rate);
- var impulseL = impulse.getChannelData(0);
- var impulseR = impulse.getChannelData(1);
- var n, i;
- for (i = 0; i < length; i++) {
- n = this._reverse ? length - i : i;
- impulseL[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
- impulseR[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
- }
- this._setBuffer(impulse);
- };
- p5.Reverb.prototype.dispose = function () {
- Effect.prototype.dispose.apply(this);
- this._teardownConvolverNode();
+
+
+ p5.Delay.prototype.filter = function (freq, q) {
+ this._leftFilter.set(freq, q);
+
+ this._rightFilter.set(freq, q);
};
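+
+ // A minimal sketch, assuming `delay` is an active p5.Delay:
+ //
+ //   delay.filter(1000);     // darker echoes: lowpass cutoff at 1 kHz
+ //   delay.filter(3000, 7);  // brighter echoes with a resonant peak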
- // =======================================================================
- // *** p5.Convolver ***
- // =======================================================================
/**
- * p5.Convolver extends p5.Reverb. It can emulate the sound of real
- * physical spaces through a process called
- * convolution.
- *
- * Convolution multiplies any audio input by an "impulse response"
- * to simulate the dispersion of sound over time. The impulse response is
- * generated from an audio file that you provide. One way to
- * generate an impulse response is to pop a balloon in a reverberant space
- * and record the echo. Convolution can also be used to experiment with
- * sound.
- *
- * Use the method createConvolution(path) to instantiate a
- * p5.Convolver with a path to your impulse response audio file.
- *
- * @class p5.Convolver
- * @extends p5.Effect
- * @constructor
- * @param {String} path path to a sound file
- * @param {Function} [callback] function to call when loading succeeds
- * @param {Function} [errorCallback] function to call if loading fails.
- * This function will receive an error or
- * XMLHttpRequest object with information
- * about what went wrong.
- * @example
- *
- * let cVerb, sound;
- * function preload() {
- * // We have both MP3 and OGG versions of all sound assets
- * soundFormats('ogg', 'mp3');
- *
- * // Try replacing 'bx-spring' with other soundfiles like
- * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
- * cVerb = createConvolver('assets/bx-spring.mp3');
- *
- * // Try replacing 'Damscray_DancingTiger' with
- * // 'beat', 'doorbell', lucky_dragons_-_power_melody'
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * // disconnect from master output...
- * sound.disconnect();
- *
- * // ...and process with cVerb
- * // so that we only hear the convolution
- * cVerb.process(sound);
+ * Choose a preset type of delay. 'pingPong' bounces the signal
+ * from the left to the right channel to produce a stereo effect.
+ * Any other parameter will revert to the default delay setting.
*
- * sound.play();
- * }
- *
+ * @method setType
+ * @for p5.Delay
+ * @param {String|Number} type 'pingPong' (1) or 'default' (0)
*/
- p5.Convolver = function (path, callback, errorCallback) {
- p5.Reverb.call(this);
- /**
- * Internally, the p5.Convolver uses the a
- *
- * Web Audio Convolver Node.
- *
- * @property {ConvolverNode} convolverNode
- */
- this._initConvolverNode();
- // otherwise, Safari distorts
- this.input.gain.value = 0.5;
- if (path) {
- this.impulses = [];
- this._loadBuffer(path, callback, errorCallback);
- } else {
- // parameters
- this._seconds = 3;
- this._decay = 2;
- this._reverse = false;
- this._buildImpulse();
+
+
+ p5.Delay.prototype.setType = function (t) {
+ if (t === 1) {
+ t = 'pingPong';
}
- };
- p5.Convolver.prototype = Object.create(p5.Reverb.prototype);
- p5.prototype.registerPreloadMethod('createConvolver', p5.prototype);
+
+ this._split.disconnect();
+
+ this._leftFilter.disconnect();
+
+ this._rightFilter.disconnect();
+
+ this._split.connect(this.leftDelay, 0);
+
+ this._split.connect(this.rightDelay, 1);
+
+ switch (t) {
+ case 'pingPong':
+ this._rightFilter.setType(this._leftFilter.biquad.type);
+
+ this._leftFilter.output.connect(this._merge, 0, 0);
+
+ this._rightFilter.output.connect(this._merge, 0, 1);
+
+ this._leftFilter.output.connect(this.rightDelay);
+
+ this._rightFilter.output.connect(this.leftDelay);
+
+ break;
+
+ default:
+ this._leftFilter.output.connect(this._merge, 0, 0);
+
+ this._rightFilter.output.connect(this._merge, 0, 1);
+
+ this._leftFilter.output.connect(this.leftDelay);
+
+ this._rightFilter.output.connect(this.rightDelay);
+
+ }
+ };
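+
+ // A minimal sketch, assuming `delay` is an active p5.Delay:
+ //
+ //   delay.setType('pingPong');  // echoes alternate left/right
+ //   delay.setType(0);           // restore the default stereo routing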
+
/**
- * Create a p5.Convolver. Accepts a path to a soundfile
- * that will be used to generate an impulse response.
- *
- * @method createConvolver
- * @param {String} path path to a sound file
- * @param {Function} [callback] function to call if loading is successful.
- * The object will be passed in as the argument
- * to the callback function.
- * @param {Function} [errorCallback] function to call if loading is not successful.
- * A custom error will be passed in as the argument
- * to the callback function.
- * @return {p5.Convolver}
- * @example
- *
- * let cVerb, sound;
- * function preload() {
- * // We have both MP3 and OGG versions of all sound assets
- * soundFormats('ogg', 'mp3');
- *
- * // Try replacing 'bx-spring' with other soundfiles like
- * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
- * cVerb = createConvolver('assets/bx-spring.mp3');
- *
- * // Try replacing 'Damscray_DancingTiger' with
- * // 'beat', 'doorbell', lucky_dragons_-_power_melody'
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * // disconnect from master output...
- * sound.disconnect();
- *
- * // ...and process with cVerb
- * // so that we only hear the convolution
- * cVerb.process(sound);
+ * Set the output level of the delay effect.
*
- * sound.play();
- * }
- *
+ * @method amp
+ * @for p5.Delay
+ * @param {Number} volume amplitude between 0 and 1.0
+ * @param {Number} [rampTime] create a fade that lasts rampTime
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
*/
- p5.prototype.createConvolver = function (path, callback, errorCallback) {
- // if loading locally without a server
- if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
- alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
- }
- var self = this;
- var cReverb = new p5.Convolver(path, function (buffer) {
- if (typeof callback === 'function') {
- callback(buffer);
- }
- if (typeof self._decrementPreload === 'function') {
- self._decrementPreload();
- }
- }, errorCallback);
- cReverb.impulses = [];
- return cReverb;
- };
+
/**
- * Private method to load a buffer as an Impulse Response,
- * assign it to the convolverNode, and add to the Array of .impulses.
+ * Send output to a p5.sound or web audio object
*
- * @param {String} path
- * @param {Function} callback
- * @param {Function} errorCallback
- * @private
+ * @method connect
+ * @for p5.Delay
+ * @param {Object} unit
*/
- p5.Convolver.prototype._loadBuffer = function (path, callback, errorCallback) {
- var path = p5.prototype._checkFileFormats(path);
- var self = this;
- var errorTrace = new Error().stack;
- var ac = p5.prototype.getAudioContext();
- var request = new XMLHttpRequest();
- request.open('GET', path, true);
- request.responseType = 'arraybuffer';
- request.onload = function () {
- if (request.status === 200) {
- // on success loading file:
- ac.decodeAudioData(request.response, function (buff) {
- var buffer = {};
- var chunks = path.split('/');
- buffer.name = chunks[chunks.length - 1];
- buffer.audioBuffer = buff;
- self.impulses.push(buffer);
- self._setBuffer(buffer.audioBuffer);
- if (callback) {
- callback(buffer);
- }
- }, // error decoding buffer. "e" is undefined in Chrome 11/22/2015
- function () {
- var err = new CustomError('decodeAudioData', errorTrace, self.url);
- var msg = 'AudioContext error at decodeAudioData for ' + self.url;
- if (errorCallback) {
- err.msg = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- });
- } else {
- var err = new CustomError('loadConvolver', errorTrace, self.url);
- var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
- if (errorCallback) {
- err.message = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- }
- };
- // if there is another error, aside from 404...
- request.onerror = function () {
- var err = new CustomError('loadConvolver', errorTrace, self.url);
- var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
- if (errorCallback) {
- err.message = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- };
- request.send();
+
+ /**
+ * Disconnect all output.
+ *
+ * @method disconnect
+ * @for p5.Delay
+ */
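+
+ // A minimal sketch of the inherited p5.Effect methods, assuming `delay`
+ // is a p5.Delay and `reverb` is a p5.Reverb (illustrative names):
+ //
+ //   delay.amp(0.3, 0.1);    // fade the effect output to 0.3 over 100 ms
+ //   delay.disconnect();     // detach from the master output...
+ //   delay.connect(reverb);  // ...and send the echoes into another effect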
+
+
+ p5.Delay.prototype.dispose = function () {
+ Effect.prototype.dispose.apply(this);
+
+ this._split.disconnect();
+
+ this._leftFilter.dispose();
+
+ this._rightFilter.dispose();
+
+ this._merge.disconnect();
+
+ this._leftGain.disconnect();
+
+ this._rightGain.disconnect();
+
+ this.leftDelay.disconnect();
+ this.rightDelay.disconnect();
+ this._split = undefined;
+ this._leftFilter = undefined;
+ this._rightFilter = undefined;
+ this._merge = undefined;
+ this._leftGain = undefined;
+ this._rightGain = undefined;
+ this.leftDelay = undefined;
+ this.rightDelay = undefined;
};
- p5.Convolver.prototype.set = null;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var CustomError = __webpack_require__(11);
+
+ var Effect = __webpack_require__(4);
/**
- * Connect a source to the reverb, and assign reverb parameters.
+ * Reverb adds depth to a sound through a large number of decaying
+ * echoes. It creates the perception that sound is occurring in a
+ * physical space. The p5.Reverb has parameters for Time (how long the
+ * reverb lasts) and decayRate (how much the sound decays with each echo)
+ * that can be set with the .set() or .process() methods. The p5.Convolver
+ * extends p5.Reverb allowing you to recreate the sound of actual physical
+ * spaces through convolution.
*
- * @method process
- * @param {Object} src p5.sound / Web Audio object with a sound
- * output.
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
+ *
+ * @class p5.Reverb
+ * @extends p5.Effect
+ * @constructor
* @example
*
- * let cVerb, sound;
+ * let soundFile, reverb;
* function preload() {
- * soundFormats('ogg', 'mp3');
+ * soundFile = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
*
- * cVerb = createConvolver('assets/concrete-tunnel.mp3');
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
*
- * sound = loadSound('assets/beat.mp3');
+ * reverb = new p5.Reverb();
+ * soundFile.disconnect(); // so we'll only hear reverb...
+ *
+ * // connect soundFile to reverb, process w/
+ * // 3 second reverbTime, decayRate of 2%
+ * reverb.process(soundFile, 3, 2);
* }
*
- * function setup() {
- * // disconnect from master output...
- * sound.disconnect();
+ * function draw() {
+ * let dryWet = constrain(map(mouseX, 0, width, 0, 1), 0, 1);
+ * // 1 = all reverb, 0 = no reverb
+ * reverb.drywet(dryWet);
*
- * // ...and process with (i.e. connect to) cVerb
- * // so that we only hear the convolution
- * cVerb.process(sound);
+ * background(220);
+ * text('tap to play', 10, 20);
+ * text('dry/wet: ' + round(dryWet * 100) + '%', 10, height - 20);
+ * }
*
- * sound.play();
+ * function playSound() {
+ * soundFile.play();
* }
*
*/
- p5.Convolver.prototype.process = function (src) {
- src.connect(this.input);
+
+
+ p5.Reverb = function () {
+ Effect.call(this);
+
+ this._initConvolverNode();
+
+
+ this.input.gain.value = 0.5;
+
+ this._seconds = 3;
+ this._decay = 2;
+ this._reverse = false;
+
+ this._buildImpulse();
};
- /**
- * If you load multiple impulse files using the .addImpulse method,
- * they will be stored as Objects in this Array. Toggle between them
- * with the toggleImpulse(id) method.
- *
- * @property {Array} impulses
- */
- p5.Convolver.prototype.impulses = [];
- /**
- * Load and assign a new Impulse Response to the p5.Convolver.
- * The impulse is added to the .impulses array. Previous
- * impulses can be accessed with the .toggleImpulse(id)
- * method.
- *
- * @method addImpulse
- * @param {String} path path to a sound file
- * @param {Function} callback function (optional)
- * @param {Function} errorCallback function (optional)
- */
- p5.Convolver.prototype.addImpulse = function (path, callback, errorCallback) {
- // if loading locally without a server
- if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
- alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
- }
- this._loadBuffer(path, callback, errorCallback);
+
+ p5.Reverb.prototype = Object.create(Effect.prototype);
+
+ p5.Reverb.prototype._initConvolverNode = function () {
+ this.convolverNode = this.ac.createConvolver();
+ this.input.connect(this.convolverNode);
+ this.convolverNode.connect(this.wet);
};
- /**
- * Similar to .addImpulse, except that the .impulses
- * Array is reset to save memory. A new .impulses
- * array is created with this impulse as the only item.
- *
- * @method resetImpulse
- * @param {String} path path to a sound file
- * @param {Function} callback function (optional)
- * @param {Function} errorCallback function (optional)
- */
- p5.Convolver.prototype.resetImpulse = function (path, callback, errorCallback) {
- // if loading locally without a server
- if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
- alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
+
+ p5.Reverb.prototype._teardownConvolverNode = function () {
+ if (this.convolverNode) {
+ this.convolverNode.disconnect();
+ delete this.convolverNode;
}
- this.impulses = [];
- this._loadBuffer(path, callback, errorCallback);
+ };
+
+ p5.Reverb.prototype._setBuffer = function (audioBuffer) {
+ this._teardownConvolverNode();
+
+ this._initConvolverNode();
+
+ this.convolverNode.buffer = audioBuffer;
};
/**
- * If you have used .addImpulse() to add multiple impulses
- * to a p5.Convolver, then you can use this method to toggle between
- * the items in the .impulses Array. Accepts a parameter
- * to identify which impulse you wish to use, identified either by its
- * original filename (String) or by its position in the .impulses
- * Array (Number).
- * You can access the objects in the .impulses Array directly. Each
- * Object has two attributes: an .audioBuffer (type:
- * Web Audio AudioBuffer) and a .name, a String that corresponds
- * with the original filename.
+ * Connect a source to the reverb, and assign reverb parameters.
*
- * @method toggleImpulse
- * @param {String|Number} id Identify the impulse by its original filename
- * (String), or by its position in the
- * .impulses Array (Number).
- */
- p5.Convolver.prototype.toggleImpulse = function (id) {
- if (typeof id === 'number' && id < this.impulses.length) {
- this._setBuffer(this.impulses[id].audioBuffer);
- }
- if (typeof id === 'string') {
- for (var i = 0; i < this.impulses.length; i++) {
- if (this.impulses[i].name === id) {
- this._setBuffer(this.impulses[i].audioBuffer);
- break;
- }
- }
- }
- };
- p5.Convolver.prototype.dispose = function () {
- p5.Reverb.prototype.dispose.apply(this);
- // remove all the Impulse Response buffers
- for (var i in this.impulses) {
- if (this.impulses[i]) {
- this.impulses[i] = null;
- }
+ * @method process
+ * @for p5.Reverb
+ * @param {Object} src p5.sound / Web Audio object with a sound
+ * output.
+ * @param {Number} [seconds] Duration of the reverb, in seconds.
+ * Min: 0, Max: 10. Defaults to 3.
+ * @param {Number} [decayRate] Percentage of decay with each echo.
+ * Min: 0, Max: 100. Defaults to 2.
+ * @param {Boolean} [reverse] Play the reverb backwards or forwards.
+ */
+
+
+ p5.Reverb.prototype.process = function (src, seconds, decayRate, reverse) {
+ src.connect(this.input);
+ var rebuild = false;
+
+ if (seconds) {
+ this._seconds = seconds;
+ rebuild = true;
}
- };
-}(errorHandler, effect);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_core_TimelineState;
-Tone_core_TimelineState = function (Tone) {
- 'use strict';
- Tone.TimelineState = function (initial) {
- Tone.Timeline.call(this);
- this._initial = initial;
- };
- Tone.extend(Tone.TimelineState, Tone.Timeline);
- Tone.TimelineState.prototype.getValueAtTime = function (time) {
- var event = this.get(time);
- if (event !== null) {
- return event.state;
- } else {
- return this._initial;
+
+ if (decayRate) {
+ this._decay = decayRate;
}
- };
- Tone.TimelineState.prototype.setStateAtTime = function (state, time) {
- this.add({
- 'state': state,
- 'time': time
- });
- };
- return Tone.TimelineState;
-}(Tone_core_Tone, Tone_core_Timeline);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_core_Clock;
-Tone_core_Clock = function (Tone) {
- 'use strict';
- Tone.Clock = function () {
- Tone.Emitter.call(this);
- var options = this.optionsObject(arguments, [
- 'callback',
- 'frequency'
- ], Tone.Clock.defaults);
- this.callback = options.callback;
- this._nextTick = 0;
- this._lastState = Tone.State.Stopped;
- this.frequency = new Tone.TimelineSignal(options.frequency, Tone.Type.Frequency);
- this._readOnly('frequency');
- this.ticks = 0;
- this._state = new Tone.TimelineState(Tone.State.Stopped);
- this._boundLoop = this._loop.bind(this);
- this.context.on('tick', this._boundLoop);
- };
- Tone.extend(Tone.Clock, Tone.Emitter);
- Tone.Clock.defaults = {
- 'callback': Tone.noOp,
- 'frequency': 1,
- 'lookAhead': 'auto'
- };
- Object.defineProperty(Tone.Clock.prototype, 'state', {
- get: function () {
- return this._state.getValueAtTime(this.now());
+
+ if (reverse) {
+ this._reverse = reverse;
}
- });
- Tone.Clock.prototype.start = function (time, offset) {
- time = this.toSeconds(time);
- if (this._state.getValueAtTime(time) !== Tone.State.Started) {
- this._state.add({
- 'state': Tone.State.Started,
- 'time': time,
- 'offset': offset
- });
+
+ if (rebuild) {
+ this._buildImpulse();
}
- return this;
- };
- Tone.Clock.prototype.stop = function (time) {
- time = this.toSeconds(time);
- this._state.cancel(time);
- this._state.setStateAtTime(Tone.State.Stopped, time);
- return this;
};
- Tone.Clock.prototype.pause = function (time) {
- time = this.toSeconds(time);
- if (this._state.getValueAtTime(time) === Tone.State.Started) {
- this._state.setStateAtTime(Tone.State.Paused, time);
+ /**
+ * Set the reverb settings. Similar to .process(), but without
+ * assigning a new input.
+ *
+ * @method set
+ * @for p5.Reverb
+ * @param {Number} [seconds] Duration of the reverb, in seconds.
+ * Min: 0, Max: 10. Defaults to 3.
+ * @param {Number} [decayRate] Percentage of decay with each echo.
+ * Min: 0, Max: 100. Defaults to 2.
+ * @param {Boolean} [reverse] Play the reverb backwards or forwards.
+ */
+
+
+ p5.Reverb.prototype.set = function (seconds, decayRate, reverse) {
+ var rebuild = false;
+
+ if (seconds) {
+ this._seconds = seconds;
+ rebuild = true;
}
- return this;
- };
- Tone.Clock.prototype._loop = function () {
- var now = this.now();
- var lookAhead = this.context.lookAhead;
- var updateInterval = this.context.updateInterval;
- var lagCompensation = this.context.lag * 2;
- var loopInterval = now + lookAhead + updateInterval + lagCompensation;
- while (loopInterval > this._nextTick && this._state) {
- var currentState = this._state.getValueAtTime(this._nextTick);
- if (currentState !== this._lastState) {
- this._lastState = currentState;
- var event = this._state.get(this._nextTick);
- if (currentState === Tone.State.Started) {
- this._nextTick = event.time;
- if (!this.isUndef(event.offset)) {
- this.ticks = event.offset;
- }
- this.emit('start', event.time, this.ticks);
- } else if (currentState === Tone.State.Stopped) {
- this.ticks = 0;
- this.emit('stop', event.time);
- } else if (currentState === Tone.State.Paused) {
- this.emit('pause', event.time);
- }
- }
- var tickTime = this._nextTick;
- if (this.frequency) {
- this._nextTick += 1 / this.frequency.getValueAtTime(this._nextTick);
- if (currentState === Tone.State.Started) {
- this.callback(tickTime);
- this.ticks++;
- }
- }
+
+ if (decayRate) {
+ this._decay = decayRate;
}
- };
- Tone.Clock.prototype.getStateAtTime = function (time) {
- time = this.toSeconds(time);
- return this._state.getValueAtTime(time);
- };
- Tone.Clock.prototype.dispose = function () {
- Tone.Emitter.prototype.dispose.call(this);
- this.context.off('tick', this._boundLoop);
- this._writable('frequency');
- this.frequency.dispose();
- this.frequency = null;
- this._boundLoop = null;
- this._nextTick = Infinity;
- this.callback = null;
- this._state.dispose();
- this._state = null;
- };
- return Tone.Clock;
-}(Tone_core_Tone, Tone_signal_TimelineSignal, Tone_core_TimelineState, Tone_core_Emitter);
-var metro;
-'use strict';
-metro = function () {
- var p5sound = master;
- // requires the Tone.js library's Clock (MIT license, Yotam Mann)
- // https://github.com/TONEnoTONE/Tone.js/
- var Clock = Tone_core_Clock;
- p5.Metro = function () {
- this.clock = new Clock({ 'callback': this.ontick.bind(this) });
- this.syncedParts = [];
- this.bpm = 120;
- // gets overridden by p5.Part
- this._init();
- this.prevTick = 0;
- this.tatumTime = 0;
- this.tickCallback = function () {
- };
- };
- p5.Metro.prototype.ontick = function (tickTime) {
- var elapsedTime = tickTime - this.prevTick;
- var secondsFromNow = tickTime - p5sound.audiocontext.currentTime;
- if (elapsedTime - this.tatumTime <= -0.02) {
- return;
- } else {
- // console.log('ok', this.syncedParts[0].phrases[0].name);
- this.prevTick = tickTime;
- // for all of the active things on the metro:
- var self = this;
- this.syncedParts.forEach(function (thisPart) {
- if (!thisPart.isPlaying)
- return;
- thisPart.incrementStep(secondsFromNow);
- // each synced source keeps track of its own beat number
- thisPart.phrases.forEach(function (thisPhrase) {
- var phraseArray = thisPhrase.sequence;
- var bNum = self.metroTicks % phraseArray.length;
- if (phraseArray[bNum] !== 0 && (self.metroTicks < phraseArray.length || !thisPhrase.looping)) {
- thisPhrase.callback(secondsFromNow, phraseArray[bNum]);
- }
- });
- });
- this.metroTicks += 1;
- this.tickCallback(secondsFromNow);
+
+ if (reverse) {
+ this._reverse = reverse;
}
- };
- p5.Metro.prototype.setBPM = function (bpm, rampTime) {
- var beatTime = 60 / (bpm * this.tatums);
- var now = p5sound.audiocontext.currentTime;
- this.tatumTime = beatTime;
- var rampTime = rampTime || 0;
- this.clock.frequency.setValueAtTime(this.clock.frequency.value, now);
- this.clock.frequency.linearRampToValueAtTime(bpm, now + rampTime);
- this.bpm = bpm;
- };
- p5.Metro.prototype.getBPM = function () {
- return this.clock.getRate() / this.tatums * 60;
- };
- p5.Metro.prototype._init = function () {
- this.metroTicks = 0;
- };
- // clear existing synced parts, add only this one
- p5.Metro.prototype.resetSync = function (part) {
- this.syncedParts = [part];
- };
- // push a new synced part to the array
- p5.Metro.prototype.pushSync = function (part) {
- this.syncedParts.push(part);
- };
- p5.Metro.prototype.start = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- this.clock.start(now + t);
- this.setBPM(this.bpm);
- };
- p5.Metro.prototype.stop = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- this.clock.stop(now + t);
- };
- p5.Metro.prototype.beatLength = function (tatums) {
- this.tatums = 1 / tatums / 4;
- };
-}(master, Tone_core_Clock);
-var looper;
-'use strict';
-looper = function () {
- var p5sound = master;
- var BPM = 120;
+
+ if (rebuild) {
+ this._buildImpulse();
+ }
+ };
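+
+ // A minimal sketch, assuming `reverb` is a p5.Reverb that is already
+ // processing a source:
+ //
+ //   reverb.set(6, 10);        // 6-second tail, 10% decay per echo
+ //   reverb.set(2, 50, true);  // short tail, played in reverse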
+
/**
- * Set the global tempo, in beats per minute, for all
- * p5.Parts. This method will impact all active p5.Parts.
+ * Set the output level of the reverb effect.
*
- * @method setBPM
- * @param {Number} BPM Beats Per Minute
- * @param {Number} rampTime Seconds from now
+ * @method amp
+ * @for p5.Reverb
+ * @param {Number} volume amplitude between 0 and 1.0
+ * @param {Number} [rampTime] create a fade that lasts rampTime
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
*/
- p5.prototype.setBPM = function (bpm, rampTime) {
- BPM = bpm;
- for (var i in p5sound.parts) {
- if (p5sound.parts[i]) {
- p5sound.parts[i].setBPM(bpm, rampTime);
- }
+
+ /**
+ * Send output to a p5.sound or web audio object
+ *
+ * @method connect
+ * @for p5.Reverb
+ * @param {Object} unit
+ */
+
+ /**
+ * Disconnect all output.
+ *
+ * @method disconnect
+ * @for p5.Reverb
+ */
+
+ /**
+ * Inspired by Simple Reverb by Jordan Santell
+ * https://github.com/web-audio-components/simple-reverb/blob/master/index.js
+ *
+ * Utility function for building an impulse response
+ * based on the module parameters.
+ *
+ * @private
+ */
+
+
+ p5.Reverb.prototype._buildImpulse = function () {
+ var rate = this.ac.sampleRate;
+ var length = rate * this._seconds;
+ var decay = this._decay;
+ var impulse = this.ac.createBuffer(2, length, rate);
+ var impulseL = impulse.getChannelData(0);
+ var impulseR = impulse.getChannelData(1);
+ var n, i;
+
+ for (i = 0; i < length; i++) {
+ n = this._reverse ? length - i : i;
+ impulseL[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
+ impulseR[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
}
+
+ this._setBuffer(impulse);
};
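+
+ // In other words: the impulse is sampleRate * seconds samples of white
+ // noise shaped by the envelope (1 - n/length)^decay, so the defaults
+ // (3 s, decay 2) fade each echo quadratically toward silence.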
+
+ p5.Reverb.prototype.dispose = function () {
+ Effect.prototype.dispose.apply(this);
+
+ this._teardownConvolverNode();
+ };
+
/**
- * A phrase is a pattern of musical events over time, i.e.
- * a series of notes and rests.
+ * p5.Convolver extends p5.Reverb. It can emulate the sound of real
+ * physical spaces through a process called
+ * convolution.
*
- * Phrases must be added to a p5.Part for playback, and
- * each part can play multiple phrases at the same time.
- * For example, one Phrase might be a kick drum, another
- * could be a snare, and another could be the bassline.
+ * Convolution multiplies any audio input by an "impulse response"
+ * to simulate the dispersion of sound over time. The impulse response is
+ * generated from an audio file that you provide. One way to
+ * generate an impulse response is to pop a balloon in a reverberant space
+ * and record the echo. Convolution can also be used to experiment with
+ * sound.
*
- * The first parameter is a name so that the phrase can be
- * modified or deleted later. The callback is a function that
- * this phrase will call at every step—for example it might be
- * called playNote(value){}. The array determines
- * which value is passed into the callback at each step of the
- * phrase. It can be numbers, an object with multiple numbers,
- * or a zero (0), which indicates a rest (the callback won't be called).
+ * Use the method createConvolver(path) to instantiate a
+ * p5.Convolver with a path to your impulse response audio file.
*
- * @class p5.Phrase
+ * @class p5.Convolver
+ * @extends p5.Effect
* @constructor
- * @param {String} name Name so that you can access the Phrase.
- * @param {Function} callback The name of a function that this phrase
- * will call. Typically it will play a sound,
- * and accept two parameters: a time at which
- * to play the sound (in seconds from now),
- * and a value from the sequence array. The
- * time should be passed into the play() or
- * start() method to ensure precision.
- * @param {Array} sequence Array of values to pass into the callback
- * at each step of the phrase.
+ * @param {String} path path to a sound file
+ * @param {Function} [callback] function to call when loading succeeds
+ * @param {Function} [errorCallback] function to call if loading fails.
+ * This function will receive an error or
+ * XMLHttpRequest object with information
+ * about what went wrong.
* @example
*
- * let mySound, myPhrase, myPart;
- * let pattern = [1,0,0,2,0,2,0,0];
- * let msg = 'click to play';
- *
+ * let cVerb, sound;
* function preload() {
- * mySound = loadSound('assets/beatbox.mp3');
- * }
- *
- * function setup() {
- * noStroke();
- * fill(255);
- * textAlign(CENTER);
- * masterVolume(0.1);
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
*
- * myPhrase = new p5.Phrase('bbox', makeSound, pattern);
- * myPart = new p5.Part();
- * myPart.addPhrase(myPhrase);
- * myPart.setBPM(60);
- * }
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
*
- * function draw() {
- * background(0);
- * text(msg, width/2, height/2);
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
- * function makeSound(time, playbackRate) {
- * mySound.rate(playbackRate);
- * mySound.play(time);
- * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ * background(220);
+ * text('tap to play', 20, 20);
*
- * function mouseClicked() {
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * myPart.start();
- * msg = 'playing pattern';
- * }
+ * // disconnect from master output...
+ * sound.disconnect();
+ *
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
* }
*
+ * function playSound() {
+ * sound.play();
+ * }
*
*/
- p5.Phrase = function (name, callback, sequence) {
- this.phraseStep = 0;
- this.name = name;
- this.callback = callback;
+
+
+ p5.Convolver = function (path, callback, errorCallback) {
+ p5.Reverb.call(this);
/**
- * Array of values to pass into the callback
- * at each step of the phrase. Depending on the callback
- * function's requirements, these values may be numbers,
- * strings, or an object with multiple parameters.
- * Zero (0) indicates a rest.
+ * Internally, the p5.Convolver uses a
+ *
+ * Web Audio Convolver Node.
*
- * @property {Array} sequence
+ * @property {ConvolverNode} convolverNode
*/
- this.sequence = sequence;
+
+ this._initConvolverNode();
+
+
+ this.input.gain.value = 0.5;
+
+ if (path) {
+ this.impulses = [];
+
+ this._loadBuffer(path, callback, errorCallback);
+ } else {
+ this._seconds = 3;
+ this._decay = 2;
+ this._reverse = false;
+
+ this._buildImpulse();
+ }
};
+
+ p5.Convolver.prototype = Object.create(p5.Reverb.prototype);
+ p5.prototype.registerPreloadMethod('createConvolver', p5.prototype);
/**
- * A p5.Part plays back one or more p5.Phrases. Instantiate a part
- * with steps and tatums. By default, each step represents a 1/16th note.
- *
- * See p5.Phrase for more about musical timing.
+ * Create a p5.Convolver. Accepts a path to a soundfile
+ * that will be used to generate an impulse response.
*
- * @class p5.Part
- * @constructor
- * @param {Number} [steps] Steps in the part
- * @param {Number} [tatums] Divisions of a beat, e.g. use 1/4, or 0.25 for a quarter note (default is 1/16, a sixteenth note)
+ * @method createConvolver
+ * @for p5
+ * @param {String} path path to a sound file
+ * @param {Function} [callback] function to call if loading is successful.
+ * The object will be passed in as the argument
+ * to the callback function.
+ * @param {Function} [errorCallback] function to call if loading is not successful.
+ * A custom error will be passed in as the argument
+ * to the callback function.
+ * @return {p5.Convolver}
* @example
*
- * let box, drum, myPart;
- * let boxPat = [1,0,0,2,0,2,0,0];
- * let drumPat = [0,1,1,0,2,0,1,0];
- * let msg = 'click to play';
- *
+ * let cVerb, sound;
* function preload() {
- * box = loadSound('assets/beatbox.mp3');
- * drum = loadSound('assets/drum.mp3');
- * }
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
*
- * function setup() {
- * noStroke();
- * fill(255);
- * textAlign(CENTER);
- * masterVolume(0.1);
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
*
- * let boxPhrase = new p5.Phrase('box', playBox, boxPat);
- * let drumPhrase = new p5.Phrase('drum', playDrum, drumPat);
- * myPart = new p5.Part();
- * myPart.addPhrase(boxPhrase);
- * myPart.addPhrase(drumPhrase);
- * myPart.setBPM(60);
- * masterVolume(0.1);
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
- * function draw() {
- * background(0);
- * text(msg, width/2, height/2);
- * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ * background(220);
+ * text('tap to play', 20, 20);
*
- * function playBox(time, playbackRate) {
- * box.rate(playbackRate);
- * box.play(time);
- * }
+ * // disconnect from master output...
+ * sound.disconnect();
*
- * function playDrum(time, playbackRate) {
- * drum.rate(playbackRate);
- * drum.play(time);
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
* }
*
- * function mouseClicked() {
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * myPart.start();
- * msg = 'playing part';
- * }
+ * function playSound() {
+ * sound.play();
* }
*
*/
- p5.Part = function (steps, bLength) {
- this.length = steps || 0;
- // how many beats
- this.partStep = 0;
- this.phrases = [];
- this.isPlaying = false;
- this.noLoop();
- this.tatums = bLength || 0.0625;
- // defaults to quarter note
- this.metro = new p5.Metro();
- this.metro._init();
- this.metro.beatLength(this.tatums);
- this.metro.setBPM(BPM);
- p5sound.parts.push(this);
- this.callback = function () {
- };
- };
- /**
- * Set the tempo of this part, in Beats Per Minute.
- *
- * @method setBPM
- * @param {Number} BPM Beats Per Minute
- * @param {Number} [rampTime] Seconds from now
- */
- p5.Part.prototype.setBPM = function (tempo, rampTime) {
- this.metro.setBPM(tempo, rampTime);
- };
- /**
- * Returns the tempo, in Beats Per Minute, of this part.
- *
- * @method getBPM
- * @return {Number}
- */
- p5.Part.prototype.getBPM = function () {
- return this.metro.getBPM();
- };
- /**
- * Start playback of this part. It will play
- * through all of its phrases at a speed
- * determined by setBPM.
- *
- * @method start
- * @param {Number} [time] seconds from now
- */
- p5.Part.prototype.start = function (time) {
- if (!this.isPlaying) {
- this.isPlaying = true;
- this.metro.resetSync(this);
- var t = time || 0;
- this.metro.start(t);
+
+ p5.prototype.createConvolver = function (path, callback, errorCallback) {
+ if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
}
+
+ var self = this;
+ var cReverb = new p5.Convolver(path, function (buffer) {
+ if (typeof callback === 'function') {
+ callback(buffer);
+ }
+
+ if (typeof self._decrementPreload === 'function') {
+ self._decrementPreload();
+ }
+ }, errorCallback);
+ cReverb.impulses = [];
+ return cReverb;
};
/**
- * Loop playback of this part. It will begin
- * looping through all of its phrases at a speed
- * determined by setBPM.
+ * Private method to load a buffer as an Impulse Response,
+ * assign it to the convolverNode, and add to the Array of .impulses.
*
- * @method loop
- * @param {Number} [time] seconds from now
+ * @param {String} path
+ * @param {Function} callback
+ * @param {Function} errorCallback
+ * @private
*/
- p5.Part.prototype.loop = function (time) {
- this.looping = true;
- // rest onended function
- this.onended = function () {
- this.partStep = 0;
+
+
+ p5.Convolver.prototype._loadBuffer = function (path, callback, errorCallback) {
+ var path = p5.prototype._checkFileFormats(path);
+
+ var self = this;
+ var errorTrace = new Error().stack;
+ var ac = p5.prototype.getAudioContext();
+ var request = new XMLHttpRequest();
+ request.open('GET', path, true);
+ request.responseType = 'arraybuffer';
+
+ request.onload = function () {
+ if (request.status === 200) {
+ ac.decodeAudioData(request.response, function (buff) {
+ var buffer = {};
+ var chunks = path.split('/');
+ buffer.name = chunks[chunks.length - 1];
+ buffer.audioBuffer = buff;
+ self.impulses.push(buffer);
+
+ self._setBuffer(buffer.audioBuffer);
+
+ if (callback) {
+ callback(buffer);
+ }
+ },
+ function () {
+ var err = new CustomError('decodeAudioData', errorTrace, self.url);
+ var msg = 'AudioContext error at decodeAudioData for ' + self.url;
+
+ if (errorCallback) {
+ err.msg = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ });
+ }
+ else {
+ var err = new CustomError('loadConvolver', errorTrace, self.url);
+ var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
+
+ if (errorCallback) {
+ err.message = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ }
+ };
+
+
+ request.onerror = function () {
+ var err = new CustomError('loadConvolver', errorTrace, self.url);
+ var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
+
+ if (errorCallback) {
+ err.message = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
};
- var t = time || 0;
- this.start(t);
+
+ request.send();
};
+
+ p5.Convolver.prototype.set = null;
/**
- * Tell the part to stop looping.
+ * Connect a source to the convolver.
*
- * @method noLoop
- */
- p5.Part.prototype.noLoop = function () {
- this.looping = false;
- // rest onended function
- this.onended = function () {
- this.stop();
- };
- };
- /**
- * Stop the part and cue it to step 0. Playback will resume from the beginning of the Part when it is played again.
+ * @method process
+ * @for p5.Convolver
+ * @param {Object} src p5.sound / Web Audio object with a sound
+ * output.
+ * @example
+ *
+ * let cVerb, sound;
+ * function preload() {
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
*
- * @method stop
- * @param {Number} [time] seconds from now
- */
- p5.Part.prototype.stop = function (time) {
- this.partStep = 0;
- this.pause(time);
- };
- /**
- * Pause the part. Playback will resume
- * from the current step.
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
*
- * @method pause
- * @param {Number} time seconds from now
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSound);
+ * background(220);
+ * text('tap to play', 20, 20);
+ *
+ * // disconnect from master output...
+ * sound.disconnect();
+ *
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
+ * }
+ *
+ * function playSound() {
+ * sound.play();
+ * }
+ *
+ *
*/
- p5.Part.prototype.pause = function (time) {
- this.isPlaying = false;
- var t = time || 0;
- this.metro.stop(t);
+
+ p5.Convolver.prototype.process = function (src) {
+ src.connect(this.input);
};
/**
- * Add a p5.Phrase to this Part.
+ * If you load multiple impulse files using the .addImpulse method,
+ * they will be stored as Objects in this Array. Toggle between them
+ * with the toggleImpulse(id) method.
*
- * @method addPhrase
- * @param {p5.Phrase} phrase reference to a p5.Phrase
+ * @property {Array} impulses
+ * @for p5.Convolver
*/
- p5.Part.prototype.addPhrase = function (name, callback, array) {
- var p;
- if (arguments.length === 3) {
- p = new p5.Phrase(name, callback, array);
- } else if (arguments[0] instanceof p5.Phrase) {
- p = arguments[0];
- } else {
- throw 'invalid input. addPhrase accepts name, callback, array or a p5.Phrase';
- }
- this.phrases.push(p);
- // reset the length if phrase is longer than part's existing length
- if (p.sequence.length > this.length) {
- this.length = p.sequence.length;
- }
- };
+
+
+ p5.Convolver.prototype.impulses = [];
/**
- * Remove a phrase from this part, based on the name it was
- * given when it was created.
+ * Load and assign a new Impulse Response to the p5.Convolver.
+ * The impulse is added to the .impulses array. Previous
+ * impulses can be accessed with the .toggleImpulse(id)
+ * method.
*
- * @method removePhrase
- * @param {String} phraseName
+ * @method addImpulse
+ * @for p5.Convolver
+ * @param {String} path path to a sound file
+ * @param {Function} callback function (optional)
+ * @param {Function} errorCallback function (optional)
*/
- p5.Part.prototype.removePhrase = function (name) {
- for (var i in this.phrases) {
- if (this.phrases[i].name === name) {
- this.phrases.splice(i, 1);
- }
+
+ p5.Convolver.prototype.addImpulse = function (path, callback, errorCallback) {
+ if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
}
+
+ this._loadBuffer(path, callback, errorCallback);
};
/**
- * Get a phrase from this part, based on the name it was
- * given when it was created. Now you can modify its array.
+ * Similar to .addImpulse, except that the .impulses
+ * Array is reset to save memory. A new .impulses
+ * array is created with this impulse as the only item.
*
- * @method getPhrase
- * @param {String} phraseName
+ * @method resetImpulse
+ * @for p5.Convolver
+ * @param {String} path path to a sound file
+ * @param {Function} callback function (optional)
+ * @param {Function} errorCallback function (optional)
*/
- p5.Part.prototype.getPhrase = function (name) {
- for (var i in this.phrases) {
- if (this.phrases[i].name === name) {
- return this.phrases[i];
- }
+
+
+ p5.Convolver.prototype.resetImpulse = function (path, callback, errorCallback) {
+ if (window.location.origin.indexOf('file://') > -1 && typeof window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
}
+
+ this.impulses = [];
+
+ this._loadBuffer(path, callback, errorCallback);
};
/**
- * Find all sequences with the specified name, and replace their patterns with the specified array.
+ * If you have used .addImpulse() to add multiple impulses
+ * to a p5.Convolver, then you can use this method to toggle between
+ * the items in the .impulses Array. Accepts a parameter
+ * to identify which impulse you wish to use, identified either by its
+ * original filename (String) or by its position in the .impulses
+ * Array (Number).
+ * You can access the objects in the .impulses Array directly. Each
+ * Object has two attributes: an .audioBuffer (type: Web Audio
+ * AudioBuffer) and a .name, a String that corresponds
+ * with the original filename.
*
- * @method replaceSequence
- * @param {String} phraseName
- * @param {Array} sequence Array of values to pass into the callback
- * at each step of the phrase.
+ * @method toggleImpulse
+ * @for p5.Convolver
+ * @param {String|Number} id Identify the impulse by its original filename
+ * (String), or by its position in the
+ * .impulses Array (Number).
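+ *
+ * Below is a minimal sketch of toggling impulses; the two impulse
+ * file names are hypothetical stand-ins for any impulse responses.
+ * @example
+ * let cVerb, sound;
+ * let useSecondImpulse = false;
+ *
+ * function preload() {
+ *   cVerb = createConvolver('assets/tiny_room.mp3');
+ *   cVerb.addImpulse('assets/huge_hall.mp3');
+ *   sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ *
+ * function setup() {
+ *   let cnv = createCanvas(100, 100);
+ *   cnv.mousePressed(toggleAndPlay);
+ *   background(220);
+ *   text('tap to toggle', 20, 20);
+ *   sound.disconnect();
+ *   cVerb.process(sound);
+ * }
+ *
+ * function toggleAndPlay() {
+ *   useSecondImpulse = !useSecondImpulse;
+ *   // identify the impulse by its position in the .impulses Array
+ *   cVerb.toggleImpulse(useSecondImpulse ? 1 : 0);
+ *   sound.play();
+ * }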
*/
- p5.Part.prototype.replaceSequence = function (name, array) {
- for (var i in this.phrases) {
- if (this.phrases[i].name === name) {
- this.phrases[i].sequence = array;
- }
+
+
+ p5.Convolver.prototype.toggleImpulse = function (id) {
+ if (typeof id === 'number' && id < this.impulses.length) {
+ this._setBuffer(this.impulses[id].audioBuffer);
}
- };
- p5.Part.prototype.incrementStep = function (time) {
- if (this.partStep < this.length - 1) {
- this.callback(time);
- this.partStep += 1;
- } else {
- if (!this.looping && this.partStep === this.length - 1) {
- console.log('done');
- // this.callback(time);
- this.onended();
+
+ if (typeof id === 'string') {
+ for (var i = 0; i < this.impulses.length; i++) {
+ if (this.impulses[i].name === id) {
+ this._setBuffer(this.impulses[i].audioBuffer);
+
+ break;
+ }
}
}
};
- /**
- * Set the function that will be called at every step. This will clear the previous function.
- *
- * @method onStep
- * @param {Function} callback The name of the callback
- * you want to fire
- * on every beat/tatum.
- */
- p5.Part.prototype.onStep = function (callback) {
- this.callback = callback;
- };
- // ===============
- // p5.Score
- // ===============
- /**
- * A Score consists of a series of Parts. The parts will
- * be played back in order. For example, you could have an
- * A part, a B part, and a C part, and play them back in this order
- * new p5.Score(a, a, b, a, c)
- *
- * @class p5.Score
- * @constructor
- * @param {p5.Part} [...parts] One or multiple parts, to be played in sequence.
- */
- p5.Score = function () {
- // for all of the arguments
- this.parts = [];
- this.currentPart = 0;
- var thisScore = this;
- for (var i in arguments) {
- if (arguments[i] && this.parts[i]) {
- this.parts[i] = arguments[i];
- this.parts[i].nextPart = this.parts[i + 1];
- this.parts[i].onended = function () {
- thisScore.resetPart(i);
- playNextPart(thisScore);
- };
+
+ p5.Convolver.prototype.dispose = function () {
+ p5.Reverb.prototype.dispose.apply(this);
+
+ for (var i in this.impulses) {
+ if (this.impulses[i]) {
+ this.impulses[i] = null;
}
}
- this.looping = false;
};
- p5.Score.prototype.onended = function () {
- if (this.looping) {
- // this.resetParts();
- this.parts[0].start();
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+
+ var Clock = __webpack_require__(27);
+
+ p5.Metro = function () {
+ this.clock = new Clock({
+ 'callback': this.ontick.bind(this)
+ });
+ this.syncedParts = [];
+ this.bpm = 120;
+
+ this._init();
+
+ this.prevTick = 0;
+ this.tatumTime = 0;
+
+ this.tickCallback = function () {};
+ };
+
+ p5.Metro.prototype.ontick = function (tickTime) {
+ var elapsedTime = tickTime - this.prevTick;
+ var secondsFromNow = tickTime - p5sound.audiocontext.currentTime;
+
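+ // ticks are scheduled slightly ahead of time; skip any tick that
+ // arrives more than 20ms earlier than a full tatum interval allows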
+ if (elapsedTime - this.tatumTime <= -0.02) {
+ return;
} else {
- this.parts[this.parts.length - 1].onended = function () {
- this.stop();
- this.resetParts();
- };
+ this.prevTick = tickTime;
+
+ var self = this;
+ this.syncedParts.forEach(function (thisPart) {
+ if (!thisPart.isPlaying) return;
+ thisPart.incrementStep(secondsFromNow);
+
+ thisPart.phrases.forEach(function (thisPhrase) {
+ var phraseArray = thisPhrase.sequence;
+ var bNum = self.metroTicks % phraseArray.length;
+
+ if (phraseArray[bNum] !== 0 && (self.metroTicks < phraseArray.length || !thisPhrase.looping)) {
+ thisPhrase.callback(secondsFromNow, phraseArray[bNum]);
+ }
+ });
+ });
+ this.metroTicks += 1;
+ this.tickCallback(secondsFromNow);
}
- this.currentPart = 0;
- };
- /**
- * Start playback of the score.
- *
- * @method start
- */
- p5.Score.prototype.start = function () {
- this.parts[this.currentPart].start();
- this.scoreStep = 0;
};
- /**
- * Stop playback of the score.
- *
- * @method stop
- */
- p5.Score.prototype.stop = function () {
- this.parts[this.currentPart].stop();
- this.currentPart = 0;
- this.scoreStep = 0;
+
+ p5.Metro.prototype.setBPM = function (bpm, rampTime) {
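+ // this.tatums holds ticks per beat (set by beatLength), so one
+ // tick lasts 60 / (bpm * this.tatums) seconds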
+ var beatTime = 60 / (bpm * this.tatums);
+ var now = p5sound.audiocontext.currentTime;
+ this.tatumTime = beatTime;
+ rampTime = rampTime || 0;
+ this.clock.frequency.setValueAtTime(this.clock.frequency.value, now);
+ this.clock.frequency.linearRampToValueAtTime(bpm, now + rampTime);
+ this.bpm = bpm;
};
- /**
- * Pause playback of the score.
- *
- * @method pause
- */
- p5.Score.prototype.pause = function () {
- this.parts[this.currentPart].stop();
+
+ p5.Metro.prototype.getBPM = function () {
+ return this.clock.getRate() / this.tatums * 60;
};
- /**
- * Loop playback of the score.
- *
- * @method loop
- */
- p5.Score.prototype.loop = function () {
- this.looping = true;
- this.start();
+
+ p5.Metro.prototype._init = function () {
+ this.metroTicks = 0;
+ };
+
+
+ p5.Metro.prototype.resetSync = function (part) {
+ this.syncedParts = [part];
+ };
+
+
+ p5.Metro.prototype.pushSync = function (part) {
+ this.syncedParts.push(part);
};
- /**
- * Stop looping playback of the score. If it
- * is currently playing, this will go into effect
- * after the current round of playback completes.
- *
- * @method noLoop
- */
- p5.Score.prototype.noLoop = function () {
- this.looping = false;
+
+ p5.Metro.prototype.start = function (timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+ this.clock.start(now + t);
+ this.setBPM(this.bpm);
};
- p5.Score.prototype.resetParts = function () {
- var self = this;
- this.parts.forEach(function (part) {
- self.resetParts[part];
- });
+
+ p5.Metro.prototype.stop = function (timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+ this.clock.stop(now + t);
};
- p5.Score.prototype.resetPart = function (i) {
- this.parts[i].stop();
- this.parts[i].partStep = 0;
- for (var p in this.parts[i].phrases) {
- if (this.parts[i]) {
- this.parts[i].phrases[p].phraseStep = 0;
- }
- }
+
+ p5.Metro.prototype.beatLength = function (tatums) {
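+ // tatums is the length of one step as a fraction of a whole note
+ // (e.g. 0.0625 for a sixteenth); stored internally as ticks per beat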
+ this.tatums = 1 / tatums / 4;
};
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(24),__webpack_require__(8)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(t){"use strict";return t.TimelineState=function(e){t.Timeline.call(this),this._initial=e},t.extend(t.TimelineState,t.Timeline),t.TimelineState.prototype.getValueAtTime=function(e){var t=this.get(e);return null!==t?t.state:this._initial},t.TimelineState.prototype.setStateAtTime=function(e,t){this.add({state:e,time:t})},t.TimelineState}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var BPM = 120;
/**
- * Set the tempo for all parts in the score
+ * Set the global tempo, in beats per minute, for all
+ * p5.Parts. This affects every p5.Part that is currently active.
*
* @method setBPM
+ * @for p5
* @param {Number} BPM Beats Per Minute
* @param {Number} rampTime Seconds from now
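+ * @example
+ * // a fragment: ramp every active p5.Part to 90 BPM over 2 seconds
+ * setBPM(90, 2);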
*/
- p5.Score.prototype.setBPM = function (bpm, rampTime) {
- for (var i in this.parts) {
- if (this.parts[i]) {
- this.parts[i].setBPM(bpm, rampTime);
+
+ p5.prototype.setBPM = function (bpm, rampTime) {
+ BPM = bpm;
+
+ for (var i in p5sound.parts) {
+ if (p5sound.parts[i]) {
+ p5sound.parts[i].setBPM(bpm, rampTime);
}
}
};
- function playNextPart(aScore) {
- aScore.currentPart++;
- if (aScore.currentPart >= aScore.parts.length) {
- aScore.scoreStep = 0;
- aScore.onended();
- } else {
- aScore.scoreStep = 0;
- aScore.parts[aScore.currentPart - 1].stop();
- aScore.parts[aScore.currentPart].start();
- }
- }
-}(master);
-var soundloop;
-'use strict';
-soundloop = function () {
- var p5sound = master;
- var Clock = Tone_core_Clock;
/**
- * SoundLoop
+ * A phrase is a pattern of musical events over time, i.e.
+ * a series of notes and rests.
*
- * @class p5.SoundLoop
- * @constructor
+ * Phrases must be added to a p5.Part for playback, and
+ * each part can play multiple phrases at the same time.
+ * For example, one Phrase might be a kick drum, another
+ * could be a snare, and another could be the bassline.
*
- * @param {Function} callback this function will be called on each iteration of theloop
- * @param {Number|String} [interval] amount of time or beats for each iteration of the loop
- * defaults to 1
+ * The first parameter is a name so that the phrase can be
+ * modified or deleted later. The callback is a function that
+ * this phrase will call at every step; for example, it might be
+ * called playNote(value){}. The array determines
+ * which value is passed into the callback at each step of the
+ * phrase. Values can be numbers, objects with multiple numbers,
+ * or zero (0), which indicates a rest, so the callback
+ * won't be called at that step.
*
- * @example
- *
- * let click;
- * let looper1;
+ * @class p5.Phrase
+ * @constructor
+ * @param {String} name Name so that you can access the Phrase.
+ * @param {Function} callback The name of a function that this phrase
+ * will call. Typically it will play a sound,
+ * and accept two parameters: a time at which
+ * to play the sound (in seconds from now),
+ * and a value from the sequence array. The
+ * time should be passed into the play() or
+ * start() method to ensure precision.
+ * @param {Array} sequence Array of values to pass into the callback
+ * at each step of the phrase.
+ * @example
+ *
+ * let mySound, myPhrase, myPart;
+ * let pattern = [1,0,0,2,0,2,0,0];
+ *
+ * function preload() {
+ * mySound = loadSound('assets/beatbox.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playMyPart);
+ * background(220);
+ * textAlign(CENTER, CENTER);
+ * text('tap to play', width/2, height/2);
+ *
+ * myPhrase = new p5.Phrase('bbox', onEachStep, pattern);
+ * myPart = new p5.Part();
+ * myPart.addPhrase(myPhrase);
+ * myPart.setBPM(60);
+ * }
+ *
+ * function onEachStep(time, playbackRate) {
+ * mySound.rate(playbackRate);
+ * mySound.play(time);
+ * }
+ *
+ * function playMyPart() {
+ * userStartAudio();
+ * myPart.start();
+ * }
+ *
+ */
+
+
+ p5.Phrase = function (name, callback, sequence) {
+ this.phraseStep = 0;
+ this.name = name;
+ this.callback = callback;
+ /**
+ * Array of values to pass into the callback
+ * at each step of the phrase. Depending on the callback
+ * function's requirements, these values may be numbers,
+ * strings, or an object with multiple parameters.
+ * Zero (0) indicates a rest.
+ *
+ * @property {Array} sequence
+ */
+
+ this.sequence = sequence;
+ };
+ /**
+ * A p5.Part plays back one or more p5.Phrases. Instantiate a part
+ * with steps and tatums. By default, each step represents a 1/16th note.
+ *
+ * See p5.Phrase for more about musical timing.
+ *
+ * @class p5.Part
+ * @constructor
+ * @param {Number} [steps] Steps in the part
+ * @param {Number} [tatums] Divisions of a beat, e.g. use 1/4, or 0.25 for a quarter note (default is 1/16, a sixteenth note)
+ * @example
+ *
+ * let box, drum, myPart;
+ * let boxPat = [1,0,0,2,0,2,0,0];
+ * let drumPat = [0,1,1,0,2,0,1,0];
+ *
+ * function preload() {
+ * box = loadSound('assets/beatbox.mp3');
+ * drum = loadSound('assets/drum.mp3');
+ * }
+ *
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playMyPart);
+ * background(220);
+ * textAlign(CENTER, CENTER);
+ * text('tap to play', width/2, height/2);
+ *
+ * let boxPhrase = new p5.Phrase('box', playBox, boxPat);
+ * let drumPhrase = new p5.Phrase('drum', playDrum, drumPat);
+ * myPart = new p5.Part();
+ * myPart.addPhrase(boxPhrase);
+ * myPart.addPhrase(drumPhrase);
+ * myPart.setBPM(60);
+ * }
+ *
+ * function playBox(time, playbackRate) {
+ * box.rate(playbackRate);
+ * box.play(time);
+ * }
*
- * function preload() {
- * click = loadSound('assets/drum.mp3');
- * }
+ * function playDrum(time, playbackRate) {
+ * drum.rate(playbackRate);
+ * drum.play(time);
+ * }
*
- * function setup() {
- * //the looper's callback is passed the timeFromNow
- * //this value should be used as a reference point from
- * //which to schedule sounds
- * looper1 = new p5.SoundLoop(function(timeFromNow){
- * click.play(timeFromNow);
- * background(255 * (looper1.iterations % 2));
- * }, 2);
- *
- * //stop after 10 iteratios;
- * looper1.maxIterations = 10;
- * //start the loop
- * looper1.start();
- * }
- *
+ * function playMyPart() {
+ * userStartAudio();
+ *
+ * myPart.start();
+ * }
+ *
*/
- p5.SoundLoop = function (callback, interval) {
- this.callback = callback;
- /**
- * musicalTimeMode uses Tone.Time convention
- * true if string, false if number
- * @property {Boolean} musicalTimeMode
- */
- this.musicalTimeMode = typeof this._interval === 'number' ? false : true;
- this._interval = interval || 1;
- /**
- * musicalTimeMode variables
- * modify these only when the interval is specified in musicalTime format as a string
- */
- this._timeSignature = 4;
- this._bpm = 60;
+
+
+ p5.Part = function (steps, bLength) {
+ this.length = steps || 0;
+
+ this.partStep = 0;
+ this.phrases = [];
this.isPlaying = false;
- /**
- * Set a limit to the number of loops to play. defaults to Infinity
- * @property {Number} maxIterations
- */
- this.maxIterations = Infinity;
- var self = this;
- this.clock = new Clock({
- 'callback': function (time) {
- var timeFromNow = time - p5sound.audiocontext.currentTime;
- /**
- * Do not initiate the callback if timeFromNow is < 0
- * This ususually occurs for a few milliseconds when the page
- * is not fully loaded
- *
- * The callback should only be called until maxIterations is reached
- */
- if (timeFromNow > 0 && self.iterations <= self.maxIterations) {
- self.callback(timeFromNow);
- }
- },
- 'frequency': this._calcFreq()
- });
+ this.noLoop();
+ this.tatums = bLength || 0.0625;
+
+ this.metro = new p5.Metro();
+
+ this.metro._init();
+
+ this.metro.beatLength(this.tatums);
+ this.metro.setBPM(BPM);
+ p5sound.parts.push(this);
+
+ this.callback = function () {};
};
/**
- * Start the loop
- * @method start
- * @param {Number} [timeFromNow] schedule a starting time
+ * Set the tempo of this part, in Beats Per Minute.
+ *
+ * @method setBPM
+ * @for p5.Part
+ * @param {Number} BPM Beats Per Minute
+ * @param {Number} [rampTime] Seconds from now
*/
- p5.SoundLoop.prototype.start = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- if (!this.isPlaying) {
- this.clock.start(now + t);
- this.isPlaying = true;
- }
+
+
+ p5.Part.prototype.setBPM = function (tempo, rampTime) {
+ this.metro.setBPM(tempo, rampTime);
};
/**
- * Stop the loop
- * @method stop
- * @param {Number} [timeFromNow] schedule a stopping time
+ * Returns the tempo, in Beats Per Minute, of this part.
+ *
+ * @method getBPM
+ * @for p5.Part
+ * @return {Number}
*/
- p5.SoundLoop.prototype.stop = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- if (this.isPlaying) {
- this.clock.stop(now + t);
- this.isPlaying = false;
- }
+
+
+ p5.Part.prototype.getBPM = function () {
+ return this.metro.getBPM();
};
/**
- * Pause the loop
- * @method pause
- * @param {Number} [timeFromNow] schedule a pausing time
+ * Start playback of this part. It will play
+ * through all of its phrases at a speed
+ * determined by setBPM.
+ *
+ * @method start
+ * @for p5.Part
+ * @param {Number} [time] seconds from now
*/
- p5.SoundLoop.prototype.pause = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- if (this.isPlaying) {
- this.clock.pause(now + t);
- this.isPlaying = false;
+
+
+ p5.Part.prototype.start = function (time) {
+ if (!this.isPlaying) {
+ this.isPlaying = true;
+ this.metro.resetSync(this);
+ var t = time || 0;
+ this.metro.start(t);
}
};
/**
- * Synchronize loops. Use this method to start two more more loops in synchronization
- * or to start a loop in synchronization with a loop that is already playing
- * This method will schedule the implicit loop in sync with the explicit master loop
- * i.e. loopToStart.syncedStart(loopToSyncWith)
- *
- * @method syncedStart
- * @param {Object} otherLoop a p5.SoundLoop to sync with
- * @param {Number} [timeFromNow] Start the loops in sync after timeFromNow seconds
+ * Loop playback of this part. It will begin
+ * looping through all of its phrases at a speed
+ * determined by setBPM.
+ *
+ * @method loop
+ * @for p5.Part
+ * @param {Number} [time] seconds from now
*/
- p5.SoundLoop.prototype.syncedStart = function (otherLoop, timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- if (!otherLoop.isPlaying) {
- otherLoop.clock.start(now + t);
- otherLoop.isPlaying = true;
- this.clock.start(now + t);
- this.isPlaying = true;
- } else if (otherLoop.isPlaying) {
- var time = otherLoop.clock._nextTick - p5sound.audiocontext.currentTime;
- this.clock.start(now + time);
- this.isPlaying = true;
- }
+
+
+ p5.Part.prototype.loop = function (time) {
+ this.looping = true;
+
+ this.onended = function () {
+ this.partStep = 0;
+ };
+
+ var t = time || 0;
+ this.start(t);
};
/**
- * Updates frequency value, reflected in next callback
- * @private
- * @method _update
+ * Tell the part to stop looping.
+ *
+ * @method noLoop
+ * @for p5.Part
*/
- p5.SoundLoop.prototype._update = function () {
- this.clock.frequency.value = this._calcFreq();
+
+
+ p5.Part.prototype.noLoop = function () {
+ this.looping = false;
+
+ this.onended = function () {
+ this.stop();
+ };
};
/**
- * Calculate the frequency of the clock's callback based on bpm, interval, and timesignature
- * @private
- * @method _calcFreq
- * @return {Number} new clock frequency value
+ * Stop the part and cue it to step 0. Playback will resume from the beginning of the Part when it is played again.
+ *
+ * @method stop
+ * @for p5.Part
+ * @param {Number} [time] seconds from now
*/
- p5.SoundLoop.prototype._calcFreq = function () {
- //Seconds mode, bpm / timesignature has no effect
- if (typeof this._interval === 'number') {
- this.musicalTimeMode = false;
- return 1 / this._interval;
- } else if (typeof this._interval === 'string') {
- this.musicalTimeMode = true;
- return this._bpm / 60 / this._convertNotation(this._interval) * (this._timeSignature / 4);
- }
+
+
+ p5.Part.prototype.stop = function (time) {
+ this.partStep = 0;
+ this.pause(time);
};
/**
- * Convert notation from musical time format to seconds
- * Uses Tone.Time convention
- * @private
- * @method _convertNotation
- * @param {String} value value to be converted
- * @return {Number} converted value in seconds
+ * Pause the part. Playback will resume
+ * from the current step.
+ *
+ * @method pause
+ * @for p5.Part
+ * @param {Number} [time] seconds from now
*/
- p5.SoundLoop.prototype._convertNotation = function (value) {
- var type = value.slice(-1);
- value = Number(value.slice(0, -1));
- switch (type) {
- case 'm':
- return this._measure(value);
- case 'n':
- return this._note(value);
- default:
- console.warn('Specified interval is not formatted correctly. See Tone.js ' + 'timing reference for more info: https://github.com/Tonejs/Tone.js/wiki/Time');
- }
+
+
+ p5.Part.prototype.pause = function (time) {
+ this.isPlaying = false;
+ var t = time || 0;
+ this.metro.stop(t);
};
/**
- * Helper conversion methods of measure and note
- * @private
- * @method _measure
- * @private
- * @method _note
+ * Add a p5.Phrase to this Part.
+ *
+ * @method addPhrase
+ * @for p5.Part
+ * @param {p5.Phrase} phrase reference to a p5.Phrase
*/
- p5.SoundLoop.prototype._measure = function (value) {
- return value * this._timeSignature;
- };
- p5.SoundLoop.prototype._note = function (value) {
- return this._timeSignature / value;
+
+
+ p5.Part.prototype.addPhrase = function (name, callback, array) {
+ var p;
+
+ if (arguments.length === 3) {
+ p = new p5.Phrase(name, callback, array);
+ } else if (arguments[0] instanceof p5.Phrase) {
+ p = arguments[0];
+ } else {
+ throw 'invalid input. addPhrase accepts name, callback, array or a p5.Phrase';
+ }
+
+ this.phrases.push(p);
+
+ if (p.sequence.length > this.length) {
+ this.length = p.sequence.length;
+ }
};
/**
- * Getters and Setters, setting any paramter will result in a change in the clock's
- * frequency, that will be reflected after the next callback
- * beats per minute (defaults to 60)
- * @property {Number} bpm
- */
- Object.defineProperty(p5.SoundLoop.prototype, 'bpm', {
- get: function () {
- return this._bpm;
- },
- set: function (bpm) {
- if (!this.musicalTimeMode) {
- console.warn('Changing the BPM in "seconds" mode has no effect. ' + 'BPM is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
+ * Remove a phrase from this part, based on the name it was
+ * given when it was created.
+ *
+ * @method removePhrase
+ * @for p5.Part
+ * @param {String} phraseName
+ */
+
+
+ p5.Part.prototype.removePhrase = function (name) {
+ for (var i in this.phrases) {
+ if (this.phrases[i].name === name) {
+ this.phrases.splice(i, 1);
}
- this._bpm = bpm;
- this._update();
}
- });
+ };
/**
- * number of quarter notes in a measure (defaults to 4)
- * @property {Number} timeSignature
+ * Get a phrase from this part, based on the name it was
+ * given when it was created. Now you can modify its array.
+ *
+ * @method getPhrase
+ * @for p5.Part
+ * @param {String} phraseName
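+ * @example
+ * // a fragment, assuming myPart contains a phrase named 'box'
+ * // (see the p5.Part example above): mute its third step
+ * myPart.getPhrase('box').sequence[2] = 0;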
*/
- Object.defineProperty(p5.SoundLoop.prototype, 'timeSignature', {
- get: function () {
- return this._timeSignature;
- },
- set: function (timeSig) {
- if (!this.musicalTimeMode) {
- console.warn('Changing the timeSignature in "seconds" mode has no effect. ' + 'BPM is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
+
+
+ p5.Part.prototype.getPhrase = function (name) {
+ for (var i in this.phrases) {
+ if (this.phrases[i].name === name) {
+ return this.phrases[i];
}
- this._timeSignature = timeSig;
- this._update();
}
- });
+ };
/**
- * length of the loops interval
- * @property {Number|String} interval
+ * Find all sequences with the specified name, and replace their patterns with the specified array.
+ *
+ * @method replaceSequence
+ * @for p5.Part
+ * @param {String} phraseName
+ * @param {Array} sequence Array of values to pass into the callback
+ * at each step of the phrase.
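+ * @example
+ * // a fragment, assuming myPart contains a phrase named 'box':
+ * // swap in a denser pattern while the part plays
+ * myPart.replaceSequence('box', [1, 1, 0, 2, 0, 2, 1, 1]);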
*/
- Object.defineProperty(p5.SoundLoop.prototype, 'interval', {
- get: function () {
- return this._interval;
- },
- set: function (interval) {
- this.musicalTimeMode = typeof interval === 'Number' ? false : true;
- this._interval = interval;
- this._update();
+
+
+ p5.Part.prototype.replaceSequence = function (name, array) {
+ for (var i in this.phrases) {
+ if (this.phrases[i].name === name) {
+ this.phrases[i].sequence = array;
+ }
}
- });
- /**
- * how many times the callback has been called so far
- * @property {Number} iterations
- * @readonly
- */
- Object.defineProperty(p5.SoundLoop.prototype, 'iterations', {
- get: function () {
- return this.clock.ticks;
+ };
+
+ p5.Part.prototype.incrementStep = function (time) {
+ if (this.partStep < this.length - 1) {
+ this.callback(time);
+ this.partStep += 1;
+ } else {
+ if (!this.looping && this.partStep === this.length - 1) {
+ this.onended();
+ }
}
- });
- return p5.SoundLoop;
-}(master, Tone_core_Clock);
-var compressor;
-compressor = function () {
- 'use strict';
- var p5sound = master;
- var Effect = effect;
- var CustomError = errorHandler;
+ };
/**
- * Compressor is an audio effect class that performs dynamics compression
- * on an audio input source. This is a very commonly used technique in music
- * and sound production. Compression creates an overall louder, richer,
- * and fuller sound by lowering the volume of louds and raising that of softs.
- * Compression can be used to avoid clipping (sound distortion due to
- * peaks in volume) and is especially useful when many sounds are played
- * at once. Compression can be used on indivudal sound sources in addition
- * to the master output.
- *
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
- * disconnect() are available.
- *
- * @class p5.Compressor
- * @constructor
- * @extends p5.Effect
+ * Set the function that will be called at every step. This will clear the previous function.
*
- *
+ * @method onStep
+ * @for p5.Part
+ * @param {Function} callback The name of the callback
+ * you want to fire
+ * on every beat/tatum.
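+ * @example
+ * // a fragment: report the part's current step on every tatum
+ * myPart.onStep(function() {
+ *   console.log('step ' + myPart.partStep);
+ * });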
*/
- p5.Compressor = function () {
- Effect.call(this);
- /**
- * The p5.Compressor is built with a Web Audio Dynamics Compressor Node
- *
- * @property {AudioNode} compressor
- */
- this.compressor = this.ac.createDynamicsCompressor();
- this.input.connect(this.compressor);
- this.compressor.connect(this.wet);
- };
- p5.Compressor.prototype = Object.create(Effect.prototype);
- /**
- * Performs the same function as .connect, but also accepts
- * optional parameters to set compressor's audioParams
- * @method process
- *
- * @param {Object} src Sound source to be connected
- *
- * @param {Number} [attack] The amount of time (in seconds) to reduce the gain by 10dB,
- * default = .003, range 0 - 1
- * @param {Number} [knee] A decibel value representing the range above the
- * threshold where the curve smoothly transitions to the "ratio" portion.
- * default = 30, range 0 - 40
- * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
- * default = 12, range 1 - 20
- * @param {Number} [threshold] The decibel value above which the compression will start taking effect
- * default = -24, range -100 - 0
- * @param {Number} [release] The amount of time (in seconds) to increase the gain by 10dB
- * default = .25, range 0 - 1
- */
- p5.Compressor.prototype.process = function (src, attack, knee, ratio, threshold, release) {
- src.connect(this.input);
- this.set(attack, knee, ratio, threshold, release);
- };
+
+
+ p5.Part.prototype.onStep = function (callback) {
+ this.callback = callback;
+ };
+
/**
- * Set the paramters of a compressor.
- * @method set
- * @param {Number} attack The amount of time (in seconds) to reduce the gain by 10dB,
- * default = .003, range 0 - 1
- * @param {Number} knee A decibel value representing the range above the
- * threshold where the curve smoothly transitions to the "ratio" portion.
- * default = 30, range 0 - 40
- * @param {Number} ratio The amount of dB change in input for a 1 dB change in output
- * default = 12, range 1 - 20
- * @param {Number} threshold The decibel value above which the compression will start taking effect
- * default = -24, range -100 - 0
- * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
- * default = .25, range 0 - 1
+ * A Score consists of a series of Parts. The parts will
+ * be played back in order. For example, you could have an
+ * A part, a B part, and a C part, and play them back in this order:
+ * new p5.Score(a, a, b, a, c)
+ *
+ * @class p5.Score
+ * @constructor
+ * @param {p5.Part} [...parts] One or multiple parts, to be played in sequence.
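+ * @example
+ * // a fragment, assuming partA and partB are existing p5.Parts
+ * // (see the p5.Part example): play A twice, then B
+ * let myScore = new p5.Score(partA, partA, partB);
+ * myScore.start();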
*/
- p5.Compressor.prototype.set = function (attack, knee, ratio, threshold, release) {
- if (typeof attack !== 'undefined') {
- this.attack(attack);
- }
- if (typeof knee !== 'undefined') {
- this.knee(knee);
- }
- if (typeof ratio !== 'undefined') {
- this.ratio(ratio);
- }
- if (typeof threshold !== 'undefined') {
- this.threshold(threshold);
+
+
+ p5.Score = function () {
+ this.parts = [];
+ this.currentPart = 0;
+ var thisScore = this;
+
+ for (var i = 0; i < arguments.length; i++) {
+ if (arguments[i]) {
+ this.parts[i] = arguments[i];
+ this.parts[i].nextPart = arguments[i + 1];
+
+ // capture the index so each part resets itself, not the final i
+ this.parts[i].onended = (function (index) {
+ return function () {
+ thisScore.resetPart(index);
+ playNextPart(thisScore);
+ };
+ })(i);
+ }
+ }
- if (typeof release !== 'undefined') {
- this.release(release);
+
+ this.looping = false;
+ };
+
+ p5.Score.prototype.onended = function () {
+ if (this.looping) {
+ this.parts[0].start();
+ } else {
+ // capture the score; inside onended, `this` is the part
+ var self = this;
+ this.parts[this.parts.length - 1].onended = function () {
+ self.stop();
+ self.resetParts();
+ };
+ }
+
+ this.currentPart = 0;
};
/**
- * Get current attack or set value w/ time ramp
- *
- *
- * @method attack
- * @param {Number} [attack] Attack is the amount of time (in seconds) to reduce the gain by 10dB,
- * default = .003, range 0 - 1
- * @param {Number} [time] Assign time value to schedule the change in value
+ * Start playback of the score.
+ *
+ * @method start
+ * @for p5.Score
*/
- p5.Compressor.prototype.attack = function (attack, time) {
- var t = time || 0;
- if (typeof attack == 'number') {
- this.compressor.attack.value = attack;
- this.compressor.attack.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.attack.linearRampToValueAtTime(attack, this.ac.currentTime + 0.02 + t);
- } else if (typeof attack !== 'undefined') {
- attack.connect(this.compressor.attack);
- }
- return this.compressor.attack.value;
+
+
+ p5.Score.prototype.start = function () {
+ this.parts[this.currentPart].start();
+ this.scoreStep = 0;
};
/**
- * Get current knee or set value w/ time ramp
- *
- * @method knee
- * @param {Number} [knee] A decibel value representing the range above the
- * threshold where the curve smoothly transitions to the "ratio" portion.
- * default = 30, range 0 - 40
- * @param {Number} [time] Assign time value to schedule the change in value
+ * Stop playback of the score.
+ *
+ * @method stop
+ * @for p5.Score
*/
- p5.Compressor.prototype.knee = function (knee, time) {
- var t = time || 0;
- if (typeof knee == 'number') {
- this.compressor.knee.value = knee;
- this.compressor.knee.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.knee.linearRampToValueAtTime(knee, this.ac.currentTime + 0.02 + t);
- } else if (typeof knee !== 'undefined') {
- knee.connect(this.compressor.knee);
- }
- return this.compressor.knee.value;
+
+
+ p5.Score.prototype.stop = function () {
+ this.parts[this.currentPart].stop();
+ this.currentPart = 0;
+ this.scoreStep = 0;
};
/**
- * Get current ratio or set value w/ time ramp
- * @method ratio
+ * Pause playback of the score.
*
- * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
- * default = 12, range 1 - 20
- * @param {Number} [time] Assign time value to schedule the change in value
+ * @method pause
+ * @for p5.Score
*/
- p5.Compressor.prototype.ratio = function (ratio, time) {
- var t = time || 0;
- if (typeof ratio == 'number') {
- this.compressor.ratio.value = ratio;
- this.compressor.ratio.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.ratio.linearRampToValueAtTime(ratio, this.ac.currentTime + 0.02 + t);
- } else if (typeof ratio !== 'undefined') {
- ratio.connect(this.compressor.ratio);
- }
- return this.compressor.ratio.value;
+
+
+ p5.Score.prototype.pause = function () {
+ this.parts[this.currentPart].stop();
};
/**
- * Get current threshold or set value w/ time ramp
- * @method threshold
+ * Loop playback of the score.
*
- * @param {Number} threshold The decibel value above which the compression will start taking effect
- * default = -24, range -100 - 0
- * @param {Number} [time] Assign time value to schedule the change in value
- */
- p5.Compressor.prototype.threshold = function (threshold, time) {
- var t = time || 0;
- if (typeof threshold == 'number') {
- this.compressor.threshold.value = threshold;
- this.compressor.threshold.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.threshold.linearRampToValueAtTime(threshold, this.ac.currentTime + 0.02 + t);
- } else if (typeof threshold !== 'undefined') {
- threshold.connect(this.compressor.threshold);
- }
- return this.compressor.threshold.value;
+ * @method loop
+ * @for p5.Score
+ */
+
+
+ p5.Score.prototype.loop = function () {
+ this.looping = true;
+ this.start();
};
/**
- * Get current release or set value w/ time ramp
- * @method release
- *
- * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
- * default = .25, range 0 - 1
+ * Stop looping playback of the score. If it
+ * is currently playing, this will go into effect
+ * after the current round of playback completes.
*
- * @param {Number} [time] Assign time value to schedule the change in value
+ * @method noLoop
+ * @for p5.Score
*/
- p5.Compressor.prototype.release = function (release, time) {
- var t = time || 0;
- if (typeof release == 'number') {
- this.compressor.release.value = release;
- this.compressor.release.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.release.linearRampToValueAtTime(release, this.ac.currentTime + 0.02 + t);
- } else if (typeof number !== 'undefined') {
- release.connect(this.compressor.release);
+
+
+ p5.Score.prototype.noLoop = function () {
+ this.looping = false;
+ };
+
+ p5.Score.prototype.resetParts = function () {
+ var self = this;
+ this.parts.forEach(function (part, i) {
+ self.resetPart(i);
+ });
+ };
+
+ p5.Score.prototype.resetPart = function (i) {
+ this.parts[i].stop();
+ this.parts[i].partStep = 0;
+
+ for (var p in this.parts[i].phrases) {
+ if (this.parts[i]) {
+ this.parts[i].phrases[p].phraseStep = 0;
+ }
}
- return this.compressor.release.value;
};
/**
- * Return the current reduction value
+ * Set the tempo for all parts in the score
*
- * @method reduction
- * @return {Number} Value of the amount of gain reduction that is applied to the signal
+ * @method setBPM
+ * @for p5.Score
+ * @param {Number} BPM Beats Per Minute
+ * @param {Number} rampTime Seconds from now
*/
- p5.Compressor.prototype.reduction = function () {
- return this.compressor.reduction.value;
- };
- p5.Compressor.prototype.dispose = function () {
- Effect.prototype.dispose.apply(this);
- if (this.compressor) {
- this.compressor.disconnect();
- delete this.compressor;
+
+
+ p5.Score.prototype.setBPM = function (bpm, rampTime) {
+ for (var i in this.parts) {
+ if (this.parts[i]) {
+ this.parts[i].setBPM(bpm, rampTime);
+ }
}
};
- return p5.Compressor;
-}(master, effect, errorHandler);
-var soundRecorder;
-'use strict';
-soundRecorder = function () {
- // inspiration: recorder.js, Tone.js & typedarray.org
- var p5sound = master;
- var convertToWav = helpers.convertToWav;
- var ac = p5sound.audiocontext;
+
+ function playNextPart(aScore) {
+ aScore.currentPart++;
+
+ if (aScore.currentPart >= aScore.parts.length) {
+ aScore.scoreStep = 0;
+ aScore.onended();
+ } else {
+ aScore.scoreStep = 0;
+ aScore.parts[aScore.currentPart - 1].stop();
+ aScore.parts[aScore.currentPart].start();
+ }
+ }
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var Clock = __webpack_require__(27);
/**
- * Record sounds for playback and/or to save as a .wav file.
- * The p5.SoundRecorder records all sound output from your sketch,
- * or can be assigned a specific source with setInput().
- * The record() method accepts a p5.SoundFile as a parameter.
- * When playback is stopped (either after the given amount of time,
- * or with the stop() method), the p5.SoundRecorder will send its
- * recording to that p5.SoundFile for playback.
- *
- * @class p5.SoundRecorder
- * @constructor
- * @example
- *
- * let mic, recorder, soundFile;
- * let state = 0;
- *
- * function setup() {
- * background(200);
- * // create an audio in
- * mic = new p5.AudioIn();
- *
- * // prompts user to enable their browser mic
- * mic.start();
- *
- * // create a sound recorder
- * recorder = new p5.SoundRecorder();
- *
- * // connect the mic to the recorder
- * recorder.setInput(mic);
- *
- * // this sound file will be used to
- * // playback & save the recording
- * soundFile = new p5.SoundFile();
+ * SoundLoop
*
- * text('keyPress to record', 20, 20);
- * }
+ * @class p5.SoundLoop
+ * @constructor
*
- * function keyPressed() {
- * // make sure user enabled the mic
- * if (state === 0 && mic.enabled) {
+ * @param {Function} callback this function will be called on each iteration of the loop
+ * @param {Number|String} [interval] amount of time (if a number) or beats (if a string, following Tone.Time convention) for each iteration of the loop. Defaults to 1 second.
*
- * // record to our p5.SoundFile
- * recorder.record(soundFile);
+ * @example
+ *
+ * let synth, soundLoop;
+ * let notePattern = [60, 62, 64, 67, 69, 72];
*
- * background(255,0,0);
- * text('Recording!', 20, 20);
- * state++;
- * }
- * else if (state === 1) {
- * background(0,255,0);
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * colorMode(HSB);
+ * background(0, 0, 86);
+ * text('tap to start/stop', 10, 20);
+ *
+ * //the looper's callback is passed the timeFromNow
+ * //this value should be used as a reference point from
+ * //which to schedule sounds
+ * let intervalInSeconds = 0.2;
+ * soundLoop = new p5.SoundLoop(onSoundLoop, intervalInSeconds);
+ *
+ * synth = new p5.MonoSynth();
+ * }
*
- * // stop recorder and
- * // send result to soundFile
- * recorder.stop();
+ * function canvasPressed() {
+ * // ensure audio is enabled
+ * userStartAudio();
*
- * text('Stopped', 20, 20);
- * state++;
- * }
+ * if (soundLoop.isPlaying) {
+ * soundLoop.stop();
+ * } else {
+ * // start the loop
+ * soundLoop.start();
+ * }
+ * }
*
- * else if (state === 2) {
- * soundFile.play(); // play the result!
- * save(soundFile, 'mySound.wav');
- * state++;
- * }
- * }
- *
+ * function onSoundLoop(timeFromNow) {
+ * let noteIndex = (soundLoop.iterations - 1) % notePattern.length;
+ * let note = midiToFreq(notePattern[noteIndex]);
+ * synth.play(note, 0.5, timeFromNow);
+ * background(noteIndex * 360 / notePattern.length, 50, 100);
+ * }
+ *
*/
- p5.SoundRecorder = function () {
- this.input = ac.createGain();
- this.output = ac.createGain();
- this.recording = false;
- this.bufferSize = 1024;
- this._channels = 2;
- // stereo (default)
- this._clear();
- // initialize variables
- this._jsNode = ac.createScriptProcessor(this.bufferSize, this._channels, 2);
- this._jsNode.onaudioprocess = this._audioprocess.bind(this);
+
+
+ p5.SoundLoop = function (callback, interval) {
+ this.callback = callback;
/**
- * callback invoked when the recording is over
- * @private
- * @type Function(Float32Array)
+ * musicalTimeMode uses Tone.Time convention
+ * true if string, false if number
+ * @property {Boolean} musicalTimeMode
*/
- this._callback = function () {
- };
- // connections
- this._jsNode.connect(p5.soundOut._silentNode);
- this.setInput();
- // add this p5.SoundFile to the soundArray
- p5sound.soundArray.push(this);
+
+ this._interval = interval || 1;
+ this.musicalTimeMode = typeof this._interval !== 'number';
+ /**
+ * musicalTimeMode variables
+ * modify these only when the interval is specified in musicalTime format as a string
+ */
+
+ this._timeSignature = 4;
+ this._bpm = 60;
+ this.isPlaying = false;
+ /**
+ * Set a limit to the number of loops to play. Defaults to Infinity.
+ * @property {Number} maxIterations
+ */
+
+ this.maxIterations = Infinity;
+ var self = this;
+ this.clock = new Clock({
+ 'callback': function callback(time) {
+ var timeFromNow = time - p5sound.audiocontext.currentTime;
+ /**
+ * Do not initiate the callback if timeFromNow is < 0
+ * This usually occurs for a few milliseconds when the page
+ * is not fully loaded
+ *
+ * The callback should only be called until maxIterations is reached
+ */
+
+ if (timeFromNow > 0 && self.iterations <= self.maxIterations) {
+ self.callback(timeFromNow);
+ }
+ },
+ 'frequency': this._calcFreq()
+ });
};
/**
- * Connect a specific device to the p5.SoundRecorder.
- * If no parameter is given, p5.SoundRecorer will record
- * all audible p5.sound from your sketch.
- *
- * @method setInput
- * @param {Object} [unit] p5.sound object or a web audio unit
- * that outputs sound
+ * Start the loop
+ * @method start
+ * @for p5.SoundLoop
+ * @param {Number} [timeFromNow] schedule a starting time
*/
- p5.SoundRecorder.prototype.setInput = function (unit) {
- this.input.disconnect();
- this.input = null;
- this.input = ac.createGain();
- this.input.connect(this._jsNode);
- this.input.connect(this.output);
- if (unit) {
- unit.connect(this.input);
- } else {
- p5.soundOut.output.connect(this.input);
+
+
+ p5.SoundLoop.prototype.start = function (timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+
+ if (!this.isPlaying) {
+ this.clock.start(now + t);
+ this.isPlaying = true;
+ }
+ };
+ /**
+ * Stop the loop
+ * @method stop
+ * @for p5.SoundLoop
+ * @param {Number} [timeFromNow] schedule a stopping time
+ */
+
+
+ p5.SoundLoop.prototype.stop = function (timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+
+ if (this.isPlaying) {
+ this.clock.stop(now + t);
+ this.isPlaying = false;
+ }
+ };
+ /**
+ * Pause the loop
+ * @method pause
+ * @for p5.SoundLoop
+ * @param {Number} [timeFromNow] schedule a pausing time
+ */
+
+
+ p5.SoundLoop.prototype.pause = function (timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+
+ if (this.isPlaying) {
+ this.clock.pause(now + t);
+ this.isPlaying = false;
}
};
/**
- * Start recording. To access the recording, provide
- * a p5.SoundFile as the first parameter. The p5.SoundRecorder
- * will send its recording to that p5.SoundFile for playback once
- * recording is complete. Optional parameters include duration
- * (in seconds) of the recording, and a callback function that
- * will be called once the complete recording has been
- * transfered to the p5.SoundFile.
+ * Synchronize loops. Use this method to start two or more loops in synchronization,
+ * or to start a loop in synchronization with a loop that is already playing.
+ * This method will schedule the implicit loop in sync with the explicit master loop,
+ * i.e. loopToStart.syncedStart(loopToSyncWith)
*
- * @method record
- * @param {p5.SoundFile} soundFile p5.SoundFile
- * @param {Number} [duration] Time (in seconds)
- * @param {Function} [callback] The name of a function that will be
- * called once the recording completes
+ * @method syncedStart
+ * @for p5.SoundLoop
+ * @param {Object} otherLoop a p5.SoundLoop to sync with
+ * @param {Number} [timeFromNow] Start the loops in sync after timeFromNow seconds
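+ * @example
+ * // a fragment, assuming loop1 and loop2 are existing p5.SoundLoops:
+ * loop1.start();
+ * // later, start loop2 on loop1's next tick so they stay aligned
+ * loop2.syncedStart(loop1);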
*/
- p5.SoundRecorder.prototype.record = function (sFile, duration, callback) {
- this.recording = true;
- if (duration) {
- this.sampleLimit = Math.round(duration * ac.sampleRate);
- }
- if (sFile && callback) {
- this._callback = function () {
- this.buffer = this._getBuffer();
- sFile.setBuffer(this.buffer);
- callback();
- };
- } else if (sFile) {
- this._callback = function () {
- this.buffer = this._getBuffer();
- sFile.setBuffer(this.buffer);
- };
+
+
+ p5.SoundLoop.prototype.syncedStart = function (otherLoop, timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+
+ if (!otherLoop.isPlaying) {
+ otherLoop.clock.start(now + t);
+ otherLoop.isPlaying = true;
+ this.clock.start(now + t);
+ this.isPlaying = true;
+ } else if (otherLoop.isPlaying) {
+ var time = otherLoop.clock._nextTick - p5sound.audiocontext.currentTime;
+ this.clock.start(now + time);
+ this.isPlaying = true;
}
};
/**
- * Stop the recording. Once the recording is stopped,
- * the results will be sent to the p5.SoundFile that
- * was given on .record(), and if a callback function
- * was provided on record, that function will be called.
- *
- * @method stop
+ * Updates frequency value, reflected in next callback
+ * @private
+ * @for p5.SoundLoop
+ * @method _update
*/
- p5.SoundRecorder.prototype.stop = function () {
- this.recording = false;
- this._callback();
- this._clear();
- };
- p5.SoundRecorder.prototype._clear = function () {
- this._leftBuffers = [];
- this._rightBuffers = [];
- this.recordedSamples = 0;
- this.sampleLimit = null;
+
+
+ p5.SoundLoop.prototype._update = function () {
+ this.clock.frequency.value = this._calcFreq();
};
/**
- * internal method called on audio process
- *
- * @private
- * @param {AudioProcessorEvent} event
+ * Calculate the frequency of the clock's callback based on bpm, interval, and time signature
+ * @private
+ * @for p5.SoundLoop
+ * @method _calcFreq
+ * @return {Number} new clock frequency value
*/
- p5.SoundRecorder.prototype._audioprocess = function (event) {
- if (this.recording === false) {
- return;
- } else if (this.recording === true) {
- // if we are past the duration, then stop... else:
- if (this.sampleLimit && this.recordedSamples >= this.sampleLimit) {
- this.stop();
- } else {
- // get channel data
- var left = event.inputBuffer.getChannelData(0);
- var right = event.inputBuffer.getChannelData(1);
- // clone the samples
- this._leftBuffers.push(new Float32Array(left));
- this._rightBuffers.push(new Float32Array(right));
- this.recordedSamples += this.bufferSize;
+
+
+ p5.SoundLoop.prototype._calcFreq = function () {
+ if (typeof this._interval === 'number') {
+ this.musicalTimeMode = false;
+ return 1 / this._interval;
+ }
+ else if (typeof this._interval === 'string') {
+ this.musicalTimeMode = true;
+ return this._bpm / 60 / this._convertNotation(this._interval) * (this._timeSignature / 4);
}
- }
- };
- p5.SoundRecorder.prototype._getBuffer = function () {
- var buffers = [];
- buffers.push(this._mergeBuffers(this._leftBuffers));
- buffers.push(this._mergeBuffers(this._rightBuffers));
- return buffers;
- };
- p5.SoundRecorder.prototype._mergeBuffers = function (channelBuffer) {
- var result = new Float32Array(this.recordedSamples);
- var offset = 0;
- var lng = channelBuffer.length;
- for (var i = 0; i < lng; i++) {
- var buffer = channelBuffer[i];
- result.set(buffer, offset);
- offset += buffer.length;
- }
- return result;
};
- p5.SoundRecorder.prototype.dispose = function () {
- this._clear();
- // remove reference from soundArray
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
- this._callback = function () {
- };
- if (this.input) {
- this.input.disconnect();
+ /**
+ * Convert notation from musical time format to seconds
+ * Uses Tone.Time convention
+ * @private
+ * @for p5.SoundLoop
+ * @method _convertNotation
+ * @param {String} value value to be converted
+ * @return {Number} converted value in seconds
+ */
+
+
+ p5.SoundLoop.prototype._convertNotation = function (value) {
+ var type = value.slice(-1);
+ value = Number(value.slice(0, -1));
+
+ switch (type) {
+ case 'm':
+ return this._measure(value);
+
+ case 'n':
+ return this._note(value);
+
+ default:
+ console.warn('Specified interval is not formatted correctly. See Tone.js ' + 'timing reference for more info: https://github.com/Tonejs/Tone.js/wiki/Time');
}
- this.input = null;
- this._jsNode = null;
};
/**
- * Save a p5.SoundFile as a .wav file. The browser will prompt the user
- * to download the file to their device.
- * For uploading audio to a server, use
- * `p5.SoundFile.saveBlob`.
- *
- * @for p5
- * @method saveSound
- * @param {p5.SoundFile} soundFile p5.SoundFile that you wish to save
- * @param {String} fileName name of the resulting .wav file.
+ * Helper conversion method for measures
+ * @private
+ * @for p5.SoundLoop
+ * @method _measure
*/
- // add to p5.prototype as this is used by the p5 `save()` method.
- p5.prototype.saveSound = function (soundFile, fileName) {
- const dataView = convertToWav(soundFile.buffer);
- p5.prototype.writeFile([dataView], fileName, 'wav');
+
+
+ p5.SoundLoop.prototype._measure = function (value) {
+ return value * this._timeSignature;
};
-}(master, helpers);
-var peakdetect;
-'use strict';
-peakdetect = function () {
/**
- * PeakDetect works in conjunction with p5.FFT to
- * look for onsets in some or all of the frequency spectrum.
- *
- *
- * To use p5.PeakDetect, call update in the draw loop
- * and pass in a p5.FFT object.
- *
- *
- * You can listen for a specific part of the frequency spectrum by
- * setting the range between freq1 and freq2.
- *
- *
- * threshold is the threshold for detecting a peak,
- * scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud
- * as 1.0.
- *
- *
- * The update method is meant to be run in the draw loop, and
- * frames determines how many loops must pass before
- * another peak can be detected.
- * For example, if the frameRate() = 60, you could detect the beat of a
- * 120 beat-per-minute song with this equation:
- * framesPerPeak = 60 / (estimatedBPM / 60 );
- *
- *
- *
- * Based on example contribtued by @b2renger, and a simple beat detection
- * explanation by Felix Turner.
- *
- *
- * @class p5.PeakDetect
- * @constructor
- * @param {Number} [freq1] lowFrequency - defaults to 20Hz
- * @param {Number} [freq2] highFrequency - defaults to 20000 Hz
- * @param {Number} [threshold] Threshold for detecting a beat between 0 and 1
- * scaled logarithmically where 0.1 is 1/2 the loudness
- * of 1.0. Defaults to 0.35.
- * @param {Number} [framesPerPeak] Defaults to 20.
- * @example
- *
- *
- * let cnv, soundFile, fft, peakDetect;
- * let ellipseWidth = 10;
- *
- * function preload() {
- * soundFile = loadSound('assets/beat.mp3');
- * }
- *
- * function setup() {
- * background(0);
- * noStroke();
- * fill(255);
- * textAlign(CENTER);
- *
- * // p5.PeakDetect requires a p5.FFT
- * fft = new p5.FFT();
- * peakDetect = new p5.PeakDetect();
- * }
- *
- * function draw() {
- * background(0);
- * text('click to play/pause', width/2, height/2);
- *
- * // peakDetect accepts an fft post-analysis
- * fft.analyze();
- * peakDetect.update(fft);
- *
- * if ( peakDetect.isDetected ) {
- * ellipseWidth = 50;
- * } else {
- * ellipseWidth *= 0.95;
- * }
- *
- * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
- * }
- *
- * // toggle play/stop when canvas is clicked
- * function mouseClicked() {
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * if (soundFile.isPlaying() ) {
- * soundFile.stop();
- * } else {
- * soundFile.play();
- * }
- * }
- * }
- *
+ * @private
+ * @method _note
+ * @for p5.SoundLoop
*/
- p5.PeakDetect = function (freq1, freq2, threshold, _framesPerPeak) {
- // framesPerPeak determines how often to look for a beat.
- // If a beat is provided, try to look for a beat based on bpm
- this.framesPerPeak = _framesPerPeak || 20;
- this.framesSinceLastPeak = 0;
- this.decayRate = 0.95;
- this.threshold = threshold || 0.35;
- this.cutoff = 0;
- // how much to increase the cutoff
- // TO DO: document this / figure out how to make it accessible
- this.cutoffMult = 1.5;
- this.energy = 0;
- this.penergy = 0;
- // TO DO: document this property / figure out how to make it accessible
- this.currentValue = 0;
- /**
- * isDetected is set to true when a peak is detected.
- *
- * @attribute isDetected {Boolean}
- * @default false
- */
- this.isDetected = false;
- this.f1 = freq1 || 40;
- this.f2 = freq2 || 20000;
- // function to call when a peak is detected
- this._onPeak = function () {
- };
+
+
+ p5.SoundLoop.prototype._note = function (value) {
+ return this._timeSignature / value;
};
/**
- * The update method is run in the draw loop.
- *
- * Accepts an FFT object. You must call .analyze()
- * on the FFT object prior to updating the peakDetect
- * because it relies on a completed FFT analysis.
- *
- * @method update
- * @param {p5.FFT} fftObject A p5.FFT object
+ * Getters and Setters. Setting any parameter will result in a change in the clock's
+ * frequency that will be reflected after the next callback.
+ * beats per minute (defaults to 60)
+ * @property {Number} bpm
+ * @for p5.SoundLoop
+ */
+
+
+ Object.defineProperty(p5.SoundLoop.prototype, 'bpm', {
+ get: function get() {
+ return this._bpm;
+ },
+ set: function set(bpm) {
+ if (!this.musicalTimeMode) {
+ console.warn('Changing the BPM in "seconds" mode has no effect. ' + 'BPM is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
+ }
+
+ this._bpm = bpm;
+
+ this._update();
+ }
+ });
+ /**
+ * number of quarter notes in a measure (defaults to 4)
+ * @property {Number} timeSignature
+ * @for p5.SoundLoop
*/
- p5.PeakDetect.prototype.update = function (fftObject) {
- var nrg = this.energy = fftObject.getEnergy(this.f1, this.f2) / 255;
- if (nrg > this.cutoff && nrg > this.threshold && nrg - this.penergy > 0) {
- // trigger callback
- this._onPeak();
- this.isDetected = true;
- // debounce
- this.cutoff = nrg * this.cutoffMult;
- this.framesSinceLastPeak = 0;
- } else {
- this.isDetected = false;
- if (this.framesSinceLastPeak <= this.framesPerPeak) {
- this.framesSinceLastPeak++;
- } else {
- this.cutoff *= this.decayRate;
- this.cutoff = Math.max(this.cutoff, this.threshold);
+
+ Object.defineProperty(p5.SoundLoop.prototype, 'timeSignature', {
+ get: function get() {
+ return this._timeSignature;
+ },
+ set: function set(timeSig) {
+ if (!this.musicalTimeMode) {
+ console.warn('Changing the timeSignature in "seconds" mode has no effect. ' + 'BPM is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
}
+
+ this._timeSignature = timeSig;
+
+ this._update();
}
- this.currentValue = nrg;
- this.penergy = nrg;
- };
+ });
/**
- * onPeak accepts two arguments: a function to call when
- * a peak is detected. The value of the peak,
- * between 0.0 and 1.0, is passed to the callback.
- *
- * @method onPeak
- * @param {Function} callback Name of a function that will
- * be called when a peak is
- * detected.
- * @param {Object} [val] Optional value to pass
- * into the function when
- * a peak is detected.
- * @example
- *
- * let cnv, soundFile, fft, peakDetect;
- * let ellipseWidth = 0;
- *
- * function preload() {
- * soundFile = loadSound('assets/beat.mp3');
- * }
- *
- * function setup() {
- * cnv = createCanvas(100,100);
- * textAlign(CENTER);
- *
- * fft = new p5.FFT();
- * peakDetect = new p5.PeakDetect();
- *
- * setupSound();
- *
- * // when a beat is detected, call triggerBeat()
- * peakDetect.onPeak(triggerBeat);
- * }
- *
- * function draw() {
- * background(0);
- * fill(255);
- * text('click to play', width/2, height/2);
- *
- * fft.analyze();
- * peakDetect.update(fft);
- *
- * ellipseWidth *= 0.95;
- * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
- * }
- *
- * // this function is called by peakDetect.onPeak
- * function triggerBeat() {
- * ellipseWidth = 50;
- * }
- *
- * // mouseclick starts/stops sound
- * function setupSound() {
- * cnv.mouseClicked( function() {
- * if (soundFile.isPlaying() ) {
- * soundFile.stop();
- * } else {
- * soundFile.play();
- * }
- * });
- * }
- *
+ * length of the loop's interval
+ * @property {Number|String} interval
+ * @for p5.SoundLoop
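+ * @example
+ * // a fragment, assuming mySoundLoop is an existing p5.SoundLoop:
+ * // switch from seconds to musical time, one measure per iteration
+ * mySoundLoop.interval = '1m';
+ * mySoundLoop.bpm = 90; // bpm applies only in musical time mode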
*/
- p5.PeakDetect.prototype.onPeak = function (callback, val) {
- var self = this;
- self._onPeak = function () {
- callback(self.energy, val);
- };
- };
-}();
-var gain;
-'use strict';
-gain = function () {
- var p5sound = master;
+
+ Object.defineProperty(p5.SoundLoop.prototype, 'interval', {
+ get: function get() {
+ return this._interval;
+ },
+ set: function set(interval) {
+ // a numeric interval means seconds; a string ("4n", "1m"...) means musical time
+ this.musicalTimeMode = typeof interval !== 'number';
+ this._interval = interval;
+
+ this._update();
+ }
+ });
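+
+ // Usage sketch: a numeric interval is interpreted as seconds and turns
+ // musicalTimeMode off; a string interval is musical time, so bpm and
+ // timeSignature apply:
+ //
+ //   loop.interval = 0.5;  // fire every half second, regardless of bpm
+ //   loop.interval = '8n'; // fire every eighth note at the current bpm
+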
/**
- * A gain node is usefull to set the relative volume of sound.
- * It's typically used to build mixers.
- *
- * @class p5.Gain
- * @constructor
- * @example
- *
- *
- * // load two soundfile and crossfade beetween them
- * let sound1,sound2;
- * let gain1, gain2, gain3;
- *
- * function preload(){
- * soundFormats('ogg', 'mp3');
- * sound1 = loadSound('assets/Damscray_-_Dancing_Tiger_01');
- * sound2 = loadSound('assets/beat.mp3');
- * }
- *
- * function setup() {
- * createCanvas(400,200);
- *
- * // create a 'master' gain to which we will connect both soundfiles
- * gain3 = new p5.Gain();
- * gain3.connect();
- *
- * // setup first sound for playing
- * sound1.rate(1);
- * sound1.loop();
- * sound1.disconnect(); // diconnect from p5 output
- *
- * gain1 = new p5.Gain(); // setup a gain node
- * gain1.setInput(sound1); // connect the first sound to its input
- * gain1.connect(gain3); // connect its output to the 'master'
- *
- * sound2.rate(1);
- * sound2.disconnect();
- * sound2.loop();
- *
- * gain2 = new p5.Gain();
- * gain2.setInput(sound2);
- * gain2.connect(gain3);
- *
- * }
- *
- * function draw(){
- * background(180);
- *
- * // calculate the horizontal distance beetween the mouse and the right of the screen
- * let d = dist(mouseX,0,width,0);
+ * how many times the callback has been called so far
+ * @property {Number} iterations
+ * @for p5.SoundLoop
+ * @readonly
+ */
+
+ Object.defineProperty(p5.SoundLoop.prototype, 'iterations', {
+ get: function get() {
+ return this.clock.ticks;
+ }
+ });
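+
+ // Usage sketch: iterations reflects the underlying clock's tick count,
+ // e.g. to end a loop after 16 beats:
+ //
+ //   let loop = new p5.SoundLoop(function (time) {
+ //     if (loop.iterations > 16) loop.stop(time);
+ //   }, '4n');
+ //   loop.start();
+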
+ return p5.SoundLoop;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ 'use strict';
+
+ var p5sound = __webpack_require__(1);
+
+ var Effect = __webpack_require__(4);
+
+ var CustomError = __webpack_require__(11);
+ /**
+ * Compressor is an audio effect class that performs dynamics compression
+ * on an audio input source. This is a very commonly used technique in music
+ * and sound production. Compression creates an overall louder, richer,
+ * and fuller sound by lowering the volume of loud sounds and raising that
+ * of soft ones. Compression can be used to avoid clipping (sound distortion
+ * due to peaks in volume) and is especially useful when many sounds are
+ * played at once. Compression can be used on individual sound sources in
+ * addition to the master output.
*
- * // map the horizontal position of the mouse to values useable for volume control of sound1
- * let vol1 = map(mouseX,0,width,0,1);
- * let vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
*
- * gain1.amp(vol1,0.5,0);
- * gain2.amp(vol2,0.5,0);
+ * @class p5.Compressor
+ * @constructor
+ * @extends p5.Effect
*
- * // map the vertical position of the mouse to values useable for 'master volume control'
- * let vol3 = map(mouseY,0,height,0,1);
- * gain3.amp(vol3,0.5,0);
- * }
- *
*
*/
- p5.Gain = function () {
- this.ac = p5sound.audiocontext;
- this.input = this.ac.createGain();
- this.output = this.ac.createGain();
- // otherwise, Safari distorts
- this.input.gain.value = 0.5;
- this.input.connect(this.output);
- // add to the soundArray
- p5sound.soundArray.push(this);
+
+
+ p5.Compressor = function () {
+ Effect.call(this);
+ /**
+ * The p5.Compressor is built with a Web Audio Dynamics Compressor Node
+ *
+ * @property {AudioNode} compressor
+ */
+
+ this.compressor = this.ac.createDynamicsCompressor();
+ this.input.connect(this.compressor);
+ this.compressor.connect(this.wet);
};
+
+ p5.Compressor.prototype = Object.create(Effect.prototype);
/**
- * Connect a source to the gain node.
+ * Performs the same function as .connect, but also accepts
+ * optional parameters to set compressor's audioParams
+ * @method process
+ * @for p5.Compressor
*
- * @method setInput
- * @param {Object} src p5.sound / Web Audio object with a sound
- * output.
- */
- p5.Gain.prototype.setInput = function (src) {
- src.connect(this.input);
- };
- /**
- * Send output to a p5.sound or web audio object
+ * @param {Object} src Sound source to be connected
*
- * @method connect
- * @param {Object} unit
+ * @param {Number} [attack] The amount of time (in seconds) to reduce the gain by 10dB,
+ * default = .003, range 0 - 1
+ * @param {Number} [knee] A decibel value representing the range above the
+ * threshold where the curve smoothly transitions to the "ratio" portion.
+ * default = 30, range 0 - 40
+ * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
+ * default = 12, range 1 - 20
+ * @param {Number} [threshold] The decibel value above which the compression will start taking effect
+ * default = -24, range -100 - 0
+ * @param {Number} [release] The amount of time (in seconds) to increase the gain by 10dB
+ * default = .25, range 0 - 1
*/
- p5.Gain.prototype.connect = function (unit) {
- var u = unit || p5.soundOut.input;
- this.output.connect(u.input ? u.input : u);
+
+ p5.Compressor.prototype.process = function (src, attack, knee, ratio, threshold, release) {
+ src.connect(this.input);
+ this.set(attack, knee, ratio, threshold, release);
};
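+
+ // Usage sketch (`mySound` is a hypothetical p5.SoundFile): connect a source
+ // and set all five parameters in one call; trailing arguments may be
+ // omitted to keep the Web Audio defaults:
+ //
+ //   let compressor = new p5.Compressor();
+ //   mySound.disconnect(); // route through the compressor instead of p5 out
+ //   compressor.process(mySound, 0.005, 30, 12, -40, 0.25);
+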
/**
- * Disconnect all output.
- *
- * @method disconnect
+ * Set the parameters of the compressor.
+ * @method set
+ * @for p5.Compressor
+ * @param {Number} attack The amount of time (in seconds) to reduce the gain by 10dB,
+ * default = .003, range 0 - 1
+ * @param {Number} knee A decibel value representing the range above the
+ * threshold where the curve smoothly transitions to the "ratio" portion.
+ * default = 30, range 0 - 40
+ * @param {Number} ratio The amount of dB change in input for a 1 dB change in output
+ * default = 12, range 1 - 20
+ * @param {Number} threshold The decibel value above which the compression will start taking effect
+ * default = -24, range -100 - 0
+ * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
+ * default = .25, range 0 - 1
*/
- p5.Gain.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
+
+
+ p5.Compressor.prototype.set = function (attack, knee, ratio, threshold, release) {
+ if (typeof attack !== 'undefined') {
+ this.attack(attack);
}
- };
- /**
- * Set the output level of the gain node.
- *
- * @method amp
- * @param {Number} volume amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts rampTime
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- */
- p5.Gain.prototype.amp = function (vol, rampTime, tFromNow) {
- var rampTime = rampTime || 0;
- var tFromNow = tFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- var currentVol = this.output.gain.value;
- this.output.gain.cancelScheduledValues(now);
- this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
- this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
- };
- p5.Gain.prototype.dispose = function () {
- // remove reference from soundArray
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
- if (this.output) {
- this.output.disconnect();
- delete this.output;
+
+ if (typeof knee !== 'undefined') {
+ this.knee(knee);
}
- if (this.input) {
- this.input.disconnect();
- delete this.input;
+
+ if (typeof ratio !== 'undefined') {
+ this.ratio(ratio);
+ }
+
+ if (typeof threshold !== 'undefined') {
+ this.threshold(threshold);
+ }
+
+ if (typeof release !== 'undefined') {
+ this.release(release);
}
};
-}(master);
-var audioVoice;
-'use strict';
-audioVoice = function () {
- var p5sound = master;
/**
- * Base class for monophonic synthesizers. Any extensions of this class
- * should follow the API and implement the methods below in order to
- * remain compatible with p5.PolySynth();
+ * Get current attack or set value w/ time ramp
*
- * @class p5.AudioVoice
- * @constructor
+ *
+ * @method attack
+ * @for p5.Compressor
+ * @param {Number} [attack] Attack is the amount of time (in seconds) to reduce the gain by 10dB,
+ * default = .003, range 0 - 1
+ * @param {Number} [time] Assign time value to schedule the change in value
*/
- p5.AudioVoice = function () {
- this.ac = p5sound.audiocontext;
- this.output = this.ac.createGain();
- this.connect();
- p5sound.soundArray.push(this);
- };
- p5.AudioVoice.prototype.play = function (note, velocity, secondsFromNow, sustime) {
- };
- p5.AudioVoice.prototype.triggerAttack = function (note, velocity, secondsFromNow) {
- };
- p5.AudioVoice.prototype.triggerRelease = function (secondsFromNow) {
+
+
+ p5.Compressor.prototype.attack = function (attack, time) {
+ var t = time || 0;
+
+ if (typeof attack === 'number') {
+ this.compressor.attack.value = attack;
+ this.compressor.attack.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.attack.linearRampToValueAtTime(attack, this.ac.currentTime + 0.02 + t);
+ } else if (typeof attack !== 'undefined') {
+ attack.connect(this.compressor.attack);
+ }
+
+ return this.compressor.attack.value;
};
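+
+ // Usage sketch: call with no arguments to read the current value, or with a
+ // number (plus an optional time offset in seconds) to ramp to a new one.
+ // knee(), ratio(), threshold() and release() below follow the same pattern:
+ //
+ //   compressor.attack();         // -> 0.003, the Web Audio default
+ //   compressor.attack(0.1, 0.5); // ramp to 0.1 starting ~0.5s from now
+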
- p5.AudioVoice.prototype.amp = function (vol, rampTime) {
+ /**
+ * Get current knee or set value w/ time ramp
+ *
+ * @method knee
+ * @for p5.Compressor
+ * @param {Number} [knee] A decibel value representing the range above the
+ * threshold where the curve smoothly transitions to the "ratio" portion.
+ * default = 30, range 0 - 40
+ * @param {Number} [time] Assign time value to schedule the change in value
+ */
+
+
+ p5.Compressor.prototype.knee = function (knee, time) {
+ var t = time || 0;
+
+ if (typeof knee === 'number') {
+ this.compressor.knee.value = knee;
+ this.compressor.knee.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.knee.linearRampToValueAtTime(knee, this.ac.currentTime + 0.02 + t);
+ } else if (typeof knee !== 'undefined') {
+ knee.connect(this.compressor.knee);
+ }
+
+ return this.compressor.knee.value;
};
/**
- * Connect to p5 objects or Web Audio Nodes
- * @method connect
- * @param {Object} unit
+ * Get current ratio or set value w/ time ramp
+ * @method ratio
+ * @for p5.Compressor
+ * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
+ * default = 12, range 1 - 20
+ * @param {Number} [time] Assign time value to schedule the change in value
*/
- p5.AudioVoice.prototype.connect = function (unit) {
- var u = unit || p5sound.input;
- this.output.connect(u.input ? u.input : u);
+
+
+ p5.Compressor.prototype.ratio = function (ratio, time) {
+ var t = time || 0;
+
+ if (typeof ratio === 'number') {
+ this.compressor.ratio.value = ratio;
+ this.compressor.ratio.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.ratio.linearRampToValueAtTime(ratio, this.ac.currentTime + 0.02 + t);
+ } else if (typeof ratio !== 'undefined') {
+ ratio.connect(this.compressor.ratio);
+ }
+
+ return this.compressor.ratio.value;
};
/**
- * Disconnect from soundOut
- * @method disconnect
+ * Get current threshold or set value w/ time ramp
+ * @method threshold
+ * @for p5.Compressor
+ * @param {Number} threshold The decibel value above which the compression will start taking effect
+ * default = -24, range -100 - 0
+ * @param {Number} [time] Assign time value to schedule the change in value
*/
- p5.AudioVoice.prototype.disconnect = function () {
- this.output.disconnect();
- };
- p5.AudioVoice.prototype.dispose = function () {
- if (this.output) {
- this.output.disconnect();
- delete this.output;
+
+
+ p5.Compressor.prototype.threshold = function (threshold, time) {
+ var t = time || 0;
+
+ if (typeof threshold === 'number') {
+ this.compressor.threshold.value = threshold;
+ this.compressor.threshold.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.threshold.linearRampToValueAtTime(threshold, this.ac.currentTime + 0.02 + t);
+ } else if (typeof threshold !== 'undefined') {
+ threshold.connect(this.compressor.threshold);
}
+
+ return this.compressor.threshold.value;
};
- return p5.AudioVoice;
-}(master);
-var monosynth;
-'use strict';
-monosynth = function () {
- var p5sound = master;
- var AudioVoice = audioVoice;
- var noteToFreq = helpers.noteToFreq;
- var DEFAULT_SUSTAIN = 0.15;
/**
- * A MonoSynth is used as a single voice for sound synthesis.
- * This is a class to be used in conjunction with the PolySynth
- * class. Custom synthetisers should be built inheriting from
- * this class.
- *
- * @class p5.MonoSynth
- * @constructor
- * @example
- *
- * let monoSynth;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
- *
- * monoSynth = new p5.MonoSynth();
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- * }
- *
- * function playSynth() {
- * // time from now (in seconds)
- * let time = 0;
- * // note duration (in seconds)
- * let dur = 0.25;
- * // velocity (volume, from 0 to 1)
- * let v = 0.2;
- *
- * monoSynth.play("G3", v, time, dur);
- * monoSynth.play("C4", v, time += dur, dur);
- *
- * background(random(255), random(255), 255);
- * text('click to play', width/2, height/2);
- * }
- *
- **/
- p5.MonoSynth = function () {
- AudioVoice.call(this);
- this.oscillator = new p5.Oscillator();
- this.env = new p5.Envelope();
- this.env.setRange(1, 0);
- this.env.setExp(true);
- //set params
- this.setADSR(0.02, 0.25, 0.05, 0.35);
- // oscillator --> env --> this.output (gain) --> p5.soundOut
- this.oscillator.disconnect();
- this.oscillator.connect(this.output);
- this.env.disconnect();
- this.env.setInput(this.output.gain);
- // reset oscillator gain to 1.0
- this.oscillator.output.gain.value = 1;
- this.oscillator.start();
- this.connect();
- p5sound.soundArray.push(this);
+ * Get current release or set value w/ time ramp
+ * @method release
+ * @for p5.Compressor
+ * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
+ * default = .25, range 0 - 1
+ *
+ * @param {Number} [time] Assign time value to schedule the change in value
+ */
+
+
+ p5.Compressor.prototype.release = function (release, time) {
+ var t = time || 0;
+
+ if (typeof release === 'number') {
+ this.compressor.release.value = release;
+ this.compressor.release.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.release.linearRampToValueAtTime(release, this.ac.currentTime + 0.02 + t);
+ } else if (typeof release !== 'undefined') {
+ release.connect(this.compressor.release);
+ }
+
+ return this.compressor.release.value;
};
- p5.MonoSynth.prototype = Object.create(p5.AudioVoice.prototype);
/**
- * Play tells the MonoSynth to start playing a note. This method schedules
- * the calling of .triggerAttack and .triggerRelease.
- *
- * @method play
- * @param {String | Number} note the note you want to play, specified as a
- * frequency in Hertz (Number) or as a midi
- * value in Note/Octave format ("C4", "Eb3"...etc")
- * See
- * Tone. Defaults to 440 hz.
- * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
- * @param {Number} [secondsFromNow] time from now (in seconds) at which to play
- * @param {Number} [sustainTime] time to sustain before releasing the envelope
- * @example
- *
- * let monoSynth;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
- *
- * monoSynth = new p5.MonoSynth();
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- * }
- *
- * function playSynth() {
- * // time from now (in seconds)
- * let time = 0;
- * // note duration (in seconds)
- * let dur = 1/6;
- * // note velocity (volume, from 0 to 1)
- * let v = random();
- *
- * monoSynth.play("Fb3", v, 0, dur);
- * monoSynth.play("Gb3", v, time += dur, dur);
- *
- * background(random(255), random(255), 255);
- * text('click to play', width/2, height/2);
- * }
- *
- *
- */
- p5.MonoSynth.prototype.play = function (note, velocity, secondsFromNow, susTime) {
- this.triggerAttack(note, velocity, ~~secondsFromNow);
- this.triggerRelease(~~secondsFromNow + (susTime || DEFAULT_SUSTAIN));
+ * Return the current reduction value
+ *
+ * @method reduction
+ * @for p5.Compressor
+ * @return {Number} Value of the amount of gain reduction that is applied to the signal
+ */
+
+
+ p5.Compressor.prototype.reduction = function () {
+ return this.compressor.reduction.value;
+ };
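+
+ // Usage sketch: reduction() is read-only metering; poll it in draw() to see
+ // how much gain reduction (in dB, <= 0) is currently being applied:
+ //
+ //   function draw() {
+ //     let gr = compressor.reduction(); // e.g. -6 means 6 dB of reduction
+ //   }
+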
+
+ p5.Compressor.prototype.dispose = function () {
+ Effect.prototype.dispose.apply(this);
+
+ if (this.compressor) {
+ this.compressor.disconnect();
+ delete this.compressor;
+ }
};
+
+ return p5.Compressor;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+ }),
+ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var _require = __webpack_require__(6),
+ convertToWav = _require.convertToWav,
+ safeBufferSize = _require.safeBufferSize;
+
+ var processorNames = __webpack_require__(10);
+
+ var ac = p5sound.audiocontext;
/**
- * Trigger the Attack, and Decay portion of the Envelope.
- * Similar to holding down a key on a piano, but it will
- * hold the sustain level until you let go.
+ * Record sounds for playback and/or to save as a .wav file.
+ * The p5.SoundRecorder records all sound output from your sketch,
+ * or can be assigned a specific source with setInput().
+ * The record() method accepts a p5.SoundFile as a parameter.
+ * When recording is stopped (either after the given amount of time,
+ * or with the stop() method), the p5.SoundRecorder will send its
+ * recording to that p5.SoundFile for playback.
*
- * @param {String | Number} note the note you want to play, specified as a
- * frequency in Hertz (Number) or as a midi
- * value in Note/Octave format ("C4", "Eb3"...etc")
- * See
- * Tone. Defaults to 440 hz
- * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
- * @param {Number} [secondsFromNow] time from now (in seconds) at which to play
- * @method triggerAttack
+ * @class p5.SoundRecorder
+ * @constructor
* @example
*
- * let monoSynth = new p5.MonoSynth();
+ * let mic, recorder, soundFile;
+ * let state = 0;
*
- * function mousePressed() {
- * monoSynth.triggerAttack("E3");
- * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(canvasPressed);
+ * background(220);
+ * textAlign(CENTER, CENTER);
*
- * function mouseReleased() {
- * monoSynth.triggerRelease();
+ * // create an audio in
+ * mic = new p5.AudioIn();
+ *
+ * // prompts user to enable their browser mic
+ * mic.start();
+ *
+ * // create a sound recorder
+ * recorder = new p5.SoundRecorder();
+ *
+ * // connect the mic to the recorder
+ * recorder.setInput(mic);
+ *
+ * // this sound file will be used to
+ * // playback & save the recording
+ * soundFile = new p5.SoundFile();
+ *
+ * text('tap to record', width/2, height/2);
* }
- *
- */
- p5.MonoSynth.prototype.triggerAttack = function (note, velocity, secondsFromNow) {
- var secondsFromNow = ~~secondsFromNow;
- var freq = noteToFreq(note);
- var vel = velocity || 0.1;
- this.oscillator.freq(freq, 0, secondsFromNow);
- this.env.ramp(this.output.gain, secondsFromNow, vel);
- };
- /**
- * Trigger the release of the Envelope. This is similar to releasing
- * the key on a piano and letting the sound fade according to the
- * release level and release time.
*
- * @param {Number} secondsFromNow time to trigger the release
- * @method triggerRelease
- * @example
- *
- * let monoSynth = new p5.MonoSynth();
+ * function canvasPressed() {
+ * // ensure audio is enabled
+ * userStartAudio();
+ *
+ * // make sure user enabled the mic
+ * if (state === 0 && mic.enabled) {
+ *
+ * // record to our p5.SoundFile
+ * recorder.record(soundFile);
+ *
+ * background(255,0,0);
+ * text('Recording!', width/2, height/2);
+ * state++;
+ * }
+ * else if (state === 1) {
+ * background(0,255,0);
*
- * function mousePressed() {
- * monoSynth.triggerAttack("E3");
- * }
+ * // stop recorder and
+ * // send result to soundFile
+ * recorder.stop();
*
- * function mouseReleased() {
- * monoSynth.triggerRelease();
- * }
- *
- */
- p5.MonoSynth.prototype.triggerRelease = function (secondsFromNow) {
- var secondsFromNow = secondsFromNow || 0;
- this.env.ramp(this.output.gain, secondsFromNow, 0);
- };
- /**
- * Set values like a traditional
- *
- * ADSR envelope
- * .
+ * text('Done! Tap to play and download', width/2, height/2, width - 20);
+ * state++;
+ * }
*
- * @method setADSR
- * @param {Number} attackTime Time (in seconds before envelope
- * reaches Attack Level
- * @param {Number} [decayTime] Time (in seconds) before envelope
- * reaches Decay/Sustain Level
- * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
- * where 1.0 = attackLevel, 0.0 = releaseLevel.
- * The susRatio determines the decayLevel and the level at which the
- * sustain portion of the envelope will sustain.
- * For example, if attackLevel is 0.4, releaseLevel is 0,
- * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
- * increased to 1.0 (using setRange),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- */
- p5.MonoSynth.prototype.setADSR = function (attack, decay, sustain, release) {
- this.env.setADSR(attack, decay, sustain, release);
- };
- /**
- * Getters and Setters
- * @property {Number} attack
- */
- /**
- * @property {Number} decay
- */
- /**
- * @property {Number} sustain
- */
- /**
- * @property {Number} release
+ * else if (state === 2) {
+ * soundFile.play(); // play the result!
+ * save(soundFile, 'mySound.wav');
+ * state++;
+ * }
+ * }
+ *
*/
- Object.defineProperties(p5.MonoSynth.prototype, {
- 'attack': {
- get: function () {
- return this.env.aTime;
- },
- set: function (attack) {
- this.env.setADSR(attack, this.env.dTime, this.env.sPercent, this.env.rTime);
- }
- },
- 'decay': {
- get: function () {
- return this.env.dTime;
- },
- set: function (decay) {
- this.env.setADSR(this.env.aTime, decay, this.env.sPercent, this.env.rTime);
- }
- },
- 'sustain': {
- get: function () {
- return this.env.sPercent;
- },
- set: function (sustain) {
- this.env.setADSR(this.env.aTime, this.env.dTime, sustain, this.env.rTime);
+
+ p5.SoundRecorder = function () {
+ this.input = ac.createGain();
+ this.output = ac.createGain();
+ this._inputChannels = 2;
+ this._outputChannels = 2;
+
+ var workletBufferSize = safeBufferSize(1024);
+ this._workletNode = new AudioWorkletNode(ac, processorNames.recorderProcessor, {
+ outputChannelCount: [this._outputChannels],
+ processorOptions: {
+ numInputChannels: this._inputChannels,
+ bufferSize: workletBufferSize
}
- },
- 'release': {
- get: function () {
- return this.env.rTime;
- },
- set: function (release) {
- this.env.setADSR(this.env.aTime, this.env.dTime, this.env.sPercent, release);
+ });
+
+ this._workletNode.port.onmessage = function (event) {
+ if (event.data.name === 'buffers') {
+ var buffers = [new Float32Array(event.data.leftBuffer), new Float32Array(event.data.rightBuffer)];
+
+ this._callback(buffers);
}
- }
- });
- /**
- * MonoSynth amp
- * @method amp
- * @param {Number} vol desired volume
- * @param {Number} [rampTime] Time to reach new volume
- * @return {Number} new volume value
- */
- p5.MonoSynth.prototype.amp = function (vol, rampTime) {
- var t = rampTime || 0;
- if (typeof vol !== 'undefined') {
- this.oscillator.amp(vol, t);
- }
- return this.oscillator.amp().value;
+ }.bind(this);
+ /**
+ * callback invoked when the recording is over
+ * @private
+ * @type Function(Float32Array)
+ */
+
+
+ this._callback = function () {};
+
+
+ this._workletNode.connect(p5.soundOut._silentNode);
+
+ this.setInput();
+
+ p5sound.soundArray.push(this);
};
/**
- * Connect to a p5.sound / Web Audio object.
+ * Connect a specific device to the p5.SoundRecorder.
+ * If no parameter is given, the p5.SoundRecorder will record
+ * all audible p5.sound from your sketch.
*
- * @method connect
- * @param {Object} unit A p5.sound or Web Audio object
+ * @method setInput
+ * @for p5.SoundRecorder
+ * @param {Object} [unit] p5.sound object or a web audio unit
+ * that outputs sound
*/
- p5.MonoSynth.prototype.connect = function (unit) {
- var u = unit || p5sound.input;
- this.output.connect(u.input ? u.input : u);
+
+
+ p5.SoundRecorder.prototype.setInput = function (unit) {
+ this.input.disconnect();
+ this.input = null;
+ this.input = ac.createGain();
+ this.input.connect(this._workletNode);
+ this.input.connect(this.output);
+
+ if (unit) {
+ unit.connect(this.input);
+ } else {
+ p5.soundOut.output.connect(this.input);
+ }
};
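+
+ // Usage sketch: record a single source (here a mic) instead of the whole
+ // sketch output; calling setInput() with no argument restores the default:
+ //
+ //   let mic = new p5.AudioIn();
+ //   mic.start(); // prompts the user for mic access
+ //   let recorder = new p5.SoundRecorder();
+ //   recorder.setInput(mic);
+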
/**
- * Disconnect all outputs
+ * Start recording. To access the recording, provide
+ * a p5.SoundFile as the first parameter. The p5.SoundRecorder
+ * will send its recording to that p5.SoundFile for playback once
+ * recording is complete. Optional parameters include duration
+ * (in seconds) of the recording, and a callback function that
+ * will be called once the complete recording has been
+ * transferred to the p5.SoundFile.
*
- * @method disconnect
+ * @method record
+ * @for p5.SoundRecorder
+ * @param {p5.SoundFile} soundFile p5.SoundFile
+ * @param {Number} [duration] Time (in seconds)
+ * @param {Function} [callback] The name of a function that will be
+ * called once the recording completes
*/
- p5.MonoSynth.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
+
+
+ p5.SoundRecorder.prototype.record = function (sFile, duration, callback) {
+ this._workletNode.port.postMessage({
+ name: 'start',
+ duration: duration
+ });
+
+ if (sFile && callback) {
+ this._callback = function (buffer) {
+ sFile.setBuffer(buffer);
+ callback();
+ };
+ } else if (sFile) {
+ this._callback = function (buffer) {
+ sFile.setBuffer(buffer);
+ };
}
};
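+
+ // Usage sketch: record for a fixed duration and get notified once the
+ // buffer has been transferred to the p5.SoundFile:
+ //
+ //   let soundFile = new p5.SoundFile();
+ //   recorder.record(soundFile, 5, function () {
+ //     soundFile.play(); // play back the 5-second recording
+ //   });
+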
/**
- * Get rid of the MonoSynth and free up its resources / memory.
+ * Stop the recording. Once the recording is stopped,
+ * the results will be sent to the p5.SoundFile that
+ * was given on .record(), and if a callback function
+ * was provided on record, that function will be called.
*
- * @method dispose
+ * @method stop
+ * @for p5.SoundRecorder
*/
- p5.MonoSynth.prototype.dispose = function () {
- AudioVoice.prototype.dispose.apply(this);
- if (this.env) {
- this.env.dispose();
- }
- if (this.oscillator) {
- this.oscillator.dispose();
- }
+
+
+ p5.SoundRecorder.prototype.stop = function () {
+ this._workletNode.port.postMessage({
+ name: 'stop'
+ });
};
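+
+ // Usage sketch: without a duration, recording runs until stop() is called:
+ //
+ //   recorder.record(soundFile);
+ //   // ... later, e.g. in mousePressed():
+ //   recorder.stop(); // results are sent to soundFile
+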
-}(master, audioVoice, helpers);
-var polysynth;
-'use strict';
-polysynth = function () {
- var p5sound = master;
- var TimelineSignal = Tone_signal_TimelineSignal;
- var noteToFreq = helpers.noteToFreq;
- /**
- * An AudioVoice is used as a single voice for sound synthesis.
- * The PolySynth class holds an array of AudioVoice, and deals
- * with voices allocations, with setting notes to be played, and
- * parameters to be set.
- *
- * @class p5.PolySynth
- * @constructor
- *
- * @param {Number} [synthVoice] A monophonic synth voice inheriting
- * the AudioVoice class. Defaults to p5.MonoSynth
- * @param {Number} [maxVoices] Number of voices, defaults to 8;
- * @example
- *
- * let polySynth;
- *
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
- *
- * polySynth = new p5.PolySynth();
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- * }
- *
- * function playSynth() {
- * // note duration (in seconds)
- * let dur = 1.5;
- *
- * // time from now (in seconds)
- * let time = 0;
- *
- * // velocity (volume, from 0 to 1)
- * let vel = 0.1;
- *
- * // notes can overlap with each other
- * polySynth.play("G2", vel, 0, dur);
- * polySynth.play("C3", vel, time += 1/3, dur);
- * polySynth.play("G3", vel, time += 1/3, dur);
- *
- * background(random(255), random(255), 255);
- * text('click to play', width/2, height/2);
- * }
- *
PeakDetect works in conjunction with p5.FFT to + * look for onsets in some or all of the frequency spectrum. + *
+ *
+ * To use p5.PeakDetect, call update
in the draw loop
+ * and pass in a p5.FFT object.
+ *
+ * You can listen for a specific part of the frequency spectrum by
+ * setting the range between freq1
and freq2
.
+ *
threshold
is the threshold for detecting a peak,
+ * scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud
+ * as 1.0.
+ * The update method is meant to be run in the draw loop, and
+ * frames determines how many loops must pass before
+ * another peak can be detected.
+ * For example, if the frameRate() = 60, you could detect the beat of a
+ * 120 beat-per-minute song with this equation:
+ * framesPerPeak = 60 / (estimatedBPM / 60 );
+ *
+ * Based on example contribtued by @b2renger, and a simple beat detection + * explanation by Felix Turner. + *
+ * + * @class p5.PeakDetect + * @constructor + * @param {Number} [freq1] lowFrequency - defaults to 20Hz + * @param {Number} [freq2] highFrequency - defaults to 20000 Hz + * @param {Number} [threshold] Threshold for detecting a beat between 0 and 1 + * scaled logarithmically where 0.1 is 1/2 the loudness + * of 1.0. Defaults to 0.35. + * @param {Number} [framesPerPeak] Defaults to 20. * @example *
- * let polySynth;
*
- * function setup() {
- * let cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
+ * var cnv, soundFile, fft, peakDetect;
+ * var ellipseWidth = 10;
*
- * polySynth = new p5.PolySynth();
+ * function preload() {
+ * soundFile = loadSound('assets/beat.mp3');
+ * }
*
+ * function setup() {
+ * background(0);
+ * noStroke();
+ * fill(255);
* textAlign(CENTER);
- * text('click to play', width/2, height/2);
+ *
+ * // p5.PeakDetect requires a p5.FFT
+ * fft = new p5.FFT();
+ * peakDetect = new p5.PeakDetect();
* }
*
- * function playSynth() {
- * // note duration (in seconds)
- * let dur = 0.1;
+ * function draw() {
+ * background(0);
+ * text('click to play/pause', width/2, height/2);
*
- * // time from now (in seconds)
- * let time = 0;
+ * // peakDetect accepts an fft post-analysis
+ * fft.analyze();
+ * peakDetect.update(fft);
*
- * // velocity (volume, from 0 to 1)
- * let vel = 0.1;
+ * if ( peakDetect.isDetected ) {
+ * ellipseWidth = 50;
+ * } else {
+ * ellipseWidth *= 0.95;
+ * }
*
- * polySynth.play("G2", vel, 0, dur);
- * polySynth.play("C3", vel, 0, dur);
- * polySynth.play("G3", vel, 0, dur);
+ * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
+ * }
*
- * background(random(255), random(255), 255);
- * text('click to play', width/2, height/2);
+ * // toggle play/stop when canvas is clicked
+ * function mouseClicked() {
+ * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
+ * if (soundFile.isPlaying() ) {
+ * soundFile.stop();
+ * } else {
+ * soundFile.play();
+ * }
+ * }
* }
*
- * increased to 1.0 (using setRange),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- **/
- p5.PolySynth.prototype.noteADSR = function (note, a, d, s, r, timeFromNow) {
- var now = p5sound.audiocontext.currentTime;
- var timeFromNow = timeFromNow || 0;
- var t = now + timeFromNow;
- this.audiovoices[this.notes[note].getValueAtTime(t)].setADSR(a, d, s, r);
- };
- /**
- * Set the PolySynths global envelope. This method modifies the envelopes of each
- * monosynth so that all notes are played with this envelope.
+ * Accepts an FFT object. You must call .analyze()
+ * on the FFT object prior to updating the peakDetect
+ * because it relies on a completed FFT analysis.
*
- * @method setADSR
- * @param {Number} [attackTime] Time (in seconds before envelope
- * reaches Attack Level
- * @param {Number} [decayTime] Time (in seconds) before envelope
- * reaches Decay/Sustain Level
- * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
- * where 1.0 = attackLevel, 0.0 = releaseLevel.
- * The susRatio determines the decayLevel and the level at which the
- * sustain portion of the envelope will sustain.
- * For example, if attackLevel is 0.4, releaseLevel is 0,
- * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
- * increased to 1.0 (using setRange),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- **/
- p5.PolySynth.prototype.setADSR = function (a, d, s, r) {
- this.audiovoices.forEach(function (voice) {
- voice.setADSR(a, d, s, r);
- });
+ * @method update
+ * @param {p5.FFT} fftObject A p5.FFT object
+ */
+
+
+ p5.PeakDetect.prototype.update = function (fftObject) {
+ var nrg = this.energy = fftObject.getEnergy(this.f1, this.f2) / 255;
+
+ if (nrg > this.cutoff && nrg > this.threshold && nrg - this.penergy > 0) {
+ // trigger the onPeak callback
+ this._onPeak();
+
+ this.isDetected = true;
+
+ // debounce: raise the cutoff so the same peak is not re-triggered
+ this.cutoff = nrg * this.cutoffMult;
+ this.framesSinceLastPeak = 0;
+ } else {
+ this.isDetected = false;
+
+ if (this.framesSinceLastPeak <= this.framesPerPeak) {
+ this.framesSinceLastPeak++;
+ } else {
+ this.cutoff *= this.decayRate;
+ this.cutoff = Math.max(this.cutoff, this.threshold);
+ }
+ }
+
+ this.currentValue = nrg;
+ this.penergy = nrg;
};
/**
- * Trigger the Attack, and Decay portion of a MonoSynth.
- * Similar to holding down a key on a piano, but it will
- * hold the sustain level until you let go.
+ * onPeak accepts two arguments: a function to call when
+ * a peak is detected. The value of the peak,
+ * between 0.0 and 1.0, is passed to the callback.
*
- * @method noteAttack
- * @param {Number} [note] midi note on which attack should be triggered.
- * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)/
- * @param {Number} [secondsFromNow] time from now (in seconds)
+ * @method onPeak
+ * @param {Function} callback Name of a function that will
+ * be called when a peak is
+ * detected.
+ * @param {Object} [val] Optional value to pass
+ * into the function when
+ * a peak is detected.
* @example
*
- * let polySynth = new p5.PolySynth();
- * let pitches = ["G", "D", "G", "C"];
- * let octaves = [2, 3, 4];
+ * var cnv, soundFile, fft, peakDetect;
+ * var ellipseWidth = 0;
+ *
+ * function preload() {
+ * soundFile = loadSound('assets/beat.mp3');
+ * }
+ *
+ * function setup() {
+ * cnv = createCanvas(100,100);
+ * textAlign(CENTER);
+ *
+ * fft = new p5.FFT();
+ * peakDetect = new p5.PeakDetect();
+ *
+ * setupSound();
+ *
+ * // when a beat is detected, call triggerBeat()
+ * peakDetect.onPeak(triggerBeat);
+ * }
+ *
+ * function draw() {
+ * background(0);
+ * fill(255);
+ * text('click to play', width/2, height/2);
+ *
+ * fft.analyze();
+ * peakDetect.update(fft);
*
- * function mousePressed() {
- * // play a chord: multiple notes at the same time
- * for (let i = 0; i < 4; i++) {
- * let note = random(pitches) + random(octaves);
- * polySynth.noteAttack(note, 0.1);
- * }
+ * ellipseWidth *= 0.95;
+ * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
* }
*
- * function mouseReleased() {
- * // release all voices
- * polySynth.noteRelease();
+ * // this function is called by peakDetect.onPeak
+ * function triggerBeat() {
+ * ellipseWidth = 50;
* }
- *
- * let pitches = ["G", "D", "G", "C"];
- * let octaves = [2, 3, 4];
- * let polySynth = new p5.PolySynth();
*
- * function mousePressed() {
- * // play a chord: multiple notes at the same time
- * for (let i = 0; i < 4; i++) {
- * let note = random(pitches) + random(octaves);
- * polySynth.noteAttack(note, 0.1);
- * }
 * // load two soundfiles and crossfade between them
+ * let sound1,sound2;
+ * let sound1Gain, sound2Gain, masterGain;
+ * function preload(){
+ * soundFormats('ogg', 'mp3');
+ * sound1 = loadSound('assets/Damscray_-_Dancing_Tiger_01');
+ * sound2 = loadSound('assets/beat');
+ * }
+ * function setup() {
+ * let cnv = createCanvas(100, 100);
+ * cnv.mousePressed(startSound);
+ * // create a 'master' gain to which we will connect both soundfiles
+ * masterGain = new p5.Gain();
+ * masterGain.connect();
 * sound1.disconnect(); // disconnect from p5 output
+ * sound1Gain = new p5.Gain(); // setup a gain node
+ * sound1Gain.setInput(sound1); // connect the first sound to its input
+ * sound1Gain.connect(masterGain); // connect its output to the 'master'
+ * sound2.disconnect();
+ * sound2Gain = new p5.Gain();
+ * sound2Gain.setInput(sound2);
+ * sound2Gain.connect(masterGain);
+ * }
+ * function startSound() {
+ * sound1.loop();
+ * sound2.loop();
+ * loop();
* }
- *
* function mouseReleased() {
- * // release all voices
- * polySynth.noteRelease();
+ * sound1.stop();
+ * sound2.stop();
* }
- *