diff --git a/src/app.js b/src/app.js index af09bbd2..daf13be6 100644 --- a/src/app.js +++ b/src/app.js @@ -1,6 +1,10 @@ import 'audioworklet-polyfill'; import './shims'; -import './audiocontext'; + +import { getAudioContext, userStartAudio } from './audiocontext'; +p5.prototype.getAudioContext = getAudioContext; +p5.prototype.userStartAudio = userStartAudio; + import './master'; import './helpers'; import './errorHandler'; @@ -19,7 +23,10 @@ import './oscillator'; import './envelope'; import './pulse'; import './noise'; -import './audioin'; + +import AudioIn from './audioin'; +p5.AudioIn = AudioIn; + import './filter'; import './eq'; import './panner3d'; @@ -33,9 +40,12 @@ import './compressor'; import './soundRecorder'; import './peakDetect'; import './gain'; -import './monosynth'; -import './polysynth'; import './distortion'; -import './audioVoice'; -import './monosynth'; + +import AudioVoice from './audioVoice'; +p5.AudioVoice = AudioVoice; + +import MonoSynth from './monosynth'; +p5.MonoSynth = MonoSynth; + import './polysynth'; diff --git a/src/audioVoice.js b/src/audioVoice.js index f4279d04..242b4fc1 100644 --- a/src/audioVoice.js +++ b/src/audioVoice.js @@ -8,55 +8,47 @@ import p5sound from './master'; * @class p5.AudioVoice * @constructor */ -p5.AudioVoice = function () { - this.ac = p5sound.audiocontext; - this.output = this.ac.createGain(); - this.connect(); - p5sound.soundArray.push(this); -}; - -p5.AudioVoice.prototype.play = function ( - note, - velocity, - secondsFromNow, - sustime -) {}; - -p5.AudioVoice.prototype.triggerAttack = function ( - note, - velocity, - secondsFromNow -) {}; - -p5.AudioVoice.prototype.triggerRelease = function (secondsFromNow) {}; - -p5.AudioVoice.prototype.amp = function (vol, rampTime) {}; +class AudioVoice { + constructor() { + this.ac = p5sound.audiocontext; + this.output = this.ac.createGain(); + this.connect(); + p5sound.soundArray.push(this); + } + play(note, velocity, secondsFromNow, sustime) {} -/** - * Connect to 
p5 objects or Web Audio Nodes - * @method connect - * @for p5.AudioVoice - * @param {Object} unit - */ -p5.AudioVoice.prototype.connect = function (unit) { - var u = unit || p5sound.input; - this.output.connect(u.input ? u.input : u); -}; + triggerAttack(note, velocity, secondsFromNow) {} -/** - * Disconnect from soundOut - * @method disconnect - * @for p5.AudioVoice - */ -p5.AudioVoice.prototype.disconnect = function () { - this.output.disconnect(); -}; + triggerRelease(secondsFromNow) {} -p5.AudioVoice.prototype.dispose = function () { - if (this.output) { + amp(vol, rampTime) {} + + /** + * Connect to p5 objects or Web Audio Nodes + * @method connect + * @for p5.AudioVoice + * @param {Object} unit + */ + connect(unit) { + var u = unit || p5sound.input; + this.output.connect(u.input ? u.input : u); + } + + /** + * Disconnect from soundOut + * @method disconnect + * @for p5.AudioVoice + */ + disconnect() { this.output.disconnect(); - delete this.output; } -}; -export default p5.AudioVoice; + dispose() { + if (this.output) { + this.output.disconnect(); + delete this.output; + } + } +} + +export default AudioVoice; diff --git a/src/audiocontext.js b/src/audiocontext.js index 5ca93d8e..b8276db2 100644 --- a/src/audiocontext.js +++ b/src/audiocontext.js @@ -45,9 +45,9 @@ Tone.setContext(audiocontext); * * */ -p5.prototype.getAudioContext = function () { +export function getAudioContext() { return audiocontext; -}; +} /** *

It is not only a good practice to give users control over starting @@ -102,7 +102,7 @@ p5.prototype.getAudioContext = function () { * } * */ -p5.prototype.userStartAudio = function (elements, callback) { +export function userStartAudio(elements, callback) { var elt = elements; if (elements instanceof p5.Element) { elt = elements.elt; @@ -112,6 +112,6 @@ p5.prototype.userStartAudio = function (elements, callback) { }); } return StartAudioContext(audiocontext, elt, callback); -}; +} export default audiocontext; diff --git a/src/audioin.js b/src/audioin.js index 10ab52a4..78b402d0 100644 --- a/src/audioin.js +++ b/src/audioin.js @@ -31,7 +31,7 @@ p5sound.inputSources = []; * * function setup(){ * let cnv = createCanvas(100, 100); - * cnv.mousePressed(userStartAudio); + * cnv.mousePressed(userStartAudio); * textAlign(CENTER); * mic = new p5.AudioIn(); * mic.start(); @@ -48,348 +48,351 @@ p5sound.inputSources = []; * } * */ -p5.AudioIn = function (errorCallback) { - // set up audio input - /** - * @property {GainNode} input - */ - this.input = p5sound.audiocontext.createGain(); - /** - * @property {GainNode} output - */ - this.output = p5sound.audiocontext.createGain(); +class AudioIn { + constructor(errorCallback) { + // set up audio input + /** + * @property {GainNode} input + */ + this.input = p5sound.audiocontext.createGain(); + /** + * @property {GainNode} output + */ + this.output = p5sound.audiocontext.createGain(); - /** - * @property {MediaStream|null} stream - */ - this.stream = null; - /** - * @property {MediaStreamAudioSourceNode|null} mediaStream - */ - this.mediaStream = null; - /** - * @property {Number|null} currentSource - */ - this.currentSource = null; + /** + * @property {MediaStream|null} stream + */ + this.stream = null; + /** + * @property {MediaStreamAudioSourceNode|null} mediaStream + */ + this.mediaStream = null; + /** + * @property {Number|null} currentSource + */ + this.currentSource = null; - /** - * Client must allow browser to 
access their microphone / audioin source. - * Default: false. Will become true when the client enables access. - * - * @property {Boolean} enabled - */ - this.enabled = false; + /** + * Client must allow browser to access their microphone / audioin source. + * Default: false. Will become true when the client enables access. + * + * @property {Boolean} enabled + */ + this.enabled = false; + + /** + * Input amplitude, connect to it by default but not to master out + * + * @property {p5.Amplitude} amplitude + */ + this.amplitude = new Amplitude(); + this.output.connect(this.amplitude.input); + if ( + !window.MediaStreamTrack || + !window.navigator.mediaDevices || + !window.navigator.mediaDevices.getUserMedia + ) { + errorCallback + ? errorCallback() + : window.alert( + 'This browser does not support MediaStreamTrack and mediaDevices' + ); + } + + // add to soundArray so we can dispose on close + p5sound.soundArray.push(this); + } /** - * Input amplitude, connect to it by default but not to master out + * Start processing audio input. This enables the use of other + * AudioIn methods like getLevel(). Note that by default, AudioIn + * is not connected to p5.sound's output. So you won't hear + * anything unless you use the connect() method.
* - * @property {p5.Amplitude} amplitude + * Certain browsers limit access to the user's microphone. For example, + * Chrome only allows access from localhost and over https. For this reason, + * you may want to include an errorCallback—a function that is called in case + * the browser won't provide mic access. + * + * @method start + * @for p5.AudioIn + * @param {Function} [successCallback] Name of a function to call on + * success. + * @param {Function} [errorCallback] Name of a function to call if + * there was an error. For example, + * some browsers do not support + * getUserMedia. */ - this.amplitude = new Amplitude(); - this.output.connect(this.amplitude.input); - - if ( - !window.MediaStreamTrack || - !window.navigator.mediaDevices || - !window.navigator.mediaDevices.getUserMedia - ) { - errorCallback - ? errorCallback() - : window.alert( - 'This browser does not support MediaStreamTrack and mediaDevices' - ); - } + start(successCallback, errorCallback) { + var self = this; - // add to soundArray so we can dispose on close - p5sound.soundArray.push(this); -}; - -/** - * Start processing audio input. This enables the use of other - * AudioIn methods like getLevel(). Note that by default, AudioIn - * is not connected to p5.sound's output. So you won't hear - * anything unless you use the connect() method.
- * - * Certain browsers limit access to the user's microphone. For example, - * Chrome only allows access from localhost and over https. For this reason, - * you may want to include an errorCallback—a function that is called in case - * the browser won't provide mic access. - * - * @method start - * @for p5.AudioIn - * @param {Function} [successCallback] Name of a function to call on - * success. - * @param {Function} [errorCallback] Name of a function to call if - * there was an error. For example, - * some browsers do not support - * getUserMedia. - */ -p5.AudioIn.prototype.start = function (successCallback, errorCallback) { - var self = this; + if (this.stream) { + this.stop(); + } - if (this.stream) { - this.stop(); - } + // set the audio source + var audioSource = p5sound.inputSources[self.currentSource]; + var constraints = { + audio: { + sampleRate: p5sound.audiocontext.sampleRate, + echoCancellation: false, + }, + }; - // set the audio source - var audioSource = p5sound.inputSources[self.currentSource]; - var constraints = { - audio: { - sampleRate: p5sound.audiocontext.sampleRate, - echoCancellation: false, - }, - }; + // if developers determine which source to use + if (p5sound.inputSources[this.currentSource]) { + constraints.audio.deviceId = audioSource.deviceId; + } - // if developers determine which source to use - if (p5sound.inputSources[this.currentSource]) { - constraints.audio.deviceId = audioSource.deviceId; + window.navigator.mediaDevices + .getUserMedia(constraints) + .then(function (stream) { + self.stream = stream; + self.enabled = true; + // Wrap a MediaStreamSourceNode around the live input + self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream); + self.mediaStream.connect(self.output); + // only send to the Amplitude reader, so we can see it but not hear it. 
+ self.amplitude.setInput(self.output); + if (successCallback) successCallback(); + }) + .catch(function (err) { + if (errorCallback) errorCallback(err); + else console.error(err); + }); } - window.navigator.mediaDevices - .getUserMedia(constraints) - .then(function (stream) { - self.stream = stream; - self.enabled = true; - // Wrap a MediaStreamSourceNode around the live input - self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream); - self.mediaStream.connect(self.output); - // only send to the Amplitude reader, so we can see it but not hear it. - self.amplitude.setInput(self.output); - if (successCallback) successCallback(); - }) - .catch(function (err) { - if (errorCallback) errorCallback(err); - else console.error(err); - }); -}; - -/** - * Turn the AudioIn off. If the AudioIn is stopped, it cannot getLevel(). - * If re-starting, the user may be prompted for permission access. - * - * @method stop - * @for p5.AudioIn - */ -p5.AudioIn.prototype.stop = function () { - if (this.stream) { - this.stream.getTracks().forEach(function (track) { - track.stop(); - }); + /** + * Turn the AudioIn off. If the AudioIn is stopped, it cannot getLevel(). + * If re-starting, the user may be prompted for permission access. + * + * @method stop + * @for p5.AudioIn + */ + stop() { + if (this.stream) { + this.stream.getTracks().forEach(function (track) { + track.stop(); + }); - this.mediaStream.disconnect(); + this.mediaStream.disconnect(); - delete this.mediaStream; - delete this.stream; + delete this.mediaStream; + delete this.stream; + } } -}; -/** - * Connect to an audio unit. If no parameter is provided, will - * connect to the master output (i.e. your speakers).
- * - * @method connect - * @for p5.AudioIn - * @param {Object} [unit] An object that accepts audio input, - * such as an FFT - */ -p5.AudioIn.prototype.connect = function (unit) { - if (unit) { - if (unit.hasOwnProperty('input')) { - this.output.connect(unit.input); - } else if (unit.hasOwnProperty('analyser')) { - this.output.connect(unit.analyser); + /** + * Connect to an audio unit. If no parameter is provided, will + * connect to the master output (i.e. your speakers).
+ * + * @method connect + * @for p5.AudioIn + * @param {Object} [unit] An object that accepts audio input, + * such as an FFT + */ + connect(unit) { + if (unit) { + if (unit.hasOwnProperty('input')) { + this.output.connect(unit.input); + } else if (unit.hasOwnProperty('analyser')) { + this.output.connect(unit.analyser); + } else { + this.output.connect(unit); + } } else { - this.output.connect(unit); + this.output.connect(p5sound.input); } - } else { - this.output.connect(p5sound.input); } -}; -/** - * Disconnect the AudioIn from all audio units. For example, if - * connect() had been called, disconnect() will stop sending - * signal to your speakers.
- * - * @method disconnect - * @for p5.AudioIn - */ -p5.AudioIn.prototype.disconnect = function () { - if (this.output) { - this.output.disconnect(); - // stay connected to amplitude even if not outputting to p5 - this.output.connect(this.amplitude.input); + /** + * Disconnect the AudioIn from all audio units. For example, if + * connect() had been called, disconnect() will stop sending + * signal to your speakers.
+ * + * @method disconnect + * @for p5.AudioIn + */ + disconnect() { + if (this.output) { + this.output.disconnect(); + // stay connected to amplitude even if not outputting to p5 + this.output.connect(this.amplitude.input); + } } -}; -/** - * Read the Amplitude (volume level) of an AudioIn. The AudioIn - * class contains its own instance of the Amplitude class to help - * make it easy to get a microphone's volume level. Accepts an - * optional smoothing value (0.0 < 1.0). NOTE: AudioIn must - * .start() before using .getLevel().
- * - * @method getLevel - * @for p5.AudioIn - * @param {Number} [smoothing] Smoothing is 0.0 by default. - * Smooths values based on previous values. - * @return {Number} Volume level (between 0.0 and 1.0) - */ -p5.AudioIn.prototype.getLevel = function (smoothing) { - if (smoothing) { - this.amplitude.smoothing = smoothing; + /** + * Read the Amplitude (volume level) of an AudioIn. The AudioIn + * class contains its own instance of the Amplitude class to help + * make it easy to get a microphone's volume level. Accepts an + * optional smoothing value (0.0 < 1.0). NOTE: AudioIn must + * .start() before using .getLevel().
+ * + * @method getLevel + * @for p5.AudioIn + * @param {Number} [smoothing] Smoothing is 0.0 by default. + * Smooths values based on previous values. + * @return {Number} Volume level (between 0.0 and 1.0) + */ + getLevel(smoothing) { + if (smoothing) { + this.amplitude.smoothing = smoothing; + } + return this.amplitude.getLevel(); } - return this.amplitude.getLevel(); -}; -/** - * Set amplitude (volume) of a mic input between 0 and 1.0.
- * - * @method amp - * @for p5.AudioIn - * @param {Number} vol between 0 and 1.0 - * @param {Number} [time] ramp time (optional) - */ -p5.AudioIn.prototype.amp = function (vol, t) { - if (t) { - var rampTime = t || 0; - var currentVol = this.output.gain.value; - this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime); - this.output.gain.setValueAtTime( - currentVol, - p5sound.audiocontext.currentTime - ); - this.output.gain.linearRampToValueAtTime( - vol, - rampTime + p5sound.audiocontext.currentTime - ); - } else { - this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime); - this.output.gain.setValueAtTime(vol, p5sound.audiocontext.currentTime); + /** + * Set amplitude (volume) of a mic input between 0 and 1.0.
+ * + * @method amp + * @for p5.AudioIn + * @param {Number} vol between 0 and 1.0 + * @param {Number} [time] ramp time (optional) + */ + amp(vol, t) { + if (t) { + var rampTime = t || 0; + var currentVol = this.output.gain.value; + this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime); + this.output.gain.setValueAtTime( + currentVol, + p5sound.audiocontext.currentTime + ); + this.output.gain.linearRampToValueAtTime( + vol, + rampTime + p5sound.audiocontext.currentTime + ); + } else { + this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime); + this.output.gain.setValueAtTime(vol, p5sound.audiocontext.currentTime); + } } -}; -/** - * Returns a list of available input sources. This is a wrapper - * for - * MediaDevices.enumerateDevices() - Web APIs | MDN - * and it returns a Promise. - * @method getSources - * @for p5.AudioIn - * @param {Function} [successCallback] This callback function handles the sources when they - * have been enumerated. The callback function - * receives the deviceList array as its only argument - * @param {Function} [errorCallback] This optional callback receives the error - * message as its argument. - * @returns {Promise} Returns a Promise that can be used in place of the callbacks, similar - * to the enumerateDevices() method - * @example - *

- * let audioIn; - * - * function setup(){ - * text('getting sources...', 0, 20); - * audioIn = new p5.AudioIn(); - * audioIn.getSources(gotSources); - * } - * - * function gotSources(deviceList) { - * if (deviceList.length > 0) { - * //set the source to the first item in the deviceList array - * audioIn.setSource(0); - * let currentSource = deviceList[audioIn.currentSource]; - * text('set source to: ' + currentSource.deviceId, 5, 20, width); - * } - * } - *
- */ -p5.AudioIn.prototype.getSources = function (onSuccess, onError) { - return new Promise(function (resolve, reject) { - window.navigator.mediaDevices - .enumerateDevices() - .then(function (devices) { - p5sound.inputSources = devices.filter(function (device) { - return device.kind === 'audioinput'; + /** + * Returns a list of available input sources. This is a wrapper + * for + * MediaDevices.enumerateDevices() - Web APIs | MDN + * and it returns a Promise. + * @method getSources + * @for p5.AudioIn + * @param {Function} [successCallback] This callback function handles the sources when they + * have been enumerated. The callback function + * receives the deviceList array as its only argument + * @param {Function} [errorCallback] This optional callback receives the error + * message as its argument. + * @returns {Promise} Returns a Promise that can be used in place of the callbacks, similar + * to the enumerateDevices() method + * @example + *
+ * let audioIn; + * + * function setup(){ + * text('getting sources...', 0, 20); + * audioIn = new p5.AudioIn(); + * audioIn.getSources(gotSources); + * } + * + * function gotSources(deviceList) { + * if (deviceList.length > 0) { + * //set the source to the first item in the deviceList array + * audioIn.setSource(0); + * let currentSource = deviceList[audioIn.currentSource]; + * text('set source to: ' + currentSource.deviceId, 5, 20, width); + * } + * } + *
+ */ + getSources(onSuccess, onError) { + return new Promise(function (resolve, reject) { + window.navigator.mediaDevices + .enumerateDevices() + .then(function (devices) { + p5sound.inputSources = devices.filter(function (device) { + return device.kind === 'audioinput'; + }); + resolve(p5sound.inputSources); + if (onSuccess) { + onSuccess(p5sound.inputSources); + } + }) + .catch(function (error) { + reject(error); + if (onError) { + onError(error); + } else { + console.error( + 'This browser does not support MediaStreamTrack.getSources()' + ); + } }); - resolve(p5sound.inputSources); - if (onSuccess) { - onSuccess(p5sound.inputSources); - } - }) - .catch(function (error) { - reject(error); - if (onError) { - onError(error); - } else { - console.error( - 'This browser does not support MediaStreamTrack.getSources()' - ); - } - }); - }); -}; - -/** - * Set the input source. Accepts a number representing a - * position in the array returned by getSources(). - * This is only available in browsers that support - * - * navigator.mediaDevices.enumerateDevices() - * - * @method setSource - * @for p5.AudioIn - * @param {number} num position of input source in the array - * @example - *
- * let audioIn; - * - * function setup(){ - * text('getting sources...', 0, 20); - * audioIn = new p5.AudioIn(); - * audioIn.getSources(gotSources); - * } - * - * function gotSources(deviceList) { - * if (deviceList.length > 0) { - * //set the source to the first item in the deviceList array - * audioIn.setSource(0); - * let currentSource = deviceList[audioIn.currentSource]; - * text('set source to: ' + currentSource.deviceId, 5, 20, width); - * } - * } - *
- */ -p5.AudioIn.prototype.setSource = function (num) { - if (p5sound.inputSources.length > 0 && num < p5sound.inputSources.length) { - // set the current source - this.currentSource = num; - console.log('set source to ', p5sound.inputSources[this.currentSource]); - } else { - console.log('unable to set input source'); + }); } - // restart stream if currently active - if (this.stream && this.stream.active) { - this.start(); + /** + * Set the input source. Accepts a number representing a + * position in the array returned by getSources(). + * This is only available in browsers that support + * + * navigator.mediaDevices.enumerateDevices() + * + * @method setSource + * @for p5.AudioIn + * @param {number} num position of input source in the array + * @example + *
+ * let audioIn; + * + * function setup(){ + * text('getting sources...', 0, 20); + * audioIn = new p5.AudioIn(); + * audioIn.getSources(gotSources); + * } + * + * function gotSources(deviceList) { + * if (deviceList.length > 0) { + * //set the source to the first item in the deviceList array + * audioIn.setSource(0); + * let currentSource = deviceList[audioIn.currentSource]; + * text('set source to: ' + currentSource.deviceId, 5, 20, width); + * } + * } + *
+ */ + setSource(num) { + if (p5sound.inputSources.length > 0 && num < p5sound.inputSources.length) { + // set the current source + this.currentSource = num; + console.log('set source to ', p5sound.inputSources[this.currentSource]); + } else { + console.log('unable to set input source'); + } + + // restart stream if currently active + if (this.stream && this.stream.active) { + this.start(); + } } -}; -// private method -p5.AudioIn.prototype.dispose = function () { - // remove reference from soundArray - var index = p5sound.soundArray.indexOf(this); - p5sound.soundArray.splice(index, 1); + // private method + dispose() { + // remove reference from soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); - this.stop(); + this.stop(); - if (this.output) { - this.output.disconnect(); - } - if (this.amplitude) { - this.amplitude.disconnect(); + if (this.output) { + this.output.disconnect(); + } + if (this.amplitude) { + this.amplitude.disconnect(); + } + delete this.amplitude; + delete this.output; } - delete this.amplitude; - delete this.output; -}; +} + +export default AudioIn; diff --git a/src/envelope.js b/src/envelope.js index e40295ab..e17cf84d 100644 --- a/src/envelope.js +++ b/src/envelope.js @@ -897,4 +897,5 @@ p5.Env = function (t1, l1, t2, l2, t3, l3) { }; p5.Env.prototype = Object.create(p5.Envelope.prototype); -export default p5.Envelope; +const Envelope = p5.Envelope; +export default Envelope; diff --git a/src/monosynth.js b/src/monosynth.js index 481fb66d..b9ed6107 100644 --- a/src/monosynth.js +++ b/src/monosynth.js @@ -1,5 +1,7 @@ -import p5sound from './master'; import AudioVoice from './audioVoice'; +import Envelope from './envelope'; +import p5sound from './master'; +import Oscillator from './oscillator'; import { noteToFreq } from './helpers'; var DEFAULT_SUSTAIN = 0.15; @@ -42,320 +44,317 @@ var DEFAULT_SUSTAIN = 0.15; * **/ -p5.MonoSynth = function () { - AudioVoice.call(this); - - this.oscillator = new 
p5.Oscillator(); +class MonoSynth extends AudioVoice { + constructor() { + super(); + this.oscillator = new Oscillator(); - this.env = new p5.Envelope(); - this.env.setRange(1, 0); - this.env.setExp(true); + this.env = new Envelope(); //to be changed + this.env.setRange(1, 0); + this.env.setExp(true); - //set params - this.setADSR(0.02, 0.25, 0.05, 0.35); + //set params + this.setADSR(0.02, 0.25, 0.05, 0.35); - // oscillator --> env --> this.output (gain) --> p5.soundOut - this.oscillator.disconnect(); - this.oscillator.connect(this.output); + // oscillator --> env --> this.output (gain) --> p5.soundOut + this.oscillator.disconnect(); + this.oscillator.connect(this.output); - this.env.disconnect(); - this.env.setInput(this.output.gain); + this.env.disconnect(); + this.env.setInput(this.output.gain); - // reset oscillator gain to 1.0 - this.oscillator.output.gain.value = 1.0; + // reset oscillator gain to 1.0 + this.oscillator.output.gain.value = 1.0; - this.oscillator.start(); - this.connect(); + this.oscillator.start(); + this.connect(); - p5sound.soundArray.push(this); -}; + p5sound.soundArray.push(this); -p5.MonoSynth.prototype = Object.create(p5.AudioVoice.prototype); - -/** - * Play tells the MonoSynth to start playing a note. This method schedules - * the calling of .triggerAttack and .triggerRelease. - * - * @method play - * @for p5.MonoSynth - * @param {String | Number} note the note you want to play, specified as a - * frequency in Hertz (Number) or as a midi - * value in Note/Octave format ("C4", "Eb3"...etc") - * See - * Tone. Defaults to 440 hz. - * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1) - * @param {Number} [secondsFromNow] time from now (in seconds) at which to play - * @param {Number} [sustainTime] time to sustain before releasing the envelope. Defaults to 0.15 seconds. - * @example - *
- * let monoSynth; - * - * function setup() { - * let cnv = createCanvas(100, 100); - * cnv.mousePressed(playSynth); - * background(220); - * textAlign(CENTER); - * text('tap to play', width/2, height/2); - * - * monoSynth = new p5.MonoSynth(); - * } - * - * function playSynth() { - * userStartAudio(); - * - * let note = random(['Fb4', 'G4']); - * // note velocity (volume, from 0 to 1) - * let velocity = random(); - * // time from now (in seconds) - * let time = 0; - * // note duration (in seconds) - * let dur = 1/6; - * - * monoSynth.play(note, velocity, time, dur); - * } - *
- * - */ -p5.MonoSynth.prototype.play = function ( - note, - velocity, - secondsFromNow, - susTime -) { - this.triggerAttack(note, velocity, ~~secondsFromNow); - this.triggerRelease(~~secondsFromNow + (susTime || DEFAULT_SUSTAIN)); -}; + /** + * Getters and Setters + * @property {Number} attack + * @for p5.MonoSynth + */ + /** + * @property {Number} decay + * @for p5.MonoSynth + */ + /** + * @property {Number} sustain + * @for p5.MonoSynth + */ + /** + * @property {Number} release + * @for p5.MonoSynth + */ + Object.defineProperties(this, { + attack: { + get: function () { + return this.env.aTime; + }, + set: function (attack) { + this.env.setADSR( + attack, + this.env.dTime, + this.env.sPercent, + this.env.rTime + ); + }, + }, + decay: { + get: function () { + return this.env.dTime; + }, + set: function (decay) { + this.env.setADSR( + this.env.aTime, + decay, + this.env.sPercent, + this.env.rTime + ); + }, + }, + sustain: { + get: function () { + return this.env.sPercent; + }, + set: function (sustain) { + this.env.setADSR( + this.env.aTime, + this.env.dTime, + sustain, + this.env.rTime + ); + }, + }, + release: { + get: function () { + return this.env.rTime; + }, + set: function (release) { + this.env.setADSR( + this.env.aTime, + this.env.dTime, + this.env.sPercent, + release + ); + }, + }, + }); + } -/** - * Trigger the Attack, and Decay portion of the Envelope. - * Similar to holding down a key on a piano, but it will - * hold the sustain level until you let go. - * - * @param {String | Number} note the note you want to play, specified as a - * frequency in Hertz (Number) or as a midi - * value in Note/Octave format ("C4", "Eb3"...etc") - * See - * Tone. Defaults to 440 hz - * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1) - * @param {Number} [secondsFromNow] time from now (in seconds) at which to play - * @method triggerAttack - * @for p5.MonoSynth - * @example - *
- * let monoSynth; - * - * function setup() { - * let cnv = createCanvas(100, 100); - * cnv.mousePressed(triggerAttack); - * background(220); - * text('tap here for attack, let go to release', 5, 20, width - 20); - * monoSynth = new p5.MonoSynth(); - * } - * - * function triggerAttack() { - * userStartAudio(); - * - * monoSynth.triggerAttack("E3"); - * } - * - * function mouseReleased() { - * monoSynth.triggerRelease(); - * } - *
- */ -p5.MonoSynth.prototype.triggerAttack = function ( - note, - velocity, - secondsFromNow = 0 -) { - var freq = noteToFreq(note); - var vel = velocity || 0.1; - this.oscillator.freq(freq, 0, secondsFromNow); - this.env.ramp(this.output.gain, secondsFromNow, vel); -}; + /** + * Play tells the MonoSynth to start playing a note. This method schedules + * the calling of .triggerAttack and .triggerRelease. + * + * @method play + * @for p5.MonoSynth + * @param {String | Number} note the note you want to play, specified as a + * frequency in Hertz (Number) or as a midi + * value in Note/Octave format ("C4", "Eb3"...etc") + * See + * Tone. Defaults to 440 hz. + * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1) + * @param {Number} [secondsFromNow] time from now (in seconds) at which to play + * @param {Number} [sustainTime] time to sustain before releasing the envelope. Defaults to 0.15 seconds. + * @example + *
+ * let monoSynth; + * + * function setup() { + * let cnv = createCanvas(100, 100); + * cnv.mousePressed(playSynth); + * background(220); + * textAlign(CENTER); + * text('tap to play', width/2, height/2); + * + * monoSynth = new p5.MonoSynth(); + * } + * + * function playSynth() { + * userStartAudio(); + * + * let note = random(['Fb4', 'G4']); + * // note velocity (volume, from 0 to 1) + * let velocity = random(); + * // time from now (in seconds) + * let time = 0; + * // note duration (in seconds) + * let dur = 1/6; + * + * monoSynth.play(note, velocity, time, dur); + * } + *
+ * + */ + play(note, velocity, secondsFromNow, susTime) { + this.triggerAttack(note, velocity, ~~secondsFromNow); + this.triggerRelease(~~secondsFromNow + (susTime || DEFAULT_SUSTAIN)); + } -/** - * Trigger the release of the Envelope. This is similar to releasing - * the key on a piano and letting the sound fade according to the - * release level and release time. - * - * @param {Number} secondsFromNow time to trigger the release - * @method triggerRelease - * @for p5.MonoSynth - * @example - *
- * let monoSynth; - * - * function setup() { - * let cnv = createCanvas(100, 100); - * cnv.mousePressed(triggerAttack); - * background(220); - * text('tap here for attack, let go to release', 5, 20, width - 20); - * monoSynth = new p5.MonoSynth(); - * } - * - * function triggerAttack() { - * userStartAudio(); - * - * monoSynth.triggerAttack("E3"); - * } - * - * function mouseReleased() { - * monoSynth.triggerRelease(); - * } - *
- */ -p5.MonoSynth.prototype.triggerRelease = function (secondsFromNow = 0) { - this.env.ramp(this.output.gain, secondsFromNow, 0); -}; + /** + * Trigger the Attack, and Decay portion of the Envelope. + * Similar to holding down a key on a piano, but it will + * hold the sustain level until you let go. + * + * @param {String | Number} note the note you want to play, specified as a + * frequency in Hertz (Number) or as a midi + * value in Note/Octave format ("C4", "Eb3"...etc") + * See + * Tone. Defaults to 440 hz + * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1) + * @param {Number} [secondsFromNow] time from now (in seconds) at which to play + * @method triggerAttack + * @for p5.MonoSynth + * @example + *
+ * let monoSynth; + * + * function setup() { + * let cnv = createCanvas(100, 100); + * cnv.mousePressed(triggerAttack); + * background(220); + * text('tap here for attack, let go to release', 5, 20, width - 20); + * monoSynth = new p5.MonoSynth(); + * } + * + * function triggerAttack() { + * userStartAudio(); + * + * monoSynth.triggerAttack("E3"); + * } + * + * function mouseReleased() { + * monoSynth.triggerRelease(); + * } + *
+ */ + triggerAttack(note, velocity, secondsFromNow = 0) { + var freq = noteToFreq(note); + var vel = velocity || 0.1; + this.oscillator.freq(freq, 0, secondsFromNow); + this.env.ramp(this.output.gain, secondsFromNow, vel); + } -/** - * Set values like a traditional - * - * ADSR envelope - * . - * - * @method setADSR - * @for p5.MonoSynth - * @param {Number} attackTime Time (in seconds before envelope - * reaches Attack Level - * @param {Number} [decayTime] Time (in seconds) before envelope - * reaches Decay/Sustain Level - * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1, - * where 1.0 = attackLevel, 0.0 = releaseLevel. - * The susRatio determines the decayLevel and the level at which the - * sustain portion of the envelope will sustain. - * For example, if attackLevel is 0.4, releaseLevel is 0, - * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is - * increased to 1.0 (using setRange), - * then decayLevel would increase proportionally, to become 0.5. - * @param {Number} [releaseTime] Time in seconds from now (defaults to 0) - */ -p5.MonoSynth.prototype.setADSR = function (attack, decay, sustain, release) { - this.env.setADSR(attack, decay, sustain, release); -}; + /** + * Trigger the release of the Envelope. This is similar to releasing + * the key on a piano and letting the sound fade according to the + * release level and release time. + * + * @param {Number} secondsFromNow time to trigger the release + * @method triggerRelease + * @for p5.MonoSynth + * @example + *
+ * let monoSynth; + * + * function setup() { + * let cnv = createCanvas(100, 100); + * cnv.mousePressed(triggerAttack); + * background(220); + * text('tap here for attack, let go to release', 5, 20, width - 20); + * monoSynth = new p5.MonoSynth(); + * } + * + * function triggerAttack() { + * userStartAudio(); + * + * monoSynth.triggerAttack("E3"); + * } + * + * function mouseReleased() { + * monoSynth.triggerRelease(); + * } + *
+ */ + triggerRelease(secondsFromNow = 0) { + this.env.ramp(this.output.gain, secondsFromNow, 0); + } -/** - * Getters and Setters - * @property {Number} attack - * @for p5.MonoSynth - */ -/** - * @property {Number} decay - * @for p5.MonoSynth - */ -/** - * @property {Number} sustain - * @for p5.MonoSynth - */ -/** - * @property {Number} release - * @for p5.MonoSynth - */ -Object.defineProperties(p5.MonoSynth.prototype, { - attack: { - get: function () { - return this.env.aTime; - }, - set: function (attack) { - this.env.setADSR( - attack, - this.env.dTime, - this.env.sPercent, - this.env.rTime - ); - }, - }, - decay: { - get: function () { - return this.env.dTime; - }, - set: function (decay) { - this.env.setADSR( - this.env.aTime, - decay, - this.env.sPercent, - this.env.rTime - ); - }, - }, - sustain: { - get: function () { - return this.env.sPercent; - }, - set: function (sustain) { - this.env.setADSR(this.env.aTime, this.env.dTime, sustain, this.env.rTime); - }, - }, - release: { - get: function () { - return this.env.rTime; - }, - set: function (release) { - this.env.setADSR( - this.env.aTime, - this.env.dTime, - this.env.sPercent, - release - ); - }, - }, -}); + /** + * Set values like a traditional + * + * ADSR envelope + * . + * + * @method setADSR + * @for p5.MonoSynth + * @param {Number} attackTime Time (in seconds before envelope + * reaches Attack Level + * @param {Number} [decayTime] Time (in seconds) before envelope + * reaches Decay/Sustain Level + * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1, + * where 1.0 = attackLevel, 0.0 = releaseLevel. + * The susRatio determines the decayLevel and the level at which the + * sustain portion of the envelope will sustain. + * For example, if attackLevel is 0.4, releaseLevel is 0, + * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is + * increased to 1.0 (using setRange), + * then decayLevel would increase proportionally, to become 0.5. 
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0) + */ + setADSR(attack, decay, sustain, release) { + this.env.setADSR(attack, decay, sustain, release); + } -/** - * MonoSynth amp - * @method amp - * @for p5.MonoSynth - * @param {Number} vol desired volume - * @param {Number} [rampTime] Time to reach new volume - * @return {Number} new volume value - */ -p5.MonoSynth.prototype.amp = function (vol, rampTime) { - var t = rampTime || 0; - if (typeof vol !== 'undefined') { - this.oscillator.amp(vol, t); + /** + * MonoSynth amp + * @method amp + * @for p5.MonoSynth + * @param {Number} vol desired volume + * @param {Number} [rampTime] Time to reach new volume + * @return {Number} new volume value + */ + amp(vol, rampTime) { + var t = rampTime || 0; + if (typeof vol !== 'undefined') { + this.oscillator.amp(vol, t); + } + return this.oscillator.amp().value; } - return this.oscillator.amp().value; -}; -/** - * Connect to a p5.sound / Web Audio object. - * - * @method connect - * @for p5.MonoSynth - * @param {Object} unit A p5.sound or Web Audio object - */ + /** + * Connect to a p5.sound / Web Audio object. + * + * @method connect + * @for p5.MonoSynth + * @param {Object} unit A p5.sound or Web Audio object + */ -p5.MonoSynth.prototype.connect = function (unit) { - var u = unit || p5sound.input; - this.output.connect(u.input ? u.input : u); -}; + connect(unit) { + var u = unit || p5sound.input; + this.output.connect(u.input ? u.input : u); + } -/** - * Disconnect all outputs - * - * @method disconnect - * @for p5.MonoSynth - */ -p5.MonoSynth.prototype.disconnect = function () { - if (this.output) { - this.output.disconnect(); + /** + * Disconnect all outputs + * + * @method disconnect + * @for p5.MonoSynth + */ + disconnect() { + if (this.output) { + this.output.disconnect(); + } } -}; -/** - * Get rid of the MonoSynth and free up its resources / memory. 
- * - * @method dispose - * @for p5.MonoSynth - */ -p5.MonoSynth.prototype.dispose = function () { - AudioVoice.prototype.dispose.apply(this); + /** + * Get rid of the MonoSynth and free up its resources / memory. + * + * @method dispose + * @for p5.MonoSynth + */ + dispose() { + super.dispose(); - if (this.env) { - this.env.dispose(); + if (this.env) { + this.env.dispose(); + } + if (this.oscillator) { + this.oscillator.dispose(); + } } - if (this.oscillator) { - this.oscillator.dispose(); - } -}; +} + +export default MonoSynth; diff --git a/src/oscillator.js b/src/oscillator.js index 577390cb..490f2a67 100644 --- a/src/oscillator.js +++ b/src/oscillator.js @@ -626,4 +626,5 @@ p5.SqrOsc = function (freq) { p5.SqrOsc.prototype = Object.create(p5.Oscillator.prototype); -export const Oscillator = p5.Oscillator; +const Oscillator = p5.Oscillator; +export default Oscillator;