diff --git a/.eslintignore b/.eslintignore index d9391185..f1c00568 100644 --- a/.eslintignore +++ b/.eslintignore @@ -1,2 +1,4 @@ Gruntfile.js test/test.js +webpack.config.js +lib/ diff --git a/Gruntfile.js b/Gruntfile.js index 38f26842..d228f9f5 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -1,3 +1,5 @@ +const webpackConfig = require('./webpack.config.js'); + module.exports = function(grunt) { grunt.initConfig({ @@ -10,148 +12,9 @@ module.exports = function(grunt) { src: ['src/**/*.js', 'test/tests/**/*.js'] } }, - watch: { - // p5 dist - main: { - files: ['src/**/*.js'], - tasks: ['requirejs'], - options: { - livereload: { - port: 35728 - } - }, - } - }, - requirejs: { - unmin: { - options: { - baseUrl: '.', - findNestedDependencies: true, - include: ['src/app'], - onBuildWrite: function( name, path, contents ) { - if (path.indexOf('node_modules/tone/') > -1) { - return '/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/\n' + - require('amdclean').clean({ - 'code': contents.replace(/console.log(.*);/g, ''), - 'escodegen': { - 'comment': false, - 'skipDirOptimize':true, - 'format': { - 'indent': { - 'style': ' ', - 'adjustMultiLineComment': true - } - } - } - }); - } else if (path.indexOf('node_modules/startaudiocontext') > -1) { - // return '/** StartAudioContext.js by Yotam Mann, MIT License 2017 https://github.com/tambien/StartAudioContext http://opensource.org/licenses/MIT **/\n' + - return require('amdclean').clean({ - code: contents, - escodegen: { - comment: false, - format: { - indent: { - style: ' ', - adjustMultiLineComment: true - } - } - } - }); - } else { - return require('amdclean').clean({ - 'code':contents, - 'escodegen': { - 'comment': true, - 'format': { - 'indent': { - 'style': ' ', - 'adjustMultiLineComment': true - } - } - } - }); - } - }, - optimize: 'none', - out: 'lib/p5.sound.js', - paths: { - 'Tone' : 'node_modules/tone/Tone', - 'StartAudioContext' : 
'node_modules/startaudiocontext/StartAudioContext', - 'automation-timeline': 'node_modules/web-audio-automation-timeline/build/automation-timeline-amd', - 'panner' : 'src/panner', - 'shims': 'src/shims', - 'audiocontext': 'src/audiocontext', - 'master': 'src/master', - 'helpers': 'src/helpers', - 'errorHandler': 'src/errorHandler', - 'soundfile': 'src/soundfile', - 'amplitude': 'src/amplitude', - 'fft': 'src/fft', - 'oscillator': 'src/oscillator', - 'pulse': 'src/pulse', - 'noise': 'src/noise', - 'audioin': 'src/audioin', - 'envelope': 'src/envelope', - 'delay': 'src/delay', - 'effect': 'src/effect', - 'panner3d' : 'src/panner3d', - 'listener3d': 'src/listener3d', - 'filter': 'src/filter', - 'reverb': 'src/reverb', - 'eq': 'src/eq', - 'distortion': 'src/distortion', - 'compressor': 'src/compressor', - 'looper': 'src/looper', - 'soundloop': 'src/soundLoop', - 'soundRecorder': 'src/soundRecorder', - 'signal': 'src/signal', - 'metro': 'src/metro', - 'peakdetect': 'src/peakDetect', - 'gain': 'src/gain', - 'audioVoice': 'src/audioVoice', - 'monosynth': 'src/monosynth', - 'polysynth': 'src/polysynth' - }, - useStrict: true, - wrap: { - start: '/*! p5.sound.js v<%= pkg.version %> <%= grunt.template.today("yyyy-mm-dd") %> */\n' + grunt.file.read('./fragments/before.frag'), - end: grunt.file.read('./fragments/after.frag') - } - } - }, - min: { - options: { - baseUrl: '.', - findNestedDependencies: true, - include: ['src/app'], - onBuildWrite: function( name, path, contents ) { - if (path.indexOf('node_modules/tone/') > -1) { - return require('amdclean').clean({ - 'code':contents.replace(/console.log(.*);/g, ''), - 'escodegen': { - 'comment': false - } - }); - } else { - return require('amdclean').clean({ - 'code':contents, - 'escodegen': { - 'comment': false - } - }); - } - }, - optimize: 'uglify2', - out: 'lib/p5.sound.min.js', - paths: '<%= requirejs.unmin.options.paths %>', - useStrict: true, - wrap: { - start: '/*! 
p5.sound.min.js v<%= pkg.version %> <%= grunt.template.today("yyyy-mm-dd") %> */\n' + grunt.file.read('./fragments/before.frag'), - end: grunt.file.read('./fragments/after.frag') - } - } - }, + webpack: { + prod: webpackConfig, + dev: Object.assign({ watch: true }, webpackConfig) }, open: { testChrome: { @@ -179,15 +42,14 @@ module.exports = function(grunt) { }); - grunt.loadNpmTasks('grunt-contrib-requirejs'); + grunt.loadNpmTasks('grunt-webpack'); grunt.loadNpmTasks('grunt-eslint'); - grunt.loadNpmTasks('grunt-contrib-watch'); grunt.loadNpmTasks('grunt-contrib-connect'); grunt.loadNpmTasks('grunt-open'); grunt.registerTask('lint', ['eslint:source']); - grunt.registerTask('default', ['requirejs']); - grunt.registerTask('dev', ['connect','requirejs', 'watch']); + grunt.registerTask('default', ['webpack:prod']); + grunt.registerTask('dev', ['connect','webpack:dev']); grunt.registerTask('serve', 'connect:server:keepalive'); grunt.registerTask('run-tests', ['serve', 'open']); }; diff --git a/fragments/before.frag b/fragments/before.frag index aa755247..cafe1bb7 100644 --- a/fragments/before.frag +++ b/fragments/before.frag @@ -62,13 +62,3 @@ * * Web Audio API: http://w3.org/TR/webaudio/ */ - -(function (root, factory) { - if (typeof define === 'function' && define.amd) - define('p5.sound', ['p5'], function (p5) { (factory(p5));}); - else if (typeof exports === 'object') - factory(require('../p5')); - else - factory(root['p5']); -}(this, function (p5) { - diff --git a/lib/p5.sound.js b/lib/p5.sound.js index 33b13045..2e9be89b 100644 --- a/lib/p5.sound.js +++ b/lib/p5.sound.js @@ -1,4 +1,3 @@ -/*! 
p5.sound.js v0.3.11 2019-03-14 */ /** * p5.sound extends p5 with Web Audio functionality including audio input, @@ -64,8768 +63,8121 @@ * Web Audio API: http://w3.org/TR/webaudio/ */ -(function (root, factory) { - if (typeof define === 'function' && define.amd) - define('p5.sound', ['p5'], function (p5) { (factory(p5));}); - else if (typeof exports === 'object') - factory(require('../p5')); - else - factory(root['p5']); -}(this, function (p5) { - -var shims; -'use strict'; /** - * This module has shims - */ -shims = function () { - /* AudioContext Monkeypatch - Copyright 2013 Chris Wilson - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- */ - (function () { - function fixSetTarget(param) { - if (!param) - // if NYI, just return - return; - if (!param.setTargetAtTime) - param.setTargetAtTime = param.setTargetValueAtTime; - } - if (window.hasOwnProperty('webkitAudioContext') && !window.hasOwnProperty('AudioContext')) { - window.AudioContext = window.webkitAudioContext; - if (typeof AudioContext.prototype.createGain !== 'function') - AudioContext.prototype.createGain = AudioContext.prototype.createGainNode; - if (typeof AudioContext.prototype.createDelay !== 'function') - AudioContext.prototype.createDelay = AudioContext.prototype.createDelayNode; - if (typeof AudioContext.prototype.createScriptProcessor !== 'function') - AudioContext.prototype.createScriptProcessor = AudioContext.prototype.createJavaScriptNode; - if (typeof AudioContext.prototype.createPeriodicWave !== 'function') - AudioContext.prototype.createPeriodicWave = AudioContext.prototype.createWaveTable; - AudioContext.prototype.internal_createGain = AudioContext.prototype.createGain; - AudioContext.prototype.createGain = function () { - var node = this.internal_createGain(); - fixSetTarget(node.gain); - return node; - }; - AudioContext.prototype.internal_createDelay = AudioContext.prototype.createDelay; - AudioContext.prototype.createDelay = function (maxDelayTime) { - var node = maxDelayTime ? 
this.internal_createDelay(maxDelayTime) : this.internal_createDelay(); - fixSetTarget(node.delayTime); - return node; - }; - AudioContext.prototype.internal_createBufferSource = AudioContext.prototype.createBufferSource; - AudioContext.prototype.createBufferSource = function () { - var node = this.internal_createBufferSource(); - if (!node.start) { - node.start = function (when, offset, duration) { - if (offset || duration) - this.noteGrainOn(when || 0, offset, duration); - else - this.noteOn(when || 0); - }; - } else { - node.internal_start = node.start; - node.start = function (when, offset, duration) { - if (typeof duration !== 'undefined') - node.internal_start(when || 0, offset, duration); - else - node.internal_start(when || 0, offset || 0); - }; - } - if (!node.stop) { - node.stop = function (when) { - this.noteOff(when || 0); - }; - } else { - node.internal_stop = node.stop; - node.stop = function (when) { - node.internal_stop(when || 0); - }; - } - fixSetTarget(node.playbackRate); - return node; - }; - AudioContext.prototype.internal_createDynamicsCompressor = AudioContext.prototype.createDynamicsCompressor; - AudioContext.prototype.createDynamicsCompressor = function () { - var node = this.internal_createDynamicsCompressor(); - fixSetTarget(node.threshold); - fixSetTarget(node.knee); - fixSetTarget(node.ratio); - fixSetTarget(node.reduction); - fixSetTarget(node.attack); - fixSetTarget(node.release); - return node; - }; - AudioContext.prototype.internal_createBiquadFilter = AudioContext.prototype.createBiquadFilter; - AudioContext.prototype.createBiquadFilter = function () { - var node = this.internal_createBiquadFilter(); - fixSetTarget(node.frequency); - fixSetTarget(node.detune); - fixSetTarget(node.Q); - fixSetTarget(node.gain); - return node; - }; - if (typeof AudioContext.prototype.createOscillator !== 'function') { - AudioContext.prototype.internal_createOscillator = AudioContext.prototype.createOscillator; - AudioContext.prototype.createOscillator 
= function () { - var node = this.internal_createOscillator(); - if (!node.start) { - node.start = function (when) { - this.noteOn(when || 0); - }; - } else { - node.internal_start = node.start; - node.start = function (when) { - node.internal_start(when || 0); - }; - } - if (!node.stop) { - node.stop = function (when) { - this.noteOff(when || 0); - }; - } else { - node.internal_stop = node.stop; - node.stop = function (when) { - node.internal_stop(when || 0); - }; - } - if (!node.setPeriodicWave) - node.setPeriodicWave = node.setWaveTable; - fixSetTarget(node.frequency); - fixSetTarget(node.detune); - return node; - }; - } - } - if (window.hasOwnProperty('webkitOfflineAudioContext') && !window.hasOwnProperty('OfflineAudioContext')) { - window.OfflineAudioContext = window.webkitOfflineAudioContext; - } - }(window)); - // <-- end MonkeyPatch. - // Polyfill for AudioIn, also handled by p5.dom createCapture - navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia; - /** - * Determine which filetypes are supported (inspired by buzz.js) - * The audio element (el) will only be used to test browser support for various audio formats - */ - var el = document.createElement('audio'); - p5.prototype.isSupported = function () { - return !!el.canPlayType; - }; - var isOGGSupported = function () { - return !!el.canPlayType && el.canPlayType('audio/ogg; codecs="vorbis"'); - }; - var isMP3Supported = function () { - return !!el.canPlayType && el.canPlayType('audio/mpeg;'); - }; - var isWAVSupported = function () { - return !!el.canPlayType && el.canPlayType('audio/wav; codecs="1"'); - }; - var isAACSupported = function () { - return !!el.canPlayType && (el.canPlayType('audio/x-m4a;') || el.canPlayType('audio/aac;')); - }; - var isAIFSupported = function () { - return !!el.canPlayType && el.canPlayType('audio/x-aiff;'); - }; - p5.prototype.isFileSupported = function (extension) { - switch 
(extension.toLowerCase()) { - case 'mp3': - return isMP3Supported(); - case 'wav': - return isWAVSupported(); - case 'ogg': - return isOGGSupported(); - case 'aac': - case 'm4a': - case 'mp4': - return isAACSupported(); - case 'aif': - case 'aiff': - return isAIFSupported(); - default: - return false; - } - }; -}(); -var StartAudioContext; -(function (root, factory) { - if (true) { - StartAudioContext = function () { - return factory(); - }(); - } else if (typeof module === 'object' && module.exports) { - module.exports = factory(); - } else { - root.StartAudioContext = factory(); - } -}(this, function () { - var TapListener = function (element, context) { - this._dragged = false; - this._element = element; - this._bindedMove = this._moved.bind(this); - this._bindedEnd = this._ended.bind(this, context); - element.addEventListener('touchstart', this._bindedEnd); - element.addEventListener('touchmove', this._bindedMove); - element.addEventListener('touchend', this._bindedEnd); - element.addEventListener('mouseup', this._bindedEnd); - }; - TapListener.prototype._moved = function (e) { - this._dragged = true; - }; - TapListener.prototype._ended = function (context) { - if (!this._dragged) { - startContext(context); - } - this._dragged = false; - }; - TapListener.prototype.dispose = function () { - this._element.removeEventListener('touchstart', this._bindedEnd); - this._element.removeEventListener('touchmove', this._bindedMove); - this._element.removeEventListener('touchend', this._bindedEnd); - this._element.removeEventListener('mouseup', this._bindedEnd); - this._bindedMove = null; - this._bindedEnd = null; - this._element = null; - }; - function startContext(context) { - var buffer = context.createBuffer(1, 1, context.sampleRate); - var source = context.createBufferSource(); - source.buffer = buffer; - source.connect(context.destination); - source.start(0); - if (context.resume) { - context.resume(); - } - } - function isStarted(context) { - return context.state === 
'running'; - } - function onStarted(context, callback) { - function checkLoop() { - if (isStarted(context)) { - callback(); - } else { - requestAnimationFrame(checkLoop); - if (context.resume) { - context.resume(); - } - } - } - if (isStarted(context)) { - callback(); - } else { - checkLoop(); - } - } - function bindTapListener(element, tapListeners, context) { - if (Array.isArray(element) || NodeList && element instanceof NodeList) { - for (var i = 0; i < element.length; i++) { - bindTapListener(element[i], tapListeners, context); - } - } else if (typeof element === 'string') { - bindTapListener(document.querySelectorAll(element), tapListeners, context); - } else if (element.jquery && typeof element.toArray === 'function') { - bindTapListener(element.toArray(), tapListeners, context); - } else if (Element && element instanceof Element) { - var tap = new TapListener(element, context); - tapListeners.push(tap); - } - } - function StartAudioContext(context, elements, callback) { - var promise = new Promise(function (success) { - onStarted(context, success); - }); - var tapListeners = []; - if (!elements) { - elements = document.body; - } - bindTapListener(elements, tapListeners, context); - promise.then(function () { - for (var i = 0; i < tapListeners.length; i++) { - tapListeners[i].dispose(); - } - tapListeners = null; - if (callback) { - callback(); - } - }); - return promise; - } - return StartAudioContext; -})); -/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ -var Tone_core_Tone; -Tone_core_Tone = function () { - 'use strict'; - var Tone = function (inputs, outputs) { - if (this.isUndef(inputs) || inputs === 1) { - this.input = this.context.createGain(); - } else if (inputs > 1) { - this.input = new Array(inputs); - } - if (this.isUndef(outputs) || outputs === 1) { - this.output = this.context.createGain(); - } else if (outputs > 1) { - this.output = new Array(inputs); - } - }; - Tone.prototype.set = function (params, 
value, rampTime) { - if (this.isObject(params)) { - rampTime = value; - } else if (this.isString(params)) { - var tmpObj = {}; - tmpObj[params] = value; - params = tmpObj; - } - paramLoop: - for (var attr in params) { - value = params[attr]; - var parent = this; - if (attr.indexOf('.') !== -1) { - var attrSplit = attr.split('.'); - for (var i = 0; i < attrSplit.length - 1; i++) { - parent = parent[attrSplit[i]]; - if (parent instanceof Tone) { - attrSplit.splice(0, i + 1); - var innerParam = attrSplit.join('.'); - parent.set(innerParam, value); - continue paramLoop; - } - } - attr = attrSplit[attrSplit.length - 1]; - } - var param = parent[attr]; - if (this.isUndef(param)) { - continue; - } - if (Tone.Signal && param instanceof Tone.Signal || Tone.Param && param instanceof Tone.Param) { - if (param.value !== value) { - if (this.isUndef(rampTime)) { - param.value = value; - } else { - param.rampTo(value, rampTime); - } - } - } else if (param instanceof AudioParam) { - if (param.value !== value) { - param.value = value; - } - } else if (param instanceof Tone) { - param.set(value); - } else if (param !== value) { - parent[attr] = value; - } - } - return this; - }; - Tone.prototype.get = function (params) { - if (this.isUndef(params)) { - params = this._collectDefaults(this.constructor); - } else if (this.isString(params)) { - params = [params]; - } - var ret = {}; - for (var i = 0; i < params.length; i++) { - var attr = params[i]; - var parent = this; - var subRet = ret; - if (attr.indexOf('.') !== -1) { - var attrSplit = attr.split('.'); - for (var j = 0; j < attrSplit.length - 1; j++) { - var subAttr = attrSplit[j]; - subRet[subAttr] = subRet[subAttr] || {}; - subRet = subRet[subAttr]; - parent = parent[subAttr]; - } - attr = attrSplit[attrSplit.length - 1]; - } - var param = parent[attr]; - if (this.isObject(params[attr])) { - subRet[attr] = param.get(); - } else if (Tone.Signal && param instanceof Tone.Signal) { - subRet[attr] = param.value; - } else if 
(Tone.Param && param instanceof Tone.Param) { - subRet[attr] = param.value; - } else if (param instanceof AudioParam) { - subRet[attr] = param.value; - } else if (param instanceof Tone) { - subRet[attr] = param.get(); - } else if (!this.isFunction(param) && !this.isUndef(param)) { - subRet[attr] = param; - } - } - return ret; - }; - Tone.prototype._collectDefaults = function (constr) { - var ret = []; - if (!this.isUndef(constr.defaults)) { - ret = Object.keys(constr.defaults); - } - if (!this.isUndef(constr._super)) { - var superDefs = this._collectDefaults(constr._super); - for (var i = 0; i < superDefs.length; i++) { - if (ret.indexOf(superDefs[i]) === -1) { - ret.push(superDefs[i]); - } - } - } - return ret; - }; - Tone.prototype.toString = function () { - for (var className in Tone) { - var isLetter = className[0].match(/^[A-Z]$/); - var sameConstructor = Tone[className] === this.constructor; - if (this.isFunction(Tone[className]) && isLetter && sameConstructor) { - return className; - } - } - return 'Tone'; - }; - Object.defineProperty(Tone.prototype, 'numberOfInputs', { - get: function () { - if (this.input) { - if (this.isArray(this.input)) { - return this.input.length; - } else { - return 1; - } - } else { - return 0; - } - } - }); - Object.defineProperty(Tone.prototype, 'numberOfOutputs', { - get: function () { - if (this.output) { - if (this.isArray(this.output)) { - return this.output.length; - } else { - return 1; - } - } else { - return 0; - } - } - }); - Tone.prototype.dispose = function () { - if (!this.isUndef(this.input)) { - if (this.input instanceof AudioNode) { - this.input.disconnect(); - } - this.input = null; - } - if (!this.isUndef(this.output)) { - if (this.output instanceof AudioNode) { - this.output.disconnect(); - } - this.output = null; - } - return this; - }; - Tone.prototype.connect = function (unit, outputNum, inputNum) { - if (Array.isArray(this.output)) { - outputNum = this.defaultArg(outputNum, 0); - 
this.output[outputNum].connect(unit, 0, inputNum); - } else { - this.output.connect(unit, outputNum, inputNum); - } - return this; - }; - Tone.prototype.disconnect = function (destination, outputNum, inputNum) { - if (this.isArray(this.output)) { - if (this.isNumber(destination)) { - this.output[destination].disconnect(); - } else { - outputNum = this.defaultArg(outputNum, 0); - this.output[outputNum].disconnect(destination, 0, inputNum); - } - } else { - this.output.disconnect.apply(this.output, arguments); - } - }; - Tone.prototype.connectSeries = function () { - if (arguments.length > 1) { - var currentUnit = arguments[0]; - for (var i = 1; i < arguments.length; i++) { - var toUnit = arguments[i]; - currentUnit.connect(toUnit); - currentUnit = toUnit; - } - } - return this; - }; - Tone.prototype.chain = function () { - if (arguments.length > 0) { - var currentUnit = this; - for (var i = 0; i < arguments.length; i++) { - var toUnit = arguments[i]; - currentUnit.connect(toUnit); - currentUnit = toUnit; - } - } - return this; - }; - Tone.prototype.fan = function () { - if (arguments.length > 0) { - for (var i = 0; i < arguments.length; i++) { - this.connect(arguments[i]); - } - } - return this; - }; - AudioNode.prototype.chain = Tone.prototype.chain; - AudioNode.prototype.fan = Tone.prototype.fan; - Tone.prototype.defaultArg = function (given, fallback) { - if (this.isObject(given) && this.isObject(fallback)) { - var ret = {}; - for (var givenProp in given) { - ret[givenProp] = this.defaultArg(fallback[givenProp], given[givenProp]); - } - for (var fallbackProp in fallback) { - ret[fallbackProp] = this.defaultArg(given[fallbackProp], fallback[fallbackProp]); - } - return ret; - } else { - return this.isUndef(given) ? 
fallback : given; - } - }; - Tone.prototype.optionsObject = function (values, keys, defaults) { - var options = {}; - if (values.length === 1 && this.isObject(values[0])) { - options = values[0]; - } else { - for (var i = 0; i < keys.length; i++) { - options[keys[i]] = values[i]; - } - } - if (!this.isUndef(defaults)) { - return this.defaultArg(options, defaults); - } else { - return options; - } - }; - Tone.prototype.isUndef = function (val) { - return typeof val === 'undefined'; - }; - Tone.prototype.isFunction = function (val) { - return typeof val === 'function'; - }; - Tone.prototype.isNumber = function (arg) { - return typeof arg === 'number'; - }; - Tone.prototype.isObject = function (arg) { - return Object.prototype.toString.call(arg) === '[object Object]' && arg.constructor === Object; - }; - Tone.prototype.isBoolean = function (arg) { - return typeof arg === 'boolean'; - }; - Tone.prototype.isArray = function (arg) { - return Array.isArray(arg); - }; - Tone.prototype.isString = function (arg) { - return typeof arg === 'string'; - }; - Tone.noOp = function () { - }; - Tone.prototype._readOnly = function (property) { - if (Array.isArray(property)) { - for (var i = 0; i < property.length; i++) { - this._readOnly(property[i]); - } - } else { - Object.defineProperty(this, property, { - writable: false, - enumerable: true - }); - } - }; - Tone.prototype._writable = function (property) { - if (Array.isArray(property)) { - for (var i = 0; i < property.length; i++) { - this._writable(property[i]); - } - } else { - Object.defineProperty(this, property, { writable: true }); - } - }; - Tone.State = { - Started: 'started', - Stopped: 'stopped', - Paused: 'paused' - }; - Tone.prototype.equalPowerScale = function (percent) { - var piFactor = 0.5 * Math.PI; - return Math.sin(percent * piFactor); - }; - Tone.prototype.dbToGain = function (db) { - return Math.pow(2, db / 6); - }; - Tone.prototype.gainToDb = function (gain) { - return 20 * (Math.log(gain) / Math.LN10); - }; 
- Tone.prototype.intervalToFrequencyRatio = function (interval) { - return Math.pow(2, interval / 12); - }; - Tone.prototype.now = function () { - return Tone.context.now(); - }; - Tone.now = function () { - return Tone.context.now(); - }; - Tone.extend = function (child, parent) { - if (Tone.prototype.isUndef(parent)) { - parent = Tone; - } - function TempConstructor() { - } - TempConstructor.prototype = parent.prototype; - child.prototype = new TempConstructor(); - child.prototype.constructor = child; - child._super = parent; - }; - var audioContext; - Object.defineProperty(Tone, 'context', { - get: function () { - return audioContext; - }, - set: function (context) { - if (Tone.Context && context instanceof Tone.Context) { - audioContext = context; - } else { - audioContext = new Tone.Context(context); - } - if (Tone.Context) { - Tone.Context.emit('init', audioContext); - } - } - }); - Object.defineProperty(Tone.prototype, 'context', { - get: function () { - return Tone.context; - } - }); - Tone.setContext = function (ctx) { - Tone.context = ctx; - }; - Object.defineProperty(Tone.prototype, 'blockTime', { - get: function () { - return 128 / this.context.sampleRate; - } - }); - Object.defineProperty(Tone.prototype, 'sampleTime', { - get: function () { - return 1 / this.context.sampleRate; - } - }); - Object.defineProperty(Tone, 'supported', { - get: function () { - var hasAudioContext = window.hasOwnProperty('AudioContext') || window.hasOwnProperty('webkitAudioContext'); - var hasPromises = window.hasOwnProperty('Promise'); - var hasWorkers = window.hasOwnProperty('Worker'); - return hasAudioContext && hasPromises && hasWorkers; - } - }); - Tone.version = 'r10'; - if (!window.TONE_SILENCE_VERSION_LOGGING) { - } - return Tone; -}(); -/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ -var Tone_core_Emitter; -Tone_core_Emitter = function (Tone) { - 'use strict'; - Tone.Emitter = function () { - this._events = {}; - }; - 
Tone.extend(Tone.Emitter); - Tone.Emitter.prototype.on = function (event, callback) { - var events = event.split(/\W+/); - for (var i = 0; i < events.length; i++) { - var eventName = events[i]; - if (!this._events.hasOwnProperty(eventName)) { - this._events[eventName] = []; - } - this._events[eventName].push(callback); - } - return this; - }; - Tone.Emitter.prototype.off = function (event, callback) { - var events = event.split(/\W+/); - for (var ev = 0; ev < events.length; ev++) { - event = events[ev]; - if (this._events.hasOwnProperty(event)) { - if (Tone.prototype.isUndef(callback)) { - this._events[event] = []; - } else { - var eventList = this._events[event]; - for (var i = 0; i < eventList.length; i++) { - if (eventList[i] === callback) { - eventList.splice(i, 1); - } - } - } - } - } - return this; - }; - Tone.Emitter.prototype.emit = function (event) { - if (this._events) { - var args = Array.apply(null, arguments).slice(1); - if (this._events.hasOwnProperty(event)) { - var eventList = this._events[event]; - for (var i = 0, len = eventList.length; i < len; i++) { - eventList[i].apply(this, args); - } - } - } - return this; - }; - Tone.Emitter.mixin = function (object) { - var functions = [ - 'on', - 'off', - 'emit' - ]; - object._events = {}; - for (var i = 0; i < functions.length; i++) { - var func = functions[i]; - var emitterFunc = Tone.Emitter.prototype[func]; - object[func] = emitterFunc; - } - }; - Tone.Emitter.prototype.dispose = function () { - Tone.prototype.dispose.call(this); - this._events = null; - return this; - }; - return Tone.Emitter; -}(Tone_core_Tone); -/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ -var Tone_core_Context; -Tone_core_Context = function (Tone) { - if (!window.hasOwnProperty('AudioContext') && window.hasOwnProperty('webkitAudioContext')) { - window.AudioContext = window.webkitAudioContext; - } - Tone.Context = function (context) { - Tone.Emitter.call(this); - if (!context) { - 
context = new window.AudioContext(); - } - this._context = context; - for (var prop in this._context) { - this._defineProperty(this._context, prop); - } - this._latencyHint = 'interactive'; - this._lookAhead = 0.1; - this._updateInterval = this._lookAhead / 3; - this._computedUpdateInterval = 0; - this._worker = this._createWorker(); - this._constants = {}; - }; - Tone.extend(Tone.Context, Tone.Emitter); - Tone.Emitter.mixin(Tone.Context); - Tone.Context.prototype._defineProperty = function (context, prop) { - if (this.isUndef(this[prop])) { - Object.defineProperty(this, prop, { - get: function () { - if (typeof context[prop] === 'function') { - return context[prop].bind(context); - } else { - return context[prop]; - } - }, - set: function (val) { - context[prop] = val; - } - }); - } - }; - Tone.Context.prototype.now = function () { - return this._context.currentTime; - }; - Tone.Context.prototype._createWorker = function () { - window.URL = window.URL || window.webkitURL; - var blob = new Blob(['var timeoutTime = ' + (this._updateInterval * 1000).toFixed(1) + ';' + 'self.onmessage = function(msg){' + '\ttimeoutTime = parseInt(msg.data);' + '};' + 'function tick(){' + '\tsetTimeout(tick, timeoutTime);' + '\tself.postMessage(\'tick\');' + '}' + 'tick();']); - var blobUrl = URL.createObjectURL(blob); - var worker = new Worker(blobUrl); - worker.addEventListener('message', function () { - this.emit('tick'); - }.bind(this)); - worker.addEventListener('message', function () { - var now = this.now(); - if (this.isNumber(this._lastUpdate)) { - var diff = now - this._lastUpdate; - this._computedUpdateInterval = Math.max(diff, this._computedUpdateInterval * 0.97); - } - this._lastUpdate = now; - }.bind(this)); - return worker; - }; - Tone.Context.prototype.getConstant = function (val) { - if (this._constants[val]) { - return this._constants[val]; - } else { - var buffer = this._context.createBuffer(1, 128, this._context.sampleRate); - var arr = buffer.getChannelData(0); - 
for (var i = 0; i < arr.length; i++) { - arr[i] = val; - } - var constant = this._context.createBufferSource(); - constant.channelCount = 1; - constant.channelCountMode = 'explicit'; - constant.buffer = buffer; - constant.loop = true; - constant.start(0); - this._constants[val] = constant; - return constant; - } - }; - Object.defineProperty(Tone.Context.prototype, 'lag', { - get: function () { - var diff = this._computedUpdateInterval - this._updateInterval; - diff = Math.max(diff, 0); - return diff; - } - }); - Object.defineProperty(Tone.Context.prototype, 'lookAhead', { - get: function () { - return this._lookAhead; - }, - set: function (lA) { - this._lookAhead = lA; - } - }); - Object.defineProperty(Tone.Context.prototype, 'updateInterval', { - get: function () { - return this._updateInterval; - }, - set: function (interval) { - this._updateInterval = Math.max(interval, Tone.prototype.blockTime); - this._worker.postMessage(Math.max(interval * 1000, 1)); - } - }); - Object.defineProperty(Tone.Context.prototype, 'latencyHint', { - get: function () { - return this._latencyHint; - }, - set: function (hint) { - var lookAhead = hint; - this._latencyHint = hint; - if (this.isString(hint)) { - switch (hint) { - case 'interactive': - lookAhead = 0.1; - this._context.latencyHint = hint; - break; - case 'playback': - lookAhead = 0.8; - this._context.latencyHint = hint; - break; - case 'balanced': - lookAhead = 0.25; - this._context.latencyHint = hint; - break; - case 'fastest': - lookAhead = 0.01; - break; - } - } - this.lookAhead = lookAhead; - this.updateInterval = lookAhead / 3; - } - }); - function shimConnect() { - var nativeConnect = AudioNode.prototype.connect; - var nativeDisconnect = AudioNode.prototype.disconnect; - function toneConnect(B, outNum, inNum) { - if (B.input) { - if (Array.isArray(B.input)) { - if (Tone.prototype.isUndef(inNum)) { - inNum = 0; - } - this.connect(B.input[inNum]); - } else { - this.connect(B.input, outNum, inNum); - } - } else { - try { 
- if (B instanceof AudioNode) { - nativeConnect.call(this, B, outNum, inNum); - } else { - nativeConnect.call(this, B, outNum); - } - } catch (e) { - throw new Error('error connecting to node: ' + B + '\n' + e); - } - } - } - function toneDisconnect(B, outNum, inNum) { - if (B && B.input && Array.isArray(B.input)) { - if (Tone.prototype.isUndef(inNum)) { - inNum = 0; - } - this.disconnect(B.input[inNum], outNum, inNum); - } else if (B && B.input) { - this.disconnect(B.input, outNum, inNum); - } else { - try { - nativeDisconnect.apply(this, arguments); - } catch (e) { - throw new Error('error disconnecting node: ' + B + '\n' + e); - } - } - } - if (AudioNode.prototype.connect !== toneConnect) { - AudioNode.prototype.connect = toneConnect; - AudioNode.prototype.disconnect = toneDisconnect; - } - } - if (Tone.supported) { - shimConnect(); - Tone.context = new Tone.Context(); - } else { - console.warn('This browser does not support Tone.js'); - } - return Tone.Context; -}(Tone_core_Tone); -var audiocontext; -'use strict'; -audiocontext = function (StartAudioContext, Context, Tone) { - // Create the Audio Context - const audiocontext = new window.AudioContext(); - Tone.context.dispose(); - Tone.setContext(audiocontext); - /** - *
Returns the Audio Context for this sketch. Useful for users - * who would like to dig deeper into the Web Audio API - * .
- * - *Some browsers require users to startAudioContext - * with a user gesture, such as touchStarted in the example below.
- * - * @method getAudioContext - * @return {Object} AudioContext for this sketch - * @example - *
- * function draw() {
- * background(255);
- * textAlign(CENTER);
- *
- * if (getAudioContext().state !== 'running') {
- * text('click to start audio', width/2, height/2);
- * } else {
- * text('audio is enabled', width/2, height/2);
- * }
- * }
- *
- * function touchStarted() {
- * if (getAudioContext().state !== 'running') {
- * getAudioContext().resume();
- * }
- * var synth = new p5.MonoSynth();
- * synth.play('A4', 0.5, 0, 0.2);
- * }
- *
- *
It is a good practice to give users control over starting audio playback. - * This practice is enforced by Google Chrome's autoplay policy as of r70 - * (info), iOS Safari, and other browsers. - *
- * - *- * userStartAudio() starts the Audio Context on a user gesture. It utilizes - * the StartAudioContext library by - * Yotam Mann (MIT Licence, 2016). Read more at https://github.com/tambien/StartAudioContext. - *
- * - *Starting the audio context on a user gesture can be as simple as userStartAudio()
.
- * Optional parameters let you decide on a specific element that will start the audio context,
- * and/or call a function once the audio context is started.
- * function setup() {
- * var myDiv = createDiv('click to start audio');
- * myDiv.position(0, 0);
- *
- * var mySynth = new p5.MonoSynth();
- *
- * // This won't play until the context has started
- * mySynth.play('A6');
- *
- * // Start the audio context on a click/touch event
- * userStartAudio().then(function() {
- * myDiv.remove();
- * });
- * }
- *
Scale the output of all sound in this sketch
- * Scaled between 0.0 (silence) and 1.0 (full volume). - * 1.0 is the maximum amplitude of a digital sound, so multiplying - * by greater than 1.0 may cause digital distortion. To - * fade, provide arampTime
parameter. For more
- * complex fades, see the Envelope class.
- *
- * Alternately, you can pass in a signal source such as an
- * oscillator to modulate the amplitude with an audio signal.
- *
- * How This Works: When you load the p5.sound module, it - * creates a single instance of p5sound. All sound objects in this - * module output to p5sound before reaching your computer's output. - * So if you change the amplitude of p5sound, it impacts all of the - * sound in this module.
- * - *If no value is provided, returns a Web Audio API Gain Node
- * - * @method masterVolume - * @param {Number|Object} volume Volume (amplitude) between 0.0 - * and 1.0 or modulating signal/oscillator - * @param {Number} [rampTime] Fade for t seconds - * @param {Number} [timeFromNow] Schedule this event to happen at - * t seconds in the future - */ - p5.prototype.masterVolume = function (vol, rampTime, tFromNow) { - if (typeof vol === 'number') { - var rampTime = rampTime || 0; - var tFromNow = tFromNow || 0; - var now = p5sound.audiocontext.currentTime; - var currentVol = p5sound.output.gain.value; - p5sound.output.gain.cancelScheduledValues(now + tFromNow); - p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow); - p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime); - } else if (vol) { - vol.connect(p5sound.output.gain); - } else { - // return the Gain Node - return p5sound.output.gain; - } - }; - /** - * `p5.soundOut` is the p5.sound master output. It sends output to - * the destination of this window's web audio context. It contains - * Web Audio API nodes including a dyanmicsCompressor (.limiter
),
- * and Gain Nodes for .input
and .output
.
- *
- * @property {Object} soundOut
- */
- p5.prototype.soundOut = p5.soundOut = p5sound;
- /**
- * a silent connection to the DesinationNode
- * which will ensure that anything connected to it
- * will not be garbage collected
- *
- * @private
- */
- p5.soundOut._silentNode = p5sound.audiocontext.createGain();
- p5.soundOut._silentNode.gain.value = 0;
- p5.soundOut._silentNode.connect(p5sound.audiocontext.destination);
- return p5sound;
-}(audiocontext);
-var helpers;
-'use strict';
-helpers = function () {
- var p5sound = master;
- /**
- * @for p5
- */
- /**
- * Returns a number representing the sample rate, in samples per second,
- * of all sound objects in this audio context. It is determined by the
- * sampling rate of your operating system's sound card, and it is not
- * currently possile to change.
- * It is often 44100, or twice the range of human hearing.
- *
- * @method sampleRate
- * @return {Number} samplerate samples per second
- */
- p5.prototype.sampleRate = function () {
- return p5sound.audiocontext.sampleRate;
- };
- /**
- * Returns the closest MIDI note value for
- * a given frequency.
- *
- * @method freqToMidi
- * @param {Number} frequency A freqeuncy, for example, the "A"
- * above Middle C is 440Hz
- * @return {Number} MIDI note value
- */
- p5.prototype.freqToMidi = function (f) {
- var mathlog2 = Math.log(f / 440) / Math.log(2);
- var m = Math.round(12 * mathlog2) + 69;
- return m;
- };
- /**
- * Returns the frequency value of a MIDI note value.
- * General MIDI treats notes as integers where middle C
- * is 60, C# is 61, D is 62 etc. Useful for generating
- * musical frequencies with oscillators.
- *
- * @method midiToFreq
- * @param {Number} midiNote The number of a MIDI note
- * @return {Number} Frequency value of the given MIDI note
- * @example
- *
- * var notes = [60, 64, 67, 72];
- * var i = 0;
- *
- * function setup() {
- * osc = new p5.Oscillator('Triangle');
- * osc.start();
- * frameRate(1);
- * }
- *
- * function draw() {
- * var freq = midiToFreq(notes[i]);
- * osc.freq(freq);
- * i++;
- * if (i >= notes.length){
- * i = 0;
- * }
- * }
- *
- * function preload() {
- * // set the global sound formats
- * soundFormats('mp3', 'ogg');
- *
- * // load either beatbox.mp3, or .ogg, depending on browser
- * mySound = loadSound('assets/beatbox.mp3');
- * }
- *
- * function setup() {
- * mySound.play();
- * }
- *
The p5.SoundFile may not be available immediately because - * it loads the file information asynchronously.
- * - *To do something with the sound as soon as it loads - * pass the name of a function as the second parameter.
- * - *Only one file path is required. However, audio file formats - * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all - * web browsers. If you want to ensure compatability, instead of a single - * file path, you may include an Array of filepaths, and the browser will - * choose a format that works.
- * - * @class p5.SoundFile - * @constructor - * @param {String|Array} path path to a sound file (String). Optionally, - * you may include multiple file formats in - * an array. Alternately, accepts an object - * from the HTML5 File API, or a p5.File. - * @param {Function} [successCallback] Name of a function to call once file loads - * @param {Function} [errorCallback] Name of a function to call if file fails to - * load. This function will receive an error or - * XMLHttpRequest object with information - * about what went wrong. - * @param {Function} [whileLoadingCallback] Name of a function to call while file - * is loading. That function will - * receive progress of the request to - * load the sound file - * (between 0 and 1) as its first - * parameter. This progress - * does not account for the additional - * time needed to decode the audio data. - * - * @example - *
- *
- * function preload() {
- * soundFormats('mp3', 'ogg');
- * mySound = loadSound('assets/doorbell.mp3');
- * }
- *
- * function setup() {
- * mySound.setVolume(0.1);
- * mySound.play();
- * }
- *
- *
- * function preload() {
- * mySound = loadSound('assets/doorbell.mp3');
- * }
+ * Returns a number representing the master amplitude (volume) for sound
+ * in this sketch.
*
- * function setup() {
- * mySound.setVolume(0.1);
- * mySound.play();
- * }
- *
Scale the output of all sound in this sketch
+ * Scaled between 0.0 (silence) and 1.0 (full volume). + * 1.0 is the maximum amplitude of a digital sound, so multiplying + * by greater than 1.0 may cause digital distortion. To + * fade, provide arampTime
parameter. For more
+ * complex fades, see the Envelope class.
+ *
+ * Alternately, you can pass in a signal source such as an
+ * oscillator to modulate the amplitude with an audio signal.
+ *
+ * How This Works: When you load the p5.sound module, it + * creates a single instance of p5sound. All sound objects in this + * module output to p5sound before reaching your computer's output. + * So if you change the amplitude of p5sound, it impacts all of the + * sound in this module.
* - * @private - * @param {Function} [successCallback] Name of a function to call once file loads - * @param {Function} [errorCallback] Name of a function to call if there is an error + *If no value is provided, returns a Web Audio API Gain Node
+ * + * @method masterVolume + * @param {Number|Object} volume Volume (amplitude) between 0.0 + * and 1.0 or modulating signal/oscillator + * @param {Number} [rampTime] Fade for t seconds + * @param {Number} [timeFromNow] Schedule this event to happen at + * t seconds in the future */ - p5.SoundFile.prototype.load = function (callback, errorCallback) { - var self = this; - var errorTrace = new Error().stack; - if (this.url !== undefined && this.url !== '') { - var request = new XMLHttpRequest(); - request.addEventListener('progress', function (evt) { - self._updateProgress(evt); - }, false); - request.open('GET', this.url, true); - request.responseType = 'arraybuffer'; - request.onload = function () { - if (request.status === 200) { - // on sucess loading file: - if (!self.panner) - return; - ac.decodeAudioData(request.response, // success decoding buffer: - function (buff) { - if (!self.panner) - return; - self.buffer = buff; - self.panner.inputChannels(buff.numberOfChannels); - if (callback) { - callback(self); - } - }, // error decoding buffer. "e" is undefined in Chrome 11/22/2015 - function () { - if (!self.panner) - return; - var err = new CustomError('decodeAudioData', errorTrace, self.url); - var msg = 'AudioContext error at decodeAudioData for ' + self.url; - if (errorCallback) { - err.msg = msg; - errorCallback(err); - } else { - console.error(msg + '\n The error stack trace includes: \n' + err.stack); - } - }); - } else { - if (!self.panner) - return; - var err = new CustomError('loadSound', errorTrace, self.url); - var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')'; - if (errorCallback) { - err.message = msg; - errorCallback(err); - } else { - console.error(msg + '\n The error stack trace includes: \n' + err.stack); - } - } - }; - // if there is another error, aside from 404... 
- request.onerror = function () { - var err = new CustomError('loadSound', errorTrace, self.url); - var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.'; - if (errorCallback) { - err.message = msg; - errorCallback(err); - } else { - console.error(msg + '\n The error stack trace includes: \n' + err.stack); - } - }; - request.send(); - } else if (this.file !== undefined) { - var reader = new FileReader(); - reader.onload = function () { - if (!self.panner) - return; - ac.decodeAudioData(reader.result, function (buff) { - if (!self.panner) - return; - self.buffer = buff; - self.panner.inputChannels(buff.numberOfChannels); - if (callback) { - callback(self); - } - }); - }; - reader.onerror = function (e) { - if (!self.panner) - return; - if (onerror) { - onerror(e); - } - }; - reader.readAsArrayBuffer(this.file); - } - }; - // TO DO: use this method to create a loading bar that shows progress during file upload/decode. - p5.SoundFile.prototype._updateProgress = function (evt) { - if (evt.lengthComputable) { - var percentComplete = evt.loaded / evt.total * 0.99; - this._whileLoading(percentComplete, evt); + + + p5.prototype.masterVolume = function (vol, rampTime, tFromNow) { + if (typeof vol === 'number') { + var rampTime = rampTime || 0; + var tFromNow = tFromNow || 0; + var now = p5sound.audiocontext.currentTime; + var currentVol = p5sound.output.gain.value; + p5sound.output.gain.cancelScheduledValues(now + tFromNow); + p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow); + p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime); + } else if (vol) { + vol.connect(p5sound.output.gain); } else { - // Unable to compute progress information since the total size is unknown - this._whileLoading('size unknown'); + // return the Gain Node + return p5sound.output.gain; } }; /** - * Returns true if the sound file finished loading successfully. 
+ * `p5.soundOut` is the p5.sound master output. It sends output to + * the destination of this window's web audio context. It contains + * Web Audio API nodes including a dyanmicsCompressor (.limiter
),
+ * and Gain Nodes for .input
and .output
.
*
- * @method isLoaded
- * @return {Boolean}
+ * @property {Object} soundOut
*/
- p5.SoundFile.prototype.isLoaded = function () {
- if (this.buffer) {
- return true;
- } else {
- return false;
- }
- };
+
+
+ p5.prototype.soundOut = p5.soundOut = p5sound;
/**
- * Play the p5.SoundFile
+ * a silent connection to the DesinationNode
+ * which will ensure that anything connected to it
+ * will not be garbage collected
*
- * @method play
- * @param {Number} [startTime] (optional) schedule playback to start (in seconds from now).
- * @param {Number} [rate] (optional) playback rate
- * @param {Number} [amp] (optional) amplitude (volume)
- * of playback
- * @param {Number} [cueStart] (optional) cue start time in seconds
- * @param {Number} [duration] (optional) duration of playback in seconds
+ * @private
*/
- p5.SoundFile.prototype.play = function (startTime, rate, amp, _cueStart, duration) {
- if (!this.output) {
- console.warn('SoundFile.play() called after dispose');
- return;
- }
- var self = this;
- var now = p5sound.audiocontext.currentTime;
- var cueStart, cueEnd;
- var time = startTime || 0;
- if (time < 0) {
- time = 0;
- }
- time = time + now;
- if (typeof rate !== 'undefined') {
- this.rate(rate);
- }
- if (typeof amp !== 'undefined') {
- this.setVolume(amp);
- }
- // TO DO: if already playing, create array of buffers for easy stop()
- if (this.buffer) {
- // reset the pause time (if it was paused)
- this._pauseTime = 0;
- // handle restart playmode
- if (this.mode === 'restart' && this.buffer && this.bufferSourceNode) {
- this.bufferSourceNode.stop(time);
- this._counterNode.stop(time);
- }
- //dont create another instance if already playing
- if (this.mode === 'untildone' && this.isPlaying()) {
- return;
- }
- // make a new source and counter. They are automatically assigned playbackRate and buffer
- this.bufferSourceNode = this._initSourceNode();
- // garbage collect counterNode and create a new one
- delete this._counterNode;
- this._counterNode = this._initCounterNode();
- if (_cueStart) {
- if (_cueStart >= 0 && _cueStart < this.buffer.duration) {
- // this.startTime = cueStart;
- cueStart = _cueStart;
- } else {
- throw 'start time out of range';
- }
- } else {
- cueStart = 0;
- }
- if (duration) {
- // if duration is greater than buffer.duration, just play entire file anyway rather than throw an error
- duration = duration <= this.buffer.duration - cueStart ? duration : this.buffer.duration;
- }
- // if it was paused, play at the pause position
- if (this._paused) {
- this.bufferSourceNode.start(time, this.pauseTime, duration);
- this._counterNode.start(time, this.pauseTime, duration);
- } else {
- this.bufferSourceNode.start(time, cueStart, duration);
- this._counterNode.start(time, cueStart, duration);
- }
- this._playing = true;
- this._paused = false;
- // add source to sources array, which is used in stopAll()
- this.bufferSourceNodes.push(this.bufferSourceNode);
- this.bufferSourceNode._arrayIndex = this.bufferSourceNodes.length - 1;
- this.bufferSourceNode.addEventListener('ended', this._clearOnEnd);
- } else {
- throw 'not ready to play file, buffer has yet to load. Try preload()';
- }
- // if looping, will restart at original time
- this.bufferSourceNode.loop = this._looping;
- this._counterNode.loop = this._looping;
- if (this._looping === true) {
- cueEnd = duration ? duration : cueStart - 1e-15;
- this.bufferSourceNode.loopStart = cueStart;
- this.bufferSourceNode.loopEnd = cueEnd;
- this._counterNode.loopStart = cueStart;
- this._counterNode.loopEnd = cueEnd;
- }
+
+ p5.soundOut._silentNode = p5sound.audiocontext.createGain();
+ p5.soundOut._silentNode.gain.value = 0;
+
+ p5.soundOut._silentNode.connect(p5sound.audiocontext.destination);
+
+ return p5sound;
+}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 2 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(5),__webpack_require__(8),__webpack_require__(21),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(n){"use strict";return n.Signal=function(){var t=this.optionsObject(arguments,["value","units"],n.Signal.defaults);this.output=this._gain=this.context.createGain(),t.param=this._gain.gain,n.Param.call(this,t),this.input=this._param=this._gain.gain,this.context.getConstant(1).chain(this._gain)},n.extend(n.Signal,n.Param),n.Signal.defaults={value:0,units:n.Type.Default,convert:!0},n.Signal.prototype.connect=n.SignalBase.prototype.connect,n.Signal.prototype.dispose=function(){return n.Param.prototype.dispose.call(this),this._param=null,this._gain.disconnect(),this._gain=null,this},n.Signal}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 3 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Multiply=function(t){this.createInsOuts(2,0),this._mult=this.input[0]=this.output=new i.Gain,this._param=this.input[1]=this.output.gain,this._param.value=this.defaultArg(t,0)},i.extend(i.Multiply,i.Signal),i.Multiply.prototype.dispose=function(){return i.prototype.dispose.call(this),this._mult.dispose(),this._mult=null,this._param=null,this},i.Multiply}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 4 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var CrossFade = __webpack_require__(45);
+ /**
+ * Effect is a base class for audio effects in p5. restart
and
- * sustain
. Play Mode determines what happens to a
- * p5.SoundFile if it is triggered while in the middle of playback.
- * In sustain mode, playback will continue simultaneous to the
- * new playback. In restart mode, play() will stop playback
- * and start over. With untilDone, a sound will play only if it's
- * not already playing. Sustain is the default mode.
+ * Set the output volume of the filter.
*
- * @method playMode
- * @param {String} str 'restart' or 'sustain' or 'untilDone'
- * @example
- *
- * var mySound;
- * function preload(){
- * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- * function mouseClicked() {
- * mySound.playMode('sustain');
- * mySound.play();
- * }
- * function keyPressed() {
- * mySound.playMode('restart');
- * mySound.play();
- * }
+ * @method amp
+ * @for p5.Effect
+ * @param {Number} [vol] amplitude between 0 and 1.0
+ * @param {Number} [rampTime] create a fade that lasts until rampTime
+ * @param {Number} [tFromNow] schedule this event to happen in tFromNow seconds
+ */
+
+
+ p5.Effect.prototype.amp = function (vol, rampTime, tFromNow) {
+ var rampTime = rampTime || 0;
+ var tFromNow = tFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+ var currentVol = this.output.gain.value;
+ this.output.gain.cancelScheduledValues(now);
+ this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow + .001);
+ this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime + .001);
+ };
+ /**
+ * Link effects together in a chain
+ * Example usage: filter.chain(reverb, delay, panner);
+ * May be used with an open-ended number of arguments
*
- *
- * var soundFile;
- *
- * function preload() {
- * soundFormats('ogg', 'mp3');
- * soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3');
- * }
- * function setup() {
- * background(0, 255, 0);
- * soundFile.setVolume(0.1);
- * soundFile.loop();
- * }
- * function keyTyped() {
- * if (key == 'p') {
- * soundFile.pause();
- * background(255, 0, 0);
- * }
- * }
+ * Adjust the dry/wet value.
*
- * function keyReleased() {
- * if (key == 'p') {
- * soundFile.play();
- * background(0, 255, 0);
- * }
- * }
- *
- *
+ * var notes = [60, 64, 67, 72];
+ * var i = 0;
+ *
+ * function setup() {
+ * osc = new p5.Oscillator('Triangle');
+ * osc.start();
+ * frameRate(1);
+ * }
+ *
+ * function draw() {
+ * var freq = midiToFreq(notes[i]);
+ * osc.freq(freq);
+ * i++;
+ * if (i >= notes.length){
+ * i = 0;
+ * }
+ * }
+ *
*/
- p5.SoundFile.prototype.isLooping = function () {
- if (!this.bufferSourceNode) {
- return false;
+
+
+ var midiToFreq = p5.prototype.midiToFreq = function (m) {
+ return 440 * Math.pow(2, (m - 69) / 12.0);
+ }; // This method converts ANSI notes specified as a string "C4", "Eb3" to a frequency
+
+
+ var noteToFreq = function noteToFreq(note) {
+ if (typeof note !== 'string') {
+ return note;
}
- if (this._looping === true && this.isPlaying() === true) {
- return true;
+
+ var wholeNotes = {
+ A: 21,
+ B: 23,
+ C: 24,
+ D: 26,
+ E: 28,
+ F: 29,
+ G: 31
+ };
+ var value = wholeNotes[note[0].toUpperCase()];
+ var octave = ~~note.slice(-1);
+ value += 12 * (octave - 1);
+
+ switch (note[1]) {
+ case '#':
+ value += 1;
+ break;
+
+ case 'b':
+ value -= 1;
+ break;
+
+ default:
+ break;
}
- return false;
+
+ return midiToFreq(value);
};
/**
- * Returns true if a p5.SoundFile is playing, false if not (i.e.
- * paused or stopped).
+ * List the SoundFile formats that you will include. LoadSound
+ * will search your directory for these extensions, and will pick
+ * a format that is compatable with the client's web browser.
+ * Here is a free online file
+ * converter.
*
- * @method isPlaying
- * @return {Boolean}
- */
- p5.SoundFile.prototype.isPlaying = function () {
- return this._playing;
- };
- /**
- * Returns true if a p5.SoundFile is paused, false if not (i.e.
- * playing or stopped).
+ * @method soundFormats
+ * @param {String} [...formats] i.e. 'mp3', 'wav', 'ogg'
+ * @example
+ *
+ * function preload() {
+ * // set the global sound formats
+ * soundFormats('mp3', 'ogg');
*
- * @method isPaused
- * @return {Boolean}
- */
- p5.SoundFile.prototype.isPaused = function () {
- return this._paused;
- };
- /**
- * Stop soundfile playback.
+ * // load either beatbox.mp3, or .ogg, depending on browser
+ * mySound = loadSound('assets/beatbox.mp3');
+ * }
*
- * @method stop
- * @param {Number} [startTime] (optional) schedule event to occur
- * in seconds from now
+ * function setup() {
+ * mySound.play();
+ * }
+ *
rampTime
parameter. For more
- * complex fades, see the Envelope class.
- *
- * Alternately, you can pass in a signal source such as an
- * oscillator to modulate the amplitude with an audio signal.
- *
- * @method setVolume
- * @param {Number|Object} volume Volume (amplitude) between 0.0
- * and 1.0 or modulating signal/oscillator
- * @param {Number} [rampTime] Fade for t seconds
- * @param {Number} [timeFromNow] Schedule this event to happen at
- * t seconds in the future
- */
- p5.SoundFile.prototype.setVolume = function (vol, _rampTime, _tFromNow) {
- if (typeof vol === 'number') {
- var rampTime = _rampTime || 0;
- var tFromNow = _tFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- var currentVol = this.output.gain.value;
- this.output.gain.cancelScheduledValues(now + tFromNow);
- this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
- this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
- } else if (vol) {
- vol.connect(this.output.gain);
- } else {
- // return the Gain Node
- return this.output.gain;
+
+ o.mathOps[thisChain - 1].disconnect();
+ o.mathOps[thisChain - 1].connect(math);
+ math.connect(nextChain);
+ o.mathOps[thisChain] = math;
+ return o;
+ }; // helper methods to convert audio file as .wav format,
+ // will use as saving .wav file and saving blob object
+ // Thank you to Matt Diamond's RecorderJS (MIT License)
+ // https://github.com/mattdiamond/Recorderjs
+
+
+ function convertToWav(audioBuffer) {
+ var leftChannel, rightChannel;
+ leftChannel = audioBuffer.getChannelData(0); // handle mono files
+
+ if (audioBuffer.numberOfChannels > 1) {
+ rightChannel = audioBuffer.getChannelData(1);
+ } else {
+ rightChannel = leftChannel;
+ }
+
+ var interleaved = interleave(leftChannel, rightChannel); // create the buffer and view to create the .WAV file
+
+ var buffer = new window.ArrayBuffer(44 + interleaved.length * 2);
+ var view = new window.DataView(buffer); // write the WAV container,
+ // check spec at: https://web.archive.org/web/20171215131933/http://tiny.systems/software/soundProgrammer/WavFormatDocs.pdf
+ // RIFF chunk descriptor
+
+ writeUTFBytes(view, 0, 'RIFF');
+ view.setUint32(4, 36 + interleaved.length * 2, true);
+ writeUTFBytes(view, 8, 'WAVE'); // FMT sub-chunk
+
+ writeUTFBytes(view, 12, 'fmt ');
+ view.setUint32(16, 16, true);
+ view.setUint16(20, 1, true); // stereo (2 channels)
+
+ view.setUint16(22, 2, true);
+ view.setUint32(24, p5sound.audiocontext.sampleRate, true);
+ view.setUint32(28, p5sound.audiocontext.sampleRate * 4, true);
+ view.setUint16(32, 4, true);
+ view.setUint16(34, 16, true); // data sub-chunk
+
+ writeUTFBytes(view, 36, 'data');
+ view.setUint32(40, interleaved.length * 2, true); // write the PCM samples
+
+ var lng = interleaved.length;
+ var index = 44;
+ var volume = 1;
+
+ for (var i = 0; i < lng; i++) {
+ view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
+ index += 2;
+ }
+
+ return view;
+ } // helper methods to save waves
+
+
+ function interleave(leftChannel, rightChannel) {
+ var length = leftChannel.length + rightChannel.length;
+ var result = new Float32Array(length);
+ var inputIndex = 0;
+
+ for (var index = 0; index < length;) {
+ result[index++] = leftChannel[inputIndex];
+ result[index++] = rightChannel[inputIndex];
+ inputIndex++;
+ }
+
+ return result;
+ }
+
+ function writeUTFBytes(view, offset, string) {
+ var lng = string.length;
+
+ for (var i = 0; i < lng; i++) {
+ view.setUint8(offset + i, string.charCodeAt(i));
}
+ }
+
+ return {
+ convertToWav: convertToWav,
+ midiToFreq: midiToFreq,
+ noteToFreq: noteToFreq
};
- // same as setVolume, to match Processing Sound
- p5.SoundFile.prototype.amp = p5.SoundFile.prototype.setVolume;
- // these are the same thing
- p5.SoundFile.prototype.fade = p5.SoundFile.prototype.setVolume;
- p5.SoundFile.prototype.getVolume = function () {
- return this.output.gain.value;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 7 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.Add=function(t){this.createInsOuts(2,0),this._sum=this.input[0]=this.input[1]=this.output=new i.Gain,this._param=this.input[1]=new i.Signal(t),this._param.connect(this._sum)},i.extend(i.Add,i.Signal),i.Add.prototype.dispose=function(){return i.prototype.dispose.call(this),this._sum.dispose(),this._sum=null,this._param.dispose(),this._param=null,this},i.Add}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 8 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(19),__webpack_require__(39),__webpack_require__(40),__webpack_require__(11)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(t){return t.Type={Default:"number",Time:"time",Frequency:"frequency",TransportTime:"transportTime",Ticks:"ticks",NormalRange:"normalRange",AudioRange:"audioRange",Decibels:"db",Interval:"interval",BPM:"bpm",Positive:"positive",Cents:"cents",Degrees:"degrees",MIDI:"midi",BarsBeatsSixteenths:"barsBeatsSixteenths",Samples:"samples",Hertz:"hertz",Note:"note",Milliseconds:"milliseconds",Seconds:"seconds",Notation:"notation"},t.prototype.toSeconds=function(e){return this.isNumber(e)?e:this.isUndef(e)?this.now():this.isString(e)?new t.Time(e).toSeconds():e instanceof t.TimeBase?e.toSeconds():void 0},t.prototype.toFrequency=function(e){return this.isNumber(e)?e:this.isString(e)||this.isUndef(e)?new t.Frequency(e).valueOf():e instanceof t.TimeBase?e.toFrequency():void 0},t.prototype.toTicks=function(e){return this.isNumber(e)||this.isString(e)?new t.TransportTime(e).toTicks():this.isUndef(e)?t.Transport.ticks:e instanceof t.TimeBase?e.toTicks():void 0},t}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 9 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(21),__webpack_require__(8)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return window.GainNode&&!AudioContext.prototype.createGain&&(AudioContext.prototype.createGain=AudioContext.prototype.createGainNode),i.Gain=function(){var t=this.optionsObject(arguments,["gain","units"],i.Gain.defaults);this.input=this.output=this._gainNode=this.context.createGain(),this.gain=new i.Param({param:this._gainNode.gain,units:t.units,value:t.gain,convert:t.convert}),this._readOnly("gain")},i.extend(i.Gain),i.Gain.defaults={gain:1,convert:!0},i.Gain.prototype.dispose=function(){i.Param.prototype.dispose.call(this),this._gainNode.disconnect(),this._gainNode=null,this._writable("gain"),this.gain.dispose(),this.gain=null},i.prototype.createInsOuts=function(t,n){1===t?this.input=new i.Gain:1A p5.Filter uses a Web Audio Biquad Filter to filter + * the frequency response of an input source. Subclasses + * include:
+ *p5.LowPass
:
+ * Allows frequencies below the cutoff frequency to pass through,
+ * and attenuates frequencies above the cutoff.p5.HighPass
:
+ * The opposite of a lowpass filter. p5.BandPass
:
+ * Allows a range of frequencies to pass through and attenuates
+ * the frequencies below and above this frequency range.
+ * The .res()
method controls either width of the
+ * bandpass, or resonance of the low/highpass cutoff frequency.
*
- * var ball = {};
- * var soundFile;
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
*
- * function preload() {
- * soundFormats('ogg', 'mp3');
- * soundFile = loadSound('assets/beatbox.mp3');
+ * @class p5.Filter
+ * @extends p5.Effect
+ * @constructor
+ * @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass'
+ * @example
+ *
+ * var fft, noise, filter;
+ *
+ * function setup() {
+ * fill(255, 40, 255);
+ *
+ * filter = new p5.BandPass();
+ *
+ * noise = new p5.Noise();
+ * // disconnect unfiltered noise,
+ * // and connect to filter
+ * noise.disconnect();
+ * noise.connect(filter);
+ * noise.start();
+ *
+ * fft = new p5.FFT();
* }
*
* function draw() {
- * background(0);
- * ball.x = constrain(mouseX, 0, width);
- * ellipse(ball.x, height/2, 20, 20)
+ * background(30);
+ *
+ * // set the BandPass frequency based on mouseX
+ * var freq = map(mouseX, 0, width, 20, 10000);
+ * filter.freq(freq);
+ * // give the filter a narrow band (lower res = wider bandpass)
+ * filter.res(50);
+ *
+ * // draw filtered spectrum
+ * var spectrum = fft.analyze();
+ * noStroke();
+ * for (var i = 0; i < spectrum.length; i++) {
+ * var x = map(i, 0, spectrum.length, 0, width);
+ * var h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width/spectrum.length, h);
+ * }
+ *
+ * isMouseOverCanvas();
* }
*
- * function mousePressed(){
- * // map the ball's x location to a panning degree
- * // between -1.0 (left) and 1.0 (right)
- * var panning = map(ball.x, 0., width,-1.0, 1.0);
- * soundFile.pan(panning);
- * soundFile.play();
+ * function isMouseOverCanvas() {
+ * var mX = mouseX, mY = mouseY;
+ * if (mX > 0 && mX < width && mY < height && mY > 0) {
+ * noise.amp(0.5, 0.2);
+ * } else {
+ * noise.amp(0, 0.2);
+ * }
* }
- *
+ *
- * var song;
- *
- * function preload() {
- * song = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * song.loop();
- * }
- *
- * function draw() {
- * background(200);
- *
- * // Set the rate to a range between 0.1 and 4
- * // Changing the rate also alters the pitch
- * var speed = map(mouseY, 0.1, height, 0, 2);
- * speed = constrain(speed, 0.01, 4);
- * song.rate(speed);
- *
- * // Draw a circle to show what is going on
- * stroke(0);
- * fill(51, 100);
- * ellipse(mouseX, 100, 48, 48);
- * }
- *
- *
- * new p5.LowPass()
Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method setType('lowpass')
.
+ * See p5.Filter for methods.
+ *
+ * @class p5.LowPass
+ * @constructor
+ * @extends p5.Filter
+ */
+
+
+ p5.LowPass = function () {
+ p5.Filter.call(this, 'lowpass');
};
+
+ p5.LowPass.prototype = Object.create(p5.Filter.prototype);
/**
- * Return the sample rate of the sound file.
- *
- * @method sampleRate
- * @return {Number} [sampleRate]
- */
- p5.SoundFile.prototype.sampleRate = function () {
- return this.buffer.sampleRate;
+ * Constructor: new p5.HighPass()
Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method setType('highpass')
.
+ * See p5.Filter for methods.
+ *
+ * @class p5.HighPass
+ * @constructor
+ * @extends p5.Filter
+ */
+
+ p5.HighPass = function () {
+ p5.Filter.call(this, 'highpass');
};
+
+ p5.HighPass.prototype = Object.create(p5.Filter.prototype);
/**
- * Return the number of samples in a sound file.
- * Equal to sampleRate * duration.
- *
- * @method frames
- * @return {Number} [sampleCount]
- */
- p5.SoundFile.prototype.frames = function () {
- return this.buffer.length;
+ * Constructor: new p5.BandPass()
Filter.
+ * This is the same as creating a p5.Filter and then calling
+ * its method setType('bandpass')
.
+ * See p5.Filter for methods.
+ *
+ * @class p5.BandPass
+ * @constructor
+ * @extends p5.Filter
+ */
+
+ p5.BandPass = function () {
+ p5.Filter.call(this, 'bandpass');
};
+
+ p5.BandPass.prototype = Object.create(p5.Filter.prototype);
+ return p5.Filter;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 15 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(7),__webpack_require__(24),__webpack_require__(2),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(n){"use strict";return n.Subtract=function(t){this.createInsOuts(2,0),this._sum=this.input[0]=this.output=new n.Gain,this._neg=new n.Negate,this._param=this.input[1]=new n.Signal(t),this._param.chain(this._neg,this._sum)},n.extend(n.Subtract,n.Signal),n.Subtract.prototype.dispose=function(){return n.prototype.dispose.call(this),this._neg.dispose(),this._neg=null,this._sum.disconnect(),this._sum=null,this._param.dispose(),this._param=null,this},n.Subtract}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 16 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+/* WEBPACK VAR INJECTION */(function(global) {var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;
+
+global.TONE_SILENCE_VERSION_LOGGING = true;
+!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(33), __webpack_require__(11), __webpack_require__(0)], __WEBPACK_AMD_DEFINE_RESULT__ = (function (StartAudioContext, Context, Tone) {
+ // Create the Audio Context
+ var audiocontext = new window.AudioContext(); // Tone and p5.sound share the same audio context
+
+ Tone.context.dispose();
+ Tone.setContext(audiocontext);
/**
- * Returns an array of amplitude peaks in a p5.SoundFile that can be
- * used to draw a static waveform. Scans through the p5.SoundFile's
- * audio buffer to find the greatest amplitudes. Accepts one
- * parameter, 'length', which determines size of the array.
- * Larger arrays result in more precise waveform visualizations.
+ * Returns the Audio Context for this sketch. Useful for users + * who would like to dig deeper into the Web Audio API + * .
+ * + *Some browsers require users to startAudioContext + * with a user gesture, such as touchStarted in the example below.
+ * + * @method getAudioContext + * @return {Object} AudioContext for this sketch + * @example + *
+ * function draw() {
+ * background(255);
+ * textAlign(CENTER);
*
- * Inspired by Wavesurfer.js.
+ * if (getAudioContext().state !== 'running') {
+ * text('click to start audio', width/2, height/2);
+ * } else {
+ * text('audio is enabled', width/2, height/2);
+ * }
+ * }
*
- * @method getPeaks
- * @params {Number} [length] length is the size of the returned array.
- * Larger length results in more precision.
- * Defaults to 5*width of the browser window.
- * @returns {Float32Array} Array of peaks.
+ * function touchStarted() {
+ * if (getAudioContext().state !== 'running') {
+ * getAudioContext().resume();
+ * }
+ * var synth = new p5.MonoSynth();
+ * synth.play('A4', 0.5, 0, 0.2);
+ * }
+ *
+ *
It is a good practice to give users control over starting audio playback. + * This practice is enforced by Google Chrome's autoplay policy as of r70 + * (info), iOS Safari, and other browsers. + *
* - * @method reverseBuffer + *+ * userStartAudio() starts the Audio Context on a user gesture. It utilizes + * the StartAudioContext library by + * Yotam Mann (MIT Licence, 2016). Read more at https://github.com/tambien/StartAudioContext. + *
+ * + *Starting the audio context on a user gesture can be as simple as userStartAudio()
.
+ * Optional parameters let you decide on a specific element that will start the audio context,
+ * and/or call a function once the audio context is started.
- * var drum;
+ * function setup() {
+ * var myDiv = createDiv('click to start audio');
+ * myDiv.position(0, 0);
*
- * function preload() {
- * drum = loadSound('assets/drum.mp3');
- * }
+ * var mySynth = new p5.MonoSynth();
*
- * function setup() {
- * drum.reverseBuffer();
- * drum.play();
- * }
+ * // This won't play until the context has started
+ * mySynth.play('A6');
*
- *
- * Set the type of oscillation with setType(), or by instantiating a + * specific oscillator: p5.SinOsc, p5.TriOsc, p5.SqrOsc, or p5.SawOsc. + *
+ * + * @class p5.Oscillator + * @constructor + * @param {Number} [freq] frequency defaults to 440Hz + * @param {String} [type] type of oscillator. Options: + * 'sine' (default), 'triangle', + * 'sawtooth', 'square' + * @example + *
+ * var osc;
+ * var playing = false;
+ *
+ * function setup() {
+ * backgroundColor = color(255,0,255);
+ * textAlign(CENTER);
+ *
+ * osc = new p5.Oscillator();
+ * osc.setType('sine');
+ * osc.freq(240);
+ * osc.amp(0);
+ * osc.start();
+ * }
+ *
+ * function draw() {
+ * background(backgroundColor)
+ * text('click to play', width/2, height/2);
+ * }
+ *
+ * function mouseClicked() {
+ * if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) {
+ * if (!playing) {
+ * // ramp amplitude to 0.5 over 0.05 seconds
+ * osc.amp(0.5, 0.05);
+ * playing = true;
+ * backgroundColor = color(0,255,255);
+ * } else {
+ * // ramp amplitude to 0 over 0.5 seconds
+ * osc.amp(0, 0.5);
+ * playing = false;
+ * backgroundColor = color(255,0,255);
+ * }
+ * }
+ * }
+ *
+ * var osc = new p5.Oscillator(300);
+ * osc.start();
+ * osc.freq(40, 10);
+ *
- * var mySound;
- * function preload() {
- * mySound = loadSound('assets/beat.mp3');
- * }
+ * @method scale
+ * @for p5.Oscillator
+ * @param {Number} inMin input range minumum
+ * @param {Number} inMax input range maximum
+ * @param {Number} outMin input range minumum
+ * @param {Number} outMax input range maximum
+ * @return {p5.Oscillator} Oscillator Returns this oscillator
+ * with scaled output
+ */
+
+
+ p5.Oscillator.prototype.scale = function (inMin, inMax, outMin, outMax) {
+ var mapOutMin, mapOutMax;
+
+ if (arguments.length === 4) {
+ mapOutMin = p5.prototype.map(outMin, inMin, inMax, 0, 1) - 0.5;
+ mapOutMax = p5.prototype.map(outMax, inMin, inMax, 0, 1) - 0.5;
+ } else {
+ mapOutMin = arguments[0];
+ mapOutMax = arguments[1];
+ }
+
+ var scale = new Scale(mapOutMin, mapOutMax);
+ var thisChain = this.mathOps.length - 1;
+ var nextChain = this.output;
+ return sigChain(this, scale, thisChain, nextChain, Scale); // this.output.disconnect();
+ // this.output.connect(scale)
+ }; // ============================== //
+ // SinOsc, TriOsc, SqrOsc, SawOsc //
+ // ============================== //
+
+ /**
+ * Constructor: new p5.SinOsc()
.
+ * This creates a Sine Wave Oscillator and is
+ * equivalent to new p5.Oscillator('sine')
+ *
or creating a p5.Oscillator and then calling
+ * its method setType('sine')
.
+ * See p5.Oscillator for methods.
*
- * function setup() {
- * background(0);
- * noStroke();
- * fill(255);
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
+ * @class p5.SinOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+
+ p5.SinOsc = function (freq) {
+ p5.Oscillator.call(this, freq, 'sine');
+ };
+
+ p5.SinOsc.prototype = Object.create(p5.Oscillator.prototype);
+ /**
+ * Constructor: new p5.TriOsc()
.
+ * This creates a Triangle Wave Oscillator and is
+ * equivalent to new p5.Oscillator('triangle')
+ *
or creating a p5.Oscillator and then calling
+ * its method setType('triangle')
.
+ * See p5.Oscillator for methods.
*
- * // schedule calls to changeText
- * mySound.addCue(0.50, changeText, "hello" );
- * mySound.addCue(1.00, changeText, "p5" );
- * mySound.addCue(1.50, changeText, "what" );
- * mySound.addCue(2.00, changeText, "do" );
- * mySound.addCue(2.50, changeText, "you" );
- * mySound.addCue(3.00, changeText, "want" );
- * mySound.addCue(4.00, changeText, "to" );
- * mySound.addCue(5.00, changeText, "make" );
- * mySound.addCue(6.00, changeText, "?" );
- * }
+ * @class p5.TriOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+ p5.TriOsc = function (freq) {
+ p5.Oscillator.call(this, freq, 'triangle');
+ };
+
+ p5.TriOsc.prototype = Object.create(p5.Oscillator.prototype);
+ /**
+ * Constructor: new p5.SawOsc()
.
+ * This creates a SawTooth Wave Oscillator and is
+ * equivalent to new p5.Oscillator('sawtooth')
+ *
or creating a p5.Oscillator and then calling
+ * its method setType('sawtooth')
.
+ * See p5.Oscillator for methods.
*
- * function changeText(val) {
- * background(0);
- * text(val, width/2, height/2);
- * }
+ * @class p5.SawOsc
+ * @constructor
+ * @extends p5.Oscillator
+ * @param {Number} [freq] Set the frequency
+ */
+
+ p5.SawOsc = function (freq) {
+ p5.Oscillator.call(this, freq, 'sawtooth');
+ };
+
+ p5.SawOsc.prototype = Object.create(p5.Oscillator.prototype);
+ /**
+ * Constructor: new p5.SqrOsc()
.
+ * This creates a Square Wave Oscillator and is
+ * equivalent to new p5.Oscillator('square')
+ *
or creating a p5.Oscillator and then calling
+ * its method setType('square')
.
+ * See p5.Oscillator for methods.
*
- * function mouseClicked() {
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * if (mySound.isPlaying() ) {
- * mySound.stop();
- * } else {
- * mySound.play();
- * }
- * }
- * }
- *
+ * var monoSynth;
+ *
+ * function setup() {
+ * var cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ *
+ * monoSynth = new p5.MonoSynth();
+ *
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
+ * }
+ *
+ * function playSynth() {
+ * // time from now (in seconds)
+ * var time = 0;
+ * // note duration (in seconds)
+ * var dur = 0.25;
+ * // velocity (volume, from 0 to 1)
+ * var v = 0.2;
+ *
+ * monoSynth.play("G3", v, time, dur);
+ * monoSynth.play("C4", v, time += dur, dur);
+ *
+ * background(random(255), random(255), 255);
+ * text('click to play', width/2, height/2);
+ * }
+ *
+ * var monoSynth;
+ *
+ * function setup() {
+ * var cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ *
+ * monoSynth = new p5.MonoSynth();
+ *
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
+ * }
+ *
+ * function playSynth() {
+ * // time from now (in seconds)
+ * var time = 0;
+ * // note duration (in seconds)
+ * var dur = 1/6;
+ * // note velocity (volume, from 0 to 1)
+ * var v = random();
+ *
+ * monoSynth.play("Fb3", v, 0, dur);
+ * monoSynth.play("Gb3", v, time += dur, dur);
+ *
+ * background(random(255), random(255), 255);
+ * text('click to play', width/2, height/2);
+ * }
+ *
+ * var monoSynth = new p5.MonoSynth();
+ *
+ * function mousePressed() {
+ * monoSynth.triggerAttack("E3");
+ * }
+ *
+ * function mouseReleased() {
+ * monoSynth.triggerRelease();
+ * }
+ *
+ * var monoSynth = new p5.MonoSynth();
+ *
+ * function mousePressed() {
+ * monoSynth.triggerAttack("E3");
+ * }
+ *
+ * function mouseReleased() {
+ * monoSynth.triggerRelease();
+ * }
+ *
setRange
),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ */
+
+
+ p5.MonoSynth.prototype.setADSR = function (attack, decay, sustain, release) {
+ this.env.setADSR(attack, decay, sustain, release);
};
/**
- * Save a p5.SoundFile as a .wav file. The browser will prompt the user
- * to download the file to their device. To upload a file to a server, see
- * getBlob
- *
- * @method save
- * @param {String} [fileName] name of the resulting .wav file.
- * @example
- *
- * var inp, button, mySound;
- * var fileName = 'cool';
- * function preload() {
- * mySound = loadSound('assets/doorbell.mp3');
- * }
- * function setup() {
- * btn = createButton('click to save file');
- * btn.position(0, 0);
- * btn.mouseClicked(handleMouseClick);
- * }
- *
- * function handleMouseClick() {
- * mySound.save(fileName);
- * }
- *
- *
- * function preload() {
- * mySound = loadSound('assets/doorbell.mp3');
- * }
- *
- * function setup() {
- * noCanvas();
- * var soundBlob = mySound.getBlob();
- *
- * // Now we can send the blob to a server...
- * var serverUrl = 'https://jsonplaceholder.typicode.com/posts';
- * var httpRequestOptions = {
- * method: 'POST',
- * body: new FormData().append('soundBlob', soundBlob),
- * headers: new Headers({
- * 'Content-Type': 'multipart/form-data'
- * })
- * };
- * httpDo(serverUrl, httpRequestOptions);
- *
- * // We can also create an `ObjectURL` pointing to the Blob
- * var blobUrl = URL.createObjectURL(soundBlob);
- *
- * // The `
- * var sound, amplitude, cnv;
- *
- * function preload(){
- * sound = loadSound('assets/beat.mp3');
- * }
- * function setup() {
- * cnv = createCanvas(100,100);
- * amplitude = new p5.Amplitude();
- *
- * // start / stop the sound when canvas is clicked
- * cnv.mouseClicked(function() {
- * if (sound.isPlaying() ){
- * sound.stop();
- * } else {
- * sound.play();
- * }
- * });
- * }
- * function draw() {
- * background(0);
- * fill(255);
- * var level = amplitude.getLevel();
- * var size = map(level, 0, 1, 0, 200);
- * ellipse(width/2, height/2, size, size);
- * }
- *
- *
- * function preload(){
- * sound1 = loadSound('assets/beat.mp3');
- * sound2 = loadSound('assets/drum.mp3');
- * }
- * function setup(){
- * amplitude = new p5.Amplitude();
- * sound1.play();
- * sound2.play();
- * amplitude.setInput(sound2);
- * }
- * function draw() {
- * background(0);
- * fill(255);
- * var level = amplitude.getLevel();
- * var size = map(level, 0, 1, 0, 200);
- * ellipse(width/2, height/2, size, size);
- * }
- * function mouseClicked(){
- * sound1.stop();
- * sound2.stop();
- * }
- *
- * function preload(){
- * sound = loadSound('assets/beat.mp3');
- * }
- * function setup() {
- * amplitude = new p5.Amplitude();
- * sound.play();
- * }
- * function draw() {
- * background(0);
- * fill(255);
- * var level = amplitude.getLevel();
- * var size = map(level, 0, 1, 0, 200);
- * ellipse(width/2, height/2, size, size);
- * }
- * function mouseClicked(){
- * sound.stop();
- * }
- *
+ * var polySynth;
+ *
+ * function setup() {
+ * var cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ *
+ * polySynth = new p5.PolySynth();
+ *
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
+ * }
+ *
+ * function playSynth() {
+ * // note duration (in seconds)
+ * var dur = 1.5;
+ *
+ * // time from now (in seconds)
+ * var time = 0;
+ *
+ * // velocity (volume, from 0 to 1)
+ * var vel = 0.1;
+ *
+ * // notes can overlap with each other
+ * polySynth.play("G2", vel, 0, dur);
+ * polySynth.play("C3", vel, time += 1/3, dur);
+ * polySynth.play("G3", vel, time += 1/3, dur);
+ *
+ * background(random(255), random(255), 255);
+ * text('click to play', width/2, height/2);
+ * }
+ *
FFT (Fast Fourier Transform) is an analysis algorithm that - * isolates individual - * - * audio frequencies within a waveform.
- * - *Once instantiated, a p5.FFT object can return an array based on
- * two types of analyses:
• FFT.waveform()
computes
- * amplitude values along the time domain. The array indices correspond
- * to samples across a brief moment in time. Each value represents
- * amplitude of the waveform at that sample of time.
- * • FFT.analyze()
computes amplitude values along the
- * frequency domain. The array indices correspond to frequencies (i.e.
- * pitches), from the lowest to the highest that humans can hear. Each
- * value represents amplitude at that slice of the frequency spectrum.
- * Use with getEnergy()
to measure amplitude at specific
- * frequencies, or within a range of frequencies.
FFT analyzes a very short snapshot of sound called a sample
- * buffer. It returns an array of amplitude measurements, referred
- * to as bins
. The array is 1024 bins long by default.
- * You can change the bin array length, but it must be a power of 2
- * between 16 and 1024 in order for the FFT algorithm to function
- * correctly. The actual size of the FFT buffer is twice the
- * number of bins, so given a standard sample rate, the buffer is
- * 2048/44100 seconds long.
- * function preload(){
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
+ * var polySynth;
*
- * function setup(){
- * var cnv = createCanvas(100,100);
- * cnv.mouseClicked(togglePlay);
- * fft = new p5.FFT();
- * sound.amp(0.2);
+ * function setup() {
+ * var cnv = createCanvas(100, 100);
+ * cnv.mousePressed(playSynth);
+ *
+ * polySynth = new p5.PolySynth();
+ *
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
* }
*
- * function draw(){
- * background(0);
+ * function playSynth() {
+ * // note duration (in seconds)
+ * var dur = 0.1;
*
- * var spectrum = fft.analyze();
- * noStroke();
- * fill(0,255,0); // spectrum is green
- * for (var i = 0; i< spectrum.length; i++){
- * var x = map(i, 0, spectrum.length, 0, width);
- * var h = -height + map(spectrum[i], 0, 255, height, 0);
- * rect(x, height, width / spectrum.length, h )
- * }
+ * // time from now (in seconds)
+ * var time = 0;
*
- * var waveform = fft.waveform();
- * noFill();
- * beginShape();
- * stroke(255,0,0); // waveform is red
- * strokeWeight(1);
- * for (var i = 0; i< waveform.length; i++){
- * var x = map(i, 0, waveform.length, 0, width);
- * var y = map( waveform[i], -1, 1, 0, height);
- * vertex(x,y);
- * }
- * endShape();
+ * // velocity (volume, from 0 to 1)
+ * var vel = 0.1;
*
- * text('click to play/pause', 4, 10);
- * }
+ * polySynth.play("G2", vel, 0, dur);
+ * polySynth.play("C3", vel, 0, dur);
+ * polySynth.play("G3", vel, 0, dur);
*
- * // fade sound if mouse is over canvas
- * function togglePlay() {
- * if (sound.isPlaying()) {
- * sound.pause();
- * } else {
- * sound.loop();
- * }
+ * background(random(255), random(255), 255);
+ * text('click to play', width/2, height/2);
* }
*
setRange
),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ **/
+
+
+ p5.PolySynth.prototype.noteADSR = function (note, a, d, s, r, timeFromNow) {
+ var now = p5sound.audiocontext.currentTime;
+ var timeFromNow = timeFromNow || 0;
+ var t = now + timeFromNow;
+ this.audiovoices[this.notes[note].getValueAtTime(t)].setADSR(a, d, s, r);
};
/**
- * Returns an array of amplitude values (between -1.0 and +1.0) that represent
- * a snapshot of amplitude readings in a single buffer. Length will be
- * equal to bins (defaults to 1024). Can be used to draw the waveform
- * of a sound.
- *
- * @method waveform
- * @param {Number} [bins] Must be a power of two between
- * 16 and 1024. Defaults to 1024.
- * @param {String} [precision] If any value is provided, will return results
- * in a Float32 Array which is more precise
- * than a regular array.
- * @return {Array} Array Array of amplitude values (-1 to 1)
- * over time. Array length = bins.
+ * Set the PolySynths global envelope. This method modifies the envelopes of each
+ * monosynth so that all notes are played with this envelope.
*
- */
- p5.FFT.prototype.waveform = function () {
- var bins, mode, normalArray;
- for (var i = 0; i < arguments.length; i++) {
- if (typeof arguments[i] === 'number') {
- bins = arguments[i];
- this.analyser.fftSize = bins * 2;
- }
- if (typeof arguments[i] === 'string') {
- mode = arguments[i];
- }
- }
- // getFloatFrequencyData doesnt work in Safari as of 5/2015
- if (mode && !p5.prototype._isSafari()) {
- timeToFloat(this, this.timeDomain);
- this.analyser.getFloatTimeDomainData(this.timeDomain);
- return this.timeDomain;
- } else {
- timeToInt(this, this.timeDomain);
- this.analyser.getByteTimeDomainData(this.timeDomain);
- var normalArray = new Array();
- for (var j = 0; j < this.timeDomain.length; j++) {
- var scaled = p5.prototype.map(this.timeDomain[j], 0, 255, -1, 1);
- normalArray.push(scaled);
- }
- return normalArray;
- }
+ * @method setADSR
+ * @for p5.PolySynth
+ * @param {Number} [attackTime] Time (in seconds before envelope
+ * reaches Attack Level
+ * @param {Number} [decayTime] Time (in seconds) before envelope
+ * reaches Decay/Sustain Level
+ * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
+ * where 1.0 = attackLevel, 0.0 = releaseLevel.
+ * The susRatio determines the decayLevel and the level at which the
+ * sustain portion of the envelope will sustain.
+ * For example, if attackLevel is 0.4, releaseLevel is 0,
+ * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
+ * increased to 1.0 (using setRange
),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ **/
+
+
+ p5.PolySynth.prototype.setADSR = function (a, d, s, r) {
+ this.audiovoices.forEach(function (voice) {
+ voice.setADSR(a, d, s, r);
+ });
};
/**
- * Returns an array of amplitude values (between 0 and 255)
- * across the frequency spectrum. Length is equal to FFT bins
- * (1024 by default). The array indices correspond to frequencies
- * (i.e. pitches), from the lowest to the highest that humans can
- * hear. Each value represents amplitude at that slice of the
- * frequency spectrum. Must be called prior to using
- * getEnergy()
.
+ * Trigger the Attack, and Decay portion of a MonoSynth.
+ * Similar to holding down a key on a piano, but it will
+ * hold the sustain level until you let go.
*
- * @method analyze
- * @param {Number} [bins] Must be a power of two between
- * 16 and 1024. Defaults to 1024.
- * @param {Number} [scale] If "dB," returns decibel
- * float measurements between
- * -140 and 0 (max).
- * Otherwise returns integers from 0-255.
- * @return {Array} spectrum Array of energy (amplitude/volume)
- * values across the frequency spectrum.
- * Lowest energy (silence) = 0, highest
- * possible is 255.
+ * @method noteAttack
+ * @for p5.PolySynth
+ * @param {Number} [note] midi note on which attack should be triggered.
+ * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)/
+ * @param {Number} [secondsFromNow] time from now (in seconds)
* @example
*
- * var osc;
- * var fft;
- *
- * function setup(){
- * createCanvas(100,100);
- * osc = new p5.Oscillator();
- * osc.amp(0);
- * osc.start();
- * fft = new p5.FFT();
- * }
- *
- * function draw(){
- * background(0);
- *
- * var freq = map(mouseX, 0, 800, 20, 15000);
- * freq = constrain(freq, 1, 20000);
- * osc.freq(freq);
+ * var polySynth = new p5.PolySynth();
+ * var pitches = ["G", "D", "G", "C"];
+ * var octaves = [2, 3, 4];
*
- * var spectrum = fft.analyze();
- * noStroke();
- * fill(0,255,0); // spectrum is green
- * for (var i = 0; i< spectrum.length; i++){
- * var x = map(i, 0, spectrum.length, 0, width);
- * var h = -height + map(spectrum[i], 0, 255, height, 0);
- * rect(x, height, width / spectrum.length, h );
+ * function mousePressed() {
+ * // play a chord: multiple notes at the same time
+ * for (var i = 0; i < 4; i++) {
+ * var note = random(pitches) + random(octaves);
+ * polySynth.noteAttack(note, 0.1);
* }
- *
- * stroke(255);
- * text('Freq: ' + round(freq)+'Hz', 10, 10);
- *
- * isMouseOverCanvas();
* }
*
- * // only play sound when mouse is over canvas
- * function isMouseOverCanvas() {
- * var mX = mouseX, mY = mouseY;
- * if (mX > 0 && mX < width && mY < height && mY > 0) {
- * osc.amp(0.5, 0.2);
- * } else {
- * osc.amp(0, 0.2);
- * }
+ * function mouseReleased() {
+ * // release all voices
+ * polySynth.noteRelease();
* }
*
+ * var pitches = ["G", "D", "G", "C"];
+ * var octaves = [2, 3, 4];
+ * var polySynth = new p5.PolySynth();
*
- */
- p5.FFT.prototype.getEnergy = function (frequency1, frequency2) {
- var nyquist = p5sound.audiocontext.sampleRate / 2;
- if (frequency1 === 'bass') {
- frequency1 = this.bass[0];
- frequency2 = this.bass[1];
- } else if (frequency1 === 'lowMid') {
- frequency1 = this.lowMid[0];
- frequency2 = this.lowMid[1];
- } else if (frequency1 === 'mid') {
- frequency1 = this.mid[0];
- frequency2 = this.mid[1];
- } else if (frequency1 === 'highMid') {
- frequency1 = this.highMid[0];
- frequency2 = this.highMid[1];
- } else if (frequency1 === 'treble') {
- frequency1 = this.treble[0];
- frequency2 = this.treble[1];
- }
- if (typeof frequency1 !== 'number') {
- throw 'invalid input for getEnergy()';
- } else if (!frequency2) {
- // if only one parameter:
- var index = Math.round(frequency1 / nyquist * this.freqDomain.length);
- return this.freqDomain[index];
- } else if (frequency1 && frequency2) {
- // if two parameters:
- // if second is higher than first
- if (frequency1 > frequency2) {
- var swap = frequency2;
- frequency2 = frequency1;
- frequency1 = swap;
- }
- var lowIndex = Math.round(frequency1 / nyquist * this.freqDomain.length);
- var highIndex = Math.round(frequency2 / nyquist * this.freqDomain.length);
- var total = 0;
- var numFrequencies = 0;
- // add up all of the values for the frequencies
- for (var i = lowIndex; i <= highIndex; i++) {
- total += this.freqDomain[i];
- numFrequencies += 1;
+ * function mousePressed() {
+ * // play a chord: multiple notes at the same time
+ * for (var i = 0; i < 4; i++) {
+ * var note = random(pitches) + random(octaves);
+ * polySynth.noteAttack(note, 0.1);
+ * }
+ * }
+ *
+ * function mouseReleased() {
+ * // release all voices
+ * polySynth.noteRelease();
+ * }
+ *
- *
- *
- *function setup(){
- * cnv = createCanvas(100,100);
- * sound = new p5.AudioIn();
- * sound.start();
- * fft = new p5.FFT();
- * sound.connect(fft);
- *}
- *
- *
- *function draw(){
- *
- * var centroidplot = 0.0;
- * var spectralCentroid = 0;
- *
- *
- * background(0);
- * stroke(0,255,0);
- * var spectrum = fft.analyze();
- * fill(0,255,0); // spectrum is green
- *
- * //draw the spectrum
- * for (var i = 0; i< spectrum.length; i++){
- * var x = map(log(i), 0, log(spectrum.length), 0, width);
- * var h = map(spectrum[i], 0, 255, 0, height);
- * var rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length));
- * rect(x, height, rectangle_width, -h )
- * }
-
- * var nyquist = 22050;
- *
- * // get the centroid
- * spectralCentroid = fft.getCentroid();
- *
- * // the mean_freq_index calculation is for the display.
- * var mean_freq_index = spectralCentroid/(nyquist/spectrum.length);
- *
- * centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width);
- *
- *
- * stroke(255,0,0); // the line showing where the centroid is will be red
- *
- * rect(centroidplot, 0, width / spectrum.length, height)
- * noStroke();
- * fill(255,255,255); // text is white
- * text("centroid: ", 10, 20);
- * text(round(spectralCentroid)+" Hz", 10, 40);
- *}
- *
SoundFile object with a path to a file.
+ * + *The p5.SoundFile may not be available immediately because + * it loads the file information asynchronously.
+ * + *To do something with the sound as soon as it loads + * pass the name of a function as the second parameter.
+ * + *Only one file path is required. However, audio file formats + * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all + * web browsers. If you want to ensure compatability, instead of a single + * file path, you may include an Array of filepaths, and the browser will + * choose a format that works.
+ * + * @class p5.SoundFile + * @constructor + * @param {String|Array} path path to a sound file (String). Optionally, + * you may include multiple file formats in + * an array. Alternately, accepts an object + * from the HTML5 File API, or a p5.File. + * @param {Function} [successCallback] Name of a function to call once file loads + * @param {Function} [errorCallback] Name of a function to call if file fails to + * load. This function will receive an error or + * XMLHttpRequest object with information + * about what went wrong. + * @param {Function} [whileLoadingCallback] Name of a function to call while file + * is loading. That function will + * receive progress of the request to + * load the sound file + * (between 0 and 1) as its first + * parameter. This progress + * does not account for the additional + * time needed to decode the audio data. + * + * @example + *
+ *
+ * function preload() {
+ * soundFormats('mp3', 'ogg');
+ * mySound = loadSound('assets/doorbell.mp3');
+ * }
+ *
+ * function setup() {
+ * mySound.setVolume(0.1);
+ * mySound.play();
+ * }
+ *
+ *
+ * function preload() {
+ * mySound = loadSound('assets/doorbell.mp3');
+ * }
+ *
+ * function setup() {
+ * mySound.setVolume(0.1);
+ * mySound.play();
+ * }
+ *
restart
and
+ * sustain
. Play Mode determines what happens to a
+ * p5.SoundFile if it is triggered while in the middle of playback.
+ * In sustain mode, playback will continue simultaneous to the
+ * new playback. In restart mode, play() will stop playback
+ * and start over. With untilDone, a sound will play only if it's
+ * not already playing. Sustain is the default mode.
+ *
+ * @method playMode
+ * @for p5.SoundFile
+ * @param {String} str 'restart' or 'sustain' or 'untilDone'
+ * @example
+ *
+ * var mySound;
+ * function preload(){
+ * mySound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
+ * function mouseClicked() {
+ * mySound.playMode('sustain');
+ * mySound.play();
+ * }
+ * function keyPressed() {
+ * mySound.playMode('restart');
+ * mySound.play();
+ * }
+ *
+ *
+ * var soundFile;
+ *
+ * function preload() {
+ * soundFormats('ogg', 'mp3');
+ * soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3');
+ * }
+ * function setup() {
+ * background(0, 255, 0);
+ * soundFile.setVolume(0.1);
+ * soundFile.loop();
+ * }
+ * function keyTyped() {
+ * if (key == 'p') {
+ * soundFile.pause();
+ * background(255, 0, 0);
+ * }
+ * }
+ *
+ * function keyReleased() {
+ * if (key == 'p') {
+ * soundFile.play();
+ * background(0, 255, 0);
+ * }
+ * }
+ *
+ * p5.Signal is a constant audio-rate signal used by p5.Oscillator - * and p5.Envelope for modulation math.
- * - *This is necessary because Web Audio is processed on a seprate clock. - * For example, the p5 draw loop runs about 60 times per second. But - * the audio clock must process samples 44100 times per second. If we - * want to add a value to each of those samples, we can't do it in the - * draw loop, but we can do it by adding a constant-rate audio signal.This class mostly functions behind the scenes in p5.sound, and returns - * a Tone.Signal from the Tone.js library by Yotam Mann. - * If you want to work directly with audio signals for modular - * synthesis, check out - * tone.js.
- * - * @class p5.Signal - * @constructor - * @return {Tone.Signal} A Signal object from the Tone.js library - * @example - *
- * function setup() {
- * carrier = new p5.Oscillator('sine');
- * carrier.amp(1); // set amplitude
- * carrier.freq(220); // set frequency
- * carrier.start(); // start oscillating
+ * Returns true if a p5.SoundFile is playing, false if not (i.e.
+ * paused or stopped).
*
- * modulator = new p5.Oscillator('sawtooth');
- * modulator.disconnect();
- * modulator.amp(1);
- * modulator.freq(4);
- * modulator.start();
+ * @method isPlaying
+ * @for p5.SoundFile
+ * @return {Boolean}
+ */
+
+
+ p5.SoundFile.prototype.isPlaying = function () {
+ return this._playing;
+ };
+ /**
+ * Returns true if a p5.SoundFile is paused, false if not (i.e.
+ * playing or stopped).
*
- * // Modulator's default amplitude range is -1 to 1.
- * // Multiply it by -200, so the range is -200 to 200
- * // then add 220 so the range is 20 to 420
- * carrier.freq( modulator.mult(-200).add(220) );
- * }
- *
rampTime
parameter. For more
+ * complex fades, see the Envelope class.
*
- * @method add
- * @param {Number} number
- * @return {p5.Signal} object
+ * Alternately, you can pass in a signal source such as an
+ * oscillator to modulate the amplitude with an audio signal.
+ *
+ * @method setVolume
+ * @for p5.SoundFile
+ * @param {Number|Object} volume Volume (amplitude) between 0.0
+ * and 1.0 or modulating signal/oscillator
+ * @param {Number} [rampTime] Fade for t seconds
+ * @param {Number} [timeFromNow] Schedule this event to happen at
+ * t seconds in the future
*/
- Signal.prototype.add = function (num) {
- var add = new Add(num);
- // add.setInput(this);
- this.connect(add);
- return add;
+
+
+ p5.SoundFile.prototype.setVolume = function (vol, _rampTime, _tFromNow) {
+ if (typeof vol === 'number') {
+ var rampTime = _rampTime || 0;
+ var tFromNow = _tFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+ var currentVol = this.output.gain.value;
+ this.output.gain.cancelScheduledValues(now + tFromNow);
+ this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
+ this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
+ } else if (vol) {
+ vol.connect(this.output.gain);
+ } else {
+ // return the Gain Node
+ return this.output.gain;
+ }
+ }; // same as setVolume, to match Processing Sound
+
+
+ p5.SoundFile.prototype.amp = p5.SoundFile.prototype.setVolume; // these are the same thing
+
+ p5.SoundFile.prototype.fade = p5.SoundFile.prototype.setVolume;
+
+ p5.SoundFile.prototype.getVolume = function () {
+ return this.output.gain.value;
};
- Mult.prototype.add = Signal.prototype.add;
- Add.prototype.add = Signal.prototype.add;
- Scale.prototype.add = Signal.prototype.add;
/**
- * Multiply this signal by a constant value,
- * and return the resulting audio signal. Does
- * not change the value of the original signal,
- * instead it returns a new p5.SignalMult.
+ * Set the stereo panning of a p5.sound object to
+ * a floating point number between -1.0 (left) and 1.0 (right).
+ * Default is 0.0 (center).
*
- * @method mult
- * @param {Number} number to multiply
- * @return {p5.Signal} object
+ * @method pan
+ * @for p5.SoundFile
+ * @param {Number} [panValue] Set the stereo panner
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ * @example
+ *
+ *
+ * var ball = {};
+ * var soundFile;
+ *
+ * function preload() {
+ * soundFormats('ogg', 'mp3');
+ * soundFile = loadSound('assets/beatbox.mp3');
+ * }
+ *
+ * function draw() {
+ * background(0);
+ * ball.x = constrain(mouseX, 0, width);
+ * ellipse(ball.x, height/2, 20, 20)
+ * }
+ *
+ * function mousePressed(){
+ * // map the ball's x location to a panning degree
+ * // between -1.0 (left) and 1.0 (right)
+ * var panning = map(ball.x, 0., width,-1.0, 1.0);
+ * soundFile.pan(panning);
+ * soundFile.play();
+ * }
+ *
Creates a signal that oscillates between -1.0 and 1.0. - * By default, the oscillation takes the form of a sinusoidal - * shape ('sine'). Additional types include 'triangle', - * 'sawtooth' and 'square'. The frequency defaults to - * 440 oscillations per second (440Hz, equal to the pitch of an - * 'A' note).
- * - *Set the type of oscillation with setType(), or by instantiating a - * specific oscillator: p5.SinOsc, p5.TriOsc, p5.SqrOsc, or p5.SawOsc. - *
+ * Set the playback rate of a sound file. Will change the speed and the pitch. + * Values less than zero will reverse the audio buffer. * - * @class p5.Oscillator - * @constructor - * @param {Number} [freq] frequency defaults to 440Hz - * @param {String} [type] type of oscillator. Options: - * 'sine' (default), 'triangle', - * 'sawtooth', 'square' + * @method rate + * @for p5.SoundFile + * @param {Number} [playbackRate] Set the playback rate. 1.0 is normal, + * .5 is half-speed, 2.0 is twice as fast. + * Values less than zero play backwards. * @example *
- * var osc;
- * var playing = false;
+ * var song;
*
- * function setup() {
- * backgroundColor = color(255,0,255);
- * textAlign(CENTER);
+ * function preload() {
+ * song = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
*
- * osc = new p5.Oscillator();
- * osc.setType('sine');
- * osc.freq(240);
- * osc.amp(0);
- * osc.start();
+ * function setup() {
+ * song.loop();
* }
*
* function draw() {
- * background(backgroundColor)
- * text('click to play', width/2, height/2);
- * }
+ * background(200);
*
- * function mouseClicked() {
- * if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) {
- * if (!playing) {
- * // ramp amplitude to 0.5 over 0.05 seconds
- * osc.amp(0.5, 0.05);
- * playing = true;
- * backgroundColor = color(0,255,255);
- * } else {
- * // ramp amplitude to 0 over 0.5 seconds
- * osc.amp(0, 0.5);
- * playing = false;
- * backgroundColor = color(255,0,255);
- * }
- * }
+ * // Set the rate to a range between 0.1 and 4
+ * // Changing the rate also alters the pitch
+ * var speed = map(mouseY, 0.1, height, 0, 2);
+ * speed = constrain(speed, 0.01, 4);
+ * song.rate(speed);
+ *
+ * // Draw a circle to show what is going on
+ * stroke(0);
+ * fill(51, 100);
+ * ellipse(mouseX, 100, 48, 48);
* }
- *
- * var osc = new p5.Oscillator(300);
- * osc.start();
- * osc.freq(40, 10);
- *
new p5.SinOsc()
.
- * This creates a Sine Wave Oscillator and is
- * equivalent to new p5.Oscillator('sine')
- *
or creating a p5.Oscillator and then calling
- * its method setType('sine')
.
- * See p5.Oscillator for methods.
+ * @method addCue
+ * @for p5.SoundFile
+ * @param {Number} time Time in seconds, relative to this media
+ * element's playback. For example, to trigger
+ * an event every time playback reaches two
+ * seconds, pass in the number 2. This will be
+ * passed as the first parameter to
+ * the callback function.
+ * @param {Function} callback Name of a function that will be
+ * called at the given time. The callback will
+ * receive time and (optionally) param as its
+ * two parameters.
+ * @param {Object} [value] An object to be passed as the
+ * second parameter to the
+ * callback function.
+ * @return {Number} id ID of this cue,
+ * useful for removeCue(id)
+ * @example
+ *
+ * var mySound;
+ * function preload() {
+ * mySound = loadSound('assets/beat.mp3');
+ * }
*
- * @class p5.SinOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
- */
- p5.SinOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'sine');
- };
- p5.SinOsc.prototype = Object.create(p5.Oscillator.prototype);
- /**
- * Constructor: new p5.TriOsc()
.
- * This creates a Triangle Wave Oscillator and is
- * equivalent to new p5.Oscillator('triangle')
- *
or creating a p5.Oscillator and then calling
- * its method setType('triangle')
.
- * See p5.Oscillator for methods.
+ * function setup() {
+ * background(0);
+ * noStroke();
+ * fill(255);
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
*
- * @class p5.TriOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
- */
- p5.TriOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'triangle');
- };
- p5.TriOsc.prototype = Object.create(p5.Oscillator.prototype);
- /**
- * Constructor: new p5.SawOsc()
.
- * This creates a SawTooth Wave Oscillator and is
- * equivalent to new p5.Oscillator('sawtooth')
- *
or creating a p5.Oscillator and then calling
- * its method setType('sawtooth')
.
- * See p5.Oscillator for methods.
+ * // schedule calls to changeText
+ * mySound.addCue(0.50, changeText, "hello" );
+ * mySound.addCue(1.00, changeText, "p5" );
+ * mySound.addCue(1.50, changeText, "what" );
+ * mySound.addCue(2.00, changeText, "do" );
+ * mySound.addCue(2.50, changeText, "you" );
+ * mySound.addCue(3.00, changeText, "want" );
+ * mySound.addCue(4.00, changeText, "to" );
+ * mySound.addCue(5.00, changeText, "make" );
+ * mySound.addCue(6.00, changeText, "?" );
+ * }
*
- * @class p5.SawOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
+ * function changeText(val) {
+ * background(0);
+ * text(val, width/2, height/2);
+ * }
+ *
+ * function mouseClicked() {
+ * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
+ * if (mySound.isPlaying() ) {
+ * mySound.stop();
+ * } else {
+ * mySound.play();
+ * }
+ * }
+ * }
+ *
new p5.SqrOsc()
.
- * This creates a Square Wave Oscillator and is
- * equivalent to new p5.Oscillator('square')
- *
or creating a p5.Oscillator and then calling
- * its method setType('square')
.
- * See p5.Oscillator for methods.
+ * Remove a callback based on its ID. The ID is returned by the
+ * addCue method.
*
- * @class p5.SqrOsc
- * @constructor
- * @extends p5.Oscillator
- * @param {Number} [freq] Set the frequency
+ * @method removeCue
+ * @for p5.SoundFile
+ * @param {Number} id ID of the cue, as returned by addCue
*/
- p5.SqrOsc = function (freq) {
- p5.Oscillator.call(this, freq, 'square');
- };
- p5.SqrOsc.prototype = Object.create(p5.Oscillator.prototype);
-}(master, Tone_signal_Add, Tone_signal_Multiply, Tone_signal_Scale);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_core_Timeline;
-Tone_core_Timeline = function (Tone) {
- 'use strict';
- Tone.Timeline = function () {
- var options = this.optionsObject(arguments, ['memory'], Tone.Timeline.defaults);
- this._timeline = [];
- this._toRemove = [];
- this._iterating = false;
- this.memory = options.memory;
- };
- Tone.extend(Tone.Timeline);
- Tone.Timeline.defaults = { 'memory': Infinity };
- Object.defineProperty(Tone.Timeline.prototype, 'length', {
- get: function () {
- return this._timeline.length;
- }
- });
- Tone.Timeline.prototype.add = function (event) {
- if (this.isUndef(event.time)) {
- throw new Error('Tone.Timeline: events must have a time attribute');
- }
- if (this._timeline.length) {
- var index = this._search(event.time);
- this._timeline.splice(index + 1, 0, event);
- } else {
- this._timeline.push(event);
- }
- if (this.length > this.memory) {
- var diff = this.length - this.memory;
- this._timeline.splice(0, diff);
- }
- return this;
- };
- Tone.Timeline.prototype.remove = function (event) {
- if (this._iterating) {
- this._toRemove.push(event);
- } else {
- var index = this._timeline.indexOf(event);
- if (index !== -1) {
- this._timeline.splice(index, 1);
- }
- }
- return this;
- };
- Tone.Timeline.prototype.get = function (time) {
- var index = this._search(time);
- if (index !== -1) {
- return this._timeline[index];
- } else {
- return null;
- }
- };
- Tone.Timeline.prototype.peek = function () {
- return this._timeline[0];
- };
- Tone.Timeline.prototype.shift = function () {
- return this._timeline.shift();
- };
- Tone.Timeline.prototype.getAfter = function (time) {
- var index = this._search(time);
- if (index + 1 < this._timeline.length) {
- return this._timeline[index + 1];
- } else {
- return null;
- }
- };
- Tone.Timeline.prototype.getBefore = function (time) {
- var len = this._timeline.length;
- if (len > 0 && this._timeline[len - 1].time < time) {
- return this._timeline[len - 1];
- }
- var index = this._search(time);
- if (index - 1 >= 0) {
- return this._timeline[index - 1];
- } else {
- return null;
- }
- };
- Tone.Timeline.prototype.cancel = function (after) {
- if (this._timeline.length > 1) {
- var index = this._search(after);
- if (index >= 0) {
- if (this._timeline[index].time === after) {
- for (var i = index; i >= 0; i--) {
- if (this._timeline[i].time === after) {
- index = i;
- } else {
- break;
- }
- }
- this._timeline = this._timeline.slice(0, index);
- } else {
- this._timeline = this._timeline.slice(0, index + 1);
- }
- } else {
- this._timeline = [];
- }
- } else if (this._timeline.length === 1) {
- if (this._timeline[0].time >= after) {
- this._timeline = [];
+
+
+ p5.SoundFile.prototype.removeCue = function (id) {
+ var cueLength = this._cues.length;
+
+ for (var i = 0; i < cueLength; i++) {
+ var cue = this._cues[i];
+
+ if (cue.id === id) {
+ this._cues.splice(i, 1);
+
+ break;
}
}
- return this;
- };
- Tone.Timeline.prototype.cancelBefore = function (time) {
- if (this._timeline.length) {
- var index = this._search(time);
- if (index >= 0) {
- this._timeline = this._timeline.slice(index + 1);
- }
+
+ if (this._cues.length === 0) {// TO DO: remove callback
+ // this.elt.ontimeupdate = null
}
- return this;
};
- Tone.Timeline.prototype._search = function (time) {
- var beginning = 0;
- var len = this._timeline.length;
- var end = len;
- if (len > 0 && this._timeline[len - 1].time <= time) {
- return len - 1;
- }
- while (beginning < end) {
- var midPoint = Math.floor(beginning + (end - beginning) / 2);
- var event = this._timeline[midPoint];
- var nextEvent = this._timeline[midPoint + 1];
- if (event.time === time) {
- for (var i = midPoint; i < this._timeline.length; i++) {
- var testEvent = this._timeline[i];
- if (testEvent.time === time) {
- midPoint = i;
- }
- }
- return midPoint;
- } else if (event.time < time && nextEvent.time > time) {
- return midPoint;
- } else if (event.time > time) {
- end = midPoint;
- } else if (event.time < time) {
- beginning = midPoint + 1;
- }
- }
- return -1;
- };
- Tone.Timeline.prototype._iterate = function (callback, lowerBound, upperBound) {
- this._iterating = true;
- lowerBound = this.defaultArg(lowerBound, 0);
- upperBound = this.defaultArg(upperBound, this._timeline.length - 1);
- for (var i = lowerBound; i <= upperBound; i++) {
- callback(this._timeline[i]);
- }
- this._iterating = false;
- if (this._toRemove.length > 0) {
- for (var j = 0; j < this._toRemove.length; j++) {
- var index = this._timeline.indexOf(this._toRemove[j]);
- if (index !== -1) {
- this._timeline.splice(index, 1);
- }
+ /**
+ * Remove all of the callbacks that had originally been scheduled
+ * via the addCue method.
+ *
+ * @method clearCues
+ */
+
+
+ p5.SoundFile.prototype.clearCues = function () {
+ this._cues = []; // this.elt.ontimeupdate = null;
+ }; // private method that checks for cues to be fired if events
+ // have been scheduled using addCue(callback, time).
+
+
+ p5.SoundFile.prototype._onTimeUpdate = function (position) {
+ var playbackTime = position / this.buffer.sampleRate;
+ var cueLength = this._cues.length;
+
+ for (var i = 0; i < cueLength; i++) {
+ var cue = this._cues[i];
+ var callbackTime = cue.time;
+ var val = cue.val;
+
+ if (this._prevTime < callbackTime && callbackTime <= playbackTime) {
+ // pass the scheduled callbackTime as parameter to the callback
+ cue.callback(val);
}
- this._toRemove = [];
- }
- };
- Tone.Timeline.prototype.forEach = function (callback) {
- this._iterate(callback);
- return this;
- };
- Tone.Timeline.prototype.forEachBefore = function (time, callback) {
- var upperBound = this._search(time);
- if (upperBound !== -1) {
- this._iterate(callback, 0, upperBound);
- }
- return this;
- };
- Tone.Timeline.prototype.forEachAfter = function (time, callback) {
- var lowerBound = this._search(time);
- this._iterate(callback, lowerBound + 1);
- return this;
- };
- Tone.Timeline.prototype.forEachFrom = function (time, callback) {
- var lowerBound = this._search(time);
- while (lowerBound >= 0 && this._timeline[lowerBound].time >= time) {
- lowerBound--;
- }
- this._iterate(callback, lowerBound + 1);
- return this;
- };
- Tone.Timeline.prototype.forEachAtTime = function (time, callback) {
- var upperBound = this._search(time);
- if (upperBound !== -1) {
- this._iterate(function (event) {
- if (event.time === time) {
- callback(event);
- }
- }, 0, upperBound);
}
- return this;
+
+ this._prevTime = playbackTime;
};
- Tone.Timeline.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._timeline = null;
- this._toRemove = null;
+ /**
+ * Save a p5.SoundFile as a .wav file. The browser will prompt the user
+ * to download the file to their device. To upload a file to a server, see
+ * getBlob
+ *
+ * @method save
+ * @for p5.SoundFile
+ * @param {String} [fileName] name of the resulting .wav file.
+ * @example
+ *
+ * var inp, button, mySound;
+ * var fileName = 'cool';
+ * function preload() {
+ * mySound = loadSound('assets/doorbell.mp3');
+ * }
+ * function setup() {
+ * btn = createButton('click to save file');
+ * btn.position(0, 0);
+ * btn.mouseClicked(handleMouseClick);
+ * }
+ *
+ * function handleMouseClick() {
+ * mySound.save(fileName);
+ * }
+ *
+ *
+ * function preload() {
+ * mySound = loadSound('assets/doorbell.mp3');
+ * }
+ *
+ * function setup() {
+ * noCanvas();
+ * var soundBlob = mySound.getBlob();
+ *
+ * // Now we can send the blob to a server...
+ * var serverUrl = 'https://jsonplaceholder.typicode.com/posts';
+ * var httpRequestOptions = {
+ * method: 'POST',
+ * body: new FormData().append('soundBlob', soundBlob),
+ * headers: new Headers({
+ * 'Content-Type': 'multipart/form-data'
+ * })
+ * };
+ * httpDo(serverUrl, httpRequestOptions);
+ *
+ * // We can also create an `ObjectURL` pointing to the Blob
+ * var blobUrl = URL.createObjectURL(soundBlob);
+ *
+ * // The `
+ * var sound, amplitude, cnv;
+ *
+ * function preload(){
+ * sound = loadSound('assets/beat.mp3');
+ * }
+ * function setup() {
+ * cnv = createCanvas(100,100);
+ * amplitude = new p5.Amplitude();
+ *
+ * // start / stop the sound when canvas is clicked
+ * cnv.mouseClicked(function() {
+ * if (sound.isPlaying() ){
+ * sound.stop();
+ * } else {
+ * sound.play();
+ * }
+ * });
+ * }
+ * function draw() {
+ * background(0);
+ * fill(255);
+ * var level = amplitude.getLevel();
+ * var size = map(level, 0, 1, 0, 200);
+ * ellipse(width/2, height/2, size, size);
+ * }
+ *
+ *
+ * function preload(){
+ * sound1 = loadSound('assets/beat.mp3');
+ * sound2 = loadSound('assets/drum.mp3');
+ * }
+ * function setup(){
+ * amplitude = new p5.Amplitude();
+ * sound1.play();
+ * sound2.play();
+ * amplitude.setInput(sound2);
+ * }
+ * function draw() {
+ * background(0);
+ * fill(255);
+ * var level = amplitude.getLevel();
+ * var size = map(level, 0, 1, 0, 200);
+ * ellipse(width/2, height/2, size, size);
+ * }
+ * function mouseClicked(){
+ * sound1.stop();
+ * sound2.stop();
+ * }
+ *
+ * function preload(){
+ * sound = loadSound('assets/beat.mp3');
+ * }
+ * function setup() {
+ * amplitude = new p5.Amplitude();
+ * sound.play();
+ * }
+ * function draw() {
+ * background(0);
+ * fill(255);
+ * var level = amplitude.getLevel();
+ * var size = map(level, 0, 1, 0, 200);
+ * ellipse(width/2, height/2, size, size);
+ * }
+ * function mouseClicked(){
+ * sound.stop();
+ * }
+ *
Envelopes are pre-defined amplitude distribution over time.
- * Typically, envelopes are used to control the output volume
- * of an object, a series of fades referred to as Attack, Decay,
- * Sustain and Release (
- * ADSR
- * ). Envelopes can also control other Web Audio Parameters—for example, a p5.Envelope can
- * control an Oscillator's frequency like this: osc.freq(env)
.
Use setRange
to change the attack/release level.
- * Use setADSR
to change attackTime, decayTime, sustainPercent and releaseTime.
Use the play
method to play the entire envelope,
- * the ramp
method for a pingable trigger,
- * or triggerAttack
/
- * triggerRelease
to trigger noteOn/noteOff.
- * var attackLevel = 1.0;
- * var releaseLevel = 0;
- *
- * var attackTime = 0.001;
- * var decayTime = 0.2;
- * var susPercent = 0.2;
- * var releaseTime = 0.5;
- *
- * var env, triOsc;
- *
- * function setup() {
- * var cnv = createCanvas(100, 100);
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- *
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
- *
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
- *
- * cnv.mousePressed(playEnv);
- * }
+ * Smooth Amplitude analysis by averaging with the last analysis
+ * frame. Off by default.
*
- * function playEnv() {
- * env.play();
- * }
- *
FFT (Fast Fourier Transform) is an analysis algorithm that + * isolates individual + * + * audio frequencies within a waveform.
* - * @method set - * @param {Number} attackTime Time (in seconds) before level - * reaches attackLevel - * @param {Number} attackLevel Typically an amplitude between - * 0.0 and 1.0 - * @param {Number} decayTime Time - * @param {Number} decayLevel Amplitude (In a standard ADSR envelope, - * decayLevel = sustainLevel) - * @param {Number} releaseTime Release Time (in seconds) - * @param {Number} releaseLevel Amplitude + *Once instantiated, a p5.FFT object can return an array based on
+ * two types of analyses:
• FFT.waveform()
computes
+ * amplitude values along the time domain. The array indices correspond
+ * to samples across a brief moment in time. Each value represents
+ * amplitude of the waveform at that sample of time.
+ * • FFT.analyze()
computes amplitude values along the
+ * frequency domain. The array indices correspond to frequencies (i.e.
+ * pitches), from the lowest to the highest that humans can hear. Each
+ * value represents amplitude at that slice of the frequency spectrum.
+ * Use with getEnergy()
to measure amplitude at specific
+ * frequencies, or within a range of frequencies.
FFT analyzes a very short snapshot of sound called a sample
+ * buffer. It returns an array of amplitude measurements, referred
+ * to as bins
. The array is 1024 bins long by default.
+ * You can change the bin array length, but it must be a power of 2
+ * between 16 and 1024 in order for the FFT algorithm to function
+ * correctly. The actual size of the FFT buffer is twice the
+ * number of bins, so given a standard sample rate, the buffer is
+ * 2048/44100 seconds long.
- * var t1 = 0.1; // attack time in seconds
- * var l1 = 0.7; // attack level 0.0 to 1.0
- * var t2 = 0.3; // decay time in seconds
- * var l2 = 0.1; // decay level 0.0 to 1.0
- * var t3 = 0.2; // sustain time in seconds
- * var l3 = 0.5; // sustain level 0.0 to 1.0
- * // release level defaults to zero
+ * function preload(){
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
+ * }
*
- * var env;
- * var triOsc;
+ * function setup(){
+ * var cnv = createCanvas(100,100);
+ * cnv.mouseClicked(togglePlay);
+ * fft = new p5.FFT();
+ * sound.amp(0.2);
+ * }
*
- * function setup() {
+ * function draw(){
* background(0);
+ *
+ * var spectrum = fft.analyze();
* noStroke();
- * fill(255);
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
+ * fill(0,255,0); // spectrum is green
+ * for (var i = 0; i< spectrum.length; i++){
+ * var x = map(i, 0, spectrum.length, 0, width);
+ * var h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width / spectrum.length, h )
+ * }
*
- * env = new p5.Envelope(t1, l1, t2, l2, t3, l3);
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env); // give the env control of the triOsc's amp
- * triOsc.start();
+ * var waveform = fft.waveform();
+ * noFill();
+ * beginShape();
+ * stroke(255,0,0); // waveform is red
+ * strokeWeight(1);
+ * for (var i = 0; i< waveform.length; i++){
+ * var x = map(i, 0, waveform.length, 0, width);
+ * var y = map( waveform[i], -1, 1, 0, height);
+ * vertex(x,y);
+ * }
+ * endShape();
+ *
+ * text('click to play/pause', 4, 10);
* }
*
- * // mouseClick triggers envelope if over canvas
- * function mouseClicked() {
- * // is mouse over canvas?
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * env.play(triOsc);
+ * // fade sound if mouse is over canvas
+ * function togglePlay() {
+ * if (sound.isPlaying()) {
+ * sound.pause();
+ * } else {
+ * sound.loop();
* }
* }
*
setRange
),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- * @example
- *
- * var attackLevel = 1.0;
- * var releaseLevel = 0;
- *
- * var attackTime = 0.001;
- * var decayTime = 0.2;
- * var susPercent = 0.2;
- * var releaseTime = 0.5;
- *
- * var env, triOsc;
- *
- * function setup() {
- * var cnv = createCanvas(100, 100);
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- *
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
+ * Set the input source for the FFT analysis. If no source is
+ * provided, FFT will analyze all sound in the sketch.
*
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
+ * @method setInput
+ * @for p5.FFT
+ * @param {Object} [source] p5.sound object (or web audio API source node)
+ */
+
+
+ p5.FFT.prototype.setInput = function (source) {
+ if (!source) {
+ p5sound.fftMeter.connect(this.analyser);
+ } else {
+ if (source.output) {
+ source.output.connect(this.analyser);
+ } else if (source.connect) {
+ source.connect(this.analyser);
+ }
+
+ p5sound.fftMeter.disconnect();
+ }
+ };
+ /**
+ * Returns an array of amplitude values (between -1.0 and +1.0) that represent
+ * a snapshot of amplitude readings in a single buffer. Length will be
+ * equal to bins (defaults to 1024). Can be used to draw the waveform
+ * of a sound.
*
- * cnv.mousePressed(playEnv);
- * }
+ * @method waveform
+ * @for p5.FFT
+ * @param {Number} [bins] Must be a power of two between
+ * 16 and 1024. Defaults to 1024.
+ * @param {String} [precision] If any value is provided, will return results
+ * in a Float32 Array which is more precise
+ * than a regular array.
+ * @return {Array} Array Array of amplitude values (-1 to 1)
+ * over time. Array length = bins.
*
- * function playEnv() {
- * env.play();
- * }
- *
getEnergy()
.
*
- * @method setRange
- * @param {Number} aLevel attack level (defaults to 1)
- * @param {Number} rLevel release level (defaults to 0)
+ * @method analyze
+ * @for p5.FFT
+ * @param {Number} [bins] Must be a power of two between
+ * 16 and 1024. Defaults to 1024.
+ * @param {Number} [scale] If "dB," returns decibel
+ * float measurements between
+ * -140 and 0 (max).
+ * Otherwise returns integers from 0-255.
+ * @return {Array} spectrum Array of energy (amplitude/volume)
+ * values across the frequency spectrum.
+ * Lowest energy (silence) = 0, highest
+ * possible is 255.
* @example
*
- * var attackLevel = 1.0;
- * var releaseLevel = 0;
- *
- * var attackTime = 0.001;
- * var decayTime = 0.2;
- * var susPercent = 0.2;
- * var releaseTime = 0.5;
+ * var osc;
+ * var fft;
*
- * var env, triOsc;
+ * function setup(){
+ * createCanvas(100,100);
+ * osc = new p5.Oscillator();
+ * osc.amp(0);
+ * osc.start();
+ * fft = new p5.FFT();
+ * }
*
- * function setup() {
- * var cnv = createCanvas(100, 100);
+ * function draw(){
+ * background(0);
*
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
+ * var freq = map(mouseX, 0, 800, 20, 15000);
+ * freq = constrain(freq, 1, 20000);
+ * osc.freq(freq);
*
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
+ * var spectrum = fft.analyze();
+ * noStroke();
+ * fill(0,255,0); // spectrum is green
+ * for (var i = 0; i< spectrum.length; i++){
+ * var x = map(i, 0, spectrum.length, 0, width);
+ * var h = -height + map(spectrum[i], 0, 255, height, 0);
+ * rect(x, height, width / spectrum.length, h );
+ * }
*
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
+ * stroke(255);
+ * text('Freq: ' + round(freq)+'Hz', 10, 10);
*
- * cnv.mousePressed(playEnv);
+ * isMouseOverCanvas();
* }
*
- * function playEnv() {
- * env.play();
+ * // only play sound when mouse is over canvas
+ * function isMouseOverCanvas() {
+ * var mX = mouseX, mY = mouseY;
+ * if (mX > 0 && mX < width && mY < height && mY > 0) {
+ * osc.amp(0.5, 0.2);
+ * } else {
+ * osc.amp(0, 0.2);
+ * }
* }
*
- * var attackLevel = 1.0;
- * var releaseLevel = 0;
- *
- * var attackTime = 0.001;
- * var decayTime = 0.2;
- * var susPercent = 0.2;
- * var releaseTime = 0.5;
- *
- * var env, triOsc;
- *
- * function setup() {
- * var cnv = createCanvas(100, 100);
+ * Returns the
+ *
+ * spectral centroid of the input signal.
+ * NOTE: analyze() must be called prior to getCentroid(). Analyze()
+ * tells the FFT to analyze frequency data, and getCentroid() uses
+ * the results determine the spectral centroid.
*
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
+ * @method getCentroid
+ * @for p5.FFT
+ * @return {Number} Spectral Centroid Frequency Frequency of the spectral centroid in Hz.
*
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
*
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
+ * @example
+ *
*
- * cnv.mousePressed(playEnv);
- * }
*
- * function playEnv() {
- * // trigger env on triOsc, 0 seconds from now
- * // After decay, sustain for 0.2 seconds before release
- * env.play(triOsc, 0, 0.2);
- * }
- *
- */
- p5.Envelope.prototype.play = function (unit, secondsFromNow, susTime) {
- var tFromNow = secondsFromNow || 0;
- var susTime = susTime || 0;
- if (unit) {
- if (this.connection !== unit) {
- this.connect(unit);
- }
- }
- this.triggerAttack(unit, tFromNow);
- this.triggerRelease(unit, tFromNow + this.aTime + this.dTime + susTime);
- };
- /**
- * Trigger the Attack, and Decay portion of the Envelope.
- * Similar to holding down a key on a piano, but it will
- * hold the sustain level until you let go. Input can be
- * any p5.sound object, or a
- * Web Audio Param.
+ *function setup(){
+ * cnv = createCanvas(100,100);
+ * sound = new p5.AudioIn();
+ * sound.start();
+ * fft = new p5.FFT();
+ * sound.connect(fft);
+ *}
*
- * @method triggerAttack
- * @param {Object} unit p5.sound Object or Web Audio Param
- * @param {Number} secondsFromNow time from now (in seconds)
- * @example
- *
*
- * var attackLevel = 1.0;
- * var releaseLevel = 0;
+ *function draw(){
*
- * var attackTime = 0.001;
- * var decayTime = 0.3;
- * var susPercent = 0.4;
- * var releaseTime = 0.5;
+ * var centroidplot = 0.0;
+ * var spectralCentroid = 0;
*
- * var env, triOsc;
*
- * function setup() {
- * var cnv = createCanvas(100, 100);
- * background(200);
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
+ * background(0);
+ * stroke(0,255,0);
+ * var spectrum = fft.analyze();
+ * fill(0,255,0); // spectrum is green
*
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
+ * //draw the spectrum
+ * for (var i = 0; i< spectrum.length; i++){
+ * var x = map(log(i), 0, log(spectrum.length), 0, width);
+ * var h = map(spectrum[i], 0, 255, 0, height);
+ * var rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length));
+ * rect(x, height, rectangle_width, -h )
+ * }
+ * var nyquist = 22050;
*
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
+ * // get the centroid
+ * spectralCentroid = fft.getCentroid();
*
- * cnv.mousePressed(envAttack);
- * }
+ * // the mean_freq_index calculation is for the display.
+ * var mean_freq_index = spectralCentroid/(nyquist/spectrum.length);
*
- * function envAttack() {
- * console.log('trigger attack');
- * env.triggerAttack();
+ * centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width);
*
- * background(0,255,0);
- * text('attack!', width/2, height/2);
- * }
*
- * function mouseReleased() {
- * env.triggerRelease();
+ * stroke(255,0,0); // the line showing where the centroid is will be red
*
- * background(200);
- * text('click to play', width/2, height/2);
- * }
- *
+ * rect(centroidplot, 0, width / spectrum.length, height)
+ * noStroke();
+ * fill(255,255,255); // text is white
+ * text("centroid: ", 10, 20);
+ * text(round(spectralCentroid)+" Hz", 10, 40);
+ *}
+ *
- *
- * var attackLevel = 1.0;
- * var releaseLevel = 0;
- *
- * var attackTime = 0.001;
- * var decayTime = 0.3;
- * var susPercent = 0.4;
- * var releaseTime = 0.5;
- *
- * var env, triOsc;
- *
- * function setup() {
- * var cnv = createCanvas(100, 100);
- * background(200);
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- *
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
- * env.setRange(attackLevel, releaseLevel);
- *
- * triOsc = new p5.Oscillator('triangle');
- * triOsc.amp(env);
- * triOsc.start();
- * triOsc.freq(220);
- *
- * cnv.mousePressed(envAttack);
- * }
- *
- * function envAttack() {
- * console.log('trigger attack');
- * env.triggerAttack();
+ * Returns an array of average amplitude values for a given number
+ * of frequency bands split equally. N defaults to 16.
+ * NOTE: analyze() must be called prior to linAverages(). Analyze()
+ * tells the FFT to analyze frequency data, and linAverages() uses
+ * the results to group them into a smaller set of averages.
*
- * background(0,255,0);
- * text('attack!', width/2, height/2);
- * }
+ * @method linAverages
+ * @for p5.FFT
+ * @param {Number} N Number of returned frequency groups
+ * @return {Array} linearAverages Array of average amplitude values for each group
+ */
+
+
+ p5.FFT.prototype.linAverages = function (N) {
+ var N = N || 16; // This prevents undefined, null or 0 values of N
+
+ var spectrum = this.freqDomain;
+ var spectrumLength = spectrum.length;
+ var spectrumStep = Math.floor(spectrumLength / N);
+ var linearAverages = new Array(N); // Keep a second index for the current average group and place the values accordingly
+ // with only one loop in the spectrum data
+
+ var groupIndex = 0;
+
+ for (var specIndex = 0; specIndex < spectrumLength; specIndex++) {
+ linearAverages[groupIndex] = linearAverages[groupIndex] !== undefined ? (linearAverages[groupIndex] + spectrum[specIndex]) / 2 : spectrum[specIndex]; // Increase the group index when the last element of the group is processed
+
+ if (specIndex % spectrumStep === spectrumStep - 1) {
+ groupIndex++;
+ }
+ }
+
+ return linearAverages;
+ };
+ /**
+ * Returns an array of average amplitude values of the spectrum, for a given
+ * set of
+ * Octave Bands
+ * NOTE: analyze() must be called prior to logAverages(). Analyze()
+ * tells the FFT to analyze frequency data, and logAverages() uses
+ * the results to group them into a smaller set of averages.
*
- * function mouseReleased() {
- * env.triggerRelease();
+ * @method logAverages
+ * @for p5.FFT
+ * @param {Array} octaveBands Array of Octave Bands objects for grouping
+ * @return {Array} logAverages Array of average amplitude values for each group
+ */
+
+
+ p5.FFT.prototype.logAverages = function (octaveBands) {
+ var nyquist = p5sound.audiocontext.sampleRate / 2;
+ var spectrum = this.freqDomain;
+ var spectrumLength = spectrum.length;
+ var logAverages = new Array(octaveBands.length); // Keep a second index for the current average group and place the values accordingly
+ // With only one loop in the spectrum data
+
+ var octaveIndex = 0;
+
+ for (var specIndex = 0; specIndex < spectrumLength; specIndex++) {
+ var specIndexFrequency = Math.round(specIndex * nyquist / this.freqDomain.length); // Increase the group index if the current frequency exceeds the limits of the band
+
+ if (specIndexFrequency > octaveBands[octaveIndex].hi) {
+ octaveIndex++;
+ }
+
+ logAverages[octaveIndex] = logAverages[octaveIndex] !== undefined ? (logAverages[octaveIndex] + spectrum[specIndex]) / 2 : spectrum[specIndex];
+ }
+
+ return logAverages;
+ };
+ /**
+ * Calculates and Returns the 1/N
+ * Octave Bands
+ * N defaults to 3 and minimum central frequency to 15.625Hz.
+ * (1/3 Octave Bands ~= 31 Frequency Bands)
+ * Setting fCtr0 to a central value of a higher octave will ignore the lower bands
+ * and produce less frequency groups.
*
- * background(200);
- * text('click to play', width/2, height/2);
- * }
- *
setADSR(attackTime, decayTime)
- * as
- * time constants for simple exponential ramps.
- * If the value is higher than current value, it uses attackTime,
- * while a decrease uses decayTime.
+ * p5.Signal is a constant audio-rate signal used by p5.Oscillator + * and p5.Envelope for modulation math.
* - * @method ramp - * @param {Object} unit p5.sound Object or Web Audio Param - * @param {Number} secondsFromNow When to trigger the ramp - * @param {Number} v Target value - * @param {Number} [v2] Second target value (optional) - * @example - *
- * var env, osc, amp, cnv;
+ * This is necessary because Web Audio is processed on a seprate clock.
+ * For example, the p5 draw loop runs about 60 times per second. But
+ * the audio clock must process samples 44100 times per second. If we
+ * want to add a value to each of those samples, we can't do it in the
+ * draw loop, but we can do it by adding a constant-rate audio signal.This class mostly functions behind the scenes in p5.sound, and returns
+ * a Tone.Signal from the Tone.js library by Yotam Mann.
+ * If you want to work directly with audio signals for modular
+ * synthesis, check out
+ * tone.js.
*
+ * @class p5.Signal
+ * @constructor
+ * @return {Tone.Signal} A Signal object from the Tone.js library
+ * @example
+ *
* function setup() {
- * cnv = createCanvas(100, 100);
- * fill(0,255,0);
- * noStroke();
- *
- * env = new p5.Envelope();
- * env.setADSR(attackTime, decayTime);
- *
- * osc = new p5.Oscillator();
- * osc.amp(env);
- * osc.start();
- *
- * amp = new p5.Amplitude();
- *
- * cnv.mousePressed(triggerRamp);
- * }
- *
- * function triggerRamp() {
- * env.ramp(osc, 0, attackLevel, decayLevel);
- * }
+ * carrier = new p5.Oscillator('sine');
+ * carrier.amp(1); // set amplitude
+ * carrier.freq(220); // set frequency
+ * carrier.start(); // start oscillating
*
- * function draw() {
- * background(20,20,20);
- * text('click me', 10, 20);
- * var h = map(amp.getLevel(), 0, 0.4, 0, height);;
+ * modulator = new p5.Oscillator('sawtooth');
+ * modulator.disconnect();
+ * modulator.amp(1);
+ * modulator.freq(4);
+ * modulator.start();
*
- * rect(0, height, width, -h);
+ * // Modulator's default amplitude range is -1 to 1.
+ * // Multiply it by -200, so the range is -200 to 200
+ * // then add 220 so the range is 20 to 420
+ * carrier.freq( modulator.mult(-200).add(220) );
* }
*
*/
- p5.Envelope.prototype.ramp = function (unit, secondsFromNow, v1, v2) {
- var now = p5sound.audiocontext.currentTime;
- var tFromNow = secondsFromNow || 0;
- var t = now + tFromNow;
- var destination1 = this.checkExpInput(v1);
- var destination2 = typeof v2 !== 'undefined' ? this.checkExpInput(v2) : undefined;
- // connect env to unit if not already connected
- if (unit) {
- if (this.connection !== unit) {
- this.connect(unit);
- }
- }
- //get current value
- var currentVal = this.checkExpInput(this.control.getValueAtTime(t));
- // this.control.cancelScheduledValues(t);
- //if it's going up
- if (destination1 > currentVal) {
- this.control.setTargetAtTime(destination1, t, this._rampAttackTC);
- t += this._rampAttackTime;
- } else if (destination1 < currentVal) {
- this.control.setTargetAtTime(destination1, t, this._rampDecayTC);
- t += this._rampDecayTime;
- }
- // Now the second part of envelope begins
- if (destination2 === undefined)
- return;
- //if it's going up
- if (destination2 > destination1) {
- this.control.setTargetAtTime(destination2, t, this._rampAttackTC);
- } else if (destination2 < destination1) {
- this.control.setTargetAtTime(destination2, t, this._rampDecayTC);
- }
- };
- p5.Envelope.prototype.connect = function (unit) {
- this.connection = unit;
- // assume we're talking about output gain
- // unless given a different audio param
- if (unit instanceof p5.Oscillator || unit instanceof p5.SoundFile || unit instanceof p5.AudioIn || unit instanceof p5.Reverb || unit instanceof p5.Noise || unit instanceof p5.Filter || unit instanceof p5.Delay) {
- unit = unit.output.gain;
- }
- if (unit instanceof AudioParam) {
- //set the initial value
- unit.setValueAtTime(0, p5sound.audiocontext.currentTime);
- }
- if (unit instanceof p5.Signal) {
- unit.setValue(0);
- }
- this.output.connect(unit);
- };
- p5.Envelope.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
- }
+
+
+ p5.Signal = function (value) {
+ var s = new Signal(value); // p5sound.soundArray.push(s);
+
+ return s; // TODO: is this really a constructor?
};
- // Signal Math
/**
- * Add a value to the p5.Oscillator's output amplitude,
- * and return the oscillator. Calling this method
- * again will override the initial add() with new values.
+ * Fade to value, for smooth transitions
+ *
+ * @method fade
+ * @for p5.Signal
+ * @param {Number} value Value to set this signal
+ * @param {Number} [secondsFromNow] Length of fade, in seconds from now
+ */
+
+
+ Signal.prototype.fade = Signal.prototype.linearRampToValueAtTime;
+ Mult.prototype.fade = Signal.prototype.fade;
+ Add.prototype.fade = Signal.prototype.fade;
+ Scale.prototype.fade = Signal.prototype.fade;
+ /**
+ * Connect a p5.sound object or Web Audio node to this
+ * p5.Signal so that its amplitude values can be scaled.
+ *
+ * @method setInput
+ * @for p5.Signal
+ * @param {Object} input
+ */
+
+ Signal.prototype.setInput = function (_input) {
+ _input.connect(this);
+ };
+
+ Mult.prototype.setInput = Signal.prototype.setInput;
+ Add.prototype.setInput = Signal.prototype.setInput;
+ Scale.prototype.setInput = Signal.prototype.setInput; // signals can add / mult / scale themselves
+
+ /**
+ * Add a constant value to this audio signal,
+ * and return the resulting audio signal. Does
+ * not change the value of the original signal,
+ * instead it returns a new p5.SignalAdd.
*
* @method add
- * @param {Number} number Constant number to add
- * @return {p5.Envelope} Envelope Returns this envelope
- * with scaled output
+ * @for p5.Signal
+ * @param {Number} number
+ * @return {p5.Signal} object
*/
- p5.Envelope.prototype.add = function (num) {
- var add = new Add(num);
- var thisChain = this.mathOps.length;
- var nextChain = this.output;
- return p5.prototype._mathChain(this, add, thisChain, nextChain, Add);
+
+ Signal.prototype.add = function (num) {
+ var add = new Add(num); // add.setInput(this);
+
+ this.connect(add);
+ return add;
};
+
+ Mult.prototype.add = Signal.prototype.add;
+ Add.prototype.add = Signal.prototype.add;
+ Scale.prototype.add = Signal.prototype.add;
/**
- * Multiply the p5.Envelope's output amplitude
- * by a fixed value. Calling this method
- * again will override the initial mult() with new values.
+ * Multiply this signal by a constant value,
+ * and return the resulting audio signal. Does
+ * not change the value of the original signal,
+ * instead it returns a new p5.SignalMult.
*
* @method mult
- * @param {Number} number Constant number to multiply
- * @return {p5.Envelope} Envelope Returns this envelope
- * with scaled output
+ * @for p5.Signal
+ * @param {Number} number to multiply
+ * @return {p5.Signal} object
*/
- p5.Envelope.prototype.mult = function (num) {
- var mult = new Mult(num);
- var thisChain = this.mathOps.length;
- var nextChain = this.output;
- return p5.prototype._mathChain(this, mult, thisChain, nextChain, Mult);
+
+ Signal.prototype.mult = function (num) {
+ var mult = new Mult(num); // mult.setInput(this);
+
+ this.connect(mult);
+ return mult;
};
+
+ Mult.prototype.mult = Signal.prototype.mult;
+ Add.prototype.mult = Signal.prototype.mult;
+ Scale.prototype.mult = Signal.prototype.mult;
/**
- * Scale this envelope's amplitude values to a given
- * range, and return the envelope. Calling this method
- * again will override the initial scale() with new values.
+ * Scale this signal value to a given range,
+ * and return the result as an audio signal. Does
+ * not change the value of the original signal,
+ * instead it returns a new p5.SignalScale.
*
* @method scale
+ * @for p5.Signal
+ * @param {Number} number to multiply
* @param {Number} inMin input range minumum
* @param {Number} inMax input range maximum
* @param {Number} outMin input range minumum
* @param {Number} outMax input range maximum
- * @return {p5.Envelope} Envelope Returns this envelope
- * with scaled output
+ * @return {p5.Signal} object
*/
- p5.Envelope.prototype.scale = function (inMin, inMax, outMin, outMax) {
- var scale = new Scale(inMin, inMax, outMin, outMax);
- var thisChain = this.mathOps.length;
- var nextChain = this.output;
- return p5.prototype._mathChain(this, scale, thisChain, nextChain, Scale);
- };
- // get rid of the oscillator
- p5.Envelope.prototype.dispose = function () {
- // remove reference from soundArray
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
- this.disconnect();
- if (this.control) {
- this.control.dispose();
- this.control = null;
- }
- for (var i = 1; i < this.mathOps.length; i++) {
- this.mathOps[i].dispose();
+
+ Signal.prototype.scale = function (inMin, inMax, outMin, outMax) {
+ var mapOutMin, mapOutMax;
+
+ if (arguments.length === 4) {
+ mapOutMin = p5.prototype.map(outMin, inMin, inMax, 0, 1) - 0.5;
+ mapOutMax = p5.prototype.map(outMax, inMin, inMax, 0, 1) - 0.5;
+ } else {
+ mapOutMin = arguments[0];
+ mapOutMax = arguments[1];
}
+
+ var scale = new Scale(mapOutMin, mapOutMax);
+ this.connect(scale);
+ return scale;
};
- // Different name for backwards compatibility, replicates p5.Envelope class
- p5.Env = function (t1, l1, t2, l2, t3, l3) {
- console.warn('WARNING: p5.Env is now deprecated and may be removed in future versions. ' + 'Please use the new p5.Envelope instead.');
- p5.Envelope.call(this, t1, l1, t2, l2, t3, l3);
+
+ Mult.prototype.scale = Signal.prototype.scale;
+ Add.prototype.scale = Signal.prototype.scale;
+ Scale.prototype.scale = Signal.prototype.scale;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 39 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(20)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(o){o.Frequency=function(e,t){if(!(this instanceof o.Frequency))return new o.Frequency(e,t);o.TimeBase.call(this,e,t)},o.extend(o.Frequency,o.TimeBase),o.Frequency.prototype._primaryExpressions=Object.create(o.TimeBase.prototype._primaryExpressions),o.Frequency.prototype._primaryExpressions.midi={regexp:/^(\d+(?:\.\d+)?midi)/,method:function(e){return this.midiToFrequency(e)}},o.Frequency.prototype._primaryExpressions.note={regexp:/^([a-g]{1}(?:b|#|x|bb)?)(-?[0-9]+)/i,method:function(e,t){var r=n[e.toLowerCase()]+12*(parseInt(t)+1);return this.midiToFrequency(r)}},o.Frequency.prototype._primaryExpressions.tr={regexp:/^(\d+(?:\.\d+)?):(\d+(?:\.\d+)?):?(\d+(?:\.\d+)?)?/,method:function(e,t,r){var n=1;return e&&"0"!==e&&(n*=this._beatsToUnits(this._timeSignature()*parseFloat(e))),t&&"0"!==t&&(n*=this._beatsToUnits(parseFloat(t))),r&&"0"!==r&&(n*=this._beatsToUnits(parseFloat(r)/4)),n}},o.Frequency.prototype.transpose=function(e){return this._expr=function(e,t){return e()*this.intervalToFrequencyRatio(t)}.bind(this,this._expr,e),this},o.Frequency.prototype.harmonize=function(e){return this._expr=function(e,t){for(var r=e(),n=[],o=0;oEnvelopes are pre-defined amplitude distribution over time.
+ * Typically, envelopes are used to control the output volume
+ * of an object, a series of fades referred to as Attack, Decay,
+ * Sustain and Release (
+ * ADSR
+ * ). Envelopes can also control other Web Audio Parameters—for example, a p5.Envelope can
+ * control an Oscillator's frequency like this: osc.freq(env)
.
+ * Use setRange
to change the attack/release level.
+ * Use setADSR
to change attackTime, decayTime, sustainPercent and releaseTime.
+ * Use the play
method to play the entire envelope,
+ * the ramp
method for a pingable trigger,
+ * or triggerAttack
/
+ * triggerRelease
to trigger noteOn/noteOff.
+ *
+ * @class p5.Envelope
+ * @constructor
+ * @example
+ *
+ * var attackLevel = 1.0;
+ * var releaseLevel = 0;
+ *
+ * var attackTime = 0.001;
+ * var decayTime = 0.2;
+ * var susPercent = 0.2;
+ * var releaseTime = 0.5;
+ *
+ * var env, triOsc;
+ *
+ * function setup() {
+ * var cnv = createCanvas(100, 100);
+ *
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(attackLevel, releaseLevel);
+ *
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.start();
+ * triOsc.freq(220);
+ *
+ * cnv.mousePressed(playEnv);
+ * }
+ *
+ * function playEnv() {
+ * env.play();
+ * }
+ *
+ */
+
+
+ p5.Envelope = function (t1, l1, t2, l2, t3, l3) {
+ /**
+ * Time until envelope reaches attackLevel
+ * @property attackTime
+ */
+ this.aTime = t1 || 0.1;
+ /**
+ * Level once attack is complete.
+ * @property attackLevel
+ */
+
+ this.aLevel = l1 || 1;
+ /**
+ * Time until envelope reaches decayLevel.
+ * @property decayTime
+ */
+
+ this.dTime = t2 || 0.5;
+ /**
+ * Level after decay. The envelope will sustain here until it is released.
+ * @property decayLevel
+ */
+
+ this.dLevel = l2 || 0;
+ /**
+ * Duration of the release portion of the envelope.
+ * @property releaseTime
+ */
+
+ this.rTime = t3 || 0;
+ /**
+ * Level at the end of the release.
+ * @property releaseLevel
+ */
+
+ this.rLevel = l3 || 0;
+ this._rampHighPercentage = 0.98;
+ this._rampLowPercentage = 0.02;
+ this.output = p5sound.audiocontext.createGain();
+ this.control = new TimelineSignal();
+
+ this._init(); // this makes sure the envelope starts at zero
+
+
+ this.control.connect(this.output); // connect to the output
+
+ this.connection = null; // store connection
+ //array of math operation signal chaining
+
+ this.mathOps = [this.control]; //whether envelope should be linear or exponential curve
+
+ this.isExponential = false; // oscillator or buffer source to clear on env complete
+ // to save resources if/when it is retriggered
+
+ this.sourceToClear = null; // set to true if attack is set, then false on release
+
+ this.wasTriggered = false; // add to the soundArray so we can dispose of the env later
+
+ p5sound.soundArray.push(this);
+ }; // this init function just smooths the starting value to zero and gives a start point for the timeline
+ // - it was necessary to remove glitches at the beginning.
+
+
+ p5.Envelope.prototype._init = function () {
+ var now = p5sound.audiocontext.currentTime;
+ var t = now;
+ this.control.setTargetAtTime(0.00001, t, .001); //also, compute the correct time constants
+
+ this._setRampAD(this.aTime, this.dTime);
};
- p5.Env.prototype = Object.create(p5.Envelope.prototype);
-}(master, Tone_signal_Add, Tone_signal_Multiply, Tone_signal_Scale, Tone_signal_TimelineSignal);
-var pulse;
-'use strict';
-pulse = function () {
- var p5sound = master;
/**
- * Creates a Pulse object, an oscillator that implements
- * Pulse Width Modulation.
- * The pulse is created with two oscillators.
- * Accepts a parameter for frequency, and to set the
- * width between the pulses. See
- * p5.Oscillator
for a full list of methods.
+ * Reset the envelope with a series of time/value pairs.
*
- * @class p5.Pulse
- * @extends p5.Oscillator
- * @constructor
- * @param {Number} [freq] Frequency in oscillations per second (Hz)
- * @param {Number} [w] Width between the pulses (0 to 1.0,
- * defaults to 0)
+ * @method set
+ * @for p5.Envelope
+ * @param {Number} attackTime Time (in seconds) before level
+ * reaches attackLevel
+ * @param {Number} attackLevel Typically an amplitude between
+ * 0.0 and 1.0
+ * @param {Number} decayTime Time
+ * @param {Number} decayLevel Amplitude (In a standard ADSR envelope,
+ * decayLevel = sustainLevel)
+ * @param {Number} releaseTime Release Time (in seconds)
+ * @param {Number} releaseLevel Amplitude
* @example
*
- * var pulse;
+ * var t1 = 0.1; // attack time in seconds
+ * var l1 = 0.7; // attack level 0.0 to 1.0
+ * var t2 = 0.3; // decay time in seconds
+ * var l2 = 0.1; // decay level 0.0 to 1.0
+ * var t3 = 0.2; // sustain time in seconds
+ * var l3 = 0.5; // sustain level 0.0 to 1.0
+ * // release level defaults to zero
+ *
+ * var env;
+ * var triOsc;
+ *
* function setup() {
* background(0);
+ * noStroke();
+ * fill(255);
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
*
- * // Create and start the pulse wave oscillator
- * pulse = new p5.Pulse();
- * pulse.amp(0.5);
- * pulse.freq(220);
- * pulse.start();
+ * env = new p5.Envelope(t1, l1, t2, l2, t3, l3);
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env); // give the env control of the triOsc's amp
+ * triOsc.start();
* }
*
- * function draw() {
- * var w = map(mouseX, 0, width, 0, 1);
- * w = constrain(w, 0, 1);
- * pulse.width(w)
+ * // mouseClick triggers envelope if over canvas
+ * function mouseClicked() {
+ * // is mouse over canvas?
+ * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
+ * env.play(triOsc);
+ * }
* }
*
+ *
*/
- p5.Pulse = function (freq, w) {
- p5.Oscillator.call(this, freq, 'sawtooth');
- // width of PWM, should be betw 0 to 1.0
- this.w = w || 0;
- // create a second oscillator with inverse frequency
- this.osc2 = new p5.SawOsc(freq);
- // create a delay node
- this.dNode = p5sound.audiocontext.createDelay();
- // dc offset
- this.dcOffset = createDCOffset();
- this.dcGain = p5sound.audiocontext.createGain();
- this.dcOffset.connect(this.dcGain);
- this.dcGain.connect(this.output);
- // set delay time based on PWM width
- this.f = freq || 440;
- var mW = this.w / this.oscillator.frequency.value;
- this.dNode.delayTime.value = mW;
- this.dcGain.gain.value = 1.7 * (0.5 - this.w);
- // disconnect osc2 and connect it to delay, which is connected to output
- this.osc2.disconnect();
- this.osc2.panner.disconnect();
- this.osc2.amp(-1);
- // inverted amplitude
- this.osc2.output.connect(this.dNode);
- this.dNode.connect(this.output);
- this.output.gain.value = 1;
- this.output.connect(this.panner);
+
+
+ p5.Envelope.prototype.set = function (t1, l1, t2, l2, t3, l3) {
+ this.aTime = t1;
+ this.aLevel = l1;
+ this.dTime = t2 || 0;
+ this.dLevel = l2 || 0;
+ this.rTime = t3 || 0;
+ this.rLevel = l3 || 0; // set time constants for ramp
+
+ this._setRampAD(t1, t2);
};
- p5.Pulse.prototype = Object.create(p5.Oscillator.prototype);
/**
- * Set the width of a Pulse object (an oscillator that implements
- * Pulse Width Modulation).
+ * Set values like a traditional
+ *
+ * ADSR envelope
+ * .
*
- * @method width
- * @param {Number} [width] Width between the pulses (0 to 1.0,
- * defaults to 0)
+ * @method setADSR
+ * @for p5.Envelope
+ * @param {Number} attackTime Time (in seconds before envelope
+ * reaches Attack Level
+ * @param {Number} [decayTime] Time (in seconds) before envelope
+ * reaches Decay/Sustain Level
+ * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
+ * where 1.0 = attackLevel, 0.0 = releaseLevel.
+ * The susRatio determines the decayLevel and the level at which the
+ * sustain portion of the envelope will sustain.
+ * For example, if attackLevel is 0.4, releaseLevel is 0,
+ * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
+ * increased to 1.0 (using setRange
),
+ * then decayLevel would increase proportionally, to become 0.5.
+ * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
+ * @example
+ *
+ * var attackLevel = 1.0;
+ * var releaseLevel = 0;
+ *
+ * var attackTime = 0.001;
+ * var decayTime = 0.2;
+ * var susPercent = 0.2;
+ * var releaseTime = 0.5;
+ *
+ * var env, triOsc;
+ *
+ * function setup() {
+ * var cnv = createCanvas(100, 100);
+ *
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(attackLevel, releaseLevel);
+ *
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.start();
+ * triOsc.freq(220);
+ *
+ * cnv.mousePressed(playEnv);
+ * }
+ *
+ * function playEnv() {
+ * env.play();
+ * }
+ *
*/
- p5.Pulse.prototype.width = function (w) {
- if (typeof w === 'number') {
- if (w <= 1 && w >= 0) {
- this.w = w;
- // set delay time based on PWM width
- // var mW = map(this.w, 0, 1.0, 0, 1/this.f);
- var mW = this.w / this.oscillator.frequency.value;
- this.dNode.delayTime.value = mW;
- }
- this.dcGain.gain.value = 1.7 * (0.5 - this.w);
- } else {
- w.connect(this.dNode.delayTime);
- var sig = new p5.SignalAdd(-0.5);
- sig.setInput(w);
- sig = sig.mult(-1);
- sig = sig.mult(1.7);
- sig.connect(this.dcGain.gain);
- }
- };
- p5.Pulse.prototype.start = function (f, time) {
- var now = p5sound.audiocontext.currentTime;
- var t = time || 0;
- if (!this.started) {
- var freq = f || this.f;
- var type = this.oscillator.type;
- this.oscillator = p5sound.audiocontext.createOscillator();
- this.oscillator.frequency.setValueAtTime(freq, now);
- this.oscillator.type = type;
- this.oscillator.connect(this.output);
- this.oscillator.start(t + now);
- // set up osc2
- this.osc2.oscillator = p5sound.audiocontext.createOscillator();
- this.osc2.oscillator.frequency.setValueAtTime(freq, t + now);
- this.osc2.oscillator.type = type;
- this.osc2.oscillator.connect(this.osc2.output);
- this.osc2.start(t + now);
- this.freqNode = [
- this.oscillator.frequency,
- this.osc2.oscillator.frequency
- ];
- // start dcOffset, too
- this.dcOffset = createDCOffset();
- this.dcOffset.connect(this.dcGain);
- this.dcOffset.start(t + now);
- // if LFO connections depend on these oscillators
- if (this.mods !== undefined && this.mods.frequency !== undefined) {
- this.mods.frequency.connect(this.freqNode[0]);
- this.mods.frequency.connect(this.freqNode[1]);
- }
- this.started = true;
- this.osc2.started = true;
- }
- };
- p5.Pulse.prototype.stop = function (time) {
- if (this.started) {
- var t = time || 0;
- var now = p5sound.audiocontext.currentTime;
- this.oscillator.stop(t + now);
- if (this.osc2.oscillator) {
- this.osc2.oscillator.stop(t + now);
- }
- this.dcOffset.stop(t + now);
- this.started = false;
- this.osc2.started = false;
- }
+
+
+ p5.Envelope.prototype.setADSR = function (aTime, dTime, sPercent, rTime) {
+ this.aTime = aTime;
+ this.dTime = dTime || 0; // lerp
+
+ this.sPercent = sPercent || 0;
+ this.dLevel = typeof sPercent !== 'undefined' ? sPercent * (this.aLevel - this.rLevel) + this.rLevel : 0;
+ this.rTime = rTime || 0; // also set time constants for ramp
+
+ this._setRampAD(aTime, dTime);
};
- p5.Pulse.prototype.freq = function (val, rampTime, tFromNow) {
- if (typeof val === 'number') {
- this.f = val;
- var now = p5sound.audiocontext.currentTime;
- var rampTime = rampTime || 0;
- var tFromNow = tFromNow || 0;
- var currentFreq = this.oscillator.frequency.value;
- this.oscillator.frequency.cancelScheduledValues(now);
- this.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow);
- this.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
- this.osc2.oscillator.frequency.cancelScheduledValues(now);
- this.osc2.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow);
- this.osc2.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
- if (this.freqMod) {
- this.freqMod.output.disconnect();
- this.freqMod = null;
- }
- } else if (val.output) {
- val.output.disconnect();
- val.output.connect(this.oscillator.frequency);
- val.output.connect(this.osc2.oscillator.frequency);
- this.freqMod = val;
- }
+ /**
+ * Set max (attackLevel) and min (releaseLevel) of envelope.
+ *
+ * @method setRange
+ * @for p5.Envelope
+ * @param {Number} aLevel attack level (defaults to 1)
+ * @param {Number} rLevel release level (defaults to 0)
+ * @example
+ *
+ * var attackLevel = 1.0;
+ * var releaseLevel = 0;
+ *
+ * var attackTime = 0.001;
+ * var decayTime = 0.2;
+ * var susPercent = 0.2;
+ * var releaseTime = 0.5;
+ *
+ * var env, triOsc;
+ *
+ * function setup() {
+ * var cnv = createCanvas(100, 100);
+ *
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(attackLevel, releaseLevel);
+ *
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.start();
+ * triOsc.freq(220);
+ *
+ * cnv.mousePressed(playEnv);
+ * }
+ *
+ * function playEnv() {
+ * env.play();
+ * }
+ *
+ */
+
+
+ p5.Envelope.prototype.setRange = function (aLevel, rLevel) {
+ this.aLevel = aLevel || 1;
+ this.rLevel = rLevel || 0; // not sure if this belongs here:
+ // {Number} [dLevel] decay/sustain level (optional)
+ // if (typeof(dLevel) !== 'undefined') {
+ // this.dLevel = dLevel
+ // } else if (this.sPercent) {
+ // this.dLevel = this.sPercent ? this.sPercent * (this.aLevel - this.rLevel) + this.rLevel : 0;
+ // }
+ }; // private (undocumented) method called when ADSR is set to set time constants for ramp
+ //
+ // Set the
+ // time constants for simple exponential ramps.
+ // The larger the time constant value, the slower the
+ // transition will be.
+ //
+ // method _setRampAD
+ // param {Number} attackTimeConstant attack time constant
+ // param {Number} decayTimeConstant decay time constant
+ //
+
+
+ p5.Envelope.prototype._setRampAD = function (t1, t2) {
+ this._rampAttackTime = this.checkExpInput(t1);
+ this._rampDecayTime = this.checkExpInput(t2);
+ var TCDenominator = 1.0; /// Aatish Bhatia's calculation for time constant for rise(to adjust 1/1-e calculation to any percentage)
+
+ TCDenominator = Math.log(1.0 / this.checkExpInput(1.0 - this._rampHighPercentage));
+ this._rampAttackTC = t1 / this.checkExpInput(TCDenominator);
+ TCDenominator = Math.log(1.0 / this._rampLowPercentage);
+ this._rampDecayTC = t2 / this.checkExpInput(TCDenominator);
+ }; // private method
+
+
+ p5.Envelope.prototype.setRampPercentages = function (p1, p2) {
+ //set the percentages that the simple exponential ramps go to
+ this._rampHighPercentage = this.checkExpInput(p1);
+ this._rampLowPercentage = this.checkExpInput(p2);
+ var TCDenominator = 1.0; //now re-compute the time constants based on those percentages
+ /// Aatish Bhatia's calculation for time constant for rise(to adjust 1/1-e calculation to any percentage)
+
+ TCDenominator = Math.log(1.0 / this.checkExpInput(1.0 - this._rampHighPercentage));
+ this._rampAttackTC = this._rampAttackTime / this.checkExpInput(TCDenominator);
+ TCDenominator = Math.log(1.0 / this._rampLowPercentage);
+ this._rampDecayTC = this._rampDecayTime / this.checkExpInput(TCDenominator);
};
- // inspiration: http://webaudiodemos.appspot.com/oscilloscope/
- function createDCOffset() {
- var ac = p5sound.audiocontext;
- var buffer = ac.createBuffer(1, 2048, ac.sampleRate);
- var data = buffer.getChannelData(0);
- for (var i = 0; i < 2048; i++)
- data[i] = 1;
- var bufferSource = ac.createBufferSource();
- bufferSource.buffer = buffer;
- bufferSource.loop = true;
- return bufferSource;
- }
-}(master, oscillator);
-var noise;
-'use strict';
-noise = function () {
- var p5sound = master;
/**
- * Noise is a type of oscillator that generates a buffer with random values.
+ * Assign a parameter to be controlled by this envelope.
+ * If a p5.Sound object is given, then the p5.Envelope will control its
+ * output gain. If multiple inputs are provided, the env will
+ * control all of them.
*
- * @class p5.Noise
- * @extends p5.Oscillator
- * @constructor
- * @param {String} type Type of noise can be 'white' (default),
- * 'brown' or 'pink'.
+ * @method setInput
+ * @for p5.Envelope
+ * @param {Object} [...inputs] A p5.sound object or
+ * Web Audio Param.
*/
- p5.Noise = function (type) {
- var assignType;
- p5.Oscillator.call(this);
- delete this.f;
- delete this.freq;
- delete this.oscillator;
- if (type === 'brown') {
- assignType = _brownNoise;
- } else if (type === 'pink') {
- assignType = _pinkNoise;
- } else {
- assignType = _whiteNoise;
+
+
+ p5.Envelope.prototype.setInput = function () {
+ for (var i = 0; i < arguments.length; i++) {
+ this.connect(arguments[i]);
}
- this.buffer = assignType;
};
- p5.Noise.prototype = Object.create(p5.Oscillator.prototype);
- // generate noise buffers
- var _whiteNoise = function () {
- var bufferSize = 2 * p5sound.audiocontext.sampleRate;
- var whiteBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
- var noiseData = whiteBuffer.getChannelData(0);
- for (var i = 0; i < bufferSize; i++) {
- noiseData[i] = Math.random() * 2 - 1;
- }
- whiteBuffer.type = 'white';
- return whiteBuffer;
- }();
- var _pinkNoise = function () {
- var bufferSize = 2 * p5sound.audiocontext.sampleRate;
- var pinkBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
- var noiseData = pinkBuffer.getChannelData(0);
- var b0, b1, b2, b3, b4, b5, b6;
- b0 = b1 = b2 = b3 = b4 = b5 = b6 = 0;
- for (var i = 0; i < bufferSize; i++) {
- var white = Math.random() * 2 - 1;
- b0 = 0.99886 * b0 + white * 0.0555179;
- b1 = 0.99332 * b1 + white * 0.0750759;
- b2 = 0.969 * b2 + white * 0.153852;
- b3 = 0.8665 * b3 + white * 0.3104856;
- b4 = 0.55 * b4 + white * 0.5329522;
- b5 = -0.7616 * b5 - white * 0.016898;
- noiseData[i] = b0 + b1 + b2 + b3 + b4 + b5 + b6 + white * 0.5362;
- noiseData[i] *= 0.11;
- // (roughly) compensate for gain
- b6 = white * 0.115926;
- }
- pinkBuffer.type = 'pink';
- return pinkBuffer;
- }();
- var _brownNoise = function () {
- var bufferSize = 2 * p5sound.audiocontext.sampleRate;
- var brownBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
- var noiseData = brownBuffer.getChannelData(0);
- var lastOut = 0;
- for (var i = 0; i < bufferSize; i++) {
- var white = Math.random() * 2 - 1;
- noiseData[i] = (lastOut + 0.02 * white) / 1.02;
- lastOut = noiseData[i];
- noiseData[i] *= 3.5;
- }
- brownBuffer.type = 'brown';
- return brownBuffer;
- }();
/**
- * Set type of noise to 'white', 'pink' or 'brown'.
- * White is the default.
+ * Set whether the envelope ramp is linear (default) or exponential.
+ * Exponential ramps can be useful because we perceive amplitude
+ * and frequency logarithmically.
*
- * @method setType
- * @param {String} [type] 'white', 'pink' or 'brown'
+ * @method setExp
+ * @for p5.Envelope
+ * @param {Boolean} isExp true is exponential, false is linear
*/
- p5.Noise.prototype.setType = function (type) {
- switch (type) {
- case 'white':
- this.buffer = _whiteNoise;
- break;
- case 'pink':
- this.buffer = _pinkNoise;
- break;
- case 'brown':
- this.buffer = _brownNoise;
- break;
- default:
- this.buffer = _whiteNoise;
- }
- if (this.started) {
- var now = p5sound.audiocontext.currentTime;
- this.stop(now);
- this.start(now + 0.01);
- }
- };
- p5.Noise.prototype.getType = function () {
- return this.buffer.type;
- };
- p5.Noise.prototype.start = function () {
- if (this.started) {
- this.stop();
- }
- this.noise = p5sound.audiocontext.createBufferSource();
- this.noise.buffer = this.buffer;
- this.noise.loop = true;
- this.noise.connect(this.output);
- var now = p5sound.audiocontext.currentTime;
- this.noise.start(now);
- this.started = true;
- };
- p5.Noise.prototype.stop = function () {
- var now = p5sound.audiocontext.currentTime;
- if (this.noise) {
- this.noise.stop(now);
- this.started = false;
- }
- };
- p5.Noise.prototype.dispose = function () {
- var now = p5sound.audiocontext.currentTime;
- // remove reference from soundArray
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
- if (this.noise) {
- this.noise.disconnect();
- this.stop(now);
- }
- if (this.output) {
- this.output.disconnect();
- }
- if (this.panner) {
- this.panner.disconnect();
+
+
+ p5.Envelope.prototype.setExp = function (isExp) {
+ this.isExponential = isExp;
+ }; //helper method to protect against zero values being sent to exponential functions
+
+
+ p5.Envelope.prototype.checkExpInput = function (value) {
+ if (value <= 0) {
+ value = 0.00000001;
}
- this.output = null;
- this.panner = null;
- this.buffer = null;
- this.noise = null;
+
+ return value;
};
-}(master);
-var audioin;
-'use strict';
-audioin = function () {
- var p5sound = master;
- // an array of input sources
- p5sound.inputSources = [];
/**
- * Get audio from an input, i.e. your computer's microphone.
+ * Play tells the envelope to start acting on a given input.
+ * If the input is a p5.sound object (i.e. AudioIn, Oscillator,
+ * SoundFile), then Envelope will control its output volume.
+ * Envelopes can also be used to control any
+ * Web Audio Audio Param.
*
- * Turn the mic on/off with the start() and stop() methods. When the mic
- * is on, its volume can be measured with getLevel or by connecting an
- * FFT object.
+ * @method play
+ * @for p5.Envelope
+ * @param {Object} unit A p5.sound object or
+ * Web Audio Param.
+ * @param {Number} [startTime] time from now (in seconds) at which to play
+ * @param {Number} [sustainTime] time to sustain before releasing the envelope
+ * @example
+ *
+ * var attackLevel = 1.0;
+ * var releaseLevel = 0;
*
- * If you want to hear the AudioIn, use the .connect() method.
- * AudioIn does not connect to p5.sound output by default to prevent
- * feedback.
+ * var attackTime = 0.001;
+ * var decayTime = 0.2;
+ * var susPercent = 0.2;
+ * var releaseTime = 0.5;
*
- * Note: This uses the getUserMedia/
- * Stream API, which is not supported by certain browsers. Access in Chrome browser
- * is limited to localhost and https, but access over http may be limited.
+ * var env, triOsc;
*
- * @class p5.AudioIn
- * @constructor
- * @param {Function} [errorCallback] A function to call if there is an error
- * accessing the AudioIn. For example,
- * Safari and iOS devices do not
- * currently allow microphone access.
- * @example
- *
- * var mic;
- * function setup(){
- * mic = new p5.AudioIn()
- * mic.start();
+ * function setup() {
+ * var cnv = createCanvas(100, 100);
+ *
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(attackLevel, releaseLevel);
+ *
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.start();
+ * triOsc.freq(220);
+ *
+ * cnv.mousePressed(playEnv);
* }
- * function draw(){
- * background(0);
- * micLevel = mic.getLevel();
- * ellipse(width/2, constrain(height-micLevel*height*5, 0, height), 10, 10);
+ *
+ * function playEnv() {
+ * // trigger env on triOsc, 0 seconds from now
+ * // After decay, sustain for 0.2 seconds before release
+ * env.play(triOsc, 0, 0.2);
* }
*
*/
- p5.AudioIn = function (errorCallback) {
- // set up audio input
- /**
- * @property {GainNode} input
- */
- this.input = p5sound.audiocontext.createGain();
- /**
- * @property {GainNode} output
- */
- this.output = p5sound.audiocontext.createGain();
- /**
- * @property {MediaStream|null} stream
- */
- this.stream = null;
- /**
- * @property {MediaStreamAudioSourceNode|null} mediaStream
- */
- this.mediaStream = null;
- /**
- * @property {Number|null} currentSource
- */
- this.currentSource = null;
- /**
- * Client must allow browser to access their microphone / audioin source.
- * Default: false. Will become true when the client enables acces.
- *
- * @property {Boolean} enabled
- */
- this.enabled = false;
- /**
- * Input amplitude, connect to it by default but not to master out
- *
- * @property {p5.Amplitude} amplitude
- */
- this.amplitude = new p5.Amplitude();
- this.output.connect(this.amplitude.input);
- if (!window.MediaStreamTrack || !window.navigator.mediaDevices || !window.navigator.mediaDevices.getUserMedia) {
- errorCallback ? errorCallback() : window.alert('This browser does not support MediaStreamTrack and mediaDevices');
+
+
+ p5.Envelope.prototype.play = function (unit, secondsFromNow, susTime) {
+ var tFromNow = secondsFromNow || 0;
+ var susTime = susTime || 0;
+
+ if (unit) {
+ if (this.connection !== unit) {
+ this.connect(unit);
+ }
}
- // add to soundArray so we can dispose on close
- p5sound.soundArray.push(this);
+
+ this.triggerAttack(unit, tFromNow);
+ this.triggerRelease(unit, tFromNow + this.aTime + this.dTime + susTime);
};
/**
- * Start processing audio input. This enables the use of other
- * AudioIn methods like getLevel(). Note that by default, AudioIn
- * is not connected to p5.sound's output. So you won't hear
- * anything unless you use the connect() method.
+ * Trigger the Attack, and Decay portion of the Envelope.
+ * Similar to holding down a key on a piano, but it will
+ * hold the sustain level until you let go. Input can be
+ * any p5.sound object, or a
+ * Web Audio Param.
*
- * Certain browsers limit access to the user's microphone. For example,
- * Chrome only allows access from localhost and over https. For this reason,
- * you may want to include an errorCallback—a function that is called in case
- * the browser won't provide mic access.
+ * @method triggerAttack
+ * @for p5.Envelope
+ * @param {Object} unit p5.sound Object or Web Audio Param
+ * @param {Number} secondsFromNow time from now (in seconds)
+ * @example
+ *
*
- * @method start
- * @param {Function} [successCallback] Name of a function to call on
- * success.
- * @param {Function} [errorCallback] Name of a function to call if
- * there was an error. For example,
- * some browsers do not support
- * getUserMedia.
+ * var attackLevel = 1.0;
+ * var releaseLevel = 0;
+ *
+ * var attackTime = 0.001;
+ * var decayTime = 0.3;
+ * var susPercent = 0.4;
+ * var releaseTime = 0.5;
+ *
+ * var env, triOsc;
+ *
+ * function setup() {
+ * var cnv = createCanvas(100, 100);
+ * background(200);
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(attackLevel, releaseLevel);
+ *
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.start();
+ * triOsc.freq(220);
+ *
+ * cnv.mousePressed(envAttack);
+ * }
+ *
+ * function envAttack() {
+ * console.log('trigger attack');
+ * env.triggerAttack();
+ *
+ * background(0,255,0);
+ * text('attack!', width/2, height/2);
+ * }
+ *
+ * function mouseReleased() {
+ * env.triggerRelease();
+ *
+ * background(200);
+ * text('click to play', width/2, height/2);
+ * }
+ *
*/
- p5.AudioIn.prototype.start = function (successCallback, errorCallback) {
- var self = this;
- if (this.stream) {
- this.stop();
- }
- // set the audio source
- var audioSource = p5sound.inputSources[self.currentSource];
- var constraints = {
- audio: {
- sampleRate: p5sound.audiocontext.sampleRate,
- echoCancellation: false
+
+
+ p5.Envelope.prototype.triggerAttack = function (unit, secondsFromNow) {
+ var now = p5sound.audiocontext.currentTime;
+ var tFromNow = secondsFromNow || 0;
+ var t = now + tFromNow;
+ this.lastAttack = t;
+ this.wasTriggered = true;
+
+ if (unit) {
+ if (this.connection !== unit) {
+ this.connect(unit);
}
- };
- // if developers determine which source to use
- if (p5sound.inputSources[this.currentSource]) {
- constraints.audio.deviceId = audioSource.deviceId;
+ } // get and set value (with linear ramp) to anchor automation
+
+
+ var valToSet = this.control.getValueAtTime(t);
+
+ if (this.isExponential === true) {
+ this.control.exponentialRampToValueAtTime(this.checkExpInput(valToSet), t);
+ } else {
+ this.control.linearRampToValueAtTime(valToSet, t);
+ } // after each ramp completes, cancel scheduled values
+ // (so they can be overridden in case env has been re-triggered)
+ // then, set current value (with linearRamp to avoid click)
+ // then, schedule the next automation...
+ // attack
+
+
+ t += this.aTime;
+
+ if (this.isExponential === true) {
+ this.control.exponentialRampToValueAtTime(this.checkExpInput(this.aLevel), t);
+ valToSet = this.checkExpInput(this.control.getValueAtTime(t));
+ this.control.cancelScheduledValues(t);
+ this.control.exponentialRampToValueAtTime(valToSet, t);
+ } else {
+ this.control.linearRampToValueAtTime(this.aLevel, t);
+ valToSet = this.control.getValueAtTime(t);
+ this.control.cancelScheduledValues(t);
+ this.control.linearRampToValueAtTime(valToSet, t);
+ } // decay to decay level (if using ADSR, then decay level == sustain level)
+
+
+ t += this.dTime;
+
+ if (this.isExponential === true) {
+ this.control.exponentialRampToValueAtTime(this.checkExpInput(this.dLevel), t);
+ valToSet = this.checkExpInput(this.control.getValueAtTime(t));
+ this.control.cancelScheduledValues(t);
+ this.control.exponentialRampToValueAtTime(valToSet, t);
+ } else {
+ this.control.linearRampToValueAtTime(this.dLevel, t);
+ valToSet = this.control.getValueAtTime(t);
+ this.control.cancelScheduledValues(t);
+ this.control.linearRampToValueAtTime(valToSet, t);
}
- window.navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
- self.stream = stream;
- self.enabled = true;
- // Wrap a MediaStreamSourceNode around the live input
- self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream);
- self.mediaStream.connect(self.output);
- // only send to the Amplitude reader, so we can see it but not hear it.
- self.amplitude.setInput(self.output);
- if (successCallback)
- successCallback();
- }).catch(function (err) {
- if (errorCallback)
- errorCallback(err);
- else
- console.error(err);
- });
};
/**
- * Turn the AudioIn off. If the AudioIn is stopped, it cannot getLevel().
- * If re-starting, the user may be prompted for permission access.
+ * Trigger the Release of the Envelope. This is similar to releasing
+ * the key on a piano and letting the sound fade according to the
+ * release level and release time.
*
- * @method stop
- */
- p5.AudioIn.prototype.stop = function () {
- if (this.stream) {
- this.stream.getTracks().forEach(function (track) {
- track.stop();
- });
- this.mediaStream.disconnect();
- delete this.mediaStream;
- delete this.stream;
- }
- };
- /**
- * Connect to an audio unit. If no parameter is provided, will
- * connect to the master output (i.e. your speakers).
+ * @method triggerRelease
+ * @for p5.Envelope
+ * @param {Object} unit p5.sound Object or Web Audio Param
+ * @param {Number} secondsFromNow time to trigger the release
+ * @example
+ *
+ *
+ * var attackLevel = 1.0;
+ * var releaseLevel = 0;
+ *
+ * var attackTime = 0.001;
+ * var decayTime = 0.3;
+ * var susPercent = 0.4;
+ * var releaseTime = 0.5;
+ *
+ * var env, triOsc;
+ *
+ * function setup() {
+ * var cnv = createCanvas(100, 100);
+ * background(200);
+ * textAlign(CENTER);
+ * text('click to play', width/2, height/2);
+ *
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime, susPercent, releaseTime);
+ * env.setRange(attackLevel, releaseLevel);
+ *
+ * triOsc = new p5.Oscillator('triangle');
+ * triOsc.amp(env);
+ * triOsc.start();
+ * triOsc.freq(220);
+ *
+ * cnv.mousePressed(envAttack);
+ * }
+ *
+ * function envAttack() {
+ * console.log('trigger attack');
+ * env.triggerAttack();
+ *
+ * background(0,255,0);
+ * text('attack!', width/2, height/2);
+ * }
*
- * @method connect
- * @param {Object} [unit] An object that accepts audio input,
- * such as an FFT
+ * function mouseReleased() {
+ * env.triggerRelease();
+ *
+ * background(200);
+ * text('click to play', width/2, height/2);
+ * }
+ *
*/
- p5.AudioIn.prototype.connect = function (unit) {
+
+
+ p5.Envelope.prototype.triggerRelease = function (unit, secondsFromNow) {
+ // only trigger a release if an attack was triggered
+ if (!this.wasTriggered) {
+ // this currently causes a bit of trouble:
+ // if a later release has been scheduled (via the play function)
+ // a new earlier release won't interrupt it, because
+ // this.wasTriggered has already been set to false.
+ // If we want new earlier releases to override, then we need to
+ // keep track of the last release time, and if the new release time is
+ // earlier, then use it.
+ return;
+ }
+
+ var now = p5sound.audiocontext.currentTime;
+ var tFromNow = secondsFromNow || 0;
+ var t = now + tFromNow;
+
if (unit) {
- if (unit.hasOwnProperty('input')) {
- this.output.connect(unit.input);
- } else if (unit.hasOwnProperty('analyser')) {
- this.output.connect(unit.analyser);
- } else {
- this.output.connect(unit);
+ if (this.connection !== unit) {
+ this.connect(unit);
}
+ } // get and set value (with linear or exponential ramp) to anchor automation
+
+
+ var valToSet = this.control.getValueAtTime(t);
+
+ if (this.isExponential === true) {
+ this.control.exponentialRampToValueAtTime(this.checkExpInput(valToSet), t);
} else {
- this.output.connect(p5sound.input);
+ this.control.linearRampToValueAtTime(valToSet, t);
+ } // release
+
+
+ t += this.rTime;
+
+ if (this.isExponential === true) {
+ this.control.exponentialRampToValueAtTime(this.checkExpInput(this.rLevel), t);
+ valToSet = this.checkExpInput(this.control.getValueAtTime(t));
+ this.control.cancelScheduledValues(t);
+ this.control.exponentialRampToValueAtTime(valToSet, t);
+ } else {
+ this.control.linearRampToValueAtTime(this.rLevel, t);
+ valToSet = this.control.getValueAtTime(t);
+ this.control.cancelScheduledValues(t);
+ this.control.linearRampToValueAtTime(valToSet, t);
}
+
+ this.wasTriggered = false;
};
/**
- * Disconnect the AudioIn from all audio units. For example, if
- * connect() had been called, disconnect() will stop sending
- * signal to your speakers.
+ * Exponentially ramp to a value using the first two
+ * values from setADSR(attackTime, decayTime)
+ * as
+ * time constants for simple exponential ramps.
+ * If the value is higher than current value, it uses attackTime,
+ * while a decrease uses decayTime.
*
- * @method disconnect
- */
- p5.AudioIn.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
- // stay connected to amplitude even if not outputting to p5
- this.output.connect(this.amplitude.input);
- }
- };
- /**
- * Read the Amplitude (volume level) of an AudioIn. The AudioIn
- * class contains its own instance of the Amplitude class to help
- * make it easy to get a microphone's volume level. Accepts an
- * optional smoothing value (0.0 < 1.0). NOTE: AudioIn must
- * .start() before using .getLevel().
+ * @method ramp
+ * @for p5.Envelope
+ * @param {Object} unit p5.sound Object or Web Audio Param
+ * @param {Number} secondsFromNow When to trigger the ramp
+ * @param {Number} v Target value
+ * @param {Number} [v2] Second target value (optional)
+ * @example
+ *
+ * var env, osc, amp, cnv;
*
- * @method getLevel
- * @param {Number} [smoothing] Smoothing is 0.0 by default.
- * Smooths values based on previous values.
- * @return {Number} Volume level (between 0.0 and 1.0)
- */
- p5.AudioIn.prototype.getLevel = function (smoothing) {
- if (smoothing) {
- this.amplitude.smoothing = smoothing;
- }
- return this.amplitude.getLevel();
- };
- /**
- * Set amplitude (volume) of a mic input between 0 and 1.0.
+ * var attackTime = 0.001;
+ * var decayTime = 0.2;
+ * var attackLevel = 1;
+ * var decayLevel = 0;
*
- * @method amp
- * @param {Number} vol between 0 and 1.0
- * @param {Number} [time] ramp time (optional)
- */
- p5.AudioIn.prototype.amp = function (vol, t) {
- if (t) {
- var rampTime = t || 0;
- var currentVol = this.output.gain.value;
- this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
- this.output.gain.setValueAtTime(currentVol, p5sound.audiocontext.currentTime);
- this.output.gain.linearRampToValueAtTime(vol, rampTime + p5sound.audiocontext.currentTime);
- } else {
- this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
- this.output.gain.setValueAtTime(vol, p5sound.audiocontext.currentTime);
- }
- };
- /**
- * Returns a list of available input sources. This is a wrapper
- * for and it returns a Promise.
+ * function setup() {
+ * cnv = createCanvas(100, 100);
+ * fill(0,255,0);
+ * noStroke();
*
- * @method getSources
- * @param {Function} [successCallback] This callback function handles the sources when they
- * have been enumerated. The callback function
- * receives the deviceList array as its only argument
- * @param {Function} [errorCallback] This optional callback receives the error
- * message as its argument.
- * @returns {Promise} Returns a Promise that can be used in place of the callbacks, similar
- * to the enumerateDevices() method
- * @example
- *
- * var audiograb;
+ * env = new p5.Envelope();
+ * env.setADSR(attackTime, decayTime);
*
- * function setup(){
- * //new audioIn
- * audioGrab = new p5.AudioIn();
+ * osc = new p5.Oscillator();
+ * osc.amp(env);
+ * osc.start();
*
- * audioGrab.getSources(function(deviceList) {
- * //print out the array of available sources
- * console.log(deviceList);
- * //set the source to the first item in the deviceList array
- * audioGrab.setSource(0);
- * });
+ * amp = new p5.Amplitude();
+ *
+ * cnv.mousePressed(triggerRamp);
+ * }
+ *
+ * function triggerRamp() {
+ * env.ramp(osc, 0, attackLevel, decayLevel);
+ * }
+ *
+ * function draw() {
+ * background(20,20,20);
+ * text('click me', 10, 20);
+ * var h = map(amp.getLevel(), 0, 0.4, 0, height);;
+ *
+ * rect(0, height, width, -h);
* }
*
*/
- p5.AudioIn.prototype.getSources = function (onSuccess, onError) {
- return new Promise(function (resolve, reject) {
- window.navigator.mediaDevices.enumerateDevices().then(function (devices) {
- p5sound.inputSources = devices.filter(function (device) {
- return device.kind === 'audioinput';
- });
- resolve(p5sound.inputSources);
- if (onSuccess) {
- onSuccess(p5sound.inputSources);
- }
- }).catch(function (error) {
- reject(error);
- if (onError) {
- onError(error);
- } else {
- console.error('This browser does not support MediaStreamTrack.getSources()');
- }
- });
- });
+
+
+ p5.Envelope.prototype.ramp = function (unit, secondsFromNow, v1, v2) {
+ var now = p5sound.audiocontext.currentTime;
+ var tFromNow = secondsFromNow || 0;
+ var t = now + tFromNow;
+ var destination1 = this.checkExpInput(v1);
+ var destination2 = typeof v2 !== 'undefined' ? this.checkExpInput(v2) : undefined; // connect env to unit if not already connected
+
+ if (unit) {
+ if (this.connection !== unit) {
+ this.connect(unit);
+ }
+ } //get current value
+
+
+ var currentVal = this.checkExpInput(this.control.getValueAtTime(t)); // this.control.cancelScheduledValues(t);
+ //if it's going up
+
+ if (destination1 > currentVal) {
+ this.control.setTargetAtTime(destination1, t, this._rampAttackTC);
+ t += this._rampAttackTime;
+ } //if it's going down
+ else if (destination1 < currentVal) {
+ this.control.setTargetAtTime(destination1, t, this._rampDecayTC);
+ t += this._rampDecayTime;
+ } // Now the second part of envelope begins
+
+
+ if (destination2 === undefined) return; //if it's going up
+
+ if (destination2 > destination1) {
+ this.control.setTargetAtTime(destination2, t, this._rampAttackTC);
+ } //if it's going down
+ else if (destination2 < destination1) {
+ this.control.setTargetAtTime(destination2, t, this._rampDecayTC);
+ }
+ };
+
+ p5.Envelope.prototype.connect = function (unit) {
+ this.connection = unit; // assume we're talking about output gain
+ // unless given a different audio param
+
+ if (unit instanceof p5.Oscillator || unit instanceof p5.SoundFile || unit instanceof p5.AudioIn || unit instanceof p5.Reverb || unit instanceof p5.Noise || unit instanceof p5.Filter || unit instanceof p5.Delay) {
+ unit = unit.output.gain;
+ }
+
+ if (unit instanceof AudioParam) {
+ //set the initial value
+ unit.setValueAtTime(0, p5sound.audiocontext.currentTime);
+ }
+
+ if (unit instanceof p5.Signal) {
+ unit.setValue(0);
+ }
+
+ this.output.connect(unit);
+ };
+
+ p5.Envelope.prototype.disconnect = function () {
+ if (this.output) {
+ this.output.disconnect();
+ }
+ }; // Signal Math
+
+ /**
+ * Add a value to the p5.Oscillator's output amplitude,
+ * and return the oscillator. Calling this method
+ * again will override the initial add() with new values.
+ *
+ * @method add
+ * @for p5.Envelope
+ * @param {Number} number Constant number to add
+ * @return {p5.Envelope} Envelope Returns this envelope
+ * with scaled output
+ */
+
+
+ p5.Envelope.prototype.add = function (num) {
+ var add = new Add(num);
+ var thisChain = this.mathOps.length;
+ var nextChain = this.output;
+ return p5.prototype._mathChain(this, add, thisChain, nextChain, Add);
};
/**
- * Set the input source. Accepts a number representing a
- * position in the array returned by getSources().
- * This is only available in browsers that support
- * navigator.mediaDevices.enumerateDevices().
+ * Multiply the p5.Envelope's output amplitude
+ * by a fixed value. Calling this method
+ * again will override the initial mult() with new values.
*
- * @method setSource
- * @param {number} num position of input source in the array
+ * @method mult
+ * @for p5.Envelope
+ * @param {Number} number Constant number to multiply
+ * @return {p5.Envelope} Envelope Returns this envelope
+ * with scaled output
*/
- p5.AudioIn.prototype.setSource = function (num) {
- if (p5sound.inputSources.length > 0 && num < p5sound.inputSources.length) {
- // set the current source
- this.currentSource = num;
- console.log('set source to ', p5sound.inputSources[this.currentSource]);
- } else {
- console.log('unable to set input source');
- }
- // restart stream if currently active
- if (this.stream && this.stream.active) {
- this.start();
- }
+
+
+ p5.Envelope.prototype.mult = function (num) {
+ var mult = new Mult(num);
+ var thisChain = this.mathOps.length;
+ var nextChain = this.output;
+ return p5.prototype._mathChain(this, mult, thisChain, nextChain, Mult);
};
- // private method
- p5.AudioIn.prototype.dispose = function () {
+ /**
+ * Scale this envelope's amplitude values to a given
+ * range, and return the envelope. Calling this method
+ * again will override the initial scale() with new values.
+ *
+ * @method scale
+ * @for p5.Envelope
+ * @param {Number} inMin input range minumum
+ * @param {Number} inMax input range maximum
+ * @param {Number} outMin input range minumum
+ * @param {Number} outMax input range maximum
+ * @return {p5.Envelope} Envelope Returns this envelope
+ * with scaled output
+ */
+
+
+ p5.Envelope.prototype.scale = function (inMin, inMax, outMin, outMax) {
+ var scale = new Scale(inMin, inMax, outMin, outMax);
+ var thisChain = this.mathOps.length;
+ var nextChain = this.output;
+ return p5.prototype._mathChain(this, scale, thisChain, nextChain, Scale);
+ }; // get rid of the oscillator
+
+
+ p5.Envelope.prototype.dispose = function () {
// remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
- this.stop();
- if (this.output) {
- this.output.disconnect();
- }
- if (this.amplitude) {
- this.amplitude.disconnect();
- }
- delete this.amplitude;
- delete this.output;
- };
-}(master);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Negate;
-Tone_signal_Negate = function (Tone) {
- 'use strict';
- Tone.Negate = function () {
- this._multiply = this.input = this.output = new Tone.Multiply(-1);
- };
- Tone.extend(Tone.Negate, Tone.SignalBase);
- Tone.Negate.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._multiply.dispose();
- this._multiply = null;
- return this;
- };
- return Tone.Negate;
-}(Tone_core_Tone, Tone_signal_Multiply);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Subtract;
-Tone_signal_Subtract = function (Tone) {
- 'use strict';
- Tone.Subtract = function (value) {
- this.createInsOuts(2, 0);
- this._sum = this.input[0] = this.output = new Tone.Gain();
- this._neg = new Tone.Negate();
- this._param = this.input[1] = new Tone.Signal(value);
- this._param.chain(this._neg, this._sum);
- };
- Tone.extend(Tone.Subtract, Tone.Signal);
- Tone.Subtract.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._neg.dispose();
- this._neg = null;
- this._sum.disconnect();
- this._sum = null;
- this._param.dispose();
- this._param = null;
- return this;
- };
- return Tone.Subtract;
-}(Tone_core_Tone, Tone_signal_Add, Tone_signal_Negate, Tone_signal_Signal);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_GreaterThanZero;
-Tone_signal_GreaterThanZero = function (Tone) {
- 'use strict';
- Tone.GreaterThanZero = function () {
- this._thresh = this.output = new Tone.WaveShaper(function (val) {
- if (val <= 0) {
- return 0;
- } else {
- return 1;
- }
- }, 127);
- this._scale = this.input = new Tone.Multiply(10000);
- this._scale.connect(this._thresh);
- };
- Tone.extend(Tone.GreaterThanZero, Tone.SignalBase);
- Tone.GreaterThanZero.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._scale.dispose();
- this._scale = null;
- this._thresh.dispose();
- this._thresh = null;
- return this;
- };
- return Tone.GreaterThanZero;
-}(Tone_core_Tone, Tone_signal_Signal, Tone_signal_Multiply);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_GreaterThan;
-Tone_signal_GreaterThan = function (Tone) {
- 'use strict';
- Tone.GreaterThan = function (value) {
- this.createInsOuts(2, 0);
- this._param = this.input[0] = new Tone.Subtract(value);
- this.input[1] = this._param.input[1];
- this._gtz = this.output = new Tone.GreaterThanZero();
- this._param.connect(this._gtz);
- };
- Tone.extend(Tone.GreaterThan, Tone.Signal);
- Tone.GreaterThan.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._param.dispose();
- this._param = null;
- this._gtz.dispose();
- this._gtz = null;
- return this;
- };
- return Tone.GreaterThan;
-}(Tone_core_Tone, Tone_signal_GreaterThanZero, Tone_signal_Subtract);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Abs;
-Tone_signal_Abs = function (Tone) {
- 'use strict';
- Tone.Abs = function () {
- this._abs = this.input = this.output = new Tone.WaveShaper(function (val) {
- if (val === 0) {
- return 0;
- } else {
- return Math.abs(val);
- }
- }, 127);
- };
- Tone.extend(Tone.Abs, Tone.SignalBase);
- Tone.Abs.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._abs.dispose();
- this._abs = null;
- return this;
- };
- return Tone.Abs;
-}(Tone_core_Tone, Tone_signal_WaveShaper);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Modulo;
-Tone_signal_Modulo = function (Tone) {
- 'use strict';
- Tone.Modulo = function (modulus) {
- this.createInsOuts(1, 0);
- this._shaper = new Tone.WaveShaper(Math.pow(2, 16));
- this._multiply = new Tone.Multiply();
- this._subtract = this.output = new Tone.Subtract();
- this._modSignal = new Tone.Signal(modulus);
- this.input.fan(this._shaper, this._subtract);
- this._modSignal.connect(this._multiply, 0, 0);
- this._shaper.connect(this._multiply, 0, 1);
- this._multiply.connect(this._subtract, 0, 1);
- this._setWaveShaper(modulus);
- };
- Tone.extend(Tone.Modulo, Tone.SignalBase);
- Tone.Modulo.prototype._setWaveShaper = function (mod) {
- this._shaper.setMap(function (val) {
- var multiple = Math.floor((val + 0.0001) / mod);
- return multiple;
- });
- };
- Object.defineProperty(Tone.Modulo.prototype, 'value', {
- get: function () {
- return this._modSignal.value;
- },
- set: function (mod) {
- this._modSignal.value = mod;
- this._setWaveShaper(mod);
- }
- });
- Tone.Modulo.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._shaper.dispose();
- this._shaper = null;
- this._multiply.dispose();
- this._multiply = null;
- this._subtract.dispose();
- this._subtract = null;
- this._modSignal.dispose();
- this._modSignal = null;
- return this;
- };
- return Tone.Modulo;
-}(Tone_core_Tone, Tone_signal_WaveShaper, Tone_signal_Multiply);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Pow;
-Tone_signal_Pow = function (Tone) {
- 'use strict';
- Tone.Pow = function (exp) {
- this._exp = this.defaultArg(exp, 1);
- this._expScaler = this.input = this.output = new Tone.WaveShaper(this._expFunc(this._exp), 8192);
- };
- Tone.extend(Tone.Pow, Tone.SignalBase);
- Object.defineProperty(Tone.Pow.prototype, 'value', {
- get: function () {
- return this._exp;
- },
- set: function (exp) {
- this._exp = exp;
- this._expScaler.setMap(this._expFunc(this._exp));
- }
- });
- Tone.Pow.prototype._expFunc = function (exp) {
- return function (val) {
- return Math.pow(Math.abs(val), exp);
- };
- };
- Tone.Pow.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._expScaler.dispose();
- this._expScaler = null;
- return this;
- };
- return Tone.Pow;
-}(Tone_core_Tone);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_AudioToGain;
-Tone_signal_AudioToGain = function (Tone) {
- 'use strict';
- Tone.AudioToGain = function () {
- this._norm = this.input = this.output = new Tone.WaveShaper(function (x) {
- return (x + 1) / 2;
- });
- };
- Tone.extend(Tone.AudioToGain, Tone.SignalBase);
- Tone.AudioToGain.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._norm.dispose();
- this._norm = null;
- return this;
- };
- return Tone.AudioToGain;
-}(Tone_core_Tone, Tone_signal_WaveShaper);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_Expr;
-Tone_signal_Expr = function (Tone) {
- 'use strict';
- Tone.Expr = function () {
- var expr = this._replacements(Array.prototype.slice.call(arguments));
- var inputCount = this._parseInputs(expr);
- this._nodes = [];
- this.input = new Array(inputCount);
- for (var i = 0; i < inputCount; i++) {
- this.input[i] = this.context.createGain();
- }
- var tree = this._parseTree(expr);
- var result;
- try {
- result = this._eval(tree);
- } catch (e) {
- this._disposeNodes();
- throw new Error('Tone.Expr: Could evaluate expression: ' + expr);
- }
- this.output = result;
- };
- Tone.extend(Tone.Expr, Tone.SignalBase);
- function applyBinary(Constructor, args, self) {
- var op = new Constructor();
- self._eval(args[0]).connect(op, 0, 0);
- self._eval(args[1]).connect(op, 0, 1);
- return op;
- }
- function applyUnary(Constructor, args, self) {
- var op = new Constructor();
- self._eval(args[0]).connect(op, 0, 0);
- return op;
- }
- function getNumber(arg) {
- return arg ? parseFloat(arg) : undefined;
- }
- function literalNumber(arg) {
- return arg && arg.args ? parseFloat(arg.args) : undefined;
- }
- Tone.Expr._Expressions = {
- 'value': {
- 'signal': {
- regexp: /^\d+\.\d+|^\d+/,
- method: function (arg) {
- var sig = new Tone.Signal(getNumber(arg));
- return sig;
- }
- },
- 'input': {
- regexp: /^\$\d/,
- method: function (arg, self) {
- return self.input[getNumber(arg.substr(1))];
- }
- }
- },
- 'glue': {
- '(': { regexp: /^\(/ },
- ')': { regexp: /^\)/ },
- ',': { regexp: /^,/ }
- },
- 'func': {
- 'abs': {
- regexp: /^abs/,
- method: applyUnary.bind(this, Tone.Abs)
- },
- 'mod': {
- regexp: /^mod/,
- method: function (args, self) {
- var modulus = literalNumber(args[1]);
- var op = new Tone.Modulo(modulus);
- self._eval(args[0]).connect(op);
- return op;
- }
- },
- 'pow': {
- regexp: /^pow/,
- method: function (args, self) {
- var exp = literalNumber(args[1]);
- var op = new Tone.Pow(exp);
- self._eval(args[0]).connect(op);
- return op;
- }
- },
- 'a2g': {
- regexp: /^a2g/,
- method: function (args, self) {
- var op = new Tone.AudioToGain();
- self._eval(args[0]).connect(op);
- return op;
- }
- }
- },
- 'binary': {
- '+': {
- regexp: /^\+/,
- precedence: 1,
- method: applyBinary.bind(this, Tone.Add)
- },
- '-': {
- regexp: /^\-/,
- precedence: 1,
- method: function (args, self) {
- if (args.length === 1) {
- return applyUnary(Tone.Negate, args, self);
- } else {
- return applyBinary(Tone.Subtract, args, self);
- }
- }
- },
- '*': {
- regexp: /^\*/,
- precedence: 0,
- method: applyBinary.bind(this, Tone.Multiply)
- }
- },
- 'unary': {
- '-': {
- regexp: /^\-/,
- method: applyUnary.bind(this, Tone.Negate)
- },
- '!': {
- regexp: /^\!/,
- method: applyUnary.bind(this, Tone.NOT)
- }
- }
- };
- Tone.Expr.prototype._parseInputs = function (expr) {
- var inputArray = expr.match(/\$\d/g);
- var inputMax = 0;
- if (inputArray !== null) {
- for (var i = 0; i < inputArray.length; i++) {
- var inputNum = parseInt(inputArray[i].substr(1)) + 1;
- inputMax = Math.max(inputMax, inputNum);
- }
+ this.disconnect();
+
+ if (this.control) {
+ this.control.dispose();
+ this.control = null;
}
- return inputMax;
- };
- Tone.Expr.prototype._replacements = function (args) {
- var expr = args.shift();
- for (var i = 0; i < args.length; i++) {
- expr = expr.replace(/\%/i, args[i]);
- }
- return expr;
- };
- Tone.Expr.prototype._tokenize = function (expr) {
- var position = -1;
- var tokens = [];
- while (expr.length > 0) {
- expr = expr.trim();
- var token = getNextToken(expr);
- tokens.push(token);
- expr = expr.substr(token.value.length);
- }
- function getNextToken(expr) {
- for (var type in Tone.Expr._Expressions) {
- var group = Tone.Expr._Expressions[type];
- for (var opName in group) {
- var op = group[opName];
- var reg = op.regexp;
- var match = expr.match(reg);
- if (match !== null) {
- return {
- type: type,
- value: match[0],
- method: op.method
- };
- }
- }
- }
- throw new SyntaxError('Tone.Expr: Unexpected token ' + expr);
+
+ for (var i = 1; i < this.mathOps.length; i++) {
+ this.mathOps[i].dispose();
}
- return {
- next: function () {
- return tokens[++position];
- },
- peek: function () {
- return tokens[position + 1];
- }
- };
+ }; // Different name for backwards compatibility, replicates p5.Envelope class
+
+
+ p5.Env = function (t1, l1, t2, l2, t3, l3) {
+ console.warn('WARNING: p5.Env is now deprecated and may be removed in future versions. ' + 'Please use the new p5.Envelope instead.');
+ p5.Envelope.call(this, t1, l1, t2, l2, t3, l3);
};
- Tone.Expr.prototype._parseTree = function (expr) {
- var lexer = this._tokenize(expr);
- var isUndef = this.isUndef.bind(this);
- function matchSyntax(token, syn) {
- return !isUndef(token) && token.type === 'glue' && token.value === syn;
- }
- function matchGroup(token, groupName, prec) {
- var ret = false;
- var group = Tone.Expr._Expressions[groupName];
- if (!isUndef(token)) {
- for (var opName in group) {
- var op = group[opName];
- if (op.regexp.test(token.value)) {
- if (!isUndef(prec)) {
- if (op.precedence === prec) {
- return true;
- }
- } else {
- return true;
- }
- }
- }
- }
- return ret;
- }
- function parseExpression(precedence) {
- if (isUndef(precedence)) {
- precedence = 5;
- }
- var expr;
- if (precedence < 0) {
- expr = parseUnary();
- } else {
- expr = parseExpression(precedence - 1);
- }
- var token = lexer.peek();
- while (matchGroup(token, 'binary', precedence)) {
- token = lexer.next();
- expr = {
- operator: token.value,
- method: token.method,
- args: [
- expr,
- parseExpression(precedence - 1)
- ]
- };
- token = lexer.peek();
- }
- return expr;
- }
- function parseUnary() {
- var token, expr;
- token = lexer.peek();
- if (matchGroup(token, 'unary')) {
- token = lexer.next();
- expr = parseUnary();
- return {
- operator: token.value,
- method: token.method,
- args: [expr]
- };
- }
- return parsePrimary();
- }
- function parsePrimary() {
- var token, expr;
- token = lexer.peek();
- if (isUndef(token)) {
- throw new SyntaxError('Tone.Expr: Unexpected termination of expression');
- }
- if (token.type === 'func') {
- token = lexer.next();
- return parseFunctionCall(token);
- }
- if (token.type === 'value') {
- token = lexer.next();
- return {
- method: token.method,
- args: token.value
- };
- }
- if (matchSyntax(token, '(')) {
- lexer.next();
- expr = parseExpression();
- token = lexer.next();
- if (!matchSyntax(token, ')')) {
- throw new SyntaxError('Expected )');
- }
- return expr;
- }
- throw new SyntaxError('Tone.Expr: Parse error, cannot process token ' + token.value);
- }
- function parseFunctionCall(func) {
- var token, args = [];
- token = lexer.next();
- if (!matchSyntax(token, '(')) {
- throw new SyntaxError('Tone.Expr: Expected ( in a function call "' + func.value + '"');
- }
- token = lexer.peek();
- if (!matchSyntax(token, ')')) {
- args = parseArgumentList();
- }
- token = lexer.next();
- if (!matchSyntax(token, ')')) {
- throw new SyntaxError('Tone.Expr: Expected ) in a function call "' + func.value + '"');
- }
- return {
- method: func.method,
- args: args,
- name: name
- };
- }
- function parseArgumentList() {
- var token, expr, args = [];
- while (true) {
- expr = parseExpression();
- if (isUndef(expr)) {
- break;
- }
- args.push(expr);
- token = lexer.peek();
- if (!matchSyntax(token, ',')) {
- break;
- }
- lexer.next();
+
+ p5.Env.prototype = Object.create(p5.Envelope.prototype);
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 42 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ __webpack_require__(22);
+ /**
+ * Creates a Pulse object, an oscillator that implements
+ * Pulse Width Modulation.
+ * The pulse is created with two oscillators.
+ * Accepts a parameter for frequency, and to set the
+ * width between the pulses. See
+ * p5.Oscillator
for a full list of methods.
+ *
+ * @class p5.Pulse
+ * @extends p5.Oscillator
+ * @constructor
+ * @param {Number} [freq] Frequency in oscillations per second (Hz)
+ * @param {Number} [w] Width between the pulses (0 to 1.0,
+ * defaults to 0)
+ * @example
+ *
+ * var pulse;
+ * function setup() {
+ * background(0);
+ *
+ * // Create and start the pulse wave oscillator
+ * pulse = new p5.Pulse();
+ * pulse.amp(0.5);
+ * pulse.freq(220);
+ * pulse.start();
+ * }
+ *
+ * function draw() {
+ * var w = map(mouseX, 0, width, 0, 1);
+ * w = constrain(w, 0, 1);
+ * pulse.width(w)
+ * }
+ *
+ */
+
+
+ p5.Pulse = function (freq, w) {
+ p5.Oscillator.call(this, freq, 'sawtooth'); // width of PWM, should be betw 0 to 1.0
+
+ this.w = w || 0; // create a second oscillator with inverse frequency
+
+ this.osc2 = new p5.SawOsc(freq); // create a delay node
+
+ this.dNode = p5sound.audiocontext.createDelay(); // dc offset
+
+ this.dcOffset = createDCOffset();
+ this.dcGain = p5sound.audiocontext.createGain();
+ this.dcOffset.connect(this.dcGain);
+ this.dcGain.connect(this.output); // set delay time based on PWM width
+
+ this.f = freq || 440;
+ var mW = this.w / this.oscillator.frequency.value;
+ this.dNode.delayTime.value = mW;
+ this.dcGain.gain.value = 1.7 * (0.5 - this.w); // disconnect osc2 and connect it to delay, which is connected to output
+
+ this.osc2.disconnect();
+ this.osc2.panner.disconnect();
+ this.osc2.amp(-1); // inverted amplitude
+
+ this.osc2.output.connect(this.dNode);
+ this.dNode.connect(this.output);
+ this.output.gain.value = 1;
+ this.output.connect(this.panner);
+ };
+
+ p5.Pulse.prototype = Object.create(p5.Oscillator.prototype);
+ /**
+ * Set the width of a Pulse object (an oscillator that implements
+ * Pulse Width Modulation).
+ *
+ * @method width
+ * @param {Number} [width] Width between the pulses (0 to 1.0,
+ * defaults to 0)
+ */
+
+ p5.Pulse.prototype.width = function (w) {
+ if (typeof w === 'number') {
+ if (w <= 1.0 && w >= 0.0) {
+ this.w = w; // set delay time based on PWM width
+ // var mW = map(this.w, 0, 1.0, 0, 1/this.f);
+
+ var mW = this.w / this.oscillator.frequency.value;
+ this.dNode.delayTime.value = mW;
}
- return args;
+
+ this.dcGain.gain.value = 1.7 * (0.5 - this.w);
+ } else {
+ w.connect(this.dNode.delayTime);
+ var sig = new p5.SignalAdd(-0.5);
+ sig.setInput(w);
+ sig = sig.mult(-1);
+ sig = sig.mult(1.7);
+ sig.connect(this.dcGain.gain);
}
- return parseExpression();
};
- Tone.Expr.prototype._eval = function (tree) {
- if (!this.isUndef(tree)) {
- var node = tree.method(tree.args, this);
- this._nodes.push(node);
- return node;
+
+ p5.Pulse.prototype.start = function (f, time) {
+ var now = p5sound.audiocontext.currentTime;
+ var t = time || 0;
+
+ if (!this.started) {
+ var freq = f || this.f;
+ var type = this.oscillator.type;
+ this.oscillator = p5sound.audiocontext.createOscillator();
+ this.oscillator.frequency.setValueAtTime(freq, now);
+ this.oscillator.type = type;
+ this.oscillator.connect(this.output);
+ this.oscillator.start(t + now); // set up osc2
+
+ this.osc2.oscillator = p5sound.audiocontext.createOscillator();
+ this.osc2.oscillator.frequency.setValueAtTime(freq, t + now);
+ this.osc2.oscillator.type = type;
+ this.osc2.oscillator.connect(this.osc2.output);
+ this.osc2.start(t + now);
+ this.freqNode = [this.oscillator.frequency, this.osc2.oscillator.frequency]; // start dcOffset, too
+
+ this.dcOffset = createDCOffset();
+ this.dcOffset.connect(this.dcGain);
+ this.dcOffset.start(t + now); // if LFO connections depend on these oscillators
+
+ if (this.mods !== undefined && this.mods.frequency !== undefined) {
+ this.mods.frequency.connect(this.freqNode[0]);
+ this.mods.frequency.connect(this.freqNode[1]);
+ }
+
+ this.started = true;
+ this.osc2.started = true;
}
};
- Tone.Expr.prototype._disposeNodes = function () {
- for (var i = 0; i < this._nodes.length; i++) {
- var node = this._nodes[i];
- if (this.isFunction(node.dispose)) {
- node.dispose();
- } else if (this.isFunction(node.disconnect)) {
- node.disconnect();
+
+ p5.Pulse.prototype.stop = function (time) {
+ if (this.started) {
+ var t = time || 0;
+ var now = p5sound.audiocontext.currentTime;
+ this.oscillator.stop(t + now);
+
+ if (this.osc2.oscillator) {
+ this.osc2.oscillator.stop(t + now);
}
- node = null;
- this._nodes[i] = null;
+
+ this.dcOffset.stop(t + now);
+ this.started = false;
+ this.osc2.started = false;
}
- this._nodes = null;
- };
- Tone.Expr.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._disposeNodes();
};
- return Tone.Expr;
-}(Tone_core_Tone, Tone_signal_Add, Tone_signal_Subtract, Tone_signal_Multiply, Tone_signal_GreaterThan, Tone_signal_GreaterThanZero, Tone_signal_Abs, Tone_signal_Negate, Tone_signal_Modulo, Tone_signal_Pow);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_signal_EqualPowerGain;
-Tone_signal_EqualPowerGain = function (Tone) {
- 'use strict';
- Tone.EqualPowerGain = function () {
- this._eqPower = this.input = this.output = new Tone.WaveShaper(function (val) {
- if (Math.abs(val) < 0.001) {
- return 0;
- } else {
- return this.equalPowerScale(val);
+
+ p5.Pulse.prototype.freq = function (val, rampTime, tFromNow) {
+ if (typeof val === 'number') {
+ this.f = val;
+ var now = p5sound.audiocontext.currentTime;
+ var rampTime = rampTime || 0;
+ var tFromNow = tFromNow || 0;
+ var currentFreq = this.oscillator.frequency.value;
+ this.oscillator.frequency.cancelScheduledValues(now);
+ this.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow);
+ this.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
+ this.osc2.oscillator.frequency.cancelScheduledValues(now);
+ this.osc2.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow);
+ this.osc2.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now);
+
+ if (this.freqMod) {
+ this.freqMod.output.disconnect();
+ this.freqMod = null;
}
- }.bind(this), 4096);
- };
- Tone.extend(Tone.EqualPowerGain, Tone.SignalBase);
- Tone.EqualPowerGain.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._eqPower.dispose();
- this._eqPower = null;
- return this;
- };
- return Tone.EqualPowerGain;
-}(Tone_core_Tone);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_component_CrossFade;
-Tone_component_CrossFade = function (Tone) {
- 'use strict';
- Tone.CrossFade = function (initialFade) {
- this.createInsOuts(2, 1);
- this.a = this.input[0] = new Tone.Gain();
- this.b = this.input[1] = new Tone.Gain();
- this.fade = new Tone.Signal(this.defaultArg(initialFade, 0.5), Tone.Type.NormalRange);
- this._equalPowerA = new Tone.EqualPowerGain();
- this._equalPowerB = new Tone.EqualPowerGain();
- this._invert = new Tone.Expr('1 - $0');
- this.a.connect(this.output);
- this.b.connect(this.output);
- this.fade.chain(this._equalPowerB, this.b.gain);
- this.fade.chain(this._invert, this._equalPowerA, this.a.gain);
- this._readOnly('fade');
- };
- Tone.extend(Tone.CrossFade);
- Tone.CrossFade.prototype.dispose = function () {
- Tone.prototype.dispose.call(this);
- this._writable('fade');
- this._equalPowerA.dispose();
- this._equalPowerA = null;
- this._equalPowerB.dispose();
- this._equalPowerB = null;
- this.fade.dispose();
- this.fade = null;
- this._invert.dispose();
- this._invert = null;
- this.a.dispose();
- this.a = null;
- this.b.dispose();
- this.b = null;
- return this;
- };
- return Tone.CrossFade;
-}(Tone_core_Tone, Tone_signal_Signal, Tone_signal_Expr, Tone_signal_EqualPowerGain);
-var effect;
-'use strict';
-effect = function () {
- var p5sound = master;
- var CrossFade = Tone_component_CrossFade;
+ } else if (val.output) {
+ val.output.disconnect();
+ val.output.connect(this.oscillator.frequency);
+ val.output.connect(this.osc2.oscillator.frequency);
+ this.freqMod = val;
+ }
+ }; // inspiration: http://webaudiodemos.appspot.com/oscilloscope/
+
+
+ function createDCOffset() {
+ var ac = p5sound.audiocontext;
+ var buffer = ac.createBuffer(1, 2048, ac.sampleRate);
+ var data = buffer.getChannelData(0);
+
+ for (var i = 0; i < 2048; i++) {
+ data[i] = 1.0;
+ }
+
+ var bufferSource = ac.createBufferSource();
+ bufferSource.buffer = buffer;
+ bufferSource.loop = true;
+ return bufferSource;
+ }
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 43 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
/**
- * Effect is a base class for audio effects in p5.
- * This module handles the nodes and methods that are
- * common and useful for current and future effects.
- *
- *
- * This class is extended by p5.Distortion,
- * p5.Compressor,
- * p5.Delay,
- * p5.Filter,
- * p5.Reverb.
+ * Noise is a type of oscillator that generates a buffer with random values.
*
- * @class p5.Effect
- * @constructor
- *
- * @param {Object} [ac] Reference to the audio context of the p5 object
- * @param {AudioNode} [input] Gain Node effect wrapper
- * @param {AudioNode} [output] Gain Node effect wrapper
- * @param {Object} [_drywet] Tone.JS CrossFade node (defaults to value: 1)
- * @param {AudioNode} [wet] Effects that extend this class should connect
- * to the wet signal to this gain node, so that dry and wet
- * signals are mixed properly.
- */
- p5.Effect = function () {
- this.ac = p5sound.audiocontext;
- this.input = this.ac.createGain();
- this.output = this.ac.createGain();
- /**
- * The p5.Effect class is built
- * using Tone.js CrossFade
- * @private
- */
- this._drywet = new CrossFade(1);
- /**
- * In classes that extend
- * p5.Effect, connect effect nodes
- * to the wet parameter
- */
- this.wet = this.ac.createGain();
- this.input.connect(this._drywet.a);
- this.wet.connect(this._drywet.b);
- this._drywet.connect(this.output);
- this.connect();
- //Add to the soundArray
- p5sound.soundArray.push(this);
- };
- /**
- * Set the output volume of the filter.
- *
- * @method amp
- * @param {Number} [vol] amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts until rampTime
- * @param {Number} [tFromNow] schedule this event to happen in tFromNow seconds
+ * @class p5.Noise
+ * @extends p5.Oscillator
+ * @constructor
+ * @param {String} type Type of noise can be 'white' (default),
+ * 'brown' or 'pink'.
*/
- p5.Effect.prototype.amp = function (vol, rampTime, tFromNow) {
- var rampTime = rampTime || 0;
- var tFromNow = tFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- var currentVol = this.output.gain.value;
- this.output.gain.cancelScheduledValues(now);
- this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow + 0.001);
- this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime + 0.001);
+
+
+ p5.Noise = function (type) {
+ var assignType;
+ p5.Oscillator.call(this);
+ delete this.f;
+ delete this.freq;
+ delete this.oscillator;
+
+ if (type === 'brown') {
+ assignType = _brownNoise;
+ } else if (type === 'pink') {
+ assignType = _pinkNoise;
+ } else {
+ assignType = _whiteNoise;
+ }
+
+ this.buffer = assignType;
};
+
+ p5.Noise.prototype = Object.create(p5.Oscillator.prototype); // generate noise buffers
+
+ var _whiteNoise = function () {
+ var bufferSize = 2 * p5sound.audiocontext.sampleRate;
+ var whiteBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
+ var noiseData = whiteBuffer.getChannelData(0);
+
+ for (var i = 0; i < bufferSize; i++) {
+ noiseData[i] = Math.random() * 2 - 1;
+ }
+
+ whiteBuffer.type = 'white';
+ return whiteBuffer;
+ }();
+
+ var _pinkNoise = function () {
+ var bufferSize = 2 * p5sound.audiocontext.sampleRate;
+ var pinkBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
+ var noiseData = pinkBuffer.getChannelData(0);
+ var b0, b1, b2, b3, b4, b5, b6;
+ b0 = b1 = b2 = b3 = b4 = b5 = b6 = 0.0;
+
+ for (var i = 0; i < bufferSize; i++) {
+ var white = Math.random() * 2 - 1;
+ b0 = 0.99886 * b0 + white * 0.0555179;
+ b1 = 0.99332 * b1 + white * 0.0750759;
+ b2 = 0.96900 * b2 + white * 0.1538520;
+ b3 = 0.86650 * b3 + white * 0.3104856;
+ b4 = 0.55000 * b4 + white * 0.5329522;
+ b5 = -0.7616 * b5 - white * 0.0168980;
+ noiseData[i] = b0 + b1 + b2 + b3 + b4 + b5 + b6 + white * 0.5362;
+ noiseData[i] *= 0.11; // (roughly) compensate for gain
+
+ b6 = white * 0.115926;
+ }
+
+ pinkBuffer.type = 'pink';
+ return pinkBuffer;
+ }();
+
+ var _brownNoise = function () {
+ var bufferSize = 2 * p5sound.audiocontext.sampleRate;
+ var brownBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate);
+ var noiseData = brownBuffer.getChannelData(0);
+ var lastOut = 0.0;
+
+ for (var i = 0; i < bufferSize; i++) {
+ var white = Math.random() * 2 - 1;
+ noiseData[i] = (lastOut + 0.02 * white) / 1.02;
+ lastOut = noiseData[i];
+ noiseData[i] *= 3.5;
+ }
+
+ brownBuffer.type = 'brown';
+ return brownBuffer;
+ }();
/**
- * Link effects together in a chain
- * Example usage: filter.chain(reverb, delay, panner);
- * May be used with an open-ended number of arguments
+ * Set type of noise to 'white', 'pink' or 'brown'.
+ * White is the default.
*
- * @method chain
- * @param {Object} [arguments] Chain together multiple sound objects
+ * @method setType
+ * @param {String} [type] 'white', 'pink' or 'brown'
*/
- p5.Effect.prototype.chain = function () {
- if (arguments.length > 0) {
- this.connect(arguments[0]);
- for (var i = 1; i < arguments.length; i += 1) {
- arguments[i - 1].connect(arguments[i]);
- }
+
+
+ p5.Noise.prototype.setType = function (type) {
+ switch (type) {
+ case 'white':
+ this.buffer = _whiteNoise;
+ break;
+
+ case 'pink':
+ this.buffer = _pinkNoise;
+ break;
+
+ case 'brown':
+ this.buffer = _brownNoise;
+ break;
+
+ default:
+ this.buffer = _whiteNoise;
}
- return this;
- };
- /**
- * Adjust the dry/wet value.
- *
- * @method drywet
- * @param {Number} [fade] The desired drywet value (0 - 1.0)
- */
- p5.Effect.prototype.drywet = function (fade) {
- if (typeof fade !== 'undefined') {
- this._drywet.fade.value = fade;
+
+ if (this.started) {
+ var now = p5sound.audiocontext.currentTime;
+ this.stop(now);
+ this.start(now + .01);
}
- return this._drywet.fade.value;
};
- /**
- * Send output to a p5.js-sound, Web Audio Node, or use signal to
- * control an AudioParam
- *
- * @method connect
- * @param {Object} unit
- */
- p5.Effect.prototype.connect = function (unit) {
- var u = unit || p5.soundOut.input;
- this.output.connect(u.input ? u.input : u);
+
+ p5.Noise.prototype.getType = function () {
+ return this.buffer.type;
};
- /**
- * Disconnect all output.
- *
- * @method disconnect
- */
- p5.Effect.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
+
+ p5.Noise.prototype.start = function () {
+ if (this.started) {
+ this.stop();
}
+
+ this.noise = p5sound.audiocontext.createBufferSource();
+ this.noise.buffer = this.buffer;
+ this.noise.loop = true;
+ this.noise.connect(this.output);
+ var now = p5sound.audiocontext.currentTime;
+ this.noise.start(now);
+ this.started = true;
};
- p5.Effect.prototype.dispose = function () {
- // remove refernce form soundArray
+
+ p5.Noise.prototype.stop = function () {
+ var now = p5sound.audiocontext.currentTime;
+
+ if (this.noise) {
+ this.noise.stop(now);
+ this.started = false;
+ }
+ };
+
+ p5.Noise.prototype.dispose = function () {
+ var now = p5sound.audiocontext.currentTime; // remove reference from soundArray
+
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
- if (this.input) {
- this.input.disconnect();
- delete this.input;
+
+ if (this.noise) {
+ this.noise.disconnect();
+ this.stop(now);
}
+
if (this.output) {
this.output.disconnect();
- delete this.output;
- }
- if (this._drywet) {
- this._drywet.disconnect();
- delete this._drywet;
}
- if (this.wet) {
- this.wet.disconnect();
- delete this.wet;
+
+ if (this.panner) {
+ this.panner.disconnect();
}
- this.ac = undefined;
+
+ this.output = null;
+ this.panner = null;
+ this.buffer = null;
+ this.noise = null;
};
- return p5.Effect;
-}(master, Tone_component_CrossFade);
-var filter;
-'use strict';
-filter = function () {
- var p5sound = master;
- var Effect = effect;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 44 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1); // an array of input sources
+
+
+ p5sound.inputSources = [];
/**
- * A p5.Filter uses a Web Audio Biquad Filter to filter
- * the frequency response of an input source. Subclasses
- * include:
- * * p5.LowPass
:
- * Allows frequencies below the cutoff frequency to pass through,
- * and attenuates frequencies above the cutoff.
- * * p5.HighPass
:
- * The opposite of a lowpass filter.
- * * p5.BandPass
:
- * Allows a range of frequencies to pass through and attenuates
- * the frequencies below and above this frequency range.
+ * Get audio from an input, i.e. your computer's microphone.
*
- * The .res()
method controls either width of the
- * bandpass, or resonance of the low/highpass cutoff frequency.
+ * Turn the mic on/off with the start() and stop() methods. When the mic
+ * is on, its volume can be measured with getLevel or by connecting an
+ * FFT object.
*
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
- * disconnect() are available.
+ * If you want to hear the AudioIn, use the .connect() method.
+ * AudioIn does not connect to p5.sound output by default to prevent
+ * feedback.
*
- * @class p5.Filter
- * @extends p5.Effect
+ * Note: This uses the getUserMedia/
+ * Stream API, which is not supported by certain browsers. Access in Chrome browser
+ * is limited to localhost and https, but access over http may be limited.
+ *
+ * @class p5.AudioIn
* @constructor
- * @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass'
+ * @param {Function} [errorCallback] A function to call if there is an error
+ * accessing the AudioIn. For example,
+ * Safari and iOS devices do not
+ * currently allow microphone access.
* @example
*
- * var fft, noise, filter;
- *
- * function setup() {
- * fill(255, 40, 255);
- *
- * filter = new p5.BandPass();
- *
- * noise = new p5.Noise();
- * // disconnect unfiltered noise,
- * // and connect to filter
- * noise.disconnect();
- * noise.connect(filter);
- * noise.start();
- *
- * fft = new p5.FFT();
- * }
- *
- * function draw() {
- * background(30);
- *
- * // set the BandPass frequency based on mouseX
- * var freq = map(mouseX, 0, width, 20, 10000);
- * filter.freq(freq);
- * // give the filter a narrow band (lower res = wider bandpass)
- * filter.res(50);
- *
- * // draw filtered spectrum
- * var spectrum = fft.analyze();
- * noStroke();
- * for (var i = 0; i < spectrum.length; i++) {
- * var x = map(i, 0, spectrum.length, 0, width);
- * var h = -height + map(spectrum[i], 0, 255, height, 0);
- * rect(x, height, width/spectrum.length, h);
- * }
- *
- * isMouseOverCanvas();
+ * var mic;
+ * function setup(){
+ * mic = new p5.AudioIn()
+ * mic.start();
* }
- *
- * function isMouseOverCanvas() {
- * var mX = mouseX, mY = mouseY;
- * if (mX > 0 && mX < width && mY < height && mY > 0) {
- * noise.amp(0.5, 0.2);
- * } else {
- * noise.amp(0, 0.2);
- * }
+ * function draw(){
+ * background(0);
+ * micLevel = mic.getLevel();
+ * ellipse(width/2, constrain(height-micLevel*height*5, 0, height), 10, 10);
* }
*
*/
- //constructor with inheritance
- p5.Filter = function (type) {
- Effect.call(this);
- //add extend Effect by adding a Biquad Filter
+
+ p5.AudioIn = function (errorCallback) {
+ // set up audio input
+
/**
- * The p5.Filter is built with a
- *
- * Web Audio BiquadFilter Node.
- *
- * @property {DelayNode} biquadFilter
+ * @property {GainNode} input
*/
- this.biquad = this.ac.createBiquadFilter();
- this.input.connect(this.biquad);
- this.biquad.connect(this.wet);
- if (type) {
- this.setType(type);
- }
- //Properties useful for the toggle method.
- this._on = true;
- this._untoggledType = this.biquad.type;
+ this.input = p5sound.audiocontext.createGain();
+ /**
+ * @property {GainNode} output
+ */
+
+ this.output = p5sound.audiocontext.createGain();
+ /**
+ * @property {MediaStream|null} stream
+ */
+
+ this.stream = null;
+ /**
+ * @property {MediaStreamAudioSourceNode|null} mediaStream
+ */
+
+ this.mediaStream = null;
+ /**
+ * @property {Number|null} currentSource
+ */
+
+ this.currentSource = null;
+ /**
+ * Client must allow browser to access their microphone / audioin source.
+ * Default: false. Will become true when the client enables acces.
+ *
+ * @property {Boolean} enabled
+ */
+
+ this.enabled = false;
+ /**
+ * Input amplitude, connect to it by default but not to master out
+ *
+ * @property {p5.Amplitude} amplitude
+ */
+
+ this.amplitude = new p5.Amplitude();
+ this.output.connect(this.amplitude.input);
+
+ if (!window.MediaStreamTrack || !window.navigator.mediaDevices || !window.navigator.mediaDevices.getUserMedia) {
+ errorCallback ? errorCallback() : window.alert('This browser does not support MediaStreamTrack and mediaDevices');
+ } // add to soundArray so we can dispose on close
+
+
+ p5sound.soundArray.push(this);
};
- p5.Filter.prototype = Object.create(Effect.prototype);
/**
- * Filter an audio signal according to a set
- * of filter parameters.
+ * Start processing audio input. This enables the use of other
+ * AudioIn methods like getLevel(). Note that by default, AudioIn
+ * is not connected to p5.sound's output. So you won't hear
+ * anything unless you use the connect() method.
*
- * @method process
- * @param {Object} Signal An object that outputs audio
- * @param {Number} [freq] Frequency in Hz, from 10 to 22050
- * @param {Number} [res] Resonance/Width of the filter frequency
- * from 0.001 to 1000
- */
- p5.Filter.prototype.process = function (src, freq, res, time) {
- src.connect(this.input);
- this.set(freq, res, time);
- };
- /**
- * Set the frequency and the resonance of the filter.
+ * Certain browsers limit access to the user's microphone. For example,
+ * Chrome only allows access from localhost and over https. For this reason,
+ * you may want to include an errorCallback—a function that is called in case
+ * the browser won't provide mic access.
*
- * @method set
- * @param {Number} [freq] Frequency in Hz, from 10 to 22050
- * @param {Number} [res] Resonance (Q) from 0.001 to 1000
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
+ * @method start
+ * @for p5.AudioIn
+ * @param {Function} [successCallback] Name of a function to call on
+ * success.
+ * @param {Function} [errorCallback] Name of a function to call if
+ * there was an error. For example,
+ * some browsers do not support
+ * getUserMedia.
*/
- p5.Filter.prototype.set = function (freq, res, time) {
- if (freq) {
- this.freq(freq, time);
- }
- if (res) {
- this.res(res, time);
+
+
+ p5.AudioIn.prototype.start = function (successCallback, errorCallback) {
+ var self = this;
+
+ if (this.stream) {
+ this.stop();
+ } // set the audio source
+
+
+ var audioSource = p5sound.inputSources[self.currentSource];
+ var constraints = {
+ audio: {
+ sampleRate: p5sound.audiocontext.sampleRate,
+ echoCancellation: false
+ }
+ }; // if developers determine which source to use
+
+ if (p5sound.inputSources[this.currentSource]) {
+ constraints.audio.deviceId = audioSource.deviceId;
}
+
+ window.navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
+ self.stream = stream;
+ self.enabled = true; // Wrap a MediaStreamSourceNode around the live input
+
+ self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream);
+ self.mediaStream.connect(self.output); // only send to the Amplitude reader, so we can see it but not hear it.
+
+ self.amplitude.setInput(self.output);
+ if (successCallback) successCallback();
+ })["catch"](function (err) {
+ if (errorCallback) errorCallback(err);else console.error(err);
+ });
};
/**
- * Set the filter frequency, in Hz, from 10 to 22050 (the range of
- * human hearing, although in reality most people hear in a narrower
- * range).
+ * Turn the AudioIn off. If the AudioIn is stopped, it cannot getLevel().
+ * If re-starting, the user may be prompted for permission access.
*
- * @method freq
- * @param {Number} freq Filter Frequency
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- * @return {Number} value Returns the current frequency value
+ * @method stop
+ * @for p5.AudioIn
*/
- p5.Filter.prototype.freq = function (freq, time) {
- var t = time || 0;
- if (freq <= 0) {
- freq = 1;
- }
- if (typeof freq === 'number') {
- this.biquad.frequency.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.biquad.frequency.exponentialRampToValueAtTime(freq, this.ac.currentTime + 0.02 + t);
- } else if (freq) {
- freq.connect(this.biquad.frequency);
+
+
+ p5.AudioIn.prototype.stop = function () {
+ if (this.stream) {
+ this.stream.getTracks().forEach(function (track) {
+ track.stop();
+ });
+ this.mediaStream.disconnect();
+ delete this.mediaStream;
+ delete this.stream;
}
- return this.biquad.frequency.value;
};
/**
- * Controls either width of a bandpass frequency,
- * or the resonance of a low/highpass cutoff frequency.
+ * Connect to an audio unit. If no parameter is provided, will
+ * connect to the master output (i.e. your speakers).
*
- * @method res
- * @param {Number} res Resonance/Width of filter freq
- * from 0.001 to 1000
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- * @return {Number} value Returns the current res value
+ * @method connect
+ * @for p5.AudioIn
+ * @param {Object} [unit] An object that accepts audio input,
+ * such as an FFT
*/
- p5.Filter.prototype.res = function (res, time) {
- var t = time || 0;
- if (typeof res === 'number') {
- this.biquad.Q.value = res;
- this.biquad.Q.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.biquad.Q.linearRampToValueAtTime(res, this.ac.currentTime + 0.02 + t);
- } else if (res) {
- res.connect(this.biquad.Q);
+
+
+ p5.AudioIn.prototype.connect = function (unit) {
+ if (unit) {
+ if (unit.hasOwnProperty('input')) {
+ this.output.connect(unit.input);
+ } else if (unit.hasOwnProperty('analyser')) {
+ this.output.connect(unit.analyser);
+ } else {
+ this.output.connect(unit);
+ }
+ } else {
+ this.output.connect(p5sound.input);
}
- return this.biquad.Q.value;
};
/**
- * Controls the gain attribute of a Biquad Filter.
- * This is distinctly different from .amp() which is inherited from p5.Effect
- * .amp() controls the volume via the output gain node
- * p5.Filter.gain() controls the gain parameter of a Biquad Filter node.
+ * Disconnect the AudioIn from all audio units. For example, if
+ * connect() had been called, disconnect() will stop sending
+ * signal to your speakers.
*
- * @method gain
- * @param {Number} gain
- * @return {Number} Returns the current or updated gain value
+ * @method disconnect
+ * @for p5.AudioIn
*/
- p5.Filter.prototype.gain = function (gain, time) {
- var t = time || 0;
- if (typeof gain === 'number') {
- this.biquad.gain.value = gain;
- this.biquad.gain.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.biquad.gain.linearRampToValueAtTime(gain, this.ac.currentTime + 0.02 + t);
- } else if (gain) {
- gain.connect(this.biquad.gain);
+
+
+ p5.AudioIn.prototype.disconnect = function () {
+ if (this.output) {
+ this.output.disconnect(); // stay connected to amplitude even if not outputting to p5
+
+ this.output.connect(this.amplitude.input);
}
- return this.biquad.gain.value;
};
/**
- * Toggle function. Switches between the specified type and allpass
+ * Read the Amplitude (volume level) of an AudioIn. The AudioIn
+ * class contains its own instance of the Amplitude class to help
+ * make it easy to get a microphone's volume level. Accepts an
+ * optional smoothing value (0.0 < 1.0). NOTE: AudioIn must
+ * .start() before using .getLevel().
*
- * @method toggle
- * @return {boolean} [Toggle value]
+ * @method getLevel
+ * @for p5.AudioIn
+ * @param {Number} [smoothing] Smoothing is 0.0 by default.
+ * Smooths values based on previous values.
+ * @return {Number} Volume level (between 0.0 and 1.0)
*/
- p5.Filter.prototype.toggle = function () {
- this._on = !this._on;
- if (this._on === true) {
- this.biquad.type = this._untoggledType;
- } else if (this._on === false) {
- this.biquad.type = 'allpass';
+
+
+ p5.AudioIn.prototype.getLevel = function (smoothing) {
+ if (smoothing) {
+ this.amplitude.smoothing = smoothing;
}
- return this._on;
+
+ return this.amplitude.getLevel();
};
/**
- * Set the type of a p5.Filter. Possible types include:
- * "lowpass" (default), "highpass", "bandpass",
- * "lowshelf", "highshelf", "peaking", "notch",
- * "allpass".
+ * Set amplitude (volume) of a mic input between 0 and 1.0.
*
- * @method setType
- * @param {String} t
+ * @method amp
+ * @for p5.AudioIn
+ * @param {Number} vol between 0 and 1.0
+ * @param {Number} [time] ramp time (optional)
*/
- p5.Filter.prototype.setType = function (t) {
- this.biquad.type = t;
- this._untoggledType = this.biquad.type;
- };
- p5.Filter.prototype.dispose = function () {
- // remove reference from soundArray
- Effect.prototype.dispose.apply(this);
- if (this.biquad) {
- this.biquad.disconnect();
- delete this.biquad;
+
+
+ p5.AudioIn.prototype.amp = function (vol, t) {
+ if (t) {
+ var rampTime = t || 0;
+ var currentVol = this.output.gain.value;
+ this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
+ this.output.gain.setValueAtTime(currentVol, p5sound.audiocontext.currentTime);
+ this.output.gain.linearRampToValueAtTime(vol, rampTime + p5sound.audiocontext.currentTime);
+ } else {
+ this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime);
+ this.output.gain.setValueAtTime(vol, p5sound.audiocontext.currentTime);
}
};
/**
- * Constructor: new p5.LowPass()
Filter.
- * This is the same as creating a p5.Filter and then calling
- * its method setType('lowpass')
.
- * See p5.Filter for methods.
+ * Returns a list of available input sources. This is a wrapper
+ * for and it returns a Promise.
*
- * @class p5.LowPass
- * @constructor
- * @extends p5.Filter
- */
- p5.LowPass = function () {
- p5.Filter.call(this, 'lowpass');
- };
- p5.LowPass.prototype = Object.create(p5.Filter.prototype);
- /**
- * Constructor: new p5.HighPass()
Filter.
- * This is the same as creating a p5.Filter and then calling
- * its method setType('highpass')
.
- * See p5.Filter for methods.
+ * @method getSources
+ * @for p5.AudioIn
+ * @param {Function} [successCallback] This callback function handles the sources when they
+ * have been enumerated. The callback function
+ * receives the deviceList array as its only argument
+ * @param {Function} [errorCallback] This optional callback receives the error
+ * message as its argument.
+ * @returns {Promise} Returns a Promise that can be used in place of the callbacks, similar
+ * to the enumerateDevices() method
+ * @example
+ *
+ * var audiograb;
*
- * @class p5.HighPass
- * @constructor
- * @extends p5.Filter
- */
- p5.HighPass = function () {
- p5.Filter.call(this, 'highpass');
- };
- p5.HighPass.prototype = Object.create(p5.Filter.prototype);
- /**
- * Constructor: new p5.BandPass()
Filter.
- * This is the same as creating a p5.Filter and then calling
- * its method setType('bandpass')
.
- * See p5.Filter for methods.
+ * function setup(){
+ * //new audioIn
+ * audioGrab = new p5.AudioIn();
*
- * @class p5.BandPass
- * @constructor
- * @extends p5.Filter
+ * audioGrab.getSources(function(deviceList) {
+ * //print out the array of available sources
+ * console.log(deviceList);
+ * //set the source to the first item in the deviceList array
+ * audioGrab.setSource(0);
+ * });
+ * }
+ *
*/
- p5.BandPass = function () {
- p5.Filter.call(this, 'bandpass');
+
+
+ p5.AudioIn.prototype.getSources = function (onSuccess, onError) {
+ return new Promise(function (resolve, reject) {
+ window.navigator.mediaDevices.enumerateDevices().then(function (devices) {
+ p5sound.inputSources = devices.filter(function (device) {
+ return device.kind === 'audioinput';
+ });
+ resolve(p5sound.inputSources);
+
+ if (onSuccess) {
+ onSuccess(p5sound.inputSources);
+ }
+ })["catch"](function (error) {
+ reject(error);
+
+ if (onError) {
+ onError(error);
+ } else {
+ console.error('This browser does not support MediaStreamTrack.getSources()');
+ }
+ });
+ });
};
- p5.BandPass.prototype = Object.create(p5.Filter.prototype);
- return p5.Filter;
-}(master, effect);
-var src_eqFilter;
-'use strict';
-src_eqFilter = function () {
- var Filter = filter;
- var p5sound = master;
/**
- * EQFilter extends p5.Filter with constraints
- * necessary for the p5.EQ
+ * Set the input source. Accepts a number representing a
+ * position in the array returned by getSources().
+ * This is only available in browsers that support
+ * navigator.mediaDevices.enumerateDevices().
*
- * @private
+ * @method setSource
+ * @for p5.AudioIn
+ * @param {number} num position of input source in the array
*/
- var EQFilter = function (freq, res) {
- Filter.call(this, 'peaking');
- this.disconnect();
- this.set(freq, res);
- this.biquad.gain.value = 0;
- delete this.input;
- delete this.output;
- delete this._drywet;
- delete this.wet;
- };
- EQFilter.prototype = Object.create(Filter.prototype);
- EQFilter.prototype.amp = function () {
- console.warn('`amp()` is not available for p5.EQ bands. Use `.gain()`');
- };
- EQFilter.prototype.drywet = function () {
- console.warn('`drywet()` is not available for p5.EQ bands.');
- };
- EQFilter.prototype.connect = function (unit) {
- var u = unit || p5.soundOut.input;
- if (this.biquad) {
- this.biquad.connect(u.input ? u.input : u);
+
+
+ p5.AudioIn.prototype.setSource = function (num) {
+ if (p5sound.inputSources.length > 0 && num < p5sound.inputSources.length) {
+ // set the current source
+ this.currentSource = num;
+ console.log('set source to ', p5sound.inputSources[this.currentSource]);
} else {
- this.output.connect(u.input ? u.input : u);
- }
- };
- EQFilter.prototype.disconnect = function () {
- if (this.biquad) {
- this.biquad.disconnect();
+ console.log('unable to set input source');
+ } // restart stream if currently active
+
+
+ if (this.stream && this.stream.active) {
+ this.start();
}
- };
- EQFilter.prototype.dispose = function () {
- // remove reference form soundArray
+ }; // private method
+
+
+ p5.AudioIn.prototype.dispose = function () {
+ // remove reference from soundArray
var index = p5sound.soundArray.indexOf(this);
p5sound.soundArray.splice(index, 1);
- this.disconnect();
- delete this.biquad;
+ this.stop();
+
+ if (this.output) {
+ this.output.disconnect();
+ }
+
+ if (this.amplitude) {
+ this.amplitude.disconnect();
+ }
+
+ delete this.amplitude;
+ delete this.output;
};
- return EQFilter;
-}(filter, master);
-var eq;
-'use strict';
-eq = function () {
- var Effect = effect;
- var EQFilter = src_eqFilter;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 45 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(2),__webpack_require__(46),__webpack_require__(52),__webpack_require__(9)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(i){"use strict";return i.CrossFade=function(e){this.createInsOuts(2,1),this.a=this.input[0]=new i.Gain,this.b=this.input[1]=new i.Gain,this.fade=new i.Signal(this.defaultArg(e,.5),i.Type.NormalRange),this._equalPowerA=new i.EqualPowerGain,this._equalPowerB=new i.EqualPowerGain,this._invert=new i.Expr("1 - $0"),this.a.connect(this.output),this.b.connect(this.output),this.fade.chain(this._equalPowerB,this.b.gain),this.fade.chain(this._invert,this._equalPowerA,this.a.gain),this._readOnly("fade")},i.extend(i.CrossFade),i.CrossFade.prototype.dispose=function(){return i.prototype.dispose.call(this),this._writable("fade"),this._equalPowerA.dispose(),this._equalPowerA=null,this._equalPowerB.dispose(),this._equalPowerB=null,this.fade.dispose(),this.fade=null,this._invert.dispose(),this._invert=null,this.a.dispose(),this.a=null,this.b.dispose(),this.b=null,this},i.CrossFade}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 46 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(7),__webpack_require__(15),__webpack_require__(3),__webpack_require__(47),__webpack_require__(25),__webpack_require__(48),__webpack_require__(24),__webpack_require__(49),__webpack_require__(50),__webpack_require__(51)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(p){"use strict";function r(e,n,r){var t=new e;return r._eval(n[0]).connect(t,0,0),r._eval(n[1]).connect(t,0,1),t}function t(e,n,r){var t=new e;return r._eval(n[0]).connect(t,0,0),t}function o(e){return e?parseFloat(e):void 0}function i(e){return e&&e.args?parseFloat(e.args):void 0}return p.Expr=function(){var n=this._replacements(Array.prototype.slice.call(arguments)),e=this._parseInputs(n);this._nodes=[],this.input=new Array(e);for(var r=0;r
*/
+
+
p5.EQ = function (_eqsize) {
- Effect.call(this);
- //p5.EQ can be of size (3) or (8), defaults to 3
+ Effect.call(this); //p5.EQ can be of size (3) or (8), defaults to 3
+
_eqsize = _eqsize === 3 || _eqsize === 8 ? _eqsize : 3;
var factor;
_eqsize === 3 ? factor = Math.pow(2, 3) : factor = 2;
/**
* The p5.EQ is built with abstracted p5.Filter objects.
- * To modify any bands, use methods of the
* p5.Filter API, especially `gain` and `freq`.
* Bands are stored in an array, with indices 0 - 3, or 0 - 7
* @property {Array} bands
*
*/
+
this.bands = [];
var freq, res;
+
for (var i = 0; i < _eqsize; i++) {
if (i === _eqsize - 1) {
freq = 21000;
- res = 0.01;
+ res = .01;
} else if (i === 0) {
freq = 100;
- res = 0.1;
+ res = .1;
} else if (i === 1) {
freq = _eqsize === 3 ? 360 * factor : 360;
res = 1;
@@ -8938,30 +8294,35 @@ eq = function () {
freq = this.bands[i - 1].freq() * factor;
res = 1;
}
+
this.bands[i] = this._newBand(freq, res);
+
if (i > 0) {
this.bands[i - 1].connect(this.bands[i].biquad);
} else {
this.input.connect(this.bands[i].biquad);
}
}
+
this.bands[_eqsize - 1].connect(this.output);
};
+
p5.EQ.prototype = Object.create(Effect.prototype);
/**
* Process an input by connecting it to the EQ
* @method process
* @param {Object} src Audio source
*/
+
p5.EQ.prototype.process = function (src) {
src.connect(this.input);
- };
- // /**
+ }; // /**
// * Set the frequency and gain of each band in the EQ. This method should be
// * called with 3 or 8 frequency and gain pairs, depending on the size of the EQ.
// * ex. eq.set(freq0, gain0, freq1, gain1, freq2, gain2);
// *
// * @method set
+ // * @for p5.EQ
// * @param {Number} [freq0] Frequency value for band with index 0
// * @param {Number} [gain0] Gain value for band with index 0
// * @param {Number} [freq1] Frequency value for band with index 1
@@ -8979,6 +8340,8 @@ eq = function () {
// * @param {Number} [freq7] Frequency value for band with index 7
// * @param {Number} [gain7] Gain value for band with index 7
// */
+
+
p5.EQ.prototype.set = function () {
if (arguments.length === this.bands.length * 2) {
for (var i = 0; i < arguments.length; i += 2) {
@@ -8994,30 +8357,113 @@ eq = function () {
* the raw biquad filter. This method returns an abstracted p5.Filter,
* which can be added to p5.EQ.bands, in order to create new EQ bands.
* @private
+ * @for p5.EQ
* @method _newBand
* @param {Number} freq
* @param {Number} res
* @return {Object} Abstracted Filter
*/
+
+
p5.EQ.prototype._newBand = function (freq, res) {
return new EQFilter(freq, res);
};
+
p5.EQ.prototype.dispose = function () {
Effect.prototype.dispose.apply(this);
+
if (this.bands) {
while (this.bands.length > 0) {
delete this.bands.pop().dispose();
}
+
delete this.bands;
}
};
+
return p5.EQ;
-}(effect, src_eqFilter);
-var panner3d;
-'use strict';
-panner3d = function () {
- var p5sound = master;
- var Effect = effect;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 54 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var Filter = __webpack_require__(14);
+
+ var p5sound = __webpack_require__(1);
+ /**
+ * EQFilter extends p5.Filter with constraints
+ * necessary for the p5.EQ
+ *
+ * @private
+ */
+
+
+ var EQFilter = function EQFilter(freq, res) {
+ Filter.call(this, 'peaking');
+ this.disconnect();
+ this.set(freq, res);
+ this.biquad.gain.value = 0;
+ delete this.input;
+ delete this.output;
+ delete this._drywet;
+ delete this.wet;
+ };
+
+ EQFilter.prototype = Object.create(Filter.prototype);
+
+ EQFilter.prototype.amp = function () {
+ console.warn('`amp()` is not available for p5.EQ bands. Use `.gain()`');
+ };
+
+ EQFilter.prototype.drywet = function () {
+ console.warn('`drywet()` is not available for p5.EQ bands.');
+ };
+
+ EQFilter.prototype.connect = function (unit) {
+ var u = unit || p5.soundOut.input;
+
+ if (this.biquad) {
+ this.biquad.connect(u.input ? u.input : u);
+ } else {
+ this.output.connect(u.input ? u.input : u);
+ }
+ };
+
+ EQFilter.prototype.disconnect = function () {
+ if (this.biquad) {
+ this.biquad.disconnect();
+ }
+ };
+
+ EQFilter.prototype.dispose = function () {
+ // remove reference form soundArray
+ var index = p5sound.soundArray.indexOf(this);
+ p5sound.soundArray.splice(index, 1);
+ this.disconnect();
+ delete this.biquad;
+ };
+
+ return EQFilter;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 55 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var Effect = __webpack_require__(4);
/**
* Panner3D is based on the
@@ -9034,6 +8480,8 @@ panner3d = function () {
* @class p5.Panner3D
* @constructor
*/
+
+
p5.Panner3D = function () {
Effect.call(this);
/**
@@ -9052,58 +8500,69 @@ panner3d = function () {
* @property {AudioNode} panner
*
*/
+
this.panner = this.ac.createPanner();
this.panner.panningModel = 'HRTF';
this.panner.distanceModel = 'linear';
this.panner.connect(this.output);
this.input.connect(this.panner);
};
+
p5.Panner3D.prototype = Object.create(Effect.prototype);
/**
* Connect an audio sorce
*
* @method process
+ * @for p5.Panner3D
* @param {Object} src Input source
*/
+
p5.Panner3D.prototype.process = function (src) {
src.connect(this.input);
};
/**
* Set the X,Y,Z position of the Panner
* @method set
+ * @for p5.Panner3D
* @param {Number} xVal
* @param {Number} yVal
* @param {Number} zVal
* @param {Number} time
* @return {Array} Updated x, y, z values as an array
*/
+
+
p5.Panner3D.prototype.set = function (xVal, yVal, zVal, time) {
this.positionX(xVal, time);
this.positionY(yVal, time);
this.positionZ(zVal, time);
- return [
- this.panner.positionX.value,
- this.panner.positionY.value,
- this.panner.positionZ.value
- ];
+ return [this.panner.positionX.value, this.panner.positionY.value, this.panner.positionZ.value];
};
/**
* Getter and setter methods for position coordinates
* @method positionX
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
/**
* Getter and setter methods for position coordinates
* @method positionY
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
/**
* Getter and setter methods for position coordinates
* @method positionZ
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
+
p5.Panner3D.prototype.positionX = function (xVal, time) {
var t = time || 0;
+
if (typeof xVal === 'number') {
this.panner.positionX.value = xVal;
this.panner.positionX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9111,10 +8570,13 @@ panner3d = function () {
} else if (xVal) {
xVal.connect(this.panner.positionX);
}
+
return this.panner.positionX.value;
};
+
p5.Panner3D.prototype.positionY = function (yVal, time) {
var t = time || 0;
+
if (typeof yVal === 'number') {
this.panner.positionY.value = yVal;
this.panner.positionY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9122,10 +8584,13 @@ panner3d = function () {
} else if (yVal) {
yVal.connect(this.panner.positionY);
}
+
return this.panner.positionY.value;
};
+
p5.Panner3D.prototype.positionZ = function (zVal, time) {
var t = time || 0;
+
if (typeof zVal === 'number') {
this.panner.positionZ.value = zVal;
this.panner.positionZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9133,44 +8598,52 @@ panner3d = function () {
} else if (zVal) {
zVal.connect(this.panner.positionZ);
}
+
return this.panner.positionZ.value;
};
/**
* Set the X,Y,Z position of the Panner
* @method orient
+ * @for p5.Panner3D
* @param {Number} xVal
* @param {Number} yVal
* @param {Number} zVal
* @param {Number} time
* @return {Array} Updated x, y, z values as an array
*/
+
+
p5.Panner3D.prototype.orient = function (xVal, yVal, zVal, time) {
this.orientX(xVal, time);
this.orientY(yVal, time);
this.orientZ(zVal, time);
- return [
- this.panner.orientationX.value,
- this.panner.orientationY.value,
- this.panner.orientationZ.value
- ];
+ return [this.panner.orientationX.value, this.panner.orientationY.value, this.panner.orientationZ.value];
};
/**
* Getter and setter methods for orient coordinates
* @method orientX
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
/**
* Getter and setter methods for orient coordinates
* @method orientY
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
/**
* Getter and setter methods for orient coordinates
* @method orientZ
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
+
+
p5.Panner3D.prototype.orientX = function (xVal, time) {
var t = time || 0;
+
if (typeof xVal === 'number') {
this.panner.orientationX.value = xVal;
this.panner.orientationX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9178,10 +8651,13 @@ panner3d = function () {
} else if (xVal) {
xVal.connect(this.panner.orientationX);
}
+
return this.panner.orientationX.value;
};
+
p5.Panner3D.prototype.orientY = function (yVal, time) {
var t = time || 0;
+
if (typeof yVal === 'number') {
this.panner.orientationY.value = yVal;
this.panner.orientationY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9189,10 +8665,13 @@ panner3d = function () {
} else if (yVal) {
yVal.connect(this.panner.orientationY);
}
+
return this.panner.orientationY.value;
};
+
p5.Panner3D.prototype.orientZ = function (zVal, time) {
var t = time || 0;
+
if (typeof zVal === 'number') {
this.panner.orientationZ.value = zVal;
this.panner.orientationZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9200,14 +8679,18 @@ panner3d = function () {
} else if (zVal) {
zVal.connect(this.panner.orientationZ);
}
+
return this.panner.orientationZ.value;
};
/**
* Set the rolloff factor and max distance
* @method setFalloff
+ * @for p5.Panner3D
* @param {Number} [maxDistance]
* @param {Number} [rolloffFactor]
*/
+
+
p5.Panner3D.prototype.setFalloff = function (maxDistance, rolloffFactor) {
this.maxDist(maxDistance);
this.rolloff(rolloffFactor);
@@ -9215,42 +8698,60 @@ panner3d = function () {
/**
* Maxium distance between the source and the listener
* @method maxDist
+ * @for p5.Panner3D
* @param {Number} maxDistance
* @return {Number} updated value
*/
+
+
p5.Panner3D.prototype.maxDist = function (maxDistance) {
if (typeof maxDistance === 'number') {
this.panner.maxDistance = maxDistance;
}
+
return this.panner.maxDistance;
};
/**
* How quickly the volume is reduced as the source moves away from the listener
* @method rollof
+ * @for p5.Panner3D
* @param {Number} rolloffFactor
* @return {Number} updated value
*/
+
+
p5.Panner3D.prototype.rolloff = function (rolloffFactor) {
if (typeof rolloffFactor === 'number') {
this.panner.rolloffFactor = rolloffFactor;
}
+
return this.panner.rolloffFactor;
};
+
p5.Panner3D.dispose = function () {
Effect.prototype.dispose.apply(this);
+
if (this.panner) {
this.panner.disconnect();
delete this.panner;
}
};
+
return p5.Panner3D;
-}(master, effect);
-var listener3d;
-'use strict';
-listener3d = function () {
- var p5sound = master;
- var Effect = effect;
- // /**
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 56 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var Effect = __webpack_require__(4); // /**
// * listener is a class that can construct both a Spatial Panner
// * and a Spatial Listener. The panner is based on the
// * Web Audio Spatial Panner Node
@@ -9273,18 +8774,20 @@ listener3d = function () {
// * @param {AudioParam} listener.distanceModel "linear", "inverse", or "exponential"
// * @param {String} [type] [Specify construction of a spatial panner or listener]
// */
+
+
p5.Listener3D = function (type) {
this.ac = p5sound.audiocontext;
this.listener = this.ac.listener;
- };
- // /**
+ }; // /**
// * Connect an audio sorce
// * @param {Object} src Input source
// */
+
+
p5.Listener3D.prototype.process = function (src) {
src.connect(this.input);
- };
- // /**
+ }; // /**
// * Set the X,Y,Z position of the Panner
// * @param {[Number]} xVal
// * @param {[Number]} yVal
@@ -9292,22 +8795,22 @@ listener3d = function () {
// * @param {[Number]} time
// * @return {[Array]} [Updated x, y, z values as an array]
// */
+
+
p5.Listener3D.prototype.position = function (xVal, yVal, zVal, time) {
this.positionX(xVal, time);
this.positionY(yVal, time);
this.positionZ(zVal, time);
- return [
- this.listener.positionX.value,
- this.listener.positionY.value,
- this.listener.positionZ.value
- ];
- };
- // /**
+ return [this.listener.positionX.value, this.listener.positionY.value, this.listener.positionZ.value];
+ }; // /**
// * Getter and setter methods for position coordinates
// * @return {Number} [updated coordinate value]
// */
+
+
p5.Listener3D.prototype.positionX = function (xVal, time) {
var t = time || 0;
+
if (typeof xVal === 'number') {
this.listener.positionX.value = xVal;
this.listener.positionX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9315,10 +8818,13 @@ listener3d = function () {
} else if (xVal) {
xVal.connect(this.listener.positionX);
}
+
return this.listener.positionX.value;
};
+
p5.Listener3D.prototype.positionY = function (yVal, time) {
var t = time || 0;
+
if (typeof yVal === 'number') {
this.listener.positionY.value = yVal;
this.listener.positionY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9326,10 +8832,13 @@ listener3d = function () {
} else if (yVal) {
yVal.connect(this.listener.positionY);
}
+
return this.listener.positionY.value;
};
+
p5.Listener3D.prototype.positionZ = function (zVal, time) {
var t = time || 0;
+
if (typeof zVal === 'number') {
this.listener.positionZ.value = zVal;
this.listener.positionZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9337,9 +8846,9 @@ listener3d = function () {
} else if (zVal) {
zVal.connect(this.listener.positionZ);
}
+
return this.listener.positionZ.value;
- };
- // cannot define method when class definition is commented
+ }; // cannot define method when class definition is commented
// /**
// * Overrides the listener orient() method because Listener has slightly
// * different params. In human terms, Forward vectors are the direction the
@@ -9355,6 +8864,8 @@ listener3d = function () {
// * @param {Number} time
// * @return {Array} All orienation params
// */
+
+
p5.Listener3D.prototype.orient = function (xValF, yValF, zValF, xValU, yValU, zValU, time) {
if (arguments.length === 3 || arguments.length === 4) {
time = arguments[3];
@@ -9363,41 +8874,31 @@ listener3d = function () {
this.orientForward(xValF, yValF, zValF);
this.orientUp(xValU, yValU, zValU, time);
}
- return [
- this.listener.forwardX.value,
- this.listener.forwardY.value,
- this.listener.forwardZ.value,
- this.listener.upX.value,
- this.listener.upY.value,
- this.listener.upZ.value
- ];
+
+ return [this.listener.forwardX.value, this.listener.forwardY.value, this.listener.forwardZ.value, this.listener.upX.value, this.listener.upY.value, this.listener.upZ.value];
};
+
p5.Listener3D.prototype.orientForward = function (xValF, yValF, zValF, time) {
this.forwardX(xValF, time);
this.forwardY(yValF, time);
this.forwardZ(zValF, time);
- return [
- this.listener.forwardX,
- this.listener.forwardY,
- this.listener.forwardZ
- ];
+ return [this.listener.forwardX, this.listener.forwardY, this.listener.forwardZ];
};
+
p5.Listener3D.prototype.orientUp = function (xValU, yValU, zValU, time) {
this.upX(xValU, time);
this.upY(yValU, time);
this.upZ(zValU, time);
- return [
- this.listener.upX,
- this.listener.upY,
- this.listener.upZ
- ];
- };
- // /**
+ return [this.listener.upX, this.listener.upY, this.listener.upZ];
+ }; // /**
// * Getter and setter methods for orient coordinates
// * @return {Number} [updated coordinate value]
// */
+
+
p5.Listener3D.prototype.forwardX = function (xVal, time) {
var t = time || 0;
+
if (typeof xVal === 'number') {
this.listener.forwardX.value = xVal;
this.listener.forwardX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9405,10 +8906,13 @@ listener3d = function () {
} else if (xVal) {
xVal.connect(this.listener.forwardX);
}
+
return this.listener.forwardX.value;
};
+
p5.Listener3D.prototype.forwardY = function (yVal, time) {
var t = time || 0;
+
if (typeof yVal === 'number') {
this.listener.forwardY.value = yVal;
this.listener.forwardY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9416,10 +8920,13 @@ listener3d = function () {
} else if (yVal) {
yVal.connect(this.listener.forwardY);
}
+
return this.listener.forwardY.value;
};
+
p5.Listener3D.prototype.forwardZ = function (zVal, time) {
var t = time || 0;
+
if (typeof zVal === 'number') {
this.listener.forwardZ.value = zVal;
this.listener.forwardZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9427,10 +8934,13 @@ listener3d = function () {
} else if (zVal) {
zVal.connect(this.listener.forwardZ);
}
+
return this.listener.forwardZ.value;
};
+
p5.Listener3D.prototype.upX = function (xVal, time) {
var t = time || 0;
+
if (typeof xVal === 'number') {
this.listener.upX.value = xVal;
this.listener.upX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9438,10 +8948,13 @@ listener3d = function () {
} else if (xVal) {
xVal.connect(this.listener.upX);
}
+
return this.listener.upX.value;
};
+
p5.Listener3D.prototype.upY = function (yVal, time) {
var t = time || 0;
+
if (typeof yVal === 'number') {
this.listener.upY.value = yVal;
this.listener.upY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9449,10 +8962,13 @@ listener3d = function () {
} else if (yVal) {
yVal.connect(this.listener.upY);
}
+
return this.listener.upY.value;
};
+
p5.Listener3D.prototype.upZ = function (zVal, time) {
var t = time || 0;
+
if (typeof zVal === 'number') {
this.listener.upZ.value = zVal;
this.listener.upZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
@@ -9460,15 +8976,25 @@ listener3d = function () {
} else if (zVal) {
zVal.connect(this.listener.upZ);
}
+
return this.listener.upZ.value;
};
+
return p5.Listener3D;
-}(master, effect);
-var delay;
-'use strict';
-delay = function () {
- var Filter = filter;
- var Effect = effect;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 57 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var Filter = __webpack_require__(14);
+
+ var Effect = __webpack_require__(4);
/**
* Delay is an echo effect. It processes an existing sound source,
* and outputs a delayed version of that sound. The p5.Delay can
@@ -9480,9 +9006,9 @@ delay = function () {
* original source.
*
*
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
* disconnect() are available.
* @class p5.Delay
* @extends p5.Effect
@@ -9523,6 +9049,8 @@ delay = function () {
* }
*
*/
+
+
p5.Delay = function () {
Effect.call(this);
this._split = this.ac.createChannelSplitter(2);
@@ -9536,6 +9064,7 @@ delay = function () {
*
* @property {DelayNode} leftDelay
*/
+
this.leftDelay = this.ac.createDelay();
/**
* The p5.Delay is built with two
@@ -9544,36 +9073,52 @@ delay = function () {
*
* @property {DelayNode} rightDelay
*/
+
this.rightDelay = this.ac.createDelay();
this._leftFilter = new Filter();
this._rightFilter = new Filter();
+
this._leftFilter.disconnect();
+
this._rightFilter.disconnect();
+
this._leftFilter.biquad.frequency.setValueAtTime(1200, this.ac.currentTime);
+
this._rightFilter.biquad.frequency.setValueAtTime(1200, this.ac.currentTime);
+
this._leftFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime);
- this._rightFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime);
- // graph routing
+
+ this._rightFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime); // graph routing
+
+
this.input.connect(this._split);
this.leftDelay.connect(this._leftGain);
this.rightDelay.connect(this._rightGain);
+
this._leftGain.connect(this._leftFilter.input);
+
this._rightGain.connect(this._rightFilter.input);
+
this._merge.connect(this.wet);
+
this._leftFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime);
- this._rightFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime);
- // default routing
+
+ this._rightFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime); // default routing
+
+
this.setType(0);
- this._maxDelay = this.leftDelay.delayTime.maxValue;
- // set initial feedback to 0.5
+ this._maxDelay = this.leftDelay.delayTime.maxValue; // set initial feedback to 0.5
+
this.feedback(0.5);
};
+
p5.Delay.prototype = Object.create(Effect.prototype);
/**
* Add delay to an audio signal according to a set
* of delay parameters.
*
* @method process
+ * @for p5.Delay
* @param {Object} Signal An object that outputs audio
* @param {Number} [delayTime] Time (in seconds) of the delay/echo.
* Some browsers limit delayTime to
@@ -9585,22 +9130,28 @@ delay = function () {
* below the lowPass will be part of the
* delay.
*/
+
p5.Delay.prototype.process = function (src, _delayTime, _feedback, _filter) {
var feedback = _feedback || 0;
var delayTime = _delayTime || 0;
- if (feedback >= 1) {
+
+ if (feedback >= 1.0) {
throw new Error('Feedback value will force a positive feedback loop.');
}
+
if (delayTime >= this._maxDelay) {
throw new Error('Delay Time exceeds maximum delay time of ' + this._maxDelay + ' second.');
}
+
src.connect(this.input);
this.leftDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime);
this.rightDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime);
this._leftGain.gain.value = feedback;
this._rightGain.gain.value = feedback;
+
if (_filter) {
this._leftFilter.freq(_filter);
+
this._rightFilter.freq(_filter);
}
};
@@ -9609,8 +9160,11 @@ delay = function () {
* a floating point number between 0.0 and 1.0.
*
* @method delayTime
+ * @for p5.Delay
* @param {Number} delayTime Time (in seconds) of the delay
*/
+
+
p5.Delay.prototype.delayTime = function (t) {
// if t is an audio node...
if (typeof t !== 'number') {
@@ -9631,24 +9185,28 @@ delay = function () {
* creating an infinite feedback loop. The default value is 0.5
*
* @method feedback
+ * @for p5.Delay
* @param {Number|Object} feedback 0.0 to 1.0, or an object such as an
* Oscillator that can be used to
* modulate this param
* @returns {Number} Feedback value
*
*/
+
+
p5.Delay.prototype.feedback = function (f) {
// if f is an audio node...
if (f && typeof f !== 'number') {
f.connect(this._leftGain.gain);
f.connect(this._rightGain.gain);
- } else if (f >= 1) {
+ } else if (f >= 1.0) {
throw new Error('Feedback value will force a positive feedback loop.');
} else if (typeof f === 'number') {
this._leftGain.gain.value = f;
this._rightGain.gain.value = f;
- }
- // return value of feedback
+ } // return value of feedback
+
+
return this._leftGain.gain.value;
};
/**
@@ -9656,6 +9214,7 @@ delay = function () {
* will cut off any frequencies higher than the filter frequency.
*
* @method filter
+ * @for p5.Delay
* @param {Number|Object} cutoffFreq A lowpass filter will cut off any
* frequencies higher than the filter frequency.
* @param {Number|Object} res Resonance of the filter frequency
@@ -9664,8 +9223,11 @@ delay = function () {
* High numbers (i.e. 15) will produce a resonance,
* low numbers (i.e. .2) will produce a slope.
*/
+
+
p5.Delay.prototype.filter = function (freq, q) {
this._leftFilter.set(freq, q);
+
this._rightFilter.set(freq, q);
};
/**
@@ -9674,3083 +9236,2665 @@ delay = function () {
* Any other parameter will revert to the default delay setting.
*
* @method setType
+ * @for p5.Delay
* @param {String|Number} type 'pingPong' (1) or 'default' (0)
*/
+
+
p5.Delay.prototype.setType = function (t) {
if (t === 1) {
t = 'pingPong';
}
+
this._split.disconnect();
+
this._leftFilter.disconnect();
+
this._rightFilter.disconnect();
+
this._split.connect(this.leftDelay, 0);
+
this._split.connect(this.rightDelay, 1);
+
switch (t) {
- case 'pingPong':
- this._rightFilter.setType(this._leftFilter.biquad.type);
- this._leftFilter.output.connect(this._merge, 0, 0);
- this._rightFilter.output.connect(this._merge, 0, 1);
- this._leftFilter.output.connect(this.rightDelay);
- this._rightFilter.output.connect(this.leftDelay);
- break;
- default:
- this._leftFilter.output.connect(this._merge, 0, 0);
- this._rightFilter.output.connect(this._merge, 0, 1);
- this._leftFilter.output.connect(this.leftDelay);
- this._rightFilter.output.connect(this.rightDelay);
- }
- };
- // DocBlocks for methods inherited from p5.Effect
- /**
- * Set the output level of the delay effect.
- *
- * @method amp
- * @param {Number} volume amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts rampTime
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- */
- /**
- * Send output to a p5.sound or web audio object
- *
- * @method connect
- * @param {Object} unit
- */
- /**
- * Disconnect all output.
- *
- * @method disconnect
- */
- p5.Delay.prototype.dispose = function () {
- Effect.prototype.dispose.apply(this);
- this._split.disconnect();
- this._leftFilter.dispose();
- this._rightFilter.dispose();
- this._merge.disconnect();
- this._leftGain.disconnect();
- this._rightGain.disconnect();
- this.leftDelay.disconnect();
- this.rightDelay.disconnect();
- this._split = undefined;
- this._leftFilter = undefined;
- this._rightFilter = undefined;
- this._merge = undefined;
- this._leftGain = undefined;
- this._rightGain = undefined;
- this.leftDelay = undefined;
- this.rightDelay = undefined;
- };
-}(filter, effect);
-var reverb;
-'use strict';
-reverb = function () {
- var CustomError = errorHandler;
- var Effect = effect;
- /**
- * Reverb adds depth to a sound through a large number of decaying
- * echoes. It creates the perception that sound is occurring in a
- * physical space. The p5.Reverb has paramters for Time (how long does the
- * reverb last) and decayRate (how much the sound decays with each echo)
- * that can be set with the .set() or .process() methods. The p5.Convolver
- * extends p5.Reverb allowing you to recreate the sound of actual physical
- * spaces through convolution.
- *
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
- * disconnect() are available.
- *
- * @class p5.Reverb
- * @extends p5.Effect
- * @constructor
- * @example
- *
- * var soundFile, reverb;
- * function preload() {
- * soundFile = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * reverb = new p5.Reverb();
- * soundFile.disconnect(); // so we'll only hear reverb...
- *
- * // connect soundFile to reverb, process w/
- * // 3 second reverbTime, decayRate of 2%
- * reverb.process(soundFile, 3, 2);
- * soundFile.play();
- * }
- *
- */
- p5.Reverb = function () {
- Effect.call(this);
- this._initConvolverNode();
- // otherwise, Safari distorts
- this.input.gain.value = 0.5;
- // default params
- this._seconds = 3;
- this._decay = 2;
- this._reverse = false;
- this._buildImpulse();
- };
- p5.Reverb.prototype = Object.create(Effect.prototype);
- p5.Reverb.prototype._initConvolverNode = function () {
- this.convolverNode = this.ac.createConvolver();
- this.input.connect(this.convolverNode);
- this.convolverNode.connect(this.wet);
- };
- p5.Reverb.prototype._teardownConvolverNode = function () {
- if (this.convolverNode) {
- this.convolverNode.disconnect();
- delete this.convolverNode;
- }
- };
- p5.Reverb.prototype._setBuffer = function (audioBuffer) {
- this._teardownConvolverNode();
- this._initConvolverNode();
- this.convolverNode.buffer = audioBuffer;
- };
- /**
- * Connect a source to the reverb, and assign reverb parameters.
- *
- * @method process
- * @param {Object} src p5.sound / Web Audio object with a sound
- * output.
- * @param {Number} [seconds] Duration of the reverb, in seconds.
- * Min: 0, Max: 10. Defaults to 3.
- * @param {Number} [decayRate] Percentage of decay with each echo.
- * Min: 0, Max: 100. Defaults to 2.
- * @param {Boolean} [reverse] Play the reverb backwards or forwards.
- */
- p5.Reverb.prototype.process = function (src, seconds, decayRate, reverse) {
- src.connect(this.input);
- var rebuild = false;
- if (seconds) {
- this._seconds = seconds;
- rebuild = true;
- }
- if (decayRate) {
- this._decay = decayRate;
- }
- if (reverse) {
- this._reverse = reverse;
- }
- if (rebuild) {
- this._buildImpulse();
- }
- };
- /**
- * Set the reverb settings. Similar to .process(), but without
- * assigning a new input.
- *
- * @method set
- * @param {Number} [seconds] Duration of the reverb, in seconds.
- * Min: 0, Max: 10. Defaults to 3.
- * @param {Number} [decayRate] Percentage of decay with each echo.
- * Min: 0, Max: 100. Defaults to 2.
- * @param {Boolean} [reverse] Play the reverb backwards or forwards.
- */
- p5.Reverb.prototype.set = function (seconds, decayRate, reverse) {
- var rebuild = false;
- if (seconds) {
- this._seconds = seconds;
- rebuild = true;
- }
- if (decayRate) {
- this._decay = decayRate;
- }
- if (reverse) {
- this._reverse = reverse;
- }
- if (rebuild) {
- this._buildImpulse();
+ case 'pingPong':
+ this._rightFilter.setType(this._leftFilter.biquad.type);
+
+ this._leftFilter.output.connect(this._merge, 0, 0);
+
+ this._rightFilter.output.connect(this._merge, 0, 1);
+
+ this._leftFilter.output.connect(this.rightDelay);
+
+ this._rightFilter.output.connect(this.leftDelay);
+
+ break;
+
+ default:
+ this._leftFilter.output.connect(this._merge, 0, 0);
+
+ this._rightFilter.output.connect(this._merge, 0, 1);
+
+ this._leftFilter.output.connect(this.leftDelay);
+
+ this._rightFilter.output.connect(this.rightDelay);
+
}
- };
- // DocBlocks for methods inherited from p5.Effect
+ }; // DocBlocks for methods inherited from p5.Effect
+
/**
- * Set the output level of the reverb effect.
+ * Set the output level of the delay effect.
*
* @method amp
+ * @for p5.Delay
* @param {Number} volume amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts rampTime
- * @param {Number} [timeFromNow] schedule this event to happen
+ * @param {Number} [rampTime] create a fade that lasts rampTime
+ * @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
*/
+
/**
* Send output to a p5.sound or web audio object
*
* @method connect
+ * @for p5.Delay
* @param {Object} unit
*/
+
/**
* Disconnect all output.
*
* @method disconnect
+ * @for p5.Delay
*/
- /**
- * Inspired by Simple Reverb by Jordan Santell
- * https://github.com/web-audio-components/simple-reverb/blob/master/index.js
- *
- * Utility function for building an impulse response
- * based on the module parameters.
- *
- * @private
- */
- p5.Reverb.prototype._buildImpulse = function () {
- var rate = this.ac.sampleRate;
- var length = rate * this._seconds;
- var decay = this._decay;
- var impulse = this.ac.createBuffer(2, length, rate);
- var impulseL = impulse.getChannelData(0);
- var impulseR = impulse.getChannelData(1);
- var n, i;
- for (i = 0; i < length; i++) {
- n = this._reverse ? length - i : i;
- impulseL[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
- impulseR[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
- }
- this._setBuffer(impulse);
- };
- p5.Reverb.prototype.dispose = function () {
+
+
+ p5.Delay.prototype.dispose = function () {
Effect.prototype.dispose.apply(this);
- this._teardownConvolverNode();
- };
- // =======================================================================
- // *** p5.Convolver ***
- // =======================================================================
- /**
- * p5.Convolver extends p5.Reverb. It can emulate the sound of real
- * physical spaces through a process called
- * convolution.
- *
- * Convolution multiplies any audio input by an "impulse response"
- * to simulate the dispersion of sound over time. The impulse response is
- * generated from an audio file that you provide. One way to
- * generate an impulse response is to pop a balloon in a reverberant space
- * and record the echo. Convolution can also be used to experiment with
- * sound.
- *
- * Use the method createConvolution(path)
to instantiate a
- * p5.Convolver with a path to your impulse response audio file.
- *
- * @class p5.Convolver
- * @extends p5.Effect
- * @constructor
- * @param {String} path path to a sound file
- * @param {Function} [callback] function to call when loading succeeds
- * @param {Function} [errorCallback] function to call if loading fails.
- * This function will receive an error or
- * XMLHttpRequest object with information
- * about what went wrong.
- * @example
- *
- * var cVerb, sound;
- * function preload() {
- * // We have both MP3 and OGG versions of all sound assets
- * soundFormats('ogg', 'mp3');
- *
- * // Try replacing 'bx-spring' with other soundfiles like
- * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
- * cVerb = createConvolver('assets/bx-spring.mp3');
- *
- * // Try replacing 'Damscray_DancingTiger' with
- * // 'beat', 'doorbell', lucky_dragons_-_power_melody'
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * // disconnect from master output...
- * sound.disconnect();
- *
- * // ...and process with cVerb
- * // so that we only hear the convolution
- * cVerb.process(sound);
- *
- * sound.play();
- * }
- *
- */
- p5.Convolver = function (path, callback, errorCallback) {
- p5.Reverb.call(this);
- /**
- * Internally, the p5.Convolver uses the a
- *
- * Web Audio Convolver Node.
- *
- * @property {ConvolverNode} convolverNode
- */
- this._initConvolverNode();
- // otherwise, Safari distorts
- this.input.gain.value = 0.5;
- if (path) {
- this.impulses = [];
- this._loadBuffer(path, callback, errorCallback);
- } else {
- // parameters
- this._seconds = 3;
- this._decay = 2;
- this._reverse = false;
- this._buildImpulse();
- }
- };
- p5.Convolver.prototype = Object.create(p5.Reverb.prototype);
- p5.prototype.registerPreloadMethod('createConvolver', p5.prototype);
- /**
- * Create a p5.Convolver. Accepts a path to a soundfile
- * that will be used to generate an impulse response.
- *
- * @method createConvolver
- * @param {String} path path to a sound file
- * @param {Function} [callback] function to call if loading is successful.
- * The object will be passed in as the argument
- * to the callback function.
- * @param {Function} [errorCallback] function to call if loading is not successful.
- * A custom error will be passed in as the argument
- * to the callback function.
- * @return {p5.Convolver}
- * @example
- *
- * var cVerb, sound;
- * function preload() {
- * // We have both MP3 and OGG versions of all sound assets
- * soundFormats('ogg', 'mp3');
- *
- * // Try replacing 'bx-spring' with other soundfiles like
- * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
- * cVerb = createConvolver('assets/bx-spring.mp3');
- *
- * // Try replacing 'Damscray_DancingTiger' with
- * // 'beat', 'doorbell', lucky_dragons_-_power_melody'
- * sound = loadSound('assets/Damscray_DancingTiger.mp3');
- * }
- *
- * function setup() {
- * // disconnect from master output...
- * sound.disconnect();
- *
- * // ...and process with cVerb
- * // so that we only hear the convolution
- * cVerb.process(sound);
- *
- * sound.play();
- * }
- *
- */
- p5.prototype.createConvolver = function (path, callback, errorCallback) {
- // if loading locally without a server
- if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
- alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
- }
- var self = this;
- var cReverb = new p5.Convolver(path, function (buffer) {
- if (typeof callback === 'function') {
- callback(buffer);
- }
- if (typeof self._decrementPreload === 'function') {
- self._decrementPreload();
- }
- }, errorCallback);
- cReverb.impulses = [];
- return cReverb;
- };
- /**
- * Private method to load a buffer as an Impulse Response,
- * assign it to the convolverNode, and add to the Array of .impulses.
- *
- * @param {String} path
- * @param {Function} callback
- * @param {Function} errorCallback
- * @private
- */
- p5.Convolver.prototype._loadBuffer = function (path, callback, errorCallback) {
- var path = p5.prototype._checkFileFormats(path);
- var self = this;
- var errorTrace = new Error().stack;
- var ac = p5.prototype.getAudioContext();
- var request = new XMLHttpRequest();
- request.open('GET', path, true);
- request.responseType = 'arraybuffer';
- request.onload = function () {
- if (request.status === 200) {
- // on success loading file:
- ac.decodeAudioData(request.response, function (buff) {
- var buffer = {};
- var chunks = path.split('/');
- buffer.name = chunks[chunks.length - 1];
- buffer.audioBuffer = buff;
- self.impulses.push(buffer);
- self._setBuffer(buffer.audioBuffer);
- if (callback) {
- callback(buffer);
- }
- }, // error decoding buffer. "e" is undefined in Chrome 11/22/2015
- function () {
- var err = new CustomError('decodeAudioData', errorTrace, self.url);
- var msg = 'AudioContext error at decodeAudioData for ' + self.url;
- if (errorCallback) {
- err.msg = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- });
- } else {
- var err = new CustomError('loadConvolver', errorTrace, self.url);
- var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
- if (errorCallback) {
- err.message = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- }
- };
- // if there is another error, aside from 404...
- request.onerror = function () {
- var err = new CustomError('loadConvolver', errorTrace, self.url);
- var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
- if (errorCallback) {
- err.message = msg;
- errorCallback(err);
- } else {
- console.error(msg + '\n The error stack trace includes: \n' + err.stack);
- }
- };
- request.send();
+
+ this._split.disconnect();
+
+ this._leftFilter.dispose();
+
+ this._rightFilter.dispose();
+
+ this._merge.disconnect();
+
+ this._leftGain.disconnect();
+
+ this._rightGain.disconnect();
+
+ this.leftDelay.disconnect();
+ this.rightDelay.disconnect();
+ this._split = undefined;
+ this._leftFilter = undefined;
+ this._rightFilter = undefined;
+ this._merge = undefined;
+ this._leftGain = undefined;
+ this._rightGain = undefined;
+ this.leftDelay = undefined;
+ this.rightDelay = undefined;
};
- p5.Convolver.prototype.set = null;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 58 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var CustomError = __webpack_require__(10);
+
+ var Effect = __webpack_require__(4);
/**
- * Connect a source to the reverb, and assign reverb parameters.
+ * Reverb adds depth to a sound through a large number of decaying
+ * echoes. It creates the perception that sound is occurring in a
+ * physical space. The p5.Reverb has paramters for Time (how long does the
+ * reverb last) and decayRate (how much the sound decays with each echo)
+ * that can be set with the .set() or .process() methods. The p5.Convolver
+ * extends p5.Reverb allowing you to recreate the sound of actual physical
+ * spaces through convolution.
*
- * @method process
- * @param {Object} src p5.sound / Web Audio object with a sound
- * output.
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
+ *
+ * @class p5.Reverb
+ * @extends p5.Effect
+ * @constructor
* @example
*
- * var cVerb, sound;
+ * var soundFile, reverb;
* function preload() {
- * soundFormats('ogg', 'mp3');
- *
- * cVerb = createConvolver('assets/concrete-tunnel.mp3');
- *
- * sound = loadSound('assets/beat.mp3');
+ * soundFile = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
* function setup() {
- * // disconnect from master output...
- * sound.disconnect();
- *
- * // ...and process with (i.e. connect to) cVerb
- * // so that we only hear the convolution
- * cVerb.process(sound);
+ * reverb = new p5.Reverb();
+ * soundFile.disconnect(); // so we'll only hear reverb...
*
- * sound.play();
+ * // connect soundFile to reverb, process w/
+ * // 3 second reverbTime, decayRate of 2%
+ * reverb.process(soundFile, 3, 2);
+ * soundFile.play();
* }
*
*/
- p5.Convolver.prototype.process = function (src) {
- src.connect(this.input);
+
+
+ p5.Reverb = function () {
+ Effect.call(this);
+
+ this._initConvolverNode(); // otherwise, Safari distorts
+
+
+ this.input.gain.value = 0.5; // default params
+
+ this._seconds = 3;
+ this._decay = 2;
+ this._reverse = false;
+
+ this._buildImpulse();
};
- /**
- * If you load multiple impulse files using the .addImpulse method,
- * they will be stored as Objects in this Array. Toggle between them
- * with the toggleImpulse(id)
method.
- *
- * @property {Array} impulses
- */
- p5.Convolver.prototype.impulses = [];
- /**
- * Load and assign a new Impulse Response to the p5.Convolver.
- * The impulse is added to the .impulses
array. Previous
- * impulses can be accessed with the .toggleImpulse(id)
- * method.
- *
- * @method addImpulse
- * @param {String} path path to a sound file
- * @param {Function} callback function (optional)
- * @param {Function} errorCallback function (optional)
- */
- p5.Convolver.prototype.addImpulse = function (path, callback, errorCallback) {
- // if loading locally without a server
- if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
- alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
- }
- this._loadBuffer(path, callback, errorCallback);
+
+ p5.Reverb.prototype = Object.create(Effect.prototype);
+
+ p5.Reverb.prototype._initConvolverNode = function () {
+ this.convolverNode = this.ac.createConvolver();
+ this.input.connect(this.convolverNode);
+ this.convolverNode.connect(this.wet);
};
- /**
- * Similar to .addImpulse, except that the .impulses
- * Array is reset to save memory. A new .impulses
- * array is created with this impulse as the only item.
- *
- * @method resetImpulse
- * @param {String} path path to a sound file
- * @param {Function} callback function (optional)
- * @param {Function} errorCallback function (optional)
- */
- p5.Convolver.prototype.resetImpulse = function (path, callback, errorCallback) {
- // if loading locally without a server
- if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
- alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
+
+ p5.Reverb.prototype._teardownConvolverNode = function () {
+ if (this.convolverNode) {
+ this.convolverNode.disconnect();
+ delete this.convolverNode;
}
- this.impulses = [];
- this._loadBuffer(path, callback, errorCallback);
+ };
+
+ p5.Reverb.prototype._setBuffer = function (audioBuffer) {
+ this._teardownConvolverNode();
+
+ this._initConvolverNode();
+
+ this.convolverNode.buffer = audioBuffer;
};
/**
- * If you have used .addImpulse()
to add multiple impulses
- * to a p5.Convolver, then you can use this method to toggle between
- * the items in the .impulses
Array. Accepts a parameter
- * to identify which impulse you wish to use, identified either by its
- * original filename (String) or by its position in the .impulses
- *
Array (Number).
- * You can access the objects in the .impulses Array directly. Each
- * Object has two attributes: an .audioBuffer
(type:
- * Web Audio
- * AudioBuffer) and a .name
, a String that corresponds
- * with the original filename.
+ * Connect a source to the reverb, and assign reverb parameters.
*
- * @method toggleImpulse
- * @param {String|Number} id Identify the impulse by its original filename
- * (String), or by its position in the
- * .impulses
Array (Number).
- */
- p5.Convolver.prototype.toggleImpulse = function (id) {
- if (typeof id === 'number' && id < this.impulses.length) {
- this._setBuffer(this.impulses[id].audioBuffer);
- }
- if (typeof id === 'string') {
- for (var i = 0; i < this.impulses.length; i++) {
- if (this.impulses[i].name === id) {
- this._setBuffer(this.impulses[i].audioBuffer);
- break;
- }
- }
- }
- };
- p5.Convolver.prototype.dispose = function () {
- p5.Reverb.prototype.dispose.apply(this);
- // remove all the Impulse Response buffers
- for (var i in this.impulses) {
- if (this.impulses[i]) {
- this.impulses[i] = null;
- }
- }
- };
-}(errorHandler, effect);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_core_TimelineState;
-Tone_core_TimelineState = function (Tone) {
- 'use strict';
- Tone.TimelineState = function (initial) {
- Tone.Timeline.call(this);
- this._initial = initial;
- };
- Tone.extend(Tone.TimelineState, Tone.Timeline);
- Tone.TimelineState.prototype.getValueAtTime = function (time) {
- var event = this.get(time);
- if (event !== null) {
- return event.state;
- } else {
- return this._initial;
- }
- };
- Tone.TimelineState.prototype.setStateAtTime = function (state, time) {
- this.add({
- 'state': state,
- 'time': time
- });
- };
- return Tone.TimelineState;
-}(Tone_core_Tone, Tone_core_Timeline);
-/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/
-var Tone_core_Clock;
-Tone_core_Clock = function (Tone) {
- 'use strict';
- Tone.Clock = function () {
- Tone.Emitter.call(this);
- var options = this.optionsObject(arguments, [
- 'callback',
- 'frequency'
- ], Tone.Clock.defaults);
- this.callback = options.callback;
- this._nextTick = 0;
- this._lastState = Tone.State.Stopped;
- this.frequency = new Tone.TimelineSignal(options.frequency, Tone.Type.Frequency);
- this._readOnly('frequency');
- this.ticks = 0;
- this._state = new Tone.TimelineState(Tone.State.Stopped);
- this._boundLoop = this._loop.bind(this);
- this.context.on('tick', this._boundLoop);
- };
- Tone.extend(Tone.Clock, Tone.Emitter);
- Tone.Clock.defaults = {
- 'callback': Tone.noOp,
- 'frequency': 1,
- 'lookAhead': 'auto'
- };
- Object.defineProperty(Tone.Clock.prototype, 'state', {
- get: function () {
- return this._state.getValueAtTime(this.now());
- }
- });
- Tone.Clock.prototype.start = function (time, offset) {
- time = this.toSeconds(time);
- if (this._state.getValueAtTime(time) !== Tone.State.Started) {
- this._state.add({
- 'state': Tone.State.Started,
- 'time': time,
- 'offset': offset
- });
- }
- return this;
- };
- Tone.Clock.prototype.stop = function (time) {
- time = this.toSeconds(time);
- this._state.cancel(time);
- this._state.setStateAtTime(Tone.State.Stopped, time);
- return this;
- };
- Tone.Clock.prototype.pause = function (time) {
- time = this.toSeconds(time);
- if (this._state.getValueAtTime(time) === Tone.State.Started) {
- this._state.setStateAtTime(Tone.State.Paused, time);
- }
- return this;
- };
- Tone.Clock.prototype._loop = function () {
- var now = this.now();
- var lookAhead = this.context.lookAhead;
- var updateInterval = this.context.updateInterval;
- var lagCompensation = this.context.lag * 2;
- var loopInterval = now + lookAhead + updateInterval + lagCompensation;
- while (loopInterval > this._nextTick && this._state) {
- var currentState = this._state.getValueAtTime(this._nextTick);
- if (currentState !== this._lastState) {
- this._lastState = currentState;
- var event = this._state.get(this._nextTick);
- if (currentState === Tone.State.Started) {
- this._nextTick = event.time;
- if (!this.isUndef(event.offset)) {
- this.ticks = event.offset;
- }
- this.emit('start', event.time, this.ticks);
- } else if (currentState === Tone.State.Stopped) {
- this.ticks = 0;
- this.emit('stop', event.time);
- } else if (currentState === Tone.State.Paused) {
- this.emit('pause', event.time);
- }
- }
- var tickTime = this._nextTick;
- if (this.frequency) {
- this._nextTick += 1 / this.frequency.getValueAtTime(this._nextTick);
- if (currentState === Tone.State.Started) {
- this.callback(tickTime);
- this.ticks++;
- }
- }
- }
- };
- Tone.Clock.prototype.getStateAtTime = function (time) {
- time = this.toSeconds(time);
- return this._state.getValueAtTime(time);
- };
- Tone.Clock.prototype.dispose = function () {
- Tone.Emitter.prototype.dispose.call(this);
- this.context.off('tick', this._boundLoop);
- this._writable('frequency');
- this.frequency.dispose();
- this.frequency = null;
- this._boundLoop = null;
- this._nextTick = Infinity;
- this.callback = null;
- this._state.dispose();
- this._state = null;
- };
- return Tone.Clock;
-}(Tone_core_Tone, Tone_signal_TimelineSignal, Tone_core_TimelineState, Tone_core_Emitter);
-var metro;
-'use strict';
-metro = function () {
- var p5sound = master;
- // requires the Tone.js library's Clock (MIT license, Yotam Mann)
- // https://github.com/TONEnoTONE/Tone.js/
- var Clock = Tone_core_Clock;
- p5.Metro = function () {
- this.clock = new Clock({ 'callback': this.ontick.bind(this) });
- this.syncedParts = [];
- this.bpm = 120;
- // gets overridden by p5.Part
- this._init();
- this.prevTick = 0;
- this.tatumTime = 0;
- this.tickCallback = function () {
- };
- };
- p5.Metro.prototype.ontick = function (tickTime) {
- var elapsedTime = tickTime - this.prevTick;
- var secondsFromNow = tickTime - p5sound.audiocontext.currentTime;
- if (elapsedTime - this.tatumTime <= -0.02) {
- return;
- } else {
- // console.log('ok', this.syncedParts[0].phrases[0].name);
- this.prevTick = tickTime;
- // for all of the active things on the metro:
- var self = this;
- this.syncedParts.forEach(function (thisPart) {
- if (!thisPart.isPlaying)
- return;
- thisPart.incrementStep(secondsFromNow);
- // each synced source keeps track of its own beat number
- thisPart.phrases.forEach(function (thisPhrase) {
- var phraseArray = thisPhrase.sequence;
- var bNum = self.metroTicks % phraseArray.length;
- if (phraseArray[bNum] !== 0 && (self.metroTicks < phraseArray.length || !thisPhrase.looping)) {
- thisPhrase.callback(secondsFromNow, phraseArray[bNum]);
- }
- });
- });
- this.metroTicks += 1;
- this.tickCallback(secondsFromNow);
- }
- };
- p5.Metro.prototype.setBPM = function (bpm, rampTime) {
- var beatTime = 60 / (bpm * this.tatums);
- var now = p5sound.audiocontext.currentTime;
- this.tatumTime = beatTime;
- var rampTime = rampTime || 0;
- this.clock.frequency.setValueAtTime(this.clock.frequency.value, now);
- this.clock.frequency.linearRampToValueAtTime(bpm, now + rampTime);
- this.bpm = bpm;
- };
- p5.Metro.prototype.getBPM = function () {
- return this.clock.getRate() / this.tatums * 60;
- };
- p5.Metro.prototype._init = function () {
- this.metroTicks = 0;
- };
- // clear existing synced parts, add only this one
- p5.Metro.prototype.resetSync = function (part) {
- this.syncedParts = [part];
- };
- // push a new synced part to the array
- p5.Metro.prototype.pushSync = function (part) {
- this.syncedParts.push(part);
- };
- p5.Metro.prototype.start = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- this.clock.start(now + t);
- this.setBPM(this.bpm);
- };
- p5.Metro.prototype.stop = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- this.clock.stop(now + t);
- };
- p5.Metro.prototype.beatLength = function (tatums) {
- this.tatums = 1 / tatums / 4;
+ * @method process
+ * @for p5.Reverb
+ * @param {Object} src p5.sound / Web Audio object with a sound
+ * output.
+ * @param {Number} [seconds] Duration of the reverb, in seconds.
+ * Min: 0, Max: 10. Defaults to 3.
+ * @param {Number} [decayRate] Percentage of decay with each echo.
+ * Min: 0, Max: 100. Defaults to 2.
+ * @param {Boolean} [reverse] Play the reverb backwards or forwards.
+ */
+
+
+ p5.Reverb.prototype.process = function (src, seconds, decayRate, reverse) {
+ src.connect(this.input);
+ var rebuild = false;
+
+ if (seconds) {
+ this._seconds = seconds;
+ rebuild = true;
+ }
+
+ if (decayRate) {
+ this._decay = decayRate;
+ }
+
+ if (reverse) {
+ this._reverse = reverse;
+ }
+
+ if (rebuild) {
+ this._buildImpulse();
+ }
};
-}(master, Tone_core_Clock);
-var looper;
-'use strict';
-looper = function () {
- var p5sound = master;
- var BPM = 120;
/**
- * Set the global tempo, in beats per minute, for all
- * p5.Parts. This method will impact all active p5.Parts.
+ * Set the reverb settings. Similar to .process(), but without
+ * assigning a new input.
*
- * @method setBPM
- * @param {Number} BPM Beats Per Minute
- * @param {Number} rampTime Seconds from now
+ * @method set
+ * @for p5.Reverb
+ * @param {Number} [seconds] Duration of the reverb, in seconds.
+ * Min: 0, Max: 10. Defaults to 3.
+ * @param {Number} [decayRate] Percentage of decay with each echo.
+ * Min: 0, Max: 100. Defaults to 2.
+ * @param {Boolean} [reverse] Play the reverb backwards or forwards.
*/
- p5.prototype.setBPM = function (bpm, rampTime) {
- BPM = bpm;
- for (var i in p5sound.parts) {
- if (p5sound.parts[i]) {
- p5sound.parts[i].setBPM(bpm, rampTime);
- }
+
+
+ p5.Reverb.prototype.set = function (seconds, decayRate, reverse) {
+ var rebuild = false;
+
+ if (seconds) {
+ this._seconds = seconds;
+ rebuild = true;
+ }
+
+ if (decayRate) {
+ this._decay = decayRate;
+ }
+
+ if (reverse) {
+ this._reverse = reverse;
+ }
+
+ if (rebuild) {
+ this._buildImpulse();
+ }
+ }; // DocBlocks for methods inherited from p5.Effect
+
+ /**
+ * Set the output level of the reverb effect.
+ *
+ * @method amp
+ * @for p5.Reverb
+ * @param {Number} volume amplitude between 0 and 1.0
+ * @param {Number} [rampTime] create a fade that lasts rampTime
+ * @param {Number} [timeFromNow] schedule this event to happen
+ * seconds from now
+ */
+
+ /**
+ * Send output to a p5.sound or web audio object
+ *
+ * @method connect
+ * @for p5.Reverb
+ * @param {Object} unit
+ */
+
+ /**
+ * Disconnect all output.
+ *
+ * @method disconnect
+ * @for p5.Reverb
+ */
+
+ /**
+ * Inspired by Simple Reverb by Jordan Santell
+ * https://github.com/web-audio-components/simple-reverb/blob/master/index.js
+ *
+ * Utility function for building an impulse response
+ * based on the module parameters.
+ *
+ * @private
+ */
+
+
+ p5.Reverb.prototype._buildImpulse = function () {
+ var rate = this.ac.sampleRate;
+ var length = rate * this._seconds;
+ var decay = this._decay;
+ var impulse = this.ac.createBuffer(2, length, rate);
+ var impulseL = impulse.getChannelData(0);
+ var impulseR = impulse.getChannelData(1);
+ var n, i;
+
+ for (i = 0; i < length; i++) {
+ n = this._reverse ? length - i : i;
+ impulseL[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
+ impulseR[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay);
}
+
+ this._setBuffer(impulse);
};
+
+ p5.Reverb.prototype.dispose = function () {
+ Effect.prototype.dispose.apply(this);
+
+ this._teardownConvolverNode();
+ }; // =======================================================================
+ // *** p5.Convolver ***
+ // =======================================================================
+
/**
- * A phrase is a pattern of musical events over time, i.e.
- * a series of notes and rests.
+ * p5.Convolver extends p5.Reverb. It can emulate the sound of real
+ * physical spaces through a process called
+ * convolution.
*
- * Phrases must be added to a p5.Part for playback, and
- * each part can play multiple phrases at the same time.
- * For example, one Phrase might be a kick drum, another
- * could be a snare, and another could be the bassline.
+ * Convolution multiplies any audio input by an "impulse response"
+ * to simulate the dispersion of sound over time. The impulse response is
+ * generated from an audio file that you provide. One way to
+ * generate an impulse response is to pop a balloon in a reverberant space
+ * and record the echo. Convolution can also be used to experiment with
+ * sound.
*
- * The first parameter is a name so that the phrase can be
- * modified or deleted later. The callback is a a function that
- * this phrase will call at every step—for example it might be
- * called playNote(value){}
. The array determines
- * which value is passed into the callback at each step of the
- * phrase. It can be numbers, an object with multiple numbers,
- * or a zero (0) indicates a rest so the callback won't be called).
+ * Use the method createConvolver(path)
to instantiate a
+ * p5.Convolver with a path to your impulse response audio file.
*
- * @class p5.Phrase
+ * @class p5.Convolver
+ * @extends p5.Effect
* @constructor
- * @param {String} name Name so that you can access the Phrase.
- * @param {Function} callback The name of a function that this phrase
- * will call. Typically it will play a sound,
- * and accept two parameters: a time at which
- * to play the sound (in seconds from now),
- * and a value from the sequence array. The
- * time should be passed into the play() or
- * start() method to ensure precision.
- * @param {Array} sequence Array of values to pass into the callback
- * at each step of the phrase.
+ * @param {String} path path to a sound file
+ * @param {Function} [callback] function to call when loading succeeds
+ * @param {Function} [errorCallback] function to call if loading fails.
+ * This function will receive an error or
+ * XMLHttpRequest object with information
+ * about what went wrong.
* @example
*
- * var mySound, myPhrase, myPart;
- * var pattern = [1,0,0,2,0,2,0,0];
- * var msg = 'click to play';
- *
+ * var cVerb, sound;
* function preload() {
- * mySound = loadSound('assets/beatbox.mp3');
- * }
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
*
- * function setup() {
- * noStroke();
- * fill(255);
- * textAlign(CENTER);
- * masterVolume(0.1);
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
*
- * myPhrase = new p5.Phrase('bbox', makeSound, pattern);
- * myPart = new p5.Part();
- * myPart.addPhrase(myPhrase);
- * myPart.setBPM(60);
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
- * function draw() {
- * background(0);
- * text(msg, width/2, height/2);
- * }
+ * function setup() {
+ * // disconnect from master output...
+ * sound.disconnect();
*
- * function makeSound(time, playbackRate) {
- * mySound.rate(playbackRate);
- * mySound.play(time);
- * }
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
*
- * function mouseClicked() {
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * myPart.start();
- * msg = 'playing pattern';
- * }
+ * sound.play();
* }
- *
*
*/
- p5.Phrase = function (name, callback, sequence) {
- this.phraseStep = 0;
- this.name = name;
- this.callback = callback;
+
+
+ p5.Convolver = function (path, callback, errorCallback) {
+ p5.Reverb.call(this);
/**
- * Array of values to pass into the callback
- * at each step of the phrase. Depending on the callback
- * function's requirements, these values may be numbers,
- * strings, or an object with multiple parameters.
- * Zero (0) indicates a rest.
+ * Internally, the p5.Convolver uses a
+ *
+ * Web Audio Convolver Node.
*
- * @property {Array} sequence
+ * @property {ConvolverNode} convolverNode
*/
- this.sequence = sequence;
+
+ this._initConvolverNode(); // otherwise, Safari distorts
+
+
+ this.input.gain.value = 0.5;
+
+ if (path) {
+ this.impulses = [];
+
+ this._loadBuffer(path, callback, errorCallback);
+ } else {
+ // parameters
+ this._seconds = 3;
+ this._decay = 2;
+ this._reverse = false;
+
+ this._buildImpulse();
+ }
};
+
+ p5.Convolver.prototype = Object.create(p5.Reverb.prototype);
+ p5.prototype.registerPreloadMethod('createConvolver', p5.prototype);
/**
- * A p5.Part plays back one or more p5.Phrases. Instantiate a part
- * with steps and tatums. By default, each step represents a 1/16th note.
- *
- * See p5.Phrase for more about musical timing.
+ * Create a p5.Convolver. Accepts a path to a soundfile
+ * that will be used to generate an impulse response.
*
- * @class p5.Part
- * @constructor
- * @param {Number} [steps] Steps in the part
- * @param {Number} [tatums] Divisions of a beat, e.g. use 1/4, or 0.25 for a quater note (default is 1/16, a sixteenth note)
+ * @method createConvolver
+ * @for p5
+ * @param {String} path path to a sound file
+ * @param {Function} [callback] function to call if loading is successful.
+ * The object will be passed in as the argument
+ * to the callback function.
+ * @param {Function} [errorCallback] function to call if loading is not successful.
+ * A custom error will be passed in as the argument
+ * to the callback function.
+ * @return {p5.Convolver}
* @example
*
- * var box, drum, myPart;
- * var boxPat = [1,0,0,2,0,2,0,0];
- * var drumPat = [0,1,1,0,2,0,1,0];
- * var msg = 'click to play';
- *
+ * var cVerb, sound;
* function preload() {
- * box = loadSound('assets/beatbox.mp3');
- * drum = loadSound('assets/drum.mp3');
- * }
- *
- * function setup() {
- * noStroke();
- * fill(255);
- * textAlign(CENTER);
- * masterVolume(0.1);
+ * // We have both MP3 and OGG versions of all sound assets
+ * soundFormats('ogg', 'mp3');
*
- * var boxPhrase = new p5.Phrase('box', playBox, boxPat);
- * var drumPhrase = new p5.Phrase('drum', playDrum, drumPat);
- * myPart = new p5.Part();
- * myPart.addPhrase(boxPhrase);
- * myPart.addPhrase(drumPhrase);
- * myPart.setBPM(60);
- * masterVolume(0.1);
- * }
+ * // Try replacing 'bx-spring' with other soundfiles like
+ * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'
+ * cVerb = createConvolver('assets/bx-spring.mp3');
*
- * function draw() {
- * background(0);
- * text(msg, width/2, height/2);
+ * // Try replacing 'Damscray_DancingTiger' with
+ * // 'beat', 'doorbell', 'lucky_dragons_-_power_melody'
+ * sound = loadSound('assets/Damscray_DancingTiger.mp3');
* }
*
- * function playBox(time, playbackRate) {
- * box.rate(playbackRate);
- * box.play(time);
- * }
+ * function setup() {
+ * // disconnect from master output...
+ * sound.disconnect();
*
- * function playDrum(time, playbackRate) {
- * drum.rate(playbackRate);
- * drum.play(time);
- * }
+ * // ...and process with cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
*
- * function mouseClicked() {
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * myPart.start();
- * msg = 'playing part';
- * }
+ * sound.play();
* }
*
*/
- p5.Part = function (steps, bLength) {
- this.length = steps || 0;
- // how many beats
- this.partStep = 0;
- this.phrases = [];
- this.isPlaying = false;
- this.noLoop();
- this.tatums = bLength || 0.0625;
- // defaults to quarter note
- this.metro = new p5.Metro();
- this.metro._init();
- this.metro.beatLength(this.tatums);
- this.metro.setBPM(BPM);
- p5sound.parts.push(this);
- this.callback = function () {
- };
- };
- /**
- * Set the tempo of this part, in Beats Per Minute.
- *
- * @method setBPM
- * @param {Number} BPM Beats Per Minute
- * @param {Number} [rampTime] Seconds from now
- */
- p5.Part.prototype.setBPM = function (tempo, rampTime) {
- this.metro.setBPM(tempo, rampTime);
- };
- /**
- * Returns the tempo, in Beats Per Minute, of this part.
- *
- * @method getBPM
- * @return {Number}
- */
- p5.Part.prototype.getBPM = function () {
- return this.metro.getBPM();
- };
- /**
- * Start playback of this part. It will play
- * through all of its phrases at a speed
- * determined by setBPM.
- *
- * @method start
- * @param {Number} [time] seconds from now
- */
- p5.Part.prototype.start = function (time) {
- if (!this.isPlaying) {
- this.isPlaying = true;
- this.metro.resetSync(this);
- var t = time || 0;
- this.metro.start(t);
+
+ p5.prototype.createConvolver = function (path, callback, errorCallback) {
+ // if loading locally without a server
+ if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
}
+
+ var self = this;
+ var cReverb = new p5.Convolver(path, function (buffer) {
+ if (typeof callback === 'function') {
+ callback(buffer);
+ }
+
+ if (typeof self._decrementPreload === 'function') {
+ self._decrementPreload();
+ }
+ }, errorCallback);
+ cReverb.impulses = [];
+ return cReverb;
};
/**
- * Loop playback of this part. It will begin
- * looping through all of its phrases at a speed
- * determined by setBPM.
+ * Private method to load a buffer as an Impulse Response,
+ * assign it to the convolverNode, and add to the Array of .impulses.
*
- * @method loop
- * @param {Number} [time] seconds from now
+ * @param {String} path
+ * @param {Function} callback
+ * @param {Function} errorCallback
+ * @private
*/
- p5.Part.prototype.loop = function (time) {
- this.looping = true;
- // rest onended function
- this.onended = function () {
- this.partStep = 0;
+
+
+ p5.Convolver.prototype._loadBuffer = function (path, callback, errorCallback) {
+ var path = p5.prototype._checkFileFormats(path);
+
+ var self = this;
+ var errorTrace = new Error().stack;
+ var ac = p5.prototype.getAudioContext();
+ var request = new XMLHttpRequest();
+ request.open('GET', path, true);
+ request.responseType = 'arraybuffer';
+
+ request.onload = function () {
+ if (request.status === 200) {
+ // on success loading file:
+ ac.decodeAudioData(request.response, function (buff) {
+ var buffer = {};
+ var chunks = path.split('/');
+ buffer.name = chunks[chunks.length - 1];
+ buffer.audioBuffer = buff;
+ self.impulses.push(buffer);
+
+ self._setBuffer(buffer.audioBuffer);
+
+ if (callback) {
+ callback(buffer);
+ }
+ }, // error decoding buffer. "e" is undefined in Chrome 11/22/2015
+ function () {
+ var err = new CustomError('decodeAudioData', errorTrace, self.url);
+ var msg = 'AudioContext error at decodeAudioData for ' + self.url;
+
+ if (errorCallback) {
+ err.msg = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ });
+ } // if request status != 200, it failed
+ else {
+ var err = new CustomError('loadConvolver', errorTrace, self.url);
+ var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')';
+
+ if (errorCallback) {
+ err.message = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
+ }
+ }; // if there is another error, aside from 404...
+
+
+ request.onerror = function () {
+ var err = new CustomError('loadConvolver', errorTrace, self.url);
+ var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.';
+
+ if (errorCallback) {
+ err.message = msg;
+ errorCallback(err);
+ } else {
+ console.error(msg + '\n The error stack trace includes: \n' + err.stack);
+ }
};
- var t = time || 0;
- this.start(t);
+
+ request.send();
};
+
+ p5.Convolver.prototype.set = null;
/**
- * Tell the part to stop looping.
+ * Connect a source to the reverb, and assign reverb parameters.
*
- * @method noLoop
- */
- p5.Part.prototype.noLoop = function () {
- this.looping = false;
- // rest onended function
- this.onended = function () {
- this.stop();
- };
- };
- /**
- * Stop the part and cue it to step 0. Playback will resume from the begining of the Part when it is played again.
+ * @method process
+ * @for p5.Convolver
+ * @param {Object} src p5.sound / Web Audio object with a sound
+ * output.
+ * @example
+ *
+ * var cVerb, sound;
+ * function preload() {
+ * soundFormats('ogg', 'mp3');
*
- * @method stop
- * @param {Number} [time] seconds from now
- */
- p5.Part.prototype.stop = function (time) {
- this.partStep = 0;
- this.pause(time);
- };
- /**
- * Pause the part. Playback will resume
- * from the current step.
+ * cVerb = createConvolver('assets/concrete-tunnel.mp3');
+ *
+ * sound = loadSound('assets/beat.mp3');
+ * }
+ *
+ * function setup() {
+ * // disconnect from master output...
+ * sound.disconnect();
*
- * @method pause
- * @param {Number} time seconds from now
+ * // ...and process with (i.e. connect to) cVerb
+ * // so that we only hear the convolution
+ * cVerb.process(sound);
+ *
+ * sound.play();
+ * }
+ *
*/
- p5.Part.prototype.pause = function (time) {
- this.isPlaying = false;
- var t = time || 0;
- this.metro.stop(t);
+
+ p5.Convolver.prototype.process = function (src) {
+ src.connect(this.input);
};
/**
- * Add a p5.Phrase to this Part.
+ * If you load multiple impulse files using the .addImpulse method,
+ * they will be stored as Objects in this Array. Toggle between them
+ * with the toggleImpulse(id)
method.
*
- * @method addPhrase
- * @param {p5.Phrase} phrase reference to a p5.Phrase
+ * @property {Array} impulses
+ * @for p5.Convolver
*/
- p5.Part.prototype.addPhrase = function (name, callback, array) {
- var p;
- if (arguments.length === 3) {
- p = new p5.Phrase(name, callback, array);
- } else if (arguments[0] instanceof p5.Phrase) {
- p = arguments[0];
- } else {
- throw 'invalid input. addPhrase accepts name, callback, array or a p5.Phrase';
- }
- this.phrases.push(p);
- // reset the length if phrase is longer than part's existing length
- if (p.sequence.length > this.length) {
- this.length = p.sequence.length;
- }
- };
+
+
+ p5.Convolver.prototype.impulses = [];
/**
- * Remove a phrase from this part, based on the name it was
- * given when it was created.
+ * Load and assign a new Impulse Response to the p5.Convolver.
+ * The impulse is added to the .impulses
array. Previous
+ * impulses can be accessed with the .toggleImpulse(id)
+ * method.
*
- * @method removePhrase
- * @param {String} phraseName
+ * @method addImpulse
+ * @for p5.Convolver
+ * @param {String} path path to a sound file
+ * @param {Function} callback function (optional)
+ * @param {Function} errorCallback function (optional)
*/
- p5.Part.prototype.removePhrase = function (name) {
- for (var i in this.phrases) {
- if (this.phrases[i].name === name) {
- this.phrases.splice(i, 1);
- }
+
+ p5.Convolver.prototype.addImpulse = function (path, callback, errorCallback) {
+ // if loading locally without a server
+ if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
}
+
+ this._loadBuffer(path, callback, errorCallback);
};
/**
- * Get a phrase from this part, based on the name it was
- * given when it was created. Now you can modify its array.
+ * Similar to .addImpulse, except that the .impulses
+ * Array is reset to save memory. A new .impulses
+ * array is created with this impulse as the only item.
*
- * @method getPhrase
- * @param {String} phraseName
+ * @method resetImpulse
+ * @for p5.Convolver
+ * @param {String} path path to a sound file
+ * @param {Function} callback function (optional)
+ * @param {Function} errorCallback function (optional)
*/
- p5.Part.prototype.getPhrase = function (name) {
- for (var i in this.phrases) {
- if (this.phrases[i].name === name) {
- return this.phrases[i];
- }
+
+
+ p5.Convolver.prototype.resetImpulse = function (path, callback, errorCallback) {
+ // if loading locally without a server
+ if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {
+ alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');
}
+
+ this.impulses = [];
+
+ this._loadBuffer(path, callback, errorCallback);
};
/**
- * Find all sequences with the specified name, and replace their patterns with the specified array.
+ * If you have used .addImpulse()
to add multiple impulses
+ * to a p5.Convolver, then you can use this method to toggle between
+ * the items in the .impulses
Array. Accepts a parameter
+ * to identify which impulse you wish to use, identified either by its
+ * original filename (String) or by its position in the .impulses
+ *
Array (Number).
+ * You can access the objects in the .impulses Array directly. Each
+ * Object has two attributes: an .audioBuffer
(type:
+ * Web Audio
+ * AudioBuffer) and a .name
, a String that corresponds
+ * with the original filename.
*
- * @method replaceSequence
- * @param {String} phraseName
- * @param {Array} sequence Array of values to pass into the callback
- * at each step of the phrase.
+ * @method toggleImpulse
+ * @for p5.Convolver
+ * @param {String|Number} id Identify the impulse by its original filename
+ * (String), or by its position in the
+ * .impulses
Array (Number).
*/
- p5.Part.prototype.replaceSequence = function (name, array) {
- for (var i in this.phrases) {
- if (this.phrases[i].name === name) {
- this.phrases[i].sequence = array;
- }
+
+
+ p5.Convolver.prototype.toggleImpulse = function (id) {
+ if (typeof id === 'number' && id < this.impulses.length) {
+ this._setBuffer(this.impulses[id].audioBuffer);
}
- };
- p5.Part.prototype.incrementStep = function (time) {
- if (this.partStep < this.length - 1) {
- this.callback(time);
- this.partStep += 1;
- } else {
- if (!this.looping && this.partStep === this.length - 1) {
- console.log('done');
- // this.callback(time);
- this.onended();
+
+ if (typeof id === 'string') {
+ for (var i = 0; i < this.impulses.length; i++) {
+ if (this.impulses[i].name === id) {
+ this._setBuffer(this.impulses[i].audioBuffer);
+
+ break;
+ }
}
}
};
- /**
- * Set the function that will be called at every step. This will clear the previous function.
- *
- * @method onStep
- * @param {Function} callback The name of the callback
- * you want to fire
- * on every beat/tatum.
- */
- p5.Part.prototype.onStep = function (callback) {
- this.callback = callback;
- };
- // ===============
- // p5.Score
- // ===============
- /**
- * A Score consists of a series of Parts. The parts will
- * be played back in order. For example, you could have an
- * A part, a B part, and a C part, and play them back in this order
- * new p5.Score(a, a, b, a, c)
- *
- * @class p5.Score
- * @constructor
- * @param {p5.Part} [...parts] One or multiple parts, to be played in sequence.
- */
- p5.Score = function () {
- // for all of the arguments
- this.parts = [];
- this.currentPart = 0;
- var thisScore = this;
- for (var i in arguments) {
- if (arguments[i] && this.parts[i]) {
- this.parts[i] = arguments[i];
- this.parts[i].nextPart = this.parts[i + 1];
- this.parts[i].onended = function () {
- thisScore.resetPart(i);
- playNextPart(thisScore);
- };
+
+ p5.Convolver.prototype.dispose = function () {
+ p5.Reverb.prototype.dispose.apply(this); // remove all the Impulse Response buffers
+
+ for (var i in this.impulses) {
+ if (this.impulses[i]) {
+ this.impulses[i] = null;
}
}
- this.looping = false;
};
- p5.Score.prototype.onended = function () {
- if (this.looping) {
- // this.resetParts();
- this.parts[0].start();
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 59 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1); // requires the Tone.js library's Clock (MIT license, Yotam Mann)
+ // https://github.com/TONEnoTONE/Tone.js/
+
+
+ var Clock = __webpack_require__(26);
+
+ p5.Metro = function () {
+ this.clock = new Clock({
+ 'callback': this.ontick.bind(this)
+ });
+ this.syncedParts = [];
+ this.bpm = 120; // gets overridden by p5.Part
+
+ this._init();
+
+ this.prevTick = 0;
+ this.tatumTime = 0;
+
+ this.tickCallback = function () {};
+ };
+
+ p5.Metro.prototype.ontick = function (tickTime) {
+ var elapsedTime = tickTime - this.prevTick;
+ var secondsFromNow = tickTime - p5sound.audiocontext.currentTime;
+
+ if (elapsedTime - this.tatumTime <= -0.02) {
+ return;
} else {
- this.parts[this.parts.length - 1].onended = function () {
- this.stop();
- this.resetParts();
- };
+ // console.log('ok', this.syncedParts[0].phrases[0].name);
+ this.prevTick = tickTime; // for all of the active things on the metro:
+
+ var self = this;
+ this.syncedParts.forEach(function (thisPart) {
+ if (!thisPart.isPlaying) return;
+ thisPart.incrementStep(secondsFromNow); // each synced source keeps track of its own beat number
+
+ thisPart.phrases.forEach(function (thisPhrase) {
+ var phraseArray = thisPhrase.sequence;
+ var bNum = self.metroTicks % phraseArray.length;
+
+ if (phraseArray[bNum] !== 0 && (self.metroTicks < phraseArray.length || !thisPhrase.looping)) {
+ thisPhrase.callback(secondsFromNow, phraseArray[bNum]);
+ }
+ });
+ });
+ this.metroTicks += 1;
+ this.tickCallback(secondsFromNow);
}
- this.currentPart = 0;
- };
- /**
- * Start playback of the score.
- *
- * @method start
- */
- p5.Score.prototype.start = function () {
- this.parts[this.currentPart].start();
- this.scoreStep = 0;
};
- /**
- * Stop playback of the score.
- *
- * @method stop
- */
- p5.Score.prototype.stop = function () {
- this.parts[this.currentPart].stop();
- this.currentPart = 0;
- this.scoreStep = 0;
+
+ p5.Metro.prototype.setBPM = function (bpm, rampTime) {
+ var beatTime = 60 / (bpm * this.tatums);
+ var now = p5sound.audiocontext.currentTime;
+ this.tatumTime = beatTime;
+ var rampTime = rampTime || 0;
+ this.clock.frequency.setValueAtTime(this.clock.frequency.value, now);
+ this.clock.frequency.linearRampToValueAtTime(bpm, now + rampTime);
+ this.bpm = bpm;
};
- /**
- * Pause playback of the score.
- *
- * @method pause
- */
- p5.Score.prototype.pause = function () {
- this.parts[this.currentPart].stop();
+
+ p5.Metro.prototype.getBPM = function () {
+ return this.clock.getRate() / this.tatums * 60;
};
- /**
- * Loop playback of the score.
- *
- * @method loop
- */
- p5.Score.prototype.loop = function () {
- this.looping = true;
- this.start();
+
+ p5.Metro.prototype._init = function () {
+ this.metroTicks = 0; // this.setBPM(120);
+ }; // clear existing synced parts, add only this one
+
+
+ p5.Metro.prototype.resetSync = function (part) {
+ this.syncedParts = [part];
+ }; // push a new synced part to the array
+
+
+ p5.Metro.prototype.pushSync = function (part) {
+ this.syncedParts.push(part);
};
- /**
- * Stop looping playback of the score. If it
- * is currently playing, this will go into effect
- * after the current round of playback completes.
- *
- * @method noLoop
- */
- p5.Score.prototype.noLoop = function () {
- this.looping = false;
+
+ p5.Metro.prototype.start = function (timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+ this.clock.start(now + t);
+ this.setBPM(this.bpm);
};
- p5.Score.prototype.resetParts = function () {
- var self = this;
- this.parts.forEach(function (part) {
- self.resetParts[part];
- });
+
+ p5.Metro.prototype.stop = function (timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+ this.clock.stop(now + t);
};
- p5.Score.prototype.resetPart = function (i) {
- this.parts[i].stop();
- this.parts[i].partStep = 0;
- for (var p in this.parts[i].phrases) {
- if (this.parts[i]) {
- this.parts[i].phrases[p].phraseStep = 0;
- }
- }
+
+ p5.Metro.prototype.beatLength = function (tatums) {
+ this.tatums = 1 / tatums / 4; // lowest possible division of a beat
};
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 60 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_ARRAY__ = [__webpack_require__(0),__webpack_require__(23),__webpack_require__(8)], __WEBPACK_AMD_DEFINE_RESULT__ = (function(t){"use strict";return t.TimelineState=function(e){t.Timeline.call(this),this._initial=e},t.extend(t.TimelineState,t.Timeline),t.TimelineState.prototype.getValueAtTime=function(e){var t=this.get(e);return null!==t?t.state:this._initial},t.TimelineState.prototype.setStateAtTime=function(e,t){this.add({state:e,time:t})},t.TimelineState}).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 61 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var BPM = 120;
/**
- * Set the tempo for all parts in the score
+ * Set the global tempo, in beats per minute, for all
+ * p5.Parts. This method will impact all active p5.Parts.
*
* @method setBPM
+ * @for p5
* @param {Number} BPM Beats Per Minute
* @param {Number} rampTime Seconds from now
*/
- p5.Score.prototype.setBPM = function (bpm, rampTime) {
- for (var i in this.parts) {
- if (this.parts[i]) {
- this.parts[i].setBPM(bpm, rampTime);
+
+ p5.prototype.setBPM = function (bpm, rampTime) {
+ BPM = bpm;
+
+ for (var i in p5sound.parts) {
+ if (p5sound.parts[i]) {
+ p5sound.parts[i].setBPM(bpm, rampTime);
}
}
};
- function playNextPart(aScore) {
- aScore.currentPart++;
- if (aScore.currentPart >= aScore.parts.length) {
- aScore.scoreStep = 0;
- aScore.onended();
- } else {
- aScore.scoreStep = 0;
- aScore.parts[aScore.currentPart - 1].stop();
- aScore.parts[aScore.currentPart].start();
- }
- }
-}(master);
-var soundloop;
-'use strict';
-soundloop = function () {
- var p5sound = master;
- var Clock = Tone_core_Clock;
/**
- * SoundLoop
+ * A phrase is a pattern of musical events over time, i.e.
+ * a series of notes and rests.
*
- * @class p5.SoundLoop
- * @constructor
+ * Phrases must be added to a p5.Part for playback, and
+ * each part can play multiple phrases at the same time.
+ * For example, one Phrase might be a kick drum, another
+ * could be a snare, and another could be the bassline.
*
- * @param {Function} callback this function will be called on each iteration of theloop
- * @param {Number|String} [interval] amount of time or beats for each iteration of the loop
- * defaults to 1
+ * The first parameter is a name so that the phrase can be
+ * modified or deleted later. The callback is a a function that
+ * this phrase will call at every step—for example it might be
+ * called playNote(value){}
. The array determines
+ * which value is passed into the callback at each step of the
+ * phrase. It can be numbers, an object with multiple numbers,
+ * or a zero (0) indicates a rest so the callback won't be called).
*
- * @example
- *
- * var click;
- * var looper1;
- *
- * function preload() {
- * click = loadSound('assets/drum.mp3');
- * }
- *
- * function setup() {
- * //the looper's callback is passed the timeFromNow
- * //this value should be used as a reference point from
- * //which to schedule sounds
- * looper1 = new p5.SoundLoop(function(timeFromNow){
- * click.play(timeFromNow);
- * background(255 * (looper1.iterations % 2));
- * }, 2);
+ * @class p5.Phrase
+ * @constructor
+ * @param {String} name Name so that you can access the Phrase.
+ * @param {Function} callback The name of a function that this phrase
+ * will call. Typically it will play a sound,
+ * and accept two parameters: a time at which
+ * to play the sound (in seconds from now),
+ * and a value from the sequence array. The
+ * time should be passed into the play() or
+ * start() method to ensure precision.
+ * @param {Array} sequence Array of values to pass into the callback
+ * at each step of the phrase.
+ * @example
+ *
+ * var mySound, myPhrase, myPart;
+ * var pattern = [1,0,0,2,0,2,0,0];
+ * var msg = 'click to play';
*
- * //stop after 10 iteratios;
- * looper1.maxIterations = 10;
- * //start the loop
- * looper1.start();
- * }
- *
+ * function preload() {
+ * mySound = loadSound('assets/beatbox.mp3');
+ * }
+ *
+ * function setup() {
+ * noStroke();
+ * fill(255);
+ * textAlign(CENTER);
+ * masterVolume(0.1);
+ *
+ * myPhrase = new p5.Phrase('bbox', makeSound, pattern);
+ * myPart = new p5.Part();
+ * myPart.addPhrase(myPhrase);
+ * myPart.setBPM(60);
+ * }
+ *
+ * function draw() {
+ * background(0);
+ * text(msg, width/2, height/2);
+ * }
+ *
+ * function makeSound(time, playbackRate) {
+ * mySound.rate(playbackRate);
+ * mySound.play(time);
+ * }
+ *
+ * function mouseClicked() {
+ * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
+ * myPart.start();
+ * msg = 'playing pattern';
+ * }
+ * }
+ *
+ *
+ */
+
+
+ p5.Phrase = function (name, callback, sequence) {
+ this.phraseStep = 0;
+ this.name = name;
+ this.callback = callback;
+ /**
+ * Array of values to pass into the callback
+ * at each step of the phrase. Depending on the callback
+ * function's requirements, these values may be numbers,
+ * strings, or an object with multiple parameters.
+ * Zero (0) indicates a rest.
+ *
+ * @property {Array} sequence
+ */
+
+ this.sequence = sequence;
+ };
+ /**
+ * A p5.Part plays back one or more p5.Phrases. Instantiate a part
+ * with steps and tatums. By default, each step represents a 1/16th note.
+ *
+ * See p5.Phrase for more about musical timing.
+ *
+ * @class p5.Part
+ * @constructor
+ * @param {Number} [steps] Steps in the part
+ * @param {Number} [tatums] Divisions of a beat, e.g. use 1/4, or 0.25 for a quater note (default is 1/16, a sixteenth note)
+ * @example
+ *
+ * var box, drum, myPart;
+ * var boxPat = [1,0,0,2,0,2,0,0];
+ * var drumPat = [0,1,1,0,2,0,1,0];
+ * var msg = 'click to play';
+ *
+ * function preload() {
+ * box = loadSound('assets/beatbox.mp3');
+ * drum = loadSound('assets/drum.mp3');
+ * }
+ *
+ * function setup() {
+ * noStroke();
+ * fill(255);
+ * textAlign(CENTER);
+ * masterVolume(0.1);
+ *
+ * var boxPhrase = new p5.Phrase('box', playBox, boxPat);
+ * var drumPhrase = new p5.Phrase('drum', playDrum, drumPat);
+ * myPart = new p5.Part();
+ * myPart.addPhrase(boxPhrase);
+ * myPart.addPhrase(drumPhrase);
+ * myPart.setBPM(60);
+ * masterVolume(0.1);
+ * }
+ *
+ * function draw() {
+ * background(0);
+ * text(msg, width/2, height/2);
+ * }
+ *
+ * function playBox(time, playbackRate) {
+ * box.rate(playbackRate);
+ * box.play(time);
+ * }
+ *
+ * function playDrum(time, playbackRate) {
+ * drum.rate(playbackRate);
+ * drum.play(time);
+ * }
+ *
+ * function mouseClicked() {
+ * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
+ * myPart.start();
+ * msg = 'playing part';
+ * }
+ * }
+ *
*/
- p5.SoundLoop = function (callback, interval) {
- this.callback = callback;
- /**
- * musicalTimeMode uses Tone.Time convention
- * true if string, false if number
- * @property {Boolean} musicalTimeMode
- */
- this.musicalTimeMode = typeof this._interval === 'number' ? false : true;
- this._interval = interval || 1;
- /**
- * musicalTimeMode variables
- * modify these only when the interval is specified in musicalTime format as a string
- */
- this._timeSignature = 4;
- this._bpm = 60;
+
+
+ p5.Part = function (steps, bLength) {
+ this.length = steps || 0; // how many beats
+
+ this.partStep = 0;
+ this.phrases = [];
this.isPlaying = false;
- /**
- * Set a limit to the number of loops to play. defaults to Infinity
- * @property {Number} maxIterations
- */
- this.maxIterations = Infinity;
- var self = this;
- this.clock = new Clock({
- 'callback': function (time) {
- var timeFromNow = time - p5sound.audiocontext.currentTime;
- /**
- * Do not initiate the callback if timeFromNow is < 0
- * This ususually occurs for a few milliseconds when the page
- * is not fully loaded
- *
- * The callback should only be called until maxIterations is reached
- */
- if (timeFromNow > 0 && self.iterations <= self.maxIterations) {
- self.callback(timeFromNow);
- }
- },
- 'frequency': this._calcFreq()
- });
+ this.noLoop();
+ this.tatums = bLength || 0.0625; // defaults to quarter note
+
+ this.metro = new p5.Metro();
+
+ this.metro._init();
+
+ this.metro.beatLength(this.tatums);
+ this.metro.setBPM(BPM);
+ p5sound.parts.push(this);
+
+ this.callback = function () {};
};
/**
- * Start the loop
- * @method start
- * @param {Number} [timeFromNow] schedule a starting time
+ * Set the tempo of this part, in Beats Per Minute.
+ *
+ * @method setBPM
+ * @for p5.Part
+ * @param {Number} BPM Beats Per Minute
+ * @param {Number} [rampTime] Seconds from now
*/
- p5.SoundLoop.prototype.start = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- if (!this.isPlaying) {
- this.clock.start(now + t);
- this.isPlaying = true;
- }
+
+
+ p5.Part.prototype.setBPM = function (tempo, rampTime) {
+ this.metro.setBPM(tempo, rampTime);
};
/**
- * Stop the loop
- * @method stop
- * @param {Number} [timeFromNow] schedule a stopping time
+ * Returns the tempo, in Beats Per Minute, of this part.
+ *
+ * @method getBPM
+ * @for p5.Part
+ * @return {Number}
*/
- p5.SoundLoop.prototype.stop = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- if (this.isPlaying) {
- this.clock.stop(now + t);
- this.isPlaying = false;
- }
+
+
+ p5.Part.prototype.getBPM = function () {
+ return this.metro.getBPM();
};
/**
- * Pause the loop
- * @method pause
- * @param {Number} [timeFromNow] schedule a pausing time
+ * Start playback of this part. It will play
+ * through all of its phrases at a speed
+ * determined by setBPM.
+ *
+ * @method start
+ * @for p5.Part
+ * @param {Number} [time] seconds from now
*/
- p5.SoundLoop.prototype.pause = function (timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- if (this.isPlaying) {
- this.clock.pause(now + t);
- this.isPlaying = false;
+
+
+ p5.Part.prototype.start = function (time) {
+ if (!this.isPlaying) {
+ this.isPlaying = true;
+ this.metro.resetSync(this);
+ var t = time || 0;
+ this.metro.start(t);
}
};
/**
- * Synchronize loops. Use this method to start two more more loops in synchronization
- * or to start a loop in synchronization with a loop that is already playing
- * This method will schedule the implicit loop in sync with the explicit master loop
- * i.e. loopToStart.syncedStart(loopToSyncWith)
- *
- * @method syncedStart
- * @param {Object} otherLoop a p5.SoundLoop to sync with
- * @param {Number} [timeFromNow] Start the loops in sync after timeFromNow seconds
+ * Loop playback of this part. It will begin
+ * looping through all of its phrases at a speed
+ * determined by setBPM.
+ *
+ * @method loop
+ * @for p5.Part
+ * @param {Number} [time] seconds from now
*/
- p5.SoundLoop.prototype.syncedStart = function (otherLoop, timeFromNow) {
- var t = timeFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- if (!otherLoop.isPlaying) {
- otherLoop.clock.start(now + t);
- otherLoop.isPlaying = true;
- this.clock.start(now + t);
- this.isPlaying = true;
- } else if (otherLoop.isPlaying) {
- var time = otherLoop.clock._nextTick - p5sound.audiocontext.currentTime;
- this.clock.start(now + time);
- this.isPlaying = true;
- }
+
+
+ p5.Part.prototype.loop = function (time) {
+ this.looping = true; // rest onended function
+
+ this.onended = function () {
+ this.partStep = 0;
+ };
+
+ var t = time || 0;
+ this.start(t);
};
/**
- * Updates frequency value, reflected in next callback
- * @private
- * @method _update
+ * Tell the part to stop looping.
+ *
+ * @method noLoop
+ * @for p5.Part
*/
- p5.SoundLoop.prototype._update = function () {
- this.clock.frequency.value = this._calcFreq();
+
+
+ p5.Part.prototype.noLoop = function () {
+ this.looping = false; // rest onended function
+
+ this.onended = function () {
+ this.stop();
+ };
};
/**
- * Calculate the frequency of the clock's callback based on bpm, interval, and timesignature
- * @private
- * @method _calcFreq
- * @return {Number} new clock frequency value
+ * Stop the part and cue it to step 0. Playback will resume from the begining of the Part when it is played again.
+ *
+ * @method stop
+ * @for p5.Part
+ * @param {Number} [time] seconds from now
*/
- p5.SoundLoop.prototype._calcFreq = function () {
- //Seconds mode, bpm / timesignature has no effect
- if (typeof this._interval === 'number') {
- this.musicalTimeMode = false;
- return 1 / this._interval;
- } else if (typeof this._interval === 'string') {
- this.musicalTimeMode = true;
- return this._bpm / 60 / this._convertNotation(this._interval) * (this._timeSignature / 4);
- }
+
+
+ p5.Part.prototype.stop = function (time) {
+ this.partStep = 0;
+ this.pause(time);
};
/**
- * Convert notation from musical time format to seconds
- * Uses Tone.Time convention
- * @private
- * @method _convertNotation
- * @param {String} value value to be converted
- * @return {Number} converted value in seconds
+ * Pause the part. Playback will resume
+ * from the current step.
+ *
+ * @method pause
+ * @for p5.Part
+ * @param {Number} time seconds from now
*/
- p5.SoundLoop.prototype._convertNotation = function (value) {
- var type = value.slice(-1);
- value = Number(value.slice(0, -1));
- switch (type) {
- case 'm':
- return this._measure(value);
- case 'n':
- return this._note(value);
- default:
- console.warn('Specified interval is not formatted correctly. See Tone.js ' + 'timing reference for more info: https://github.com/Tonejs/Tone.js/wiki/Time');
- }
+
+
+ p5.Part.prototype.pause = function (time) {
+ this.isPlaying = false;
+ var t = time || 0;
+ this.metro.stop(t);
};
/**
- * Helper conversion methods of measure and note
- * @private
- * @method _measure
- * @private
- * @method _note
+ * Add a p5.Phrase to this Part.
+ *
+ * @method addPhrase
+ * @for p5.Part
+ * @param {p5.Phrase} phrase reference to a p5.Phrase
*/
- p5.SoundLoop.prototype._measure = function (value) {
- return value * this._timeSignature;
- };
- p5.SoundLoop.prototype._note = function (value) {
- return this._timeSignature / value;
+
+
+ p5.Part.prototype.addPhrase = function (name, callback, array) {
+ var p;
+
+ if (arguments.length === 3) {
+ p = new p5.Phrase(name, callback, array);
+ } else if (arguments[0] instanceof p5.Phrase) {
+ p = arguments[0];
+ } else {
+ throw 'invalid input. addPhrase accepts name, callback, array or a p5.Phrase';
+ }
+
+ this.phrases.push(p); // reset the length if phrase is longer than part's existing length
+
+ if (p.sequence.length > this.length) {
+ this.length = p.sequence.length;
+ }
};
/**
- * Getters and Setters, setting any paramter will result in a change in the clock's
- * frequency, that will be reflected after the next callback
- * beats per minute (defaults to 60)
- * @property {Number} bpm
+ * Remove a phrase from this part, based on the name it was
+ * given when it was created.
+ *
+ * @method removePhrase
+ * @for p5.Part
+ * @param {String} phraseName
*/
- Object.defineProperty(p5.SoundLoop.prototype, 'bpm', {
- get: function () {
- return this._bpm;
- },
- set: function (bpm) {
- if (!this.musicalTimeMode) {
- console.warn('Changing the BPM in "seconds" mode has no effect. ' + 'BPM is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
+
+
+ p5.Part.prototype.removePhrase = function (name) {
+ for (var i in this.phrases) {
+ if (this.phrases[i].name === name) {
+ this.phrases.splice(i, 1);
}
- this._bpm = bpm;
- this._update();
}
- });
+ };
/**
- * number of quarter notes in a measure (defaults to 4)
- * @property {Number} timeSignature
+ * Get a phrase from this part, based on the name it was
+ * given when it was created. Now you can modify its array.
+ *
+ * @method getPhrase
+ * @for p5.Part
+ * @param {String} phraseName
*/
- Object.defineProperty(p5.SoundLoop.prototype, 'timeSignature', {
- get: function () {
- return this._timeSignature;
- },
- set: function (timeSig) {
- if (!this.musicalTimeMode) {
- console.warn('Changing the timeSignature in "seconds" mode has no effect. ' + 'BPM is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
+
+
+ p5.Part.prototype.getPhrase = function (name) {
+ for (var i in this.phrases) {
+ if (this.phrases[i].name === name) {
+ return this.phrases[i];
}
- this._timeSignature = timeSig;
- this._update();
}
- });
+ };
/**
- * length of the loops interval
- * @property {Number|String} interval
+ * Find all sequences with the specified name, and replace their patterns with the specified array.
+ *
+ * @method replaceSequence
+ * @for p5.Part
+ * @param {String} phraseName
+ * @param {Array} sequence Array of values to pass into the callback
+ * at each step of the phrase.
*/
- Object.defineProperty(p5.SoundLoop.prototype, 'interval', {
- get: function () {
- return this._interval;
- },
- set: function (interval) {
- this.musicalTimeMode = typeof interval === 'Number' ? false : true;
- this._interval = interval;
- this._update();
+
+
+ p5.Part.prototype.replaceSequence = function (name, array) {
+ for (var i in this.phrases) {
+ if (this.phrases[i].name === name) {
+ this.phrases[i].sequence = array;
+ }
}
- });
- /**
- * how many times the callback has been called so far
- * @property {Number} iterations
- * @readonly
- */
- Object.defineProperty(p5.SoundLoop.prototype, 'iterations', {
- get: function () {
- return this.clock.ticks;
+ };
+
+ p5.Part.prototype.incrementStep = function (time) {
+ if (this.partStep < this.length - 1) {
+ this.callback(time);
+ this.partStep += 1;
+ } else {
+ if (!this.looping && this.partStep === this.length - 1) {
+ console.log('done'); // this.callback(time);
+
+ this.onended();
+ }
}
- });
- return p5.SoundLoop;
-}(master, Tone_core_Clock);
-var compressor;
-compressor = function () {
- 'use strict';
- var p5sound = master;
- var Effect = effect;
- var CustomError = errorHandler;
+ };
/**
- * Compressor is an audio effect class that performs dynamics compression
- * on an audio input source. This is a very commonly used technique in music
- * and sound production. Compression creates an overall louder, richer,
- * and fuller sound by lowering the volume of louds and raising that of softs.
- * Compression can be used to avoid clipping (sound distortion due to
- * peaks in volume) and is especially useful when many sounds are played
- * at once. Compression can be used on indivudal sound sources in addition
- * to the master output.
- *
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
- * disconnect() are available.
- *
- * @class p5.Compressor
- * @constructor
- * @extends p5.Effect
+ * Set the function that will be called at every step. This will clear the previous function.
*
- *
+ * @method onStep
+ * @for p5.Part
+ * @param {Function} callback The name of the callback
+ * you want to fire
+ * on every beat/tatum.
*/
- p5.Compressor = function () {
- Effect.call(this);
- /**
- * The p5.Compressor is built with a Web Audio Dynamics Compressor Node
- *
- * @property {AudioNode} compressor
- */
- this.compressor = this.ac.createDynamicsCompressor();
- this.input.connect(this.compressor);
- this.compressor.connect(this.wet);
- };
- p5.Compressor.prototype = Object.create(Effect.prototype);
- /**
- * Performs the same function as .connect, but also accepts
- * optional parameters to set compressor's audioParams
- * @method process
- *
- * @param {Object} src Sound source to be connected
- *
- * @param {Number} [attack] The amount of time (in seconds) to reduce the gain by 10dB,
- * default = .003, range 0 - 1
- * @param {Number} [knee] A decibel value representing the range above the
- * threshold where the curve smoothly transitions to the "ratio" portion.
- * default = 30, range 0 - 40
- * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
- * default = 12, range 1 - 20
- * @param {Number} [threshold] The decibel value above which the compression will start taking effect
- * default = -24, range -100 - 0
- * @param {Number} [release] The amount of time (in seconds) to increase the gain by 10dB
- * default = .25, range 0 - 1
- */
- p5.Compressor.prototype.process = function (src, attack, knee, ratio, threshold, release) {
- src.connect(this.input);
- this.set(attack, knee, ratio, threshold, release);
- };
+
+
+ p5.Part.prototype.onStep = function (callback) {
+ this.callback = callback;
+ }; // ===============
+ // p5.Score
+ // ===============
+
/**
- * Set the paramters of a compressor.
- * @method set
- * @param {Number} attack The amount of time (in seconds) to reduce the gain by 10dB,
- * default = .003, range 0 - 1
- * @param {Number} knee A decibel value representing the range above the
- * threshold where the curve smoothly transitions to the "ratio" portion.
- * default = 30, range 0 - 40
- * @param {Number} ratio The amount of dB change in input for a 1 dB change in output
- * default = 12, range 1 - 20
- * @param {Number} threshold The decibel value above which the compression will start taking effect
- * default = -24, range -100 - 0
- * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
- * default = .25, range 0 - 1
+ * A Score consists of a series of Parts. The parts will
+ * be played back in order. For example, you could have an
+ * A part, a B part, and a C part, and play them back in this order
+ * new p5.Score(a, a, b, a, c)
+ *
+ * @class p5.Score
+ * @constructor
+ * @param {p5.Part} [...parts] One or multiple parts, to be played in sequence.
*/
- p5.Compressor.prototype.set = function (attack, knee, ratio, threshold, release) {
- if (typeof attack !== 'undefined') {
- this.attack(attack);
- }
- if (typeof knee !== 'undefined') {
- this.knee(knee);
- }
- if (typeof ratio !== 'undefined') {
- this.ratio(ratio);
- }
- if (typeof threshold !== 'undefined') {
- this.threshold(threshold);
+
+
+ p5.Score = function () {
+ // for all of the arguments
+ this.parts = [];
+ this.currentPart = 0;
+ var thisScore = this;
+
+ for (var i in arguments) {
+ if (arguments[i] && this.parts[i]) {
+ this.parts[i] = arguments[i];
+ this.parts[i].nextPart = this.parts[i + 1];
+
+ this.parts[i].onended = function () {
+ thisScore.resetPart(i);
+ playNextPart(thisScore);
+ };
+ }
}
- if (typeof release !== 'undefined') {
- this.release(release);
+
+ this.looping = false;
+ };
+
+ p5.Score.prototype.onended = function () {
+ if (this.looping) {
+ // this.resetParts();
+ this.parts[0].start();
+ } else {
+ this.parts[this.parts.length - 1].onended = function () {
+ this.stop();
+ this.resetParts();
+ };
}
+
+ this.currentPart = 0;
};
/**
- * Get current attack or set value w/ time ramp
- *
- *
- * @method attack
- * @param {Number} [attack] Attack is the amount of time (in seconds) to reduce the gain by 10dB,
- * default = .003, range 0 - 1
- * @param {Number} [time] Assign time value to schedule the change in value
+ * Start playback of the score.
+ *
+ * @method start
+ * @for p5.Score
*/
- p5.Compressor.prototype.attack = function (attack, time) {
- var t = time || 0;
- if (typeof attack == 'number') {
- this.compressor.attack.value = attack;
- this.compressor.attack.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.attack.linearRampToValueAtTime(attack, this.ac.currentTime + 0.02 + t);
- } else if (typeof attack !== 'undefined') {
- attack.connect(this.compressor.attack);
- }
- return this.compressor.attack.value;
+
+
+ p5.Score.prototype.start = function () {
+ this.parts[this.currentPart].start();
+ this.scoreStep = 0;
};
/**
- * Get current knee or set value w/ time ramp
- *
- * @method knee
- * @param {Number} [knee] A decibel value representing the range above the
- * threshold where the curve smoothly transitions to the "ratio" portion.
- * default = 30, range 0 - 40
- * @param {Number} [time] Assign time value to schedule the change in value
+ * Stop playback of the score.
+ *
+ * @method stop
+ * @for p5.Score
*/
- p5.Compressor.prototype.knee = function (knee, time) {
- var t = time || 0;
- if (typeof knee == 'number') {
- this.compressor.knee.value = knee;
- this.compressor.knee.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.knee.linearRampToValueAtTime(knee, this.ac.currentTime + 0.02 + t);
- } else if (typeof knee !== 'undefined') {
- knee.connect(this.compressor.knee);
- }
- return this.compressor.knee.value;
+
+
+ p5.Score.prototype.stop = function () {
+ this.parts[this.currentPart].stop();
+ this.currentPart = 0;
+ this.scoreStep = 0;
};
/**
- * Get current ratio or set value w/ time ramp
- * @method ratio
+ * Pause playback of the score.
*
- * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
- * default = 12, range 1 - 20
- * @param {Number} [time] Assign time value to schedule the change in value
+ * @method pause
+ * @for p5.Score
*/
- p5.Compressor.prototype.ratio = function (ratio, time) {
- var t = time || 0;
- if (typeof ratio == 'number') {
- this.compressor.ratio.value = ratio;
- this.compressor.ratio.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.ratio.linearRampToValueAtTime(ratio, this.ac.currentTime + 0.02 + t);
- } else if (typeof ratio !== 'undefined') {
- ratio.connect(this.compressor.ratio);
- }
- return this.compressor.ratio.value;
+
+
+ p5.Score.prototype.pause = function () {
+ this.parts[this.currentPart].stop();
};
/**
- * Get current threshold or set value w/ time ramp
- * @method threshold
+ * Loop playback of the score.
*
- * @param {Number} threshold The decibel value above which the compression will start taking effect
- * default = -24, range -100 - 0
- * @param {Number} [time] Assign time value to schedule the change in value
+ * @method loop
+ * @for p5.Score
*/
- p5.Compressor.prototype.threshold = function (threshold, time) {
- var t = time || 0;
- if (typeof threshold == 'number') {
- this.compressor.threshold.value = threshold;
- this.compressor.threshold.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.threshold.linearRampToValueAtTime(threshold, this.ac.currentTime + 0.02 + t);
- } else if (typeof threshold !== 'undefined') {
- threshold.connect(this.compressor.threshold);
- }
- return this.compressor.threshold.value;
+
+
+ p5.Score.prototype.loop = function () {
+ this.looping = true;
+ this.start();
};
/**
- * Get current release or set value w/ time ramp
- * @method release
- *
- * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
- * default = .25, range 0 - 1
+ * Stop looping playback of the score. If it
+ * is currently playing, this will go into effect
+ * after the current round of playback completes.
*
- * @param {Number} [time] Assign time value to schedule the change in value
+ * @method noLoop
+ * @for p5.Score
*/
- p5.Compressor.prototype.release = function (release, time) {
- var t = time || 0;
- if (typeof release == 'number') {
- this.compressor.release.value = release;
- this.compressor.release.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
- this.compressor.release.linearRampToValueAtTime(release, this.ac.currentTime + 0.02 + t);
- } else if (typeof number !== 'undefined') {
- release.connect(this.compressor.release);
+
+
+ p5.Score.prototype.noLoop = function () {
+ this.looping = false;
+ };
+
+ p5.Score.prototype.resetParts = function () {
+ var self = this;
+ this.parts.forEach(function (part) {
+ self.resetParts[part];
+ });
+ };
+
+ p5.Score.prototype.resetPart = function (i) {
+ this.parts[i].stop();
+ this.parts[i].partStep = 0;
+
+ for (var p in this.parts[i].phrases) {
+ if (this.parts[i]) {
+ this.parts[i].phrases[p].phraseStep = 0;
+ }
}
- return this.compressor.release.value;
};
/**
- * Return the current reduction value
+ * Set the tempo for all parts in the score
*
- * @method reduction
- * @return {Number} Value of the amount of gain reduction that is applied to the signal
+ * @method setBPM
+ * @for p5.Score
+ * @param {Number} BPM Beats Per Minute
+ * @param {Number} rampTime Seconds from now
*/
- p5.Compressor.prototype.reduction = function () {
- return this.compressor.reduction.value;
- };
- p5.Compressor.prototype.dispose = function () {
- Effect.prototype.dispose.apply(this);
- if (this.compressor) {
- this.compressor.disconnect();
- delete this.compressor;
+
+
+ p5.Score.prototype.setBPM = function (bpm, rampTime) {
+ for (var i in this.parts) {
+ if (this.parts[i]) {
+ this.parts[i].setBPM(bpm, rampTime);
+ }
}
};
- return p5.Compressor;
-}(master, effect, errorHandler);
-var soundRecorder;
-'use strict';
-soundRecorder = function () {
- // inspiration: recorder.js, Tone.js & typedarray.org
- var p5sound = master;
- var convertToWav = helpers.convertToWav;
- var ac = p5sound.audiocontext;
+
+ function playNextPart(aScore) {
+ aScore.currentPart++;
+
+ if (aScore.currentPart >= aScore.parts.length) {
+ aScore.scoreStep = 0;
+ aScore.onended();
+ } else {
+ aScore.scoreStep = 0;
+ aScore.parts[aScore.currentPart - 1].stop();
+ aScore.parts[aScore.currentPart].start();
+ }
+ }
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 62 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ var p5sound = __webpack_require__(1);
+
+ var Clock = __webpack_require__(26);
/**
- * Record sounds for playback and/or to save as a .wav file.
- * The p5.SoundRecorder records all sound output from your sketch,
- * or can be assigned a specific source with setInput().
- * The record() method accepts a p5.SoundFile as a parameter.
- * When playback is stopped (either after the given amount of time,
- * or with the stop() method), the p5.SoundRecorder will send its
- * recording to that p5.SoundFile for playback.
- *
- * @class p5.SoundRecorder
- * @constructor
- * @example
- *
- * var mic, recorder, soundFile;
- * var state = 0;
- *
- * function setup() {
- * background(200);
- * // create an audio in
- * mic = new p5.AudioIn();
- *
- * // prompts user to enable their browser mic
- * mic.start();
- *
- * // create a sound recorder
- * recorder = new p5.SoundRecorder();
- *
- * // connect the mic to the recorder
- * recorder.setInput(mic);
- *
- * // this sound file will be used to
- * // playback & save the recording
- * soundFile = new p5.SoundFile();
- *
- * text('keyPress to record', 20, 20);
- * }
+ * SoundLoop
*
- * function keyPressed() {
- * // make sure user enabled the mic
- * if (state === 0 && mic.enabled) {
+ * @class p5.SoundLoop
+ * @constructor
*
- * // record to our p5.SoundFile
- * recorder.record(soundFile);
+ * @param {Function} callback this function will be called on each iteration of theloop
+ * @param {Number|String} [interval] amount of time or beats for each iteration of the loop
+ * defaults to 1
*
- * background(255,0,0);
- * text('Recording!', 20, 20);
- * state++;
- * }
- * else if (state === 1) {
- * background(0,255,0);
+ * @example
+ *
+ * var click;
+ * var looper1;
*
- * // stop recorder and
- * // send result to soundFile
- * recorder.stop();
+ * function preload() {
+ * click = loadSound('assets/drum.mp3');
+ * }
*
- * text('Stopped', 20, 20);
- * state++;
- * }
+ * function setup() {
+ * //the looper's callback is passed the timeFromNow
+ * //this value should be used as a reference point from
+ * //which to schedule sounds
+ * looper1 = new p5.SoundLoop(function(timeFromNow){
+ * click.play(timeFromNow);
+ * background(255 * (looper1.iterations % 2));
+ * }, 2);
*
- * else if (state === 2) {
- * soundFile.play(); // play the result!
- * save(soundFile, 'mySound.wav');
- * state++;
- * }
- * }
- *
+ * //stop after 10 iteratios;
+ * looper1.maxIterations = 10;
+ * //start the loop
+ * looper1.start();
+ * }
+ *
*/
- p5.SoundRecorder = function () {
- this.input = ac.createGain();
- this.output = ac.createGain();
- this.recording = false;
- this.bufferSize = 1024;
- this._channels = 2;
- // stereo (default)
- this._clear();
- // initialize variables
- this._jsNode = ac.createScriptProcessor(this.bufferSize, this._channels, 2);
- this._jsNode.onaudioprocess = this._audioprocess.bind(this);
+
+
+ p5.SoundLoop = function (callback, interval) {
+ this.callback = callback;
/**
- * callback invoked when the recording is over
- * @private
- * @type Function(Float32Array)
+ * musicalTimeMode uses Tone.Time convention
+ * true if string, false if number
+ * @property {Boolean} musicalTimeMode
*/
- this._callback = function () {
- };
- // connections
- this._jsNode.connect(p5.soundOut._silentNode);
- this.setInput();
- // add this p5.SoundFile to the soundArray
- p5sound.soundArray.push(this);
- };
- /**
- * Connect a specific device to the p5.SoundRecorder.
- * If no parameter is given, p5.SoundRecorer will record
- * all audible p5.sound from your sketch.
- *
- * @method setInput
- * @param {Object} [unit] p5.sound object or a web audio unit
- * that outputs sound
- */
- p5.SoundRecorder.prototype.setInput = function (unit) {
- this.input.disconnect();
- this.input = null;
- this.input = ac.createGain();
- this.input.connect(this._jsNode);
- this.input.connect(this.output);
- if (unit) {
- unit.connect(this.input);
- } else {
- p5.soundOut.output.connect(this.input);
- }
- };
- /**
- * Start recording. To access the recording, provide
- * a p5.SoundFile as the first parameter. The p5.SoundRecorder
- * will send its recording to that p5.SoundFile for playback once
- * recording is complete. Optional parameters include duration
- * (in seconds) of the recording, and a callback function that
- * will be called once the complete recording has been
- * transfered to the p5.SoundFile.
- *
- * @method record
- * @param {p5.SoundFile} soundFile p5.SoundFile
- * @param {Number} [duration] Time (in seconds)
- * @param {Function} [callback] The name of a function that will be
- * called once the recording completes
- */
- p5.SoundRecorder.prototype.record = function (sFile, duration, callback) {
- this.recording = true;
- if (duration) {
- this.sampleLimit = Math.round(duration * ac.sampleRate);
- }
- if (sFile && callback) {
- this._callback = function () {
- this.buffer = this._getBuffer();
- sFile.setBuffer(this.buffer);
- callback();
- };
- } else if (sFile) {
- this._callback = function () {
- this.buffer = this._getBuffer();
- sFile.setBuffer(this.buffer);
- };
- }
+
+ this.musicalTimeMode = typeof this._interval === 'number' ? false : true;
+ this._interval = interval || 1;
+ /**
+ * musicalTimeMode variables
+ * modify these only when the interval is specified in musicalTime format as a string
+ */
+
+ this._timeSignature = 4;
+ this._bpm = 60;
+ this.isPlaying = false;
+ /**
+ * Set a limit to the number of loops to play. defaults to Infinity
+ * @property {Number} maxIterations
+ */
+
+ this.maxIterations = Infinity;
+ var self = this;
+ this.clock = new Clock({
+ 'callback': function callback(time) {
+ var timeFromNow = time - p5sound.audiocontext.currentTime;
+ /**
+ * Do not initiate the callback if timeFromNow is < 0
+ * This ususually occurs for a few milliseconds when the page
+ * is not fully loaded
+ *
+ * The callback should only be called until maxIterations is reached
+ */
+
+ if (timeFromNow > 0 && self.iterations <= self.maxIterations) {
+ self.callback(timeFromNow);
+ }
+ },
+ 'frequency': this._calcFreq()
+ });
};
/**
- * Stop the recording. Once the recording is stopped,
- * the results will be sent to the p5.SoundFile that
- * was given on .record(), and if a callback function
- * was provided on record, that function will be called.
- *
- * @method stop
+ * Start the loop
+ * @method start
+ * @for p5.SoundLoop
+ * @param {Number} [timeFromNow] schedule a starting time
*/
- p5.SoundRecorder.prototype.stop = function () {
- this.recording = false;
- this._callback();
- this._clear();
- };
- p5.SoundRecorder.prototype._clear = function () {
- this._leftBuffers = [];
- this._rightBuffers = [];
- this.recordedSamples = 0;
- this.sampleLimit = null;
+
+
+ p5.SoundLoop.prototype.start = function (timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+
+ if (!this.isPlaying) {
+ this.clock.start(now + t);
+ this.isPlaying = true;
+ }
};
/**
- * internal method called on audio process
- *
- * @private
- * @param {AudioProcessorEvent} event
+ * Stop the loop
+ * @method stop
+ * @for p5.SoundLoop
+ * @param {Number} [timeFromNow] schedule a stopping time
*/
- p5.SoundRecorder.prototype._audioprocess = function (event) {
- if (this.recording === false) {
- return;
- } else if (this.recording === true) {
- // if we are past the duration, then stop... else:
- if (this.sampleLimit && this.recordedSamples >= this.sampleLimit) {
- this.stop();
- } else {
- // get channel data
- var left = event.inputBuffer.getChannelData(0);
- var right = event.inputBuffer.getChannelData(1);
- // clone the samples
- this._leftBuffers.push(new Float32Array(left));
- this._rightBuffers.push(new Float32Array(right));
- this.recordedSamples += this.bufferSize;
- }
- }
- };
- p5.SoundRecorder.prototype._getBuffer = function () {
- var buffers = [];
- buffers.push(this._mergeBuffers(this._leftBuffers));
- buffers.push(this._mergeBuffers(this._rightBuffers));
- return buffers;
- };
- p5.SoundRecorder.prototype._mergeBuffers = function (channelBuffer) {
- var result = new Float32Array(this.recordedSamples);
- var offset = 0;
- var lng = channelBuffer.length;
- for (var i = 0; i < lng; i++) {
- var buffer = channelBuffer[i];
- result.set(buffer, offset);
- offset += buffer.length;
- }
- return result;
- };
- p5.SoundRecorder.prototype.dispose = function () {
- this._clear();
- // remove reference from soundArray
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
- this._callback = function () {
- };
- if (this.input) {
- this.input.disconnect();
+
+
+ p5.SoundLoop.prototype.stop = function (timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+
+ if (this.isPlaying) {
+ this.clock.stop(now + t);
+ this.isPlaying = false;
}
- this.input = null;
- this._jsNode = null;
};
/**
- * Save a p5.SoundFile as a .wav file. The browser will prompt the user
- * to download the file to their device.
- * For uploading audio to a server, use
- * `p5.SoundFile.saveBlob`.
- *
- * @for p5
- * @method saveSound
- * @param {p5.SoundFile} soundFile p5.SoundFile that you wish to save
- * @param {String} fileName name of the resulting .wav file.
+ * Pause the loop
+ * @method pause
+ * @for p5.SoundLoop
+ * @param {Number} [timeFromNow] schedule a pausing time
*/
- // add to p5.prototype as this is used by the p5 `save()` method.
- p5.prototype.saveSound = function (soundFile, fileName) {
- const dataView = convertToWav(soundFile.buffer);
- p5.prototype.writeFile([dataView], fileName, 'wav');
+
+
+ p5.SoundLoop.prototype.pause = function (timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+
+ if (this.isPlaying) {
+ this.clock.pause(now + t);
+ this.isPlaying = false;
+ }
};
-}(master, helpers);
-var peakdetect;
-'use strict';
-peakdetect = function () {
/**
- * PeakDetect works in conjunction with p5.FFT to
- * look for onsets in some or all of the frequency spectrum.
- *
- *
- * To use p5.PeakDetect, call update
in the draw loop
- * and pass in a p5.FFT object.
- *
- *
- * You can listen for a specific part of the frequency spectrum by
- * setting the range between freq1
and freq2
.
- *
- *
- * threshold
is the threshold for detecting a peak,
- * scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud
- * as 1.0.
- *
- *
- * The update method is meant to be run in the draw loop, and
- * frames determines how many loops must pass before
- * another peak can be detected.
- * For example, if the frameRate() = 60, you could detect the beat of a
- * 120 beat-per-minute song with this equation:
- * framesPerPeak = 60 / (estimatedBPM / 60 );
- *
- *
- *
- * Based on example contribtued by @b2renger, and a simple beat detection
- * explanation by Felix Turner.
- *
- *
- * @class p5.PeakDetect
- * @constructor
- * @param {Number} [freq1] lowFrequency - defaults to 20Hz
- * @param {Number} [freq2] highFrequency - defaults to 20000 Hz
- * @param {Number} [threshold] Threshold for detecting a beat between 0 and 1
- * scaled logarithmically where 0.1 is 1/2 the loudness
- * of 1.0. Defaults to 0.35.
- * @param {Number} [framesPerPeak] Defaults to 20.
- * @example
- *
- *
- * var cnv, soundFile, fft, peakDetect;
- * var ellipseWidth = 10;
- *
- * function preload() {
- * soundFile = loadSound('assets/beat.mp3');
- * }
- *
- * function setup() {
- * background(0);
- * noStroke();
- * fill(255);
- * textAlign(CENTER);
- *
- * // p5.PeakDetect requires a p5.FFT
- * fft = new p5.FFT();
- * peakDetect = new p5.PeakDetect();
- * }
- *
- * function draw() {
- * background(0);
- * text('click to play/pause', width/2, height/2);
- *
- * // peakDetect accepts an fft post-analysis
- * fft.analyze();
- * peakDetect.update(fft);
- *
- * if ( peakDetect.isDetected ) {
- * ellipseWidth = 50;
- * } else {
- * ellipseWidth *= 0.95;
- * }
- *
- * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
- * }
+ * Synchronize loops. Use this method to start two more more loops in synchronization
+ * or to start a loop in synchronization with a loop that is already playing
+ * This method will schedule the implicit loop in sync with the explicit master loop
+ * i.e. loopToStart.syncedStart(loopToSyncWith)
*
- * // toggle play/stop when canvas is clicked
- * function mouseClicked() {
- * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
- * if (soundFile.isPlaying() ) {
- * soundFile.stop();
- * } else {
- * soundFile.play();
- * }
- * }
- * }
- *
+ * @method syncedStart
+ * @for p5.SoundLoop
+ * @param {Object} otherLoop a p5.SoundLoop to sync with
+ * @param {Number} [timeFromNow] Start the loops in sync after timeFromNow seconds
*/
- p5.PeakDetect = function (freq1, freq2, threshold, _framesPerPeak) {
- // framesPerPeak determines how often to look for a beat.
- // If a beat is provided, try to look for a beat based on bpm
- this.framesPerPeak = _framesPerPeak || 20;
- this.framesSinceLastPeak = 0;
- this.decayRate = 0.95;
- this.threshold = threshold || 0.35;
- this.cutoff = 0;
- // how much to increase the cutoff
- // TO DO: document this / figure out how to make it accessible
- this.cutoffMult = 1.5;
- this.energy = 0;
- this.penergy = 0;
- // TO DO: document this property / figure out how to make it accessible
- this.currentValue = 0;
- /**
- * isDetected is set to true when a peak is detected.
- *
- * @attribute isDetected {Boolean}
- * @default false
- */
- this.isDetected = false;
- this.f1 = freq1 || 40;
- this.f2 = freq2 || 20000;
- // function to call when a peak is detected
- this._onPeak = function () {
- };
+
+
+ p5.SoundLoop.prototype.syncedStart = function (otherLoop, timeFromNow) {
+ var t = timeFromNow || 0;
+ var now = p5sound.audiocontext.currentTime;
+
+ if (!otherLoop.isPlaying) {
+ otherLoop.clock.start(now + t);
+ otherLoop.isPlaying = true;
+ this.clock.start(now + t);
+ this.isPlaying = true;
+ } else if (otherLoop.isPlaying) {
+ var time = otherLoop.clock._nextTick - p5sound.audiocontext.currentTime;
+ this.clock.start(now + time);
+ this.isPlaying = true;
+ }
};
/**
- * The update method is run in the draw loop.
- *
- * Accepts an FFT object. You must call .analyze()
- * on the FFT object prior to updating the peakDetect
- * because it relies on a completed FFT analysis.
- *
- * @method update
- * @param {p5.FFT} fftObject A p5.FFT object
+ * Updates frequency value, reflected in next callback
+ * @private
+ * @for p5.SoundLoop
+ * @method _update
*/
- p5.PeakDetect.prototype.update = function (fftObject) {
- var nrg = this.energy = fftObject.getEnergy(this.f1, this.f2) / 255;
- if (nrg > this.cutoff && nrg > this.threshold && nrg - this.penergy > 0) {
- // trigger callback
- this._onPeak();
- this.isDetected = true;
- // debounce
- this.cutoff = nrg * this.cutoffMult;
- this.framesSinceLastPeak = 0;
- } else {
- this.isDetected = false;
- if (this.framesSinceLastPeak <= this.framesPerPeak) {
- this.framesSinceLastPeak++;
- } else {
- this.cutoff *= this.decayRate;
- this.cutoff = Math.max(this.cutoff, this.threshold);
+
+
+ p5.SoundLoop.prototype._update = function () {
+ this.clock.frequency.value = this._calcFreq();
+ };
+ /**
+ * Calculate the frequency of the clock's callback based on bpm, interval, and timesignature
+ * @private
+ * @for p5.SoundLoop
+ * @method _calcFreq
+ * @return {Number} new clock frequency value
+ */
+
+
+ p5.SoundLoop.prototype._calcFreq = function () {
+ //Seconds mode, bpm / timesignature has no effect
+ if (typeof this._interval === 'number') {
+ this.musicalTimeMode = false;
+ return 1 / this._interval;
+ } //Musical timing mode, calculate interval based bpm, interval,and time signature
+ else if (typeof this._interval === 'string') {
+ this.musicalTimeMode = true;
+ return this._bpm / 60 / this._convertNotation(this._interval) * (this._timeSignature / 4);
}
+ };
+ /**
+ * Convert notation from musical time format to seconds
+ * Uses Tone.Time convention
+ * @private
+ * @for p5.SoundLoop
+ * @method _convertNotation
+ * @param {String} value value to be converted
+ * @return {Number} converted value in seconds
+ */
+
+
+ p5.SoundLoop.prototype._convertNotation = function (value) {
+ var type = value.slice(-1);
+ value = Number(value.slice(0, -1));
+
+ switch (type) {
+ case 'm':
+ return this._measure(value);
+
+ case 'n':
+ return this._note(value);
+
+ default:
+ console.warn('Specified interval is not formatted correctly. See Tone.js ' + 'timing reference for more info: https://github.com/Tonejs/Tone.js/wiki/Time');
}
- this.currentValue = nrg;
- this.penergy = nrg;
};
/**
- * onPeak accepts two arguments: a function to call when
- * a peak is detected. The value of the peak,
- * between 0.0 and 1.0, is passed to the callback.
- *
- * @method onPeak
- * @param {Function} callback Name of a function that will
- * be called when a peak is
- * detected.
- * @param {Object} [val] Optional value to pass
- * into the function when
- * a peak is detected.
- * @example
- *
- * var cnv, soundFile, fft, peakDetect;
- * var ellipseWidth = 0;
- *
- * function preload() {
- * soundFile = loadSound('assets/beat.mp3');
- * }
- *
- * function setup() {
- * cnv = createCanvas(100,100);
- * textAlign(CENTER);
- *
- * fft = new p5.FFT();
- * peakDetect = new p5.PeakDetect();
- *
- * setupSound();
- *
- * // when a beat is detected, call triggerBeat()
- * peakDetect.onPeak(triggerBeat);
- * }
- *
- * function draw() {
- * background(0);
- * fill(255);
- * text('click to play', width/2, height/2);
- *
- * fft.analyze();
- * peakDetect.update(fft);
- *
- * ellipseWidth *= 0.95;
- * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
- * }
- *
- * // this function is called by peakDetect.onPeak
- * function triggerBeat() {
- * ellipseWidth = 50;
- * }
- *
- * // mouseclick starts/stops sound
- * function setupSound() {
- * cnv.mouseClicked( function() {
- * if (soundFile.isPlaying() ) {
- * soundFile.stop();
- * } else {
- * soundFile.play();
- * }
- * });
- * }
- *
+ * Helper conversion methods of measure and note
+ * @private
+ * @for p5.SoundLoop
+ * @method _measure
*/
- p5.PeakDetect.prototype.onPeak = function (callback, val) {
- var self = this;
- self._onPeak = function () {
- callback(self.energy, val);
- };
+
+
+ p5.SoundLoop.prototype._measure = function (value) {
+ return value * this._timeSignature;
};
-}();
-var gain;
-'use strict';
-gain = function () {
- var p5sound = master;
/**
- * A gain node is usefull to set the relative volume of sound.
- * It's typically used to build mixers.
- *
- * @class p5.Gain
- * @constructor
- * @example
- *
- *
- * // load two soundfile and crossfade beetween them
- * var sound1,sound2;
- * var gain1, gain2, gain3;
- *
- * function preload(){
- * soundFormats('ogg', 'mp3');
- * sound1 = loadSound('assets/Damscray_-_Dancing_Tiger_01');
- * sound2 = loadSound('assets/beat.mp3');
- * }
- *
- * function setup() {
- * createCanvas(400,200);
- *
- * // create a 'master' gain to which we will connect both soundfiles
- * gain3 = new p5.Gain();
- * gain3.connect();
- *
- * // setup first sound for playing
- * sound1.rate(1);
- * sound1.loop();
- * sound1.disconnect(); // diconnect from p5 output
- *
- * gain1 = new p5.Gain(); // setup a gain node
- * gain1.setInput(sound1); // connect the first sound to its input
- * gain1.connect(gain3); // connect its output to the 'master'
- *
- * sound2.rate(1);
- * sound2.disconnect();
- * sound2.loop();
- *
- * gain2 = new p5.Gain();
- * gain2.setInput(sound2);
- * gain2.connect(gain3);
- *
- * }
- *
- * function draw(){
- * background(180);
- *
- * // calculate the horizontal distance beetween the mouse and the right of the screen
- * var d = dist(mouseX,0,width,0);
+ * @private
+ * @method _note
+ * @for p5.SoundLoop
+ */
+
+
+ p5.SoundLoop.prototype._note = function (value) {
+ return this._timeSignature / value;
+ };
+ /**
+ * Getters and Setters, setting any paramter will result in a change in the clock's
+ * frequency, that will be reflected after the next callback
+ * beats per minute (defaults to 60)
+ * @property {Number} bpm
+ * @for p5.SoundLoop
+ */
+
+
+ Object.defineProperty(p5.SoundLoop.prototype, 'bpm', {
+ get: function get() {
+ return this._bpm;
+ },
+ set: function set(bpm) {
+ if (!this.musicalTimeMode) {
+ console.warn('Changing the BPM in "seconds" mode has no effect. ' + 'BPM is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
+ }
+
+ this._bpm = bpm;
+
+ this._update();
+ }
+ });
+ /**
+ * number of quarter notes in a measure (defaults to 4)
+ * @property {Number} timeSignature
+ * @for p5.SoundLoop
+ */
+
+ Object.defineProperty(p5.SoundLoop.prototype, 'timeSignature', {
+ get: function get() {
+ return this._timeSignature;
+ },
+ set: function set(timeSig) {
+ if (!this.musicalTimeMode) {
+ console.warn('Changing the timeSignature in "seconds" mode has no effect. ' + 'BPM is only relevant in musicalTimeMode ' + 'when the interval is specified as a string ' + '("2n", "4n", "1m"...etc)');
+ }
+
+ this._timeSignature = timeSig;
+
+ this._update();
+ }
+ });
+ /**
+ * length of the loops interval
+ * @property {Number|String} interval
+ * @for p5.SoundLoop
+ */
+
+ Object.defineProperty(p5.SoundLoop.prototype, 'interval', {
+ get: function get() {
+ return this._interval;
+ },
+ set: function set(interval) {
+ this.musicalTimeMode = typeof interval === 'Number' ? false : true;
+ this._interval = interval;
+
+ this._update();
+ }
+ });
+ /**
+ * how many times the callback has been called so far
+ * @property {Number} iterations
+ * @for p5.SoundLoop
+ * @readonly
+ */
+
+ Object.defineProperty(p5.SoundLoop.prototype, 'iterations', {
+ get: function get() {
+ return this.clock.ticks;
+ }
+ });
+ return p5.SoundLoop;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 63 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var __WEBPACK_AMD_DEFINE_RESULT__;!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ 'use strict';
+
+ var p5sound = __webpack_require__(1);
+
+ var Effect = __webpack_require__(4);
+
+ var CustomError = __webpack_require__(10);
+ /**
+ * Compressor is an audio effect class that performs dynamics compression
+ * on an audio input source. This is a very commonly used technique in music
+ * and sound production. Compression creates an overall louder, richer,
+ * and fuller sound by lowering the volume of louds and raising that of softs.
+ * Compression can be used to avoid clipping (sound distortion due to
+ * peaks in volume) and is especially useful when many sounds are played
+ * at once. Compression can be used on indivudal sound sources in addition
+ * to the master output.
*
- * // map the horizontal position of the mouse to values useable for volume control of sound1
- * var vol1 = map(mouseX,0,width,0,1);
- * var vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
+ * disconnect() are available.
*
- * gain1.amp(vol1,0.5,0);
- * gain2.amp(vol2,0.5,0);
+ * @class p5.Compressor
+ * @constructor
+ * @extends p5.Effect
*
- * // map the vertical position of the mouse to values useable for 'master volume control'
- * var vol3 = map(mouseY,0,height,0,1);
- * gain3.amp(vol3,0.5,0);
- * }
- *
*
*/
- p5.Gain = function () {
- this.ac = p5sound.audiocontext;
- this.input = this.ac.createGain();
- this.output = this.ac.createGain();
- // otherwise, Safari distorts
- this.input.gain.value = 0.5;
- this.input.connect(this.output);
- // add to the soundArray
- p5sound.soundArray.push(this);
+
+
+ p5.Compressor = function () {
+ Effect.call(this);
+ /**
+ * The p5.Compressor is built with a Web Audio Dynamics Compressor Node
+ *
+ * @property {AudioNode} compressor
+ */
+
+ this.compressor = this.ac.createDynamicsCompressor();
+ this.input.connect(this.compressor);
+ this.compressor.connect(this.wet);
};
+
+ p5.Compressor.prototype = Object.create(Effect.prototype);
/**
- * Connect a source to the gain node.
+ * Performs the same function as .connect, but also accepts
+ * optional parameters to set compressor's audioParams
+ * @method process
+ * @for p5.Compressor
*
- * @method setInput
- * @param {Object} src p5.sound / Web Audio object with a sound
- * output.
+ * @param {Object} src Sound source to be connected
+ *
+ * @param {Number} [attack] The amount of time (in seconds) to reduce the gain by 10dB,
+ * default = .003, range 0 - 1
+ * @param {Number} [knee] A decibel value representing the range above the
+ * threshold where the curve smoothly transitions to the "ratio" portion.
+ * default = 30, range 0 - 40
+ * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
+ * default = 12, range 1 - 20
+ * @param {Number} [threshold] The decibel value above which the compression will start taking effect
+ * default = -24, range -100 - 0
+ * @param {Number} [release] The amount of time (in seconds) to increase the gain by 10dB
+ * default = .25, range 0 - 1
*/
- p5.Gain.prototype.setInput = function (src) {
+
+ p5.Compressor.prototype.process = function (src, attack, knee, ratio, threshold, release) {
src.connect(this.input);
+ this.set(attack, knee, ratio, threshold, release);
};
/**
- * Send output to a p5.sound or web audio object
- *
- * @method connect
- * @param {Object} unit
+ * Set the paramters of a compressor.
+ * @method set
+ * @for p5.Compressor
+ * @param {Number} attack The amount of time (in seconds) to reduce the gain by 10dB,
+ * default = .003, range 0 - 1
+ * @param {Number} knee A decibel value representing the range above the
+ * threshold where the curve smoothly transitions to the "ratio" portion.
+ * default = 30, range 0 - 40
+ * @param {Number} ratio The amount of dB change in input for a 1 dB change in output
+ * default = 12, range 1 - 20
+ * @param {Number} threshold The decibel value above which the compression will start taking effect
+ * default = -24, range -100 - 0
+ * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
+ * default = .25, range 0 - 1
*/
- p5.Gain.prototype.connect = function (unit) {
- var u = unit || p5.soundOut.input;
- this.output.connect(u.input ? u.input : u);
+
+
+ p5.Compressor.prototype.set = function (attack, knee, ratio, threshold, release) {
+ if (typeof attack !== 'undefined') {
+ this.attack(attack);
+ }
+
+ if (typeof knee !== 'undefined') {
+ this.knee(knee);
+ }
+
+ if (typeof ratio !== 'undefined') {
+ this.ratio(ratio);
+ }
+
+ if (typeof threshold !== 'undefined') {
+ this.threshold(threshold);
+ }
+
+ if (typeof release !== 'undefined') {
+ this.release(release);
+ }
};
/**
- * Disconnect all output.
+ * Get current attack or set value w/ time ramp
*
- * @method disconnect
+ *
+ * @method attack
+ * @for p5.Compressor
+ * @param {Number} [attack] Attack is the amount of time (in seconds) to reduce the gain by 10dB,
+ * default = .003, range 0 - 1
+ * @param {Number} [time] Assign time value to schedule the change in value
*/
- p5.Gain.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
+
+
+ p5.Compressor.prototype.attack = function (attack, time) {
+ var t = time || 0;
+
+ if (typeof attack == 'number') {
+ this.compressor.attack.value = attack;
+ this.compressor.attack.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.attack.linearRampToValueAtTime(attack, this.ac.currentTime + 0.02 + t);
+ } else if (typeof attack !== 'undefined') {
+ attack.connect(this.compressor.attack);
}
+
+ return this.compressor.attack.value;
};
/**
- * Set the output level of the gain node.
- *
- * @method amp
- * @param {Number} volume amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts rampTime
- * @param {Number} [timeFromNow] schedule this event to happen
- * seconds from now
- */
- p5.Gain.prototype.amp = function (vol, rampTime, tFromNow) {
- var rampTime = rampTime || 0;
- var tFromNow = tFromNow || 0;
- var now = p5sound.audiocontext.currentTime;
- var currentVol = this.output.gain.value;
- this.output.gain.cancelScheduledValues(now);
- this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
- this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
- };
- p5.Gain.prototype.dispose = function () {
- // remove reference from soundArray
- var index = p5sound.soundArray.indexOf(this);
- p5sound.soundArray.splice(index, 1);
- if (this.output) {
- this.output.disconnect();
- delete this.output;
- }
- if (this.input) {
- this.input.disconnect();
- delete this.input;
+ * Get current knee or set value w/ time ramp
+ *
+ * @method knee
+ * @for p5.Compressor
+ * @param {Number} [knee] A decibel value representing the range above the
+ * threshold where the curve smoothly transitions to the "ratio" portion.
+ * default = 30, range 0 - 40
+ * @param {Number} [time] Assign time value to schedule the change in value
+ */
+
+
+ p5.Compressor.prototype.knee = function (knee, time) {
+ var t = time || 0;
+
+ if (typeof knee == 'number') {
+ this.compressor.knee.value = knee;
+ this.compressor.knee.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.knee.linearRampToValueAtTime(knee, this.ac.currentTime + 0.02 + t);
+ } else if (typeof knee !== 'undefined') {
+ knee.connect(this.compressor.knee);
}
+
+ return this.compressor.knee.value;
};
-}(master);
-var audioVoice;
-'use strict';
-audioVoice = function () {
- var p5sound = master;
/**
- * Base class for monophonic synthesizers. Any extensions of this class
- * should follow the API and implement the methods below in order to
- * remain compatible with p5.PolySynth();
- *
- * @class p5.AudioVoice
- * @constructor
+ * Get current ratio or set value w/ time ramp
+ * @method ratio
+ * @for p5.Compressor
+ * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
+ * default = 12, range 1 - 20
+ * @param {Number} [time] Assign time value to schedule the change in value
*/
- p5.AudioVoice = function () {
- this.ac = p5sound.audiocontext;
- this.output = this.ac.createGain();
- this.connect();
- p5sound.soundArray.push(this);
- };
- p5.AudioVoice.prototype.play = function (note, velocity, secondsFromNow, sustime) {
- };
- p5.AudioVoice.prototype.triggerAttack = function (note, velocity, secondsFromNow) {
- };
- p5.AudioVoice.prototype.triggerRelease = function (secondsFromNow) {
- };
- p5.AudioVoice.prototype.amp = function (vol, rampTime) {
+
+
+ p5.Compressor.prototype.ratio = function (ratio, time) {
+ var t = time || 0;
+
+ if (typeof ratio == 'number') {
+ this.compressor.ratio.value = ratio;
+ this.compressor.ratio.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.ratio.linearRampToValueAtTime(ratio, this.ac.currentTime + 0.02 + t);
+ } else if (typeof ratio !== 'undefined') {
+ ratio.connect(this.compressor.ratio);
+ }
+
+ return this.compressor.ratio.value;
};
/**
- * Connect to p5 objects or Web Audio Nodes
- * @method connect
- * @param {Object} unit
+ * Get current threshold or set value w/ time ramp
+ * @method threshold
+ * @for p5.Compressor
+ * @param {Number} threshold The decibel value above which the compression will start taking effect
+ * default = -24, range -100 - 0
+ * @param {Number} [time] Assign time value to schedule the change in value
*/
- p5.AudioVoice.prototype.connect = function (unit) {
- var u = unit || p5sound.input;
- this.output.connect(u.input ? u.input : u);
+
+
+ p5.Compressor.prototype.threshold = function (threshold, time) {
+ var t = time || 0;
+
+ if (typeof threshold == 'number') {
+ this.compressor.threshold.value = threshold;
+ this.compressor.threshold.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.threshold.linearRampToValueAtTime(threshold, this.ac.currentTime + 0.02 + t);
+ } else if (typeof threshold !== 'undefined') {
+ threshold.connect(this.compressor.threshold);
+ }
+
+ return this.compressor.threshold.value;
};
/**
- * Disconnect from soundOut
- * @method disconnect
+ * Get current release or set value w/ time ramp
+ * @method release
+ * @for p5.Compressor
+ * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
+ * default = .25, range 0 - 1
+ *
+ * @param {Number} [time] Assign time value to schedule the change in value
*/
- p5.AudioVoice.prototype.disconnect = function () {
- this.output.disconnect();
- };
- p5.AudioVoice.prototype.dispose = function () {
- if (this.output) {
- this.output.disconnect();
- delete this.output;
+
+
+ p5.Compressor.prototype.release = function (release, time) {
+ var t = time || 0;
+
+ if (typeof release == 'number') {
+ this.compressor.release.value = release;
+ this.compressor.release.cancelScheduledValues(this.ac.currentTime + 0.01 + t);
+ this.compressor.release.linearRampToValueAtTime(release, this.ac.currentTime + 0.02 + t);
+ } else if (typeof number !== 'undefined') {
+ release.connect(this.compressor.release);
}
+
+ return this.compressor.release.value;
};
- return p5.AudioVoice;
-}(master);
-var monosynth;
-'use strict';
-monosynth = function () {
- var p5sound = master;
- var AudioVoice = audioVoice;
- var noteToFreq = helpers.noteToFreq;
- var DEFAULT_SUSTAIN = 0.15;
/**
- * A MonoSynth is used as a single voice for sound synthesis.
- * This is a class to be used in conjunction with the PolySynth
- * class. Custom synthetisers should be built inheriting from
- * this class.
- *
- * @class p5.MonoSynth
- * @constructor
- * @example
- *
- * var monoSynth;
- *
- * function setup() {
- * var cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
- *
- * monoSynth = new p5.MonoSynth();
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- * }
- *
- * function playSynth() {
- * // time from now (in seconds)
- * var time = 0;
- * // note duration (in seconds)
- * var dur = 0.25;
- * // velocity (volume, from 0 to 1)
- * var v = 0.2;
- *
- * monoSynth.play("G3", v, time, dur);
- * monoSynth.play("C4", v, time += dur, dur);
- *
- * background(random(255), random(255), 255);
- * text('click to play', width/2, height/2);
- * }
- *
- **/
- p5.MonoSynth = function () {
- AudioVoice.call(this);
- this.oscillator = new p5.Oscillator();
- this.env = new p5.Envelope();
- this.env.setRange(1, 0);
- this.env.setExp(true);
- //set params
- this.setADSR(0.02, 0.25, 0.05, 0.35);
- // oscillator --> env --> this.output (gain) --> p5.soundOut
- this.oscillator.disconnect();
- this.oscillator.connect(this.output);
- this.env.disconnect();
- this.env.setInput(this.output.gain);
- // reset oscillator gain to 1.0
- this.oscillator.output.gain.value = 1;
- this.oscillator.start();
- this.connect();
- p5sound.soundArray.push(this);
+ * Return the current reduction value
+ *
+ * @method reduction
+ * @for p5.Compressor
+ * @return {Number} Value of the amount of gain reduction that is applied to the signal
+ */
+
+
+ p5.Compressor.prototype.reduction = function () {
+ return this.compressor.reduction.value;
};
- p5.MonoSynth.prototype = Object.create(p5.AudioVoice.prototype);
- /**
- * Play tells the MonoSynth to start playing a note. This method schedules
- * the calling of .triggerAttack and .triggerRelease.
- *
- * @method play
- * @param {String | Number} note the note you want to play, specified as a
- * frequency in Hertz (Number) or as a midi
- * value in Note/Octave format ("C4", "Eb3"...etc")
- * See
- * Tone. Defaults to 440 hz.
- * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
- * @param {Number} [secondsFromNow] time from now (in seconds) at which to play
- * @param {Number} [sustainTime] time to sustain before releasing the envelope
- * @example
- *
- * var monoSynth;
- *
- * function setup() {
- * var cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
- *
- * monoSynth = new p5.MonoSynth();
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- * }
- *
- * function playSynth() {
- * // time from now (in seconds)
- * var time = 0;
- * // note duration (in seconds)
- * var dur = 1/6;
- * // note velocity (volume, from 0 to 1)
- * var v = random();
- *
- * monoSynth.play("Fb3", v, 0, dur);
- * monoSynth.play("Gb3", v, time += dur, dur);
- *
- * background(random(255), random(255), 255);
- * text('click to play', width/2, height/2);
- * }
- *
- *
- */
- p5.MonoSynth.prototype.play = function (note, velocity, secondsFromNow, susTime) {
- this.triggerAttack(note, velocity, ~~secondsFromNow);
- this.triggerRelease(~~secondsFromNow + (susTime || DEFAULT_SUSTAIN));
+
+ p5.Compressor.prototype.dispose = function () {
+ Effect.prototype.dispose.apply(this);
+
+ if (this.compressor) {
+ this.compressor.disconnect();
+ delete this.compressor;
+ }
};
+
+ return p5.Compressor;
+}).call(exports, __webpack_require__, exports, module),
+ __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
+
+/***/ }),
+/* 64 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+var __WEBPACK_AMD_DEFINE_RESULT__;
+
+!(__WEBPACK_AMD_DEFINE_RESULT__ = (function (require) {
+ // inspiration: recorder.js, Tone.js & typedarray.org
+ var p5sound = __webpack_require__(1);
+
+ var convertToWav = __webpack_require__(6).convertToWav;
+
+ var ac = p5sound.audiocontext;
/**
- * Trigger the Attack, and Decay portion of the Envelope.
- * Similar to holding down a key on a piano, but it will
- * hold the sustain level until you let go.
+ * Record sounds for playback and/or to save as a .wav file.
+ * The p5.SoundRecorder records all sound output from your sketch,
+ * or can be assigned a specific source with setInput().
+ * The record() method accepts a p5.SoundFile as a parameter.
+ * When playback is stopped (either after the given amount of time,
+ * or with the stop() method), the p5.SoundRecorder will send its
+ * recording to that p5.SoundFile for playback.
*
- * @param {String | Number} note the note you want to play, specified as a
- * frequency in Hertz (Number) or as a midi
- * value in Note/Octave format ("C4", "Eb3"...etc")
- * See
- * Tone. Defaults to 440 hz
- * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
- * @param {Number} [secondsFromNow] time from now (in seconds) at which to play
- * @method triggerAttack
+ * @class p5.SoundRecorder
+ * @constructor
* @example
*
- * var monoSynth = new p5.MonoSynth();
+ * var mic, recorder, soundFile;
+ * var state = 0;
*
- * function mousePressed() {
- * monoSynth.triggerAttack("E3");
- * }
+ * function setup() {
+ * background(200);
+ * // create an audio in
+ * mic = new p5.AudioIn();
*
- * function mouseReleased() {
- * monoSynth.triggerRelease();
- * }
- *
- */
- p5.MonoSynth.prototype.triggerAttack = function (note, velocity, secondsFromNow) {
- var secondsFromNow = ~~secondsFromNow;
- var freq = noteToFreq(note);
- var vel = velocity || 0.1;
- this.oscillator.freq(freq, 0, secondsFromNow);
- this.env.ramp(this.output.gain, secondsFromNow, vel);
- };
- /**
- * Trigger the release of the Envelope. This is similar to releasing
- * the key on a piano and letting the sound fade according to the
- * release level and release time.
+ * // prompts user to enable their browser mic
+ * mic.start();
*
- * @param {Number} secondsFromNow time to trigger the release
- * @method triggerRelease
- * @example
- *
- * var monoSynth = new p5.MonoSynth();
+ * // create a sound recorder
+ * recorder = new p5.SoundRecorder();
*
- * function mousePressed() {
- * monoSynth.triggerAttack("E3");
+ * // connect the mic to the recorder
+ * recorder.setInput(mic);
+ *
+ * // this sound file will be used to
+ * // playback & save the recording
+ * soundFile = new p5.SoundFile();
+ *
+ * text('keyPress to record', 20, 20);
* }
*
- * function mouseReleased() {
- * monoSynth.triggerRelease();
+ * function keyPressed() {
+ * // make sure user enabled the mic
+ * if (state === 0 && mic.enabled) {
+ *
+ * // record to our p5.SoundFile
+ * recorder.record(soundFile);
+ *
+ * background(255,0,0);
+ * text('Recording!', 20, 20);
+ * state++;
+ * }
+ * else if (state === 1) {
+ * background(0,255,0);
+ *
+ * // stop recorder and
+ * // send result to soundFile
+ * recorder.stop();
+ *
+ * text('Stopped', 20, 20);
+ * state++;
+ * }
+ *
+ * else if (state === 2) {
+ * soundFile.play(); // play the result!
+ * save(soundFile, 'mySound.wav');
+ * state++;
+ * }
* }
- *
+ *
setRange
),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- */
- p5.MonoSynth.prototype.setADSR = function (attack, decay, sustain, release) {
- this.env.setADSR(attack, decay, sustain, release);
- };
- /**
- * Getters and Setters
- * @property {Number} attack
- */
- /**
- * @property {Number} decay
- */
- /**
- * @property {Number} sustain
- */
- /**
- * @property {Number} release
- */
- Object.defineProperties(p5.MonoSynth.prototype, {
- 'attack': {
- get: function () {
- return this.env.aTime;
- },
- set: function (attack) {
- this.env.setADSR(attack, this.env.dTime, this.env.sPercent, this.env.rTime);
- }
- },
- 'decay': {
- get: function () {
- return this.env.dTime;
- },
- set: function (decay) {
- this.env.setADSR(this.env.aTime, decay, this.env.sPercent, this.env.rTime);
- }
- },
- 'sustain': {
- get: function () {
- return this.env.sPercent;
- },
- set: function (sustain) {
- this.env.setADSR(this.env.aTime, this.env.dTime, sustain, this.env.rTime);
- }
- },
- 'release': {
- get: function () {
- return this.env.rTime;
- },
- set: function (release) {
- this.env.setADSR(this.env.aTime, this.env.dTime, this.env.sPercent, release);
- }
- }
- });
- /**
- * MonoSynth amp
- * @method amp
- * @param {Number} vol desired volume
- * @param {Number} [rampTime] Time to reach new volume
- * @return {Number} new volume value
- */
- p5.MonoSynth.prototype.amp = function (vol, rampTime) {
- var t = rampTime || 0;
- if (typeof vol !== 'undefined') {
- this.oscillator.amp(vol, t);
+ * @method setInput
+ * @for p5.SoundRecorder
+ * @param {Object} [unit] p5.sound object or a web audio unit
+ * that outputs sound
+ */
+
+
+ p5.SoundRecorder.prototype.setInput = function (unit) {
+ this.input.disconnect();
+ this.input = null;
+ this.input = ac.createGain();
+ this.input.connect(this._jsNode);
+ this.input.connect(this.output);
+
+ if (unit) {
+ unit.connect(this.input);
+ } else {
+ p5.soundOut.output.connect(this.input);
}
- return this.oscillator.amp().value;
};
/**
- * Connect to a p5.sound / Web Audio object.
+ * Start recording. To access the recording, provide
+ * a p5.SoundFile as the first parameter. The p5.SoundRecorder
+ * will send its recording to that p5.SoundFile for playback once
+ * recording is complete. Optional parameters include duration
+ * (in seconds) of the recording, and a callback function that
+ * will be called once the complete recording has been
+ * transfered to the p5.SoundFile.
*
- * @method connect
- * @param {Object} unit A p5.sound or Web Audio object
+ * @method record
+ * @for p5.SoundRecorder
+ * @param {p5.SoundFile} soundFile p5.SoundFile
+ * @param {Number} [duration] Time (in seconds)
+ * @param {Function} [callback] The name of a function that will be
+ * called once the recording completes
*/
- p5.MonoSynth.prototype.connect = function (unit) {
- var u = unit || p5sound.input;
- this.output.connect(u.input ? u.input : u);
+
+
+ p5.SoundRecorder.prototype.record = function (sFile, duration, callback) {
+ this.recording = true;
+
+ if (duration) {
+ this.sampleLimit = Math.round(duration * ac.sampleRate);
+ }
+
+ if (sFile && callback) {
+ this._callback = function () {
+ this.buffer = this._getBuffer();
+ sFile.setBuffer(this.buffer);
+ callback();
+ };
+ } else if (sFile) {
+ this._callback = function () {
+ this.buffer = this._getBuffer();
+ sFile.setBuffer(this.buffer);
+ };
+ }
};
/**
- * Disconnect all outputs
+ * Stop the recording. Once the recording is stopped,
+ * the results will be sent to the p5.SoundFile that
+ * was given on .record(), and if a callback function
+ * was provided on record, that function will be called.
*
- * @method disconnect
+ * @method stop
+ * @for p5.SoundRecorder
*/
- p5.MonoSynth.prototype.disconnect = function () {
- if (this.output) {
- this.output.disconnect();
- }
+
+
+ p5.SoundRecorder.prototype.stop = function () {
+ this.recording = false;
+
+ this._callback();
+
+ this._clear();
+ };
+
+ p5.SoundRecorder.prototype._clear = function () {
+ this._leftBuffers = [];
+ this._rightBuffers = [];
+ this.recordedSamples = 0;
+ this.sampleLimit = null;
};
/**
- * Get rid of the MonoSynth and free up its resources / memory.
+ * internal method called on audio process
*
- * @method dispose
+ * @private
+ * @for p5.SoundRecorder
+ * @param {AudioProcessorEvent} event
*/
- p5.MonoSynth.prototype.dispose = function () {
- AudioVoice.prototype.dispose.apply(this);
- if (this.env) {
- this.env.dispose();
+
+
+ p5.SoundRecorder.prototype._audioprocess = function (event) {
+ if (this.recording === false) {
+ return;
+ } else if (this.recording === true) {
+ // if we are past the duration, then stop... else:
+ if (this.sampleLimit && this.recordedSamples >= this.sampleLimit) {
+ this.stop();
+ } else {
+ // get channel data
+ var left = event.inputBuffer.getChannelData(0);
+ var right = event.inputBuffer.getChannelData(1); // clone the samples
+
+ this._leftBuffers.push(new Float32Array(left));
+
+ this._rightBuffers.push(new Float32Array(right));
+
+ this.recordedSamples += this.bufferSize;
+ }
}
- if (this.oscillator) {
- this.oscillator.dispose();
+ };
+
+ p5.SoundRecorder.prototype._getBuffer = function () {
+ var buffers = [];
+ buffers.push(this._mergeBuffers(this._leftBuffers));
+ buffers.push(this._mergeBuffers(this._rightBuffers));
+ return buffers;
+ };
+
+ p5.SoundRecorder.prototype._mergeBuffers = function (channelBuffer) {
+ var result = new Float32Array(this.recordedSamples);
+ var offset = 0;
+ var lng = channelBuffer.length;
+
+ for (var i = 0; i < lng; i++) {
+ var buffer = channelBuffer[i];
+ result.set(buffer, offset);
+ offset += buffer.length;
}
+
+ return result;
};
-}(master, audioVoice, helpers);
-var polysynth;
-'use strict';
-polysynth = function () {
- var p5sound = master;
- var TimelineSignal = Tone_signal_TimelineSignal;
- var noteToFreq = helpers.noteToFreq;
- /**
- * An AudioVoice is used as a single voice for sound synthesis.
- * The PolySynth class holds an array of AudioVoice, and deals
- * with voices allocations, with setting notes to be played, and
- * parameters to be set.
- *
- * @class p5.PolySynth
- * @constructor
- *
- * @param {Number} [synthVoice] A monophonic synth voice inheriting
- * the AudioVoice class. Defaults to p5.MonoSynth
- * @param {Number} [maxVoices] Number of voices, defaults to 8;
- * @example
- *
- * var polySynth;
- *
- * function setup() {
- * var cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
- *
- * polySynth = new p5.PolySynth();
- *
- * textAlign(CENTER);
- * text('click to play', width/2, height/2);
- * }
- *
- * function playSynth() {
- * // note duration (in seconds)
- * var dur = 1.5;
- *
- * // time from now (in seconds)
- * var time = 0;
- *
- * // velocity (volume, from 0 to 1)
- * var vel = 0.1;
- *
- * // notes can overlap with each other
- * polySynth.play("G2", vel, 0, dur);
- * polySynth.play("C3", vel, time += 1/3, dur);
- * polySynth.play("G3", vel, time += 1/3, dur);
- *
- * background(random(255), random(255), 255);
- * text('click to play', width/2, height/2);
- * }
- *
PeakDetect works in conjunction with p5.FFT to + * look for onsets in some or all of the frequency spectrum. + *
+ *
+ * To use p5.PeakDetect, call update
in the draw loop
+ * and pass in a p5.FFT object.
+ *
+ * You can listen for a specific part of the frequency spectrum by
+ * setting the range between freq1
and freq2
.
+ *
threshold
is the threshold for detecting a peak,
+ * scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud
+ * as 1.0.
+ * The update method is meant to be run in the draw loop, and
+ * frames determines how many loops must pass before
+ * another peak can be detected.
+ * For example, if the frameRate() = 60, you could detect the beat of a
+ * 120 beat-per-minute song with this equation:
+ * framesPerPeak = 60 / (estimatedBPM / 60 );
+ *
+ * Based on example contribtued by @b2renger, and a simple beat detection + * explanation by Felix Turner. + *
+ * + * @class p5.PeakDetect + * @constructor + * @param {Number} [freq1] lowFrequency - defaults to 20Hz + * @param {Number} [freq2] highFrequency - defaults to 20000 Hz + * @param {Number} [threshold] Threshold for detecting a beat between 0 and 1 + * scaled logarithmically where 0.1 is 1/2 the loudness + * of 1.0. Defaults to 0.35. + * @param {Number} [framesPerPeak] Defaults to 20. * @example *
- * var polySynth;
*
- * function setup() {
- * var cnv = createCanvas(100, 100);
- * cnv.mousePressed(playSynth);
+ * var cnv, soundFile, fft, peakDetect;
+ * var ellipseWidth = 10;
*
- * polySynth = new p5.PolySynth();
+ * function preload() {
+ * soundFile = loadSound('assets/beat.mp3');
+ * }
*
+ * function setup() {
+ * background(0);
+ * noStroke();
+ * fill(255);
* textAlign(CENTER);
- * text('click to play', width/2, height/2);
+ *
+ * // p5.PeakDetect requires a p5.FFT
+ * fft = new p5.FFT();
+ * peakDetect = new p5.PeakDetect();
* }
*
- * function playSynth() {
- * // note duration (in seconds)
- * var dur = 0.1;
+ * function draw() {
+ * background(0);
+ * text('click to play/pause', width/2, height/2);
*
- * // time from now (in seconds)
- * var time = 0;
+ * // peakDetect accepts an fft post-analysis
+ * fft.analyze();
+ * peakDetect.update(fft);
*
- * // velocity (volume, from 0 to 1)
- * var vel = 0.1;
+ * if ( peakDetect.isDetected ) {
+ * ellipseWidth = 50;
+ * } else {
+ * ellipseWidth *= 0.95;
+ * }
*
- * polySynth.play("G2", vel, 0, dur);
- * polySynth.play("C3", vel, 0, dur);
- * polySynth.play("G3", vel, 0, dur);
+ * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);
+ * }
*
- * background(random(255), random(255), 255);
- * text('click to play', width/2, height/2);
+ * // toggle play/stop when canvas is clicked
+ * function mouseClicked() {
+ * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {
+ * if (soundFile.isPlaying() ) {
+ * soundFile.stop();
+ * } else {
+ * soundFile.play();
+ * }
+ * }
* }
*
setRange
),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- **/
- p5.PolySynth.prototype.noteADSR = function (note, a, d, s, r, timeFromNow) {
- var now = p5sound.audiocontext.currentTime;
- var timeFromNow = timeFromNow || 0;
- var t = now + timeFromNow;
- this.audiovoices[this.notes[note].getValueAtTime(t)].setADSR(a, d, s, r);
- };
- /**
- * Set the PolySynths global envelope. This method modifies the envelopes of each
- * monosynth so that all notes are played with this envelope.
+ * Accepts an FFT object. You must call .analyze()
+ * on the FFT object prior to updating the peakDetect
+ * because it relies on a completed FFT analysis.
*
- * @method setADSR
- * @param {Number} [attackTime] Time (in seconds before envelope
- * reaches Attack Level
- * @param {Number} [decayTime] Time (in seconds) before envelope
- * reaches Decay/Sustain Level
- * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,
- * where 1.0 = attackLevel, 0.0 = releaseLevel.
- * The susRatio determines the decayLevel and the level at which the
- * sustain portion of the envelope will sustain.
- * For example, if attackLevel is 0.4, releaseLevel is 0,
- * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is
- * increased to 1.0 (using setRange
),
- * then decayLevel would increase proportionally, to become 0.5.
- * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)
- **/
- p5.PolySynth.prototype.setADSR = function (a, d, s, r) {
- this.audiovoices.forEach(function (voice) {
- voice.setADSR(a, d, s, r);
- });
+ * @method update
+ * @param {p5.FFT} fftObject A p5.FFT object
+ */
+
+
+ p5.PeakDetect.prototype.update = function (fftObject) {
+ var nrg = this.energy = fftObject.getEnergy(this.f1, this.f2) / 255;
+
+ if (nrg > this.cutoff && nrg > this.threshold && nrg - this.penergy > 0) {
+ // trigger callback
+ this._onPeak();
+
+ this.isDetected = true; // debounce
+
+ this.cutoff = nrg * this.cutoffMult;
+ this.framesSinceLastPeak = 0;
+ } else {
+ this.isDetected = false;
+
+ if (this.framesSinceLastPeak <= this.framesPerPeak) {
+ this.framesSinceLastPeak++;
+ } else {
+ this.cutoff *= this.decayRate;
+ this.cutoff = Math.max(this.cutoff, this.threshold);
+ }
+ }
+
+ this.currentValue = nrg;
+ this.penergy = nrg;
};
/**
- * Trigger the Attack, and Decay portion of a MonoSynth.
- * Similar to holding down a key on a piano, but it will
- * hold the sustain level until you let go.
+ * onPeak accepts two arguments: a function to call when
+ * a peak is detected. The value of the peak,
+ * between 0.0 and 1.0, is passed to the callback.
*
- * @method noteAttack
- * @param {Number} [note] midi note on which attack should be triggered.
- * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)/
- * @param {Number} [secondsFromNow] time from now (in seconds)
+ * @method onPeak
+ * @param {Function} callback Name of a function that will
+ * be called when a peak is
+ * detected.
+ * @param {Object} [val] Optional value to pass
+ * into the function when
+ * a peak is detected.
* @example
*
- * var polySynth = new p5.PolySynth();
- * var pitches = ["G", "D", "G", "C"];
- * var octaves = [2, 3, 4];
+ * var cnv, soundFile, fft, peakDetect;
+ * var ellipseWidth = 0;
*
- * function mousePressed() {
- * // play a chord: multiple notes at the same time
- * for (var i = 0; i < 4; i++) {
- * var note = random(pitches) + random(octaves);
- * polySynth.noteAttack(note, 0.1);
- * }
+ * function preload() {
+ * soundFile = loadSound('assets/beat.mp3');
* }
*
- * function mouseReleased() {
- * // release all voices
- * polySynth.noteRelease();
+ * function setup() {
+ * cnv = createCanvas(100,100);
+ * textAlign(CENTER);
+ *
+ * fft = new p5.FFT();
+ * peakDetect = new p5.PeakDetect();
+ *
+ * setupSound();
+ *
+ * // when a beat is detected, call triggerBeat()
+ * peakDetect.onPeak(triggerBeat);
* }
- *
- * var pitches = ["G", "D", "G", "C"];
- * var octaves = [2, 3, 4];
- * var polySynth = new p5.PolySynth();
*
- * function mousePressed() {
- * // play a chord: multiple notes at the same time
- * for (var i = 0; i < 4; i++) {
- * var note = random(pitches) + random(octaves);
- * polySynth.noteAttack(note, 0.1);
- * }
- * }
+ * // load two soundfile and crossfade beetween them
+ * var sound1,sound2;
+ * var gain1, gain2, gain3;
*
- * function mouseReleased() {
- * // release all voices
- * polySynth.noteRelease();
- * }
- *
Scale the output of all sound in this sketch
\n * Scaled between 0.0 (silence) and 1.0 (full volume).\n * 1.0 is the maximum amplitude of a digital sound, so multiplying\n * by greater than 1.0 may cause digital distortion. To\n * fade, provide arampTime
parameter. For more\n * complex fades, see the Envelope class.\n *\n * Alternately, you can pass in a signal source such as an\n * oscillator to modulate the amplitude with an audio signal.\n *\n * How This Works: When you load the p5.sound module, it\n * creates a single instance of p5sound. All sound objects in this\n * module output to p5sound before reaching your computer's output.\n * So if you change the amplitude of p5sound, it impacts all of the\n * sound in this module.
\n *\n *If no value is provided, returns a Web Audio API Gain Node
\n *\n * @method masterVolume\n * @param {Number|Object} volume Volume (amplitude) between 0.0\n * and 1.0 or modulating signal/oscillator\n * @param {Number} [rampTime] Fade for t seconds\n * @param {Number} [timeFromNow] Schedule this event to happen at\n * t seconds in the future\n */\n p5.prototype.masterVolume = function(vol, rampTime, tFromNow) {\n if (typeof vol === 'number') {\n var rampTime = rampTime || 0;\n var tFromNow = tFromNow || 0;\n var now = p5sound.audiocontext.currentTime;\n var currentVol = p5sound.output.gain.value;\n p5sound.output.gain.cancelScheduledValues(now + tFromNow);\n p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);\n p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);\n }\n else if (vol) {\n vol.connect(p5sound.output.gain);\n } else {\n // return the Gain Node\n return p5sound.output.gain;\n }\n };\n\n /**\n * `p5.soundOut` is the p5.sound master output. It sends output to\n * the destination of this window's web audio context. It contains\n * Web Audio API nodes including a dyanmicsCompressor (.limiter
),\n * and Gain Nodes for .input
and .output
.\n *\n * @property {Object} soundOut\n */\n p5.prototype.soundOut = p5.soundOut = p5sound;\n\n /**\n * a silent connection to the DesinationNode\n * which will ensure that anything connected to it\n * will not be garbage collected\n *\n * @private\n */\n p5.soundOut._silentNode = p5sound.audiocontext.createGain();\n p5.soundOut._silentNode.gain.value = 0;\n p5.soundOut._silentNode.connect(p5sound.audiocontext.destination);\n\n\n return p5sound;\n});\n","define([\"Tone/core/Tone\", \"Tone/signal/WaveShaper\", \"Tone/type/Type\", \"Tone/core/Param\", \"Tone/core/Gain\"], function(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class A signal is an audio-rate value. Tone.Signal is a core component of the library.\n\t * Unlike a number, Signals can be scheduled with sample-level accuracy. Tone.Signal\n\t * has all of the methods available to native Web Audio \n\t * [AudioParam](http://webaudio.github.io/web-audio-api/#the-audioparam-interface)\n\t * as well as additional conveniences. Read more about working with signals \n\t * [here](https://github.com/Tonejs/Tone.js/wiki/Signals).\n\t *\n\t * @constructor\n\t * @extends {Tone.Param}\n\t * @param {Number|AudioParam} [value] Initial value of the signal. If an AudioParam\n\t * is passed in, that parameter will be wrapped\n\t * and controlled by the Signal. \n\t * @param {string} [units=Number] unit The units the signal is in. 
\n\t * @example\n\t * var signal = new Tone.Signal(10);\n\t */\n\tTone.Signal = function(){\n\n\t\tvar options = this.optionsObject(arguments, [\"value\", \"units\"], Tone.Signal.defaults);\n\n\t\t/**\n\t\t * The node where the constant signal value is scaled.\n\t\t * @type {GainNode}\n\t\t * @private\n\t\t */\n\t\tthis.output = this._gain = this.context.createGain();\n\n\t\toptions.param = this._gain.gain;\n\t\tTone.Param.call(this, options);\n\n\t\t/**\n\t\t * The node where the value is set.\n\t\t * @type {Tone.Param}\n\t\t * @private\n\t\t */\n\t\tthis.input = this._param = this._gain.gain;\n\n\t\t//connect the const output to the node output\n\t\tthis.context.getConstant(1).chain(this._gain);\n\t};\n\n\tTone.extend(Tone.Signal, Tone.Param);\n\n\t/**\n\t * The default values\n\t * @type {Object}\n\t * @static\n\t * @const\n\t */\n\tTone.Signal.defaults = {\n\t\t\"value\" : 0,\n\t\t\"units\" : Tone.Type.Default,\n\t\t\"convert\" : true,\n\t};\n\n\t/**\n\t * When signals connect to other signals or AudioParams, \n\t * they take over the output value of that signal or AudioParam. \n\t * For all other nodes, the behavior is the same as a default connect
. \n\t *\n\t * @override\n\t * @param {AudioParam|AudioNode|Tone.Signal|Tone} node \n\t * @param {number} [outputNumber=0] The output number to connect from.\n\t * @param {number} [inputNumber=0] The input number to connect to.\n\t * @returns {Tone.SignalBase} this\n\t * @method\n\t */\n\tTone.Signal.prototype.connect = Tone.SignalBase.prototype.connect;\n\n\t/**\n\t * dispose and disconnect\n\t * @returns {Tone.Signal} this\n\t */\n\tTone.Signal.prototype.dispose = function(){\n\t\tTone.Param.prototype.dispose.call(this);\n\t\tthis._param = null;\n\t\tthis._gain.disconnect();\n\t\tthis._gain = null;\n\t\treturn this;\n\t};\n\n\treturn Tone.Signal;\n});","define([\"Tone/core/Tone\", \"Tone/signal/Signal\", \"Tone/core/Gain\"], function(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class Multiply two incoming signals. Or, if a number is given in the constructor, \n\t * multiplies the incoming signal by that value. \n\t *\n\t * @constructor\n\t * @extends {Tone.Signal}\n\t * @param {number=} value Constant value to multiple. If no value is provided,\n\t * it will return the product of the first and second inputs\n\t * @example\n\t * var mult = new Tone.Multiply();\n\t * var sigA = new Tone.Signal(3);\n\t * var sigB = new Tone.Signal(4);\n\t * sigA.connect(mult, 0, 0);\n\t * sigB.connect(mult, 0, 1);\n\t * //output of mult is 12.\n\t * @example\n\t * var mult = new Tone.Multiply(10);\n\t * var sig = new Tone.Signal(2).connect(mult);\n\t * //the output of mult is 20. 
\n\t */\n\tTone.Multiply = function(value){\n\n\t\tthis.createInsOuts(2, 0);\n\n\t\t/**\n\t\t * the input node is the same as the output node\n\t\t * it is also the GainNode which handles the scaling of incoming signal\n\t\t * \n\t\t * @type {GainNode}\n\t\t * @private\n\t\t */\n\t\tthis._mult = this.input[0] = this.output = new Tone.Gain();\n\n\t\t/**\n\t\t * the scaling parameter\n\t\t * @type {AudioParam}\n\t\t * @private\n\t\t */\n\t\tthis._param = this.input[1] = this.output.gain;\n\t\t\n\t\tthis._param.value = this.defaultArg(value, 0);\n\t};\n\n\tTone.extend(Tone.Multiply, Tone.Signal);\n\n\t/**\n\t * clean up\n\t * @returns {Tone.Multiply} this\n\t */\n\tTone.Multiply.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._mult.dispose();\n\t\tthis._mult = null;\n\t\tthis._param = null;\n\t\treturn this;\n\t}; \n\n\treturn Tone.Multiply;\n});\n","'use strict';\ndefine(function (require) {\n\n var p5sound = require('master');\n var CrossFade = require('Tone/component/CrossFade');\n\n /**\n * Effect is a base class for audio effects in p5. \n * var notes = [60, 64, 67, 72];\n * var i = 0;\n *\n * function setup() {\n * osc = new p5.Oscillator('Triangle');\n * osc.start();\n * frameRate(1);\n * }\n *\n * function draw() {\n * var freq = midiToFreq(notes[i]);\n * osc.freq(freq);\n * i++;\n * if (i >= notes.length){\n * i = 0;\n * }\n * }\n *
\n */\n var midiToFreq = p5.prototype.midiToFreq = function(m) {\n return 440 * Math.pow(2, (m-69)/12.0);\n };\n\n // This method converts ANSI notes specified as a string \"C4\", \"Eb3\" to a frequency\n var noteToFreq = function(note) {\n if (typeof note !== 'string') {\n return note;\n }\n var wholeNotes = {A:21, B:23, C:24, D:26, E:28, F:29, G:31};\n var value = wholeNotes[ note[0].toUpperCase() ];\n var octave = ~~note.slice(-1);\n value += 12 * (octave -1);\n\n switch(note[1]) {\n case '#':\n value += 1;\n break;\n case 'b':\n value -= 1;\n break;\n default:\n break;\n }\n return midiToFreq(value);\n }\n\n /**\n * List the SoundFile formats that you will include. LoadSound\n * will search your directory for these extensions, and will pick\n * a format that is compatable with the client's web browser.\n * Here is a free online file\n * converter.\n *\n * @method soundFormats\n * @param {String} [...formats] i.e. 'mp3', 'wav', 'ogg'\n * @example\n * \n * function preload() {\n * // set the global sound formats\n * soundFormats('mp3', 'ogg');\n *\n * // load either beatbox.mp3, or .ogg, depending on browser\n * mySound = loadSound('assets/beatbox.mp3');\n * }\n *\n * function setup() {\n * mySound.play();\n * }\n *
input[0]
\n\t * and input[1]
. If a value is passed into the constructor, \n\t * the it will be added to the input.\n\t * \n\t * @constructor\n\t * @extends {Tone.Signal}\n\t * @param {number=} value If no value is provided, Tone.Add will sum the first\n\t * and second inputs. \n\t * @example\n\t * var signal = new Tone.Signal(2);\n\t * var add = new Tone.Add(2);\n\t * signal.connect(add);\n\t * //the output of add equals 4\n\t * @example\n\t * //if constructed with no arguments\n\t * //it will add the first and second inputs\n\t * var add = new Tone.Add();\n\t * var sig0 = new Tone.Signal(3).connect(add, 0, 0);\n\t * var sig1 = new Tone.Signal(4).connect(add, 0, 1);\n\t * //the output of add equals 7. \n\t */\n\tTone.Add = function(value){\n\n\t\tthis.createInsOuts(2, 0);\n\n\t\t/**\n\t\t * the summing node\n\t\t * @type {GainNode}\n\t\t * @private\n\t\t */\n\t\tthis._sum = this.input[0] = this.input[1] = this.output = new Tone.Gain();\n\n\t\t/**\n\t\t * @private\n\t\t * @type {Tone.Signal}\n\t\t */\n\t\tthis._param = this.input[1] = new Tone.Signal(value);\n\n\t\tthis._param.connect(this._sum);\n\t};\n\n\tTone.extend(Tone.Add, Tone.Signal);\n\t\n\t/**\n\t * Clean up.\n\t * @returns {Tone.Add} this\n\t */\n\tTone.Add.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._sum.dispose();\n\t\tthis._sum = null;\n\t\tthis._param.dispose();\n\t\tthis._param = null;\n\t\treturn this;\n\t}; \n\n\treturn Tone.Add;\n});","define([\"Tone/core/Tone\", \"Tone/type/Time\", \"Tone/type/Frequency\", \"Tone/type/TransportTime\", \"Tone/core/Context\"],\nfunction (Tone) {\t\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tTYPES\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * Units which a value can take on.\n\t * @enum {String}\n\t */\n\tTone.Type = {\n\t\t/** \n\t\t * Default units\n\t\t * @typedef {Default}\n\t\t */\n\t\tDefault : \"number\",\n\t\t/**\n\t\t * Time can be described in a number of 
ways. Read more [Time](https://github.com/Tonejs/Tone.js/wiki/Time).\n\t\t *\n\t\t * connect
. \n\t *\n\t * @override\n\t * @param {AudioParam|AudioNode|Tone.Signal|Tone} node \n\t * @param {number} [outputNumber=0] The output number to connect from.\n\t * @param {number} [inputNumber=0] The input number to connect to.\n\t * @returns {Tone.TimelineSignal} this\n\t * @method\n\t */\n\tTone.TimelineSignal.prototype.connect = Tone.SignalBase.prototype.connect;\n\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tAUTOMATION CURVE CALCULATIONS\n\t//\tMIT License, copyright (c) 2014 Jordan Santell\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * Calculates the the value along the curve produced by setTargetAtTime\n\t * @private\n\t */\n\tTone.TimelineSignal.prototype._exponentialApproach = function (t0, v0, v1, timeConstant, t) {\n\t\treturn v1 + (v0 - v1) * Math.exp(-(t - t0) / timeConstant);\n\t};\n\n\t/**\n\t * Calculates the the value along the curve produced by linearRampToValueAtTime\n\t * @private\n\t */\n\tTone.TimelineSignal.prototype._linearInterpolate = function (t0, v0, t1, v1, t) {\n\t\treturn v0 + (v1 - v0) * ((t - t0) / (t1 - t0));\n\t};\n\n\t/**\n\t * Calculates the the value along the curve produced by exponentialRampToValueAtTime\n\t * @private\n\t */\n\tTone.TimelineSignal.prototype._exponentialInterpolate = function (t0, v0, t1, v1, t) {\n\t\tv0 = Math.max(this._minOutput, v0);\n\t\treturn v0 * Math.pow(v1 / v0, (t - t0) / (t1 - t0));\n\t};\n\n\t/**\n\t * Calculates the the value along the curve produced by setValueCurveAtTime\n\t * @private\n\t */\n\tTone.TimelineSignal.prototype._curveInterpolate = function (start, curve, duration, time) {\n\t\tvar len = curve.length;\n\t\t// If time is after duration, return the last curve value\n\t\tif (time >= start + duration) {\n\t\t\treturn curve[len - 1];\n\t\t} else if (time <= start){\n\t\t\treturn curve[0];\n\t\t} else {\n\t\t\tvar progress = (time - start) / duration;\n\t\t\tvar lowerIndex = Math.floor((len - 1) * 
progress);\n\t\t\tvar upperIndex = Math.ceil((len - 1) * progress);\n\t\t\tvar lowerVal = curve[lowerIndex];\n\t\t\tvar upperVal = curve[upperIndex];\n\t\t\tif (upperIndex === lowerIndex){\n\t\t\t\treturn lowerVal;\n\t\t\t} else {\n\t\t\t\treturn this._linearInterpolate(lowerIndex, lowerVal, upperIndex, upperVal, progress * (len - 1));\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * Clean up.\n\t * @return {Tone.TimelineSignal} this\n\t */\n\tTone.TimelineSignal.prototype.dispose = function(){\n\t\tTone.Signal.prototype.dispose.call(this);\n\t\tTone.Param.prototype.dispose.call(this);\n\t\tthis._events.dispose();\n\t\tthis._events = null;\n\t};\n\n\treturn Tone.TimelineSignal;\n});","'use strict';\n\ndefine(function (require) {\n var Effect = require('effect');\n\n /**\n * A p5.Filter uses a Web Audio Biquad Filter to filter\n * the frequency response of an input source. Subclasses\n * include:
\n *p5.LowPass
:\n * Allows frequencies below the cutoff frequency to pass through,\n * and attenuates frequencies above the cutoff.p5.HighPass
:\n * The opposite of a lowpass filter. p5.BandPass
:\n * Allows a range of frequencies to pass through and attenuates\n * the frequencies below and above this frequency range..res()
method controls either width of the\n * bandpass, or resonance of the low/highpass cutoff frequency.\n *\n * This class extends p5.Effect.\n * Methods amp(), chain(),\n * drywet(), connect(), and\n * disconnect() are available.\n *\n * @class p5.Filter\n * @extends p5.Effect\n * @constructor\n * @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass'\n * @example\n * \n * var fft, noise, filter;\n *\n * function setup() {\n * fill(255, 40, 255);\n *\n * filter = new p5.BandPass();\n *\n * noise = new p5.Noise();\n * // disconnect unfiltered noise,\n * // and connect to filter\n * noise.disconnect();\n * noise.connect(filter);\n * noise.start();\n *\n * fft = new p5.FFT();\n * }\n *\n * function draw() {\n * background(30);\n *\n * // set the BandPass frequency based on mouseX\n * var freq = map(mouseX, 0, width, 20, 10000);\n * filter.freq(freq);\n * // give the filter a narrow band (lower res = wider bandpass)\n * filter.res(50);\n *\n * // draw filtered spectrum\n * var spectrum = fft.analyze();\n * noStroke();\n * for (var i = 0; i < spectrum.length; i++) {\n * var x = map(i, 0, spectrum.length, 0, width);\n * var h = -height + map(spectrum[i], 0, 255, height, 0);\n * rect(x, height, width/spectrum.length, h);\n * }\n *\n * isMouseOverCanvas();\n * }\n *\n * function isMouseOverCanvas() {\n * var mX = mouseX, mY = mouseY;\n * if (mX > 0 && mX < width && mY < height && mY > 0) {\n * noise.amp(0.5, 0.2);\n * } else {\n * noise.amp(0, 0.2);\n * }\n * }\n *
new p5.LowPass()
Filter.\n * This is the same as creating a p5.Filter and then calling\n * its method setType('lowpass')
.\n * See p5.Filter for methods.\n *\n * @class p5.LowPass\n * @constructor\n * @extends p5.Filter\n */\n p5.LowPass = function() {\n p5.Filter.call(this, 'lowpass');\n };\n p5.LowPass.prototype = Object.create(p5.Filter.prototype);\n\n /**\n * Constructor: new p5.HighPass()
Filter.\n * This is the same as creating a p5.Filter and then calling\n * its method setType('highpass')
.\n * See p5.Filter for methods.\n *\n * @class p5.HighPass\n * @constructor\n * @extends p5.Filter\n */\n p5.HighPass = function() {\n p5.Filter.call(this, 'highpass');\n };\n p5.HighPass.prototype = Object.create(p5.Filter.prototype);\n\n /**\n * Constructor: new p5.BandPass()
Filter.\n * This is the same as creating a p5.Filter and then calling\n * its method setType('bandpass')
.\n * See p5.Filter for methods.\n *\n * @class p5.BandPass\n * @constructor\n * @extends p5.Filter\n */\n p5.BandPass = function() {\n p5.Filter.call(this, 'bandpass');\n };\n p5.BandPass.prototype = Object.create(p5.Filter.prototype);\n\n return p5.Filter;\n});\n","define([\"Tone/core/Tone\", \"Tone/signal/Add\", \"Tone/signal/Negate\", \"Tone/signal/Signal\", \"Tone/core/Gain\"], function(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class Subtract the signal connected to input[1]
from the signal connected \n\t * to input[0]
. If an argument is provided in the constructor, the \n\t * signals .value
will be subtracted from the incoming signal.\n\t *\n\t * @extends {Tone.Signal}\n\t * @constructor\n\t * @param {number=} value The value to subtract from the incoming signal. If the value\n\t * is omitted, it will subtract the second signal from the first.\n\t * @example\n\t * var sub = new Tone.Subtract(1);\n\t * var sig = new Tone.Signal(4).connect(sub);\n\t * //the output of sub is 3. \n\t * @example\n\t * var sub = new Tone.Subtract();\n\t * var sigA = new Tone.Signal(10);\n\t * var sigB = new Tone.Signal(2.5);\n\t * sigA.connect(sub, 0, 0);\n\t * sigB.connect(sub, 0, 1);\n\t * //output of sub is 7.5\n\t */\n\tTone.Subtract = function(value){\n\n\t\tthis.createInsOuts(2, 0);\n\n\t\t/**\n\t\t * the summing node\n\t\t * @type {GainNode}\n\t\t * @private\n\t\t */\n\t\tthis._sum = this.input[0] = this.output = new Tone.Gain();\n\n\t\t/**\n\t\t * negate the input of the second input before connecting it\n\t\t * to the summing node.\n\t\t * @type {Tone.Negate}\n\t\t * @private\n\t\t */\n\t\tthis._neg = new Tone.Negate();\n\n\t\t/**\n\t\t * the node where the value is set\n\t\t * @private\n\t\t * @type {Tone.Signal}\n\t\t */\n\t\tthis._param = this.input[1] = new Tone.Signal(value);\n\n\t\tthis._param.chain(this._neg, this._sum);\n\t};\n\n\tTone.extend(Tone.Subtract, Tone.Signal);\n\n\t/**\n\t * Clean up.\n\t * @returns {Tone.SignalBase} this\n\t */\n\tTone.Subtract.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._neg.dispose();\n\t\tthis._neg = null;\n\t\tthis._sum.disconnect();\n\t\tthis._sum = null;\n\t\tthis._param.dispose();\n\t\tthis._param = null;\n\t\treturn this;\n\t};\n\n\treturn Tone.Subtract;\n});","'use strict';\n\nglobal.TONE_SILENCE_VERSION_LOGGING = true;\n\ndefine(['startaudiocontext', 'Tone/core/Context', 'Tone/core/Tone'], function (StartAudioContext, Context, Tone) {\n // Create the Audio Context\n const audiocontext = new window.AudioContext();\n\n // Tone and p5.sound share the same audio context\n 
Tone.context.dispose();\n Tone.setContext(audiocontext);\n\n /**\n * Returns the Audio Context for this sketch. Useful for users\n * who would like to dig deeper into the Web Audio API\n * .
\n *\n *Some browsers require users to startAudioContext\n * with a user gesture, such as touchStarted in the example below.
\n *\n * @method getAudioContext\n * @return {Object} AudioContext for this sketch\n * @example\n *\n * function draw() {\n * background(255);\n * textAlign(CENTER);\n *\n * if (getAudioContext().state !== 'running') {\n * text('click to start audio', width/2, height/2);\n * } else {\n * text('audio is enabled', width/2, height/2);\n * }\n * }\n *\n * function touchStarted() {\n * if (getAudioContext().state !== 'running') {\n * getAudioContext().resume();\n * }\n * var synth = new p5.MonoSynth();\n * synth.play('A4', 0.5, 0, 0.2);\n * }\n *\n *
It is a good practice to give users control over starting audio playback.\n * This practice is enforced by Google Chrome's autoplay policy as of r70\n * (info), iOS Safari, and other browsers.\n *
\n *\n *\n * userStartAudio() starts the Audio Context on a user gesture. It utilizes\n * the StartAudioContext library by\n * Yotam Mann (MIT Licence, 2016). Read more at https://github.com/tambien/StartAudioContext.\n *
\n *\n *Starting the audio context on a user gesture can be as simple as userStartAudio()
.\n * Optional parameters let you decide on a specific element that will start the audio context,\n * and/or call a function once the audio context is started.
\n * function setup() {\n * var myDiv = createDiv('click to start audio');\n * myDiv.position(0, 0);\n *\n * var mySynth = new p5.MonoSynth();\n *\n * // This won't play until the context has started\n * mySynth.play('A6');\n *\n * // Start the audio context on a click/touch event\n * userStartAudio().then(function() {\n * myDiv.remove();\n * });\n * }\n *
connect
. \n\t *\n\t * @override\n\t * @param {AudioParam|AudioNode|Tone.Signal|Tone} node \n\t * @param {number} [outputNumber=0] The output number to connect from.\n\t * @param {number} [inputNumber=0] The input number to connect to.\n\t * @returns {Tone.SignalBase} this\n\t */\n\tTone.SignalBase.prototype.connect = function(node, outputNumber, inputNumber){\n\t\t//zero it out so that the signal can have full control\n\t\tif ((Tone.Signal && Tone.Signal === node.constructor) || \n\t\t\t\t(Tone.Param && Tone.Param === node.constructor) || \n\t\t\t\t(Tone.TimelineSignal && Tone.TimelineSignal === node.constructor)){\n\t\t\t//cancel changes\n\t\t\tnode._param.cancelScheduledValues(0);\n\t\t\t//reset the value\n\t\t\tnode._param.value = 0;\n\t\t\t//mark the value as overridden\n\t\t\tnode.overridden = true;\n\t\t} else if (node instanceof AudioParam){\n\t\t\tnode.cancelScheduledValues(0);\n\t\t\tnode.value = 0;\n\t\t} \n\t\tTone.prototype.connect.call(this, node, outputNumber, inputNumber);\n\t\treturn this;\n\t};\n\n\treturn Tone.SignalBase;\n});","define([\"Tone/core/Tone\", \"Tone/type/TimeBase\"], function (Tone) {\n\n\t/**\n\t * @class Tone.Time is a primitive type for encoding Time values. \n\t * Eventually all time values are evaluated to seconds\n\t * using the `eval` method. Tone.Time can be constructed\n\t * with or without the `new` keyword. Tone.Time can be passed\n\t * into the parameter of any method which takes time as an argument. 
\n\t * @constructor\n\t * @extends {Tone.TimeBase}\n\t * @param {String|Number} val The time value.\n\t * @param {String=} units The units of the value.\n\t * @example\n\t * var t = Tone.Time(\"4n\");//encodes a quarter note\n\t * t.mult(4); // multiply that value by 4\n\t * t.toNotation(); //returns \"1m\"\n\t */\n\tTone.Time = function(val, units){\n\t\tif (this instanceof Tone.Time){\n\n\t\t\t/**\n\t\t\t * If the current clock time should\n\t\t\t * be added to the output\n\t\t\t * @type {Boolean}\n\t\t\t * @private\n\t\t\t */\n\t\t\tthis._plusNow = false;\n\t\t\t\n\t\t\tTone.TimeBase.call(this, val, units);\n\n\t\t} else {\n\t\t\treturn new Tone.Time(val, units);\n\t\t}\n\t};\n\n\tTone.extend(Tone.Time, Tone.TimeBase);\n\n\t//clone the expressions so that \n\t//we can add more without modifying the original\n\tTone.Time.prototype._unaryExpressions = Object.create(Tone.TimeBase.prototype._unaryExpressions);\n\n\t/*\n\t * Adds an additional unary expression\n\t * which quantizes values to the next subdivision\n\t * @type {Object}\n\t * @private\n\t */\n\tTone.Time.prototype._unaryExpressions.quantize = {\n\t\tregexp : /^@/,\n\t\tmethod : function(rh){\n\t\t\treturn Tone.Transport.nextSubdivision(rh());\n\t\t}\n\t};\n\n\t/*\n\t * Adds an additional unary expression\n\t * which adds the current clock time.\n\t * @type {Object}\n\t * @private\n\t */\n\tTone.Time.prototype._unaryExpressions.now = {\n\t\tregexp : /^\\+/,\n\t\tmethod : function(lh){\n\t\t\tthis._plusNow = true;\n\t\t\treturn lh();\n\t\t}\n\t};\n\n\t/**\n\t * Quantize the time by the given subdivision. Optionally add a\n\t * percentage which will move the time value towards the ideal\n\t * quantized value by that percentage. 
\n\t * @param {Number|Time} val The subdivision to quantize to\n\t * @param {NormalRange} [percent=1] Move the time value\n\t * towards the quantized value by\n\t * a percentage.\n\t * @return {Tone.Time} this\n\t * @example\n\t * Tone.Time(21).quantize(2) //returns 22\n\t * Tone.Time(0.6).quantize(\"4n\", 0.5) //returns 0.55\n\t */\n\tTone.Time.prototype.quantize = function(subdiv, percent){\n\t\tpercent = this.defaultArg(percent, 1);\n\t\tthis._expr = function(expr, subdivision, percent){\n\t\t\texpr = expr();\n\t\t\tsubdivision = subdivision.toSeconds();\n\t\t\tvar multiple = Math.round(expr / subdivision);\n\t\t\tvar ideal = multiple * subdivision;\n\t\t\tvar diff = ideal - expr;\n\t\t\treturn expr + diff * percent;\n\t\t}.bind(this, this._expr, new this.constructor(subdiv), percent);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Adds the clock time to the time expression at the \n\t * moment of evaluation. \n\t * @return {Tone.Time} this\n\t */\n\tTone.Time.prototype.addNow = function(){\n\t\tthis._plusNow = true;\n\t\treturn this;\n\t};\n\n\t/**\n\t * @override\n\t * Override the default value return when no arguments are passed in.\n\t * The default value is 'now'\n\t * @private\n\t */\n\tTone.Time.prototype._defaultExpr = function(){\n\t\tthis._plusNow = true;\n\t\treturn this._noOp;\n\t};\n\n\t/**\n\t * Copies the value of time to this Time\n\t * @param {Tone.Time} time\n\t * @return {Time}\n\t */\n\tTone.Time.prototype.copy = function(time){\n\t\tTone.TimeBase.prototype.copy.call(this, time);\n\t\tthis._plusNow = time._plusNow;\n\t\treturn this;\n\t};\n\n\t//CONVERSIONS//////////////////////////////////////////////////////////////\n\n\t/**\n\t * Convert a Time to Notation. Values will be thresholded to the nearest 128th note. 
\n\t * @return {Notation} \n\t * @example\n\t * //if the Transport is at 120bpm:\n\t * Tone.Time(2).toNotation();//returns \"1m\"\n\t */\n\tTone.Time.prototype.toNotation = function(){\n\t\tvar time = this.toSeconds();\n\t\tvar testNotations = [\"1m\", \"2n\", \"4n\", \"8n\", \"16n\", \"32n\", \"64n\", \"128n\"];\n\t\tvar retNotation = this._toNotationHelper(time, testNotations);\n\t\t//try the same thing but with tripelets\n\t\tvar testTripletNotations = [\"1m\", \"2n\", \"2t\", \"4n\", \"4t\", \"8n\", \"8t\", \"16n\", \"16t\", \"32n\", \"32t\", \"64n\", \"64t\", \"128n\"];\n\t\tvar retTripletNotation = this._toNotationHelper(time, testTripletNotations);\n\t\t//choose the simpler expression of the two\n\t\tif (retTripletNotation.split(\"+\").length < retNotation.split(\"+\").length){\n\t\t\treturn retTripletNotation;\n\t\t} else {\n\t\t\treturn retNotation;\n\t\t}\n\t};\n\n\t/**\n\t * Helper method for Tone.toNotation\n\t * @param {Number} units \n\t * @param {Array} testNotations\n\t * @return {String}\n\t * @private\n\t */\n\tTone.Time.prototype._toNotationHelper = function(units, testNotations){\n\t\t//the threshold is the last value in the array\n\t\tvar threshold = this._notationToUnits(testNotations[testNotations.length - 1]);\n\t\tvar retNotation = \"\";\n\t\tfor (var i = 0; i < testNotations.length; i++){\n\t\t\tvar notationTime = this._notationToUnits(testNotations[i]);\n\t\t\t//account for floating point errors (i.e. 
round up if the value is 0.999999)\n\t\t\tvar multiple = units / notationTime;\n\t\t\tvar floatingPointError = 0.000001;\n\t\t\tif (1 - multiple % 1 < floatingPointError){\n\t\t\t\tmultiple += floatingPointError;\n\t\t\t}\n\t\t\tmultiple = Math.floor(multiple);\n\t\t\tif (multiple > 0){\n\t\t\t\tif (multiple === 1){\n\t\t\t\t\tretNotation += testNotations[i];\n\t\t\t\t} else {\n\t\t\t\t\tretNotation += multiple.toString() + \"*\" + testNotations[i];\n\t\t\t\t}\n\t\t\t\tunits -= multiple * notationTime;\n\t\t\t\tif (units < threshold){\n\t\t\t\t\tbreak;\n\t\t\t\t} else {\n\t\t\t\t\tretNotation += \" + \";\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif (retNotation === \"\"){\n\t\t\tretNotation = \"0\";\n\t\t}\n\t\treturn retNotation;\n\t};\n\n\t/**\n\t * Convert a notation value to the current units\n\t * @param {Notation} notation \n\t * @return {Number} \n\t * @private\n\t */\n\tTone.Time.prototype._notationToUnits = function(notation){\n\t\tvar primaryExprs = this._primaryExpressions;\n\t\tvar notationExprs = [primaryExprs.n, primaryExprs.t, primaryExprs.m];\n\t\tfor (var i = 0; i < notationExprs.length; i++){\n\t\t\tvar expr = notationExprs[i];\n\t\t\tvar match = notation.match(expr.regexp);\n\t\t\tif (match){\n\t\t\t\treturn expr.method.call(this, match[1]);\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * Return the time encoded as Bars:Beats:Sixteenths.\n\t * @return {BarsBeatsSixteenths}\n\t */\n\tTone.Time.prototype.toBarsBeatsSixteenths = function(){\n\t\tvar quarterTime = this._beatsToUnits(1);\n\t\tvar quarters = this.toSeconds() / quarterTime;\n\t\tvar measures = Math.floor(quarters / this._timeSignature());\n\t\tvar sixteenths = (quarters % 1) * 4;\n\t\tquarters = Math.floor(quarters) % this._timeSignature();\n\t\tsixteenths = sixteenths.toString();\n\t\tif (sixteenths.length > 3){\n\t\t\tsixteenths = parseFloat(sixteenths).toFixed(3);\n\t\t}\n\t\tvar progress = [measures, quarters, sixteenths];\n\t\treturn progress.join(\":\");\n\t};\n\n\t/**\n\t * Return the time in 
ticks.\n\t * @return {Ticks}\n\t */\n\tTone.Time.prototype.toTicks = function(){\n\t\tvar quarterTime = this._beatsToUnits(1);\n\t\tvar quarters = this.valueOf() / quarterTime;\n\t\treturn Math.floor(quarters * Tone.Transport.PPQ);\n\t};\n\n\t/**\n\t * Return the time in samples\n\t * @return {Samples} \n\t */\n\tTone.Time.prototype.toSamples = function(){\n\t\treturn this.toSeconds() * this.context.sampleRate;\n\t};\n\n\t/**\n\t * Return the time as a frequency value\n\t * @return {Frequency} \n\t * @example\n\t * Tone.Time(2).toFrequency(); //0.5\n\t */\n\tTone.Time.prototype.toFrequency = function(){\n\t\treturn 1/this.toSeconds();\n\t};\n\n\t/**\n\t * Return the time in seconds.\n\t * @return {Seconds} \n\t */\n\tTone.Time.prototype.toSeconds = function(){\n\t\treturn this.valueOf();\n\t};\n\n\t/**\n\t * Return the time in milliseconds.\n\t * @return {Milliseconds} \n\t */\n\tTone.Time.prototype.toMilliseconds = function(){\n\t\treturn this.toSeconds() * 1000;\n\t};\n\n\t/**\n\t * Return the time in seconds.\n\t * @return {Seconds} \n\t */\n\tTone.Time.prototype.valueOf = function(){\n\t\tvar val = this._expr();\n\t\treturn val + (this._plusNow?this.now():0);\n\t};\n\n\treturn Tone.Time;\n});","define([\"Tone/core/Tone\"], function (Tone) {\n\n\t/**\n\t * @class Tone.TimeBase is a flexible encoding of time\n\t * which can be evaluated to and from a string.\n\t * Parsing code modified from https://code.google.com/p/tapdigit/\n\t * Copyright 2011 2012 Ariya Hidayat, New BSD License\n\t * @extends {Tone}\n\t * @param {Time} val The time value as a number or string\n\t * @param {String=} units Unit values\n\t * @example\n\t * Tone.TimeBase(4, \"n\")\n\t * Tone.TimeBase(2, \"t\")\n\t * Tone.TimeBase(\"2t\").add(\"1m\")\n\t * Tone.TimeBase(\"2t + 1m\");\n\t */\n\tTone.TimeBase = function(val, units){\n\n\t\t//allows it to be constructed with or without 'new'\n\t\tif (this instanceof Tone.TimeBase) {\n\n\t\t\t/**\n\t\t\t * Any expressions parsed from the Time\n\t\t\t 
* @type {Array}\n\t\t\t * @private\n\t\t\t */\n\t\t\tthis._expr = this._noOp;\n\n\t\t\tif (val instanceof Tone.TimeBase){\n\t\t\t\tthis.copy(val);\n\t\t\t} else if (!this.isUndef(units) || this.isNumber(val)){\n\t\t\t\t//default units\n\t\t\t\tunits = this.defaultArg(units, this._defaultUnits);\n\t\t\t\tvar method = this._primaryExpressions[units].method;\n\t\t\t\tthis._expr = method.bind(this, val);\n\t\t\t} else if (this.isString(val)){\n\t\t\t\tthis.set(val);\n\t\t\t} else if (this.isUndef(val)){\n\t\t\t\t//default expression\n\t\t\t\tthis._expr = this._defaultExpr();\n\t\t\t}\n\t\t} else {\n\n\t\t\treturn new Tone.TimeBase(val, units);\n\t\t}\n\t};\n\n\tTone.extend(Tone.TimeBase);\n\n\t/**\n\t * Repalce the current time value with the value\n\t * given by the expression string.\n\t * @param {String} exprString\n\t * @return {Tone.TimeBase} this\n\t */\n\tTone.TimeBase.prototype.set = function(exprString){\n\t\tthis._expr = this._parseExprString(exprString);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Return a clone of the TimeBase object.\n\t * @return {Tone.TimeBase} The new cloned Tone.TimeBase\n\t */\n\tTone.TimeBase.prototype.clone = function(){\n\t\tvar instance = new this.constructor();\n\t\tinstance.copy(this);\n\t\treturn instance;\n\t};\n\n\t/**\n\t * Copies the value of time to this Time\n\t * @param {Tone.TimeBase} time\n\t * @return {TimeBase}\n\t */\n\tTone.TimeBase.prototype.copy = function(time){\n\t\tvar val = time._expr();\n\t\treturn this.set(val);\n\t};\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tABSTRACT SYNTAX TREE PARSER\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * All the primary expressions.\n\t * @private\n\t * @type {Object}\n\t */\n\tTone.TimeBase.prototype._primaryExpressions = {\n\t\t\"n\" : {\n\t\t\tregexp : /^(\\d+)n/i,\n\t\t\tmethod : function(value){\n\t\t\t\tvalue = parseInt(value);\n\t\t\t\tif (value === 1){\n\t\t\t\t\treturn 
this._beatsToUnits(this._timeSignature());\n\t\t\t\t} else {\n\t\t\t\t\treturn this._beatsToUnits(4 / value);\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"t\" : {\n\t\t\tregexp : /^(\\d+)t/i,\n\t\t\tmethod : function(value){\n\t\t\t\tvalue = parseInt(value);\n\t\t\t\treturn this._beatsToUnits(8 / (parseInt(value) * 3));\n\t\t\t}\n\t\t},\n\t\t\"m\" : {\n\t\t\tregexp : /^(\\d+)m/i,\n\t\t\tmethod : function(value){\n\t\t\t\treturn this._beatsToUnits(parseInt(value) * this._timeSignature());\n\t\t\t}\n\t\t},\n\t\t\"i\" : {\n\t\t\tregexp : /^(\\d+)i/i,\n\t\t\tmethod : function(value){\n\t\t\t\treturn this._ticksToUnits(parseInt(value));\n\t\t\t}\n\t\t},\n\t\t\"hz\" : {\n\t\t\tregexp : /^(\\d+(?:\\.\\d+)?)hz/i,\n\t\t\tmethod : function(value){\n\t\t\t\treturn this._frequencyToUnits(parseFloat(value));\n\t\t\t}\n\t\t},\n\t\t\"tr\" : {\n\t\t\tregexp : /^(\\d+(?:\\.\\d+)?):(\\d+(?:\\.\\d+)?):?(\\d+(?:\\.\\d+)?)?/,\n\t\t\tmethod : function(m, q, s){\n\t\t\t\tvar total = 0;\n\t\t\t\tif (m && m !== \"0\"){\n\t\t\t\t\ttotal += this._beatsToUnits(this._timeSignature() * parseFloat(m));\n\t\t\t\t}\n\t\t\t\tif (q && q !== \"0\"){\n\t\t\t\t\ttotal += this._beatsToUnits(parseFloat(q));\n\t\t\t\t}\n\t\t\t\tif (s && s !== \"0\"){\n\t\t\t\t\ttotal += this._beatsToUnits(parseFloat(s) / 4);\n\t\t\t\t}\n\t\t\t\treturn total;\n\t\t\t}\n\t\t},\n\t\t\"s\" : {\n\t\t\tregexp : /^(\\d+(?:\\.\\d+)?s)/,\n\t\t\tmethod : function(value){\n\t\t\t\treturn this._secondsToUnits(parseFloat(value));\n\t\t\t}\n\t\t},\n\t\t\"samples\" : {\n\t\t\tregexp : /^(\\d+)samples/,\n\t\t\tmethod : function(value){\n\t\t\t\treturn parseInt(value) / this.context.sampleRate;\n\t\t\t}\n\t\t},\n\t\t\"default\" : {\n\t\t\tregexp : /^(\\d+(?:\\.\\d+)?)/,\n\t\t\tmethod : function(value){\n\t\t\t\treturn this._primaryExpressions[this._defaultUnits].method.call(this, value);\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * All the binary expressions that TimeBase can accept.\n\t * @private\n\t * @type {Object}\n\t 
*/\n\tTone.TimeBase.prototype._binaryExpressions = {\n\t\t\"+\" : {\n\t\t\tregexp : /^\\+/,\n\t\t\tprecedence : 2,\n\t\t\tmethod : function(lh, rh){\n\t\t\t\treturn lh() + rh();\n\t\t\t}\n\t\t},\n\t\t\"-\" : {\n\t\t\tregexp : /^\\-/,\n\t\t\tprecedence : 2,\n\t\t\tmethod : function(lh, rh){\n\t\t\t\treturn lh() - rh();\n\t\t\t}\n\t\t},\n\t\t\"*\" : {\n\t\t\tregexp : /^\\*/,\n\t\t\tprecedence : 1,\n\t\t\tmethod : function(lh, rh){\n\t\t\t\treturn lh() * rh();\n\t\t\t}\n\t\t},\n\t\t\"/\" : {\n\t\t\tregexp : /^\\//,\n\t\t\tprecedence : 1,\n\t\t\tmethod : function(lh, rh){\n\t\t\t\treturn lh() / rh();\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * All the unary expressions.\n\t * @private\n\t * @type {Object}\n\t */\n\tTone.TimeBase.prototype._unaryExpressions = {\n\t\t\"neg\" : {\n\t\t\tregexp : /^\\-/,\n\t\t\tmethod : function(lh){\n\t\t\t\treturn -lh();\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * Syntactic glue which holds expressions together\n\t * @private\n\t * @type {Object}\n\t */\n\tTone.TimeBase.prototype._syntaxGlue = {\n\t\t\"(\" : {\n\t\t\tregexp : /^\\(/\n\t\t},\n\t\t\")\" : {\n\t\t\tregexp : /^\\)/\n\t\t}\n\t};\n\n\t/**\n\t * tokenize the expression based on the Expressions object\n\t * @param {string} expr \n\t * @return {Object} returns two methods on the tokenized list, next and peek\n\t * @private\n\t */\n\tTone.TimeBase.prototype._tokenize = function(expr){\n\t\tvar position = -1;\n\t\tvar tokens = [];\n\n\t\twhile(expr.length > 0){\n\t\t\texpr = expr.trim();\n\t\t\tvar token = getNextToken(expr, this);\n\t\t\ttokens.push(token);\n\t\t\texpr = expr.substr(token.value.length);\n\t\t}\n\n\t\tfunction getNextToken(expr, context){\n\t\t\tvar expressions = [\"_binaryExpressions\", \"_unaryExpressions\", \"_primaryExpressions\", \"_syntaxGlue\"];\n\t\t\tfor (var i = 0; i < expressions.length; i++){\n\t\t\t\tvar group = context[expressions[i]];\n\t\t\t\tfor (var opName in group){\n\t\t\t\t\tvar op = group[opName];\n\t\t\t\t\tvar reg = op.regexp;\n\t\t\t\t\tvar match = 
expr.match(reg);\n\t\t\t\t\tif (match !== null){\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tmethod : op.method,\n\t\t\t\t\t\t\tprecedence : op.precedence,\n\t\t\t\t\t\t\tregexp : op.regexp,\n\t\t\t\t\t\t\tvalue : match[0],\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tthrow new SyntaxError(\"Tone.TimeBase: Unexpected token \"+expr);\n\t\t}\n\n\t\treturn {\n\t\t\tnext : function(){\n\t\t\t\treturn tokens[++position];\n\t\t\t},\n\t\t\tpeek : function(){\n\t\t\t\treturn tokens[position + 1];\n\t\t\t}\n\t\t};\n\t};\n\n\t/**\n\t * Given a token, find the value within the groupName\n\t * @param {Object} token\n\t * @param {String} groupName\n\t * @param {Number} precedence\n\t * @private\n\t */\n\tTone.TimeBase.prototype._matchGroup = function(token, group, prec) {\n\t\tvar ret = false;\n\t\tif (!this.isUndef(token)){\n\t\t\tfor (var opName in group){\n\t\t\t\tvar op = group[opName];\n\t\t\t\tif (op.regexp.test(token.value)){\n\t\t\t\t\tif (!this.isUndef(prec)){\n\t\t\t\t\t\tif(op.precedence === prec){\t\n\t\t\t\t\t\t\treturn op;\n\t\t\t\t\t\t}\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn op;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn ret;\n\t};\n\n\t/**\n\t * Match a binary expression given the token and the precedence\n\t * @param {Lexer} lexer\n\t * @param {Number} precedence\n\t * @private\n\t */\n\tTone.TimeBase.prototype._parseBinary = function(lexer, precedence){\n\t\tif (this.isUndef(precedence)){\n\t\t\tprecedence = 2;\n\t\t}\n\t\tvar expr;\n\t\tif (precedence < 0){\n\t\t\texpr = this._parseUnary(lexer);\n\t\t} else {\n\t\t\texpr = this._parseBinary(lexer, precedence - 1);\n\t\t}\n\t\tvar token = lexer.peek();\n\t\twhile (token && this._matchGroup(token, this._binaryExpressions, precedence)){\n\t\t\ttoken = lexer.next();\n\t\t\texpr = token.method.bind(this, expr, this._parseBinary(lexer, precedence - 1));\n\t\t\ttoken = lexer.peek();\n\t\t}\n\t\treturn expr;\n\t};\n\n\t/**\n\t * Match a unary expression.\n\t * @param {Lexer} lexer\n\t * @private\n\t 
*/\n\tTone.TimeBase.prototype._parseUnary = function(lexer){\n\t\tvar token, expr;\n\t\ttoken = lexer.peek();\n\t\tvar op = this._matchGroup(token, this._unaryExpressions);\n\t\tif (op) {\n\t\t\ttoken = lexer.next();\n\t\t\texpr = this._parseUnary(lexer);\n\t\t\treturn op.method.bind(this, expr);\n\t\t}\n\t\treturn this._parsePrimary(lexer);\n\t};\n\n\t/**\n\t * Match a primary expression (a value).\n\t * @param {Lexer} lexer\n\t * @private\n\t */\n\tTone.TimeBase.prototype._parsePrimary = function(lexer){\n\t\tvar token, expr;\n\t\ttoken = lexer.peek();\n\t\tif (this.isUndef(token)) {\n\t\t\tthrow new SyntaxError(\"Tone.TimeBase: Unexpected end of expression\");\n\t\t}\n\t\tif (this._matchGroup(token, this._primaryExpressions)) {\n\t\t\ttoken = lexer.next();\n\t\t\tvar matching = token.value.match(token.regexp);\n\t\t\treturn token.method.bind(this, matching[1], matching[2], matching[3]);\n\t\t}\n\t\tif (token && token.value === \"(\"){\n\t\t\tlexer.next();\n\t\t\texpr = this._parseBinary(lexer);\n\t\t\ttoken = lexer.next();\n\t\t\tif (!(token && token.value === \")\")) {\n\t\t\t\tthrow new SyntaxError(\"Expected )\");\n\t\t\t}\n\t\t\treturn expr;\n\t\t}\n\t\tthrow new SyntaxError(\"Tone.TimeBase: Cannot process token \" + token.value);\n\t};\n\n\t/**\n\t * Recursively parse the string expression into a syntax tree.\n\t * @param {string} expr \n\t * @return {Function} the bound method to be evaluated later\n\t * @private\n\t */\n\tTone.TimeBase.prototype._parseExprString = function(exprString){\n\t\tif (!this.isString(exprString)){\n\t\t\texprString = exprString.toString();\n\t\t}\n\t\tvar lexer = this._tokenize(exprString);\n\t\tvar tree = this._parseBinary(lexer);\n\t\treturn tree;\n\t};\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tDEFAULTS\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * The initial expression value\n\t * @return {Number} The initial value 0\n\t * 
@private\n\t */\n\tTone.TimeBase.prototype._noOp = function(){\n\t\treturn 0;\n\t};\n\n\t/**\n\t * The default expression value if no arguments are given\n\t * @private\n\t */\n\tTone.TimeBase.prototype._defaultExpr = function(){\n\t\treturn this._noOp;\n\t};\n\n\t/**\n\t * The default units if none are given.\n\t * @private\n\t */\n\tTone.TimeBase.prototype._defaultUnits = \"s\";\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tUNIT CONVERSIONS\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * Returns the value of a frequency in the current units\n\t * @param {Frequency} freq\n\t * @return {Number}\n\t * @private\n\t */\n\tTone.TimeBase.prototype._frequencyToUnits = function(freq){\n\t\treturn 1/freq;\n\t};\n\n\t/**\n\t * Return the value of the beats in the current units\n\t * @param {Number} beats\n\t * @return {Number}\n\t * @private\n\t */\n\tTone.TimeBase.prototype._beatsToUnits = function(beats){\n\t\treturn (60 / Tone.Transport.bpm.value) * beats;\n\t};\n\n\t/**\n\t * Returns the value of a second in the current units\n\t * @param {Seconds} seconds\n\t * @return {Number}\n\t * @private\n\t */\n\tTone.TimeBase.prototype._secondsToUnits = function(seconds){\n\t\treturn seconds;\n\t};\n\n\t/**\n\t * Returns the value of a tick in the current time units\n\t * @param {Ticks} ticks\n\t * @return {Number}\n\t * @private\n\t */\n\tTone.TimeBase.prototype._ticksToUnits = function(ticks){\n\t\treturn ticks * (this._beatsToUnits(1) / Tone.Transport.PPQ);\n\t};\n\n\t/**\n\t * Return the time signature.\n\t * @return {Number}\n\t * @private\n\t */\n\tTone.TimeBase.prototype._timeSignature = function(){\n\t\treturn Tone.Transport.timeSignature;\n\t};\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tEXPRESSIONS\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * Push an expression onto the expression list\n\t * 
@param {Time} val\n\t * @param {String} type\n\t * @param {String} units\n\t * @return {Tone.TimeBase} \n\t * @private\n\t */\n\tTone.TimeBase.prototype._pushExpr = function(val, name, units){\n\t\t//create the expression\n\t\tif (!(val instanceof Tone.TimeBase)){\n\t\t\tval = new this.constructor(val, units);\n\t\t}\n\t\tthis._expr = this._binaryExpressions[name].method.bind(this, this._expr, val._expr);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Add to the current value.\n\t * @param {Time} val The value to add\n\t * @param {String=} units Optional units to use with the value.\n\t * @return {Tone.TimeBase} this\n\t * @example\n\t * Tone.TimeBase(\"2m\").add(\"1m\"); //\"3m\"\n\t */\n\tTone.TimeBase.prototype.add = function(val, units){\n\t\treturn this._pushExpr(val, \"+\", units);\n\t};\n\n\t/**\n\t * Subtract the value from the current time.\n\t * @param {Time} val The value to subtract\n\t * @param {String=} units Optional units to use with the value.\n\t * @return {Tone.TimeBase} this\n\t * @example\n\t * Tone.TimeBase(\"2m\").sub(\"1m\"); //\"1m\"\n\t */\n\tTone.TimeBase.prototype.sub = function(val, units){\n\t\treturn this._pushExpr(val, \"-\", units);\n\t};\n\n\t/**\n\t * Multiply the current value by the given time.\n\t * @param {Time} val The value to multiply\n\t * @param {String=} units Optional units to use with the value.\n\t * @return {Tone.TimeBase} this\n\t * @example\n\t * Tone.TimeBase(\"2m\").mult(\"2\"); //\"4m\"\n\t */\n\tTone.TimeBase.prototype.mult = function(val, units){\n\t\treturn this._pushExpr(val, \"*\", units);\n\t};\n\n\t/**\n\t * Divide the current value by the given time.\n\t * @param {Time} val The value to divide by\n\t * @param {String=} units Optional units to use with the value.\n\t * @return {Tone.TimeBase} this\n\t * @example\n\t * Tone.TimeBase(\"2m\").div(2); //\"1m\"\n\t */\n\tTone.TimeBase.prototype.div = function(val, units){\n\t\treturn this._pushExpr(val, \"/\", units);\n\t};\n\n\t/**\n\t * Evaluate the time value. 
Returns the time\n\t * in seconds.\n\t * @return {Seconds} \n\t */\n\tTone.TimeBase.prototype.valueOf = function(){\n\t\treturn this._expr();\n\t};\n\n\t/**\n\t * Clean up\n\t * @return {Tone.TimeBase} this\n\t */\n\tTone.TimeBase.prototype.dispose = function(){\n\t\tthis._expr = null;\n\t};\n\n\treturn Tone.TimeBase;\n});","define([\"Tone/core/Tone\", \"Tone/type/Type\"], function(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class Tone.Param wraps the native Web Audio's AudioParam to provide\n\t * additional unit conversion functionality. It also\n\t * serves as a base-class for classes which have a single,\n\t * automatable parameter. \n\t * @extends {Tone}\n\t * @param {AudioParam} param The parameter to wrap.\n\t * @param {Tone.Type} units The units of the audio param.\n\t * @param {Boolean} convert If the param should be converted.\n\t */\n\tTone.Param = function(){\n\n\t\tvar options = this.optionsObject(arguments, [\"param\", \"units\", \"convert\"], Tone.Param.defaults);\n\n\t\t/**\n\t\t * The native parameter to control\n\t\t * @type {AudioParam}\n\t\t * @private\n\t\t */\n\t\tthis._param = this.input = options.param;\n\n\t\t/**\n\t\t * The units of the parameter\n\t\t * @type {Tone.Type}\n\t\t */\n\t\tthis.units = options.units;\n\n\t\t/**\n\t\t * If the value should be converted or not\n\t\t * @type {Boolean}\n\t\t */\n\t\tthis.convert = options.convert;\n\n\t\t/**\n\t\t * True if the signal value is being overridden by \n\t\t * a connected signal.\n\t\t * @readOnly\n\t\t * @type {boolean}\n\t\t * @private\n\t\t */\n\t\tthis.overridden = false;\n\n\t\t/**\n\t\t * If there is an LFO, this is where it is held.\n\t\t * @type {Tone.LFO}\n\t\t * @private\n\t\t */\n\t\tthis._lfo = null;\n\n\t\tif (this.isObject(options.lfo)){\n\t\t\tthis.value = options.lfo;\n\t\t} else if (!this.isUndef(options.value)){\n\t\t\tthis.value = options.value;\n\t\t}\n\t};\n\n\tTone.extend(Tone.Param);\n\t\n\t/**\n\t * Defaults\n\t * @type {Object}\n\t * @const\n\t 
*/\n\tTone.Param.defaults = {\n\t\t\"units\" : Tone.Type.Default,\n\t\t\"convert\" : true,\n\t\t\"param\" : undefined\n\t};\n\n\t/**\n\t * The current value of the parameter. \n\t * @memberOf Tone.Param#\n\t * @type {Number}\n\t * @name value\n\t */\n\tObject.defineProperty(Tone.Param.prototype, \"value\", {\n\t\tget : function(){\n\t\t\treturn this._toUnits(this._param.value);\n\t\t},\n\t\tset : function(value){\n\t\t\tif (this.isObject(value)){\n\t\t\t\t//throw an error if the LFO needs to be included\n\t\t\t\tif (this.isUndef(Tone.LFO)){\n\t\t\t\t\tthrow new Error(\"Include 'Tone.LFO' to use an LFO as a Param value.\");\n\t\t\t\t}\n\t\t\t\t//remove the old one\n\t\t\t\tif (this._lfo){\n\t\t\t\t\tthis._lfo.dispose();\n\t\t\t\t}\n\t\t\t\tthis._lfo = new Tone.LFO(value).start();\n\t\t\t\tthis._lfo.connect(this.input);\n\t\t\t} else {\n\t\t\t\tvar convertedVal = this._fromUnits(value);\n\t\t\t\tthis._param.cancelScheduledValues(0);\n\t\t\t\tthis._param.value = convertedVal;\n\t\t\t}\n\t\t}\n\t});\n\n\t/**\n\t * Convert the given value from the type specified by Tone.Param.units\n\t * into the destination value (such as Gain or Frequency).\n\t * @private\n\t * @param {*} val the value to convert\n\t * @return {number} the number which the value should be set to\n\t */\n\tTone.Param.prototype._fromUnits = function(val){\n\t\tif (this.convert || this.isUndef(this.convert)){\n\t\t\tswitch(this.units){\n\t\t\t\tcase Tone.Type.Time: \n\t\t\t\t\treturn this.toSeconds(val);\n\t\t\t\tcase Tone.Type.Frequency: \n\t\t\t\t\treturn this.toFrequency(val);\n\t\t\t\tcase Tone.Type.Decibels: \n\t\t\t\t\treturn this.dbToGain(val);\n\t\t\t\tcase Tone.Type.NormalRange: \n\t\t\t\t\treturn Math.min(Math.max(val, 0), 1);\n\t\t\t\tcase Tone.Type.AudioRange: \n\t\t\t\t\treturn Math.min(Math.max(val, -1), 1);\n\t\t\t\tcase Tone.Type.Positive: \n\t\t\t\t\treturn Math.max(val, 0);\n\t\t\t\tdefault:\n\t\t\t\t\treturn val;\n\t\t\t}\n\t\t} else {\n\t\t\treturn val;\n\t\t}\n\t};\n\n\t/**\n\t * 
Convert the parameters value into the units specified by Tone.Param.units.\n\t * @private\n\t * @param {number} val the value to convert\n\t * @return {number}\n\t */\n\tTone.Param.prototype._toUnits = function(val){\n\t\tif (this.convert || this.isUndef(this.convert)){\n\t\t\tswitch(this.units){\n\t\t\t\tcase Tone.Type.Decibels: \n\t\t\t\t\treturn this.gainToDb(val);\n\t\t\t\tdefault:\n\t\t\t\t\treturn val;\n\t\t\t}\n\t\t} else {\n\t\t\treturn val;\n\t\t}\n\t};\n\n\t/**\n\t * the minimum output value\n\t * @type {Number}\n\t * @private\n\t */\n\tTone.Param.prototype._minOutput = 0.00001;\n\n\t/**\n\t * Schedules a parameter value change at the given time.\n\t * @param {*}\tvalue The value to set the signal.\n\t * @param {Time} time The time when the change should occur.\n\t * @returns {Tone.Param} this\n\t * @example\n\t * //set the frequency to \"G4\" in exactly 1 second from now. \n\t * freq.setValueAtTime(\"G4\", \"+1\");\n\t */\n\tTone.Param.prototype.setValueAtTime = function(value, time){\n\t\tvalue = this._fromUnits(value);\n\t\ttime = this.toSeconds(time);\n\t\tif (time <= this.now() + this.blockTime){\n\t\t\tthis._param.value = value;\n\t\t} else {\n\t\t\tthis._param.setValueAtTime(value, time);\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Creates a schedule point with the current value at the current time.\n\t * This is useful for creating an automation anchor point in order to \n\t * schedule changes from the current value. \n\t *\n\t * @param {number=} now (Optionally) pass the now value in. 
\n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.setRampPoint = function(now){\n\t\tnow = this.defaultArg(now, this.now());\n\t\tvar currentVal = this._param.value;\n\t\t// exponentialRampToValueAt cannot ever ramp from or to 0\n\t\t// More info: https://bugzilla.mozilla.org/show_bug.cgi?id=1125600#c2\n\t\tif (currentVal === 0){\n\t\t\tcurrentVal = this._minOutput;\n\t\t}\n\t\tthis._param.setValueAtTime(currentVal, now);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Schedules a linear continuous change in parameter value from the \n\t * previous scheduled parameter value to the given value.\n\t * \n\t * @param {number} value \n\t * @param {Time} endTime \n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.linearRampToValueAtTime = function(value, endTime){\n\t\tvalue = this._fromUnits(value);\n\t\tthis._param.linearRampToValueAtTime(value, this.toSeconds(endTime));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Schedules an exponential continuous change in parameter value from \n\t * the previous scheduled parameter value to the given value.\n\t * \n\t * @param {number} value \n\t * @param {Time} endTime \n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.exponentialRampToValueAtTime = function(value, endTime){\n\t\tvalue = this._fromUnits(value);\n\t\tvalue = Math.max(this._minOutput, value);\n\t\tthis._param.exponentialRampToValueAtTime(value, this.toSeconds(endTime));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Schedules an exponential continuous change in parameter value from \n\t * the current time and current value to the given value over the \n\t * duration of the rampTime.\n\t * \n\t * @param {number} value The value to ramp to.\n\t * @param {Time} rampTime the time that it takes the \n\t * value to ramp from it's current value\n\t * @param {Time}\t[startTime=now] \tWhen the ramp should start. \n\t * @returns {Tone.Param} this\n\t * @example\n\t * //exponentially ramp to the value 2 over 4 seconds. 
\n\t * signal.exponentialRampToValue(2, 4);\n\t */\n\tTone.Param.prototype.exponentialRampToValue = function(value, rampTime, startTime){\n\t\tstartTime = this.toSeconds(startTime);\n\t\tthis.setRampPoint(startTime);\n\t\tthis.exponentialRampToValueAtTime(value, startTime + this.toSeconds(rampTime));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Schedules an linear continuous change in parameter value from \n\t * the current time and current value to the given value over the \n\t * duration of the rampTime.\n\t * \n\t * @param {number} value The value to ramp to.\n\t * @param {Time} rampTime the time that it takes the \n\t * value to ramp from it's current value\n\t * @param {Time}\t[startTime=now] \tWhen the ramp should start. \n\t * @returns {Tone.Param} this\n\t * @example\n\t * //linearly ramp to the value 4 over 3 seconds. \n\t * signal.linearRampToValue(4, 3);\n\t */\n\tTone.Param.prototype.linearRampToValue = function(value, rampTime, startTime){\n\t\tstartTime = this.toSeconds(startTime);\n\t\tthis.setRampPoint(startTime);\n\t\tthis.linearRampToValueAtTime(value, startTime + this.toSeconds(rampTime));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Start exponentially approaching the target value at the given time with\n\t * a rate having the given time constant.\n\t * @param {number} value \n\t * @param {Time} startTime \n\t * @param {number} timeConstant \n\t * @returns {Tone.Param} this \n\t */\n\tTone.Param.prototype.setTargetAtTime = function(value, startTime, timeConstant){\n\t\tvalue = this._fromUnits(value);\n\t\t// The value will never be able to approach without timeConstant > 0.\n\t\t// http://www.w3.org/TR/webaudio/#dfn-setTargetAtTime, where the equation\n\t\t// is described. 
0 results in a division by 0.\n\t\tvalue = Math.max(this._minOutput, value);\n\t\ttimeConstant = Math.max(this._minOutput, timeConstant);\n\t\tthis._param.setTargetAtTime(value, this.toSeconds(startTime), timeConstant);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Sets an array of arbitrary parameter values starting at the given time\n\t * for the given duration.\n\t * \t\n\t * @param {Array} values \n\t * @param {Time} startTime \n\t * @param {Time} duration \n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.setValueCurveAtTime = function(values, startTime, duration){\n\t\tfor (var i = 0; i < values.length; i++){\n\t\t\tvalues[i] = this._fromUnits(values[i]);\n\t\t}\n\t\tthis._param.setValueCurveAtTime(values, this.toSeconds(startTime), this.toSeconds(duration));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Cancels all scheduled parameter changes with times greater than or \n\t * equal to startTime.\n\t * \n\t * @param {Time} startTime\n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.cancelScheduledValues = function(startTime){\n\t\tthis._param.cancelScheduledValues(this.toSeconds(startTime));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Ramps to the given value over the duration of the rampTime. \n\t * Automatically selects the best ramp type (exponential or linear)\n\t * depending on the `units` of the signal\n\t * \n\t * @param {number} value \n\t * @param {Time} rampTime \tThe time that it takes the \n\t * value to ramp from it's current value\n\t * @param {Time}\t[startTime=now] \tWhen the ramp should start. 
\n\t * @returns {Tone.Param} this\n\t * @example\n\t * //ramp to the value either linearly or exponentially \n\t * //depending on the \"units\" value of the signal\n\t * signal.rampTo(0, 10);\n\t * @example\n\t * //schedule it to ramp starting at a specific time\n\t * signal.rampTo(0, 10, 5)\n\t */\n\tTone.Param.prototype.rampTo = function(value, rampTime, startTime){\n\t\trampTime = this.defaultArg(rampTime, 0);\n\t\tif (this.units === Tone.Type.Frequency || this.units === Tone.Type.BPM || this.units === Tone.Type.Decibels){\n\t\t\tthis.exponentialRampToValue(value, rampTime, startTime);\n\t\t} else {\n\t\t\tthis.linearRampToValue(value, rampTime, startTime);\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * The LFO created by the signal instance. If none\n\t * was created, this is null.\n\t * @type {Tone.LFO}\n\t * @readOnly\n\t * @memberOf Tone.Param#\n\t * @name lfo\n\t */\n\tObject.defineProperty(Tone.Param.prototype, \"lfo\", {\n\t\tget : function(){\n\t\t\treturn this._lfo;\n\t\t}\n\t});\n\n\t/**\n\t * Clean up\n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._param = null;\n\t\tif (this._lfo){\n\t\t\tthis._lfo.dispose();\n\t\t\tthis._lfo = null;\n\t\t}\n\t\treturn this;\n\t};\n\n\treturn Tone.Param;\n});","'use strict';\n\ndefine(function (require) {\n var p5sound = require('master');\n\n var Add = require('Tone/signal/Add');\n var Mult = require('Tone/signal/Multiply');\n var Scale = require('Tone/signal/Scale');\n\n /**\n * Creates a signal that oscillates between -1.0 and 1.0.\n * By default, the oscillation takes the form of a sinusoidal\n * shape ('sine'). Additional types include 'triangle',\n * 'sawtooth' and 'square'. The frequency defaults to\n * 440 oscillations per second (440Hz, equal to the pitch of an\n * 'A' note).
\n *\n *Set the type of oscillation with setType(), or by instantiating a\n * specific oscillator: p5.SinOsc, p5.TriOsc, p5.SqrOsc, or p5.SawOsc.\n *
\n *\n * @class p5.Oscillator\n * @constructor\n * @param {Number} [freq] frequency defaults to 440Hz\n * @param {String} [type] type of oscillator. Options:\n * 'sine' (default), 'triangle',\n * 'sawtooth', 'square'\n * @example\n *\n * var osc;\n * var playing = false;\n *\n * function setup() {\n * backgroundColor = color(255,0,255);\n * textAlign(CENTER);\n *\n * osc = new p5.Oscillator();\n * osc.setType('sine');\n * osc.freq(240);\n * osc.amp(0);\n * osc.start();\n * }\n *\n * function draw() {\n * background(backgroundColor)\n * text('click to play', width/2, height/2);\n * }\n *\n * function mouseClicked() {\n * if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) {\n * if (!playing) {\n * // ramp amplitude to 0.5 over 0.05 seconds\n * osc.amp(0.5, 0.05);\n * playing = true;\n * backgroundColor = color(0,255,255);\n * } else {\n * // ramp amplitude to 0 over 0.5 seconds\n * osc.amp(0, 0.5);\n * playing = false;\n * backgroundColor = color(255,0,255);\n * }\n * }\n * }\n *
\n * var osc = new p5.Oscillator(300);\n * osc.start();\n * osc.freq(40, 10);\n *
new p5.SinOsc()
.\n * This creates a Sine Wave Oscillator and is\n * equivalent to new p5.Oscillator('sine')\n *
or creating a p5.Oscillator and then calling\n * its method setType('sine')
.\n * See p5.Oscillator for methods.\n *\n * @class p5.SinOsc\n * @constructor\n * @extends p5.Oscillator\n * @param {Number} [freq] Set the frequency\n */\n p5.SinOsc = function(freq) {\n p5.Oscillator.call(this, freq, 'sine');\n };\n\n p5.SinOsc.prototype = Object.create(p5.Oscillator.prototype);\n\n /**\n * Constructor: new p5.TriOsc()
.\n * This creates a Triangle Wave Oscillator and is\n * equivalent to new p5.Oscillator('triangle')\n *
or creating a p5.Oscillator and then calling\n * its method setType('triangle')
.\n * See p5.Oscillator for methods.\n *\n * @class p5.TriOsc\n * @constructor\n * @extends p5.Oscillator\n * @param {Number} [freq] Set the frequency\n */\n p5.TriOsc = function(freq) {\n p5.Oscillator.call(this, freq, 'triangle');\n };\n\n p5.TriOsc.prototype = Object.create(p5.Oscillator.prototype);\n\n /**\n * Constructor: new p5.SawOsc()
.\n * This creates a SawTooth Wave Oscillator and is\n * equivalent to new p5.Oscillator('sawtooth')\n *
or creating a p5.Oscillator and then calling\n * its method setType('sawtooth')
.\n * See p5.Oscillator for methods.\n *\n * @class p5.SawOsc\n * @constructor\n * @extends p5.Oscillator\n * @param {Number} [freq] Set the frequency\n */\n p5.SawOsc = function(freq) {\n p5.Oscillator.call(this, freq, 'sawtooth');\n };\n\n p5.SawOsc.prototype = Object.create(p5.Oscillator.prototype);\n\n /**\n * Constructor: new p5.SqrOsc()
.\n * This creates a Square Wave Oscillator and is\n * equivalent to new p5.Oscillator('square')\n *
or creating a p5.Oscillator and then calling\n * its method setType('square')
.\n * See p5.Oscillator for methods.\n *\n * @class p5.SqrOsc\n * @constructor\n * @extends p5.Oscillator\n * @param {Number} [freq] Set the frequency\n */\n p5.SqrOsc = function(freq) {\n p5.Oscillator.call(this, freq, 'square');\n };\n\n p5.SqrOsc.prototype = Object.create(p5.Oscillator.prototype);\n\n});\n","define([\"Tone/core/Tone\", \"Tone/type/Type\"], function (Tone) {\n\n\t\"use strict\";\n\n\t/**\n\t * @class A Timeline class for scheduling and maintaining state\n\t * along a timeline. All events must have a \"time\" property. \n\t * Internally, events are stored in time order for fast \n\t * retrieval.\n\t * @extends {Tone}\n\t * @param {Positive} [memory=Infinity] The number of previous events that are retained.\n\t */\n\tTone.Timeline = function(){\n\n\t\tvar options = this.optionsObject(arguments, [\"memory\"], Tone.Timeline.defaults);\n\n\t\t/**\n\t\t * The array of scheduled timeline events\n\t\t * @type {Array}\n\t\t * @private\n\t\t */\n\t\tthis._timeline = [];\n\n\t\t/**\n\t\t * An array of items to remove from the list. \n\t\t * @type {Array}\n\t\t * @private\n\t\t */\n\t\tthis._toRemove = [];\n\n\t\t/**\n\t\t * Flag if the tieline is mid iteration\n\t\t * @private\n\t\t * @type {Boolean}\n\t\t */\n\t\tthis._iterating = false;\n\n\t\t/**\n\t\t * The memory of the timeline, i.e.\n\t\t * how many events in the past it will retain\n\t\t * @type {Positive}\n\t\t */\n\t\tthis.memory = options.memory;\n\t};\n\n\tTone.extend(Tone.Timeline);\n\n\t/**\n\t * the default parameters\n\t * @static\n\t * @const\n\t */\n\tTone.Timeline.defaults = {\n\t\t\"memory\" : Infinity\n\t};\n\n\t/**\n\t * The number of items in the timeline.\n\t * @type {Number}\n\t * @memberOf Tone.Timeline#\n\t * @name length\n\t * @readOnly\n\t */\n\tObject.defineProperty(Tone.Timeline.prototype, \"length\", {\n\t\tget : function(){\n\t\t\treturn this._timeline.length;\n\t\t}\n\t});\n\n\t/**\n\t * Insert an event object onto the timeline. 
Events must have a \"time\" attribute.\n\t * @param {Object} event The event object to insert into the \n\t * timeline. \n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.add = function(event){\n\t\t//the event needs to have a time attribute\n\t\tif (this.isUndef(event.time)){\n\t\t\tthrow new Error(\"Tone.Timeline: events must have a time attribute\");\n\t\t}\n\t\tif (this._timeline.length){\n\t\t\tvar index = this._search(event.time);\n\t\t\tthis._timeline.splice(index + 1, 0, event);\n\t\t} else {\n\t\t\tthis._timeline.push(event);\t\t\t\n\t\t}\n\t\t//if the length is more than the memory, remove the previous ones\n\t\tif (this.length > this.memory){\n\t\t\tvar diff = this.length - this.memory;\n\t\t\tthis._timeline.splice(0, diff);\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Remove an event from the timeline.\n\t * @param {Object} event The event object to remove from the list.\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.remove = function(event){\n\t\tif (this._iterating){\n\t\t\tthis._toRemove.push(event);\n\t\t} else {\n\t\t\tvar index = this._timeline.indexOf(event);\n\t\t\tif (index !== -1){\n\t\t\t\tthis._timeline.splice(index, 1);\n\t\t\t}\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Get the nearest event whose time is less than or equal to the given time.\n\t * @param {Number} time The time to query.\n\t * @returns {Object} The event object set after that time.\n\t */\n\tTone.Timeline.prototype.get = function(time){\n\t\tvar index = this._search(time);\n\t\tif (index !== -1){\n\t\t\treturn this._timeline[index];\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t};\n\n\t/**\n\t * Return the first event in the timeline without removing it\n\t * @returns {Object} The first event object\n\t */\n\tTone.Timeline.prototype.peek = function(){\n\t\treturn this._timeline[0];\n\t};\n\n\t/**\n\t * Return the first event in the timeline and remove it\n\t * @returns {Object} The first event object\n\t 
*/\n\tTone.Timeline.prototype.shift = function(){\n\t\treturn this._timeline.shift();\n\t};\n\n\t/**\n\t * Get the event which is scheduled after the given time.\n\t * @param {Number} time The time to query.\n\t * @returns {Object} The event object after the given time\n\t */\n\tTone.Timeline.prototype.getAfter = function(time){\n\t\tvar index = this._search(time);\n\t\tif (index + 1 < this._timeline.length){\n\t\t\treturn this._timeline[index + 1];\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t};\n\n\t/**\n\t * Get the event before the event at the given time.\n\t * @param {Number} time The time to query.\n\t * @returns {Object} The event object before the given time\n\t */\n\tTone.Timeline.prototype.getBefore = function(time){\n\t\tvar len = this._timeline.length;\n\t\t//if it's after the last item, return the last item\n\t\tif (len > 0 && this._timeline[len - 1].time < time){\n\t\t\treturn this._timeline[len - 1];\n\t\t}\n\t\tvar index = this._search(time);\n\t\tif (index - 1 >= 0){\n\t\t\treturn this._timeline[index - 1];\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t};\n\n\t/**\n\t * Cancel events after the given time\n\t * @param {Number} time The time to query.\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.cancel = function(after){\n\t\tif (this._timeline.length > 1){\n\t\t\tvar index = this._search(after);\n\t\t\tif (index >= 0){\n\t\t\t\tif (this._timeline[index].time === after){\n\t\t\t\t\t//get the first item with that time\n\t\t\t\t\tfor (var i = index; i >= 0; i--){\n\t\t\t\t\t\tif (this._timeline[i].time === after){\n\t\t\t\t\t\t\tindex = i;\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tthis._timeline = this._timeline.slice(0, index);\n\t\t\t\t} else {\n\t\t\t\t\tthis._timeline = this._timeline.slice(0, index + 1);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tthis._timeline = [];\n\t\t\t}\n\t\t} else if (this._timeline.length === 1){\n\t\t\t//the first item's time\n\t\t\tif (this._timeline[0].time >= 
after){\n\t\t\t\tthis._timeline = [];\n\t\t\t}\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Cancel events before or equal to the given time.\n\t * @param {Number} time The time to cancel before.\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.cancelBefore = function(time){\n\t\tif (this._timeline.length){\n\t\t\tvar index = this._search(time);\n\t\t\tif (index >= 0){\n\t\t\t\tthis._timeline = this._timeline.slice(index + 1);\n\t\t\t}\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Does a binary serach on the timeline array and returns the \n\t * nearest event index whose time is after or equal to the given time.\n\t * If a time is searched before the first index in the timeline, -1 is returned.\n\t * If the time is after the end, the index of the last item is returned.\n\t * @param {Number} time \n\t * @return {Number} the index in the timeline array \n\t * @private\n\t */\n\tTone.Timeline.prototype._search = function(time){\n\t\tvar beginning = 0;\n\t\tvar len = this._timeline.length;\n\t\tvar end = len;\n\t\tif (len > 0 && this._timeline[len - 1].time <= time){\n\t\t\treturn len - 1;\n\t\t}\n\t\twhile (beginning < end){\n\t\t\t// calculate the midpoint for roughly equal partition\n\t\t\tvar midPoint = Math.floor(beginning + (end - beginning) / 2);\n\t\t\tvar event = this._timeline[midPoint];\n\t\t\tvar nextEvent = this._timeline[midPoint + 1];\n\t\t\tif (event.time === time){\n\t\t\t\t//choose the last one that has the same time\n\t\t\t\tfor (var i = midPoint; i < this._timeline.length; i++){\n\t\t\t\t\tvar testEvent = this._timeline[i];\n\t\t\t\t\tif (testEvent.time === time){\n\t\t\t\t\t\tmidPoint = i;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn midPoint;\n\t\t\t} else if (event.time < time && nextEvent.time > time){\n\t\t\t\treturn midPoint;\n\t\t\t} else if (event.time > time){\n\t\t\t\t//search lower\n\t\t\t\tend = midPoint;\n\t\t\t} else if (event.time < time){\n\t\t\t\t//search upper\n\t\t\t\tbeginning = midPoint + 1;\n\t\t\t} 
\n\t\t}\n\t\treturn -1;\n\t};\n\n\t/**\n\t * Internal iterator. Applies extra safety checks for \n\t * removing items from the array. \n\t * @param {Function} callback \n\t * @param {Number=} lowerBound \n\t * @param {Number=} upperBound \n\t * @private\n\t */\n\tTone.Timeline.prototype._iterate = function(callback, lowerBound, upperBound){\n\t\tthis._iterating = true;\n\t\tlowerBound = this.defaultArg(lowerBound, 0);\n\t\tupperBound = this.defaultArg(upperBound, this._timeline.length - 1);\n\t\tfor (var i = lowerBound; i <= upperBound; i++){\n\t\t\tcallback(this._timeline[i]);\n\t\t}\n\t\tthis._iterating = false;\n\t\tif (this._toRemove.length > 0){\n\t\t\tfor (var j = 0; j < this._toRemove.length; j++){\n\t\t\t\tvar index = this._timeline.indexOf(this._toRemove[j]);\n\t\t\t\tif (index !== -1){\n\t\t\t\t\tthis._timeline.splice(index, 1);\n\t\t\t\t}\n\t\t\t}\n\t\t\tthis._toRemove = [];\n\t\t}\n\t};\n\n\t/**\n\t * Iterate over everything in the array\n\t * @param {Function} callback The callback to invoke with every item\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.forEach = function(callback){\n\t\tthis._iterate(callback);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Iterate over everything in the array at or before the given time.\n\t * @param {Number} time The time to check if items are before\n\t * @param {Function} callback The callback to invoke with every item\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.forEachBefore = function(time, callback){\n\t\t//iterate over the items in reverse so that removing an item doesn't break things\n\t\tvar upperBound = this._search(time);\n\t\tif (upperBound !== -1){\n\t\t\tthis._iterate(callback, 0, upperBound);\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Iterate over everything in the array after the given time.\n\t * @param {Number} time The time to check if items are before\n\t * @param {Function} callback The callback to invoke with every item\n\t * @returns {Tone.Timeline} 
this\n\t */\n\tTone.Timeline.prototype.forEachAfter = function(time, callback){\n\t\t//iterate over the items in reverse so that removing an item doesn't break things\n\t\tvar lowerBound = this._search(time);\n\t\tthis._iterate(callback, lowerBound + 1);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Iterate over everything in the array at or after the given time. Similar to \n\t * forEachAfter, but includes the item(s) at the given time.\n\t * @param {Number} time The time to check if items are before\n\t * @param {Function} callback The callback to invoke with every item\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.forEachFrom = function(time, callback){\n\t\t//iterate over the items in reverse so that removing an item doesn't break things\n\t\tvar lowerBound = this._search(time);\n\t\t//work backwards until the event time is less than time\n\t\twhile (lowerBound >= 0 && this._timeline[lowerBound].time >= time){\n\t\t\tlowerBound--;\n\t\t}\n\t\tthis._iterate(callback, lowerBound + 1);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Iterate over everything in the array at the given time\n\t * @param {Number} time The time to check if items are before\n\t * @param {Function} callback The callback to invoke with every item\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.forEachAtTime = function(time, callback){\n\t\t//iterate over the items in reverse so that removing an item doesn't break things\n\t\tvar upperBound = this._search(time);\n\t\tif (upperBound !== -1){\n\t\t\tthis._iterate(function(event){\n\t\t\t\tif (event.time === time){\n\t\t\t\t\tcallback(event);\n\t\t\t\t} \n\t\t\t}, 0, upperBound);\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Clean up.\n\t * @return {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._timeline = null;\n\t\tthis._toRemove = null;\n\t};\n\n\treturn Tone.Timeline;\n});","define([\"Tone/core/Tone\", \"Tone/signal/Multiply\", 
\"Tone/signal/Signal\"], function(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class Negate the incoming signal. i.e. an input signal of 10 will output -10\n\t *\n\t * @constructor\n\t * @extends {Tone.SignalBase}\n\t * @example\n\t * var neg = new Tone.Negate();\n\t * var sig = new Tone.Signal(-2).connect(neg);\n\t * //output of neg is positive 2. \n\t */\n\tTone.Negate = function(){\n\t\t/**\n\t\t * negation is done by multiplying by -1\n\t\t * @type {Tone.Multiply}\n\t\t * @private\n\t\t */\n\t\tthis._multiply = this.input = this.output = new Tone.Multiply(-1);\n\t};\n\n\tTone.extend(Tone.Negate, Tone.SignalBase);\n\n\t/**\n\t * clean up\n\t * @returns {Tone.Negate} this\n\t */\n\tTone.Negate.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._multiply.dispose();\n\t\tthis._multiply = null;\n\t\treturn this;\n\t}; \n\n\treturn Tone.Negate;\n});","define([\"Tone/core/Tone\", \"Tone/signal/Signal\", \"Tone/signal/Multiply\", \"Tone/signal/WaveShaper\"], \nfunction(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class GreaterThanZero outputs 1 when the input is strictly greater than zero\n\t * \n\t * @constructor\n\t * @extends {Tone.SignalBase}\n\t * @example\n\t * var gt0 = new Tone.GreaterThanZero();\n\t * var sig = new Tone.Signal(0.01).connect(gt0);\n\t * //the output of gt0 is 1. \n\t * sig.value = 0;\n\t * //the output of gt0 is 0. 
\n\t */\n\tTone.GreaterThanZero = function(){\n\t\t\n\t\t/**\n\t\t * @type {Tone.WaveShaper}\n\t\t * @private\n\t\t */\n\t\tthis._thresh = this.output = new Tone.WaveShaper(function(val){\n\t\t\tif (val <= 0){\n\t\t\t\treturn 0;\n\t\t\t} else {\n\t\t\t\treturn 1;\n\t\t\t}\n\t\t}, 127);\n\n\t\t/**\n\t\t * scale the first thresholded signal by a large value.\n\t\t * this will help with values which are very close to 0\n\t\t * @type {Tone.Multiply}\n\t\t * @private\n\t\t */\n\t\tthis._scale = this.input = new Tone.Multiply(10000);\n\n\t\t//connections\n\t\tthis._scale.connect(this._thresh);\n\t};\n\n\tTone.extend(Tone.GreaterThanZero, Tone.SignalBase);\n\n\t/**\n\t * dispose method\n\t * @returns {Tone.GreaterThanZero} this\n\t */\n\tTone.GreaterThanZero.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._scale.dispose();\n\t\tthis._scale = null;\n\t\tthis._thresh.dispose();\n\t\tthis._thresh = null;\n\t\treturn this;\n\t};\n\n\treturn Tone.GreaterThanZero;\n});","define([\"Tone/core/Tone\", \"Tone/signal/TimelineSignal\", \"Tone/core/TimelineState\", \n\t\"Tone/core/Emitter\", \"Tone/core/Context\"], function (Tone) {\n\n\t\"use strict\";\n\n\t/**\n\t * @class A sample accurate clock which provides a callback at the given rate. \n\t * While the callback is not sample-accurate (it is still susceptible to\n\t * loose JS timing), the time passed in as the argument to the callback\n\t * is precise. 
For most applications, it is better to use Tone.Transport\n\t * instead of the Clock by itself since you can synchronize multiple callbacks.\n\t *\n\t * \t@constructor\n\t * @extends {Tone.Emitter}\n\t * \t@param {function} callback The callback to be invoked with the time of the audio event\n\t * \t@param {Frequency} frequency The rate of the callback\n\t * \t@example\n\t * //the callback will be invoked approximately once a second\n\t * //and will print the time exactly once a second apart.\n\t * var clock = new Tone.Clock(function(time){\n\t * \tconsole.log(time);\n\t * }, 1);\n\t */\n\tTone.Clock = function(){\n\n\t\tTone.Emitter.call(this);\n\n\t\tvar options = this.optionsObject(arguments, [\"callback\", \"frequency\"], Tone.Clock.defaults);\n\n\t\t/**\n\t\t * The callback function to invoke at the scheduled tick.\n\t\t * @type {Function}\n\t\t */\n\t\tthis.callback = options.callback;\n\n\t\t/**\n\t\t * The next time the callback is scheduled.\n\t\t * @type {Number}\n\t\t * @private\n\t\t */\n\t\tthis._nextTick = 0;\n\n\t\t/**\n\t\t * The last state of the clock.\n\t\t * @type {State}\n\t\t * @private\n\t\t */\n\t\tthis._lastState = Tone.State.Stopped;\n\n\t\t/**\n\t\t * The rate the callback function should be invoked. \n\t\t * @type {BPM}\n\t\t * @signal\n\t\t */\n\t\tthis.frequency = new Tone.TimelineSignal(options.frequency, Tone.Type.Frequency);\n\t\tthis._readOnly(\"frequency\");\n\n\t\t/**\n\t\t * The number of times the callback was invoked. Starts counting at 0\n\t\t * and increments after the callback was invoked. \n\t\t * @type {Ticks}\n\t\t * @readOnly\n\t\t */\n\t\tthis.ticks = 0;\n\n\t\t/**\n\t\t * The state timeline\n\t\t * @type {Tone.TimelineState}\n\t\t * @private\n\t\t */\n\t\tthis._state = new Tone.TimelineState(Tone.State.Stopped);\n\n\t\t/**\n\t\t * The loop function bound to its context. 
\n\t\t * This is necessary to remove the event in the end.\n\t\t * @type {Function}\n\t\t * @private\n\t\t */\n\t\tthis._boundLoop = this._loop.bind(this);\n\n\t\t//bind a callback to the worker thread\n \tthis.context.on(\"tick\", this._boundLoop);\n\t};\n\n\tTone.extend(Tone.Clock, Tone.Emitter);\n\n\t/**\n\t * The defaults\n\t * @const\n\t * @type {Object}\n\t */\n\tTone.Clock.defaults = {\n\t\t\"callback\" : Tone.noOp,\n\t\t\"frequency\" : 1,\n\t\t\"lookAhead\" : \"auto\",\n\t};\n\n\t/**\n\t * Returns the playback state of the source, either \"started\", \"stopped\" or \"paused\".\n\t * @type {Tone.State}\n\t * @readOnly\n\t * @memberOf Tone.Clock#\n\t * @name state\n\t */\n\tObject.defineProperty(Tone.Clock.prototype, \"state\", {\n\t\tget : function(){\n\t\t\treturn this._state.getValueAtTime(this.now());\n\t\t}\n\t});\n\n\t/**\n\t * Start the clock at the given time. Optionally pass in an offset\n\t * of where to start the tick counter from.\n\t * @param {Time} time The time the clock should start\n\t * @param {Ticks=} offset Where the tick counter starts counting from.\n\t * @return {Tone.Clock} this\n\t */\n\tTone.Clock.prototype.start = function(time, offset){\n\t\ttime = this.toSeconds(time);\n\t\tif (this._state.getValueAtTime(time) !== Tone.State.Started){\n\t\t\tthis._state.add({\n\t\t\t\t\"state\" : Tone.State.Started, \n\t\t\t\t\"time\" : time,\n\t\t\t\t\"offset\" : offset\n\t\t\t});\n\t\t}\n\t\treturn this;\t\n\t};\n\n\t/**\n\t * Stop the clock. Stopping the clock resets the tick counter to 0.\n\t * @param {Time} [time=now] The time when the clock should stop.\n\t * @returns {Tone.Clock} this\n\t * @example\n\t * clock.stop();\n\t */\n\tTone.Clock.prototype.stop = function(time){\n\t\ttime = this.toSeconds(time);\n\t\tthis._state.cancel(time);\n\t\tthis._state.setStateAtTime(Tone.State.Stopped, time);\n\t\treturn this;\t\n\t};\n\n\n\t/**\n\t * Pause the clock. 
Pausing does not reset the tick counter.\n\t * @param {Time} [time=now] The time when the clock should stop.\n\t * @returns {Tone.Clock} this\n\t */\n\tTone.Clock.prototype.pause = function(time){\n\t\ttime = this.toSeconds(time);\n\t\tif (this._state.getValueAtTime(time) === Tone.State.Started){\n\t\t\tthis._state.setStateAtTime(Tone.State.Paused, time);\n\t\t}\n\t\treturn this;\t\n\t};\n\n\t/**\n\t * The scheduling loop.\n\t * @param {Number} time The current page time starting from 0\n\t * when the page was loaded.\n\t * @private\n\t */\n\tTone.Clock.prototype._loop = function(){\n\t\t//get the frequency value to compute the value of the next loop\n\t\tvar now = this.now();\n\t\t//if it's started\n\t\tvar lookAhead = this.context.lookAhead;\n\t\tvar updateInterval = this.context.updateInterval;\n\t\tvar lagCompensation = this.context.lag * 2;\n\t\tvar loopInterval = now + lookAhead + updateInterval + lagCompensation;\n\t\twhile (loopInterval > this._nextTick && this._state){\n\t\t\tvar currentState = this._state.getValueAtTime(this._nextTick);\n\t\t\tif (currentState !== this._lastState){\n\t\t\t\tthis._lastState = currentState;\n\t\t\t\tvar event = this._state.get(this._nextTick);\n\t\t\t\t// emit an event\n\t\t\t\tif (currentState === Tone.State.Started){\n\t\t\t\t\t//correct the time\n\t\t\t\t\tthis._nextTick = event.time;\n\t\t\t\t\tif (!this.isUndef(event.offset)){\n\t\t\t\t\t\tthis.ticks = event.offset;\n\t\t\t\t\t}\n\t\t\t\t\tthis.emit(\"start\", event.time, this.ticks);\n\t\t\t\t} else if (currentState === Tone.State.Stopped){\n\t\t\t\t\tthis.ticks = 0;\n\n\t\t\t\t\tthis.emit(\"stop\", event.time);\n\t\t\t\t} else if (currentState === Tone.State.Paused){\n\t\t\t\t\tthis.emit(\"pause\", event.time);\n\t\t\t\t}\n\t\t\t}\n\t\t\tvar tickTime = this._nextTick;\n\t\t\tif (this.frequency){\n\t\t\t\tthis._nextTick += 1 / this.frequency.getValueAtTime(this._nextTick);\n\t\t\t\tif (currentState === 
Tone.State.Started){\n\t\t\t\t\tthis.callback(tickTime);\n\t\t\t\t\tthis.ticks++;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * Returns the scheduled state at the given time.\n\t * @param {Time} time The time to query.\n\t * @return {String} The name of the state input in setStateAtTime.\n\t * @example\n\t * clock.start(\"+0.1\");\n\t * clock.getStateAtTime(\"+0.1\"); //returns \"started\"\n\t */\n\tTone.Clock.prototype.getStateAtTime = function(time){\n\t\ttime = this.toSeconds(time);\n\t\treturn this._state.getValueAtTime(time);\n\t};\n\n\t/**\n\t * Clean up\n\t * @returns {Tone.Clock} this\n\t */\n\tTone.Clock.prototype.dispose = function(){\n\t\tTone.Emitter.prototype.dispose.call(this);\n\t\tthis.context.off(\"tick\", this._boundLoop);\n\t\tthis._writable(\"frequency\");\n\t\tthis.frequency.dispose();\n\t\tthis.frequency = null;\n\t\tthis._boundLoop = null;\n\t\tthis._nextTick = Infinity;\n\t\tthis.callback = null;\n\t\tthis._state.dispose();\n\t\tthis._state = null;\n\t};\n\n\treturn Tone.Clock;\n});","'use strict';\ndefine(function (require) {\n\n var p5sound = require('master');\n var AudioVoice = require('audioVoice');\n var noteToFreq = require('helpers').noteToFreq;\n\n var DEFAULT_SUSTAIN = 0.15;\n\n /**\n * A MonoSynth is used as a single voice for sound synthesis.\n * This is a class to be used in conjunction with the PolySynth\n * class. 
Custom synthetisers should be built inheriting from\n * this class.\n *\n * @class p5.MonoSynth\n * @constructor\n * @example\n * \n * var monoSynth;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * cnv.mousePressed(playSynth);\n *\n * monoSynth = new p5.MonoSynth();\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n * }\n *\n * function playSynth() {\n * // time from now (in seconds)\n * var time = 0;\n * // note duration (in seconds)\n * var dur = 0.25;\n * // velocity (volume, from 0 to 1)\n * var v = 0.2;\n *\n * monoSynth.play(\"G3\", v, time, dur);\n * monoSynth.play(\"C4\", v, time += dur, dur);\n *\n * background(random(255), random(255), 255);\n * text('click to play', width/2, height/2);\n * }\n *
\n * var monoSynth;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * cnv.mousePressed(playSynth);\n *\n * monoSynth = new p5.MonoSynth();\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n * }\n *\n * function playSynth() {\n * // time from now (in seconds)\n * var time = 0;\n * // note duration (in seconds)\n * var dur = 1/6;\n * // note velocity (volume, from 0 to 1)\n * var v = random();\n *\n * monoSynth.play(\"Fb3\", v, 0, dur);\n * monoSynth.play(\"Gb3\", v, time += dur, dur);\n *\n * background(random(255), random(255), 255);\n * text('click to play', width/2, height/2);\n * }\n *
\n * var monoSynth = new p5.MonoSynth();\n *\n * function mousePressed() {\n * monoSynth.triggerAttack(\"E3\");\n * }\n *\n * function mouseReleased() {\n * monoSynth.triggerRelease();\n * }\n *
\n * var monoSynth = new p5.MonoSynth();\n *\n * function mousePressed() {\n * monoSynth.triggerAttack(\"E3\");\n * }\n *\n * function mouseReleased() {\n * monoSynth.triggerRelease();\n * }\n *
setRange
),\n * then decayLevel would increase proportionally, to become 0.5.\n * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)\n */\n p5.MonoSynth.prototype.setADSR = function (attack,decay,sustain,release) {\n this.env.setADSR(attack, decay, sustain, release);\n };\n\n\n /**\n * Getters and Setters\n * @property {Number} attack\n * @for p5.MonoSynth\n */\n /**\n * @property {Number} decay\n * @for p5.MonoSynth\n */\n /**\n * @property {Number} sustain\n * @for p5.MonoSynth\n */\n /**\n * @property {Number} release\n * @for p5.MonoSynth\n */\n Object.defineProperties(p5.MonoSynth.prototype, {\n 'attack': {\n get : function() {\n return this.env.aTime;\n },\n set : function(attack) {\n this.env.setADSR(attack, this.env.dTime,\n this.env.sPercent, this.env.rTime);\n }\n },\n 'decay': {\n get : function() {\n return this.env.dTime;\n },\n set : function(decay) {\n this.env.setADSR(this.env.aTime, decay,\n this.env.sPercent, this.env.rTime);\n }\n },\n 'sustain': {\n get : function() {\n return this.env.sPercent;\n },\n set : function(sustain) {\n this.env.setADSR(this.env.aTime, this.env.dTime,\n sustain, this.env.rTime);\n }\n },\n 'release': {\n get : function() {\n return this.env.rTime;\n },\n set : function(release) {\n this.env.setADSR(this.env.aTime, this.env.dTime,\n this.env.sPercent, release);\n }\n },\n });\n\n\n /**\n * MonoSynth amp\n * @method amp\n * @for p5.MonoSynth\n * @param {Number} vol desired volume\n * @param {Number} [rampTime] Time to reach new volume\n * @return {Number} new volume value\n */\n p5.MonoSynth.prototype.amp = function(vol, rampTime) {\n var t = rampTime || 0;\n if (typeof vol !== 'undefined') {\n this.oscillator.amp(vol, t);\n }\n return this.oscillator.amp().value;\n };\n\n /**\n * Connect to a p5.sound / Web Audio object.\n *\n * @method connect\n * @for p5.MonoSynth\n * @param {Object} unit A p5.sound or Web Audio object\n */\n\n p5.MonoSynth.prototype.connect = function(unit) {\n var u = unit || 
p5sound.input;\n this.output.connect(u.input ? u.input : u);\n };\n\n /**\n * Disconnect all outputs\n *\n * @method disconnect\n * @for p5.MonoSynth\n */\n p5.MonoSynth.prototype.disconnect = function() {\n if (this.output) {\n this.output.disconnect();\n }\n };\n\n\n /**\n * Get rid of the MonoSynth and free up its resources / memory.\n *\n * @method dispose\n * @for p5.MonoSynth\n */\n p5.MonoSynth.prototype.dispose = function() {\n AudioVoice.prototype.dispose.apply(this);\n\n if (this.env) {\n this.env.dispose();\n }\n if (this.oscillator) {\n this.oscillator.dispose();\n }\n };\n\n});\n","'use strict';\ndefine(function() {\n var p5sound = require('master');\n\n /**\n * Base class for monophonic synthesizers. Any extensions of this class\n * should follow the API and implement the methods below in order to\n * remain compatible with p5.PolySynth();\n *\n * @class p5.AudioVoice\n * @constructor\n */\n p5.AudioVoice = function () {\n\t this.ac = p5sound.audiocontext;\n\t this.output = this.ac.createGain();\n\t this.connect();\n\t p5sound.soundArray.push(this);\n };\n\n p5.AudioVoice.prototype.play = function (note, velocity, secondsFromNow, sustime) {\n };\n\n p5.AudioVoice.prototype.triggerAttack = function (note, velocity, secondsFromNow) {\n };\n\n p5.AudioVoice.prototype.triggerRelease = function (secondsFromNow) {\n };\n\n p5.AudioVoice.prototype.amp = function(vol, rampTime) {\n };\n\n /**\n * Connect to p5 objects or Web Audio Nodes\n * @method connect\n * @for p5.AudioVoice\n * @param {Object} unit\n */\n p5.AudioVoice.prototype.connect = function(unit) {\n var u = unit || p5sound.input;\n this.output.connect(u.input ? 
u.input : u);\n };\n\n /**\n * Disconnect from soundOut\n * @method disconnect\n * @for p5.AudioVoice\n */\n p5.AudioVoice.prototype.disconnect = function() {\n this.output.disconnect();\n };\n\n p5.AudioVoice.prototype.dispose = function() {\n if (this.output) {\n this.output.disconnect();\n delete this.output;\n }\n };\n\n return p5.AudioVoice;\n});\n","'use strict';\ndefine(function (require) {\n\n var p5sound = require('master');\n var TimelineSignal = require('Tone/signal/TimelineSignal');\n var noteToFreq = require('helpers').noteToFreq;\n\n /**\n * An AudioVoice is used as a single voice for sound synthesis.\n * The PolySynth class holds an array of AudioVoice, and deals\n * with voices allocations, with setting notes to be played, and\n * parameters to be set.\n *\n * @class p5.PolySynth\n * @constructor\n *\n * @param {Number} [synthVoice] A monophonic synth voice inheriting\n * the AudioVoice class. Defaults to p5.MonoSynth\n * @param {Number} [maxVoices] Number of voices, defaults to 8;\n * @example\n * \n * var polySynth;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * cnv.mousePressed(playSynth);\n *\n * polySynth = new p5.PolySynth();\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n * }\n *\n * function playSynth() {\n * // note duration (in seconds)\n * var dur = 1.5;\n *\n * // time from now (in seconds)\n * var time = 0;\n *\n * // velocity (volume, from 0 to 1)\n * var vel = 0.1;\n *\n * // notes can overlap with each other\n * polySynth.play(\"G2\", vel, 0, dur);\n * polySynth.play(\"C3\", vel, time += 1/3, dur);\n * polySynth.play(\"G3\", vel, time += 1/3, dur);\n *\n * background(random(255), random(255), 255);\n * text('click to play', width/2, height/2);\n * }\n *
\n * var polySynth;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * cnv.mousePressed(playSynth);\n *\n * polySynth = new p5.PolySynth();\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n * }\n *\n * function playSynth() {\n * // note duration (in seconds)\n * var dur = 0.1;\n *\n * // time from now (in seconds)\n * var time = 0;\n *\n * // velocity (volume, from 0 to 1)\n * var vel = 0.1;\n *\n * polySynth.play(\"G2\", vel, 0, dur);\n * polySynth.play(\"C3\", vel, 0, dur);\n * polySynth.play(\"G3\", vel, 0, dur);\n *\n * background(random(255), random(255), 255);\n * text('click to play', width/2, height/2);\n * }\n *
setRange
),\n * then decayLevel would increase proportionally, to become 0.5.\n * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)\n **/\n\n p5.PolySynth.prototype.noteADSR = function (note,a,d,s,r,timeFromNow) {\n var now = p5sound.audiocontext.currentTime;\n var timeFromNow = timeFromNow || 0;\n var t = now + timeFromNow\n this.audiovoices[ this.notes[note].getValueAtTime(t) ].setADSR(a,d,s,r);\n };\n\n\n /**\n * Set the PolySynths global envelope. This method modifies the envelopes of each\n * monosynth so that all notes are played with this envelope.\n *\n * @method setADSR\n * @for p5.PolySynth\n * @param {Number} [attackTime] Time (in seconds before envelope\n * reaches Attack Level\n * @param {Number} [decayTime] Time (in seconds) before envelope\n * reaches Decay/Sustain Level\n * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,\n * where 1.0 = attackLevel, 0.0 = releaseLevel.\n * The susRatio determines the decayLevel and the level at which the\n * sustain portion of the envelope will sustain.\n * For example, if attackLevel is 0.4, releaseLevel is 0,\n * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is\n * increased to 1.0 (using setRange
),\n * then decayLevel would increase proportionally, to become 0.5.\n * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)\n **/\n p5.PolySynth.prototype.setADSR = function(a,d,s,r) {\n this.audiovoices.forEach(function(voice) {\n voice.setADSR(a,d,s,r);\n });\n };\n\n /**\n * Trigger the Attack, and Decay portion of a MonoSynth.\n * Similar to holding down a key on a piano, but it will\n * hold the sustain level until you let go.\n *\n * @method noteAttack\n * @for p5.PolySynth\n * @param {Number} [note] midi note on which attack should be triggered.\n * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)/\n * @param {Number} [secondsFromNow] time from now (in seconds)\n * @example\n * \n * var polySynth = new p5.PolySynth();\n * var pitches = [\"G\", \"D\", \"G\", \"C\"];\n * var octaves = [2, 3, 4];\n *\n * function mousePressed() {\n * // play a chord: multiple notes at the same time\n * for (var i = 0; i < 4; i++) {\n * var note = random(pitches) + random(octaves);\n * polySynth.noteAttack(note, 0.1);\n * }\n * }\n *\n * function mouseReleased() {\n * // release all voices\n * polySynth.noteRelease();\n * }\n *
\n * var pitches = [\"G\", \"D\", \"G\", \"C\"];\n * var octaves = [2, 3, 4];\n * var polySynth = new p5.PolySynth();\n *\n * function mousePressed() {\n * // play a chord: multiple notes at the same time\n * for (var i = 0; i < 4; i++) {\n * var note = random(pitches) + random(octaves);\n * polySynth.noteAttack(note, 0.1);\n * }\n * }\n *\n * function mouseReleased() {\n * // release all voices\n * polySynth.noteRelease();\n * }\n *
SoundFile object with a path to a file.
\n *\n *The p5.SoundFile may not be available immediately because\n * it loads the file information asynchronously.
\n *\n *To do something with the sound as soon as it loads\n * pass the name of a function as the second parameter.
\n *\n *Only one file path is required. However, audio file formats\n * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all\n * web browsers. If you want to ensure compatability, instead of a single\n * file path, you may include an Array of filepaths, and the browser will\n * choose a format that works.
\n *\n * @class p5.SoundFile\n * @constructor\n * @param {String|Array} path path to a sound file (String). Optionally,\n * you may include multiple file formats in\n * an array. Alternately, accepts an object\n * from the HTML5 File API, or a p5.File.\n * @param {Function} [successCallback] Name of a function to call once file loads\n * @param {Function} [errorCallback] Name of a function to call if file fails to\n * load. This function will receive an error or\n * XMLHttpRequest object with information\n * about what went wrong.\n * @param {Function} [whileLoadingCallback] Name of a function to call while file\n * is loading. That function will\n * receive progress of the request to\n * load the sound file\n * (between 0 and 1) as its first\n * parameter. This progress\n * does not account for the additional\n * time needed to decode the audio data.\n *\n * @example\n *\n *\n * function preload() {\n * soundFormats('mp3', 'ogg');\n * mySound = loadSound('assets/doorbell.mp3');\n * }\n *\n * function setup() {\n * mySound.setVolume(0.1);\n * mySound.play();\n * }\n *\n *
\n * function preload() {\n * mySound = loadSound('assets/doorbell.mp3');\n * }\n *\n * function setup() {\n * mySound.setVolume(0.1);\n * mySound.play();\n * }\n *
restart
and\n * sustain
. Play Mode determines what happens to a\n * p5.SoundFile if it is triggered while in the middle of playback.\n * In sustain mode, playback will continue simultaneous to the\n * new playback. In restart mode, play() will stop playback\n * and start over. With untilDone, a sound will play only if it's\n * not already playing. Sustain is the default mode.\n *\n * @method playMode\n * @for p5.SoundFile\n * @param {String} str 'restart' or 'sustain' or 'untilDone'\n * @example\n * \n * var mySound;\n * function preload(){\n * mySound = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n * function mouseClicked() {\n * mySound.playMode('sustain');\n * mySound.play();\n * }\n * function keyPressed() {\n * mySound.playMode('restart');\n * mySound.play();\n * }\n *\n *
\n * var soundFile;\n *\n * function preload() {\n * soundFormats('ogg', 'mp3');\n * soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3');\n * }\n * function setup() {\n * background(0, 255, 0);\n * soundFile.setVolume(0.1);\n * soundFile.loop();\n * }\n * function keyTyped() {\n * if (key == 'p') {\n * soundFile.pause();\n * background(255, 0, 0);\n * }\n * }\n *\n * function keyReleased() {\n * if (key == 'p') {\n * soundFile.play();\n * background(0, 255, 0);\n * }\n * }\n *
\n * rampTime
parameter. For more\n * complex fades, see the Envelope class.\n *\n * Alternately, you can pass in a signal source such as an\n * oscillator to modulate the amplitude with an audio signal.\n *\n * @method setVolume\n * @for p5.SoundFile\n * @param {Number|Object} volume Volume (amplitude) between 0.0\n * and 1.0 or modulating signal/oscillator\n * @param {Number} [rampTime] Fade for t seconds\n * @param {Number} [timeFromNow] Schedule this event to happen at\n * t seconds in the future\n */\n p5.SoundFile.prototype.setVolume = function(vol, _rampTime, _tFromNow) {\n if (typeof vol === 'number') {\n var rampTime = _rampTime || 0;\n var tFromNow = _tFromNow || 0;\n var now = p5sound.audiocontext.currentTime;\n var currentVol = this.output.gain.value;\n this.output.gain.cancelScheduledValues(now + tFromNow);\n this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);\n this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);\n }\n else if (vol) {\n vol.connect(this.output.gain);\n } else {\n // return the Gain Node\n return this.output.gain;\n }\n };\n\n // same as setVolume, to match Processing Sound\n p5.SoundFile.prototype.amp = p5.SoundFile.prototype.setVolume;\n\n // these are the same thing\n p5.SoundFile.prototype.fade = p5.SoundFile.prototype.setVolume;\n\n p5.SoundFile.prototype.getVolume = function() {\n return this.output.gain.value;\n };\n\n /**\n * Set the stereo panning of a p5.sound object to\n * a floating point number between -1.0 (left) and 1.0 (right).\n * Default is 0.0 (center).\n *\n * @method pan\n * @for p5.SoundFile\n * @param {Number} [panValue] Set the stereo panner\n * @param {Number} [timeFromNow] schedule this event to happen\n * seconds from now\n * @example\n * \n *\n * var ball = {};\n * var soundFile;\n *\n * function preload() {\n * soundFormats('ogg', 'mp3');\n * soundFile = loadSound('assets/beatbox.mp3');\n * }\n *\n * function draw() {\n * background(0);\n * ball.x = constrain(mouseX, 0, width);\n * 
ellipse(ball.x, height/2, 20, 20)\n * }\n *\n * function mousePressed(){\n * // map the ball's x location to a panning degree\n * // between -1.0 (left) and 1.0 (right)\n * var panning = map(ball.x, 0., width,-1.0, 1.0);\n * soundFile.pan(panning);\n * soundFile.play();\n * }\n *
\n * var song;\n *\n * function preload() {\n * song = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n *\n * function setup() {\n * song.loop();\n * }\n *\n * function draw() {\n * background(200);\n *\n * // Set the rate to a range between 0.1 and 4\n * // Changing the rate also alters the pitch\n * var speed = map(mouseY, 0.1, height, 0, 2);\n * speed = constrain(speed, 0.01, 4);\n * song.rate(speed);\n *\n * // Draw a circle to show what is going on\n * stroke(0);\n * fill(51, 100);\n * ellipse(mouseX, 100, 48, 48);\n * }\n *\n *
\n * \n * var drum;\n *\n * function preload() {\n * drum = loadSound('assets/drum.mp3');\n * }\n *\n * function setup() {\n * drum.reverseBuffer();\n * drum.play();\n * }\n *\n *
\n * \n * var mySound;\n * function preload() {\n * mySound = loadSound('assets/beat.mp3');\n * }\n *\n * function setup() {\n * background(0);\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * // schedule calls to changeText\n * mySound.addCue(0.50, changeText, \"hello\" );\n * mySound.addCue(1.00, changeText, \"p5\" );\n * mySound.addCue(1.50, changeText, \"what\" );\n * mySound.addCue(2.00, changeText, \"do\" );\n * mySound.addCue(2.50, changeText, \"you\" );\n * mySound.addCue(3.00, changeText, \"want\" );\n * mySound.addCue(4.00, changeText, \"to\" );\n * mySound.addCue(5.00, changeText, \"make\" );\n * mySound.addCue(6.00, changeText, \"?\" );\n * }\n *\n * function changeText(val) {\n * background(0);\n * text(val, width/2, height/2);\n * }\n *\n * function mouseClicked() {\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * if (mySound.isPlaying() ) {\n * mySound.stop();\n * } else {\n * mySound.play();\n * }\n * }\n * }\n *
\n * var inp, button, mySound;\n * var fileName = 'cool';\n * function preload() {\n * mySound = loadSound('assets/doorbell.mp3');\n * }\n * function setup() {\n * btn = createButton('click to save file');\n * btn.position(0, 0);\n * btn.mouseClicked(handleMouseClick);\n * }\n *\n * function handleMouseClick() {\n * mySound.save(fileName);\n * }\n *
\n *\n * function preload() {\n * mySound = loadSound('assets/doorbell.mp3');\n * }\n *\n * function setup() {\n * noCanvas();\n * var soundBlob = mySound.getBlob();\n *\n * // Now we can send the blob to a server...\n * var serverUrl = 'https://jsonplaceholder.typicode.com/posts';\n * var httpRequestOptions = {\n * method: 'POST',\n * body: new FormData().append('soundBlob', soundBlob),\n * headers: new Headers({\n * 'Content-Type': 'multipart/form-data'\n * })\n * };\n * httpDo(serverUrl, httpRequestOptions);\n *\n * // We can also create an `ObjectURL` pointing to the Blob\n * var blobUrl = URL.createObjectURL(soundBlob);\n *\n * // The `
\n * var sound, amplitude, cnv;\n *\n * function preload(){\n * sound = loadSound('assets/beat.mp3');\n * }\n * function setup() {\n * cnv = createCanvas(100,100);\n * amplitude = new p5.Amplitude();\n *\n * // start / stop the sound when canvas is clicked\n * cnv.mouseClicked(function() {\n * if (sound.isPlaying() ){\n * sound.stop();\n * } else {\n * sound.play();\n * }\n * });\n * }\n * function draw() {\n * background(0);\n * fill(255);\n * var level = amplitude.getLevel();\n * var size = map(level, 0, 1, 0, 200);\n * ellipse(width/2, height/2, size, size);\n * }\n *\n *
\n * function preload(){\n * sound1 = loadSound('assets/beat.mp3');\n * sound2 = loadSound('assets/drum.mp3');\n * }\n * function setup(){\n * amplitude = new p5.Amplitude();\n * sound1.play();\n * sound2.play();\n * amplitude.setInput(sound2);\n * }\n * function draw() {\n * background(0);\n * fill(255);\n * var level = amplitude.getLevel();\n * var size = map(level, 0, 1, 0, 200);\n * ellipse(width/2, height/2, size, size);\n * }\n * function mouseClicked(){\n * sound1.stop();\n * sound2.stop();\n * }\n *
\n * function preload(){\n * sound = loadSound('assets/beat.mp3');\n * }\n * function setup() {\n * amplitude = new p5.Amplitude();\n * sound.play();\n * }\n * function draw() {\n * background(0);\n * fill(255);\n * var level = amplitude.getLevel();\n * var size = map(level, 0, 1, 0, 200);\n * ellipse(width/2, height/2, size, size);\n * }\n * function mouseClicked(){\n * sound.stop();\n * }\n *
FFT (Fast Fourier Transform) is an analysis algorithm that\n * isolates individual\n * \n * audio frequencies within a waveform.
\n *\n *Once instantiated, a p5.FFT object can return an array based on\n * two types of analyses:
• FFT.waveform()
computes\n * amplitude values along the time domain. The array indices correspond\n * to samples across a brief moment in time. Each value represents\n * amplitude of the waveform at that sample of time.
\n * • FFT.analyze()
computes amplitude values along the\n * frequency domain. The array indices correspond to frequencies (i.e.\n * pitches), from the lowest to the highest that humans can hear. Each\n * value represents amplitude at that slice of the frequency spectrum.\n * Use with getEnergy()
to measure amplitude at specific\n * frequencies, or within a range of frequencies.
FFT analyzes a very short snapshot of sound called a sample\n * buffer. It returns an array of amplitude measurements, referred\n * to as bins
. The array is 1024 bins long by default.\n * You can change the bin array length, but it must be a power of 2\n * between 16 and 1024 in order for the FFT algorithm to function\n * correctly. The actual size of the FFT buffer is twice the\n * number of bins, so given a standard sample rate, the buffer is\n * 2048/44100 seconds long.
\n * function preload(){\n * sound = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n *\n * function setup(){\n * var cnv = createCanvas(100,100);\n * cnv.mouseClicked(togglePlay);\n * fft = new p5.FFT();\n * sound.amp(0.2);\n * }\n *\n * function draw(){\n * background(0);\n *\n * var spectrum = fft.analyze();\n * noStroke();\n * fill(0,255,0); // spectrum is green\n * for (var i = 0; i< spectrum.length; i++){\n * var x = map(i, 0, spectrum.length, 0, width);\n * var h = -height + map(spectrum[i], 0, 255, height, 0);\n * rect(x, height, width / spectrum.length, h )\n * }\n *\n * var waveform = fft.waveform();\n * noFill();\n * beginShape();\n * stroke(255,0,0); // waveform is red\n * strokeWeight(1);\n * for (var i = 0; i< waveform.length; i++){\n * var x = map(i, 0, waveform.length, 0, width);\n * var y = map( waveform[i], -1, 1, 0, height);\n * vertex(x,y);\n * }\n * endShape();\n *\n * text('click to play/pause', 4, 10);\n * }\n *\n * // fade sound if mouse is over canvas\n * function togglePlay() {\n * if (sound.isPlaying()) {\n * sound.pause();\n * } else {\n * sound.loop();\n * }\n * }\n *
getEnergy()
.\n *\n * @method analyze\n * @for p5.FFT\n * @param {Number} [bins] Must be a power of two between\n * 16 and 1024. Defaults to 1024.\n * @param {Number} [scale] If \"dB,\" returns decibel\n * float measurements between\n * -140 and 0 (max).\n * Otherwise returns integers from 0-255.\n * @return {Array} spectrum Array of energy (amplitude/volume)\n * values across the frequency spectrum.\n * Lowest energy (silence) = 0, highest\n * possible is 255.\n * @example\n * \n * var osc;\n * var fft;\n *\n * function setup(){\n * createCanvas(100,100);\n * osc = new p5.Oscillator();\n * osc.amp(0);\n * osc.start();\n * fft = new p5.FFT();\n * }\n *\n * function draw(){\n * background(0);\n *\n * var freq = map(mouseX, 0, 800, 20, 15000);\n * freq = constrain(freq, 1, 20000);\n * osc.freq(freq);\n *\n * var spectrum = fft.analyze();\n * noStroke();\n * fill(0,255,0); // spectrum is green\n * for (var i = 0; i< spectrum.length; i++){\n * var x = map(i, 0, spectrum.length, 0, width);\n * var h = -height + map(spectrum[i], 0, 255, height, 0);\n * rect(x, height, width / spectrum.length, h );\n * }\n *\n * stroke(255);\n * text('Freq: ' + round(freq)+'Hz', 10, 10);\n *\n * isMouseOverCanvas();\n * }\n *\n * // only play sound when mouse is over canvas\n * function isMouseOverCanvas() {\n * var mX = mouseX, mY = mouseY;\n * if (mX > 0 && mX < width && mY < height && mY > 0) {\n * osc.amp(0.5, 0.2);\n * } else {\n * osc.amp(0, 0.2);\n * }\n * }\n *
\n *\n *\n *function setup(){\n * cnv = createCanvas(100,100);\n * sound = new p5.AudioIn();\n * sound.start();\n * fft = new p5.FFT();\n * sound.connect(fft);\n *}\n *\n *\n *function draw(){\n *\n * var centroidplot = 0.0;\n * var spectralCentroid = 0;\n *\n *\n * background(0);\n * stroke(0,255,0);\n * var spectrum = fft.analyze();\n * fill(0,255,0); // spectrum is green\n *\n * //draw the spectrum\n * for (var i = 0; i< spectrum.length; i++){\n * var x = map(log(i), 0, log(spectrum.length), 0, width);\n * var h = map(spectrum[i], 0, 255, 0, height);\n * var rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length));\n * rect(x, height, rectangle_width, -h )\n * }\n\n * var nyquist = 22050;\n *\n * // get the centroid\n * spectralCentroid = fft.getCentroid();\n *\n * // the mean_freq_index calculation is for the display.\n * var mean_freq_index = spectralCentroid/(nyquist/spectrum.length);\n *\n * centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width);\n *\n *\n * stroke(255,0,0); // the line showing where the centroid is will be red\n *\n * rect(centroidplot, 0, width / spectrum.length, height)\n * noStroke();\n * fill(255,255,255); // text is white\n * text(\"centroid: \", 10, 20);\n * text(round(spectralCentroid)+\" Hz\", 10, 40);\n *}\n *
p5.Signal is a constant audio-rate signal used by p5.Oscillator\n * and p5.Envelope for modulation math.
\n *\n *This is necessary because Web Audio is processed on a seprate clock.\n * For example, the p5 draw loop runs about 60 times per second. But\n * the audio clock must process samples 44100 times per second. If we\n * want to add a value to each of those samples, we can't do it in the\n * draw loop, but we can do it by adding a constant-rate audio signal.This class mostly functions behind the scenes in p5.sound, and returns\n * a Tone.Signal from the Tone.js library by Yotam Mann.\n * If you want to work directly with audio signals for modular\n * synthesis, check out\n * tone.js.
\n *\n * @class p5.Signal\n * @constructor\n * @return {Tone.Signal} A Signal object from the Tone.js library\n * @example\n *\n * function setup() {\n * carrier = new p5.Oscillator('sine');\n * carrier.amp(1); // set amplitude\n * carrier.freq(220); // set frequency\n * carrier.start(); // start oscillating\n *\n * modulator = new p5.Oscillator('sawtooth');\n * modulator.disconnect();\n * modulator.amp(1);\n * modulator.freq(4);\n * modulator.start();\n *\n * // Modulator's default amplitude range is -1 to 1.\n * // Multiply it by -200, so the range is -200 to 200\n * // then add 220 so the range is 20 to 420\n * carrier.freq( modulator.mult(-200).add(220) );\n * }\n *
Envelopes are pre-defined amplitude distribution over time.\n * Typically, envelopes are used to control the output volume\n * of an object, a series of fades referred to as Attack, Decay,\n * Sustain and Release (\n * ADSR\n * ). Envelopes can also control other Web Audio Parameters—for example, a p5.Envelope can\n * control an Oscillator's frequency like this: osc.freq(env)
.
Use setRange
to change the attack/release level.\n * Use setADSR
to change attackTime, decayTime, sustainPercent and releaseTime.
Use the play
method to play the entire envelope,\n * the ramp
method for a pingable trigger,\n * or triggerAttack
/\n * triggerRelease
to trigger noteOn/noteOff.
\n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.2;\n * var susPercent = 0.2;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(playEnv);\n * }\n *\n * function playEnv() {\n * env.play();\n * }\n *
\n * var t1 = 0.1; // attack time in seconds\n * var l1 = 0.7; // attack level 0.0 to 1.0\n * var t2 = 0.3; // decay time in seconds\n * var l2 = 0.1; // decay level 0.0 to 1.0\n * var t3 = 0.2; // sustain time in seconds\n * var l3 = 0.5; // sustain level 0.0 to 1.0\n * // release level defaults to zero\n *\n * var env;\n * var triOsc;\n *\n * function setup() {\n * background(0);\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope(t1, l1, t2, l2, t3, l3);\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env); // give the env control of the triOsc's amp\n * triOsc.start();\n * }\n *\n * // mouseClick triggers envelope if over canvas\n * function mouseClicked() {\n * // is mouse over canvas?\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * env.play(triOsc);\n * }\n * }\n *
setRange
),\n * then decayLevel would increase proportionally, to become 0.5.\n * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)\n * @example\n * \n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.2;\n * var susPercent = 0.2;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(playEnv);\n * }\n *\n * function playEnv() {\n * env.play();\n * }\n *
\n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.2;\n * var susPercent = 0.2;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(playEnv);\n * }\n *\n * function playEnv() {\n * env.play();\n * }\n *
\n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.2;\n * var susPercent = 0.2;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(playEnv);\n * }\n *\n * function playEnv() {\n * // trigger env on triOsc, 0 seconds from now\n * // After decay, sustain for 0.2 seconds before release\n * env.play(triOsc, 0, 0.2);\n * }\n *
\n *\n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.3;\n * var susPercent = 0.4;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * background(200);\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(envAttack);\n * }\n *\n * function envAttack() {\n * console.log('trigger attack');\n * env.triggerAttack();\n *\n * background(0,255,0);\n * text('attack!', width/2, height/2);\n * }\n *\n * function mouseReleased() {\n * env.triggerRelease();\n *\n * background(200);\n * text('click to play', width/2, height/2);\n * }\n *
\n *\n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.3;\n * var susPercent = 0.4;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * background(200);\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(envAttack);\n * }\n *\n * function envAttack() {\n * console.log('trigger attack');\n * env.triggerAttack();\n *\n * background(0,255,0);\n * text('attack!', width/2, height/2);\n * }\n *\n * function mouseReleased() {\n * env.triggerRelease();\n *\n * background(200);\n * text('click to play', width/2, height/2);\n * }\n *
setADSR(attackTime, decayTime)
\n * as \n * time constants for simple exponential ramps.\n * If the value is higher than current value, it uses attackTime,\n * while a decrease uses decayTime.\n *\n * @method ramp\n * @for p5.Envelope\n * @param {Object} unit p5.sound Object or Web Audio Param\n * @param {Number} secondsFromNow When to trigger the ramp\n * @param {Number} v Target value\n * @param {Number} [v2] Second target value (optional)\n * @example\n * \n * var env, osc, amp, cnv;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.2;\n * var attackLevel = 1;\n * var decayLevel = 0;\n *\n * function setup() {\n * cnv = createCanvas(100, 100);\n * fill(0,255,0);\n * noStroke();\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime);\n *\n * osc = new p5.Oscillator();\n * osc.amp(env);\n * osc.start();\n *\n * amp = new p5.Amplitude();\n *\n * cnv.mousePressed(triggerRamp);\n * }\n *\n * function triggerRamp() {\n * env.ramp(osc, 0, attackLevel, decayLevel);\n * }\n *\n * function draw() {\n * background(20,20,20);\n * text('click me', 10, 20);\n * var h = map(amp.getLevel(), 0, 0.4, 0, height);;\n *\n * rect(0, height, width, -h);\n * }\n *
p5.Oscillator
for a full list of methods.\n *\n * @class p5.Pulse\n * @extends p5.Oscillator\n * @constructor\n * @param {Number} [freq] Frequency in oscillations per second (Hz)\n * @param {Number} [w] Width between the pulses (0 to 1.0,\n * defaults to 0)\n * @example\n * \n * var pulse;\n * function setup() {\n * background(0);\n *\n * // Create and start the pulse wave oscillator\n * pulse = new p5.Pulse();\n * pulse.amp(0.5);\n * pulse.freq(220);\n * pulse.start();\n * }\n *\n * function draw() {\n * var w = map(mouseX, 0, width, 0, 1);\n * w = constrain(w, 0, 1);\n * pulse.width(w)\n * }\n *
Get audio from an input, i.e. your computer's microphone.
\n *\n *Turn the mic on/off with the start() and stop() methods. When the mic\n * is on, its volume can be measured with getLevel or by connecting an\n * FFT object.
\n *\n *If you want to hear the AudioIn, use the .connect() method.\n * AudioIn does not connect to p5.sound output by default to prevent\n * feedback.
\n *\n *Note: This uses the getUserMedia/\n * Stream API, which is not supported by certain browsers. Access in Chrome browser\n * is limited to localhost and https, but access over http may be limited.
\n *\n * @class p5.AudioIn\n * @constructor\n * @param {Function} [errorCallback] A function to call if there is an error\n * accessing the AudioIn. For example,\n * Safari and iOS devices do not\n * currently allow microphone access.\n * @example\n *\n * var mic;\n * function setup(){\n * mic = new p5.AudioIn()\n * mic.start();\n * }\n * function draw(){\n * background(0);\n * micLevel = mic.getLevel();\n * ellipse(width/2, constrain(height-micLevel*height*5, 0, height), 10, 10);\n * }\n *
\n * var audiograb;\n *\n * function setup(){\n * //new audioIn\n * audioGrab = new p5.AudioIn();\n *\n * audioGrab.getSources(function(deviceList) {\n * //print out the array of available sources\n * console.log(deviceList);\n * //set the source to the first item in the deviceList array\n * audioGrab.setSource(0);\n * });\n * }\n *
input[0]
. \n\t\t * @type {Tone.Gain}\n\t\t */\n\t\tthis.a = this.input[0] = new Tone.Gain();\n\n\t\t/**\n\t\t * Alias for input[1]
. \n\t\t * @type {Tone.Gain}\n\t\t */\n\t\tthis.b = this.input[1] = new Tone.Gain();\n\n\t\t/**\n\t\t * \tThe mix between the two inputs. A fade value of 0\n\t\t * \twill output 100% input[0]
and \n\t\t * \ta value of 1 will output 100% input[1]
. \n\t\t * @type {NormalRange}\n\t\t * @signal\n\t\t */\n\t\tthis.fade = new Tone.Signal(this.defaultArg(initialFade, 0.5), Tone.Type.NormalRange);\n\n\t\t/**\n\t\t * equal power gain cross fade\n\t\t * @private\n\t\t * @type {Tone.EqualPowerGain}\n\t\t */\n\t\tthis._equalPowerA = new Tone.EqualPowerGain();\n\n\t\t/**\n\t\t * equal power gain cross fade\n\t\t * @private\n\t\t * @type {Tone.EqualPowerGain}\n\t\t */\n\t\tthis._equalPowerB = new Tone.EqualPowerGain();\n\t\t\n\t\t/**\n\t\t * invert the incoming signal\n\t\t * @private\n\t\t * @type {Tone}\n\t\t */\n\t\tthis._invert = new Tone.Expr(\"1 - $0\");\n\n\t\t//connections\n\t\tthis.a.connect(this.output);\n\t\tthis.b.connect(this.output);\n\t\tthis.fade.chain(this._equalPowerB, this.b.gain);\n\t\tthis.fade.chain(this._invert, this._equalPowerA, this.a.gain);\n\t\tthis._readOnly(\"fade\");\n\t};\n\n\tTone.extend(Tone.CrossFade);\n\n\t/**\n\t * clean up\n\t * @returns {Tone.CrossFade} this\n\t */\n\tTone.CrossFade.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._writable(\"fade\");\n\t\tthis._equalPowerA.dispose();\n\t\tthis._equalPowerA = null;\n\t\tthis._equalPowerB.dispose();\n\t\tthis._equalPowerB = null;\n\t\tthis.fade.dispose();\n\t\tthis.fade = null;\n\t\tthis._invert.dispose();\n\t\tthis._invert = null;\n\t\tthis.a.dispose();\n\t\tthis.a = null;\n\t\tthis.b.dispose();\n\t\tthis.b = null;\n\t\treturn this;\n\t};\n\n\treturn Tone.CrossFade;\n});\n","define([\"Tone/core/Tone\", \"Tone/signal/Add\", \"Tone/signal/Subtract\", \"Tone/signal/Multiply\", \n\t\"Tone/signal/GreaterThan\", \"Tone/signal/GreaterThanZero\", \"Tone/signal/Abs\", \"Tone/signal/Negate\", \n\t\"Tone/signal/Modulo\", \"Tone/signal/Pow\", \"Tone/signal/AudioToGain\"], \n\tfunction(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class Evaluate an expression at audio rate. 
\n * var eq;\n * var band_names;\n * var band_index;\n *\n * var soundFile, play;\n *\n * function preload() {\n * soundFormats('mp3', 'ogg');\n * soundFile = loadSound('assets/beat');\n * }\n *\n * function setup() {\n * eq = new p5.EQ(3);\n * soundFile.disconnect();\n * eq.process(soundFile);\n *\n * band_names = ['lows','mids','highs'];\n * band_index = 0;\n * play = false;\n * textAlign(CENTER);\n * }\n *\n * function draw() {\n * background(30);\n * noStroke();\n * fill(255);\n * text('click to kill',50,25);\n *\n * fill(255, 40, 255);\n * textSize(26);\n * text(band_names[band_index],50,55);\n *\n * fill(255);\n * textSize(9);\n * text('space = play/pause',50,80);\n * }\n *\n * //If mouse is over canvas, cycle to the next band and kill the frequency\n * function mouseClicked() {\n * for (var i = 0; i < eq.bands.length; i++) {\n * eq.bands[i].gain(0);\n * }\n * eq.bands[band_index].gain(-40);\n * if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) {\n * band_index === 2 ? band_index = 0 : band_index++;\n * }\n * }\n *\n * //use space bar to trigger play / pause\n * function keyPressed() {\n * if (key===' ') {\n * play = !play\n * play ? soundFile.loop() : soundFile.pause();\n * }\n * }\n *
p5.soundOut.audiocontext.listener
\n *\n *\n * @class p5.Panner3D\n * @constructor\n */\n\tp5.Panner3D = function() {\n Effect.call(this);\n\n /**\n * \n * Web Audio Spatial Panner Node\n *\n * Properties include\n * - panningModel: \"equal power\" or \"HRTF\"\n * - distanceModel: \"linear\", \"inverse\", or \"exponential\"\n *\n * @property {AudioNode} panner\n *\n */\n this.panner = this.ac.createPanner();\n this.panner.panningModel = 'HRTF';\n this.panner.distanceModel = 'linear';\n this.panner.connect(this.output);\n this.input.connect(this.panner);\n\t};\n\n p5.Panner3D.prototype = Object.create(Effect.prototype);\n\n\n /**\n * Connect an audio sorce\n *\n * @method process\n * @for p5.Panner3D\n * @param {Object} src Input source\n */\n p5.Panner3D.prototype.process = function(src) {\n src.connect(this.input);\n }\n /**\n * Set the X,Y,Z position of the Panner\n * @method set\n * @for p5.Panner3D\n * @param {Number} xVal\n * @param {Number} yVal\n * @param {Number} zVal\n * @param {Number} time\n * @return {Array} Updated x, y, z values as an array\n */\n p5.Panner3D.prototype.set = function(xVal, yVal, zVal, time) {\n this.positionX(xVal,time);\n this.positionY(yVal,time);\n this.positionZ(zVal,time);\n return [this.panner.positionX.value,\n this.panner.positionY.value,\n this.panner.positionZ.value];\n };\n\n /**\n * Getter and setter methods for position coordinates\n * @method positionX\n * @for p5.Panner3D\n * @return {Number} updated coordinate value\n */\n /**\n * Getter and setter methods for position coordinates\n * @method positionY\n * @for p5.Panner3D\n * @return {Number} updated coordinate value\n */\n /**\n * Getter and setter methods for position coordinates\n * @method positionZ\n * @for p5.Panner3D\n * @return {Number} updated coordinate value\n */\n p5.Panner3D.prototype.positionX = function(xVal, time) {\n var t = time || 0;\n if (typeof xVal === 'number') {\n this.panner.positionX.value = xVal;\n this.panner.positionX.cancelScheduledValues(this.ac.currentTime + 0.01 + 
t);\n this.panner.positionX.linearRampToValueAtTime(xVal, this.ac.currentTime + 0.02 + t);\n } else if (xVal) {\n xVal.connect(this.panner.positionX);\n }\n return this.panner.positionX.value;\n };\n p5.Panner3D.prototype.positionY = function(yVal, time) {\n var t = time || 0;\n if (typeof yVal === 'number') {\n this.panner.positionY.value = yVal;\n this.panner.positionY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);\n this.panner.positionY.linearRampToValueAtTime(yVal, this.ac.currentTime + 0.02 + t);\n } else if (yVal) {\n yVal.connect(this.panner.positionY);\n }\n return this.panner.positionY.value;\n };\n p5.Panner3D.prototype.positionZ = function(zVal, time) {\n var t = time || 0;\n if (typeof zVal === 'number') {\n this.panner.positionZ.value = zVal;\n this.panner.positionZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);\n this.panner.positionZ.linearRampToValueAtTime(zVal, this.ac.currentTime + 0.02 + t);\n } else if (zVal) {\n zVal.connect(this.panner.positionZ);\n }\n return this.panner.positionZ.value;\n };\n\n /**\n * Set the X,Y,Z position of the Panner\n * @method orient\n * @for p5.Panner3D\n * @param {Number} xVal\n * @param {Number} yVal\n * @param {Number} zVal\n * @param {Number} time\n * @return {Array} Updated x, y, z values as an array\n */\n p5.Panner3D.prototype.orient = function(xVal, yVal, zVal, time) {\n this.orientX(xVal,time);\n this.orientY(yVal,time);\n this.orientZ(zVal,time);\n return [this.panner.orientationX.value,\n this.panner.orientationY.value,\n this.panner.orientationZ.value];\n };\n\n /**\n * Getter and setter methods for orient coordinates\n * @method orientX\n * @for p5.Panner3D\n * @return {Number} updated coordinate value\n */\n /**\n * Getter and setter methods for orient coordinates\n * @method orientY\n * @for p5.Panner3D\n * @return {Number} updated coordinate value\n */\n /**\n * Getter and setter methods for orient coordinates\n * @method orientZ\n * @for p5.Panner3D\n * @return {Number} updated 
coordinate value\n */\n p5.Panner3D.prototype.orientX = function(xVal, time) {\n var t = time || 0;\n if (typeof xVal === 'number') {\n this.panner.orientationX.value = xVal;\n this.panner.orientationX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);\n this.panner.orientationX.linearRampToValueAtTime(xVal, this.ac.currentTime + 0.02 + t);\n } else if (xVal) {\n xVal.connect(this.panner.orientationX);\n }\n return this.panner.orientationX.value;\n };\n p5.Panner3D.prototype.orientY = function(yVal, time) {\n var t = time || 0;\n if (typeof yVal === 'number') {\n this.panner.orientationY.value = yVal;\n this.panner.orientationY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);\n this.panner.orientationY.linearRampToValueAtTime(yVal, this.ac.currentTime + 0.02 + t);\n } else if (yVal) {\n yVal.connect(this.panner.orientationY);\n }\n return this.panner.orientationY.value;\n };\n p5.Panner3D.prototype.orientZ = function(zVal, time) {\n var t = time || 0;\n if (typeof zVal === 'number') {\n this.panner.orientationZ.value = zVal;\n this.panner.orientationZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);\n this.panner.orientationZ.linearRampToValueAtTime(zVal, this.ac.currentTime + 0.02 + t);\n } else if (zVal) {\n zVal.connect(this.panner.orientationZ);\n }\n return this.panner.orientationZ.value;\n };\n\n /**\n * Set the rolloff factor and max distance\n * @method setFalloff\n * @for p5.Panner3D\n * @param {Number} [maxDistance]\n * @param {Number} [rolloffFactor]\n */\n p5.Panner3D.prototype.setFalloff = function(maxDistance, rolloffFactor) {\n this.maxDist(maxDistance);\n this.rolloff(rolloffFactor);\n };\n /**\n * Maxium distance between the source and the listener\n * @method maxDist\n * @for p5.Panner3D\n * @param {Number} maxDistance\n * @return {Number} updated value\n */\n p5.Panner3D.prototype.maxDist = function(maxDistance){\n if (typeof maxDistance === 'number') {\n this.panner.maxDistance = maxDistance;\n }\n return 
this.panner.maxDistance;\n };\n\n /**\n * How quickly the volume is reduced as the source moves away from the listener\n * @method rollof\n * @for p5.Panner3D\n * @param {Number} rolloffFactor\n * @return {Number} updated value\n */\n p5.Panner3D.prototype.rolloff = function(rolloffFactor){\n if (typeof rolloffFactor === 'number') {\n this.panner.rolloffFactor = rolloffFactor;\n }\n return this.panner.rolloffFactor;\n };\n\n p5.Panner3D.dispose = function() {\n Effect.prototype.dispose.apply(this);\n if (this.panner) {\n this.panner.disconnect();\n delete this.panner;\n }\n };\n\n return p5.Panner3D;\n\n});\n","'use strict'\n\ndefine(function (require) {\n var p5sound = require('master');\n var Effect = require('effect');\n\n// /**\n// * listener is a class that can construct both a Spatial Panner\n// * and a Spatial Listener. The panner is based on the \n// * Web Audio Spatial Panner Node\n// * https://www.w3.org/TR/webaudio/#the-listenernode-interface\n// * This panner is a spatial processing node that allows audio to be positioned\n// * and oriented in 3D space. \n// *\n// * The Listener modifies the properties of the Audio Context Listener. \n// * Both objects types use the same methods. The default is a spatial panner.\n// *\n// * p5.Panner3D
- Constructs a Spatial Pannerp5.Listener3D
- Constructs a Spatial Listener\n * var noise, env, delay;\n *\n * function setup() {\n * background(0);\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * noise = new p5.Noise('brown');\n * noise.amp(0);\n * noise.start();\n *\n * delay = new p5.Delay();\n *\n * // delay.process() accepts 4 parameters:\n * // source, delayTime, feedback, filter frequency\n * // play with these numbers!!\n * delay.process(noise, .12, .7, 2300);\n *\n * // play the noise with an envelope,\n * // a series of fades ( time / value pairs )\n * env = new p5.Envelope(.01, 0.2, .2, .1);\n * }\n *\n * // mouseClick triggers envelope\n * function mouseClicked() {\n * // is mouse over canvas?\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * env.play(noise);\n * }\n * }\n *
\n * var soundFile, reverb;\n * function preload() {\n * soundFile = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n *\n * function setup() {\n * reverb = new p5.Reverb();\n * soundFile.disconnect(); // so we'll only hear reverb...\n *\n * // connect soundFile to reverb, process w/\n * // 3 second reverbTime, decayRate of 2%\n * reverb.process(soundFile, 3, 2);\n * soundFile.play();\n * }\n *
p5.Convolver extends p5.Reverb. It can emulate the sound of real\n * physical spaces through a process called \n * convolution.
\n *\n *Convolution multiplies any audio input by an \"impulse response\"\n * to simulate the dispersion of sound over time. The impulse response is\n * generated from an audio file that you provide. One way to\n * generate an impulse response is to pop a balloon in a reverberant space\n * and record the echo. Convolution can also be used to experiment with\n * sound.
\n *\n *Use the method createConvolution(path)
to instantiate a\n * p5.Convolver with a path to your impulse response audio file.
\n * var cVerb, sound;\n * function preload() {\n * // We have both MP3 and OGG versions of all sound assets\n * soundFormats('ogg', 'mp3');\n *\n * // Try replacing 'bx-spring' with other soundfiles like\n * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'\n * cVerb = createConvolver('assets/bx-spring.mp3');\n *\n * // Try replacing 'Damscray_DancingTiger' with\n * // 'beat', 'doorbell', lucky_dragons_-_power_melody'\n * sound = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n *\n * function setup() {\n * // disconnect from master output...\n * sound.disconnect();\n *\n * // ...and process with cVerb\n * // so that we only hear the convolution\n * cVerb.process(sound);\n *\n * sound.play();\n * }\n *
\n * var cVerb, sound;\n * function preload() {\n * // We have both MP3 and OGG versions of all sound assets\n * soundFormats('ogg', 'mp3');\n *\n * // Try replacing 'bx-spring' with other soundfiles like\n * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'\n * cVerb = createConvolver('assets/bx-spring.mp3');\n *\n * // Try replacing 'Damscray_DancingTiger' with\n * // 'beat', 'doorbell', lucky_dragons_-_power_melody'\n * sound = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n *\n * function setup() {\n * // disconnect from master output...\n * sound.disconnect();\n *\n * // ...and process with cVerb\n * // so that we only hear the convolution\n * cVerb.process(sound);\n *\n * sound.play();\n * }\n *
\n * var cVerb, sound;\n * function preload() {\n * soundFormats('ogg', 'mp3');\n *\n * cVerb = createConvolver('assets/concrete-tunnel.mp3');\n *\n * sound = loadSound('assets/beat.mp3');\n * }\n *\n * function setup() {\n * // disconnect from master output...\n * sound.disconnect();\n *\n * // ...and process with (i.e. connect to) cVerb\n * // so that we only hear the convolution\n * cVerb.process(sound);\n *\n * sound.play();\n * }\n *
toggleImpulse(id)
method.\n *\n * @property {Array} impulses\n * @for p5.Convolver\n */\n p5.Convolver.prototype.impulses = [];\n\n /**\n * Load and assign a new Impulse Response to the p5.Convolver.\n * The impulse is added to the .impulses
array. Previous\n * impulses can be accessed with the .toggleImpulse(id)
\n * method.\n *\n * @method addImpulse\n * @for p5.Convolver\n * @param {String} path path to a sound file\n * @param {Function} callback function (optional)\n * @param {Function} errorCallback function (optional)\n */\n p5.Convolver.prototype.addImpulse = function(path, callback, errorCallback) {\n // if loading locally without a server\n if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {\n alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');\n }\n this._loadBuffer(path, callback, errorCallback);\n };\n\n /**\n * Similar to .addImpulse, except that the .impulses
\n * Array is reset to save memory. A new .impulses
\n * array is created with this impulse as the only item.\n *\n * @method resetImpulse\n * @for p5.Convolver\n * @param {String} path path to a sound file\n * @param {Function} callback function (optional)\n * @param {Function} errorCallback function (optional)\n */\n p5.Convolver.prototype.resetImpulse = function(path, callback, errorCallback) {\n // if loading locally without a server\n if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {\n alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');\n }\n this.impulses = [];\n this._loadBuffer(path, callback, errorCallback);\n };\n\n /**\n * If you have used .addImpulse()
to add multiple impulses\n * to a p5.Convolver, then you can use this method to toggle between\n * the items in the .impulses
Array. Accepts a parameter\n * to identify which impulse you wish to use, identified either by its\n * original filename (String) or by its position in the .impulses\n *
Array (Number)..audioBuffer
(type:\n * Web Audio \n * AudioBuffer) and a .name
, a String that corresponds\n * with the original filename.\n *\n * @method toggleImpulse\n * @for p5.Convolver\n * @param {String|Number} id Identify the impulse by its original filename\n * (String), or by its position in the\n * .impulses
Array (Number).\n */\n p5.Convolver.prototype.toggleImpulse = function(id) {\n if (typeof id === 'number' && id < this.impulses.length) {\n this._setBuffer(this.impulses[id].audioBuffer);\n }\n if (typeof id === 'string') {\n for (var i = 0; i < this.impulses.length; i++) {\n if (this.impulses[i].name === id) {\n this._setBuffer(this.impulses[i].audioBuffer);\n break;\n }\n }\n }\n };\n\n p5.Convolver.prototype.dispose = function() {\n p5.Reverb.prototype.dispose.apply(this);\n\n // remove all the Impulse Response buffers\n for (var i in this.impulses) {\n if (this.impulses[i]) {\n this.impulses[i] = null;\n }\n }\n };\n\n});\n","'use strict';\n\ndefine(function (require) {\n var p5sound = require('master');\n\n // requires the Tone.js library's Clock (MIT license, Yotam Mann)\n // https://github.com/TONEnoTONE/Tone.js/\n var Clock = require('Tone/core/Clock');\n\n p5.Metro = function() {\n this.clock = new Clock({\n 'callback': this.ontick.bind(this)\n });\n this.syncedParts = [];\n this.bpm = 120; // gets overridden by p5.Part\n this._init();\n\n this.prevTick = 0;\n this.tatumTime = 0;\n\n this.tickCallback = function() {};\n };\n\n p5.Metro.prototype.ontick = function(tickTime) {\n var elapsedTime = tickTime - this.prevTick;\n var secondsFromNow = tickTime - p5sound.audiocontext.currentTime;\n if (elapsedTime - this.tatumTime <= -0.02) {\n return;\n } else {\n // console.log('ok', this.syncedParts[0].phrases[0].name);\n this.prevTick = tickTime;\n\n // for all of the active things on the metro:\n var self = this;\n this.syncedParts.forEach(function(thisPart) {\n if (!thisPart.isPlaying) return;\n thisPart.incrementStep(secondsFromNow);\n // each synced source keeps track of its own beat number\n thisPart.phrases.forEach(function(thisPhrase) {\n var phraseArray = thisPhrase.sequence;\n var bNum = self.metroTicks % phraseArray.length;\n if (phraseArray[bNum] !== 0 && (self.metroTicks < phraseArray.length || !thisPhrase.looping) ) {\n 
thisPhrase.callback(secondsFromNow, phraseArray[bNum]);\n }\n });\n });\n this.metroTicks += 1;\n this.tickCallback(secondsFromNow);\n }\n };\n\n p5.Metro.prototype.setBPM = function(bpm, rampTime) {\n var beatTime = 60 / (bpm*this.tatums);\n var now = p5sound.audiocontext.currentTime;\n this.tatumTime = beatTime;\n\n var rampTime = rampTime || 0;\n this.clock.frequency.setValueAtTime(this.clock.frequency.value, now);\n this.clock.frequency.linearRampToValueAtTime(bpm, now + rampTime);\n this.bpm = bpm;\n };\n\n p5.Metro.prototype.getBPM = function() {\n return this.clock.getRate() / this.tatums * 60;\n };\n\n p5.Metro.prototype._init = function() {\n this.metroTicks = 0;\n // this.setBPM(120);\n };\n\n // clear existing synced parts, add only this one\n p5.Metro.prototype.resetSync = function(part) {\n this.syncedParts = [part];\n };\n\n // push a new synced part to the array\n p5.Metro.prototype.pushSync = function(part) {\n this.syncedParts.push(part);\n };\n\n p5.Metro.prototype.start = function(timeFromNow) {\n var t = timeFromNow || 0;\n var now = p5sound.audiocontext.currentTime;\n this.clock.start(now + t);\n this.setBPM(this.bpm);\n };\n\n p5.Metro.prototype.stop = function(timeFromNow) {\n var t = timeFromNow || 0;\n var now = p5sound.audiocontext.currentTime;\n this.clock.stop(now + t);\n };\n\n p5.Metro.prototype.beatLength = function(tatums) {\n this.tatums = 1/tatums / 4; // lowest possible division of a beat\n };\n\n});\n","define([\"Tone/core/Tone\", \"Tone/core/Timeline\", \"Tone/type/Type\"], function (Tone) {\n\n\t\"use strict\";\n\n\t/**\n\t * @class A Timeline State. Provides the methods: setStateAtTime(\"state\", time)
\n\t * and getValueAtTime(time)
.\n\t *\n\t * @extends {Tone.Timeline}\n\t * @param {String} initial The initial state of the TimelineState. \n\t * Defaults to undefined
\n\t */\n\tTone.TimelineState = function(initial){\n\n\t\tTone.Timeline.call(this);\n\n\t\t/**\n\t\t * The initial state\n\t\t * @private\n\t\t * @type {String}\n\t\t */\n\t\tthis._initial = initial;\n\t};\n\n\tTone.extend(Tone.TimelineState, Tone.Timeline);\n\n\t/**\n\t * Returns the scheduled state scheduled before or at\n\t * the given time.\n\t * @param {Number} time The time to query.\n\t * @return {String} The name of the state input in setStateAtTime.\n\t */\n\tTone.TimelineState.prototype.getValueAtTime = function(time){\n\t\tvar event = this.get(time);\n\t\tif (event !== null){\n\t\t\treturn event.state;\n\t\t} else {\n\t\t\treturn this._initial;\n\t\t}\n\t};\n\n\t/**\n\t * Returns the scheduled state scheduled before or at\n\t * the given time.\n\t * @param {String} state The name of the state to set.\n\t * @param {Number} time The time to query.\n\t */\n\tTone.TimelineState.prototype.setStateAtTime = function(state, time){\n\t\tthis.add({\n\t\t\t\"state\" : state,\n\t\t\t\"time\" : time\n\t\t});\n\t};\n\n\treturn Tone.TimelineState;\n});","'use strict';\n\ndefine(function(require) {\n var p5sound = require('master');\n\n var BPM = 120;\n\n /**\n * Set the global tempo, in beats per minute, for all\n * p5.Parts. This method will impact all active p5.Parts.\n *\n * @method setBPM\n * @for p5\n * @param {Number} BPM Beats Per Minute\n * @param {Number} rampTime Seconds from now\n */\n p5.prototype.setBPM = function(bpm, rampTime) {\n BPM = bpm;\n for (var i in p5sound.parts) {\n if (p5sound.parts[i]) {\n p5sound.parts[i].setBPM(bpm, rampTime);\n }\n }\n };\n\n /**\n * A phrase is a pattern of musical events over time, i.e.\n * a series of notes and rests.
\n *\n *Phrases must be added to a p5.Part for playback, and\n * each part can play multiple phrases at the same time.\n * For example, one Phrase might be a kick drum, another\n * could be a snare, and another could be the bassline.
\n *\n *The first parameter is a name so that the phrase can be\n * modified or deleted later. The callback is a a function that\n * this phrase will call at every step—for example it might be\n * called playNote(value){}
. The array determines\n * which value is passed into the callback at each step of the\n * phrase. It can be numbers, an object with multiple numbers,\n * or a zero (0) indicates a rest so the callback won't be called).
\n * var mySound, myPhrase, myPart;\n * var pattern = [1,0,0,2,0,2,0,0];\n * var msg = 'click to play';\n *\n * function preload() {\n * mySound = loadSound('assets/beatbox.mp3');\n * }\n *\n * function setup() {\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n * masterVolume(0.1);\n *\n * myPhrase = new p5.Phrase('bbox', makeSound, pattern);\n * myPart = new p5.Part();\n * myPart.addPhrase(myPhrase);\n * myPart.setBPM(60);\n * }\n *\n * function draw() {\n * background(0);\n * text(msg, width/2, height/2);\n * }\n *\n * function makeSound(time, playbackRate) {\n * mySound.rate(playbackRate);\n * mySound.play(time);\n * }\n *\n * function mouseClicked() {\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * myPart.start();\n * msg = 'playing pattern';\n * }\n * }\n *\n *
A p5.Part plays back one or more p5.Phrases. Instantiate a part\n * with steps and tatums. By default, each step represents a 1/16th note.
\n *\n *See p5.Phrase for more about musical timing.
\n *\n * @class p5.Part\n * @constructor\n * @param {Number} [steps] Steps in the part\n * @param {Number} [tatums] Divisions of a beat, e.g. use 1/4, or 0.25 for a quater note (default is 1/16, a sixteenth note)\n * @example\n *\n * var box, drum, myPart;\n * var boxPat = [1,0,0,2,0,2,0,0];\n * var drumPat = [0,1,1,0,2,0,1,0];\n * var msg = 'click to play';\n *\n * function preload() {\n * box = loadSound('assets/beatbox.mp3');\n * drum = loadSound('assets/drum.mp3');\n * }\n *\n * function setup() {\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n * masterVolume(0.1);\n *\n * var boxPhrase = new p5.Phrase('box', playBox, boxPat);\n * var drumPhrase = new p5.Phrase('drum', playDrum, drumPat);\n * myPart = new p5.Part();\n * myPart.addPhrase(boxPhrase);\n * myPart.addPhrase(drumPhrase);\n * myPart.setBPM(60);\n * masterVolume(0.1);\n * }\n *\n * function draw() {\n * background(0);\n * text(msg, width/2, height/2);\n * }\n *\n * function playBox(time, playbackRate) {\n * box.rate(playbackRate);\n * box.play(time);\n * }\n *\n * function playDrum(time, playbackRate) {\n * drum.rate(playbackRate);\n * drum.play(time);\n * }\n *\n * function mouseClicked() {\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * myPart.start();\n * msg = 'playing part';\n * }\n * }\n *
new p5.Score(a, a, b, a, c)
\n *\n * @class p5.Score\n * @constructor\n * @param {p5.Part} [...parts] One or multiple parts, to be played in sequence.\n */\n p5.Score = function() {\n // for all of the arguments\n this.parts = [];\n this.currentPart = 0;\n\n var thisScore = this;\n for (var i in arguments) {\n if (arguments[i] && this.parts[i]) {\n this.parts[i] = arguments[i];\n this.parts[i].nextPart = this.parts[i + 1];\n this.parts[i].onended = function() {\n thisScore.resetPart(i);\n playNextPart(thisScore);\n };\n }\n }\n this.looping = false;\n };\n\n p5.Score.prototype.onended = function() {\n if (this.looping) {\n // this.resetParts();\n this.parts[0].start();\n } else {\n this.parts[this.parts.length - 1].onended = function() {\n this.stop();\n this.resetParts();\n };\n }\n this.currentPart = 0;\n };\n\n /**\n * Start playback of the score.\n *\n * @method start\n * @for p5.Score\n */\n p5.Score.prototype.start = function() {\n this.parts[this.currentPart].start();\n this.scoreStep = 0;\n };\n\n /**\n * Stop playback of the score.\n *\n * @method stop\n * @for p5.Score\n */\n p5.Score.prototype.stop = function() {\n this.parts[this.currentPart].stop();\n this.currentPart = 0;\n this.scoreStep = 0;\n };\n\n /**\n * Pause playback of the score.\n *\n * @method pause\n * @for p5.Score\n */\n p5.Score.prototype.pause = function() {\n this.parts[this.currentPart].stop();\n };\n\n /**\n * Loop playback of the score.\n *\n * @method loop\n * @for p5.Score\n */\n p5.Score.prototype.loop = function() {\n this.looping = true;\n this.start();\n };\n\n /**\n * Stop looping playback of the score. 
If it\n * is currently playing, this will go into effect\n * after the current round of playback completes.\n *\n * @method noLoop\n * @for p5.Score\n */\n p5.Score.prototype.noLoop = function() {\n this.looping = false;\n };\n\n p5.Score.prototype.resetParts = function() {\n var self = this;\n this.parts.forEach(function(part) {\n self.resetParts[part];\n });\n };\n\n p5.Score.prototype.resetPart = function(i) {\n this.parts[i].stop();\n this.parts[i].partStep = 0;\n for (var p in this.parts[i].phrases) {\n if (this.parts[i]) {\n this.parts[i].phrases[p].phraseStep = 0;\n }\n }\n };\n\n /**\n * Set the tempo for all parts in the score\n *\n * @method setBPM\n * @for p5.Score\n * @param {Number} BPM Beats Per Minute\n * @param {Number} rampTime Seconds from now\n */\n p5.Score.prototype.setBPM = function(bpm, rampTime) {\n for (var i in this.parts) {\n if (this.parts[i]) {\n this.parts[i].setBPM(bpm, rampTime);\n }\n }\n };\n\n function playNextPart(aScore) {\n aScore.currentPart++;\n if (aScore.currentPart >= aScore.parts.length) {\n aScore.scoreStep = 0;\n aScore.onended();\n } else {\n aScore.scoreStep = 0;\n aScore.parts[aScore.currentPart - 1].stop();\n aScore.parts[aScore.currentPart].start();\n }\n }\n\n});\n","'use strict';\n\ndefine(function (require) {\n var p5sound = require('master');\n var Clock = require('Tone/core/Clock');\n\n /**\n * SoundLoop\n *\n * @class p5.SoundLoop\n * @constructor\n *\n * @param {Function} callback this function will be called on each iteration of theloop\n * @param {Number|String} [interval] amount of time or beats for each iteration of the loop\n * defaults to 1\n *\n * @example\n * \n * var click;\n * var looper1;\n *\n * function preload() {\n * click = loadSound('assets/drum.mp3');\n * }\n *\n * function setup() {\n * //the looper's callback is passed the timeFromNow\n * //this value should be used as a reference point from\n * //which to schedule sounds\n * looper1 = new p5.SoundLoop(function(timeFromNow){\n * 
click.play(timeFromNow);\n * background(255 * (looper1.iterations % 2));\n * }, 2);\n *\n * //stop after 10 iteratios;\n * looper1.maxIterations = 10;\n * //start the loop\n * looper1.start();\n * }\n *
Record sounds for playback and/or to save as a .wav file.\n * The p5.SoundRecorder records all sound output from your sketch,\n * or can be assigned a specific source with setInput().
\n *The record() method accepts a p5.SoundFile as a parameter.\n * When playback is stopped (either after the given amount of time,\n * or with the stop() method), the p5.SoundRecorder will send its\n * recording to that p5.SoundFile for playback.
\n *\n * @class p5.SoundRecorder\n * @constructor\n * @example\n *\n * var mic, recorder, soundFile;\n * var state = 0;\n *\n * function setup() {\n * background(200);\n * // create an audio in\n * mic = new p5.AudioIn();\n *\n * // prompts user to enable their browser mic\n * mic.start();\n *\n * // create a sound recorder\n * recorder = new p5.SoundRecorder();\n *\n * // connect the mic to the recorder\n * recorder.setInput(mic);\n *\n * // this sound file will be used to\n * // playback & save the recording\n * soundFile = new p5.SoundFile();\n *\n * text('keyPress to record', 20, 20);\n * }\n *\n * function keyPressed() {\n * // make sure user enabled the mic\n * if (state === 0 && mic.enabled) {\n *\n * // record to our p5.SoundFile\n * recorder.record(soundFile);\n *\n * background(255,0,0);\n * text('Recording!', 20, 20);\n * state++;\n * }\n * else if (state === 1) {\n * background(0,255,0);\n *\n * // stop recorder and\n * // send result to soundFile\n * recorder.stop();\n *\n * text('Stopped', 20, 20);\n * state++;\n * }\n *\n * else if (state === 2) {\n * soundFile.play(); // play the result!\n * save(soundFile, 'mySound.wav');\n * state++;\n * }\n * }\n *
PeakDetect works in conjunction with p5.FFT to\n * look for onsets in some or all of the frequency spectrum.\n *
\n *\n * To use p5.PeakDetect, call update
in the draw loop\n * and pass in a p5.FFT object.\n *
\n * You can listen for a specific part of the frequency spectrum by\n * setting the range between freq1
and freq2
.\n *
threshold
is the threshold for detecting a peak,\n * scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud\n * as 1.0.
\n * The update method is meant to be run in the draw loop, and\n * frames determines how many loops must pass before\n * another peak can be detected.\n * For example, if the frameRate() = 60, you could detect the beat of a\n * 120 beat-per-minute song with this equation:\n * framesPerPeak = 60 / (estimatedBPM / 60 );
\n *
\n * Based on example contribtued by @b2renger, and a simple beat detection\n * explanation by Felix Turner.\n *
\n *\n * @class p5.PeakDetect\n * @constructor\n * @param {Number} [freq1] lowFrequency - defaults to 20Hz\n * @param {Number} [freq2] highFrequency - defaults to 20000 Hz\n * @param {Number} [threshold] Threshold for detecting a beat between 0 and 1\n * scaled logarithmically where 0.1 is 1/2 the loudness\n * of 1.0. Defaults to 0.35.\n * @param {Number} [framesPerPeak] Defaults to 20.\n * @example\n *\n *\n * var cnv, soundFile, fft, peakDetect;\n * var ellipseWidth = 10;\n *\n * function preload() {\n * soundFile = loadSound('assets/beat.mp3');\n * }\n *\n * function setup() {\n * background(0);\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n *\n * // p5.PeakDetect requires a p5.FFT\n * fft = new p5.FFT();\n * peakDetect = new p5.PeakDetect();\n * }\n *\n * function draw() {\n * background(0);\n * text('click to play/pause', width/2, height/2);\n *\n * // peakDetect accepts an fft post-analysis\n * fft.analyze();\n * peakDetect.update(fft);\n *\n * if ( peakDetect.isDetected ) {\n * ellipseWidth = 50;\n * } else {\n * ellipseWidth *= 0.95;\n * }\n *\n * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);\n * }\n *\n * // toggle play/stop when canvas is clicked\n * function mouseClicked() {\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * if (soundFile.isPlaying() ) {\n * soundFile.stop();\n * } else {\n * soundFile.play();\n * }\n * }\n * }\n *
\n * var cnv, soundFile, fft, peakDetect;\n * var ellipseWidth = 0;\n *\n * function preload() {\n * soundFile = loadSound('assets/beat.mp3');\n * }\n *\n * function setup() {\n * cnv = createCanvas(100,100);\n * textAlign(CENTER);\n *\n * fft = new p5.FFT();\n * peakDetect = new p5.PeakDetect();\n *\n * setupSound();\n *\n * // when a beat is detected, call triggerBeat()\n * peakDetect.onPeak(triggerBeat);\n * }\n *\n * function draw() {\n * background(0);\n * fill(255);\n * text('click to play', width/2, height/2);\n *\n * fft.analyze();\n * peakDetect.update(fft);\n *\n * ellipseWidth *= 0.95;\n * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);\n * }\n *\n * // this function is called by peakDetect.onPeak\n * function triggerBeat() {\n * ellipseWidth = 50;\n * }\n *\n * // mouseclick starts/stops sound\n * function setupSound() {\n * cnv.mouseClicked( function() {\n * if (soundFile.isPlaying() ) {\n * soundFile.stop();\n * } else {\n * soundFile.play();\n * }\n * });\n * }\n *
\n *\n * // load two soundfile and crossfade beetween them\n * var sound1,sound2;\n * var gain1, gain2, gain3;\n *\n * function preload(){\n * soundFormats('ogg', 'mp3');\n * sound1 = loadSound('assets/Damscray_-_Dancing_Tiger_01');\n * sound2 = loadSound('assets/beat.mp3');\n * }\n *\n * function setup() {\n * createCanvas(400,200);\n *\n * // create a 'master' gain to which we will connect both soundfiles\n * gain3 = new p5.Gain();\n * gain3.connect();\n *\n * // setup first sound for playing\n * sound1.rate(1);\n * sound1.loop();\n * sound1.disconnect(); // diconnect from p5 output\n *\n * gain1 = new p5.Gain(); // setup a gain node\n * gain1.setInput(sound1); // connect the first sound to its input\n * gain1.connect(gain3); // connect its output to the 'master'\n *\n * sound2.rate(1);\n * sound2.disconnect();\n * sound2.loop();\n *\n * gain2 = new p5.Gain();\n * gain2.setInput(sound2);\n * gain2.connect(gain3);\n *\n * }\n *\n * function draw(){\n * background(180);\n *\n * // calculate the horizontal distance beetween the mouse and the right of the screen\n * var d = dist(mouseX,0,width,0);\n *\n * // map the horizontal position of the mouse to values useable for volume control of sound1\n * var vol1 = map(mouseX,0,width,0,1);\n * var vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa\n *\n * gain1.amp(vol1,0.5,0);\n * gain2.amp(vol2,0.5,0);\n *\n * // map the vertical position of the mouse to values useable for 'master volume control'\n * var vol3 = map(mouseY,0,height,0,1);\n * gain3.amp(vol3,0.5,0);\n * }\n *
Scale the output of all sound in this sketch
\n * Scaled between 0.0 (silence) and 1.0 (full volume).\n * 1.0 is the maximum amplitude of a digital sound, so multiplying\n * by greater than 1.0 may cause digital distortion. To\n * fade, provide arampTime
parameter. For more\n * complex fades, see the Envelope class.\n *\n * Alternately, you can pass in a signal source such as an\n * oscillator to modulate the amplitude with an audio signal.\n *\n * How This Works: When you load the p5.sound module, it\n * creates a single instance of p5sound. All sound objects in this\n * module output to p5sound before reaching your computer's output.\n * So if you change the amplitude of p5sound, it impacts all of the\n * sound in this module.
\n *\n *If no value is provided, returns a Web Audio API Gain Node
\n *\n * @method masterVolume\n * @param {Number|Object} volume Volume (amplitude) between 0.0\n * and 1.0 or modulating signal/oscillator\n * @param {Number} [rampTime] Fade for t seconds\n * @param {Number} [timeFromNow] Schedule this event to happen at\n * t seconds in the future\n */\n p5.prototype.masterVolume = function(vol, rampTime, tFromNow) {\n if (typeof vol === 'number') {\n var rampTime = rampTime || 0;\n var tFromNow = tFromNow || 0;\n var now = p5sound.audiocontext.currentTime;\n var currentVol = p5sound.output.gain.value;\n p5sound.output.gain.cancelScheduledValues(now + tFromNow);\n p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);\n p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);\n }\n else if (vol) {\n vol.connect(p5sound.output.gain);\n } else {\n // return the Gain Node\n return p5sound.output.gain;\n }\n };\n\n /**\n * `p5.soundOut` is the p5.sound master output. It sends output to\n * the destination of this window's web audio context. It contains\n * Web Audio API nodes including a dyanmicsCompressor (.limiter
),\n * and Gain Nodes for .input
and .output
.\n *\n * @property {Object} soundOut\n */\n p5.prototype.soundOut = p5.soundOut = p5sound;\n\n /**\n * a silent connection to the DesinationNode\n * which will ensure that anything connected to it\n * will not be garbage collected\n *\n * @private\n */\n p5.soundOut._silentNode = p5sound.audiocontext.createGain();\n p5.soundOut._silentNode.gain.value = 0;\n p5.soundOut._silentNode.connect(p5sound.audiocontext.destination);\n\n\n return p5sound;\n});\n","define([\"Tone/core/Tone\", \"Tone/signal/WaveShaper\", \"Tone/type/Type\", \"Tone/core/Param\", \"Tone/core/Gain\"], function(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class A signal is an audio-rate value. Tone.Signal is a core component of the library.\n\t * Unlike a number, Signals can be scheduled with sample-level accuracy. Tone.Signal\n\t * has all of the methods available to native Web Audio \n\t * [AudioParam](http://webaudio.github.io/web-audio-api/#the-audioparam-interface)\n\t * as well as additional conveniences. Read more about working with signals \n\t * [here](https://github.com/Tonejs/Tone.js/wiki/Signals).\n\t *\n\t * @constructor\n\t * @extends {Tone.Param}\n\t * @param {Number|AudioParam} [value] Initial value of the signal. If an AudioParam\n\t * is passed in, that parameter will be wrapped\n\t * and controlled by the Signal. \n\t * @param {string} [units=Number] unit The units the signal is in. 
\n\t * @example\n\t * var signal = new Tone.Signal(10);\n\t */\n\tTone.Signal = function(){\n\n\t\tvar options = this.optionsObject(arguments, [\"value\", \"units\"], Tone.Signal.defaults);\n\n\t\t/**\n\t\t * The node where the constant signal value is scaled.\n\t\t * @type {GainNode}\n\t\t * @private\n\t\t */\n\t\tthis.output = this._gain = this.context.createGain();\n\n\t\toptions.param = this._gain.gain;\n\t\tTone.Param.call(this, options);\n\n\t\t/**\n\t\t * The node where the value is set.\n\t\t * @type {Tone.Param}\n\t\t * @private\n\t\t */\n\t\tthis.input = this._param = this._gain.gain;\n\n\t\t//connect the const output to the node output\n\t\tthis.context.getConstant(1).chain(this._gain);\n\t};\n\n\tTone.extend(Tone.Signal, Tone.Param);\n\n\t/**\n\t * The default values\n\t * @type {Object}\n\t * @static\n\t * @const\n\t */\n\tTone.Signal.defaults = {\n\t\t\"value\" : 0,\n\t\t\"units\" : Tone.Type.Default,\n\t\t\"convert\" : true,\n\t};\n\n\t/**\n\t * When signals connect to other signals or AudioParams, \n\t * they take over the output value of that signal or AudioParam. \n\t * For all other nodes, the behavior is the same as a default connect
. \n\t *\n\t * @override\n\t * @param {AudioParam|AudioNode|Tone.Signal|Tone} node \n\t * @param {number} [outputNumber=0] The output number to connect from.\n\t * @param {number} [inputNumber=0] The input number to connect to.\n\t * @returns {Tone.SignalBase} this\n\t * @method\n\t */\n\tTone.Signal.prototype.connect = Tone.SignalBase.prototype.connect;\n\n\t/**\n\t * dispose and disconnect\n\t * @returns {Tone.Signal} this\n\t */\n\tTone.Signal.prototype.dispose = function(){\n\t\tTone.Param.prototype.dispose.call(this);\n\t\tthis._param = null;\n\t\tthis._gain.disconnect();\n\t\tthis._gain = null;\n\t\treturn this;\n\t};\n\n\treturn Tone.Signal;\n});","define([\"Tone/core/Tone\", \"Tone/signal/Signal\", \"Tone/core/Gain\"], function(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class Multiply two incoming signals. Or, if a number is given in the constructor, \n\t * multiplies the incoming signal by that value. \n\t *\n\t * @constructor\n\t * @extends {Tone.Signal}\n\t * @param {number=} value Constant value to multiple. If no value is provided,\n\t * it will return the product of the first and second inputs\n\t * @example\n\t * var mult = new Tone.Multiply();\n\t * var sigA = new Tone.Signal(3);\n\t * var sigB = new Tone.Signal(4);\n\t * sigA.connect(mult, 0, 0);\n\t * sigB.connect(mult, 0, 1);\n\t * //output of mult is 12.\n\t * @example\n\t * var mult = new Tone.Multiply(10);\n\t * var sig = new Tone.Signal(2).connect(mult);\n\t * //the output of mult is 20. 
\n\t */\n\tTone.Multiply = function(value){\n\n\t\tthis.createInsOuts(2, 0);\n\n\t\t/**\n\t\t * the input node is the same as the output node\n\t\t * it is also the GainNode which handles the scaling of incoming signal\n\t\t * \n\t\t * @type {GainNode}\n\t\t * @private\n\t\t */\n\t\tthis._mult = this.input[0] = this.output = new Tone.Gain();\n\n\t\t/**\n\t\t * the scaling parameter\n\t\t * @type {AudioParam}\n\t\t * @private\n\t\t */\n\t\tthis._param = this.input[1] = this.output.gain;\n\t\t\n\t\tthis._param.value = this.defaultArg(value, 0);\n\t};\n\n\tTone.extend(Tone.Multiply, Tone.Signal);\n\n\t/**\n\t * clean up\n\t * @returns {Tone.Multiply} this\n\t */\n\tTone.Multiply.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._mult.dispose();\n\t\tthis._mult = null;\n\t\tthis._param = null;\n\t\treturn this;\n\t}; \n\n\treturn Tone.Multiply;\n});\n","'use strict';\ndefine(function (require) {\n\n var p5sound = require('master');\n var CrossFade = require('Tone/component/CrossFade');\n\n /**\n * Effect is a base class for audio effects in p5. \n * var notes = [60, 64, 67, 72];\n * var i = 0;\n *\n * function setup() {\n * osc = new p5.Oscillator('Triangle');\n * osc.start();\n * frameRate(1);\n * }\n *\n * function draw() {\n * var freq = midiToFreq(notes[i]);\n * osc.freq(freq);\n * i++;\n * if (i >= notes.length){\n * i = 0;\n * }\n * }\n *
\n */\n var midiToFreq = p5.prototype.midiToFreq = function(m) {\n return 440 * Math.pow(2, (m-69)/12.0);\n };\n\n // This method converts ANSI notes specified as a string \"C4\", \"Eb3\" to a frequency\n var noteToFreq = function(note) {\n if (typeof note !== 'string') {\n return note;\n }\n var wholeNotes = {A:21, B:23, C:24, D:26, E:28, F:29, G:31};\n var value = wholeNotes[ note[0].toUpperCase() ];\n var octave = ~~note.slice(-1);\n value += 12 * (octave -1);\n\n switch(note[1]) {\n case '#':\n value += 1;\n break;\n case 'b':\n value -= 1;\n break;\n default:\n break;\n }\n return midiToFreq(value);\n }\n\n /**\n * List the SoundFile formats that you will include. LoadSound\n * will search your directory for these extensions, and will pick\n * a format that is compatable with the client's web browser.\n * Here is a free online file\n * converter.\n *\n * @method soundFormats\n * @param {String} [...formats] i.e. 'mp3', 'wav', 'ogg'\n * @example\n * \n * function preload() {\n * // set the global sound formats\n * soundFormats('mp3', 'ogg');\n *\n * // load either beatbox.mp3, or .ogg, depending on browser\n * mySound = loadSound('assets/beatbox.mp3');\n * }\n *\n * function setup() {\n * mySound.play();\n * }\n *
input[0]
\n\t * and input[1]
. If a value is passed into the constructor, \n\t * the it will be added to the input.\n\t * \n\t * @constructor\n\t * @extends {Tone.Signal}\n\t * @param {number=} value If no value is provided, Tone.Add will sum the first\n\t * and second inputs. \n\t * @example\n\t * var signal = new Tone.Signal(2);\n\t * var add = new Tone.Add(2);\n\t * signal.connect(add);\n\t * //the output of add equals 4\n\t * @example\n\t * //if constructed with no arguments\n\t * //it will add the first and second inputs\n\t * var add = new Tone.Add();\n\t * var sig0 = new Tone.Signal(3).connect(add, 0, 0);\n\t * var sig1 = new Tone.Signal(4).connect(add, 0, 1);\n\t * //the output of add equals 7. \n\t */\n\tTone.Add = function(value){\n\n\t\tthis.createInsOuts(2, 0);\n\n\t\t/**\n\t\t * the summing node\n\t\t * @type {GainNode}\n\t\t * @private\n\t\t */\n\t\tthis._sum = this.input[0] = this.input[1] = this.output = new Tone.Gain();\n\n\t\t/**\n\t\t * @private\n\t\t * @type {Tone.Signal}\n\t\t */\n\t\tthis._param = this.input[1] = new Tone.Signal(value);\n\n\t\tthis._param.connect(this._sum);\n\t};\n\n\tTone.extend(Tone.Add, Tone.Signal);\n\t\n\t/**\n\t * Clean up.\n\t * @returns {Tone.Add} this\n\t */\n\tTone.Add.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._sum.dispose();\n\t\tthis._sum = null;\n\t\tthis._param.dispose();\n\t\tthis._param = null;\n\t\treturn this;\n\t}; \n\n\treturn Tone.Add;\n});","define([\"Tone/core/Tone\", \"Tone/type/Time\", \"Tone/type/Frequency\", \"Tone/type/TransportTime\", \"Tone/core/Context\"],\nfunction (Tone) {\t\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tTYPES\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * Units which a value can take on.\n\t * @enum {String}\n\t */\n\tTone.Type = {\n\t\t/** \n\t\t * Default units\n\t\t * @typedef {Default}\n\t\t */\n\t\tDefault : \"number\",\n\t\t/**\n\t\t * Time can be described in a number of 
ways. Read more [Time](https://github.com/Tonejs/Tone.js/wiki/Time).\n\t\t *\n\t\t * connect
. \n\t *\n\t * @override\n\t * @param {AudioParam|AudioNode|Tone.Signal|Tone} node \n\t * @param {number} [outputNumber=0] The output number to connect from.\n\t * @param {number} [inputNumber=0] The input number to connect to.\n\t * @returns {Tone.TimelineSignal} this\n\t * @method\n\t */\n\tTone.TimelineSignal.prototype.connect = Tone.SignalBase.prototype.connect;\n\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tAUTOMATION CURVE CALCULATIONS\n\t//\tMIT License, copyright (c) 2014 Jordan Santell\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * Calculates the the value along the curve produced by setTargetAtTime\n\t * @private\n\t */\n\tTone.TimelineSignal.prototype._exponentialApproach = function (t0, v0, v1, timeConstant, t) {\n\t\treturn v1 + (v0 - v1) * Math.exp(-(t - t0) / timeConstant);\n\t};\n\n\t/**\n\t * Calculates the the value along the curve produced by linearRampToValueAtTime\n\t * @private\n\t */\n\tTone.TimelineSignal.prototype._linearInterpolate = function (t0, v0, t1, v1, t) {\n\t\treturn v0 + (v1 - v0) * ((t - t0) / (t1 - t0));\n\t};\n\n\t/**\n\t * Calculates the the value along the curve produced by exponentialRampToValueAtTime\n\t * @private\n\t */\n\tTone.TimelineSignal.prototype._exponentialInterpolate = function (t0, v0, t1, v1, t) {\n\t\tv0 = Math.max(this._minOutput, v0);\n\t\treturn v0 * Math.pow(v1 / v0, (t - t0) / (t1 - t0));\n\t};\n\n\t/**\n\t * Calculates the the value along the curve produced by setValueCurveAtTime\n\t * @private\n\t */\n\tTone.TimelineSignal.prototype._curveInterpolate = function (start, curve, duration, time) {\n\t\tvar len = curve.length;\n\t\t// If time is after duration, return the last curve value\n\t\tif (time >= start + duration) {\n\t\t\treturn curve[len - 1];\n\t\t} else if (time <= start){\n\t\t\treturn curve[0];\n\t\t} else {\n\t\t\tvar progress = (time - start) / duration;\n\t\t\tvar lowerIndex = Math.floor((len - 1) * 
progress);\n\t\t\tvar upperIndex = Math.ceil((len - 1) * progress);\n\t\t\tvar lowerVal = curve[lowerIndex];\n\t\t\tvar upperVal = curve[upperIndex];\n\t\t\tif (upperIndex === lowerIndex){\n\t\t\t\treturn lowerVal;\n\t\t\t} else {\n\t\t\t\treturn this._linearInterpolate(lowerIndex, lowerVal, upperIndex, upperVal, progress * (len - 1));\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * Clean up.\n\t * @return {Tone.TimelineSignal} this\n\t */\n\tTone.TimelineSignal.prototype.dispose = function(){\n\t\tTone.Signal.prototype.dispose.call(this);\n\t\tTone.Param.prototype.dispose.call(this);\n\t\tthis._events.dispose();\n\t\tthis._events = null;\n\t};\n\n\treturn Tone.TimelineSignal;\n});","'use strict';\n\ndefine(function (require) {\n var Effect = require('effect');\n\n /**\n * A p5.Filter uses a Web Audio Biquad Filter to filter\n * the frequency response of an input source. Subclasses\n * include:
\n *p5.LowPass
:\n * Allows frequencies below the cutoff frequency to pass through,\n * and attenuates frequencies above the cutoff.p5.HighPass
:\n * The opposite of a lowpass filter. p5.BandPass
:\n * Allows a range of frequencies to pass through and attenuates\n * the frequencies below and above this frequency range..res()
method controls either width of the\n * bandpass, or resonance of the low/highpass cutoff frequency.\n *\n * This class extends p5.Effect.\n * Methods amp(), chain(),\n * drywet(), connect(), and\n * disconnect() are available.\n *\n * @class p5.Filter\n * @extends p5.Effect\n * @constructor\n * @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass'\n * @example\n * \n * var fft, noise, filter;\n *\n * function setup() {\n * fill(255, 40, 255);\n *\n * filter = new p5.BandPass();\n *\n * noise = new p5.Noise();\n * // disconnect unfiltered noise,\n * // and connect to filter\n * noise.disconnect();\n * noise.connect(filter);\n * noise.start();\n *\n * fft = new p5.FFT();\n * }\n *\n * function draw() {\n * background(30);\n *\n * // set the BandPass frequency based on mouseX\n * var freq = map(mouseX, 0, width, 20, 10000);\n * filter.freq(freq);\n * // give the filter a narrow band (lower res = wider bandpass)\n * filter.res(50);\n *\n * // draw filtered spectrum\n * var spectrum = fft.analyze();\n * noStroke();\n * for (var i = 0; i < spectrum.length; i++) {\n * var x = map(i, 0, spectrum.length, 0, width);\n * var h = -height + map(spectrum[i], 0, 255, height, 0);\n * rect(x, height, width/spectrum.length, h);\n * }\n *\n * isMouseOverCanvas();\n * }\n *\n * function isMouseOverCanvas() {\n * var mX = mouseX, mY = mouseY;\n * if (mX > 0 && mX < width && mY < height && mY > 0) {\n * noise.amp(0.5, 0.2);\n * } else {\n * noise.amp(0, 0.2);\n * }\n * }\n *
new p5.LowPass()
Filter.\n * This is the same as creating a p5.Filter and then calling\n * its method setType('lowpass')
.\n * See p5.Filter for methods.\n *\n * @class p5.LowPass\n * @constructor\n * @extends p5.Filter\n */\n p5.LowPass = function() {\n p5.Filter.call(this, 'lowpass');\n };\n p5.LowPass.prototype = Object.create(p5.Filter.prototype);\n\n /**\n * Constructor: new p5.HighPass()
Filter.\n * This is the same as creating a p5.Filter and then calling\n * its method setType('highpass')
.\n * See p5.Filter for methods.\n *\n * @class p5.HighPass\n * @constructor\n * @extends p5.Filter\n */\n p5.HighPass = function() {\n p5.Filter.call(this, 'highpass');\n };\n p5.HighPass.prototype = Object.create(p5.Filter.prototype);\n\n /**\n * Constructor: new p5.BandPass()
Filter.\n * This is the same as creating a p5.Filter and then calling\n * its method setType('bandpass')
.\n * See p5.Filter for methods.\n *\n * @class p5.BandPass\n * @constructor\n * @extends p5.Filter\n */\n p5.BandPass = function() {\n p5.Filter.call(this, 'bandpass');\n };\n p5.BandPass.prototype = Object.create(p5.Filter.prototype);\n\n return p5.Filter;\n});\n","define([\"Tone/core/Tone\", \"Tone/signal/Add\", \"Tone/signal/Negate\", \"Tone/signal/Signal\", \"Tone/core/Gain\"], function(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class Subtract the signal connected to input[1]
from the signal connected \n\t * to input[0]
. If an argument is provided in the constructor, the \n\t * signals .value
will be subtracted from the incoming signal.\n\t *\n\t * @extends {Tone.Signal}\n\t * @constructor\n\t * @param {number=} value The value to subtract from the incoming signal. If the value\n\t * is omitted, it will subtract the second signal from the first.\n\t * @example\n\t * var sub = new Tone.Subtract(1);\n\t * var sig = new Tone.Signal(4).connect(sub);\n\t * //the output of sub is 3. \n\t * @example\n\t * var sub = new Tone.Subtract();\n\t * var sigA = new Tone.Signal(10);\n\t * var sigB = new Tone.Signal(2.5);\n\t * sigA.connect(sub, 0, 0);\n\t * sigB.connect(sub, 0, 1);\n\t * //output of sub is 7.5\n\t */\n\tTone.Subtract = function(value){\n\n\t\tthis.createInsOuts(2, 0);\n\n\t\t/**\n\t\t * the summing node\n\t\t * @type {GainNode}\n\t\t * @private\n\t\t */\n\t\tthis._sum = this.input[0] = this.output = new Tone.Gain();\n\n\t\t/**\n\t\t * negate the input of the second input before connecting it\n\t\t * to the summing node.\n\t\t * @type {Tone.Negate}\n\t\t * @private\n\t\t */\n\t\tthis._neg = new Tone.Negate();\n\n\t\t/**\n\t\t * the node where the value is set\n\t\t * @private\n\t\t * @type {Tone.Signal}\n\t\t */\n\t\tthis._param = this.input[1] = new Tone.Signal(value);\n\n\t\tthis._param.chain(this._neg, this._sum);\n\t};\n\n\tTone.extend(Tone.Subtract, Tone.Signal);\n\n\t/**\n\t * Clean up.\n\t * @returns {Tone.SignalBase} this\n\t */\n\tTone.Subtract.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._neg.dispose();\n\t\tthis._neg = null;\n\t\tthis._sum.disconnect();\n\t\tthis._sum = null;\n\t\tthis._param.dispose();\n\t\tthis._param = null;\n\t\treturn this;\n\t};\n\n\treturn Tone.Subtract;\n});","'use strict';\n\nglobal.TONE_SILENCE_VERSION_LOGGING = true;\n\ndefine(['startaudiocontext', 'Tone/core/Context', 'Tone/core/Tone'], function (StartAudioContext, Context, Tone) {\n // Create the Audio Context\n const audiocontext = new window.AudioContext();\n\n // Tone and p5.sound share the same audio context\n 
Tone.context.dispose();\n Tone.setContext(audiocontext);\n\n /**\n * Returns the Audio Context for this sketch. Useful for users\n * who would like to dig deeper into the Web Audio API\n * .
\n *\n *Some browsers require users to startAudioContext\n * with a user gesture, such as touchStarted in the example below.
\n *\n * @method getAudioContext\n * @return {Object} AudioContext for this sketch\n * @example\n *\n * function draw() {\n * background(255);\n * textAlign(CENTER);\n *\n * if (getAudioContext().state !== 'running') {\n * text('click to start audio', width/2, height/2);\n * } else {\n * text('audio is enabled', width/2, height/2);\n * }\n * }\n *\n * function touchStarted() {\n * if (getAudioContext().state !== 'running') {\n * getAudioContext().resume();\n * }\n * var synth = new p5.MonoSynth();\n * synth.play('A4', 0.5, 0, 0.2);\n * }\n *\n *
It is a good practice to give users control over starting audio playback.\n * This practice is enforced by Google Chrome's autoplay policy as of r70\n * (info), iOS Safari, and other browsers.\n *
\n *\n *\n * userStartAudio() starts the Audio Context on a user gesture. It utilizes\n * the StartAudioContext library by\n * Yotam Mann (MIT Licence, 2016). Read more at https://github.com/tambien/StartAudioContext.\n *
\n *\n *Starting the audio context on a user gesture can be as simple as userStartAudio()
.\n * Optional parameters let you decide on a specific element that will start the audio context,\n * and/or call a function once the audio context is started.
\n * function setup() {\n * var myDiv = createDiv('click to start audio');\n * myDiv.position(0, 0);\n *\n * var mySynth = new p5.MonoSynth();\n *\n * // This won't play until the context has started\n * mySynth.play('A6');\n *\n * // Start the audio context on a click/touch event\n * userStartAudio().then(function() {\n * myDiv.remove();\n * });\n * }\n *
connect
. \n\t *\n\t * @override\n\t * @param {AudioParam|AudioNode|Tone.Signal|Tone} node \n\t * @param {number} [outputNumber=0] The output number to connect from.\n\t * @param {number} [inputNumber=0] The input number to connect to.\n\t * @returns {Tone.SignalBase} this\n\t */\n\tTone.SignalBase.prototype.connect = function(node, outputNumber, inputNumber){\n\t\t//zero it out so that the signal can have full control\n\t\tif ((Tone.Signal && Tone.Signal === node.constructor) || \n\t\t\t\t(Tone.Param && Tone.Param === node.constructor) || \n\t\t\t\t(Tone.TimelineSignal && Tone.TimelineSignal === node.constructor)){\n\t\t\t//cancel changes\n\t\t\tnode._param.cancelScheduledValues(0);\n\t\t\t//reset the value\n\t\t\tnode._param.value = 0;\n\t\t\t//mark the value as overridden\n\t\t\tnode.overridden = true;\n\t\t} else if (node instanceof AudioParam){\n\t\t\tnode.cancelScheduledValues(0);\n\t\t\tnode.value = 0;\n\t\t} \n\t\tTone.prototype.connect.call(this, node, outputNumber, inputNumber);\n\t\treturn this;\n\t};\n\n\treturn Tone.SignalBase;\n});","define([\"Tone/core/Tone\", \"Tone/type/TimeBase\"], function (Tone) {\n\n\t/**\n\t * @class Tone.Time is a primitive type for encoding Time values. \n\t * Eventually all time values are evaluated to seconds\n\t * using the `eval` method. Tone.Time can be constructed\n\t * with or without the `new` keyword. Tone.Time can be passed\n\t * into the parameter of any method which takes time as an argument. 
\n\t * @constructor\n\t * @extends {Tone.TimeBase}\n\t * @param {String|Number} val The time value.\n\t * @param {String=} units The units of the value.\n\t * @example\n\t * var t = Tone.Time(\"4n\");//encodes a quarter note\n\t * t.mult(4); // multiply that value by 4\n\t * t.toNotation(); //returns \"1m\"\n\t */\n\tTone.Time = function(val, units){\n\t\tif (this instanceof Tone.Time){\n\n\t\t\t/**\n\t\t\t * If the current clock time should\n\t\t\t * be added to the output\n\t\t\t * @type {Boolean}\n\t\t\t * @private\n\t\t\t */\n\t\t\tthis._plusNow = false;\n\t\t\t\n\t\t\tTone.TimeBase.call(this, val, units);\n\n\t\t} else {\n\t\t\treturn new Tone.Time(val, units);\n\t\t}\n\t};\n\n\tTone.extend(Tone.Time, Tone.TimeBase);\n\n\t//clone the expressions so that \n\t//we can add more without modifying the original\n\tTone.Time.prototype._unaryExpressions = Object.create(Tone.TimeBase.prototype._unaryExpressions);\n\n\t/*\n\t * Adds an additional unary expression\n\t * which quantizes values to the next subdivision\n\t * @type {Object}\n\t * @private\n\t */\n\tTone.Time.prototype._unaryExpressions.quantize = {\n\t\tregexp : /^@/,\n\t\tmethod : function(rh){\n\t\t\treturn Tone.Transport.nextSubdivision(rh());\n\t\t}\n\t};\n\n\t/*\n\t * Adds an additional unary expression\n\t * which adds the current clock time.\n\t * @type {Object}\n\t * @private\n\t */\n\tTone.Time.prototype._unaryExpressions.now = {\n\t\tregexp : /^\\+/,\n\t\tmethod : function(lh){\n\t\t\tthis._plusNow = true;\n\t\t\treturn lh();\n\t\t}\n\t};\n\n\t/**\n\t * Quantize the time by the given subdivision. Optionally add a\n\t * percentage which will move the time value towards the ideal\n\t * quantized value by that percentage. 
\n\t * @param {Number|Time} val The subdivision to quantize to\n\t * @param {NormalRange} [percent=1] Move the time value\n\t * towards the quantized value by\n\t * a percentage.\n\t * @return {Tone.Time} this\n\t * @example\n\t * Tone.Time(21).quantize(2) //returns 22\n\t * Tone.Time(0.6).quantize(\"4n\", 0.5) //returns 0.55\n\t */\n\tTone.Time.prototype.quantize = function(subdiv, percent){\n\t\tpercent = this.defaultArg(percent, 1);\n\t\tthis._expr = function(expr, subdivision, percent){\n\t\t\texpr = expr();\n\t\t\tsubdivision = subdivision.toSeconds();\n\t\t\tvar multiple = Math.round(expr / subdivision);\n\t\t\tvar ideal = multiple * subdivision;\n\t\t\tvar diff = ideal - expr;\n\t\t\treturn expr + diff * percent;\n\t\t}.bind(this, this._expr, new this.constructor(subdiv), percent);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Adds the clock time to the time expression at the \n\t * moment of evaluation. \n\t * @return {Tone.Time} this\n\t */\n\tTone.Time.prototype.addNow = function(){\n\t\tthis._plusNow = true;\n\t\treturn this;\n\t};\n\n\t/**\n\t * @override\n\t * Override the default value return when no arguments are passed in.\n\t * The default value is 'now'\n\t * @private\n\t */\n\tTone.Time.prototype._defaultExpr = function(){\n\t\tthis._plusNow = true;\n\t\treturn this._noOp;\n\t};\n\n\t/**\n\t * Copies the value of time to this Time\n\t * @param {Tone.Time} time\n\t * @return {Time}\n\t */\n\tTone.Time.prototype.copy = function(time){\n\t\tTone.TimeBase.prototype.copy.call(this, time);\n\t\tthis._plusNow = time._plusNow;\n\t\treturn this;\n\t};\n\n\t//CONVERSIONS//////////////////////////////////////////////////////////////\n\n\t/**\n\t * Convert a Time to Notation. Values will be thresholded to the nearest 128th note. 
\n\t * @return {Notation} \n\t * @example\n\t * //if the Transport is at 120bpm:\n\t * Tone.Time(2).toNotation();//returns \"1m\"\n\t */\n\tTone.Time.prototype.toNotation = function(){\n\t\tvar time = this.toSeconds();\n\t\tvar testNotations = [\"1m\", \"2n\", \"4n\", \"8n\", \"16n\", \"32n\", \"64n\", \"128n\"];\n\t\tvar retNotation = this._toNotationHelper(time, testNotations);\n\t\t//try the same thing but with tripelets\n\t\tvar testTripletNotations = [\"1m\", \"2n\", \"2t\", \"4n\", \"4t\", \"8n\", \"8t\", \"16n\", \"16t\", \"32n\", \"32t\", \"64n\", \"64t\", \"128n\"];\n\t\tvar retTripletNotation = this._toNotationHelper(time, testTripletNotations);\n\t\t//choose the simpler expression of the two\n\t\tif (retTripletNotation.split(\"+\").length < retNotation.split(\"+\").length){\n\t\t\treturn retTripletNotation;\n\t\t} else {\n\t\t\treturn retNotation;\n\t\t}\n\t};\n\n\t/**\n\t * Helper method for Tone.toNotation\n\t * @param {Number} units \n\t * @param {Array} testNotations\n\t * @return {String}\n\t * @private\n\t */\n\tTone.Time.prototype._toNotationHelper = function(units, testNotations){\n\t\t//the threshold is the last value in the array\n\t\tvar threshold = this._notationToUnits(testNotations[testNotations.length - 1]);\n\t\tvar retNotation = \"\";\n\t\tfor (var i = 0; i < testNotations.length; i++){\n\t\t\tvar notationTime = this._notationToUnits(testNotations[i]);\n\t\t\t//account for floating point errors (i.e. 
round up if the value is 0.999999)\n\t\t\tvar multiple = units / notationTime;\n\t\t\tvar floatingPointError = 0.000001;\n\t\t\tif (1 - multiple % 1 < floatingPointError){\n\t\t\t\tmultiple += floatingPointError;\n\t\t\t}\n\t\t\tmultiple = Math.floor(multiple);\n\t\t\tif (multiple > 0){\n\t\t\t\tif (multiple === 1){\n\t\t\t\t\tretNotation += testNotations[i];\n\t\t\t\t} else {\n\t\t\t\t\tretNotation += multiple.toString() + \"*\" + testNotations[i];\n\t\t\t\t}\n\t\t\t\tunits -= multiple * notationTime;\n\t\t\t\tif (units < threshold){\n\t\t\t\t\tbreak;\n\t\t\t\t} else {\n\t\t\t\t\tretNotation += \" + \";\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif (retNotation === \"\"){\n\t\t\tretNotation = \"0\";\n\t\t}\n\t\treturn retNotation;\n\t};\n\n\t/**\n\t * Convert a notation value to the current units\n\t * @param {Notation} notation \n\t * @return {Number} \n\t * @private\n\t */\n\tTone.Time.prototype._notationToUnits = function(notation){\n\t\tvar primaryExprs = this._primaryExpressions;\n\t\tvar notationExprs = [primaryExprs.n, primaryExprs.t, primaryExprs.m];\n\t\tfor (var i = 0; i < notationExprs.length; i++){\n\t\t\tvar expr = notationExprs[i];\n\t\t\tvar match = notation.match(expr.regexp);\n\t\t\tif (match){\n\t\t\t\treturn expr.method.call(this, match[1]);\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * Return the time encoded as Bars:Beats:Sixteenths.\n\t * @return {BarsBeatsSixteenths}\n\t */\n\tTone.Time.prototype.toBarsBeatsSixteenths = function(){\n\t\tvar quarterTime = this._beatsToUnits(1);\n\t\tvar quarters = this.toSeconds() / quarterTime;\n\t\tvar measures = Math.floor(quarters / this._timeSignature());\n\t\tvar sixteenths = (quarters % 1) * 4;\n\t\tquarters = Math.floor(quarters) % this._timeSignature();\n\t\tsixteenths = sixteenths.toString();\n\t\tif (sixteenths.length > 3){\n\t\t\tsixteenths = parseFloat(sixteenths).toFixed(3);\n\t\t}\n\t\tvar progress = [measures, quarters, sixteenths];\n\t\treturn progress.join(\":\");\n\t};\n\n\t/**\n\t * Return the time in 
ticks.\n\t * @return {Ticks}\n\t */\n\tTone.Time.prototype.toTicks = function(){\n\t\tvar quarterTime = this._beatsToUnits(1);\n\t\tvar quarters = this.valueOf() / quarterTime;\n\t\treturn Math.floor(quarters * Tone.Transport.PPQ);\n\t};\n\n\t/**\n\t * Return the time in samples\n\t * @return {Samples} \n\t */\n\tTone.Time.prototype.toSamples = function(){\n\t\treturn this.toSeconds() * this.context.sampleRate;\n\t};\n\n\t/**\n\t * Return the time as a frequency value\n\t * @return {Frequency} \n\t * @example\n\t * Tone.Time(2).toFrequency(); //0.5\n\t */\n\tTone.Time.prototype.toFrequency = function(){\n\t\treturn 1/this.toSeconds();\n\t};\n\n\t/**\n\t * Return the time in seconds.\n\t * @return {Seconds} \n\t */\n\tTone.Time.prototype.toSeconds = function(){\n\t\treturn this.valueOf();\n\t};\n\n\t/**\n\t * Return the time in milliseconds.\n\t * @return {Milliseconds} \n\t */\n\tTone.Time.prototype.toMilliseconds = function(){\n\t\treturn this.toSeconds() * 1000;\n\t};\n\n\t/**\n\t * Return the time in seconds.\n\t * @return {Seconds} \n\t */\n\tTone.Time.prototype.valueOf = function(){\n\t\tvar val = this._expr();\n\t\treturn val + (this._plusNow?this.now():0);\n\t};\n\n\treturn Tone.Time;\n});","define([\"Tone/core/Tone\"], function (Tone) {\n\n\t/**\n\t * @class Tone.TimeBase is a flexible encoding of time\n\t * which can be evaluated to and from a string.\n\t * Parsing code modified from https://code.google.com/p/tapdigit/\n\t * Copyright 2011 2012 Ariya Hidayat, New BSD License\n\t * @extends {Tone}\n\t * @param {Time} val The time value as a number or string\n\t * @param {String=} units Unit values\n\t * @example\n\t * Tone.TimeBase(4, \"n\")\n\t * Tone.TimeBase(2, \"t\")\n\t * Tone.TimeBase(\"2t\").add(\"1m\")\n\t * Tone.TimeBase(\"2t + 1m\");\n\t */\n\tTone.TimeBase = function(val, units){\n\n\t\t//allows it to be constructed with or without 'new'\n\t\tif (this instanceof Tone.TimeBase) {\n\n\t\t\t/**\n\t\t\t * Any expressions parsed from the Time\n\t\t\t 
* @type {Array}\n\t\t\t * @private\n\t\t\t */\n\t\t\tthis._expr = this._noOp;\n\n\t\t\tif (val instanceof Tone.TimeBase){\n\t\t\t\tthis.copy(val);\n\t\t\t} else if (!this.isUndef(units) || this.isNumber(val)){\n\t\t\t\t//default units\n\t\t\t\tunits = this.defaultArg(units, this._defaultUnits);\n\t\t\t\tvar method = this._primaryExpressions[units].method;\n\t\t\t\tthis._expr = method.bind(this, val);\n\t\t\t} else if (this.isString(val)){\n\t\t\t\tthis.set(val);\n\t\t\t} else if (this.isUndef(val)){\n\t\t\t\t//default expression\n\t\t\t\tthis._expr = this._defaultExpr();\n\t\t\t}\n\t\t} else {\n\n\t\t\treturn new Tone.TimeBase(val, units);\n\t\t}\n\t};\n\n\tTone.extend(Tone.TimeBase);\n\n\t/**\n\t * Repalce the current time value with the value\n\t * given by the expression string.\n\t * @param {String} exprString\n\t * @return {Tone.TimeBase} this\n\t */\n\tTone.TimeBase.prototype.set = function(exprString){\n\t\tthis._expr = this._parseExprString(exprString);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Return a clone of the TimeBase object.\n\t * @return {Tone.TimeBase} The new cloned Tone.TimeBase\n\t */\n\tTone.TimeBase.prototype.clone = function(){\n\t\tvar instance = new this.constructor();\n\t\tinstance.copy(this);\n\t\treturn instance;\n\t};\n\n\t/**\n\t * Copies the value of time to this Time\n\t * @param {Tone.TimeBase} time\n\t * @return {TimeBase}\n\t */\n\tTone.TimeBase.prototype.copy = function(time){\n\t\tvar val = time._expr();\n\t\treturn this.set(val);\n\t};\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tABSTRACT SYNTAX TREE PARSER\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * All the primary expressions.\n\t * @private\n\t * @type {Object}\n\t */\n\tTone.TimeBase.prototype._primaryExpressions = {\n\t\t\"n\" : {\n\t\t\tregexp : /^(\\d+)n/i,\n\t\t\tmethod : function(value){\n\t\t\t\tvalue = parseInt(value);\n\t\t\t\tif (value === 1){\n\t\t\t\t\treturn 
this._beatsToUnits(this._timeSignature());\n\t\t\t\t} else {\n\t\t\t\t\treturn this._beatsToUnits(4 / value);\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\t\"t\" : {\n\t\t\tregexp : /^(\\d+)t/i,\n\t\t\tmethod : function(value){\n\t\t\t\tvalue = parseInt(value);\n\t\t\t\treturn this._beatsToUnits(8 / (parseInt(value) * 3));\n\t\t\t}\n\t\t},\n\t\t\"m\" : {\n\t\t\tregexp : /^(\\d+)m/i,\n\t\t\tmethod : function(value){\n\t\t\t\treturn this._beatsToUnits(parseInt(value) * this._timeSignature());\n\t\t\t}\n\t\t},\n\t\t\"i\" : {\n\t\t\tregexp : /^(\\d+)i/i,\n\t\t\tmethod : function(value){\n\t\t\t\treturn this._ticksToUnits(parseInt(value));\n\t\t\t}\n\t\t},\n\t\t\"hz\" : {\n\t\t\tregexp : /^(\\d+(?:\\.\\d+)?)hz/i,\n\t\t\tmethod : function(value){\n\t\t\t\treturn this._frequencyToUnits(parseFloat(value));\n\t\t\t}\n\t\t},\n\t\t\"tr\" : {\n\t\t\tregexp : /^(\\d+(?:\\.\\d+)?):(\\d+(?:\\.\\d+)?):?(\\d+(?:\\.\\d+)?)?/,\n\t\t\tmethod : function(m, q, s){\n\t\t\t\tvar total = 0;\n\t\t\t\tif (m && m !== \"0\"){\n\t\t\t\t\ttotal += this._beatsToUnits(this._timeSignature() * parseFloat(m));\n\t\t\t\t}\n\t\t\t\tif (q && q !== \"0\"){\n\t\t\t\t\ttotal += this._beatsToUnits(parseFloat(q));\n\t\t\t\t}\n\t\t\t\tif (s && s !== \"0\"){\n\t\t\t\t\ttotal += this._beatsToUnits(parseFloat(s) / 4);\n\t\t\t\t}\n\t\t\t\treturn total;\n\t\t\t}\n\t\t},\n\t\t\"s\" : {\n\t\t\tregexp : /^(\\d+(?:\\.\\d+)?s)/,\n\t\t\tmethod : function(value){\n\t\t\t\treturn this._secondsToUnits(parseFloat(value));\n\t\t\t}\n\t\t},\n\t\t\"samples\" : {\n\t\t\tregexp : /^(\\d+)samples/,\n\t\t\tmethod : function(value){\n\t\t\t\treturn parseInt(value) / this.context.sampleRate;\n\t\t\t}\n\t\t},\n\t\t\"default\" : {\n\t\t\tregexp : /^(\\d+(?:\\.\\d+)?)/,\n\t\t\tmethod : function(value){\n\t\t\t\treturn this._primaryExpressions[this._defaultUnits].method.call(this, value);\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * All the binary expressions that TimeBase can accept.\n\t * @private\n\t * @type {Object}\n\t 
*/\n\tTone.TimeBase.prototype._binaryExpressions = {\n\t\t\"+\" : {\n\t\t\tregexp : /^\\+/,\n\t\t\tprecedence : 2,\n\t\t\tmethod : function(lh, rh){\n\t\t\t\treturn lh() + rh();\n\t\t\t}\n\t\t},\n\t\t\"-\" : {\n\t\t\tregexp : /^\\-/,\n\t\t\tprecedence : 2,\n\t\t\tmethod : function(lh, rh){\n\t\t\t\treturn lh() - rh();\n\t\t\t}\n\t\t},\n\t\t\"*\" : {\n\t\t\tregexp : /^\\*/,\n\t\t\tprecedence : 1,\n\t\t\tmethod : function(lh, rh){\n\t\t\t\treturn lh() * rh();\n\t\t\t}\n\t\t},\n\t\t\"/\" : {\n\t\t\tregexp : /^\\//,\n\t\t\tprecedence : 1,\n\t\t\tmethod : function(lh, rh){\n\t\t\t\treturn lh() / rh();\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * All the unary expressions.\n\t * @private\n\t * @type {Object}\n\t */\n\tTone.TimeBase.prototype._unaryExpressions = {\n\t\t\"neg\" : {\n\t\t\tregexp : /^\\-/,\n\t\t\tmethod : function(lh){\n\t\t\t\treturn -lh();\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * Syntactic glue which holds expressions together\n\t * @private\n\t * @type {Object}\n\t */\n\tTone.TimeBase.prototype._syntaxGlue = {\n\t\t\"(\" : {\n\t\t\tregexp : /^\\(/\n\t\t},\n\t\t\")\" : {\n\t\t\tregexp : /^\\)/\n\t\t}\n\t};\n\n\t/**\n\t * tokenize the expression based on the Expressions object\n\t * @param {string} expr \n\t * @return {Object} returns two methods on the tokenized list, next and peek\n\t * @private\n\t */\n\tTone.TimeBase.prototype._tokenize = function(expr){\n\t\tvar position = -1;\n\t\tvar tokens = [];\n\n\t\twhile(expr.length > 0){\n\t\t\texpr = expr.trim();\n\t\t\tvar token = getNextToken(expr, this);\n\t\t\ttokens.push(token);\n\t\t\texpr = expr.substr(token.value.length);\n\t\t}\n\n\t\tfunction getNextToken(expr, context){\n\t\t\tvar expressions = [\"_binaryExpressions\", \"_unaryExpressions\", \"_primaryExpressions\", \"_syntaxGlue\"];\n\t\t\tfor (var i = 0; i < expressions.length; i++){\n\t\t\t\tvar group = context[expressions[i]];\n\t\t\t\tfor (var opName in group){\n\t\t\t\t\tvar op = group[opName];\n\t\t\t\t\tvar reg = op.regexp;\n\t\t\t\t\tvar match = 
expr.match(reg);\n\t\t\t\t\tif (match !== null){\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\tmethod : op.method,\n\t\t\t\t\t\t\tprecedence : op.precedence,\n\t\t\t\t\t\t\tregexp : op.regexp,\n\t\t\t\t\t\t\tvalue : match[0],\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tthrow new SyntaxError(\"Tone.TimeBase: Unexpected token \"+expr);\n\t\t}\n\n\t\treturn {\n\t\t\tnext : function(){\n\t\t\t\treturn tokens[++position];\n\t\t\t},\n\t\t\tpeek : function(){\n\t\t\t\treturn tokens[position + 1];\n\t\t\t}\n\t\t};\n\t};\n\n\t/**\n\t * Given a token, find the value within the groupName\n\t * @param {Object} token\n\t * @param {String} groupName\n\t * @param {Number} precedence\n\t * @private\n\t */\n\tTone.TimeBase.prototype._matchGroup = function(token, group, prec) {\n\t\tvar ret = false;\n\t\tif (!this.isUndef(token)){\n\t\t\tfor (var opName in group){\n\t\t\t\tvar op = group[opName];\n\t\t\t\tif (op.regexp.test(token.value)){\n\t\t\t\t\tif (!this.isUndef(prec)){\n\t\t\t\t\t\tif(op.precedence === prec){\t\n\t\t\t\t\t\t\treturn op;\n\t\t\t\t\t\t}\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn op;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn ret;\n\t};\n\n\t/**\n\t * Match a binary expression given the token and the precedence\n\t * @param {Lexer} lexer\n\t * @param {Number} precedence\n\t * @private\n\t */\n\tTone.TimeBase.prototype._parseBinary = function(lexer, precedence){\n\t\tif (this.isUndef(precedence)){\n\t\t\tprecedence = 2;\n\t\t}\n\t\tvar expr;\n\t\tif (precedence < 0){\n\t\t\texpr = this._parseUnary(lexer);\n\t\t} else {\n\t\t\texpr = this._parseBinary(lexer, precedence - 1);\n\t\t}\n\t\tvar token = lexer.peek();\n\t\twhile (token && this._matchGroup(token, this._binaryExpressions, precedence)){\n\t\t\ttoken = lexer.next();\n\t\t\texpr = token.method.bind(this, expr, this._parseBinary(lexer, precedence - 1));\n\t\t\ttoken = lexer.peek();\n\t\t}\n\t\treturn expr;\n\t};\n\n\t/**\n\t * Match a unary expression.\n\t * @param {Lexer} lexer\n\t * @private\n\t 
*/\n\tTone.TimeBase.prototype._parseUnary = function(lexer){\n\t\tvar token, expr;\n\t\ttoken = lexer.peek();\n\t\tvar op = this._matchGroup(token, this._unaryExpressions);\n\t\tif (op) {\n\t\t\ttoken = lexer.next();\n\t\t\texpr = this._parseUnary(lexer);\n\t\t\treturn op.method.bind(this, expr);\n\t\t}\n\t\treturn this._parsePrimary(lexer);\n\t};\n\n\t/**\n\t * Match a primary expression (a value).\n\t * @param {Lexer} lexer\n\t * @private\n\t */\n\tTone.TimeBase.prototype._parsePrimary = function(lexer){\n\t\tvar token, expr;\n\t\ttoken = lexer.peek();\n\t\tif (this.isUndef(token)) {\n\t\t\tthrow new SyntaxError(\"Tone.TimeBase: Unexpected end of expression\");\n\t\t}\n\t\tif (this._matchGroup(token, this._primaryExpressions)) {\n\t\t\ttoken = lexer.next();\n\t\t\tvar matching = token.value.match(token.regexp);\n\t\t\treturn token.method.bind(this, matching[1], matching[2], matching[3]);\n\t\t}\n\t\tif (token && token.value === \"(\"){\n\t\t\tlexer.next();\n\t\t\texpr = this._parseBinary(lexer);\n\t\t\ttoken = lexer.next();\n\t\t\tif (!(token && token.value === \")\")) {\n\t\t\t\tthrow new SyntaxError(\"Expected )\");\n\t\t\t}\n\t\t\treturn expr;\n\t\t}\n\t\tthrow new SyntaxError(\"Tone.TimeBase: Cannot process token \" + token.value);\n\t};\n\n\t/**\n\t * Recursively parse the string expression into a syntax tree.\n\t * @param {string} expr \n\t * @return {Function} the bound method to be evaluated later\n\t * @private\n\t */\n\tTone.TimeBase.prototype._parseExprString = function(exprString){\n\t\tif (!this.isString(exprString)){\n\t\t\texprString = exprString.toString();\n\t\t}\n\t\tvar lexer = this._tokenize(exprString);\n\t\tvar tree = this._parseBinary(lexer);\n\t\treturn tree;\n\t};\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tDEFAULTS\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * The initial expression value\n\t * @return {Number} The initial value 0\n\t * 
@private\n\t */\n\tTone.TimeBase.prototype._noOp = function(){\n\t\treturn 0;\n\t};\n\n\t/**\n\t * The default expression value if no arguments are given\n\t * @private\n\t */\n\tTone.TimeBase.prototype._defaultExpr = function(){\n\t\treturn this._noOp;\n\t};\n\n\t/**\n\t * The default units if none are given.\n\t * @private\n\t */\n\tTone.TimeBase.prototype._defaultUnits = \"s\";\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tUNIT CONVERSIONS\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * Returns the value of a frequency in the current units\n\t * @param {Frequency} freq\n\t * @return {Number}\n\t * @private\n\t */\n\tTone.TimeBase.prototype._frequencyToUnits = function(freq){\n\t\treturn 1/freq;\n\t};\n\n\t/**\n\t * Return the value of the beats in the current units\n\t * @param {Number} beats\n\t * @return {Number}\n\t * @private\n\t */\n\tTone.TimeBase.prototype._beatsToUnits = function(beats){\n\t\treturn (60 / Tone.Transport.bpm.value) * beats;\n\t};\n\n\t/**\n\t * Returns the value of a second in the current units\n\t * @param {Seconds} seconds\n\t * @return {Number}\n\t * @private\n\t */\n\tTone.TimeBase.prototype._secondsToUnits = function(seconds){\n\t\treturn seconds;\n\t};\n\n\t/**\n\t * Returns the value of a tick in the current time units\n\t * @param {Ticks} ticks\n\t * @return {Number}\n\t * @private\n\t */\n\tTone.TimeBase.prototype._ticksToUnits = function(ticks){\n\t\treturn ticks * (this._beatsToUnits(1) / Tone.Transport.PPQ);\n\t};\n\n\t/**\n\t * Return the time signature.\n\t * @return {Number}\n\t * @private\n\t */\n\tTone.TimeBase.prototype._timeSignature = function(){\n\t\treturn Tone.Transport.timeSignature;\n\t};\n\n\t///////////////////////////////////////////////////////////////////////////\n\t//\tEXPRESSIONS\n\t///////////////////////////////////////////////////////////////////////////\n\n\t/**\n\t * Push an expression onto the expression list\n\t * 
@param {Time} val\n\t * @param {String} type\n\t * @param {String} units\n\t * @return {Tone.TimeBase} \n\t * @private\n\t */\n\tTone.TimeBase.prototype._pushExpr = function(val, name, units){\n\t\t//create the expression\n\t\tif (!(val instanceof Tone.TimeBase)){\n\t\t\tval = new this.constructor(val, units);\n\t\t}\n\t\tthis._expr = this._binaryExpressions[name].method.bind(this, this._expr, val._expr);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Add to the current value.\n\t * @param {Time} val The value to add\n\t * @param {String=} units Optional units to use with the value.\n\t * @return {Tone.TimeBase} this\n\t * @example\n\t * Tone.TimeBase(\"2m\").add(\"1m\"); //\"3m\"\n\t */\n\tTone.TimeBase.prototype.add = function(val, units){\n\t\treturn this._pushExpr(val, \"+\", units);\n\t};\n\n\t/**\n\t * Subtract the value from the current time.\n\t * @param {Time} val The value to subtract\n\t * @param {String=} units Optional units to use with the value.\n\t * @return {Tone.TimeBase} this\n\t * @example\n\t * Tone.TimeBase(\"2m\").sub(\"1m\"); //\"1m\"\n\t */\n\tTone.TimeBase.prototype.sub = function(val, units){\n\t\treturn this._pushExpr(val, \"-\", units);\n\t};\n\n\t/**\n\t * Multiply the current value by the given time.\n\t * @param {Time} val The value to multiply\n\t * @param {String=} units Optional units to use with the value.\n\t * @return {Tone.TimeBase} this\n\t * @example\n\t * Tone.TimeBase(\"2m\").mult(\"2\"); //\"4m\"\n\t */\n\tTone.TimeBase.prototype.mult = function(val, units){\n\t\treturn this._pushExpr(val, \"*\", units);\n\t};\n\n\t/**\n\t * Divide the current value by the given time.\n\t * @param {Time} val The value to divide by\n\t * @param {String=} units Optional units to use with the value.\n\t * @return {Tone.TimeBase} this\n\t * @example\n\t * Tone.TimeBase(\"2m\").div(2); //\"1m\"\n\t */\n\tTone.TimeBase.prototype.div = function(val, units){\n\t\treturn this._pushExpr(val, \"/\", units);\n\t};\n\n\t/**\n\t * Evaluate the time value. 
Returns the time\n\t * in seconds.\n\t * @return {Seconds} \n\t */\n\tTone.TimeBase.prototype.valueOf = function(){\n\t\treturn this._expr();\n\t};\n\n\t/**\n\t * Clean up\n\t * @return {Tone.TimeBase} this\n\t */\n\tTone.TimeBase.prototype.dispose = function(){\n\t\tthis._expr = null;\n\t};\n\n\treturn Tone.TimeBase;\n});","define([\"Tone/core/Tone\", \"Tone/type/Type\"], function(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class Tone.Param wraps the native Web Audio's AudioParam to provide\n\t * additional unit conversion functionality. It also\n\t * serves as a base-class for classes which have a single,\n\t * automatable parameter. \n\t * @extends {Tone}\n\t * @param {AudioParam} param The parameter to wrap.\n\t * @param {Tone.Type} units The units of the audio param.\n\t * @param {Boolean} convert If the param should be converted.\n\t */\n\tTone.Param = function(){\n\n\t\tvar options = this.optionsObject(arguments, [\"param\", \"units\", \"convert\"], Tone.Param.defaults);\n\n\t\t/**\n\t\t * The native parameter to control\n\t\t * @type {AudioParam}\n\t\t * @private\n\t\t */\n\t\tthis._param = this.input = options.param;\n\n\t\t/**\n\t\t * The units of the parameter\n\t\t * @type {Tone.Type}\n\t\t */\n\t\tthis.units = options.units;\n\n\t\t/**\n\t\t * If the value should be converted or not\n\t\t * @type {Boolean}\n\t\t */\n\t\tthis.convert = options.convert;\n\n\t\t/**\n\t\t * True if the signal value is being overridden by \n\t\t * a connected signal.\n\t\t * @readOnly\n\t\t * @type {boolean}\n\t\t * @private\n\t\t */\n\t\tthis.overridden = false;\n\n\t\t/**\n\t\t * If there is an LFO, this is where it is held.\n\t\t * @type {Tone.LFO}\n\t\t * @private\n\t\t */\n\t\tthis._lfo = null;\n\n\t\tif (this.isObject(options.lfo)){\n\t\t\tthis.value = options.lfo;\n\t\t} else if (!this.isUndef(options.value)){\n\t\t\tthis.value = options.value;\n\t\t}\n\t};\n\n\tTone.extend(Tone.Param);\n\t\n\t/**\n\t * Defaults\n\t * @type {Object}\n\t * @const\n\t 
*/\n\tTone.Param.defaults = {\n\t\t\"units\" : Tone.Type.Default,\n\t\t\"convert\" : true,\n\t\t\"param\" : undefined\n\t};\n\n\t/**\n\t * The current value of the parameter. \n\t * @memberOf Tone.Param#\n\t * @type {Number}\n\t * @name value\n\t */\n\tObject.defineProperty(Tone.Param.prototype, \"value\", {\n\t\tget : function(){\n\t\t\treturn this._toUnits(this._param.value);\n\t\t},\n\t\tset : function(value){\n\t\t\tif (this.isObject(value)){\n\t\t\t\t//throw an error if the LFO needs to be included\n\t\t\t\tif (this.isUndef(Tone.LFO)){\n\t\t\t\t\tthrow new Error(\"Include 'Tone.LFO' to use an LFO as a Param value.\");\n\t\t\t\t}\n\t\t\t\t//remove the old one\n\t\t\t\tif (this._lfo){\n\t\t\t\t\tthis._lfo.dispose();\n\t\t\t\t}\n\t\t\t\tthis._lfo = new Tone.LFO(value).start();\n\t\t\t\tthis._lfo.connect(this.input);\n\t\t\t} else {\n\t\t\t\tvar convertedVal = this._fromUnits(value);\n\t\t\t\tthis._param.cancelScheduledValues(0);\n\t\t\t\tthis._param.value = convertedVal;\n\t\t\t}\n\t\t}\n\t});\n\n\t/**\n\t * Convert the given value from the type specified by Tone.Param.units\n\t * into the destination value (such as Gain or Frequency).\n\t * @private\n\t * @param {*} val the value to convert\n\t * @return {number} the number which the value should be set to\n\t */\n\tTone.Param.prototype._fromUnits = function(val){\n\t\tif (this.convert || this.isUndef(this.convert)){\n\t\t\tswitch(this.units){\n\t\t\t\tcase Tone.Type.Time: \n\t\t\t\t\treturn this.toSeconds(val);\n\t\t\t\tcase Tone.Type.Frequency: \n\t\t\t\t\treturn this.toFrequency(val);\n\t\t\t\tcase Tone.Type.Decibels: \n\t\t\t\t\treturn this.dbToGain(val);\n\t\t\t\tcase Tone.Type.NormalRange: \n\t\t\t\t\treturn Math.min(Math.max(val, 0), 1);\n\t\t\t\tcase Tone.Type.AudioRange: \n\t\t\t\t\treturn Math.min(Math.max(val, -1), 1);\n\t\t\t\tcase Tone.Type.Positive: \n\t\t\t\t\treturn Math.max(val, 0);\n\t\t\t\tdefault:\n\t\t\t\t\treturn val;\n\t\t\t}\n\t\t} else {\n\t\t\treturn val;\n\t\t}\n\t};\n\n\t/**\n\t * 
Convert the parameters value into the units specified by Tone.Param.units.\n\t * @private\n\t * @param {number} val the value to convert\n\t * @return {number}\n\t */\n\tTone.Param.prototype._toUnits = function(val){\n\t\tif (this.convert || this.isUndef(this.convert)){\n\t\t\tswitch(this.units){\n\t\t\t\tcase Tone.Type.Decibels: \n\t\t\t\t\treturn this.gainToDb(val);\n\t\t\t\tdefault:\n\t\t\t\t\treturn val;\n\t\t\t}\n\t\t} else {\n\t\t\treturn val;\n\t\t}\n\t};\n\n\t/**\n\t * the minimum output value\n\t * @type {Number}\n\t * @private\n\t */\n\tTone.Param.prototype._minOutput = 0.00001;\n\n\t/**\n\t * Schedules a parameter value change at the given time.\n\t * @param {*}\tvalue The value to set the signal.\n\t * @param {Time} time The time when the change should occur.\n\t * @returns {Tone.Param} this\n\t * @example\n\t * //set the frequency to \"G4\" in exactly 1 second from now. \n\t * freq.setValueAtTime(\"G4\", \"+1\");\n\t */\n\tTone.Param.prototype.setValueAtTime = function(value, time){\n\t\tvalue = this._fromUnits(value);\n\t\ttime = this.toSeconds(time);\n\t\tif (time <= this.now() + this.blockTime){\n\t\t\tthis._param.value = value;\n\t\t} else {\n\t\t\tthis._param.setValueAtTime(value, time);\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Creates a schedule point with the current value at the current time.\n\t * This is useful for creating an automation anchor point in order to \n\t * schedule changes from the current value. \n\t *\n\t * @param {number=} now (Optionally) pass the now value in. 
\n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.setRampPoint = function(now){\n\t\tnow = this.defaultArg(now, this.now());\n\t\tvar currentVal = this._param.value;\n\t\t// exponentialRampToValueAt cannot ever ramp from or to 0\n\t\t// More info: https://bugzilla.mozilla.org/show_bug.cgi?id=1125600#c2\n\t\tif (currentVal === 0){\n\t\t\tcurrentVal = this._minOutput;\n\t\t}\n\t\tthis._param.setValueAtTime(currentVal, now);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Schedules a linear continuous change in parameter value from the \n\t * previous scheduled parameter value to the given value.\n\t * \n\t * @param {number} value \n\t * @param {Time} endTime \n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.linearRampToValueAtTime = function(value, endTime){\n\t\tvalue = this._fromUnits(value);\n\t\tthis._param.linearRampToValueAtTime(value, this.toSeconds(endTime));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Schedules an exponential continuous change in parameter value from \n\t * the previous scheduled parameter value to the given value.\n\t * \n\t * @param {number} value \n\t * @param {Time} endTime \n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.exponentialRampToValueAtTime = function(value, endTime){\n\t\tvalue = this._fromUnits(value);\n\t\tvalue = Math.max(this._minOutput, value);\n\t\tthis._param.exponentialRampToValueAtTime(value, this.toSeconds(endTime));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Schedules an exponential continuous change in parameter value from \n\t * the current time and current value to the given value over the \n\t * duration of the rampTime.\n\t * \n\t * @param {number} value The value to ramp to.\n\t * @param {Time} rampTime the time that it takes the \n\t * value to ramp from it's current value\n\t * @param {Time}\t[startTime=now] \tWhen the ramp should start. \n\t * @returns {Tone.Param} this\n\t * @example\n\t * //exponentially ramp to the value 2 over 4 seconds. 
\n\t * signal.exponentialRampToValue(2, 4);\n\t */\n\tTone.Param.prototype.exponentialRampToValue = function(value, rampTime, startTime){\n\t\tstartTime = this.toSeconds(startTime);\n\t\tthis.setRampPoint(startTime);\n\t\tthis.exponentialRampToValueAtTime(value, startTime + this.toSeconds(rampTime));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Schedules an linear continuous change in parameter value from \n\t * the current time and current value to the given value over the \n\t * duration of the rampTime.\n\t * \n\t * @param {number} value The value to ramp to.\n\t * @param {Time} rampTime the time that it takes the \n\t * value to ramp from it's current value\n\t * @param {Time}\t[startTime=now] \tWhen the ramp should start. \n\t * @returns {Tone.Param} this\n\t * @example\n\t * //linearly ramp to the value 4 over 3 seconds. \n\t * signal.linearRampToValue(4, 3);\n\t */\n\tTone.Param.prototype.linearRampToValue = function(value, rampTime, startTime){\n\t\tstartTime = this.toSeconds(startTime);\n\t\tthis.setRampPoint(startTime);\n\t\tthis.linearRampToValueAtTime(value, startTime + this.toSeconds(rampTime));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Start exponentially approaching the target value at the given time with\n\t * a rate having the given time constant.\n\t * @param {number} value \n\t * @param {Time} startTime \n\t * @param {number} timeConstant \n\t * @returns {Tone.Param} this \n\t */\n\tTone.Param.prototype.setTargetAtTime = function(value, startTime, timeConstant){\n\t\tvalue = this._fromUnits(value);\n\t\t// The value will never be able to approach without timeConstant > 0.\n\t\t// http://www.w3.org/TR/webaudio/#dfn-setTargetAtTime, where the equation\n\t\t// is described. 
0 results in a division by 0.\n\t\tvalue = Math.max(this._minOutput, value);\n\t\ttimeConstant = Math.max(this._minOutput, timeConstant);\n\t\tthis._param.setTargetAtTime(value, this.toSeconds(startTime), timeConstant);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Sets an array of arbitrary parameter values starting at the given time\n\t * for the given duration.\n\t * \t\n\t * @param {Array} values \n\t * @param {Time} startTime \n\t * @param {Time} duration \n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.setValueCurveAtTime = function(values, startTime, duration){\n\t\tfor (var i = 0; i < values.length; i++){\n\t\t\tvalues[i] = this._fromUnits(values[i]);\n\t\t}\n\t\tthis._param.setValueCurveAtTime(values, this.toSeconds(startTime), this.toSeconds(duration));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Cancels all scheduled parameter changes with times greater than or \n\t * equal to startTime.\n\t * \n\t * @param {Time} startTime\n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.cancelScheduledValues = function(startTime){\n\t\tthis._param.cancelScheduledValues(this.toSeconds(startTime));\n\t\treturn this;\n\t};\n\n\t/**\n\t * Ramps to the given value over the duration of the rampTime. \n\t * Automatically selects the best ramp type (exponential or linear)\n\t * depending on the `units` of the signal\n\t * \n\t * @param {number} value \n\t * @param {Time} rampTime \tThe time that it takes the \n\t * value to ramp from it's current value\n\t * @param {Time}\t[startTime=now] \tWhen the ramp should start. 
\n\t * @returns {Tone.Param} this\n\t * @example\n\t * //ramp to the value either linearly or exponentially \n\t * //depending on the \"units\" value of the signal\n\t * signal.rampTo(0, 10);\n\t * @example\n\t * //schedule it to ramp starting at a specific time\n\t * signal.rampTo(0, 10, 5)\n\t */\n\tTone.Param.prototype.rampTo = function(value, rampTime, startTime){\n\t\trampTime = this.defaultArg(rampTime, 0);\n\t\tif (this.units === Tone.Type.Frequency || this.units === Tone.Type.BPM || this.units === Tone.Type.Decibels){\n\t\t\tthis.exponentialRampToValue(value, rampTime, startTime);\n\t\t} else {\n\t\t\tthis.linearRampToValue(value, rampTime, startTime);\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * The LFO created by the signal instance. If none\n\t * was created, this is null.\n\t * @type {Tone.LFO}\n\t * @readOnly\n\t * @memberOf Tone.Param#\n\t * @name lfo\n\t */\n\tObject.defineProperty(Tone.Param.prototype, \"lfo\", {\n\t\tget : function(){\n\t\t\treturn this._lfo;\n\t\t}\n\t});\n\n\t/**\n\t * Clean up\n\t * @returns {Tone.Param} this\n\t */\n\tTone.Param.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._param = null;\n\t\tif (this._lfo){\n\t\t\tthis._lfo.dispose();\n\t\t\tthis._lfo = null;\n\t\t}\n\t\treturn this;\n\t};\n\n\treturn Tone.Param;\n});","'use strict';\n\ndefine(function (require) {\n var p5sound = require('master');\n\n var Add = require('Tone/signal/Add');\n var Mult = require('Tone/signal/Multiply');\n var Scale = require('Tone/signal/Scale');\n\n /**\n * Creates a signal that oscillates between -1.0 and 1.0.\n * By default, the oscillation takes the form of a sinusoidal\n * shape ('sine'). Additional types include 'triangle',\n * 'sawtooth' and 'square'. The frequency defaults to\n * 440 oscillations per second (440Hz, equal to the pitch of an\n * 'A' note).
\n *\n *Set the type of oscillation with setType(), or by instantiating a\n * specific oscillator: p5.SinOsc, p5.TriOsc, p5.SqrOsc, or p5.SawOsc.\n *
\n *\n * @class p5.Oscillator\n * @constructor\n * @param {Number} [freq] frequency defaults to 440Hz\n * @param {String} [type] type of oscillator. Options:\n * 'sine' (default), 'triangle',\n * 'sawtooth', 'square'\n * @example\n *\n * var osc;\n * var playing = false;\n *\n * function setup() {\n * backgroundColor = color(255,0,255);\n * textAlign(CENTER);\n *\n * osc = new p5.Oscillator();\n * osc.setType('sine');\n * osc.freq(240);\n * osc.amp(0);\n * osc.start();\n * }\n *\n * function draw() {\n * background(backgroundColor)\n * text('click to play', width/2, height/2);\n * }\n *\n * function mouseClicked() {\n * if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) {\n * if (!playing) {\n * // ramp amplitude to 0.5 over 0.05 seconds\n * osc.amp(0.5, 0.05);\n * playing = true;\n * backgroundColor = color(0,255,255);\n * } else {\n * // ramp amplitude to 0 over 0.5 seconds\n * osc.amp(0, 0.5);\n * playing = false;\n * backgroundColor = color(255,0,255);\n * }\n * }\n * }\n *
\n * var osc = new p5.Oscillator(300);\n * osc.start();\n * osc.freq(40, 10);\n *
new p5.SinOsc()
.\n * This creates a Sine Wave Oscillator and is\n * equivalent to new p5.Oscillator('sine')\n *
or creating a p5.Oscillator and then calling\n * its method setType('sine')
.\n * See p5.Oscillator for methods.\n *\n * @class p5.SinOsc\n * @constructor\n * @extends p5.Oscillator\n * @param {Number} [freq] Set the frequency\n */\n p5.SinOsc = function(freq) {\n p5.Oscillator.call(this, freq, 'sine');\n };\n\n p5.SinOsc.prototype = Object.create(p5.Oscillator.prototype);\n\n /**\n * Constructor: new p5.TriOsc()
.\n * This creates a Triangle Wave Oscillator and is\n * equivalent to new p5.Oscillator('triangle')\n *
or creating a p5.Oscillator and then calling\n * its method setType('triangle')
.\n * See p5.Oscillator for methods.\n *\n * @class p5.TriOsc\n * @constructor\n * @extends p5.Oscillator\n * @param {Number} [freq] Set the frequency\n */\n p5.TriOsc = function(freq) {\n p5.Oscillator.call(this, freq, 'triangle');\n };\n\n p5.TriOsc.prototype = Object.create(p5.Oscillator.prototype);\n\n /**\n * Constructor: new p5.SawOsc()
.\n * This creates a SawTooth Wave Oscillator and is\n * equivalent to new p5.Oscillator('sawtooth')\n *
or creating a p5.Oscillator and then calling\n * its method setType('sawtooth')
.\n * See p5.Oscillator for methods.\n *\n * @class p5.SawOsc\n * @constructor\n * @extends p5.Oscillator\n * @param {Number} [freq] Set the frequency\n */\n p5.SawOsc = function(freq) {\n p5.Oscillator.call(this, freq, 'sawtooth');\n };\n\n p5.SawOsc.prototype = Object.create(p5.Oscillator.prototype);\n\n /**\n * Constructor: new p5.SqrOsc()
.\n * This creates a Square Wave Oscillator and is\n * equivalent to new p5.Oscillator('square')\n *
or creating a p5.Oscillator and then calling\n * its method setType('square')
.\n * See p5.Oscillator for methods.\n *\n * @class p5.SqrOsc\n * @constructor\n * @extends p5.Oscillator\n * @param {Number} [freq] Set the frequency\n */\n p5.SqrOsc = function(freq) {\n p5.Oscillator.call(this, freq, 'square');\n };\n\n p5.SqrOsc.prototype = Object.create(p5.Oscillator.prototype);\n\n});\n","define([\"Tone/core/Tone\", \"Tone/type/Type\"], function (Tone) {\n\n\t\"use strict\";\n\n\t/**\n\t * @class A Timeline class for scheduling and maintaining state\n\t * along a timeline. All events must have a \"time\" property. \n\t * Internally, events are stored in time order for fast \n\t * retrieval.\n\t * @extends {Tone}\n\t * @param {Positive} [memory=Infinity] The number of previous events that are retained.\n\t */\n\tTone.Timeline = function(){\n\n\t\tvar options = this.optionsObject(arguments, [\"memory\"], Tone.Timeline.defaults);\n\n\t\t/**\n\t\t * The array of scheduled timeline events\n\t\t * @type {Array}\n\t\t * @private\n\t\t */\n\t\tthis._timeline = [];\n\n\t\t/**\n\t\t * An array of items to remove from the list. \n\t\t * @type {Array}\n\t\t * @private\n\t\t */\n\t\tthis._toRemove = [];\n\n\t\t/**\n\t\t * Flag if the tieline is mid iteration\n\t\t * @private\n\t\t * @type {Boolean}\n\t\t */\n\t\tthis._iterating = false;\n\n\t\t/**\n\t\t * The memory of the timeline, i.e.\n\t\t * how many events in the past it will retain\n\t\t * @type {Positive}\n\t\t */\n\t\tthis.memory = options.memory;\n\t};\n\n\tTone.extend(Tone.Timeline);\n\n\t/**\n\t * the default parameters\n\t * @static\n\t * @const\n\t */\n\tTone.Timeline.defaults = {\n\t\t\"memory\" : Infinity\n\t};\n\n\t/**\n\t * The number of items in the timeline.\n\t * @type {Number}\n\t * @memberOf Tone.Timeline#\n\t * @name length\n\t * @readOnly\n\t */\n\tObject.defineProperty(Tone.Timeline.prototype, \"length\", {\n\t\tget : function(){\n\t\t\treturn this._timeline.length;\n\t\t}\n\t});\n\n\t/**\n\t * Insert an event object onto the timeline. 
Events must have a \"time\" attribute.\n\t * @param {Object} event The event object to insert into the \n\t * timeline. \n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.add = function(event){\n\t\t//the event needs to have a time attribute\n\t\tif (this.isUndef(event.time)){\n\t\t\tthrow new Error(\"Tone.Timeline: events must have a time attribute\");\n\t\t}\n\t\tif (this._timeline.length){\n\t\t\tvar index = this._search(event.time);\n\t\t\tthis._timeline.splice(index + 1, 0, event);\n\t\t} else {\n\t\t\tthis._timeline.push(event);\t\t\t\n\t\t}\n\t\t//if the length is more than the memory, remove the previous ones\n\t\tif (this.length > this.memory){\n\t\t\tvar diff = this.length - this.memory;\n\t\t\tthis._timeline.splice(0, diff);\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Remove an event from the timeline.\n\t * @param {Object} event The event object to remove from the list.\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.remove = function(event){\n\t\tif (this._iterating){\n\t\t\tthis._toRemove.push(event);\n\t\t} else {\n\t\t\tvar index = this._timeline.indexOf(event);\n\t\t\tif (index !== -1){\n\t\t\t\tthis._timeline.splice(index, 1);\n\t\t\t}\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Get the nearest event whose time is less than or equal to the given time.\n\t * @param {Number} time The time to query.\n\t * @returns {Object} The event object set after that time.\n\t */\n\tTone.Timeline.prototype.get = function(time){\n\t\tvar index = this._search(time);\n\t\tif (index !== -1){\n\t\t\treturn this._timeline[index];\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t};\n\n\t/**\n\t * Return the first event in the timeline without removing it\n\t * @returns {Object} The first event object\n\t */\n\tTone.Timeline.prototype.peek = function(){\n\t\treturn this._timeline[0];\n\t};\n\n\t/**\n\t * Return the first event in the timeline and remove it\n\t * @returns {Object} The first event object\n\t 
*/\n\tTone.Timeline.prototype.shift = function(){\n\t\treturn this._timeline.shift();\n\t};\n\n\t/**\n\t * Get the event which is scheduled after the given time.\n\t * @param {Number} time The time to query.\n\t * @returns {Object} The event object after the given time\n\t */\n\tTone.Timeline.prototype.getAfter = function(time){\n\t\tvar index = this._search(time);\n\t\tif (index + 1 < this._timeline.length){\n\t\t\treturn this._timeline[index + 1];\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t};\n\n\t/**\n\t * Get the event before the event at the given time.\n\t * @param {Number} time The time to query.\n\t * @returns {Object} The event object before the given time\n\t */\n\tTone.Timeline.prototype.getBefore = function(time){\n\t\tvar len = this._timeline.length;\n\t\t//if it's after the last item, return the last item\n\t\tif (len > 0 && this._timeline[len - 1].time < time){\n\t\t\treturn this._timeline[len - 1];\n\t\t}\n\t\tvar index = this._search(time);\n\t\tif (index - 1 >= 0){\n\t\t\treturn this._timeline[index - 1];\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t};\n\n\t/**\n\t * Cancel events after the given time\n\t * @param {Number} time The time to query.\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.cancel = function(after){\n\t\tif (this._timeline.length > 1){\n\t\t\tvar index = this._search(after);\n\t\t\tif (index >= 0){\n\t\t\t\tif (this._timeline[index].time === after){\n\t\t\t\t\t//get the first item with that time\n\t\t\t\t\tfor (var i = index; i >= 0; i--){\n\t\t\t\t\t\tif (this._timeline[i].time === after){\n\t\t\t\t\t\t\tindex = i;\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tthis._timeline = this._timeline.slice(0, index);\n\t\t\t\t} else {\n\t\t\t\t\tthis._timeline = this._timeline.slice(0, index + 1);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tthis._timeline = [];\n\t\t\t}\n\t\t} else if (this._timeline.length === 1){\n\t\t\t//the first item's time\n\t\t\tif (this._timeline[0].time >= 
after){\n\t\t\t\tthis._timeline = [];\n\t\t\t}\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Cancel events before or equal to the given time.\n\t * @param {Number} time The time to cancel before.\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.cancelBefore = function(time){\n\t\tif (this._timeline.length){\n\t\t\tvar index = this._search(time);\n\t\t\tif (index >= 0){\n\t\t\t\tthis._timeline = this._timeline.slice(index + 1);\n\t\t\t}\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Does a binary serach on the timeline array and returns the \n\t * nearest event index whose time is after or equal to the given time.\n\t * If a time is searched before the first index in the timeline, -1 is returned.\n\t * If the time is after the end, the index of the last item is returned.\n\t * @param {Number} time \n\t * @return {Number} the index in the timeline array \n\t * @private\n\t */\n\tTone.Timeline.prototype._search = function(time){\n\t\tvar beginning = 0;\n\t\tvar len = this._timeline.length;\n\t\tvar end = len;\n\t\tif (len > 0 && this._timeline[len - 1].time <= time){\n\t\t\treturn len - 1;\n\t\t}\n\t\twhile (beginning < end){\n\t\t\t// calculate the midpoint for roughly equal partition\n\t\t\tvar midPoint = Math.floor(beginning + (end - beginning) / 2);\n\t\t\tvar event = this._timeline[midPoint];\n\t\t\tvar nextEvent = this._timeline[midPoint + 1];\n\t\t\tif (event.time === time){\n\t\t\t\t//choose the last one that has the same time\n\t\t\t\tfor (var i = midPoint; i < this._timeline.length; i++){\n\t\t\t\t\tvar testEvent = this._timeline[i];\n\t\t\t\t\tif (testEvent.time === time){\n\t\t\t\t\t\tmidPoint = i;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn midPoint;\n\t\t\t} else if (event.time < time && nextEvent.time > time){\n\t\t\t\treturn midPoint;\n\t\t\t} else if (event.time > time){\n\t\t\t\t//search lower\n\t\t\t\tend = midPoint;\n\t\t\t} else if (event.time < time){\n\t\t\t\t//search upper\n\t\t\t\tbeginning = midPoint + 1;\n\t\t\t} 
\n\t\t}\n\t\treturn -1;\n\t};\n\n\t/**\n\t * Internal iterator. Applies extra safety checks for \n\t * removing items from the array. \n\t * @param {Function} callback \n\t * @param {Number=} lowerBound \n\t * @param {Number=} upperBound \n\t * @private\n\t */\n\tTone.Timeline.prototype._iterate = function(callback, lowerBound, upperBound){\n\t\tthis._iterating = true;\n\t\tlowerBound = this.defaultArg(lowerBound, 0);\n\t\tupperBound = this.defaultArg(upperBound, this._timeline.length - 1);\n\t\tfor (var i = lowerBound; i <= upperBound; i++){\n\t\t\tcallback(this._timeline[i]);\n\t\t}\n\t\tthis._iterating = false;\n\t\tif (this._toRemove.length > 0){\n\t\t\tfor (var j = 0; j < this._toRemove.length; j++){\n\t\t\t\tvar index = this._timeline.indexOf(this._toRemove[j]);\n\t\t\t\tif (index !== -1){\n\t\t\t\t\tthis._timeline.splice(index, 1);\n\t\t\t\t}\n\t\t\t}\n\t\t\tthis._toRemove = [];\n\t\t}\n\t};\n\n\t/**\n\t * Iterate over everything in the array\n\t * @param {Function} callback The callback to invoke with every item\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.forEach = function(callback){\n\t\tthis._iterate(callback);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Iterate over everything in the array at or before the given time.\n\t * @param {Number} time The time to check if items are before\n\t * @param {Function} callback The callback to invoke with every item\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.forEachBefore = function(time, callback){\n\t\t//iterate over the items in reverse so that removing an item doesn't break things\n\t\tvar upperBound = this._search(time);\n\t\tif (upperBound !== -1){\n\t\t\tthis._iterate(callback, 0, upperBound);\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Iterate over everything in the array after the given time.\n\t * @param {Number} time The time to check if items are before\n\t * @param {Function} callback The callback to invoke with every item\n\t * @returns {Tone.Timeline} 
this\n\t */\n\tTone.Timeline.prototype.forEachAfter = function(time, callback){\n\t\t//iterate over the items in reverse so that removing an item doesn't break things\n\t\tvar lowerBound = this._search(time);\n\t\tthis._iterate(callback, lowerBound + 1);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Iterate over everything in the array at or after the given time. Similar to \n\t * forEachAfter, but includes the item(s) at the given time.\n\t * @param {Number} time The time to check if items are before\n\t * @param {Function} callback The callback to invoke with every item\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.forEachFrom = function(time, callback){\n\t\t//iterate over the items in reverse so that removing an item doesn't break things\n\t\tvar lowerBound = this._search(time);\n\t\t//work backwards until the event time is less than time\n\t\twhile (lowerBound >= 0 && this._timeline[lowerBound].time >= time){\n\t\t\tlowerBound--;\n\t\t}\n\t\tthis._iterate(callback, lowerBound + 1);\n\t\treturn this;\n\t};\n\n\t/**\n\t * Iterate over everything in the array at the given time\n\t * @param {Number} time The time to check if items are before\n\t * @param {Function} callback The callback to invoke with every item\n\t * @returns {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.forEachAtTime = function(time, callback){\n\t\t//iterate over the items in reverse so that removing an item doesn't break things\n\t\tvar upperBound = this._search(time);\n\t\tif (upperBound !== -1){\n\t\t\tthis._iterate(function(event){\n\t\t\t\tif (event.time === time){\n\t\t\t\t\tcallback(event);\n\t\t\t\t} \n\t\t\t}, 0, upperBound);\n\t\t}\n\t\treturn this;\n\t};\n\n\t/**\n\t * Clean up.\n\t * @return {Tone.Timeline} this\n\t */\n\tTone.Timeline.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._timeline = null;\n\t\tthis._toRemove = null;\n\t};\n\n\treturn Tone.Timeline;\n});","define([\"Tone/core/Tone\", \"Tone/signal/Multiply\", 
\"Tone/signal/Signal\"], function(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class Negate the incoming signal. i.e. an input signal of 10 will output -10\n\t *\n\t * @constructor\n\t * @extends {Tone.SignalBase}\n\t * @example\n\t * var neg = new Tone.Negate();\n\t * var sig = new Tone.Signal(-2).connect(neg);\n\t * //output of neg is positive 2. \n\t */\n\tTone.Negate = function(){\n\t\t/**\n\t\t * negation is done by multiplying by -1\n\t\t * @type {Tone.Multiply}\n\t\t * @private\n\t\t */\n\t\tthis._multiply = this.input = this.output = new Tone.Multiply(-1);\n\t};\n\n\tTone.extend(Tone.Negate, Tone.SignalBase);\n\n\t/**\n\t * clean up\n\t * @returns {Tone.Negate} this\n\t */\n\tTone.Negate.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._multiply.dispose();\n\t\tthis._multiply = null;\n\t\treturn this;\n\t}; \n\n\treturn Tone.Negate;\n});","define([\"Tone/core/Tone\", \"Tone/signal/Signal\", \"Tone/signal/Multiply\", \"Tone/signal/WaveShaper\"], \nfunction(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class GreaterThanZero outputs 1 when the input is strictly greater than zero\n\t * \n\t * @constructor\n\t * @extends {Tone.SignalBase}\n\t * @example\n\t * var gt0 = new Tone.GreaterThanZero();\n\t * var sig = new Tone.Signal(0.01).connect(gt0);\n\t * //the output of gt0 is 1. \n\t * sig.value = 0;\n\t * //the output of gt0 is 0. 
\n\t */\n\tTone.GreaterThanZero = function(){\n\t\t\n\t\t/**\n\t\t * @type {Tone.WaveShaper}\n\t\t * @private\n\t\t */\n\t\tthis._thresh = this.output = new Tone.WaveShaper(function(val){\n\t\t\tif (val <= 0){\n\t\t\t\treturn 0;\n\t\t\t} else {\n\t\t\t\treturn 1;\n\t\t\t}\n\t\t}, 127);\n\n\t\t/**\n\t\t * scale the first thresholded signal by a large value.\n\t\t * this will help with values which are very close to 0\n\t\t * @type {Tone.Multiply}\n\t\t * @private\n\t\t */\n\t\tthis._scale = this.input = new Tone.Multiply(10000);\n\n\t\t//connections\n\t\tthis._scale.connect(this._thresh);\n\t};\n\n\tTone.extend(Tone.GreaterThanZero, Tone.SignalBase);\n\n\t/**\n\t * dispose method\n\t * @returns {Tone.GreaterThanZero} this\n\t */\n\tTone.GreaterThanZero.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._scale.dispose();\n\t\tthis._scale = null;\n\t\tthis._thresh.dispose();\n\t\tthis._thresh = null;\n\t\treturn this;\n\t};\n\n\treturn Tone.GreaterThanZero;\n});","define([\"Tone/core/Tone\", \"Tone/signal/TimelineSignal\", \"Tone/core/TimelineState\", \n\t\"Tone/core/Emitter\", \"Tone/core/Context\"], function (Tone) {\n\n\t\"use strict\";\n\n\t/**\n\t * @class A sample accurate clock which provides a callback at the given rate. \n\t * While the callback is not sample-accurate (it is still susceptible to\n\t * loose JS timing), the time passed in as the argument to the callback\n\t * is precise. 
For most applications, it is better to use Tone.Transport\n\t * instead of the Clock by itself since you can synchronize multiple callbacks.\n\t *\n\t * \t@constructor\n\t * @extends {Tone.Emitter}\n\t * \t@param {function} callback The callback to be invoked with the time of the audio event\n\t * \t@param {Frequency} frequency The rate of the callback\n\t * \t@example\n\t * //the callback will be invoked approximately once a second\n\t * //and will print the time exactly once a second apart.\n\t * var clock = new Tone.Clock(function(time){\n\t * \tconsole.log(time);\n\t * }, 1);\n\t */\n\tTone.Clock = function(){\n\n\t\tTone.Emitter.call(this);\n\n\t\tvar options = this.optionsObject(arguments, [\"callback\", \"frequency\"], Tone.Clock.defaults);\n\n\t\t/**\n\t\t * The callback function to invoke at the scheduled tick.\n\t\t * @type {Function}\n\t\t */\n\t\tthis.callback = options.callback;\n\n\t\t/**\n\t\t * The next time the callback is scheduled.\n\t\t * @type {Number}\n\t\t * @private\n\t\t */\n\t\tthis._nextTick = 0;\n\n\t\t/**\n\t\t * The last state of the clock.\n\t\t * @type {State}\n\t\t * @private\n\t\t */\n\t\tthis._lastState = Tone.State.Stopped;\n\n\t\t/**\n\t\t * The rate the callback function should be invoked. \n\t\t * @type {BPM}\n\t\t * @signal\n\t\t */\n\t\tthis.frequency = new Tone.TimelineSignal(options.frequency, Tone.Type.Frequency);\n\t\tthis._readOnly(\"frequency\");\n\n\t\t/**\n\t\t * The number of times the callback was invoked. Starts counting at 0\n\t\t * and increments after the callback was invoked. \n\t\t * @type {Ticks}\n\t\t * @readOnly\n\t\t */\n\t\tthis.ticks = 0;\n\n\t\t/**\n\t\t * The state timeline\n\t\t * @type {Tone.TimelineState}\n\t\t * @private\n\t\t */\n\t\tthis._state = new Tone.TimelineState(Tone.State.Stopped);\n\n\t\t/**\n\t\t * The loop function bound to its context. 
\n\t\t * This is necessary to remove the event in the end.\n\t\t * @type {Function}\n\t\t * @private\n\t\t */\n\t\tthis._boundLoop = this._loop.bind(this);\n\n\t\t//bind a callback to the worker thread\n \tthis.context.on(\"tick\", this._boundLoop);\n\t};\n\n\tTone.extend(Tone.Clock, Tone.Emitter);\n\n\t/**\n\t * The defaults\n\t * @const\n\t * @type {Object}\n\t */\n\tTone.Clock.defaults = {\n\t\t\"callback\" : Tone.noOp,\n\t\t\"frequency\" : 1,\n\t\t\"lookAhead\" : \"auto\",\n\t};\n\n\t/**\n\t * Returns the playback state of the source, either \"started\", \"stopped\" or \"paused\".\n\t * @type {Tone.State}\n\t * @readOnly\n\t * @memberOf Tone.Clock#\n\t * @name state\n\t */\n\tObject.defineProperty(Tone.Clock.prototype, \"state\", {\n\t\tget : function(){\n\t\t\treturn this._state.getValueAtTime(this.now());\n\t\t}\n\t});\n\n\t/**\n\t * Start the clock at the given time. Optionally pass in an offset\n\t * of where to start the tick counter from.\n\t * @param {Time} time The time the clock should start\n\t * @param {Ticks=} offset Where the tick counter starts counting from.\n\t * @return {Tone.Clock} this\n\t */\n\tTone.Clock.prototype.start = function(time, offset){\n\t\ttime = this.toSeconds(time);\n\t\tif (this._state.getValueAtTime(time) !== Tone.State.Started){\n\t\t\tthis._state.add({\n\t\t\t\t\"state\" : Tone.State.Started, \n\t\t\t\t\"time\" : time,\n\t\t\t\t\"offset\" : offset\n\t\t\t});\n\t\t}\n\t\treturn this;\t\n\t};\n\n\t/**\n\t * Stop the clock. Stopping the clock resets the tick counter to 0.\n\t * @param {Time} [time=now] The time when the clock should stop.\n\t * @returns {Tone.Clock} this\n\t * @example\n\t * clock.stop();\n\t */\n\tTone.Clock.prototype.stop = function(time){\n\t\ttime = this.toSeconds(time);\n\t\tthis._state.cancel(time);\n\t\tthis._state.setStateAtTime(Tone.State.Stopped, time);\n\t\treturn this;\t\n\t};\n\n\n\t/**\n\t * Pause the clock. 
Pausing does not reset the tick counter.\n\t * @param {Time} [time=now] The time when the clock should stop.\n\t * @returns {Tone.Clock} this\n\t */\n\tTone.Clock.prototype.pause = function(time){\n\t\ttime = this.toSeconds(time);\n\t\tif (this._state.getValueAtTime(time) === Tone.State.Started){\n\t\t\tthis._state.setStateAtTime(Tone.State.Paused, time);\n\t\t}\n\t\treturn this;\t\n\t};\n\n\t/**\n\t * The scheduling loop.\n\t * @param {Number} time The current page time starting from 0\n\t * when the page was loaded.\n\t * @private\n\t */\n\tTone.Clock.prototype._loop = function(){\n\t\t//get the frequency value to compute the value of the next loop\n\t\tvar now = this.now();\n\t\t//if it's started\n\t\tvar lookAhead = this.context.lookAhead;\n\t\tvar updateInterval = this.context.updateInterval;\n\t\tvar lagCompensation = this.context.lag * 2;\n\t\tvar loopInterval = now + lookAhead + updateInterval + lagCompensation;\n\t\twhile (loopInterval > this._nextTick && this._state){\n\t\t\tvar currentState = this._state.getValueAtTime(this._nextTick);\n\t\t\tif (currentState !== this._lastState){\n\t\t\t\tthis._lastState = currentState;\n\t\t\t\tvar event = this._state.get(this._nextTick);\n\t\t\t\t// emit an event\n\t\t\t\tif (currentState === Tone.State.Started){\n\t\t\t\t\t//correct the time\n\t\t\t\t\tthis._nextTick = event.time;\n\t\t\t\t\tif (!this.isUndef(event.offset)){\n\t\t\t\t\t\tthis.ticks = event.offset;\n\t\t\t\t\t}\n\t\t\t\t\tthis.emit(\"start\", event.time, this.ticks);\n\t\t\t\t} else if (currentState === Tone.State.Stopped){\n\t\t\t\t\tthis.ticks = 0;\n\n\t\t\t\t\tthis.emit(\"stop\", event.time);\n\t\t\t\t} else if (currentState === Tone.State.Paused){\n\t\t\t\t\tthis.emit(\"pause\", event.time);\n\t\t\t\t}\n\t\t\t}\n\t\t\tvar tickTime = this._nextTick;\n\t\t\tif (this.frequency){\n\t\t\t\tthis._nextTick += 1 / this.frequency.getValueAtTime(this._nextTick);\n\t\t\t\tif (currentState === 
Tone.State.Started){\n\t\t\t\t\tthis.callback(tickTime);\n\t\t\t\t\tthis.ticks++;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t};\n\n\t/**\n\t * Returns the scheduled state at the given time.\n\t * @param {Time} time The time to query.\n\t * @return {String} The name of the state input in setStateAtTime.\n\t * @example\n\t * clock.start(\"+0.1\");\n\t * clock.getStateAtTime(\"+0.1\"); //returns \"started\"\n\t */\n\tTone.Clock.prototype.getStateAtTime = function(time){\n\t\ttime = this.toSeconds(time);\n\t\treturn this._state.getValueAtTime(time);\n\t};\n\n\t/**\n\t * Clean up\n\t * @returns {Tone.Clock} this\n\t */\n\tTone.Clock.prototype.dispose = function(){\n\t\tTone.Emitter.prototype.dispose.call(this);\n\t\tthis.context.off(\"tick\", this._boundLoop);\n\t\tthis._writable(\"frequency\");\n\t\tthis.frequency.dispose();\n\t\tthis.frequency = null;\n\t\tthis._boundLoop = null;\n\t\tthis._nextTick = Infinity;\n\t\tthis.callback = null;\n\t\tthis._state.dispose();\n\t\tthis._state = null;\n\t};\n\n\treturn Tone.Clock;\n});","'use strict';\ndefine(function (require) {\n\n var p5sound = require('master');\n var AudioVoice = require('audioVoice');\n var noteToFreq = require('helpers').noteToFreq;\n\n var DEFAULT_SUSTAIN = 0.15;\n\n /**\n * A MonoSynth is used as a single voice for sound synthesis.\n * This is a class to be used in conjunction with the PolySynth\n * class. 
Custom synthetisers should be built inheriting from\n * this class.\n *\n * @class p5.MonoSynth\n * @constructor\n * @example\n * \n * var monoSynth;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * cnv.mousePressed(playSynth);\n *\n * monoSynth = new p5.MonoSynth();\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n * }\n *\n * function playSynth() {\n * // time from now (in seconds)\n * var time = 0;\n * // note duration (in seconds)\n * var dur = 0.25;\n * // velocity (volume, from 0 to 1)\n * var v = 0.2;\n *\n * monoSynth.play(\"G3\", v, time, dur);\n * monoSynth.play(\"C4\", v, time += dur, dur);\n *\n * background(random(255), random(255), 255);\n * text('click to play', width/2, height/2);\n * }\n *
\n * var monoSynth;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * cnv.mousePressed(playSynth);\n *\n * monoSynth = new p5.MonoSynth();\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n * }\n *\n * function playSynth() {\n * // time from now (in seconds)\n * var time = 0;\n * // note duration (in seconds)\n * var dur = 1/6;\n * // note velocity (volume, from 0 to 1)\n * var v = random();\n *\n * monoSynth.play(\"Fb3\", v, 0, dur);\n * monoSynth.play(\"Gb3\", v, time += dur, dur);\n *\n * background(random(255), random(255), 255);\n * text('click to play', width/2, height/2);\n * }\n *
\n * var monoSynth = new p5.MonoSynth();\n *\n * function mousePressed() {\n * monoSynth.triggerAttack(\"E3\");\n * }\n *\n * function mouseReleased() {\n * monoSynth.triggerRelease();\n * }\n *
\n * var monoSynth = new p5.MonoSynth();\n *\n * function mousePressed() {\n * monoSynth.triggerAttack(\"E3\");\n * }\n *\n * function mouseReleased() {\n * monoSynth.triggerRelease();\n * }\n *
setRange
),\n * then decayLevel would increase proportionally, to become 0.5.\n * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)\n */\n p5.MonoSynth.prototype.setADSR = function (attack,decay,sustain,release) {\n this.env.setADSR(attack, decay, sustain, release);\n };\n\n\n /**\n * Getters and Setters\n * @property {Number} attack\n * @for p5.MonoSynth\n */\n /**\n * @property {Number} decay\n * @for p5.MonoSynth\n */\n /**\n * @property {Number} sustain\n * @for p5.MonoSynth\n */\n /**\n * @property {Number} release\n * @for p5.MonoSynth\n */\n Object.defineProperties(p5.MonoSynth.prototype, {\n 'attack': {\n get : function() {\n return this.env.aTime;\n },\n set : function(attack) {\n this.env.setADSR(attack, this.env.dTime,\n this.env.sPercent, this.env.rTime);\n }\n },\n 'decay': {\n get : function() {\n return this.env.dTime;\n },\n set : function(decay) {\n this.env.setADSR(this.env.aTime, decay,\n this.env.sPercent, this.env.rTime);\n }\n },\n 'sustain': {\n get : function() {\n return this.env.sPercent;\n },\n set : function(sustain) {\n this.env.setADSR(this.env.aTime, this.env.dTime,\n sustain, this.env.rTime);\n }\n },\n 'release': {\n get : function() {\n return this.env.rTime;\n },\n set : function(release) {\n this.env.setADSR(this.env.aTime, this.env.dTime,\n this.env.sPercent, release);\n }\n },\n });\n\n\n /**\n * MonoSynth amp\n * @method amp\n * @for p5.MonoSynth\n * @param {Number} vol desired volume\n * @param {Number} [rampTime] Time to reach new volume\n * @return {Number} new volume value\n */\n p5.MonoSynth.prototype.amp = function(vol, rampTime) {\n var t = rampTime || 0;\n if (typeof vol !== 'undefined') {\n this.oscillator.amp(vol, t);\n }\n return this.oscillator.amp().value;\n };\n\n /**\n * Connect to a p5.sound / Web Audio object.\n *\n * @method connect\n * @for p5.MonoSynth\n * @param {Object} unit A p5.sound or Web Audio object\n */\n\n p5.MonoSynth.prototype.connect = function(unit) {\n var u = unit || 
p5sound.input;\n this.output.connect(u.input ? u.input : u);\n };\n\n /**\n * Disconnect all outputs\n *\n * @method disconnect\n * @for p5.MonoSynth\n */\n p5.MonoSynth.prototype.disconnect = function() {\n if (this.output) {\n this.output.disconnect();\n }\n };\n\n\n /**\n * Get rid of the MonoSynth and free up its resources / memory.\n *\n * @method dispose\n * @for p5.MonoSynth\n */\n p5.MonoSynth.prototype.dispose = function() {\n AudioVoice.prototype.dispose.apply(this);\n\n if (this.env) {\n this.env.dispose();\n }\n if (this.oscillator) {\n this.oscillator.dispose();\n }\n };\n\n});\n","'use strict';\ndefine(function() {\n var p5sound = require('master');\n\n /**\n * Base class for monophonic synthesizers. Any extensions of this class\n * should follow the API and implement the methods below in order to\n * remain compatible with p5.PolySynth();\n *\n * @class p5.AudioVoice\n * @constructor\n */\n p5.AudioVoice = function () {\n\t this.ac = p5sound.audiocontext;\n\t this.output = this.ac.createGain();\n\t this.connect();\n\t p5sound.soundArray.push(this);\n };\n\n p5.AudioVoice.prototype.play = function (note, velocity, secondsFromNow, sustime) {\n };\n\n p5.AudioVoice.prototype.triggerAttack = function (note, velocity, secondsFromNow) {\n };\n\n p5.AudioVoice.prototype.triggerRelease = function (secondsFromNow) {\n };\n\n p5.AudioVoice.prototype.amp = function(vol, rampTime) {\n };\n\n /**\n * Connect to p5 objects or Web Audio Nodes\n * @method connect\n * @for p5.AudioVoice\n * @param {Object} unit\n */\n p5.AudioVoice.prototype.connect = function(unit) {\n var u = unit || p5sound.input;\n this.output.connect(u.input ? 
u.input : u);\n };\n\n /**\n * Disconnect from soundOut\n * @method disconnect\n * @for p5.AudioVoice\n */\n p5.AudioVoice.prototype.disconnect = function() {\n this.output.disconnect();\n };\n\n p5.AudioVoice.prototype.dispose = function() {\n if (this.output) {\n this.output.disconnect();\n delete this.output;\n }\n };\n\n return p5.AudioVoice;\n});\n","'use strict';\ndefine(function (require) {\n\n var p5sound = require('master');\n var TimelineSignal = require('Tone/signal/TimelineSignal');\n var noteToFreq = require('helpers').noteToFreq;\n\n /**\n * An AudioVoice is used as a single voice for sound synthesis.\n * The PolySynth class holds an array of AudioVoice, and deals\n * with voices allocations, with setting notes to be played, and\n * parameters to be set.\n *\n * @class p5.PolySynth\n * @constructor\n *\n * @param {Number} [synthVoice] A monophonic synth voice inheriting\n * the AudioVoice class. Defaults to p5.MonoSynth\n * @param {Number} [maxVoices] Number of voices, defaults to 8;\n * @example\n * \n * var polySynth;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * cnv.mousePressed(playSynth);\n *\n * polySynth = new p5.PolySynth();\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n * }\n *\n * function playSynth() {\n * // note duration (in seconds)\n * var dur = 1.5;\n *\n * // time from now (in seconds)\n * var time = 0;\n *\n * // velocity (volume, from 0 to 1)\n * var vel = 0.1;\n *\n * // notes can overlap with each other\n * polySynth.play(\"G2\", vel, 0, dur);\n * polySynth.play(\"C3\", vel, time += 1/3, dur);\n * polySynth.play(\"G3\", vel, time += 1/3, dur);\n *\n * background(random(255), random(255), 255);\n * text('click to play', width/2, height/2);\n * }\n *
\n * var polySynth;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * cnv.mousePressed(playSynth);\n *\n * polySynth = new p5.PolySynth();\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n * }\n *\n * function playSynth() {\n * // note duration (in seconds)\n * var dur = 0.1;\n *\n * // time from now (in seconds)\n * var time = 0;\n *\n * // velocity (volume, from 0 to 1)\n * var vel = 0.1;\n *\n * polySynth.play(\"G2\", vel, 0, dur);\n * polySynth.play(\"C3\", vel, 0, dur);\n * polySynth.play(\"G3\", vel, 0, dur);\n *\n * background(random(255), random(255), 255);\n * text('click to play', width/2, height/2);\n * }\n *
setRange
),\n * then decayLevel would increase proportionally, to become 0.5.\n * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)\n **/\n\n p5.PolySynth.prototype.noteADSR = function (note,a,d,s,r,timeFromNow) {\n var now = p5sound.audiocontext.currentTime;\n var timeFromNow = timeFromNow || 0;\n var t = now + timeFromNow\n this.audiovoices[ this.notes[note].getValueAtTime(t) ].setADSR(a,d,s,r);\n };\n\n\n /**\n * Set the PolySynths global envelope. This method modifies the envelopes of each\n * monosynth so that all notes are played with this envelope.\n *\n * @method setADSR\n * @for p5.PolySynth\n * @param {Number} [attackTime] Time (in seconds before envelope\n * reaches Attack Level\n * @param {Number} [decayTime] Time (in seconds) before envelope\n * reaches Decay/Sustain Level\n * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1,\n * where 1.0 = attackLevel, 0.0 = releaseLevel.\n * The susRatio determines the decayLevel and the level at which the\n * sustain portion of the envelope will sustain.\n * For example, if attackLevel is 0.4, releaseLevel is 0,\n * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is\n * increased to 1.0 (using setRange
),\n * then decayLevel would increase proportionally, to become 0.5.\n * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)\n **/\n p5.PolySynth.prototype.setADSR = function(a,d,s,r) {\n this.audiovoices.forEach(function(voice) {\n voice.setADSR(a,d,s,r);\n });\n };\n\n /**\n * Trigger the Attack, and Decay portion of a MonoSynth.\n * Similar to holding down a key on a piano, but it will\n * hold the sustain level until you let go.\n *\n * @method noteAttack\n * @for p5.PolySynth\n * @param {Number} [note] midi note on which attack should be triggered.\n * @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)/\n * @param {Number} [secondsFromNow] time from now (in seconds)\n * @example\n * \n * var polySynth = new p5.PolySynth();\n * var pitches = [\"G\", \"D\", \"G\", \"C\"];\n * var octaves = [2, 3, 4];\n *\n * function mousePressed() {\n * // play a chord: multiple notes at the same time\n * for (var i = 0; i < 4; i++) {\n * var note = random(pitches) + random(octaves);\n * polySynth.noteAttack(note, 0.1);\n * }\n * }\n *\n * function mouseReleased() {\n * // release all voices\n * polySynth.noteRelease();\n * }\n *
\n * var pitches = [\"G\", \"D\", \"G\", \"C\"];\n * var octaves = [2, 3, 4];\n * var polySynth = new p5.PolySynth();\n *\n * function mousePressed() {\n * // play a chord: multiple notes at the same time\n * for (var i = 0; i < 4; i++) {\n * var note = random(pitches) + random(octaves);\n * polySynth.noteAttack(note, 0.1);\n * }\n * }\n *\n * function mouseReleased() {\n * // release all voices\n * polySynth.noteRelease();\n * }\n *
SoundFile object with a path to a file.
\n *\n *The p5.SoundFile may not be available immediately because\n * it loads the file information asynchronously.
\n *\n *To do something with the sound as soon as it loads\n * pass the name of a function as the second parameter.
\n *\n *Only one file path is required. However, audio file formats\n * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all\n * web browsers. If you want to ensure compatability, instead of a single\n * file path, you may include an Array of filepaths, and the browser will\n * choose a format that works.
\n *\n * @class p5.SoundFile\n * @constructor\n * @param {String|Array} path path to a sound file (String). Optionally,\n * you may include multiple file formats in\n * an array. Alternately, accepts an object\n * from the HTML5 File API, or a p5.File.\n * @param {Function} [successCallback] Name of a function to call once file loads\n * @param {Function} [errorCallback] Name of a function to call if file fails to\n * load. This function will receive an error or\n * XMLHttpRequest object with information\n * about what went wrong.\n * @param {Function} [whileLoadingCallback] Name of a function to call while file\n * is loading. That function will\n * receive progress of the request to\n * load the sound file\n * (between 0 and 1) as its first\n * parameter. This progress\n * does not account for the additional\n * time needed to decode the audio data.\n *\n * @example\n *\n *\n * function preload() {\n * soundFormats('mp3', 'ogg');\n * mySound = loadSound('assets/doorbell.mp3');\n * }\n *\n * function setup() {\n * mySound.setVolume(0.1);\n * mySound.play();\n * }\n *\n *
\n * function preload() {\n * mySound = loadSound('assets/doorbell.mp3');\n * }\n *\n * function setup() {\n * mySound.setVolume(0.1);\n * mySound.play();\n * }\n *
restart
and\n * sustain
. Play Mode determines what happens to a\n * p5.SoundFile if it is triggered while in the middle of playback.\n * In sustain mode, playback will continue simultaneous to the\n * new playback. In restart mode, play() will stop playback\n * and start over. With untilDone, a sound will play only if it's\n * not already playing. Sustain is the default mode.\n *\n * @method playMode\n * @for p5.SoundFile\n * @param {String} str 'restart' or 'sustain' or 'untilDone'\n * @example\n * \n * var mySound;\n * function preload(){\n * mySound = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n * function mouseClicked() {\n * mySound.playMode('sustain');\n * mySound.play();\n * }\n * function keyPressed() {\n * mySound.playMode('restart');\n * mySound.play();\n * }\n *\n *
\n * var soundFile;\n *\n * function preload() {\n * soundFormats('ogg', 'mp3');\n * soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3');\n * }\n * function setup() {\n * background(0, 255, 0);\n * soundFile.setVolume(0.1);\n * soundFile.loop();\n * }\n * function keyTyped() {\n * if (key == 'p') {\n * soundFile.pause();\n * background(255, 0, 0);\n * }\n * }\n *\n * function keyReleased() {\n * if (key == 'p') {\n * soundFile.play();\n * background(0, 255, 0);\n * }\n * }\n *
\n * rampTime
parameter. For more\n * complex fades, see the Envelope class.\n *\n * Alternately, you can pass in a signal source such as an\n * oscillator to modulate the amplitude with an audio signal.\n *\n * @method setVolume\n * @for p5.SoundFile\n * @param {Number|Object} volume Volume (amplitude) between 0.0\n * and 1.0 or modulating signal/oscillator\n * @param {Number} [rampTime] Fade for t seconds\n * @param {Number} [timeFromNow] Schedule this event to happen at\n * t seconds in the future\n */\n p5.SoundFile.prototype.setVolume = function(vol, _rampTime, _tFromNow) {\n if (typeof vol === 'number') {\n var rampTime = _rampTime || 0;\n var tFromNow = _tFromNow || 0;\n var now = p5sound.audiocontext.currentTime;\n var currentVol = this.output.gain.value;\n this.output.gain.cancelScheduledValues(now + tFromNow);\n this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);\n this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);\n }\n else if (vol) {\n vol.connect(this.output.gain);\n } else {\n // return the Gain Node\n return this.output.gain;\n }\n };\n\n // same as setVolume, to match Processing Sound\n p5.SoundFile.prototype.amp = p5.SoundFile.prototype.setVolume;\n\n // these are the same thing\n p5.SoundFile.prototype.fade = p5.SoundFile.prototype.setVolume;\n\n p5.SoundFile.prototype.getVolume = function() {\n return this.output.gain.value;\n };\n\n /**\n * Set the stereo panning of a p5.sound object to\n * a floating point number between -1.0 (left) and 1.0 (right).\n * Default is 0.0 (center).\n *\n * @method pan\n * @for p5.SoundFile\n * @param {Number} [panValue] Set the stereo panner\n * @param {Number} [timeFromNow] schedule this event to happen\n * seconds from now\n * @example\n * \n *\n * var ball = {};\n * var soundFile;\n *\n * function preload() {\n * soundFormats('ogg', 'mp3');\n * soundFile = loadSound('assets/beatbox.mp3');\n * }\n *\n * function draw() {\n * background(0);\n * ball.x = constrain(mouseX, 0, width);\n * 
ellipse(ball.x, height/2, 20, 20)\n * }\n *\n * function mousePressed(){\n * // map the ball's x location to a panning degree\n * // between -1.0 (left) and 1.0 (right)\n * var panning = map(ball.x, 0., width,-1.0, 1.0);\n * soundFile.pan(panning);\n * soundFile.play();\n * }\n *
\n * var song;\n *\n * function preload() {\n * song = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n *\n * function setup() {\n * song.loop();\n * }\n *\n * function draw() {\n * background(200);\n *\n * // Set the rate to a range between 0.1 and 4\n * // Changing the rate also alters the pitch\n * var speed = map(mouseY, 0.1, height, 0, 2);\n * speed = constrain(speed, 0.01, 4);\n * song.rate(speed);\n *\n * // Draw a circle to show what is going on\n * stroke(0);\n * fill(51, 100);\n * ellipse(mouseX, 100, 48, 48);\n * }\n *\n *
\n * \n * var drum;\n *\n * function preload() {\n * drum = loadSound('assets/drum.mp3');\n * }\n *\n * function setup() {\n * drum.reverseBuffer();\n * drum.play();\n * }\n *\n *
\n * \n * var mySound;\n * function preload() {\n * mySound = loadSound('assets/beat.mp3');\n * }\n *\n * function setup() {\n * background(0);\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * // schedule calls to changeText\n * mySound.addCue(0.50, changeText, \"hello\" );\n * mySound.addCue(1.00, changeText, \"p5\" );\n * mySound.addCue(1.50, changeText, \"what\" );\n * mySound.addCue(2.00, changeText, \"do\" );\n * mySound.addCue(2.50, changeText, \"you\" );\n * mySound.addCue(3.00, changeText, \"want\" );\n * mySound.addCue(4.00, changeText, \"to\" );\n * mySound.addCue(5.00, changeText, \"make\" );\n * mySound.addCue(6.00, changeText, \"?\" );\n * }\n *\n * function changeText(val) {\n * background(0);\n * text(val, width/2, height/2);\n * }\n *\n * function mouseClicked() {\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * if (mySound.isPlaying() ) {\n * mySound.stop();\n * } else {\n * mySound.play();\n * }\n * }\n * }\n *
\n * var inp, button, mySound;\n * var fileName = 'cool';\n * function preload() {\n * mySound = loadSound('assets/doorbell.mp3');\n * }\n * function setup() {\n * btn = createButton('click to save file');\n * btn.position(0, 0);\n * btn.mouseClicked(handleMouseClick);\n * }\n *\n * function handleMouseClick() {\n * mySound.save(fileName);\n * }\n *
\n *\n * function preload() {\n * mySound = loadSound('assets/doorbell.mp3');\n * }\n *\n * function setup() {\n * noCanvas();\n * var soundBlob = mySound.getBlob();\n *\n * // Now we can send the blob to a server...\n * var serverUrl = 'https://jsonplaceholder.typicode.com/posts';\n * var httpRequestOptions = {\n * method: 'POST',\n * body: new FormData().append('soundBlob', soundBlob),\n * headers: new Headers({\n * 'Content-Type': 'multipart/form-data'\n * })\n * };\n * httpDo(serverUrl, httpRequestOptions);\n *\n * // We can also create an `ObjectURL` pointing to the Blob\n * var blobUrl = URL.createObjectURL(soundBlob);\n *\n * // The `
\n * var sound, amplitude, cnv;\n *\n * function preload(){\n * sound = loadSound('assets/beat.mp3');\n * }\n * function setup() {\n * cnv = createCanvas(100,100);\n * amplitude = new p5.Amplitude();\n *\n * // start / stop the sound when canvas is clicked\n * cnv.mouseClicked(function() {\n * if (sound.isPlaying() ){\n * sound.stop();\n * } else {\n * sound.play();\n * }\n * });\n * }\n * function draw() {\n * background(0);\n * fill(255);\n * var level = amplitude.getLevel();\n * var size = map(level, 0, 1, 0, 200);\n * ellipse(width/2, height/2, size, size);\n * }\n *\n *
\n * function preload(){\n * sound1 = loadSound('assets/beat.mp3');\n * sound2 = loadSound('assets/drum.mp3');\n * }\n * function setup(){\n * amplitude = new p5.Amplitude();\n * sound1.play();\n * sound2.play();\n * amplitude.setInput(sound2);\n * }\n * function draw() {\n * background(0);\n * fill(255);\n * var level = amplitude.getLevel();\n * var size = map(level, 0, 1, 0, 200);\n * ellipse(width/2, height/2, size, size);\n * }\n * function mouseClicked(){\n * sound1.stop();\n * sound2.stop();\n * }\n *
\n * function preload(){\n * sound = loadSound('assets/beat.mp3');\n * }\n * function setup() {\n * amplitude = new p5.Amplitude();\n * sound.play();\n * }\n * function draw() {\n * background(0);\n * fill(255);\n * var level = amplitude.getLevel();\n * var size = map(level, 0, 1, 0, 200);\n * ellipse(width/2, height/2, size, size);\n * }\n * function mouseClicked(){\n * sound.stop();\n * }\n *
FFT (Fast Fourier Transform) is an analysis algorithm that\n * isolates individual\n * \n * audio frequencies within a waveform.
\n *\n *Once instantiated, a p5.FFT object can return an array based on\n * two types of analyses:
• FFT.waveform()
computes\n * amplitude values along the time domain. The array indices correspond\n * to samples across a brief moment in time. Each value represents\n * amplitude of the waveform at that sample of time.
\n * • FFT.analyze()
computes amplitude values along the\n * frequency domain. The array indices correspond to frequencies (i.e.\n * pitches), from the lowest to the highest that humans can hear. Each\n * value represents amplitude at that slice of the frequency spectrum.\n * Use with getEnergy()
to measure amplitude at specific\n * frequencies, or within a range of frequencies.
FFT analyzes a very short snapshot of sound called a sample\n * buffer. It returns an array of amplitude measurements, referred\n * to as bins
. The array is 1024 bins long by default.\n * You can change the bin array length, but it must be a power of 2\n * between 16 and 1024 in order for the FFT algorithm to function\n * correctly. The actual size of the FFT buffer is twice the\n * number of bins, so given a standard sample rate, the buffer is\n * 2048/44100 seconds long.
\n * function preload(){\n * sound = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n *\n * function setup(){\n * var cnv = createCanvas(100,100);\n * cnv.mouseClicked(togglePlay);\n * fft = new p5.FFT();\n * sound.amp(0.2);\n * }\n *\n * function draw(){\n * background(0);\n *\n * var spectrum = fft.analyze();\n * noStroke();\n * fill(0,255,0); // spectrum is green\n * for (var i = 0; i< spectrum.length; i++){\n * var x = map(i, 0, spectrum.length, 0, width);\n * var h = -height + map(spectrum[i], 0, 255, height, 0);\n * rect(x, height, width / spectrum.length, h )\n * }\n *\n * var waveform = fft.waveform();\n * noFill();\n * beginShape();\n * stroke(255,0,0); // waveform is red\n * strokeWeight(1);\n * for (var i = 0; i< waveform.length; i++){\n * var x = map(i, 0, waveform.length, 0, width);\n * var y = map( waveform[i], -1, 1, 0, height);\n * vertex(x,y);\n * }\n * endShape();\n *\n * text('click to play/pause', 4, 10);\n * }\n *\n * // fade sound if mouse is over canvas\n * function togglePlay() {\n * if (sound.isPlaying()) {\n * sound.pause();\n * } else {\n * sound.loop();\n * }\n * }\n *
getEnergy()
.\n *\n * @method analyze\n * @for p5.FFT\n * @param {Number} [bins] Must be a power of two between\n * 16 and 1024. Defaults to 1024.\n * @param {Number} [scale] If \"dB,\" returns decibel\n * float measurements between\n * -140 and 0 (max).\n * Otherwise returns integers from 0-255.\n * @return {Array} spectrum Array of energy (amplitude/volume)\n * values across the frequency spectrum.\n * Lowest energy (silence) = 0, highest\n * possible is 255.\n * @example\n * \n * var osc;\n * var fft;\n *\n * function setup(){\n * createCanvas(100,100);\n * osc = new p5.Oscillator();\n * osc.amp(0);\n * osc.start();\n * fft = new p5.FFT();\n * }\n *\n * function draw(){\n * background(0);\n *\n * var freq = map(mouseX, 0, 800, 20, 15000);\n * freq = constrain(freq, 1, 20000);\n * osc.freq(freq);\n *\n * var spectrum = fft.analyze();\n * noStroke();\n * fill(0,255,0); // spectrum is green\n * for (var i = 0; i< spectrum.length; i++){\n * var x = map(i, 0, spectrum.length, 0, width);\n * var h = -height + map(spectrum[i], 0, 255, height, 0);\n * rect(x, height, width / spectrum.length, h );\n * }\n *\n * stroke(255);\n * text('Freq: ' + round(freq)+'Hz', 10, 10);\n *\n * isMouseOverCanvas();\n * }\n *\n * // only play sound when mouse is over canvas\n * function isMouseOverCanvas() {\n * var mX = mouseX, mY = mouseY;\n * if (mX > 0 && mX < width && mY < height && mY > 0) {\n * osc.amp(0.5, 0.2);\n * } else {\n * osc.amp(0, 0.2);\n * }\n * }\n *
\n *\n *\n *function setup(){\n * cnv = createCanvas(100,100);\n * sound = new p5.AudioIn();\n * sound.start();\n * fft = new p5.FFT();\n * sound.connect(fft);\n *}\n *\n *\n *function draw(){\n *\n * var centroidplot = 0.0;\n * var spectralCentroid = 0;\n *\n *\n * background(0);\n * stroke(0,255,0);\n * var spectrum = fft.analyze();\n * fill(0,255,0); // spectrum is green\n *\n * //draw the spectrum\n * for (var i = 0; i< spectrum.length; i++){\n * var x = map(log(i), 0, log(spectrum.length), 0, width);\n * var h = map(spectrum[i], 0, 255, 0, height);\n * var rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length));\n * rect(x, height, rectangle_width, -h )\n * }\n\n * var nyquist = 22050;\n *\n * // get the centroid\n * spectralCentroid = fft.getCentroid();\n *\n * // the mean_freq_index calculation is for the display.\n * var mean_freq_index = spectralCentroid/(nyquist/spectrum.length);\n *\n * centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width);\n *\n *\n * stroke(255,0,0); // the line showing where the centroid is will be red\n *\n * rect(centroidplot, 0, width / spectrum.length, height)\n * noStroke();\n * fill(255,255,255); // text is white\n * text(\"centroid: \", 10, 20);\n * text(round(spectralCentroid)+\" Hz\", 10, 40);\n *}\n *
p5.Signal is a constant audio-rate signal used by p5.Oscillator\n * and p5.Envelope for modulation math.
\n *\n *This is necessary because Web Audio is processed on a seprate clock.\n * For example, the p5 draw loop runs about 60 times per second. But\n * the audio clock must process samples 44100 times per second. If we\n * want to add a value to each of those samples, we can't do it in the\n * draw loop, but we can do it by adding a constant-rate audio signal.This class mostly functions behind the scenes in p5.sound, and returns\n * a Tone.Signal from the Tone.js library by Yotam Mann.\n * If you want to work directly with audio signals for modular\n * synthesis, check out\n * tone.js.
\n *\n * @class p5.Signal\n * @constructor\n * @return {Tone.Signal} A Signal object from the Tone.js library\n * @example\n *\n * function setup() {\n * carrier = new p5.Oscillator('sine');\n * carrier.amp(1); // set amplitude\n * carrier.freq(220); // set frequency\n * carrier.start(); // start oscillating\n *\n * modulator = new p5.Oscillator('sawtooth');\n * modulator.disconnect();\n * modulator.amp(1);\n * modulator.freq(4);\n * modulator.start();\n *\n * // Modulator's default amplitude range is -1 to 1.\n * // Multiply it by -200, so the range is -200 to 200\n * // then add 220 so the range is 20 to 420\n * carrier.freq( modulator.mult(-200).add(220) );\n * }\n *
Envelopes are pre-defined amplitude distribution over time.\n * Typically, envelopes are used to control the output volume\n * of an object, a series of fades referred to as Attack, Decay,\n * Sustain and Release (\n * ADSR\n * ). Envelopes can also control other Web Audio Parameters—for example, a p5.Envelope can\n * control an Oscillator's frequency like this: osc.freq(env)
.
Use setRange
to change the attack/release level.\n * Use setADSR
to change attackTime, decayTime, sustainPercent and releaseTime.
Use the play
method to play the entire envelope,\n * the ramp
method for a pingable trigger,\n * or triggerAttack
/\n * triggerRelease
to trigger noteOn/noteOff.
\n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.2;\n * var susPercent = 0.2;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(playEnv);\n * }\n *\n * function playEnv() {\n * env.play();\n * }\n *
\n * var t1 = 0.1; // attack time in seconds\n * var l1 = 0.7; // attack level 0.0 to 1.0\n * var t2 = 0.3; // decay time in seconds\n * var l2 = 0.1; // decay level 0.0 to 1.0\n * var t3 = 0.2; // sustain time in seconds\n * var l3 = 0.5; // sustain level 0.0 to 1.0\n * // release level defaults to zero\n *\n * var env;\n * var triOsc;\n *\n * function setup() {\n * background(0);\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope(t1, l1, t2, l2, t3, l3);\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env); // give the env control of the triOsc's amp\n * triOsc.start();\n * }\n *\n * // mouseClick triggers envelope if over canvas\n * function mouseClicked() {\n * // is mouse over canvas?\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * env.play(triOsc);\n * }\n * }\n *
setRange
),\n * then decayLevel would increase proportionally, to become 0.5.\n * @param {Number} [releaseTime] Time in seconds from now (defaults to 0)\n * @example\n * \n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.2;\n * var susPercent = 0.2;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(playEnv);\n * }\n *\n * function playEnv() {\n * env.play();\n * }\n *
\n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.2;\n * var susPercent = 0.2;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(playEnv);\n * }\n *\n * function playEnv() {\n * env.play();\n * }\n *
\n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.2;\n * var susPercent = 0.2;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n *\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(playEnv);\n * }\n *\n * function playEnv() {\n * // trigger env on triOsc, 0 seconds from now\n * // After decay, sustain for 0.2 seconds before release\n * env.play(triOsc, 0, 0.2);\n * }\n *
\n *\n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.3;\n * var susPercent = 0.4;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * background(200);\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(envAttack);\n * }\n *\n * function envAttack() {\n * console.log('trigger attack');\n * env.triggerAttack();\n *\n * background(0,255,0);\n * text('attack!', width/2, height/2);\n * }\n *\n * function mouseReleased() {\n * env.triggerRelease();\n *\n * background(200);\n * text('click to play', width/2, height/2);\n * }\n *
\n *\n * var attackLevel = 1.0;\n * var releaseLevel = 0;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.3;\n * var susPercent = 0.4;\n * var releaseTime = 0.5;\n *\n * var env, triOsc;\n *\n * function setup() {\n * var cnv = createCanvas(100, 100);\n * background(200);\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime, susPercent, releaseTime);\n * env.setRange(attackLevel, releaseLevel);\n *\n * triOsc = new p5.Oscillator('triangle');\n * triOsc.amp(env);\n * triOsc.start();\n * triOsc.freq(220);\n *\n * cnv.mousePressed(envAttack);\n * }\n *\n * function envAttack() {\n * console.log('trigger attack');\n * env.triggerAttack();\n *\n * background(0,255,0);\n * text('attack!', width/2, height/2);\n * }\n *\n * function mouseReleased() {\n * env.triggerRelease();\n *\n * background(200);\n * text('click to play', width/2, height/2);\n * }\n *
setADSR(attackTime, decayTime)
\n * as \n * time constants for simple exponential ramps.\n * If the value is higher than current value, it uses attackTime,\n * while a decrease uses decayTime.\n *\n * @method ramp\n * @for p5.Envelope\n * @param {Object} unit p5.sound Object or Web Audio Param\n * @param {Number} secondsFromNow When to trigger the ramp\n * @param {Number} v Target value\n * @param {Number} [v2] Second target value (optional)\n * @example\n * \n * var env, osc, amp, cnv;\n *\n * var attackTime = 0.001;\n * var decayTime = 0.2;\n * var attackLevel = 1;\n * var decayLevel = 0;\n *\n * function setup() {\n * cnv = createCanvas(100, 100);\n * fill(0,255,0);\n * noStroke();\n *\n * env = new p5.Envelope();\n * env.setADSR(attackTime, decayTime);\n *\n * osc = new p5.Oscillator();\n * osc.amp(env);\n * osc.start();\n *\n * amp = new p5.Amplitude();\n *\n * cnv.mousePressed(triggerRamp);\n * }\n *\n * function triggerRamp() {\n * env.ramp(osc, 0, attackLevel, decayLevel);\n * }\n *\n * function draw() {\n * background(20,20,20);\n * text('click me', 10, 20);\n * var h = map(amp.getLevel(), 0, 0.4, 0, height);;\n *\n * rect(0, height, width, -h);\n * }\n *
p5.Oscillator
for a full list of methods.\n *\n * @class p5.Pulse\n * @extends p5.Oscillator\n * @constructor\n * @param {Number} [freq] Frequency in oscillations per second (Hz)\n * @param {Number} [w] Width between the pulses (0 to 1.0,\n * defaults to 0)\n * @example\n * \n * var pulse;\n * function setup() {\n * background(0);\n *\n * // Create and start the pulse wave oscillator\n * pulse = new p5.Pulse();\n * pulse.amp(0.5);\n * pulse.freq(220);\n * pulse.start();\n * }\n *\n * function draw() {\n * var w = map(mouseX, 0, width, 0, 1);\n * w = constrain(w, 0, 1);\n * pulse.width(w)\n * }\n *
Get audio from an input, i.e. your computer's microphone.
\n *\n *Turn the mic on/off with the start() and stop() methods. When the mic\n * is on, its volume can be measured with getLevel or by connecting an\n * FFT object.
\n *\n *If you want to hear the AudioIn, use the .connect() method.\n * AudioIn does not connect to p5.sound output by default to prevent\n * feedback.
\n *\n *Note: This uses the getUserMedia/\n * Stream API, which is not supported by certain browsers. Access in Chrome browser\n * is limited to localhost and https, but access over http may be limited.
\n *\n * @class p5.AudioIn\n * @constructor\n * @param {Function} [errorCallback] A function to call if there is an error\n * accessing the AudioIn. For example,\n * Safari and iOS devices do not\n * currently allow microphone access.\n * @example\n *\n * var mic;\n * function setup(){\n * mic = new p5.AudioIn()\n * mic.start();\n * }\n * function draw(){\n * background(0);\n * micLevel = mic.getLevel();\n * ellipse(width/2, constrain(height-micLevel*height*5, 0, height), 10, 10);\n * }\n *
\n * var audiograb;\n *\n * function setup(){\n * //new audioIn\n * audioGrab = new p5.AudioIn();\n *\n * audioGrab.getSources(function(deviceList) {\n * //print out the array of available sources\n * console.log(deviceList);\n * //set the source to the first item in the deviceList array\n * audioGrab.setSource(0);\n * });\n * }\n *
input[0]
. \n\t\t * @type {Tone.Gain}\n\t\t */\n\t\tthis.a = this.input[0] = new Tone.Gain();\n\n\t\t/**\n\t\t * Alias for input[1]
. \n\t\t * @type {Tone.Gain}\n\t\t */\n\t\tthis.b = this.input[1] = new Tone.Gain();\n\n\t\t/**\n\t\t * \tThe mix between the two inputs. A fade value of 0\n\t\t * \twill output 100% input[0]
and \n\t\t * \ta value of 1 will output 100% input[1]
. \n\t\t * @type {NormalRange}\n\t\t * @signal\n\t\t */\n\t\tthis.fade = new Tone.Signal(this.defaultArg(initialFade, 0.5), Tone.Type.NormalRange);\n\n\t\t/**\n\t\t * equal power gain cross fade\n\t\t * @private\n\t\t * @type {Tone.EqualPowerGain}\n\t\t */\n\t\tthis._equalPowerA = new Tone.EqualPowerGain();\n\n\t\t/**\n\t\t * equal power gain cross fade\n\t\t * @private\n\t\t * @type {Tone.EqualPowerGain}\n\t\t */\n\t\tthis._equalPowerB = new Tone.EqualPowerGain();\n\t\t\n\t\t/**\n\t\t * invert the incoming signal\n\t\t * @private\n\t\t * @type {Tone}\n\t\t */\n\t\tthis._invert = new Tone.Expr(\"1 - $0\");\n\n\t\t//connections\n\t\tthis.a.connect(this.output);\n\t\tthis.b.connect(this.output);\n\t\tthis.fade.chain(this._equalPowerB, this.b.gain);\n\t\tthis.fade.chain(this._invert, this._equalPowerA, this.a.gain);\n\t\tthis._readOnly(\"fade\");\n\t};\n\n\tTone.extend(Tone.CrossFade);\n\n\t/**\n\t * clean up\n\t * @returns {Tone.CrossFade} this\n\t */\n\tTone.CrossFade.prototype.dispose = function(){\n\t\tTone.prototype.dispose.call(this);\n\t\tthis._writable(\"fade\");\n\t\tthis._equalPowerA.dispose();\n\t\tthis._equalPowerA = null;\n\t\tthis._equalPowerB.dispose();\n\t\tthis._equalPowerB = null;\n\t\tthis.fade.dispose();\n\t\tthis.fade = null;\n\t\tthis._invert.dispose();\n\t\tthis._invert = null;\n\t\tthis.a.dispose();\n\t\tthis.a = null;\n\t\tthis.b.dispose();\n\t\tthis.b = null;\n\t\treturn this;\n\t};\n\n\treturn Tone.CrossFade;\n});\n","define([\"Tone/core/Tone\", \"Tone/signal/Add\", \"Tone/signal/Subtract\", \"Tone/signal/Multiply\", \n\t\"Tone/signal/GreaterThan\", \"Tone/signal/GreaterThanZero\", \"Tone/signal/Abs\", \"Tone/signal/Negate\", \n\t\"Tone/signal/Modulo\", \"Tone/signal/Pow\", \"Tone/signal/AudioToGain\"], \n\tfunction(Tone){\n\n\t\"use strict\";\n\n\t/**\n\t * @class Evaluate an expression at audio rate. 
\n * var eq;\n * var band_names;\n * var band_index;\n *\n * var soundFile, play;\n *\n * function preload() {\n * soundFormats('mp3', 'ogg');\n * soundFile = loadSound('assets/beat');\n * }\n *\n * function setup() {\n * eq = new p5.EQ(3);\n * soundFile.disconnect();\n * eq.process(soundFile);\n *\n * band_names = ['lows','mids','highs'];\n * band_index = 0;\n * play = false;\n * textAlign(CENTER);\n * }\n *\n * function draw() {\n * background(30);\n * noStroke();\n * fill(255);\n * text('click to kill',50,25);\n *\n * fill(255, 40, 255);\n * textSize(26);\n * text(band_names[band_index],50,55);\n *\n * fill(255);\n * textSize(9);\n * text('space = play/pause',50,80);\n * }\n *\n * //If mouse is over canvas, cycle to the next band and kill the frequency\n * function mouseClicked() {\n * for (var i = 0; i < eq.bands.length; i++) {\n * eq.bands[i].gain(0);\n * }\n * eq.bands[band_index].gain(-40);\n * if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) {\n * band_index === 2 ? band_index = 0 : band_index++;\n * }\n * }\n *\n * //use space bar to trigger play / pause\n * function keyPressed() {\n * if (key===' ') {\n * play = !play\n * play ? soundFile.loop() : soundFile.pause();\n * }\n * }\n *
p5.soundOut.audiocontext.listener
\n *\n *\n * @class p5.Panner3D\n * @constructor\n */\n\tp5.Panner3D = function() {\n Effect.call(this);\n\n /**\n * \n * Web Audio Spatial Panner Node\n *\n * Properties include\n * - panningModel: \"equal power\" or \"HRTF\"\n * - distanceModel: \"linear\", \"inverse\", or \"exponential\"\n *\n * @property {AudioNode} panner\n *\n */\n this.panner = this.ac.createPanner();\n this.panner.panningModel = 'HRTF';\n this.panner.distanceModel = 'linear';\n this.panner.connect(this.output);\n this.input.connect(this.panner);\n\t};\n\n p5.Panner3D.prototype = Object.create(Effect.prototype);\n\n\n /**\n * Connect an audio sorce\n *\n * @method process\n * @for p5.Panner3D\n * @param {Object} src Input source\n */\n p5.Panner3D.prototype.process = function(src) {\n src.connect(this.input);\n }\n /**\n * Set the X,Y,Z position of the Panner\n * @method set\n * @for p5.Panner3D\n * @param {Number} xVal\n * @param {Number} yVal\n * @param {Number} zVal\n * @param {Number} time\n * @return {Array} Updated x, y, z values as an array\n */\n p5.Panner3D.prototype.set = function(xVal, yVal, zVal, time) {\n this.positionX(xVal,time);\n this.positionY(yVal,time);\n this.positionZ(zVal,time);\n return [this.panner.positionX.value,\n this.panner.positionY.value,\n this.panner.positionZ.value];\n };\n\n /**\n * Getter and setter methods for position coordinates\n * @method positionX\n * @for p5.Panner3D\n * @return {Number} updated coordinate value\n */\n /**\n * Getter and setter methods for position coordinates\n * @method positionY\n * @for p5.Panner3D\n * @return {Number} updated coordinate value\n */\n /**\n * Getter and setter methods for position coordinates\n * @method positionZ\n * @for p5.Panner3D\n * @return {Number} updated coordinate value\n */\n p5.Panner3D.prototype.positionX = function(xVal, time) {\n var t = time || 0;\n if (typeof xVal === 'number') {\n this.panner.positionX.value = xVal;\n this.panner.positionX.cancelScheduledValues(this.ac.currentTime + 0.01 + 
t);\n this.panner.positionX.linearRampToValueAtTime(xVal, this.ac.currentTime + 0.02 + t);\n } else if (xVal) {\n xVal.connect(this.panner.positionX);\n }\n return this.panner.positionX.value;\n };\n p5.Panner3D.prototype.positionY = function(yVal, time) {\n var t = time || 0;\n if (typeof yVal === 'number') {\n this.panner.positionY.value = yVal;\n this.panner.positionY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);\n this.panner.positionY.linearRampToValueAtTime(yVal, this.ac.currentTime + 0.02 + t);\n } else if (yVal) {\n yVal.connect(this.panner.positionY);\n }\n return this.panner.positionY.value;\n };\n p5.Panner3D.prototype.positionZ = function(zVal, time) {\n var t = time || 0;\n if (typeof zVal === 'number') {\n this.panner.positionZ.value = zVal;\n this.panner.positionZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);\n this.panner.positionZ.linearRampToValueAtTime(zVal, this.ac.currentTime + 0.02 + t);\n } else if (zVal) {\n zVal.connect(this.panner.positionZ);\n }\n return this.panner.positionZ.value;\n };\n\n /**\n * Set the X,Y,Z position of the Panner\n * @method orient\n * @for p5.Panner3D\n * @param {Number} xVal\n * @param {Number} yVal\n * @param {Number} zVal\n * @param {Number} time\n * @return {Array} Updated x, y, z values as an array\n */\n p5.Panner3D.prototype.orient = function(xVal, yVal, zVal, time) {\n this.orientX(xVal,time);\n this.orientY(yVal,time);\n this.orientZ(zVal,time);\n return [this.panner.orientationX.value,\n this.panner.orientationY.value,\n this.panner.orientationZ.value];\n };\n\n /**\n * Getter and setter methods for orient coordinates\n * @method orientX\n * @for p5.Panner3D\n * @return {Number} updated coordinate value\n */\n /**\n * Getter and setter methods for orient coordinates\n * @method orientY\n * @for p5.Panner3D\n * @return {Number} updated coordinate value\n */\n /**\n * Getter and setter methods for orient coordinates\n * @method orientZ\n * @for p5.Panner3D\n * @return {Number} updated 
coordinate value\n */\n p5.Panner3D.prototype.orientX = function(xVal, time) {\n var t = time || 0;\n if (typeof xVal === 'number') {\n this.panner.orientationX.value = xVal;\n this.panner.orientationX.cancelScheduledValues(this.ac.currentTime + 0.01 + t);\n this.panner.orientationX.linearRampToValueAtTime(xVal, this.ac.currentTime + 0.02 + t);\n } else if (xVal) {\n xVal.connect(this.panner.orientationX);\n }\n return this.panner.orientationX.value;\n };\n p5.Panner3D.prototype.orientY = function(yVal, time) {\n var t = time || 0;\n if (typeof yVal === 'number') {\n this.panner.orientationY.value = yVal;\n this.panner.orientationY.cancelScheduledValues(this.ac.currentTime + 0.01 + t);\n this.panner.orientationY.linearRampToValueAtTime(yVal, this.ac.currentTime + 0.02 + t);\n } else if (yVal) {\n yVal.connect(this.panner.orientationY);\n }\n return this.panner.orientationY.value;\n };\n p5.Panner3D.prototype.orientZ = function(zVal, time) {\n var t = time || 0;\n if (typeof zVal === 'number') {\n this.panner.orientationZ.value = zVal;\n this.panner.orientationZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t);\n this.panner.orientationZ.linearRampToValueAtTime(zVal, this.ac.currentTime + 0.02 + t);\n } else if (zVal) {\n zVal.connect(this.panner.orientationZ);\n }\n return this.panner.orientationZ.value;\n };\n\n /**\n * Set the rolloff factor and max distance\n * @method setFalloff\n * @for p5.Panner3D\n * @param {Number} [maxDistance]\n * @param {Number} [rolloffFactor]\n */\n p5.Panner3D.prototype.setFalloff = function(maxDistance, rolloffFactor) {\n this.maxDist(maxDistance);\n this.rolloff(rolloffFactor);\n };\n /**\n * Maxium distance between the source and the listener\n * @method maxDist\n * @for p5.Panner3D\n * @param {Number} maxDistance\n * @return {Number} updated value\n */\n p5.Panner3D.prototype.maxDist = function(maxDistance){\n if (typeof maxDistance === 'number') {\n this.panner.maxDistance = maxDistance;\n }\n return 
this.panner.maxDistance;\n };\n\n /**\n * How quickly the volume is reduced as the source moves away from the listener\n * @method rollof\n * @for p5.Panner3D\n * @param {Number} rolloffFactor\n * @return {Number} updated value\n */\n p5.Panner3D.prototype.rolloff = function(rolloffFactor){\n if (typeof rolloffFactor === 'number') {\n this.panner.rolloffFactor = rolloffFactor;\n }\n return this.panner.rolloffFactor;\n };\n\n p5.Panner3D.dispose = function() {\n Effect.prototype.dispose.apply(this);\n if (this.panner) {\n this.panner.disconnect();\n delete this.panner;\n }\n };\n\n return p5.Panner3D;\n\n});\n","'use strict'\n\ndefine(function (require) {\n var p5sound = require('master');\n var Effect = require('effect');\n\n// /**\n// * listener is a class that can construct both a Spatial Panner\n// * and a Spatial Listener. The panner is based on the \n// * Web Audio Spatial Panner Node\n// * https://www.w3.org/TR/webaudio/#the-listenernode-interface\n// * This panner is a spatial processing node that allows audio to be positioned\n// * and oriented in 3D space. \n// *\n// * The Listener modifies the properties of the Audio Context Listener. \n// * Both objects types use the same methods. The default is a spatial panner.\n// *\n// * p5.Panner3D
- Constructs a Spatial Pannerp5.Listener3D
- Constructs a Spatial Listener\n * var noise, env, delay;\n *\n * function setup() {\n * background(0);\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n * text('click to play', width/2, height/2);\n *\n * noise = new p5.Noise('brown');\n * noise.amp(0);\n * noise.start();\n *\n * delay = new p5.Delay();\n *\n * // delay.process() accepts 4 parameters:\n * // source, delayTime, feedback, filter frequency\n * // play with these numbers!!\n * delay.process(noise, .12, .7, 2300);\n *\n * // play the noise with an envelope,\n * // a series of fades ( time / value pairs )\n * env = new p5.Envelope(.01, 0.2, .2, .1);\n * }\n *\n * // mouseClick triggers envelope\n * function mouseClicked() {\n * // is mouse over canvas?\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * env.play(noise);\n * }\n * }\n *
\n * var soundFile, reverb;\n * function preload() {\n * soundFile = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n *\n * function setup() {\n * reverb = new p5.Reverb();\n * soundFile.disconnect(); // so we'll only hear reverb...\n *\n * // connect soundFile to reverb, process w/\n * // 3 second reverbTime, decayRate of 2%\n * reverb.process(soundFile, 3, 2);\n * soundFile.play();\n * }\n *
p5.Convolver extends p5.Reverb. It can emulate the sound of real\n * physical spaces through a process called \n * convolution.
\n *\n *Convolution multiplies any audio input by an \"impulse response\"\n * to simulate the dispersion of sound over time. The impulse response is\n * generated from an audio file that you provide. One way to\n * generate an impulse response is to pop a balloon in a reverberant space\n * and record the echo. Convolution can also be used to experiment with\n * sound.
\n *\n *Use the method createConvolution(path)
to instantiate a\n * p5.Convolver with a path to your impulse response audio file.
\n * var cVerb, sound;\n * function preload() {\n * // We have both MP3 and OGG versions of all sound assets\n * soundFormats('ogg', 'mp3');\n *\n * // Try replacing 'bx-spring' with other soundfiles like\n * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'\n * cVerb = createConvolver('assets/bx-spring.mp3');\n *\n * // Try replacing 'Damscray_DancingTiger' with\n * // 'beat', 'doorbell', lucky_dragons_-_power_melody'\n * sound = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n *\n * function setup() {\n * // disconnect from master output...\n * sound.disconnect();\n *\n * // ...and process with cVerb\n * // so that we only hear the convolution\n * cVerb.process(sound);\n *\n * sound.play();\n * }\n *
\n * var cVerb, sound;\n * function preload() {\n * // We have both MP3 and OGG versions of all sound assets\n * soundFormats('ogg', 'mp3');\n *\n * // Try replacing 'bx-spring' with other soundfiles like\n * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox'\n * cVerb = createConvolver('assets/bx-spring.mp3');\n *\n * // Try replacing 'Damscray_DancingTiger' with\n * // 'beat', 'doorbell', lucky_dragons_-_power_melody'\n * sound = loadSound('assets/Damscray_DancingTiger.mp3');\n * }\n *\n * function setup() {\n * // disconnect from master output...\n * sound.disconnect();\n *\n * // ...and process with cVerb\n * // so that we only hear the convolution\n * cVerb.process(sound);\n *\n * sound.play();\n * }\n *
\n * var cVerb, sound;\n * function preload() {\n * soundFormats('ogg', 'mp3');\n *\n * cVerb = createConvolver('assets/concrete-tunnel.mp3');\n *\n * sound = loadSound('assets/beat.mp3');\n * }\n *\n * function setup() {\n * // disconnect from master output...\n * sound.disconnect();\n *\n * // ...and process with (i.e. connect to) cVerb\n * // so that we only hear the convolution\n * cVerb.process(sound);\n *\n * sound.play();\n * }\n *
toggleImpulse(id)
method.\n *\n * @property {Array} impulses\n * @for p5.Convolver\n */\n p5.Convolver.prototype.impulses = [];\n\n /**\n * Load and assign a new Impulse Response to the p5.Convolver.\n * The impulse is added to the .impulses
array. Previous\n * impulses can be accessed with the .toggleImpulse(id)
\n * method.\n *\n * @method addImpulse\n * @for p5.Convolver\n * @param {String} path path to a sound file\n * @param {Function} callback function (optional)\n * @param {Function} errorCallback function (optional)\n */\n p5.Convolver.prototype.addImpulse = function(path, callback, errorCallback) {\n // if loading locally without a server\n if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {\n alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');\n }\n this._loadBuffer(path, callback, errorCallback);\n };\n\n /**\n * Similar to .addImpulse, except that the .impulses
\n * Array is reset to save memory. A new .impulses
\n * array is created with this impulse as the only item.\n *\n * @method resetImpulse\n * @for p5.Convolver\n * @param {String} path path to a sound file\n * @param {Function} callback function (optional)\n * @param {Function} errorCallback function (optional)\n */\n p5.Convolver.prototype.resetImpulse = function(path, callback, errorCallback) {\n // if loading locally without a server\n if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') {\n alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS');\n }\n this.impulses = [];\n this._loadBuffer(path, callback, errorCallback);\n };\n\n /**\n * If you have used .addImpulse()
to add multiple impulses\n * to a p5.Convolver, then you can use this method to toggle between\n * the items in the .impulses
Array. Accepts a parameter\n * to identify which impulse you wish to use, identified either by its\n * original filename (String) or by its position in the .impulses\n *
Array (Number)..audioBuffer
(type:\n * Web Audio \n * AudioBuffer) and a .name
, a String that corresponds\n * with the original filename.\n *\n * @method toggleImpulse\n * @for p5.Convolver\n * @param {String|Number} id Identify the impulse by its original filename\n * (String), or by its position in the\n * .impulses
Array (Number).\n */\n p5.Convolver.prototype.toggleImpulse = function(id) {\n if (typeof id === 'number' && id < this.impulses.length) {\n this._setBuffer(this.impulses[id].audioBuffer);\n }\n if (typeof id === 'string') {\n for (var i = 0; i < this.impulses.length; i++) {\n if (this.impulses[i].name === id) {\n this._setBuffer(this.impulses[i].audioBuffer);\n break;\n }\n }\n }\n };\n\n p5.Convolver.prototype.dispose = function() {\n p5.Reverb.prototype.dispose.apply(this);\n\n // remove all the Impulse Response buffers\n for (var i in this.impulses) {\n if (this.impulses[i]) {\n this.impulses[i] = null;\n }\n }\n };\n\n});\n","'use strict';\n\ndefine(function (require) {\n var p5sound = require('master');\n\n // requires the Tone.js library's Clock (MIT license, Yotam Mann)\n // https://github.com/TONEnoTONE/Tone.js/\n var Clock = require('Tone/core/Clock');\n\n p5.Metro = function() {\n this.clock = new Clock({\n 'callback': this.ontick.bind(this)\n });\n this.syncedParts = [];\n this.bpm = 120; // gets overridden by p5.Part\n this._init();\n\n this.prevTick = 0;\n this.tatumTime = 0;\n\n this.tickCallback = function() {};\n };\n\n p5.Metro.prototype.ontick = function(tickTime) {\n var elapsedTime = tickTime - this.prevTick;\n var secondsFromNow = tickTime - p5sound.audiocontext.currentTime;\n if (elapsedTime - this.tatumTime <= -0.02) {\n return;\n } else {\n // console.log('ok', this.syncedParts[0].phrases[0].name);\n this.prevTick = tickTime;\n\n // for all of the active things on the metro:\n var self = this;\n this.syncedParts.forEach(function(thisPart) {\n if (!thisPart.isPlaying) return;\n thisPart.incrementStep(secondsFromNow);\n // each synced source keeps track of its own beat number\n thisPart.phrases.forEach(function(thisPhrase) {\n var phraseArray = thisPhrase.sequence;\n var bNum = self.metroTicks % phraseArray.length;\n if (phraseArray[bNum] !== 0 && (self.metroTicks < phraseArray.length || !thisPhrase.looping) ) {\n 
thisPhrase.callback(secondsFromNow, phraseArray[bNum]);\n }\n });\n });\n this.metroTicks += 1;\n this.tickCallback(secondsFromNow);\n }\n };\n\n p5.Metro.prototype.setBPM = function(bpm, rampTime) {\n var beatTime = 60 / (bpm*this.tatums);\n var now = p5sound.audiocontext.currentTime;\n this.tatumTime = beatTime;\n\n var rampTime = rampTime || 0;\n this.clock.frequency.setValueAtTime(this.clock.frequency.value, now);\n this.clock.frequency.linearRampToValueAtTime(bpm, now + rampTime);\n this.bpm = bpm;\n };\n\n p5.Metro.prototype.getBPM = function() {\n return this.clock.getRate() / this.tatums * 60;\n };\n\n p5.Metro.prototype._init = function() {\n this.metroTicks = 0;\n // this.setBPM(120);\n };\n\n // clear existing synced parts, add only this one\n p5.Metro.prototype.resetSync = function(part) {\n this.syncedParts = [part];\n };\n\n // push a new synced part to the array\n p5.Metro.prototype.pushSync = function(part) {\n this.syncedParts.push(part);\n };\n\n p5.Metro.prototype.start = function(timeFromNow) {\n var t = timeFromNow || 0;\n var now = p5sound.audiocontext.currentTime;\n this.clock.start(now + t);\n this.setBPM(this.bpm);\n };\n\n p5.Metro.prototype.stop = function(timeFromNow) {\n var t = timeFromNow || 0;\n var now = p5sound.audiocontext.currentTime;\n this.clock.stop(now + t);\n };\n\n p5.Metro.prototype.beatLength = function(tatums) {\n this.tatums = 1/tatums / 4; // lowest possible division of a beat\n };\n\n});\n","define([\"Tone/core/Tone\", \"Tone/core/Timeline\", \"Tone/type/Type\"], function (Tone) {\n\n\t\"use strict\";\n\n\t/**\n\t * @class A Timeline State. Provides the methods: setStateAtTime(\"state\", time)
\n\t * and getValueAtTime(time)
.\n\t *\n\t * @extends {Tone.Timeline}\n\t * @param {String} initial The initial state of the TimelineState. \n\t * Defaults to undefined
\n\t */\n\tTone.TimelineState = function(initial){\n\n\t\tTone.Timeline.call(this);\n\n\t\t/**\n\t\t * The initial state\n\t\t * @private\n\t\t * @type {String}\n\t\t */\n\t\tthis._initial = initial;\n\t};\n\n\tTone.extend(Tone.TimelineState, Tone.Timeline);\n\n\t/**\n\t * Returns the scheduled state scheduled before or at\n\t * the given time.\n\t * @param {Number} time The time to query.\n\t * @return {String} The name of the state input in setStateAtTime.\n\t */\n\tTone.TimelineState.prototype.getValueAtTime = function(time){\n\t\tvar event = this.get(time);\n\t\tif (event !== null){\n\t\t\treturn event.state;\n\t\t} else {\n\t\t\treturn this._initial;\n\t\t}\n\t};\n\n\t/**\n\t * Returns the scheduled state scheduled before or at\n\t * the given time.\n\t * @param {String} state The name of the state to set.\n\t * @param {Number} time The time to query.\n\t */\n\tTone.TimelineState.prototype.setStateAtTime = function(state, time){\n\t\tthis.add({\n\t\t\t\"state\" : state,\n\t\t\t\"time\" : time\n\t\t});\n\t};\n\n\treturn Tone.TimelineState;\n});","'use strict';\n\ndefine(function(require) {\n var p5sound = require('master');\n\n var BPM = 120;\n\n /**\n * Set the global tempo, in beats per minute, for all\n * p5.Parts. This method will impact all active p5.Parts.\n *\n * @method setBPM\n * @for p5\n * @param {Number} BPM Beats Per Minute\n * @param {Number} rampTime Seconds from now\n */\n p5.prototype.setBPM = function(bpm, rampTime) {\n BPM = bpm;\n for (var i in p5sound.parts) {\n if (p5sound.parts[i]) {\n p5sound.parts[i].setBPM(bpm, rampTime);\n }\n }\n };\n\n /**\n * A phrase is a pattern of musical events over time, i.e.\n * a series of notes and rests.
\n *\n *Phrases must be added to a p5.Part for playback, and\n * each part can play multiple phrases at the same time.\n * For example, one Phrase might be a kick drum, another\n * could be a snare, and another could be the bassline.
\n *\n *The first parameter is a name so that the phrase can be\n * modified or deleted later. The callback is a a function that\n * this phrase will call at every step—for example it might be\n * called playNote(value){}
. The array determines\n * which value is passed into the callback at each step of the\n * phrase. It can be numbers, an object with multiple numbers,\n * or a zero (0) indicates a rest so the callback won't be called).
\n * var mySound, myPhrase, myPart;\n * var pattern = [1,0,0,2,0,2,0,0];\n * var msg = 'click to play';\n *\n * function preload() {\n * mySound = loadSound('assets/beatbox.mp3');\n * }\n *\n * function setup() {\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n * masterVolume(0.1);\n *\n * myPhrase = new p5.Phrase('bbox', makeSound, pattern);\n * myPart = new p5.Part();\n * myPart.addPhrase(myPhrase);\n * myPart.setBPM(60);\n * }\n *\n * function draw() {\n * background(0);\n * text(msg, width/2, height/2);\n * }\n *\n * function makeSound(time, playbackRate) {\n * mySound.rate(playbackRate);\n * mySound.play(time);\n * }\n *\n * function mouseClicked() {\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * myPart.start();\n * msg = 'playing pattern';\n * }\n * }\n *\n *
A p5.Part plays back one or more p5.Phrases. Instantiate a part\n * with steps and tatums. By default, each step represents a 1/16th note.
\n *\n *See p5.Phrase for more about musical timing.
\n *\n * @class p5.Part\n * @constructor\n * @param {Number} [steps] Steps in the part\n * @param {Number} [tatums] Divisions of a beat, e.g. use 1/4, or 0.25 for a quater note (default is 1/16, a sixteenth note)\n * @example\n *\n * var box, drum, myPart;\n * var boxPat = [1,0,0,2,0,2,0,0];\n * var drumPat = [0,1,1,0,2,0,1,0];\n * var msg = 'click to play';\n *\n * function preload() {\n * box = loadSound('assets/beatbox.mp3');\n * drum = loadSound('assets/drum.mp3');\n * }\n *\n * function setup() {\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n * masterVolume(0.1);\n *\n * var boxPhrase = new p5.Phrase('box', playBox, boxPat);\n * var drumPhrase = new p5.Phrase('drum', playDrum, drumPat);\n * myPart = new p5.Part();\n * myPart.addPhrase(boxPhrase);\n * myPart.addPhrase(drumPhrase);\n * myPart.setBPM(60);\n * masterVolume(0.1);\n * }\n *\n * function draw() {\n * background(0);\n * text(msg, width/2, height/2);\n * }\n *\n * function playBox(time, playbackRate) {\n * box.rate(playbackRate);\n * box.play(time);\n * }\n *\n * function playDrum(time, playbackRate) {\n * drum.rate(playbackRate);\n * drum.play(time);\n * }\n *\n * function mouseClicked() {\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * myPart.start();\n * msg = 'playing part';\n * }\n * }\n *
new p5.Score(a, a, b, a, c)
\n *\n * @class p5.Score\n * @constructor\n * @param {p5.Part} [...parts] One or multiple parts, to be played in sequence.\n */\n p5.Score = function() {\n // for all of the arguments\n this.parts = [];\n this.currentPart = 0;\n\n var thisScore = this;\n for (var i in arguments) {\n if (arguments[i] && this.parts[i]) {\n this.parts[i] = arguments[i];\n this.parts[i].nextPart = this.parts[i + 1];\n this.parts[i].onended = function() {\n thisScore.resetPart(i);\n playNextPart(thisScore);\n };\n }\n }\n this.looping = false;\n };\n\n p5.Score.prototype.onended = function() {\n if (this.looping) {\n // this.resetParts();\n this.parts[0].start();\n } else {\n this.parts[this.parts.length - 1].onended = function() {\n this.stop();\n this.resetParts();\n };\n }\n this.currentPart = 0;\n };\n\n /**\n * Start playback of the score.\n *\n * @method start\n * @for p5.Score\n */\n p5.Score.prototype.start = function() {\n this.parts[this.currentPart].start();\n this.scoreStep = 0;\n };\n\n /**\n * Stop playback of the score.\n *\n * @method stop\n * @for p5.Score\n */\n p5.Score.prototype.stop = function() {\n this.parts[this.currentPart].stop();\n this.currentPart = 0;\n this.scoreStep = 0;\n };\n\n /**\n * Pause playback of the score.\n *\n * @method pause\n * @for p5.Score\n */\n p5.Score.prototype.pause = function() {\n this.parts[this.currentPart].stop();\n };\n\n /**\n * Loop playback of the score.\n *\n * @method loop\n * @for p5.Score\n */\n p5.Score.prototype.loop = function() {\n this.looping = true;\n this.start();\n };\n\n /**\n * Stop looping playback of the score. 
If it\n * is currently playing, this will go into effect\n * after the current round of playback completes.\n *\n * @method noLoop\n * @for p5.Score\n */\n p5.Score.prototype.noLoop = function() {\n this.looping = false;\n };\n\n p5.Score.prototype.resetParts = function() {\n var self = this;\n this.parts.forEach(function(part) {\n self.resetParts[part];\n });\n };\n\n p5.Score.prototype.resetPart = function(i) {\n this.parts[i].stop();\n this.parts[i].partStep = 0;\n for (var p in this.parts[i].phrases) {\n if (this.parts[i]) {\n this.parts[i].phrases[p].phraseStep = 0;\n }\n }\n };\n\n /**\n * Set the tempo for all parts in the score\n *\n * @method setBPM\n * @for p5.Score\n * @param {Number} BPM Beats Per Minute\n * @param {Number} rampTime Seconds from now\n */\n p5.Score.prototype.setBPM = function(bpm, rampTime) {\n for (var i in this.parts) {\n if (this.parts[i]) {\n this.parts[i].setBPM(bpm, rampTime);\n }\n }\n };\n\n function playNextPart(aScore) {\n aScore.currentPart++;\n if (aScore.currentPart >= aScore.parts.length) {\n aScore.scoreStep = 0;\n aScore.onended();\n } else {\n aScore.scoreStep = 0;\n aScore.parts[aScore.currentPart - 1].stop();\n aScore.parts[aScore.currentPart].start();\n }\n }\n\n});\n","'use strict';\n\ndefine(function (require) {\n var p5sound = require('master');\n var Clock = require('Tone/core/Clock');\n\n /**\n * SoundLoop\n *\n * @class p5.SoundLoop\n * @constructor\n *\n * @param {Function} callback this function will be called on each iteration of theloop\n * @param {Number|String} [interval] amount of time or beats for each iteration of the loop\n * defaults to 1\n *\n * @example\n * \n * var click;\n * var looper1;\n *\n * function preload() {\n * click = loadSound('assets/drum.mp3');\n * }\n *\n * function setup() {\n * //the looper's callback is passed the timeFromNow\n * //this value should be used as a reference point from\n * //which to schedule sounds\n * looper1 = new p5.SoundLoop(function(timeFromNow){\n * 
click.play(timeFromNow);\n * background(255 * (looper1.iterations % 2));\n * }, 2);\n *\n * //stop after 10 iteratios;\n * looper1.maxIterations = 10;\n * //start the loop\n * looper1.start();\n * }\n *
Record sounds for playback and/or to save as a .wav file.\n * The p5.SoundRecorder records all sound output from your sketch,\n * or can be assigned a specific source with setInput().
\n *The record() method accepts a p5.SoundFile as a parameter.\n * When playback is stopped (either after the given amount of time,\n * or with the stop() method), the p5.SoundRecorder will send its\n * recording to that p5.SoundFile for playback.
\n *\n * @class p5.SoundRecorder\n * @constructor\n * @example\n *\n * var mic, recorder, soundFile;\n * var state = 0;\n *\n * function setup() {\n * background(200);\n * // create an audio in\n * mic = new p5.AudioIn();\n *\n * // prompts user to enable their browser mic\n * mic.start();\n *\n * // create a sound recorder\n * recorder = new p5.SoundRecorder();\n *\n * // connect the mic to the recorder\n * recorder.setInput(mic);\n *\n * // this sound file will be used to\n * // playback & save the recording\n * soundFile = new p5.SoundFile();\n *\n * text('keyPress to record', 20, 20);\n * }\n *\n * function keyPressed() {\n * // make sure user enabled the mic\n * if (state === 0 && mic.enabled) {\n *\n * // record to our p5.SoundFile\n * recorder.record(soundFile);\n *\n * background(255,0,0);\n * text('Recording!', 20, 20);\n * state++;\n * }\n * else if (state === 1) {\n * background(0,255,0);\n *\n * // stop recorder and\n * // send result to soundFile\n * recorder.stop();\n *\n * text('Stopped', 20, 20);\n * state++;\n * }\n *\n * else if (state === 2) {\n * soundFile.play(); // play the result!\n * save(soundFile, 'mySound.wav');\n * state++;\n * }\n * }\n *
PeakDetect works in conjunction with p5.FFT to\n * look for onsets in some or all of the frequency spectrum.\n *
\n *\n * To use p5.PeakDetect, call update
in the draw loop\n * and pass in a p5.FFT object.\n *
\n * You can listen for a specific part of the frequency spectrum by\n * setting the range between freq1
and freq2
.\n *
threshold
is the threshold for detecting a peak,\n * scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud\n * as 1.0.
\n * The update method is meant to be run in the draw loop, and\n * frames determines how many loops must pass before\n * another peak can be detected.\n * For example, if the frameRate() = 60, you could detect the beat of a\n * 120 beat-per-minute song with this equation:\n * framesPerPeak = 60 / (estimatedBPM / 60 );
\n *
\n * Based on example contribtued by @b2renger, and a simple beat detection\n * explanation by Felix Turner.\n *
\n *\n * @class p5.PeakDetect\n * @constructor\n * @param {Number} [freq1] lowFrequency - defaults to 20Hz\n * @param {Number} [freq2] highFrequency - defaults to 20000 Hz\n * @param {Number} [threshold] Threshold for detecting a beat between 0 and 1\n * scaled logarithmically where 0.1 is 1/2 the loudness\n * of 1.0. Defaults to 0.35.\n * @param {Number} [framesPerPeak] Defaults to 20.\n * @example\n *\n *\n * var cnv, soundFile, fft, peakDetect;\n * var ellipseWidth = 10;\n *\n * function preload() {\n * soundFile = loadSound('assets/beat.mp3');\n * }\n *\n * function setup() {\n * background(0);\n * noStroke();\n * fill(255);\n * textAlign(CENTER);\n *\n * // p5.PeakDetect requires a p5.FFT\n * fft = new p5.FFT();\n * peakDetect = new p5.PeakDetect();\n * }\n *\n * function draw() {\n * background(0);\n * text('click to play/pause', width/2, height/2);\n *\n * // peakDetect accepts an fft post-analysis\n * fft.analyze();\n * peakDetect.update(fft);\n *\n * if ( peakDetect.isDetected ) {\n * ellipseWidth = 50;\n * } else {\n * ellipseWidth *= 0.95;\n * }\n *\n * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);\n * }\n *\n * // toggle play/stop when canvas is clicked\n * function mouseClicked() {\n * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) {\n * if (soundFile.isPlaying() ) {\n * soundFile.stop();\n * } else {\n * soundFile.play();\n * }\n * }\n * }\n *
\n * var cnv, soundFile, fft, peakDetect;\n * var ellipseWidth = 0;\n *\n * function preload() {\n * soundFile = loadSound('assets/beat.mp3');\n * }\n *\n * function setup() {\n * cnv = createCanvas(100,100);\n * textAlign(CENTER);\n *\n * fft = new p5.FFT();\n * peakDetect = new p5.PeakDetect();\n *\n * setupSound();\n *\n * // when a beat is detected, call triggerBeat()\n * peakDetect.onPeak(triggerBeat);\n * }\n *\n * function draw() {\n * background(0);\n * fill(255);\n * text('click to play', width/2, height/2);\n *\n * fft.analyze();\n * peakDetect.update(fft);\n *\n * ellipseWidth *= 0.95;\n * ellipse(width/2, height/2, ellipseWidth, ellipseWidth);\n * }\n *\n * // this function is called by peakDetect.onPeak\n * function triggerBeat() {\n * ellipseWidth = 50;\n * }\n *\n * // mouseclick starts/stops sound\n * function setupSound() {\n * cnv.mouseClicked( function() {\n * if (soundFile.isPlaying() ) {\n * soundFile.stop();\n * } else {\n * soundFile.play();\n * }\n * });\n * }\n *
\n *\n * // load two soundfile and crossfade beetween them\n * var sound1,sound2;\n * var gain1, gain2, gain3;\n *\n * function preload(){\n * soundFormats('ogg', 'mp3');\n * sound1 = loadSound('assets/Damscray_-_Dancing_Tiger_01');\n * sound2 = loadSound('assets/beat.mp3');\n * }\n *\n * function setup() {\n * createCanvas(400,200);\n *\n * // create a 'master' gain to which we will connect both soundfiles\n * gain3 = new p5.Gain();\n * gain3.connect();\n *\n * // setup first sound for playing\n * sound1.rate(1);\n * sound1.loop();\n * sound1.disconnect(); // diconnect from p5 output\n *\n * gain1 = new p5.Gain(); // setup a gain node\n * gain1.setInput(sound1); // connect the first sound to its input\n * gain1.connect(gain3); // connect its output to the 'master'\n *\n * sound2.rate(1);\n * sound2.disconnect();\n * sound2.loop();\n *\n * gain2 = new p5.Gain();\n * gain2.setInput(sound2);\n * gain2.connect(gain3);\n *\n * }\n *\n * function draw(){\n * background(180);\n *\n * // calculate the horizontal distance beetween the mouse and the right of the screen\n * var d = dist(mouseX,0,width,0);\n *\n * // map the horizontal position of the mouse to values useable for volume control of sound1\n * var vol1 = map(mouseX,0,width,0,1);\n * var vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa\n *\n * gain1.amp(vol1,0.5,0);\n * gain2.amp(vol2,0.5,0);\n *\n * // map the vertical position of the mouse to values useable for 'master volume control'\n * var vol3 = map(mouseY,0,height,0,1);\n * gain3.amp(vol3,0.5,0);\n * }\n *
* function setup() {
diff --git a/src/audioin.js b/src/audioin.js
index c0bf573e..6a07b5ee 100644
--- a/src/audioin.js
+++ b/src/audioin.js
@@ -101,6 +101,7 @@ define(function (require) {
* the browser won't provide mic access.
*
* @method start
+ * @for p5.AudioIn
* @param {Function} [successCallback] Name of a function to call on
* success.
* @param {Function} [errorCallback] Name of a function to call if
@@ -151,6 +152,7 @@ define(function (require) {
* If re-starting, the user may be prompted for permission access.
*
* @method stop
+ * @for p5.AudioIn
*/
p5.AudioIn.prototype.stop = function() {
if (this.stream) {
@@ -170,6 +172,7 @@ define(function (require) {
* connect to the master output (i.e. your speakers).
*
* @method connect
+ * @for p5.AudioIn
* @param {Object} [unit] An object that accepts audio input,
* such as an FFT
*/
@@ -196,6 +199,7 @@ define(function (require) {
* signal to your speakers.
*
* @method disconnect
+ * @for p5.AudioIn
*/
p5.AudioIn.prototype.disconnect = function() {
if (this.output) {
@@ -213,6 +217,7 @@ define(function (require) {
* .start() before using .getLevel().
*
* @method getLevel
+ * @for p5.AudioIn
* @param {Number} [smoothing] Smoothing is 0.0 by default.
* Smooths values based on previous values.
* @return {Number} Volume level (between 0.0 and 1.0)
@@ -228,6 +233,7 @@ define(function (require) {
* Set amplitude (volume) of a mic input between 0 and 1.0.
*
* @method amp
+ * @for p5.AudioIn
* @param {Number} vol between 0 and 1.0
* @param {Number} [time] ramp time (optional)
*/
@@ -251,6 +257,7 @@ define(function (require) {
* > and it returns a Promise.
*
* @method getSources
+ * @for p5.AudioIn
* @param {Function} [successCallback] This callback function handles the sources when they
* have been enumerated. The callback function
* receives the deviceList array as its only argument
@@ -307,6 +314,7 @@ define(function (require) {
* >navigator.mediaDevices.enumerateDevices().
*
* @method setSource
+ * @for p5.AudioIn
* @param {number} num position of input source in the array
*/
p5.AudioIn.prototype.setSource = function(num) {
diff --git a/src/compressor.js b/src/compressor.js
index 2dd9d0ca..f075050c 100644
--- a/src/compressor.js
+++ b/src/compressor.js
@@ -8,34 +8,34 @@ define(function (require) {
/**
* Compressor is an audio effect class that performs dynamics compression
* on an audio input source. This is a very commonly used technique in music
- * and sound production. Compression creates an overall louder, richer,
+ * and sound production. Compression creates an overall louder, richer,
* and fuller sound by lowering the volume of louds and raising that of softs.
- * Compression can be used to avoid clipping (sound distortion due to
- * peaks in volume) and is especially useful when many sounds are played
+ * Compression can be used to avoid clipping (sound distortion due to
+ * peaks in volume) and is especially useful when many sounds are played
* at once. Compression can be used on indivudal sound sources in addition
- * to the master output.
+ * to the master output.
*
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
* disconnect() are available.
*
* @class p5.Compressor
* @constructor
* @extends p5.Effect
*
- *
+ *
*/
p5.Compressor = function() {
Effect.call(this);
/**
- * The p5.Compressor is built with a Web Audio Dynamics Compressor Node
*
- * @property {AudioNode} compressor
+ * @property {AudioNode} compressor
*/
-
+
this.compressor = this.ac.createDynamicsCompressor();
@@ -48,13 +48,14 @@ define(function (require) {
/**
* Performs the same function as .connect, but also accepts
* optional parameters to set compressor's audioParams
- * @method process
+ * @method process
+ * @for p5.Compressor
*
* @param {Object} src Sound source to be connected
- *
+ *
* @param {Number} [attack] The amount of time (in seconds) to reduce the gain by 10dB,
* default = .003, range 0 - 1
- * @param {Number} [knee] A decibel value representing the range above the
+ * @param {Number} [knee] A decibel value representing the range above the
* threshold where the curve smoothly transitions to the "ratio" portion.
* default = 30, range 0 - 40
* @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
@@ -64,18 +65,19 @@ define(function (require) {
* @param {Number} [release] The amount of time (in seconds) to increase the gain by 10dB
* default = .25, range 0 - 1
*/
- p5.Compressor.prototype.process = function(src, attack, knee,
+ p5.Compressor.prototype.process = function(src, attack, knee,
ratio, threshold, release) {
src.connect(this.input);
this.set(attack, knee, ratio, threshold, release);
};
/**
- * Set the paramters of a compressor.
+ * Set the paramters of a compressor.
* @method set
+ * @for p5.Compressor
* @param {Number} attack The amount of time (in seconds) to reduce the gain by 10dB,
* default = .003, range 0 - 1
- * @param {Number} knee A decibel value representing the range above the
+ * @param {Number} knee A decibel value representing the range above the
* threshold where the curve smoothly transitions to the "ratio" portion.
* default = 30, range 0 - 40
* @param {Number} ratio The amount of dB change in input for a 1 dB change in output
@@ -85,7 +87,7 @@ define(function (require) {
* @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
* default = .25, range 0 - 1
*/
- p5.Compressor.prototype.set = function (attack, knee,
+ p5.Compressor.prototype.set = function (attack, knee,
ratio, threshold, release) {
if (typeof attack !== 'undefined') {this.attack(attack);}
@@ -98,9 +100,10 @@ define(function (require) {
/**
* Get current attack or set value w/ time ramp
- *
- *
+ *
+ *
* @method attack
+ * @for p5.Compressor
* @param {Number} [attack] Attack is the amount of time (in seconds) to reduce the gain by 10dB,
* default = .003, range 0 - 1
* @param {Number} [time] Assign time value to schedule the change in value
@@ -120,9 +123,10 @@ define(function (require) {
/**
* Get current knee or set value w/ time ramp
- *
+ *
* @method knee
- * @param {Number} [knee] A decibel value representing the range above the
+ * @for p5.Compressor
+ * @param {Number} [knee] A decibel value representing the range above the
* threshold where the curve smoothly transitions to the "ratio" portion.
* default = 30, range 0 - 40
* @param {Number} [time] Assign time value to schedule the change in value
@@ -143,9 +147,9 @@ define(function (require) {
/**
* Get current ratio or set value w/ time ramp
* @method ratio
- *
+ * @for p5.Compressor
* @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output
- * default = 12, range 1 - 20
+ * default = 12, range 1 - 20
* @param {Number} [time] Assign time value to schedule the change in value
*/
p5.Compressor.prototype.ratio = function (ratio, time){
@@ -164,7 +168,7 @@ define(function (require) {
/**
* Get current threshold or set value w/ time ramp
* @method threshold
- *
+ * @for p5.Compressor
* @param {Number} threshold The decibel value above which the compression will start taking effect
* default = -24, range -100 - 0
* @param {Number} [time] Assign time value to schedule the change in value
@@ -185,7 +189,7 @@ define(function (require) {
/**
* Get current release or set value w/ time ramp
* @method release
- *
+ * @for p5.Compressor
* @param {Number} release The amount of time (in seconds) to increase the gain by 10dB
* default = .25, range 0 - 1
*
@@ -207,6 +211,7 @@ define(function (require) {
* Return the current reduction value
*
* @method reduction
+ * @for p5.Compressor
* @return {Number} Value of the amount of gain reduction that is applied to the signal
*/
p5.Compressor.prototype.reduction =function() {
diff --git a/src/delay.js b/src/delay.js
index 1e74d0fd..8d095d48 100644
--- a/src/delay.js
+++ b/src/delay.js
@@ -15,9 +15,9 @@ define(function (require) {
* original source.
*
*
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
* disconnect() are available.
* @class p5.Delay
* @extends p5.Effect
@@ -123,6 +123,7 @@ define(function (require) {
* of delay parameters.
*
* @method process
+ * @for p5.Delay
* @param {Object} Signal An object that outputs audio
* @param {Number} [delayTime] Time (in seconds) of the delay/echo.
* Some browsers limit delayTime to
@@ -161,6 +162,7 @@ define(function (require) {
* a floating point number between 0.0 and 1.0.
*
* @method delayTime
+ * @for p5.Delay
* @param {Number} delayTime Time (in seconds) of the delay
*/
p5.Delay.prototype.delayTime = function(t) {
@@ -186,6 +188,7 @@ define(function (require) {
* creating an infinite feedback loop. The default value is 0.5
*
* @method feedback
+ * @for p5.Delay
* @param {Number|Object} feedback 0.0 to 1.0, or an object such as an
* Oscillator that can be used to
* modulate this param
@@ -215,6 +218,7 @@ define(function (require) {
* will cut off any frequencies higher than the filter frequency.
*
* @method filter
+ * @for p5.Delay
* @param {Number|Object} cutoffFreq A lowpass filter will cut off any
* frequencies higher than the filter frequency.
* @param {Number|Object} res Resonance of the filter frequency
@@ -235,6 +239,7 @@ define(function (require) {
* Any other parameter will revert to the default delay setting.
*
* @method setType
+ * @for p5.Delay
* @param {String|Number} type 'pingPong' (1) or 'default' (0)
*/
p5.Delay.prototype.setType = function(t) {
@@ -267,6 +272,7 @@ define(function (require) {
* Set the output level of the delay effect.
*
* @method amp
+ * @for p5.Delay
* @param {Number} volume amplitude between 0 and 1.0
* @param {Number} [rampTime] create a fade that lasts rampTime
* @param {Number} [timeFromNow] schedule this event to happen
@@ -276,12 +282,14 @@ define(function (require) {
* Send output to a p5.sound or web audio object
*
* @method connect
+ * @for p5.Delay
* @param {Object} unit
*/
/**
* Disconnect all output.
*
* @method disconnect
+ * @for p5.Delay
*/
p5.Delay.prototype.dispose = function() {
diff --git a/src/distortion.js b/src/distortion.js
index 58eef490..c657b28c 100644
--- a/src/distortion.js
+++ b/src/distortion.js
@@ -25,12 +25,12 @@ define(function (require) {
* A Distortion effect created with a Waveshaper Node,
* with an approach adapted from
* [Kevin Ennis](http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion)
- *
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
+ *
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
* disconnect() are available.
- *
+ *
* @class p5.Distortion
* @extends p5.Effect
* @constructor
@@ -79,6 +79,7 @@ define(function (require) {
* Process a sound source, optionally specify amount and oversample values.
*
* @method process
+ * @for p5.Distortion
* @param {Number} [amount=0.25] Unbounded distortion amount.
* Normal values range from 0-1.
* @param {String} [oversample='none'] 'none', '2x', or '4x'.
@@ -92,6 +93,7 @@ define(function (require) {
* Set the amount and oversample of the waveshaper distortion.
*
* @method set
+ * @for p5.Distortion
* @param {Number} [amount=0.25] Unbounded distortion amount.
* Normal values range from 0-1.
* @param {String} [oversample='none'] 'none', '2x', or '4x'.
@@ -111,6 +113,7 @@ define(function (require) {
* Return the distortion amount, typically between 0-1.
*
* @method getAmount
+ * @for p5.Distortion
* @return {Number} Unbounded distortion amount.
* Normal values range from 0-1.
*/
@@ -122,7 +125,7 @@ define(function (require) {
* Return the oversampling.
*
* @method getOversample
- *
+ * @for p5.Distortion
* @return {String} Oversample can either be 'none', '2x', or '4x'.
*/
p5.Distortion.prototype.getOversample = function() {
diff --git a/src/effect.js b/src/effect.js
index c02750fe..70728176 100644
--- a/src/effect.js
+++ b/src/effect.js
@@ -6,25 +6,25 @@ define(function (require) {
/**
* Effect is a base class for audio effects in p5.
- * This module handles the nodes and methods that are
+ * This module handles the nodes and methods that are
* common and useful for current and future effects.
*
*
- * This class is extended by p5.Distortion,
+ * This class is extended by p5.Distortion,
* p5.Compressor,
- * p5.Delay,
- * p5.Filter,
+ * p5.Delay,
+ * p5.Filter,
* p5.Reverb.
*
* @class p5.Effect
* @constructor
- *
+ *
* @param {Object} [ac] Reference to the audio context of the p5 object
* @param {AudioNode} [input] Gain Node effect wrapper
* @param {AudioNode} [output] Gain Node effect wrapper
* @param {Object} [_drywet] Tone.JS CrossFade node (defaults to value: 1)
* @param {AudioNode} [wet] Effects that extend this class should connect
- * to the wet signal to this gain node, so that dry and wet
+ * to the wet signal to this gain node, so that dry and wet
* signals are mixed properly.
*/
p5.Effect = function() {
@@ -38,7 +38,7 @@ define(function (require) {
* using Tone.js CrossFade
* @private
*/
-
+
this._drywet = new CrossFade(1);
/**
@@ -60,10 +60,11 @@ define(function (require) {
/**
* Set the output volume of the filter.
- *
+ *
* @method amp
+ * @for p5.Effect
* @param {Number} [vol] amplitude between 0 and 1.0
- * @param {Number} [rampTime] create a fade that lasts until rampTime
+ * @param {Number} [rampTime] create a fade that lasts until rampTime
* @param {Number} [tFromNow] schedule this event to happen in tFromNow seconds
*/
p5.Effect.prototype.amp = function(vol, rampTime, tFromNow){
@@ -77,13 +78,14 @@ define(function (require) {
};
/**
- * Link effects together in a chain
- * Example usage: filter.chain(reverb, delay, panner);
- * May be used with an open-ended number of arguments
+ * Link effects together in a chain
+ * Example usage: filter.chain(reverb, delay, panner);
+ * May be used with an open-ended number of arguments
*
- * @method chain
- * @param {Object} [arguments] Chain together multiple sound objects
- */
+ * @method chain
+ * @for p5.Effect
+ * @param {Object} [arguments] Chain together multiple sound objects
+ */
p5.Effect.prototype.chain = function(){
if (arguments.length>0){
this.connect(arguments[0]);
@@ -95,24 +97,26 @@ define(function (require) {
};
/**
- * Adjust the dry/wet value.
- *
- * @method drywet
- * @param {Number} [fade] The desired drywet value (0 - 1.0)
+ * Adjust the dry/wet value.
+ *
+ * @method drywet
+ * @for p5.Effect
+ * @param {Number} [fade] The desired drywet value (0 - 1.0)
*/
p5.Effect.prototype.drywet = function(fade){
- if (typeof fade !=="undefined"){
+ if (typeof fade !=="undefined"){
this._drywet.fade.value = fade
}
return this._drywet.fade.value;
};
/**
- * Send output to a p5.js-sound, Web Audio Node, or use signal to
- * control an AudioParam
- *
- * @method connect
- * @param {Object} unit
+ * Send output to a p5.js-sound, Web Audio Node, or use signal to
+ * control an AudioParam
+ *
+ * @method connect
+ * @for p5.Effect
+ * @param {Object} unit
*/
p5.Effect.prototype.connect = function (unit) {
var u = unit || p5.soundOut.input;
@@ -120,9 +124,9 @@ define(function (require) {
};
/**
- * Disconnect all output.
- *
- * @method disconnect
+ * Disconnect all output.
+ * @method disconnect
+ * @for p5.Effect
*/
p5.Effect.prototype.disconnect = function() {
if (this.output) {
diff --git a/src/envelope.js b/src/envelope.js
index cbfdfa31..d7f805c5 100644
--- a/src/envelope.js
+++ b/src/envelope.js
@@ -139,6 +139,7 @@ define(function (require) {
* Reset the envelope with a series of time/value pairs.
*
* @method set
+ * @for p5.Envelope
* @param {Number} attackTime Time (in seconds) before level
* reaches attackLevel
* @param {Number} attackLevel Typically an amplitude between
@@ -203,6 +204,7 @@ define(function (require) {
* .
*
* @method setADSR
+ * @for p5.Envelope
* @param {Number} attackTime Time (in seconds before envelope
* reaches Attack Level
* @param {Number} [decayTime] Time (in seconds) before envelope
@@ -269,6 +271,7 @@ define(function (require) {
* Set max (attackLevel) and min (releaseLevel) of envelope.
*
* @method setRange
+ * @for p5.Envelope
* @param {Number} aLevel attack level (defaults to 1)
* @param {Number} rLevel release level (defaults to 0)
* @example
@@ -365,6 +368,7 @@ define(function (require) {
* control all of them.
*
* @method setInput
+ * @for p5.Envelope
* @param {Object} [...inputs] A p5.sound object or
* Web Audio Param.
*/
@@ -380,6 +384,7 @@ define(function (require) {
* and frequency logarithmically.
*
* @method setExp
+ * @for p5.Envelope
* @param {Boolean} isExp true is exponential, false is linear
*/
p5.Envelope.prototype.setExp = function(isExp) {
@@ -404,6 +409,7 @@ define(function (require) {
* Web Audio Audio Param.
*
* @method play
+ * @for p5.Envelope
* @param {Object} unit A p5.sound object or
* Web Audio Param.
* @param {Number} [startTime] time from now (in seconds) at which to play
@@ -470,6 +476,7 @@ define(function (require) {
* Web Audio Param.
*
* @method triggerAttack
+ * @for p5.Envelope
* @param {Object} unit p5.sound Object or Web Audio Param
* @param {Number} secondsFromNow time from now (in seconds)
* @example
@@ -564,7 +571,7 @@ define(function (require) {
valToSet = this.control.getValueAtTime(t);
this.control.cancelScheduledValues(t);
this.control.linearRampToValueAtTime(valToSet, t);
-
+
}
// decay to decay level (if using ADSR, then decay level == sustain level)
@@ -591,6 +598,7 @@ define(function (require) {
* release level and release time.
*
* @method triggerRelease
+ * @for p5.Envelope
* @param {Object} unit p5.sound Object or Web Audio Param
* @param {Number} secondsFromNow time to trigger the release
* @example
@@ -705,6 +713,7 @@ define(function (require) {
* while a decrease uses decayTime.
*
* @method ramp
+ * @for p5.Envelope
* @param {Object} unit p5.sound Object or Web Audio Param
* @param {Number} secondsFromNow When to trigger the ramp
* @param {Number} v Target value
@@ -834,6 +843,7 @@ define(function (require) {
* again will override the initial add() with new values.
*
* @method add
+ * @for p5.Envelope
* @param {Number} number Constant number to add
* @return {p5.Envelope} Envelope Returns this envelope
* with scaled output
@@ -851,6 +861,7 @@ define(function (require) {
* again will override the initial mult() with new values.
*
* @method mult
+ * @for p5.Envelope
* @param {Number} number Constant number to multiply
* @return {p5.Envelope} Envelope Returns this envelope
* with scaled output
@@ -868,6 +879,7 @@ define(function (require) {
* again will override the initial scale() with new values.
*
* @method scale
+ * @for p5.Envelope
* @param {Number} inMin input range minumum
* @param {Number} inMax input range maximum
* @param {Number} outMin input range minumum
diff --git a/src/eq.js b/src/eq.js
index e384efe7..26b4409f 100644
--- a/src/eq.js
+++ b/src/eq.js
@@ -3,7 +3,7 @@
define(function (require) {
var Effect = require('effect');
- var EQFilter = require('src/eqFilter');
+ var EQFilter = require('eqFilter');
/**
* p5.EQ is an audio effect that performs the function of a multiband
@@ -32,40 +32,40 @@ define(function (require) {
* var eq;
* var band_names;
* var band_index;
- *
+ *
* var soundFile, play;
- *
+ *
* function preload() {
* soundFormats('mp3', 'ogg');
* soundFile = loadSound('assets/beat');
* }
- *
+ *
* function setup() {
* eq = new p5.EQ(3);
* soundFile.disconnect();
* eq.process(soundFile);
- *
+ *
* band_names = ['lows','mids','highs'];
* band_index = 0;
* play = false;
* textAlign(CENTER);
* }
- *
+ *
* function draw() {
* background(30);
* noStroke();
* fill(255);
* text('click to kill',50,25);
- *
+ *
* fill(255, 40, 255);
* textSize(26);
* text(band_names[band_index],50,55);
- *
+ *
* fill(255);
* textSize(9);
* text('space = play/pause',50,80);
* }
- *
+ *
* //If mouse is over canvas, cycle to the next band and kill the frequency
* function mouseClicked() {
* for (var i = 0; i < eq.bands.length; i++) {
@@ -76,7 +76,7 @@ define(function (require) {
* band_index === 2 ? band_index = 0 : band_index++;
* }
* }
- *
+ *
* //use space bar to trigger play / pause
* function keyPressed() {
* if (key===' ') {
@@ -97,7 +97,7 @@ define(function (require) {
/**
* The p5.EQ is built with abstracted p5.Filter objects.
- * To modify any bands, use methods of the
* p5.Filter API, especially `gain` and `freq`.
* Bands are stored in an array, with indices 0 - 3, or 0 - 7
@@ -128,10 +128,10 @@ define(function (require) {
if (i>0) {
this.bands[i-1].connect(this.bands[i].biquad);
} else {
- this.input.connect(this.bands[i].biquad);
+ this.input.connect(this.bands[i].biquad);
}
}
- this.bands[_eqsize-1].connect(this.output);
+ this.bands[_eqsize-1].connect(this.output);
};
p5.EQ.prototype = Object.create(Effect.prototype);
@@ -150,6 +150,7 @@ define(function (require) {
// * ex. eq.set(freq0, gain0, freq1, gain1, freq2, gain2);
// *
// * @method set
+ // * @for p5.EQ
// * @param {Number} [freq0] Frequency value for band with index 0
// * @param {Number} [gain0] Gain value for band with index 0
// * @param {Number} [freq1] Frequency value for band with index 1
@@ -185,6 +186,7 @@ define(function (require) {
* the raw biquad filter. This method returns an abstracted p5.Filter,
* which can be added to p5.EQ.bands, in order to create new EQ bands.
* @private
+ * @for p5.EQ
* @method _newBand
* @param {Number} freq
* @param {Number} res
diff --git a/src/fft.js b/src/fft.js
index 47d6b332..e912eb8a 100644
--- a/src/fft.js
+++ b/src/fft.js
@@ -141,6 +141,7 @@ define(function(require) {
* provided, FFT will analyze all sound in the sketch.
*
* @method setInput
+ * @for p5.FFT
* @param {Object} [source] p5.sound object (or web audio API source node)
*/
p5.FFT.prototype.setInput = function(source) {
@@ -163,6 +164,7 @@ define(function(require) {
* of a sound.
*
* @method waveform
+ * @for p5.FFT
* @param {Number} [bins] Must be a power of two between
* 16 and 1024. Defaults to 1024.
* @param {String} [precision] If any value is provided, will return results
@@ -212,6 +214,7 @@ define(function(require) {
* getEnergy()
.
*
* @method analyze
+ * @for p5.FFT
* @param {Number} [bins] Must be a power of two between
* 16 and 1024. Defaults to 1024.
* @param {Number} [scale] If "dB," returns decibel
@@ -312,6 +315,7 @@ define(function(require) {
* range of frequencies.
*
* @method getEnergy
+ * @for p5.FFT
* @param {Number|String} frequency1 Will return a value representing
* energy at this frequency. Alternately,
* the strings "bass", "lowMid" "mid",
@@ -393,6 +397,7 @@ define(function(require) {
* the results determine the spectral centroid.
*
* @method getCentroid
+ * @for p5.FFT
* @return {Number} Spectral Centroid Frequency Frequency of the spectral centroid in Hz.
*
*
@@ -503,6 +508,7 @@ define(function(require) {
* the results to group them into a smaller set of averages.
*
* @method linAverages
+ * @for p5.FFT
* @param {Number} N Number of returned frequency groups
* @return {Array} linearAverages Array of average amplitude values for each group
*/
@@ -542,6 +548,7 @@ define(function(require) {
* the results to group them into a smaller set of averages.
*
* @method logAverages
+ * @for p5.FFT
* @param {Array} octaveBands Array of Octave Bands objects for grouping
* @return {Array} logAverages Array of average amplitude values for each group
*/
@@ -583,6 +590,7 @@ define(function(require) {
* and produce less frequency groups.
*
* @method getOctaveBands
+ * @for p5.FFT
* @param {Number} N Specifies the 1/N type of generated octave bands
* @param {Number} fCtr0 Minimum central frequency for the lowest band
* @return {Array} octaveBands Array of octave band objects with their bounds
diff --git a/src/filter.js b/src/filter.js
index fe088dc0..d89f0bb8 100644
--- a/src/filter.js
+++ b/src/filter.js
@@ -1,28 +1,27 @@
'use strict';
define(function (require) {
- var p5sound = require('master');
var Effect = require('effect');
/**
* A p5.Filter uses a Web Audio Biquad Filter to filter
* the frequency response of an input source. Subclasses
* include:
- * * p5.LowPass
:
+ * p5.LowPass
:
* Allows frequencies below the cutoff frequency to pass through,
* and attenuates frequencies above the cutoff.
- * * p5.HighPass
:
+ * p5.HighPass
:
* The opposite of a lowpass filter.
- * * p5.BandPass
:
+ * p5.BandPass
:
* Allows a range of frequencies to pass through and attenuates
* the frequencies below and above this frequency range.
*
* The .res()
method controls either width of the
* bandpass, or resonance of the low/highpass cutoff frequency.
*
- * This class extends p5.Effect.
- * Methods amp(), chain(),
- * drywet(), connect(), and
+ * This class extends p5.Effect.
+ * Methods amp(), chain(),
+ * drywet(), connect(), and
* disconnect() are available.
*
* @class p5.Filter
@@ -79,8 +78,6 @@ define(function (require) {
* }
*
* var monoSynth = new p5.MonoSynth();
@@ -166,6 +168,7 @@ define(function (require) {
*
* @param {Number} secondsFromNow time to trigger the release
* @method triggerRelease
+ * @for p5.MonoSynth
* @example
*
* var monoSynth = new p5.MonoSynth();
@@ -191,6 +194,7 @@ define(function (require) {
* .
*
* @method setADSR
+ * @for p5.MonoSynth
* @param {Number} attackTime Time (in seconds before envelope
* reaches Attack Level
* @param {Number} [decayTime] Time (in seconds) before envelope
@@ -213,15 +217,19 @@ define(function (require) {
/**
* Getters and Setters
* @property {Number} attack
+ * @for p5.MonoSynth
*/
/**
* @property {Number} decay
+ * @for p5.MonoSynth
*/
/**
* @property {Number} sustain
+ * @for p5.MonoSynth
*/
/**
* @property {Number} release
+ * @for p5.MonoSynth
*/
Object.defineProperties(p5.MonoSynth.prototype, {
'attack': {
@@ -266,6 +274,7 @@ define(function (require) {
/**
* MonoSynth amp
* @method amp
+ * @for p5.MonoSynth
* @param {Number} vol desired volume
* @param {Number} [rampTime] Time to reach new volume
* @return {Number} new volume value
@@ -282,6 +291,7 @@ define(function (require) {
* Connect to a p5.sound / Web Audio object.
*
* @method connect
+ * @for p5.MonoSynth
* @param {Object} unit A p5.sound or Web Audio object
*/
@@ -294,6 +304,7 @@ define(function (require) {
* Disconnect all outputs
*
* @method disconnect
+ * @for p5.MonoSynth
*/
p5.MonoSynth.prototype.disconnect = function() {
if (this.output) {
@@ -306,6 +317,7 @@ define(function (require) {
* Get rid of the MonoSynth and free up its resources / memory.
*
* @method dispose
+ * @for p5.MonoSynth
*/
p5.MonoSynth.prototype.dispose = function() {
AudioVoice.prototype.dispose.apply(this);
diff --git a/src/oscillator.js b/src/oscillator.js
index d16bb148..e64a2b8b 100644
--- a/src/oscillator.js
+++ b/src/oscillator.js
@@ -113,6 +113,7 @@ define(function (require) {
* oscillator starts.
*
* @method start
+ * @for p5.Oscillator
* @param {Number} [time] startTime in seconds from now.
* @param {Number} [frequency] frequency in Hz.
*/
@@ -158,6 +159,7 @@ define(function (require) {
* oscillator stops.
*
* @method stop
+ * @for p5.Oscillator
* @param {Number} secondsFromNow Time, in seconds from now.
*/
p5.Oscillator.prototype.stop = function(time) {
@@ -174,6 +176,7 @@ define(function (require) {
* such as an oscillator to modulate amplitude with an audio signal.
*
* @method amp
+ * @for p5.Oscillator
* @param {Number|Object} vol between 0 and 1.0
* or a modulating signal/oscillator
* @param {Number} [rampTime] create a fade that lasts rampTime
@@ -214,6 +217,7 @@ define(function (require) {
* such as an oscillator to modulate the frequency with an audio signal.
*
* @method freq
+ * @for p5.Oscillator
* @param {Number|Object} Frequency Frequency in Hz
* or modulating signal/oscillator
* @param {Number} [rampTime] Ramp time (in seconds)
@@ -278,6 +282,7 @@ define(function (require) {
* Set type to 'sine', 'triangle', 'sawtooth' or 'square'.
*
* @method setType
+ * @for p5.Oscillator
* @param {String} type 'sine', 'triangle', 'sawtooth' or 'square'.
*/
p5.Oscillator.prototype.setType = function(type) {
@@ -292,6 +297,7 @@ define(function (require) {
* Connect to a p5.sound / Web Audio object.
*
* @method connect
+ * @for p5.Oscillator
* @param {Object} unit A p5.sound or Web Audio object
*/
p5.Oscillator.prototype.connect = function(unit) {
@@ -312,6 +318,7 @@ define(function (require) {
* Disconnect all outputs
*
* @method disconnect
+ * @for p5.Oscillator
*/
p5.Oscillator.prototype.disconnect = function() {
if (this.output) {
@@ -330,6 +337,7 @@ define(function (require) {
* Pan between Left (-1) and Right (1)
*
* @method pan
+ * @for p5.Oscillator
* @param {Number} panning Number between -1 and 1
* @param {Number} timeFromNow schedule this event to happen
* seconds from now
@@ -368,6 +376,7 @@ define(function (require) {
* based on the oscillator's current frequency.
*
* @method phase
+ * @for p5.Oscillator
* @param {Number} phase float between 0.0 and 1.0
*/
p5.Oscillator.prototype.phase = function(p) {
@@ -428,6 +437,7 @@ define(function (require) {
* will override the initial add() with a new value.
*
* @method add
+ * @for p5.Oscillator
* @param {Number} number Constant number to add
* @return {p5.Oscillator} Oscillator Returns this oscillator
* with scaled output
@@ -446,6 +456,7 @@ define(function (require) {
* again will override the initial mult() with a new value.
*
* @method mult
+ * @for p5.Oscillator
* @param {Number} number Constant number to multiply
* @return {p5.Oscillator} Oscillator Returns this oscillator
* with multiplied output
@@ -463,6 +474,7 @@ define(function (require) {
* again will override the initial scale() with new values.
*
* @method scale
+ * @for p5.Oscillator
* @param {Number} inMin input range minumum
* @param {Number} inMax input range maximum
* @param {Number} outMin input range minumum
diff --git a/src/panner3d.js b/src/panner3d.js
index 48e28d50..41cd910d 100644
--- a/src/panner3d.js
+++ b/src/panner3d.js
@@ -53,6 +53,7 @@ define(function (require) {
* Connect an audio sorce
*
* @method process
+ * @for p5.Panner3D
* @param {Object} src Input source
*/
p5.Panner3D.prototype.process = function(src) {
@@ -61,6 +62,7 @@ define(function (require) {
/**
* Set the X,Y,Z position of the Panner
* @method set
+ * @for p5.Panner3D
* @param {Number} xVal
* @param {Number} yVal
* @param {Number} zVal
@@ -79,16 +81,19 @@ define(function (require) {
/**
* Getter and setter methods for position coordinates
* @method positionX
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
/**
* Getter and setter methods for position coordinates
* @method positionY
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
/**
* Getter and setter methods for position coordinates
* @method positionZ
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
p5.Panner3D.prototype.positionX = function(xVal, time) {
@@ -128,6 +133,7 @@ define(function (require) {
/**
* Set the X,Y,Z position of the Panner
* @method orient
+ * @for p5.Panner3D
* @param {Number} xVal
* @param {Number} yVal
* @param {Number} zVal
@@ -146,16 +152,19 @@ define(function (require) {
/**
* Getter and setter methods for orient coordinates
* @method orientX
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
/**
* Getter and setter methods for orient coordinates
* @method orientY
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
/**
* Getter and setter methods for orient coordinates
* @method orientZ
+ * @for p5.Panner3D
* @return {Number} updated coordinate value
*/
p5.Panner3D.prototype.orientX = function(xVal, time) {
@@ -195,6 +204,7 @@ define(function (require) {
/**
* Set the rolloff factor and max distance
* @method setFalloff
+ * @for p5.Panner3D
* @param {Number} [maxDistance]
* @param {Number} [rolloffFactor]
*/
@@ -205,6 +215,7 @@ define(function (require) {
/**
* Maxium distance between the source and the listener
* @method maxDist
+ * @for p5.Panner3D
* @param {Number} maxDistance
* @return {Number} updated value
*/
@@ -218,6 +229,7 @@ define(function (require) {
/**
* How quickly the volume is reduced as the source moves away from the listener
* @method rollof
+ * @for p5.Panner3D
* @param {Number} rolloffFactor
* @return {Number} updated value
*/
diff --git a/src/polysynth.js b/src/polysynth.js
index 82b276d7..341e8efa 100644
--- a/src/polysynth.js
+++ b/src/polysynth.js
@@ -101,6 +101,7 @@ define(function (require) {
/**
* Construct the appropriate number of audiovoices
* @private
+ * @for p5.PolySynth
* @method _allocateVoices
*/
p5.PolySynth.prototype._allocateVoices = function() {
@@ -115,6 +116,7 @@ define(function (require) {
* Play a note by triggering noteAttack and noteRelease with sustain time
*
* @method play
+ * @for p5.PolySynth
* @param {Number} [note] midi note to play (ranging from 0 to 127 - 60 being a middle C)
* @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)
* @param {Number} [secondsFromNow] time from now (in seconds) at which to play
@@ -166,6 +168,7 @@ define(function (require) {
* in order to prevent the modified envelope from being used on other notes.
*
* @method noteADSR
+ * @for p5.PolySynth
* @param {Number} [note] Midi note on which ADSR should be set.
* @param {Number} [attackTime] Time (in seconds before envelope
* reaches Attack Level
@@ -195,6 +198,7 @@ define(function (require) {
* monosynth so that all notes are played with this envelope.
*
* @method setADSR
+ * @for p5.PolySynth
* @param {Number} [attackTime] Time (in seconds before envelope
* reaches Attack Level
* @param {Number} [decayTime] Time (in seconds) before envelope
@@ -221,6 +225,7 @@ define(function (require) {
* hold the sustain level until you let go.
*
* @method noteAttack
+ * @for p5.PolySynth
* @param {Number} [note] midi note on which attack should be triggered.
* @param {Number} [velocity] velocity of the note to play (ranging from 0 to 1)/
* @param {Number} [secondsFromNow] time from now (in seconds)
@@ -306,6 +311,7 @@ define(function (require) {
* scheduledValues after release
*
* @private
+ * @for p5.PolySynth
* @param {[type]} time [description]
* @param {[type]} value [description]
* @return {[type]} [description]
@@ -327,6 +333,7 @@ define(function (require) {
* release level and release time.
*
* @method noteRelease
+ * @for p5.PolySynth
* @param {Number} [note] midi note on which attack should be triggered.
* If no value is provided, all notes will be released.
* @param {Number} [secondsFromNow] time to trigger the release
@@ -397,6 +404,7 @@ define(function (require) {
* Connect to a p5.sound / Web Audio object.
*
* @method connect
+ * @for p5.PolySynth
* @param {Object} unit A p5.sound or Web Audio object
*/
p5.PolySynth.prototype.connect = function (unit) {
@@ -408,6 +416,7 @@ define(function (require) {
* Disconnect all outputs
*
* @method disconnect
+ * @for p5.PolySynth
*/
p5.PolySynth.prototype.disconnect = function() {
if (this.output) {
@@ -419,6 +428,7 @@ define(function (require) {
* Get rid of the MonoSynth and free up its resources / memory.
*
* @method dispose
+ * @for p5.PolySynth
*/
p5.PolySynth.prototype.dispose = function() {
this.audiovoices.forEach(function(voice) {
diff --git a/src/reverb.js b/src/reverb.js
index 2a43e74a..4b44b6f3 100644
--- a/src/reverb.js
+++ b/src/reverb.js
@@ -82,6 +82,7 @@ define(function (require) {
* Connect a source to the reverb, and assign reverb parameters.
*
* @method process
+ * @for p5.Reverb
* @param {Object} src p5.sound / Web Audio object with a sound
* output.
* @param {Number} [seconds] Duration of the reverb, in seconds.
@@ -113,6 +114,7 @@ define(function (require) {
* assigning a new input.
*
* @method set
+ * @for p5.Reverb
* @param {Number} [seconds] Duration of the reverb, in seconds.
* Min: 0, Max: 10. Defaults to 3.
* @param {Number} [decayRate] Percentage of decay with each echo.
@@ -141,6 +143,7 @@ define(function (require) {
* Set the output level of the reverb effect.
*
* @method amp
+ * @for p5.Reverb
* @param {Number} volume amplitude between 0 and 1.0
* @param {Number} [rampTime] create a fade that lasts rampTime
* @param {Number} [timeFromNow] schedule this event to happen
@@ -150,12 +153,14 @@ define(function (require) {
* Send output to a p5.sound or web audio object
*
* @method connect
+ * @for p5.Reverb
* @param {Object} unit
*/
/**
* Disconnect all output.
*
* @method disconnect
+ * @for p5.Reverb
*/
/**
@@ -284,6 +289,7 @@ define(function (require) {
* that will be used to generate an impulse response.
*
* @method createConvolver
+ * @for p5
* @param {String} path path to a sound file
* @param {Function} [callback] function to call if loading is successful.
* The object will be passed in as the argument
@@ -422,6 +428,7 @@ define(function (require) {
* Connect a source to the reverb, and assign reverb parameters.
*
* @method process
+ * @for p5.Convolver
* @param {Object} src p5.sound / Web Audio object with a sound
* output.
* @example
@@ -457,6 +464,7 @@ define(function (require) {
* with the toggleImpulse(id)
method.
*
* @property {Array} impulses
+ * @for p5.Convolver
*/
p5.Convolver.prototype.impulses = [];
@@ -467,6 +475,7 @@ define(function (require) {
* method.
*
* @method addImpulse
+ * @for p5.Convolver
* @param {String} path path to a sound file
* @param {Function} callback function (optional)
* @param {Function} errorCallback function (optional)
@@ -485,6 +494,7 @@ define(function (require) {
* array is created with this impulse as the only item.
*
* @method resetImpulse
+ * @for p5.Convolver
* @param {String} path path to a sound file
* @param {Function} callback function (optional)
* @param {Function} errorCallback function (optional)
@@ -513,6 +523,7 @@ define(function (require) {
* with the original filename.
*
* @method toggleImpulse
+ * @for p5.Convolver
* @param {String|Number} id Identify the impulse by its original filename
* (String), or by its position in the
* .impulses
Array (Number).
diff --git a/src/signal.js b/src/signal.js
index 05d731ec..0d280427 100644
--- a/src/signal.js
+++ b/src/signal.js
@@ -59,6 +59,7 @@ define(function (require) {
* Fade to value, for smooth transitions
*
* @method fade
+ * @for p5.Signal
* @param {Number} value Value to set this signal
* @param {Number} [secondsFromNow] Length of fade, in seconds from now
*/
@@ -73,6 +74,7 @@ define(function (require) {
* p5.Signal so that its amplitude values can be scaled.
*
* @method setInput
+ * @for p5.Signal
* @param {Object} input
*/
Signal.prototype.setInput = function(_input) {
@@ -92,6 +94,7 @@ define(function (require) {
* instead it returns a new p5.SignalAdd.
*
* @method add
+ * @for p5.Signal
* @param {Number} number
* @return {p5.Signal} object
*/
@@ -112,6 +115,7 @@ define(function (require) {
* instead it returns a new p5.SignalMult.
*
* @method mult
+ * @for p5.Signal
* @param {Number} number to multiply
* @return {p5.Signal} object
*/
@@ -132,6 +136,7 @@ define(function (require) {
* instead it returns a new p5.SignalScale.
*
* @method scale
+ * @for p5.Signal
* @param {Number} number to multiply
* @param {Number} inMin input range minumum
* @param {Number} inMax input range maximum
diff --git a/src/soundLoop.js b/src/soundLoop.js
index af9b489d..57955d6b 100644
--- a/src/soundLoop.js
+++ b/src/soundLoop.js
@@ -18,15 +18,15 @@ define(function (require) {
*
* var click;
* var looper1;
- *
+ *
* function preload() {
* click = loadSound('assets/drum.mp3');
* }
- *
+ *
* function setup() {
* //the looper's callback is passed the timeFromNow
- * //this value should be used as a reference point from
- * //which to schedule sounds
+ * //this value should be used as a reference point from
+ * //which to schedule sounds
* looper1 = new p5.SoundLoop(function(timeFromNow){
* click.play(timeFromNow);
* background(255 * (looper1.iterations % 2));
@@ -86,6 +86,7 @@ define(function (require) {
/**
* Start the loop
* @method start
+ * @for p5.SoundLoop
* @param {Number} [timeFromNow] schedule a starting time
*/
p5.SoundLoop.prototype.start = function(timeFromNow) {
@@ -100,6 +101,7 @@ define(function (require) {
/**
* Stop the loop
* @method stop
+ * @for p5.SoundLoop
* @param {Number} [timeFromNow] schedule a stopping time
*/
p5.SoundLoop.prototype.stop = function(timeFromNow) {
@@ -113,6 +115,7 @@ define(function (require) {
/**
* Pause the loop
* @method pause
+ * @for p5.SoundLoop
* @param {Number} [timeFromNow] schedule a pausing time
*/
p5.SoundLoop.prototype.pause = function(timeFromNow) {
@@ -130,9 +133,10 @@ define(function (require) {
* or to start a loop in synchronization with a loop that is already playing
* This method will schedule the implicit loop in sync with the explicit master loop
* i.e. loopToStart.syncedStart(loopToSyncWith)
- *
+ *
* @method syncedStart
- * @param {Object} otherLoop a p5.SoundLoop to sync with
+ * @for p5.SoundLoop
+ * @param {Object} otherLoop a p5.SoundLoop to sync with
* @param {Number} [timeFromNow] Start the loops in sync after timeFromNow seconds
*/
p5.SoundLoop.prototype.syncedStart = function(otherLoop, timeFromNow) {
@@ -155,6 +159,7 @@ define(function (require) {
/**
* Updates frequency value, reflected in next callback
* @private
+ * @for p5.SoundLoop
* @method _update
*/
p5.SoundLoop.prototype._update = function() {
@@ -164,6 +169,7 @@ define(function (require) {
/**
* Calculate the frequency of the clock's callback based on bpm, interval, and timesignature
* @private
+ * @for p5.SoundLoop
* @method _calcFreq
* @return {Number} new clock frequency value
*/
@@ -184,6 +190,7 @@ define(function (require) {
* Convert notation from musical time format to seconds
* Uses Tone.Time convention
* @private
+ * @for p5.SoundLoop
* @method _convertNotation
* @param {String} value value to be converted
* @return {Number} converted value in seconds
@@ -205,13 +212,18 @@ define(function (require) {
/**
* Helper conversion methods of measure and note
* @private
+ * @for p5.SoundLoop
* @method _measure
- * @private
- * @method _note
*/
p5.SoundLoop.prototype._measure = function(value) {
return value * this._timeSignature;
};
+
+ /**
+ * @private
+ * @method _note
+ * @for p5.SoundLoop
+ */
p5.SoundLoop.prototype._note = function(value) {
return this._timeSignature / value ;
};
@@ -222,6 +234,7 @@ define(function (require) {
* frequency, that will be reflected after the next callback
* beats per minute (defaults to 60)
* @property {Number} bpm
+ * @for p5.SoundLoop
*/
Object.defineProperty(p5.SoundLoop.prototype, 'bpm', {
get : function() {
@@ -242,6 +255,7 @@ define(function (require) {
/**
* number of quarter notes in a measure (defaults to 4)
* @property {Number} timeSignature
+ * @for p5.SoundLoop
*/
Object.defineProperty(p5.SoundLoop.prototype, 'timeSignature', {
get : function() {
@@ -262,6 +276,7 @@ define(function (require) {
/**
* length of the loops interval
* @property {Number|String} interval
+ * @for p5.SoundLoop
*/
Object.defineProperty(p5.SoundLoop.prototype, 'interval', {
get : function() {
@@ -277,6 +292,7 @@ define(function (require) {
/**
* how many times the callback has been called so far
* @property {Number} iterations
+ * @for p5.SoundLoop
* @readonly
*/
Object.defineProperty(p5.SoundLoop.prototype, 'iterations', {
diff --git a/src/soundRecorder.js b/src/soundRecorder.js
index 7f2ac563..bd52b3e5 100644
--- a/src/soundRecorder.js
+++ b/src/soundRecorder.js
@@ -110,6 +110,7 @@ define(function (require) {
* all audible p5.sound from your sketch.
*
* @method setInput
+ * @for p5.SoundRecorder
* @param {Object} [unit] p5.sound object or a web audio unit
* that outputs sound
*/
@@ -136,6 +137,7 @@ define(function (require) {
* transfered to the p5.SoundFile.
*
* @method record
+ * @for p5.SoundRecorder
* @param {p5.SoundFile} soundFile p5.SoundFile
* @param {Number} [duration] Time (in seconds)
* @param {Function} [callback] The name of a function that will be
@@ -169,6 +171,7 @@ define(function (require) {
* was provided on record, that function will be called.
*
* @method stop
+ * @for p5.SoundRecorder
*/
p5.SoundRecorder.prototype.stop = function() {
this.recording = false;
@@ -187,6 +190,7 @@ define(function (require) {
* internal method called on audio process
*
* @private
+ * @for p5.SoundRecorder
* @param {AudioProcessorEvent} event
*/
p5.SoundRecorder.prototype._audioprocess = function(event) {
diff --git a/src/soundfile.js b/src/soundfile.js
index bef489f9..7f6dd69b 100644
--- a/src/soundfile.js
+++ b/src/soundfile.js
@@ -156,6 +156,7 @@ define(function (require) {
* local server is recommended when loading external files.
*
* @method loadSound
+ * @for p5
* @param {String|Array} path Path to the sound file, or an array with
* paths to soundfiles in multiple formats
* i.e. ['sound.ogg', 'sound.mp3'].
@@ -206,6 +207,7 @@ define(function (require) {
* as an optional parameter.
*
* @private
+ * @for p5.SoundFile
* @param {Function} [successCallback] Name of a function to call once file loads
* @param {Function} [errorCallback] Name of a function to call if there is an error
*/
@@ -319,6 +321,7 @@ define(function (require) {
* Returns true if the sound file finished loading successfully.
*
* @method isLoaded
+ * @for p5.SoundFile
* @return {Boolean}
*/
p5.SoundFile.prototype.isLoaded = function() {
@@ -333,6 +336,7 @@ define(function (require) {
* Play the p5.SoundFile
*
* @method play
+ * @for p5.SoundFile
* @param {Number} [startTime] (optional) schedule playback to start (in seconds from now).
* @param {Number} [rate] (optional) playback rate
* @param {Number} [amp] (optional) amplitude (volume)
@@ -448,6 +452,7 @@ define(function (require) {
* not already playing. Sustain is the default mode.
*
* @method playMode
+ * @for p5.SoundFile
* @param {String} str 'restart' or 'sustain' or 'untilDone'
* @example
*
@@ -495,6 +500,7 @@ define(function (require) {
* it will continue to loop after it is unpaused with .play().
*
* @method pause
+ * @for p5.SoundFile
* @param {Number} [startTime] (optional) schedule event to occur
* seconds from now
* @example
@@ -550,6 +556,7 @@ define(function (require) {
* playback rate, playback volume, loopStart, loopEnd.
*
* @method loop
+ * @for p5.SoundFile
* @param {Number} [startTime] (optional) schedule event to occur
* seconds from now
* @param {Number} [rate] (optional) playback rate
@@ -568,6 +575,7 @@ define(function (require) {
* reaches the end of the current playback.
*
* @method setLoop
+ * @for p5.SoundFile
* @param {Boolean} Boolean set looping to true or false
*/
p5.SoundFile.prototype.setLoop = function(bool) {
@@ -590,6 +598,7 @@ define(function (require) {
* Returns 'true' if a p5.SoundFile is currently looping and playing, 'false' if not.
*
* @method isLooping
+ * @for p5.SoundFile
* @return {Boolean}
*/
p5.SoundFile.prototype.isLooping = function() {
@@ -607,6 +616,7 @@ define(function (require) {
* paused or stopped).
*
* @method isPlaying
+ * @for p5.SoundFile
* @return {Boolean}
*/
p5.SoundFile.prototype.isPlaying = function() {
@@ -618,6 +628,7 @@ define(function (require) {
* playing or stopped).
*
* @method isPaused
+ * @for p5.SoundFile
* @return {Boolean}
*/
p5.SoundFile.prototype.isPaused = function() {
@@ -628,6 +639,7 @@ define(function (require) {
* Stop soundfile playback.
*
* @method stop
+ * @for p5.SoundFile
* @param {Number} [startTime] (optional) schedule event to occur
* in seconds from now
*/
@@ -686,6 +698,7 @@ define(function (require) {
* oscillator to modulate the amplitude with an audio signal.
*
* @method setVolume
+ * @for p5.SoundFile
* @param {Number|Object} volume Volume (amplitude) between 0.0
* and 1.0 or modulating signal/oscillator
* @param {Number} [rampTime] Fade for t seconds
@@ -726,6 +739,7 @@ define(function (require) {
* Default is 0.0 (center).
*
* @method pan
+ * @for p5.SoundFile
* @param {Number} [panValue] Set the stereo panner
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
@@ -764,6 +778,7 @@ define(function (require) {
* Returns the current stereo pan position (-1.0 to 1.0)
*
* @method getPan
+ * @for p5.SoundFile
* @return {Number} Returns the stereo pan setting of the Oscillator
* as a number between -1.0 (left) and 1.0 (right).
* 0.0 is center and default.
@@ -777,6 +792,7 @@ define(function (require) {
* Values less than zero will reverse the audio buffer.
*
* @method rate
+ * @for p5.SoundFile
* @param {Number} [playbackRate] Set the playback rate. 1.0 is normal,
* .5 is half-speed, 2.0 is twice as fast.
* Values less than zero play backwards.
@@ -860,6 +876,7 @@ define(function (require) {
* Returns the duration of a sound file in seconds.
*
* @method duration
+ * @for p5.SoundFile
* @return {Number} The duration of the soundFile in seconds.
*/
p5.SoundFile.prototype.duration = function() {
@@ -877,6 +894,7 @@ define(function (require) {
* has been called, currentTime will count backwards.
*
* @method currentTime
+ * @for p5.SoundFile
* @return {Number} currentTime of the soundFile in seconds.
*/
p5.SoundFile.prototype.currentTime = function() {
@@ -891,6 +909,7 @@ define(function (require) {
* entire duration from start to finish.
*
* @method jump
+ * @for p5.SoundFile
* @param {Number} cueTime cueTime of the soundFile in seconds.
* @param {Number} duration duration in seconds.
*/
@@ -915,6 +934,7 @@ define(function (require) {
* For example, Mono = 1, Stereo = 2.
*
* @method channels
+ * @for p5.SoundFile
* @return {Number} [channels]
*/
p5.SoundFile.prototype.channels = function() {
@@ -925,6 +945,7 @@ define(function (require) {
* Return the sample rate of the sound file.
*
* @method sampleRate
+ * @for p5.SoundFile
* @return {Number} [sampleRate]
*/
p5.SoundFile.prototype.sampleRate = function() {
@@ -936,6 +957,7 @@ define(function (require) {
* Equal to sampleRate * duration.
*
* @method frames
+ * @for p5.SoundFile
* @return {Number} [sampleCount]
*/
p5.SoundFile.prototype.frames = function() {
@@ -952,6 +974,7 @@ define(function (require) {
* Inspired by Wavesurfer.js.
*
* @method getPeaks
+ * @for p5.SoundFile
* @params {Number} [length] length is the size of the returned array.
* Larger length results in more precision.
* Defaults to 5*width of the browser window.
@@ -1006,6 +1029,7 @@ define(function (require) {
* Playback must be handled separately (see example).
*
* @method reverseBuffer
+ * @for p5.SoundFile
* @example
*
* var drum;
@@ -1052,6 +1076,7 @@ define(function (require) {
* stop is called.
*
* @method onended
+ * @for p5.SoundFile
* @param {Function} callback function to call when the
* soundfile has ended.
*/
@@ -1111,6 +1136,7 @@ define(function (require) {
* output when they are created.
*
* @method connect
+ * @for p5.SoundFile
* @param {Object} [object] Audio object that accepts an input
*/
p5.SoundFile.prototype.connect = function(unit) {
@@ -1130,6 +1156,7 @@ define(function (require) {
* Disconnects the output of this p5sound object.
*
* @method disconnect
+ * @for p5.SoundFile
*/
p5.SoundFile.prototype.disconnect = function() {
if (this.panner) {
@@ -1148,6 +1175,7 @@ define(function (require) {
* new path (URL).
*
* @method setPath
+ * @for p5.SoundFile
* @param {String} path path to audio file
* @param {Function} callback Callback
*/
@@ -1161,6 +1189,7 @@ define(function (require) {
* Replace the current Audio Buffer with a new Buffer.
*
* @method setBuffer
+ * @for p5.SoundFile
* @param {Array} buf Array of Float32 Array(s). 2 Float32 Arrays
* will create a stereo source. 1 will create
* a mono source.
@@ -1250,6 +1279,7 @@ define(function (require) {
* it decreases the threshold and re-runs the analysis until either minPeaks or minThreshold are reached.
*
* @method processPeaks
+ * @for p5.SoundFile
* @param {Function} callback a function to call once this data is returned
* @param {Number} [initThreshold] initial threshold defaults to 0.9
* @param {Number} [minThreshold] minimum threshold defaults to 0.22
@@ -1501,6 +1531,7 @@ define(function (require) {
*
*
* @method addCue
+ * @for p5.SoundFile
* @param {Number} time Time in seconds, relative to this media
* element's playback. For example, to trigger
* an event every time playback reaches two
@@ -1576,6 +1607,7 @@ define(function (require) {
* addCue method.
*
* @method removeCue
+ * @for p5.SoundFile
* @param {Number} id ID of the cue, as returned by addCue
*/
p5.SoundFile.prototype.removeCue = function(id) {
@@ -1631,8 +1663,9 @@ define(function (require) {
* Save a p5.SoundFile as a .wav file. The browser will prompt the user
* to download the file to their device. To upload a file to a server, see
* getBlob
- *
+ *
* @method save
+ * @for p5.SoundFile
* @param {String} [fileName] name of the resulting .wav file.
* @example
*
@@ -1666,9 +1699,10 @@ define(function (require) {
* use the `httpDo` options object to send a POST request with some
* specific options: we encode the request as `multipart/form-data`,
* and attach the blob as one of the form values using `FormData`.
- *
+ *
*
* @method getBlob
+ * @for p5.SoundFile
* @returns {Blob} A file-like data object
* @example
*
diff --git a/webpack.config.js b/webpack.config.js
new file mode 100644
index 00000000..eea75e91
--- /dev/null
+++ b/webpack.config.js
@@ -0,0 +1,80 @@
+// Webpack build for p5.sound: emits an unminified (p5.sound.js) and a
+// minified (p5.sound.min.js) bundle into lib/, replacing the requirejs build.
+const webpack = require('webpack');
+const path = require('path');
+const fs = require('fs');
+const UglifyJsPlugin = require('uglifyjs-webpack-plugin');
+
+module.exports = {
+  context: path.join(__dirname, 'src'),
+  entry: {
+    // Same entry twice: output filename defaults to '[name].js', and only
+    // the '.min' chunk matches the UglifyJsPlugin 'include' filter below.
+    'p5.sound': './app.js',
+    'p5.sound.min': './app.js'
+  },
+  output: {
+    // where we want to output built files
+    path: path.join(__dirname, 'lib')
+  },
+  mode: 'production',
+  devtool: 'source-map',
+  // devtool: production ? false : "eval",
+  plugins: [
+    // Resolve bare 'Tone/...' requests against the installed tone package.
+    new webpack.NormalModuleReplacementPlugin(/Tone(.*)/, function(resource) {
+      resource.request = path.join(__dirname, './node_modules/tone/', resource.request);
+    }),
+    // Prepend the UMD wrapper fragment to every emitted bundle.
+    new webpack.BannerPlugin({
+      banner: fs.readFileSync('./fragments/before.frag').toString(),
+      raw: true,
+    })
+  ],
+  module: {
+    rules: [
+      {
+        // Pre-minify Tone.js sources so they stay compact even in the
+        // unminified bundle.
+        test: /Tone(.*)/,
+        use: {
+          loader: 'uglify-loader'
+        }
+      },
+      {
+        test: /\.js$/,
+        exclude: /(node_modules)/,
+        use: {
+          loader: 'babel-loader',
+          options: {
+            presets: ['@babel/preset-env']
+          }
+        }
+      }
+    ]
+  },
+  optimization: {
+    minimize: true,
+    minimizer: [
+      // Only minify the *.min.js output; p5.sound.js stays readable.
+      new UglifyJsPlugin({
+        include: [/\.min\.js$/],
+        cache: true,
+        parallel: true,
+        uglifyOptions: {
+          compress: {
+            drop_console: true
+          },
+          ecma: 6,
+          mangle: true,
+          output: {
+            comments: false
+          }
+        },
+        sourceMap: true,
+      })
+    ]
+  },
+  resolve: {
+    modules: [path.resolve(__dirname, 'src'), 'node_modules']
+  }
+};