diff --git a/brendafernanda-anna/README.md b/brendafernanda-anna/README.md
new file mode 100644
index 0000000..4301fed
--- /dev/null
+++ b/brendafernanda-anna/README.md
@@ -0,0 +1,2 @@
+# Cenario de Natal
+Alunas: Brenda Fernanda e Anna Paula
\ No newline at end of file
diff --git a/brendafernanda-anna/bonecodeneve.png b/brendafernanda-anna/bonecodeneve.png
new file mode 100644
index 0000000..beff8da
Binary files /dev/null and b/brendafernanda-anna/bonecodeneve.png differ
diff --git a/brendafernanda-anna/ceu.jpg b/brendafernanda-anna/ceu.jpg
new file mode 100644
index 0000000..f340772
Binary files /dev/null and b/brendafernanda-anna/ceu.jpg differ
diff --git a/brendafernanda-anna/estrela.png b/brendafernanda-anna/estrela.png
new file mode 100644
index 0000000..105eba0
Binary files /dev/null and b/brendafernanda-anna/estrela.png differ
diff --git a/brendafernanda-anna/estrelinha.png b/brendafernanda-anna/estrelinha.png
new file mode 100644
index 0000000..fdda36b
Binary files /dev/null and b/brendafernanda-anna/estrelinha.png differ
diff --git a/brendafernanda-anna/floco1.png b/brendafernanda-anna/floco1.png
new file mode 100644
index 0000000..fcbbcb3
Binary files /dev/null and b/brendafernanda-anna/floco1.png differ
diff --git a/brendafernanda-anna/floco2.png b/brendafernanda-anna/floco2.png
new file mode 100644
index 0000000..8dc283a
Binary files /dev/null and b/brendafernanda-anna/floco2.png differ
diff --git a/brendafernanda-anna/floco3.png b/brendafernanda-anna/floco3.png
new file mode 100644
index 0000000..9659151
Binary files /dev/null and b/brendafernanda-anna/floco3.png differ
diff --git a/brendafernanda-anna/floco4.png b/brendafernanda-anna/floco4.png
new file mode 100644
index 0000000..e9ebe1a
Binary files /dev/null and b/brendafernanda-anna/floco4.png differ
diff --git a/brendafernanda-anna/index.html b/brendafernanda-anna/index.html
new file mode 100644
index 0000000..472efd4
--- /dev/null
+++ b/brendafernanda-anna/index.html
@@ -0,0 +1,12 @@
+
+
+
+p5js - boilerplate
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/brendafernanda-anna/musica.mp3 b/brendafernanda-anna/musica.mp3
new file mode 100644
index 0000000..bbeadeb
Binary files /dev/null and b/brendafernanda-anna/musica.mp3 differ
diff --git a/brendafernanda-anna/p5.min.js b/brendafernanda-anna/p5.min.js
new file mode 100644
index 0000000..82d3099
--- /dev/null
+++ b/brendafernanda-anna/p5.min.js
@@ -0,0 +1,9 @@
+/*!
p5.js v0.5.3 August 17, 2016 */ !function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.p5=a()}}(function(){var define,module,exports;return function a(b,c,d){function e(g,h){if(!c[g]){if(!b[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.code="MODULE_NOT_FOUND",j}var k=c[g]={exports:{}};b[g][0].call(k.exports,function(a){var c=b[g][1][a];return e(c?c:a)},k,k.exports,a,b,c,d)}return c[g].exports}for(var f="function"==typeof require&&require,g=0;g0,"No "+b+" specified.")}var c=[],d=this;b("familyName"),b("weightName"),b("manufacturer"),b("copyright"),b("version"),a(this.unitsPerEm>0,"No unitsPerEm specified.")},d.prototype.toTables=function(){return f.fontToTable(this)},d.prototype.toBuffer=function(){for(var a=this.toTables(),b=a.encode(),c=new ArrayBuffer(b.length),d=new Uint8Array(c),e=0;eD;D+=1){var E=l.getTag(m,C),F=l.getULong(m,C+8);switch(E){case"cmap":k.tables.cmap=n.parse(m,F),k.encoding=new i.CmapEncoding(k.tables.cmap),k.encoding||(k.supported=!1);break;case"head":k.tables.head=r.parse(m,F),k.unitsPerEm=k.tables.head.unitsPerEm,b=k.tables.head.indexToLocFormat;break;case"hhea":k.tables.hhea=s.parse(m,F),k.ascender=k.tables.hhea.ascender,k.descender=k.tables.hhea.descender,k.numberOfHMetrics=k.tables.hhea.numberOfHMetrics;break;case"hmtx":c=F;break;case"maxp":k.tables.maxp=w.parse(m,F),k.numGlyphs=k.tables.maxp.numGlyphs;break;case"name":k.tables.name=x.parse(m,F),k.familyName=k.tables.name.fontFamily,k.styleName=k.tables.name.fontSubfamily;break;case"OS/2":k.tables.os2=y.parse(m,F);break;case"post":k.tables.post=z.parse(m,F),k.glyphNames=new i.GlyphNames(k.tables.post);break;case"glyf":d=F;break;case"loca":e=F;break;case"CFF ":f=F;break;case"kern":g=F;break;case"GPOS":h=F}C+=16}if(d&&e){var G=0===b,H=v.parse(m,e,k.numGlyphs,G);k.glyphs=p.parse(m,d,H,k),t.parse(m,c,k.numberOfHMetrics,k.numGlyphs,k.glyphs),i.addGlyphNames(k)}else f?(o.parse(m,f,k),i.addGlyphNames(k)):k.supported=!1;return k.supported&&(g?k.kerningPairs=u.parse(m,g):k.kerningPairs={},h&&q.parse(m,h,k)),k}function h(a,b){var c="undefined"==typeof window,d=c?e:f;d(a,function(a,c){if(a)return b(a);var d=g(c);return d.supported?b(null,d):b("Font is not supported (is this a Postscript font?)")})}var i=a("./encoding"),j=a("./font"),k=a("./glyph"),l=a("./parse"),m=a("./path"),n=a("./tables/cmap"),o=a("./tables/cff"),p=a("./tables/glyf"),q=a("./tables/gpos"),r=a("./tables/head"),s=a("./tables/hhea"),t=a("./tables/hmtx"),u=a("./tables/kern"),v=a("./tables/loca"),w=a("./tables/maxp"),x=a("./tables/name"),y=a("./tables/os2"),z=a("./tables/post");c._parse=l,c.Font=j.Font,c.Glyph=k.Glyph,c.Path=m.Path,c.parse=g,c.load=h},{"./encoding":4,"./font":5,"./glyph":6,"./parse":9,"./path":10,"./tables/cff":12,"./tables/cmap":13,"./tables/glyf":14,"./tables/gpos":15,"./tables/head":16,"./tables/hhea":17,"./tables/hmtx":18,"./tables/kern":19,"./tables/loca":20,"./tables/maxp":21,"./tables/name":22,"./tables/os2":23,"./tables/post":24,fs:1}],9:[function(a,b,c){"use strict";function d(a,b){this.data=a,this.offset=b,this.relativeOffset=0}c.getByte=function(a,b){return a.getUint8(b)},c.getCard8=c.getByte,c.getUShort=function(a,b){return a.getUint16(b,!1)},c.getCard16=c.getUShort,c.getShort=function(a,b){return 
a.getInt16(b,!1)},c.getULong=function(a,b){return a.getUint32(b,!1)},c.getFixed=function(a,b){var c=a.getInt16(b,!1),d=a.getUint16(b+2,!1);return c+d/65535},c.getTag=function(a,b){for(var c="",d=b;b+4>d;d+=1)c+=String.fromCharCode(a.getInt8(d));return c},c.getOffset=function(a,b,c){for(var d=0,e=0;c>e;e+=1)d<<=8,d+=a.getUint8(b+e);return d},c.getBytes=function(a,b,c){for(var d=[],e=b;c>e;e+=1)d.push(a.getUint8(e));return d},c.bytesToString=function(a){for(var b="",c=0;cf;f++)b[f]=c.getUShort(d,e),e+=2;return this.relativeOffset+=2*a,b},d.prototype.parseString=function(a){var b=this.data,c=this.offset+this.relativeOffset,d="";this.relativeOffset+=a;for(var e=0;a>e;e++)d+=String.fromCharCode(b.getUint8(c+e));return d},d.prototype.parseTag=function(){return this.parseString(4)},d.prototype.parseLongDateTime=function(){var a=c.getULong(this.data,this.offset+this.relativeOffset+4);return this.relativeOffset+=8,a},d.prototype.parseFixed=function(){var a=c.getULong(this.data,this.offset+this.relativeOffset);return this.relativeOffset+=4,a/65536},d.prototype.parseVersion=function(){var a=c.getUShort(this.data,this.offset+this.relativeOffset),b=c.getUShort(this.data,this.offset+this.relativeOffset+2);return this.relativeOffset+=4,a+b/4096/10},d.prototype.skip=function(a,b){void 0===b&&(b=1),this.relativeOffset+=e[a]*b},c.Parser=d},{}],10:[function(a,b,c){"use strict";function d(){this.commands=[],this.fill="black",this.stroke=null,this.strokeWidth=1}d.prototype.moveTo=function(a,b){this.commands.push({type:"M",x:a,y:b})},d.prototype.lineTo=function(a,b){this.commands.push({type:"L",x:a,y:b})},d.prototype.curveTo=d.prototype.bezierCurveTo=function(a,b,c,d,e,f){this.commands.push({type:"C",x1:a,y1:b,x2:c,y2:d,x:e,y:f})},d.prototype.quadTo=d.prototype.quadraticCurveTo=function(a,b,c,d){this.commands.push({type:"Q",x1:a,y1:b,x:c,y:d})},d.prototype.close=d.prototype.closePath=function(){this.commands.push({type:"Z"})},d.prototype.extend=function(a){a.commands&&(a=a.commands),Array.prototype.push.apply(this.commands,a)},d.prototype.draw=function(a){a.beginPath();for(var b=0;b=0&&c>0&&(a+=" "),a+=b(d)}return a}a=void 0!==a?a:2;for(var d="",e=0;ed;d+=1)g.push(J.getOffset(a,k,j)),k+=j;f=e+g[i]}else f=b+2;for(d=0;d>4,g=15&e;if(f===c)break;if(b+=d[f],g===c)break;b+=d[g]}return parseFloat(b)}function g(a,b){var c,d,e,g;if(28===b)return c=a.parseByte(),d=a.parseByte(),c<<8|d;if(29===b)return c=a.parseByte(),d=a.parseByte(),e=a.parseByte(),g=a.parseByte(),c<<24|d<<16|e<<8|g;if(30===b)return f(a);if(b>=32&&246>=b)return b-139;if(b>=247&&250>=b)return c=a.parseByte(),256*(b-247)+c+108;if(b>=251&&254>=b)return c=a.parseByte(),256*-(b-251)-c-108;throw new Error("Invalid b0 "+b)}function h(a){for(var b={},c=0;c=i?(12===i&&(i=1200+d.parseByte()),e.push([i,f]),f=[]):f.push(g(d,i))}return h(e)}function j(a,b){return b=390>=b?H.cffStandardStrings[b]:a[b-391]}function k(a,b,c){for(var d={},e=0;ee;e+=1)f=h.parseSID(),i.push(j(d,f));else if(1===k)for(;i.length<=c;)for(f=h.parseSID(),g=h.parseCard8(),e=0;g>=e;e+=1)i.push(j(d,f)),f+=1;else{if(2!==k)throw new Error("Unknown charset format "+k);for(;i.length<=c;)for(f=h.parseSID(),g=h.parseCard16(),e=0;g>=e;e+=1)i.push(j(d,f)),f+=1}return i}function p(a,b,c){var d,e,f={},g=new J.Parser(a,b),h=g.parseCard8();if(0===h){var i=g.parseCard8();for(d=0;i>d;d+=1)e=g.parseCard8(),f[e]=d}else{if(1!==h)throw new Error("Unknown encoding format "+h);var j=g.parseCard8();for(e=1,d=0;j>d;d+=1)for(var k=g.parseCard8(),l=g.parseCard8(),m=k;k+l>=m;m+=1)f[m]=e,e+=1}return new 
H.CffEncoding(f,c)}function q(a,b,c){function d(a,b){p&&k.closePath(),k.moveTo(a,b),p=!0}function e(){ +var b;b=l.length%2!==0,b&&!n&&(o=l.shift()+a.nominalWidthX),m+=l.length>>1,l.length=0,n=!0}function f(c){for(var s,t,u,v,w,x,y,z,A,B,C,D,E=0;E1&&!n&&(o=l.shift()+a.nominalWidthX,n=!0),r+=l.pop(),d(q,r);break;case 5:for(;l.length>0;)q+=l.shift(),r+=l.shift(),k.lineTo(q,r);break;case 6:for(;l.length>0&&(q+=l.shift(),k.lineTo(q,r),0!==l.length);)r+=l.shift(),k.lineTo(q,r);break;case 7:for(;l.length>0&&(r+=l.shift(),k.lineTo(q,r),0!==l.length);)q+=l.shift(),k.lineTo(q,r);break;case 8:for(;l.length>0;)g=q+l.shift(),h=r+l.shift(),i=g+l.shift(),j=h+l.shift(),q=i+l.shift(),r=j+l.shift(),k.curveTo(g,h,i,j,q,r);break;case 10:w=l.pop()+a.subrsBias,x=a.subrs[w],x&&f(x);break;case 11:return;case 12:switch(F=c[E],E+=1,F){case 35:g=q+l.shift(),h=r+l.shift(),i=g+l.shift(),j=h+l.shift(),y=i+l.shift(),z=j+l.shift(),A=y+l.shift(),B=z+l.shift(),C=A+l.shift(),D=B+l.shift(),q=C+l.shift(),r=D+l.shift(),l.shift(),k.curveTo(g,h,i,j,y,z),k.curveTo(A,B,C,D,q,r);break;case 34:g=q+l.shift(),h=r,i=g+l.shift(),j=h+l.shift(),y=i+l.shift(),z=j,A=y+l.shift(),B=j,C=A+l.shift(),D=r,q=C+l.shift(),k.curveTo(g,h,i,j,y,z),k.curveTo(A,B,C,D,q,r);break;case 36:g=q+l.shift(),h=r+l.shift(),i=g+l.shift(),j=h+l.shift(),y=i+l.shift(),z=j,A=y+l.shift(),B=j,C=A+l.shift(),D=B+l.shift(),q=C+l.shift(),k.curveTo(g,h,i,j,y,z),k.curveTo(A,B,C,D,q,r);break;case 37:g=q+l.shift(),h=r+l.shift(),i=g+l.shift(),j=h+l.shift(),y=i+l.shift(),z=j+l.shift(),A=y+l.shift(),B=z+l.shift(),C=A+l.shift(),D=B+l.shift(),Math.abs(C-q)>Math.abs(D-r)?q=C+l.shift():r=D+l.shift(),k.curveTo(g,h,i,j,y,z),k.curveTo(A,B,C,D,q,r);break;default:console.log("Glyph "+b.index+": unknown operator 1200"+F),l.length=0}break;case 14:l.length>0&&!n&&(o=l.shift()+a.nominalWidthX,n=!0),p&&(k.closePath(),p=!1);break;case 18:e();break;case 19:case 20:e(),E+=m+7>>3;break;case 21:l.length>2&&!n&&(o=l.shift()+a.nominalWidthX,n=!0),r+=l.pop(),q+=l.pop(),d(q,r);break;case 22:l.length>1&&!n&&(o=l.shift()+a.nominalWidthX,n=!0),q+=l.pop(),d(q,r);break;case 23:e();break;case 24:for(;l.length>2;)g=q+l.shift(),h=r+l.shift(),i=g+l.shift(),j=h+l.shift(),q=i+l.shift(),r=j+l.shift(),k.curveTo(g,h,i,j,q,r);q+=l.shift(),r+=l.shift(),k.lineTo(q,r);break;case 25:for(;l.length>6;)q+=l.shift(),r+=l.shift(),k.lineTo(q,r);g=q+l.shift(),h=r+l.shift(),i=g+l.shift(),j=h+l.shift(),q=i+l.shift(),r=j+l.shift(),k.curveTo(g,h,i,j,q,r);break;case 26:for(l.length%2&&(q+=l.shift());l.length>0;)g=q,h=r+l.shift(),i=g+l.shift(),j=h+l.shift(),q=i,r=j+l.shift(),k.curveTo(g,h,i,j,q,r);break;case 27:for(l.length%2&&(r+=l.shift());l.length>0;)g=q+l.shift(),h=r,i=g+l.shift(),j=h+l.shift(),q=i+l.shift(),r=j,k.curveTo(g,h,i,j,q,r);break;case 28:s=c[E],t=c[E+1],l.push((s<<24|t<<16)>>16),E+=2;break;case 29:w=l.pop()+a.gsubrsBias,x=a.gsubrs[w],x&&f(x);break;case 30:for(;l.length>0&&(g=q,h=r+l.shift(),i=g+l.shift(),j=h+l.shift(),q=i+l.shift(),r=j+(1===l.length?l.shift():0),k.curveTo(g,h,i,j,q,r),0!==l.length);)g=q+l.shift(),h=r,i=g+l.shift(),j=h+l.shift(),r=j+l.shift(),q=i+(1===l.length?l.shift():0),k.curveTo(g,h,i,j,q,r);break;case 31:for(;l.length>0&&(g=q+l.shift(),h=r,i=g+l.shift(),j=h+l.shift(),r=j+l.shift(),q=i+(1===l.length?l.shift():0),k.curveTo(g,h,i,j,q,r),0!==l.length);)g=q,h=r+l.shift(),i=g+l.shift(),j=h+l.shift(),q=i+l.shift(),r=j+(1===l.length?l.shift():0),k.curveTo(g,h,i,j,q,r);break;default:32>F?console.log("Glyph "+b.index+": unknown operator 
"+F):247>F?l.push(F-139):251>F?(s=c[E],E+=1,l.push(256*(F-247)+s+108)):255>F?(s=c[E],E+=1,l.push(256*-(F-251)-s-108)):(s=c[E],t=c[E+1],u=c[E+2],v=c[E+3],E+=4,l.push((s<<24|t<<16|u<<8|v)/65536))}}}var g,h,i,j,k=new K.Path,l=[],m=0,n=!1,o=a.defaultWidthX,p=!1,q=0,r=0;return f(c),b.advanceWidth=o,k}function r(a){var b;return b=a.length<1240?107:a.length<33900?1131:32768}function s(a,b,c){c.tables.cff={};var d=l(a,b),f=e(a,d.endOffset,J.bytesToString),g=e(a,f.endOffset),h=e(a,g.endOffset,J.bytesToString),i=e(a,h.endOffset);c.gsubrs=i.objects,c.gsubrsBias=r(c.gsubrs);var j=new DataView(new Uint8Array(g.objects[0]).buffer),k=m(j,h.objects);c.tables.cff.topDict=k;var s=b+k["private"][1],t=n(a,s,k["private"][0],h.objects);if(c.defaultWidthX=t.defaultWidthX,c.nominalWidthX=t.nominalWidthX,0!==t.subrs){var u=s+t.subrs,v=e(a,u);c.subrs=v.objects,c.subrsBias=r(c.subrs)}else c.subrs=[],c.subrsBias=0;var w=e(a,b+k.charStrings);c.nGlyphs=w.objects.length;var x=o(a,b+k.charset,c.nGlyphs,h.objects);0===k.encoding?c.cffEncoding=new H.CffEncoding(H.cffStandardEncoding,x):1===k.encoding?c.cffEncoding=new H.CffEncoding(H.cffExpertEncoding,x):c.cffEncoding=p(a,b+k.encoding,x),c.encoding=c.encoding||c.cffEncoding,c.glyphs=new I.GlyphSet(c);for(var y=0;y=0&&(c=d),d=b.indexOf(a),d>=0?c=d+H.cffStandardStrings.length:(c=H.cffStandardStrings.length+b.length,b.push(a)),c}function u(){return new L.Table("Header",[{name:"major",type:"Card8",value:1},{name:"minor",type:"Card8",value:0},{name:"hdrSize",type:"Card8",value:4},{name:"major",type:"Card8",value:1}])}function v(a){var b=new L.Table("Name INDEX",[{name:"names",type:"INDEX",value:[]}]);b.names=[];for(var c=0;c>1,j.skip("uShort",3),d.glyphIndexMap={};var l=new i.Parser(a,b+e+14),m=new i.Parser(a,b+e+16+2*k),n=new i.Parser(a,b+e+16+4*k),o=new i.Parser(a,b+e+16+6*k),p=b+e+16+8*k;for(c=0;k-1>c;c+=1)for(var q,r=l.parseUShort(),s=m.parseUShort(),t=n.parseShort(),u=o.parseUShort(),v=s;r>=v;v+=1)0!==u?(p=o.offset+o.relativeOffset-2,p+=u,p+=2*(v-s),q=i.getUShort(a,p),0!==q&&(q=q+t&65535)):q=v+t&65535,d.glyphIndexMap[v]=q;return d}function e(a,b,c){a.segments.push({end:b,start:b,delta:-(b-c),offset:0})}function f(a){a.segments.push({end:65535,start:65535,delta:1,offset:0})}function g(a){var b,c=new j.Table("cmap",[{name:"version",type:"USHORT",value:0},{name:"numTables",type:"USHORT",value:1},{name:"platformID",type:"USHORT",value:3},{name:"encodingID",type:"USHORT",value:1},{name:"offset",type:"ULONG",value:12},{name:"format",type:"USHORT",value:4},{name:"length",type:"USHORT",value:0},{name:"language",type:"USHORT",value:0},{name:"segCountX2",type:"USHORT",value:0},{name:"searchRange",type:"USHORT",value:0},{name:"entrySelector",type:"USHORT",value:0},{name:"rangeShift",type:"USHORT",value:0}]);for(c.segments=[],b=0;bb;b+=1){var o=c.segments[b];i=i.concat({name:"end_"+b,type:"USHORT",value:o.end}),k=k.concat({name:"start_"+b,type:"USHORT",value:o.start}),l=l.concat({name:"idDelta_"+b,type:"SHORT",value:o.delta}),m=m.concat({name:"idRangeOffset_"+b,type:"USHORT",value:o.offset}),void 0!==o.glyphId&&(n=n.concat({name:"glyph_"+b,type:"USHORT",value:o.glyphId}))}return c.fields=c.fields.concat(i),c.fields.push({name:"reservedPad",type:"USHORT",value:0}),c.fields=c.fields.concat(k),c.fields=c.fields.concat(l),c.fields=c.fields.concat(m),c.fields=c.fields.concat(n),c.length=14+2*i.length+2+2*k.length+2*l.length+2*m.length+2*n.length,c}var h=a("../check"),i=a("../parse"),j=a("../table");c.parse=d,c.make=g},{"../check":2,"../parse":9,"../table":11}],14:[function(a,b,c){"use 
strict";function d(a,b,c,d,e){var f;return(b&d)>0?(f=a.parseByte(),0===(b&e)&&(f=-f),f=c+f):f=(b&e)>0?c:c+a.parseShort(),f}function e(a,b,c){var e=new m.Parser(b,c);a.numberOfContours=e.parseShort(),a.xMin=e.parseShort(),a.yMin=e.parseShort(),a.xMax=e.parseShort(),a.yMax=e.parseShort();var f,g;if(a.numberOfContours>0){var h,i=a.endPointIndices=[];for(h=0;hh;h+=1)if(g=e.parseByte(),f.push(g),(8&g)>0)for(var l=e.parseByte(),n=0;l>n;n+=1)f.push(g),h+=1;if(k.argument(f.length===j,"Bad flags."),i.length>0){var o,p=[];if(j>0){for(h=0;j>h;h+=1)g=f[h],o={},o.onCurve=!!(1&g),o.lastPointOfContour=i.indexOf(h)>=0,p.push(o);var q=0;for(h=0;j>h;h+=1)g=f[h],o=p[h],o.x=d(e,g,q,2,16),q=o.x;var r=0;for(h=0;j>h;h+=1)g=f[h],o=p[h],o.y=d(e,g,r,4,32),r=o.y}a.points=p}else a.points=[]}else if(0===a.numberOfContours)a.points=[];else{a.isComposite=!0,a.points=[],a.components=[];for(var s=!0;s;){f=e.parseUShort();var t={glyphIndex:e.parseUShort(),xScale:1,scale01:0,scale10:0,yScale:1,dx:0,dy:0};(1&f)>0?(t.dx=e.parseShort(),t.dy=e.parseShort()):(t.dx=e.parseChar(),t.dy=e.parseChar()),(8&f)>0?t.xScale=t.yScale=e.parseF2Dot14():(64&f)>0?(t.xScale=e.parseF2Dot14(),t.yScale=e.parseF2Dot14()):(128&f)>0&&(t.xScale=e.parseF2Dot14(),t.scale01=e.parseF2Dot14(),t.scale10=e.parseF2Dot14(),t.yScale=e.parseF2Dot14()),a.components.push(t),s=!!(32&f)}}}function f(a,b){for(var c=[],d=0;df;f++)e[c.parseTag()]={offset:c.parseUShort()};return e}function e(a,b){var c=new k.Parser(a,b),d=c.parseUShort(),e=c.parseUShort();if(1===d)return c.parseUShortList(e);if(2===d){for(var f=[];e--;)for(var g=c.parseUShort(),h=c.parseUShort(),i=c.parseUShort(),j=g;h>=j;j++)f[i++]=j;return f}}function f(a,b){var c=new k.Parser(a,b),d=c.parseUShort();if(1===d){var e=c.parseUShort(),f=c.parseUShort(),g=c.parseUShortList(f);return function(a){return g[a-e]||0}}if(2===d){for(var h=c.parseUShort(),i=[],j=[],l=[],m=0;h>m;m++)i[m]=c.parseUShort(),j[m]=c.parseUShort(),l[m]=c.parseUShort();return function(a){for(var b=0,c=i.length-1;c>b;){var d=b+c+1>>1;ar;r++){var s=q[r],t=n[s];if(!t){t={},g.relativeOffset=s;for(var u=g.parseUShort();u--;){var v=g.parseUShort();l&&(c=g.parseShort()),m&&(d=g.parseShort()),t[v]=c}}p[j[r]]=t}return function(a,b){var c=p[a];return c?c[b]:void 0}}if(2===h){for(var w=g.parseUShort(),x=g.parseUShort(),y=g.parseUShort(),z=g.parseUShort(),A=f(a,b+w),B=f(a,b+x),C=[],D=0;y>D;D++)for(var E=C[D]=[],F=0;z>F;F++)l&&(c=g.parseShort()),m&&(d=g.parseShort()),E[F]=c;var G={};for(D=0;Dm;m++)l.push(g(a,b+i[m]));j.getKerningValue=function(a,b){for(var c=l.length;c--;){var d=l[c](a,b);if(void 0!==d)return d}return 0}}return j}function i(a,b,c){var e=new k.Parser(a,b),f=e.parseFixed();j.argument(1===f,"Unsupported GPOS table version."),d(a,b+e.parseUShort()),d(a,b+e.parseUShort());var g=e.parseUShort();e.relativeOffset=g;for(var i=e.parseUShort(),l=e.parseOffset16List(i),m=b+g,n=0;i>n;n++){var o=h(a,m+l[n]);2!==o.lookupType||c.getGposKerningValue||(c.getGposKerningValue=o.getKerningValue)}}var j=a("../check"),k=a("../parse");c.parse=i},{"../check":2,"../parse":9}],16:[function(a,b,c){"use strict";function d(a,b){var c={},d=new g.Parser(a,b);return c.version=d.parseVersion(),c.fontRevision=Math.round(1e3*d.parseFixed())/1e3,c.checkSumAdjustment=d.parseULong(),c.magicNumber=d.parseULong(),f.argument(1594834165===c.magicNumber,"Font header has wrong magic 
number."),c.flags=d.parseUShort(),c.unitsPerEm=d.parseUShort(),c.created=d.parseLongDateTime(),c.modified=d.parseLongDateTime(),c.xMin=d.parseShort(),c.yMin=d.parseShort(),c.xMax=d.parseShort(),c.yMax=d.parseShort(),c.macStyle=d.parseUShort(),c.lowestRecPPEM=d.parseUShort(),c.fontDirectionHint=d.parseShort(),c.indexToLocFormat=d.parseShort(),c.glyphDataFormat=d.parseShort(),c}function e(a){return new h.Table("head",[{name:"version",type:"FIXED",value:65536},{name:"fontRevision",type:"FIXED",value:65536},{name:"checkSumAdjustment",type:"ULONG",value:0},{name:"magicNumber",type:"ULONG",value:1594834165},{name:"flags",type:"USHORT",value:0},{name:"unitsPerEm",type:"USHORT",value:1e3},{name:"created",type:"LONGDATETIME",value:0},{name:"modified",type:"LONGDATETIME",value:0},{name:"xMin",type:"SHORT",value:0},{name:"yMin",type:"SHORT",value:0},{name:"xMax",type:"SHORT",value:0},{name:"yMax",type:"SHORT",value:0},{name:"macStyle",type:"USHORT",value:0},{name:"lowestRecPPEM",type:"USHORT",value:0},{name:"fontDirectionHint",type:"SHORT",value:2},{name:"indexToLocFormat",type:"SHORT",value:0},{name:"glyphDataFormat",type:"SHORT",value:0}],a)}var f=a("../check"),g=a("../parse"),h=a("../table");c.parse=d,c.make=e},{"../check":2,"../parse":9,"../table":11}],17:[function(a,b,c){"use strict";function d(a,b){var c={},d=new f.Parser(a,b);return c.version=d.parseVersion(),c.ascender=d.parseShort(),c.descender=d.parseShort(),c.lineGap=d.parseShort(),c.advanceWidthMax=d.parseUShort(),c.minLeftSideBearing=d.parseShort(),c.minRightSideBearing=d.parseShort(),c.xMaxExtent=d.parseShort(),c.caretSlopeRise=d.parseShort(),c.caretSlopeRun=d.parseShort(),c.caretOffset=d.parseShort(),d.relativeOffset+=8,c.metricDataFormat=d.parseShort(),c.numberOfHMetrics=d.parseUShort(),c}function e(a){return new g.Table("hhea",[{name:"version",type:"FIXED",value:65536},{name:"ascender",type:"FWORD",value:0},{name:"descender",type:"FWORD",value:0},{name:"lineGap",type:"FWORD",value:0},{name:"advanceWidthMax",type:"UFWORD",value:0},{name:"minLeftSideBearing",type:"FWORD",value:0},{name:"minRightSideBearing",type:"FWORD",value:0},{name:"xMaxExtent",type:"FWORD",value:0},{name:"caretSlopeRise",type:"SHORT",value:1},{name:"caretSlopeRun",type:"SHORT",value:0},{name:"caretOffset",type:"SHORT",value:0},{name:"reserved1",type:"SHORT",value:0},{name:"reserved2",type:"SHORT",value:0},{name:"reserved3",type:"SHORT",value:0},{name:"reserved4",type:"SHORT",value:0},{name:"metricDataFormat",type:"SHORT",value:0},{name:"numberOfHMetrics",type:"USHORT",value:0}],a)}var f=a("../parse"),g=a("../table");c.parse=d,c.make=e},{"../parse":9,"../table":11}],18:[function(a,b,c){"use strict";function d(a,b,c,d,e){for(var g,h,i=new f.Parser(a,b),j=0;d>j;j+=1){c>j&&(g=i.parseUShort(),h=i.parseShort());var k=e.get(j);k.advanceWidth=g,k.leftSideBearing=h}}function e(a){for(var b=new g.Table("hmtx",[]),c=0;cj;j+=1){var k=d.parseUShort(),l=d.parseUShort(),m=d.parseShort();c[k+","+l]=m}return c}var e=a("../check"),f=a("../parse");c.parse=d},{"../check":2,"../parse":9}],20:[function(a,b,c){"use strict";function d(a,b,c,d){for(var f=new e.Parser(a,b),g=d?f.parseUShort:f.parseULong,h=[],i=0;c+1>i;i+=1){var j=g.call(f);d&&(j*=2),h.push(j)}return h}var e=a("../parse");c.parse=d},{"../parse":9}],21:[function(a,b,c){"use strict";function d(a,b){var c={},d=new f.Parser(a,b);return 
c.version=d.parseVersion(),c.numGlyphs=d.parseUShort(),1===c.version&&(c.maxPoints=d.parseUShort(),c.maxContours=d.parseUShort(),c.maxCompositePoints=d.parseUShort(),c.maxCompositeContours=d.parseUShort(),c.maxZones=d.parseUShort(),c.maxTwilightPoints=d.parseUShort(),c.maxStorage=d.parseUShort(),c.maxFunctionDefs=d.parseUShort(),c.maxInstructionDefs=d.parseUShort(),c.maxStackElements=d.parseUShort(),c.maxSizeOfInstructions=d.parseUShort(),c.maxComponentElements=d.parseUShort(),c.maxComponentDepth=d.parseUShort()),c}function e(a){return new g.Table("maxp",[{name:"version",type:"FIXED",value:20480},{name:"numGlyphs",type:"USHORT",value:a}])}var f=a("../parse"),g=a("../table");c.parse=d,c.make=e},{"../parse":9,"../table":11}],22:[function(a,b,c){"use strict";function d(a,b){var c={},d=new j.Parser(a,b);c.format=d.parseUShort();for(var e=d.parseUShort(),f=d.offset+d.parseUShort(),g=0,h=0;e>h;h++){var i=d.parseUShort(),k=d.parseUShort(),m=d.parseUShort(),n=d.parseUShort(),o=l[n],p=d.parseUShort(),q=d.parseUShort();if(3===i&&1===k&&1033===m){for(var r=[],s=p/2,t=0;s>t;t++,q+=2)r[t]=j.getShort(a,f+q);var u=String.fromCharCode.apply(null,r);o?c[o]=u:(g++,c["unknown"+g]=u)}}return 1===c.format&&(c.langTagCount=d.parseUShort()),c}function e(a,b,c,d,e,f){return new k.Table("NameRecord",[{name:"platformID",type:"USHORT",value:a},{name:"encodingID",type:"USHORT",value:b},{name:"languageID",type:"USHORT",value:c},{name:"nameID",type:"USHORT",value:d},{name:"length",type:"USHORT",value:e},{name:"offset",type:"USHORT",value:f}])}function f(a,b,c,d){var f=i.STRING(c);return a.records.push(e(1,0,0,b,f.length,d)),a.strings.push(f),d+=f.length}function g(a,b,c,d){var f=i.UTF16(c);return a.records.push(e(3,1,1033,b,f.length,d)),a.strings.push(f),d+=f.length}function h(a){var b=new k.Table("name",[{name:"format",type:"USHORT",value:0},{name:"count",type:"USHORT",value:0},{name:"stringOffset",type:"USHORT",value:0}]);b.records=[],b.strings=[];var c,d,e=0;for(c=0;c=c.begin&&ae;e++)c.panose[e]=d.parseByte();return c.ulUnicodeRange1=d.parseULong(),c.ulUnicodeRange2=d.parseULong(),c.ulUnicodeRange3=d.parseULong(),c.ulUnicodeRange4=d.parseULong(),c.achVendID=String.fromCharCode(d.parseByte(),d.parseByte(),d.parseByte(),d.parseByte()),c.fsSelection=d.parseUShort(),c.usFirstCharIndex=d.parseUShort(),c.usLastCharIndex=d.parseUShort(),c.sTypoAscender=d.parseShort(),c.sTypoDescender=d.parseShort(),c.sTypoLineGap=d.parseShort(),c.usWinAscent=d.parseUShort(),c.usWinDescent=d.parseUShort(),c.version>=1&&(c.ulCodePageRange1=d.parseULong(),c.ulCodePageRange2=d.parseULong()),c.version>=2&&(c.sxHeight=d.parseShort(),c.sCapHeight=d.parseShort(),c.usDefaultChar=d.parseUShort(),c.usBreakChar=d.parseUShort(),c.usMaxContent=d.parseUShort()),c}function f(a){return new 
h.Table("OS/2",[{name:"version",type:"USHORT",value:3},{name:"xAvgCharWidth",type:"SHORT",value:0},{name:"usWeightClass",type:"USHORT",value:0},{name:"usWidthClass",type:"USHORT",value:0},{name:"fsType",type:"USHORT",value:0},{name:"ySubscriptXSize",type:"SHORT",value:650},{name:"ySubscriptYSize",type:"SHORT",value:699},{name:"ySubscriptXOffset",type:"SHORT",value:0},{name:"ySubscriptYOffset",type:"SHORT",value:140},{name:"ySuperscriptXSize",type:"SHORT",value:650},{name:"ySuperscriptYSize",type:"SHORT",value:699},{name:"ySuperscriptXOffset",type:"SHORT",value:0},{name:"ySuperscriptYOffset",type:"SHORT",value:479},{name:"yStrikeoutSize",type:"SHORT",value:49},{name:"yStrikeoutPosition",type:"SHORT",value:258},{name:"sFamilyClass",type:"SHORT",value:0},{name:"bFamilyType",type:"BYTE",value:0},{name:"bSerifStyle",type:"BYTE",value:0},{name:"bWeight",type:"BYTE",value:0},{name:"bProportion",type:"BYTE",value:0},{name:"bContrast",type:"BYTE",value:0},{name:"bStrokeVariation",type:"BYTE",value:0},{name:"bArmStyle",type:"BYTE",value:0},{name:"bLetterform",type:"BYTE",value:0},{name:"bMidline",type:"BYTE",value:0},{name:"bXHeight",type:"BYTE",value:0},{name:"ulUnicodeRange1",type:"ULONG",value:0},{name:"ulUnicodeRange2",type:"ULONG",value:0},{name:"ulUnicodeRange3",type:"ULONG",value:0},{name:"ulUnicodeRange4",type:"ULONG",value:0},{name:"achVendID",type:"CHARARRAY",value:"XXXX"},{name:"fsSelection",type:"USHORT",value:0},{name:"usFirstCharIndex",type:"USHORT",value:0},{name:"usLastCharIndex",type:"USHORT",value:0},{name:"sTypoAscender",type:"SHORT",value:0},{name:"sTypoDescender",type:"SHORT",value:0},{name:"sTypoLineGap",type:"SHORT",value:0},{name:"usWinAscent",type:"USHORT",value:0},{name:"usWinDescent",type:"USHORT",value:0},{name:"ulCodePageRange1",type:"ULONG",value:0},{name:"ulCodePageRange2",type:"ULONG",value:0},{name:"sxHeight",type:"SHORT",value:0},{name:"sCapHeight",type:"SHORT",value:0},{name:"usDefaultChar",type:"USHORT",value:0},{name:"usBreakChar",type:"USHORT",value:0},{name:"usMaxContext",type:"USHORT",value:0}],a)}var 
g=a("../parse"),h=a("../table"),i=[{begin:0,end:127},{begin:128,end:255},{begin:256,end:383},{begin:384,end:591},{begin:592,end:687},{begin:688,end:767},{begin:768,end:879},{begin:880,end:1023},{begin:11392,end:11519},{begin:1024,end:1279},{begin:1328,end:1423},{begin:1424,end:1535},{begin:42240,end:42559},{begin:1536,end:1791},{begin:1984,end:2047},{begin:2304,end:2431},{begin:2432,end:2559},{begin:2560,end:2687},{begin:2688,end:2815},{begin:2816,end:2943},{begin:2944,end:3071},{begin:3072,end:3199},{begin:3200,end:3327},{begin:3328,end:3455},{begin:3584,end:3711},{begin:3712,end:3839},{begin:4256,end:4351},{begin:6912,end:7039},{begin:4352,end:4607},{begin:7680,end:7935},{begin:7936,end:8191},{begin:8192,end:8303},{begin:8304,end:8351},{begin:8352,end:8399},{begin:8400,end:8447},{begin:8448,end:8527},{begin:8528,end:8591},{begin:8592,end:8703},{begin:8704,end:8959},{begin:8960,end:9215},{begin:9216,end:9279},{begin:9280,end:9311},{begin:9312,end:9471},{begin:9472,end:9599},{begin:9600,end:9631},{begin:9632,end:9727},{begin:9728,end:9983},{begin:9984,end:10175},{begin:12288,end:12351},{begin:12352,end:12447},{begin:12448,end:12543},{begin:12544,end:12591},{begin:12592,end:12687},{begin:43072,end:43135},{begin:12800,end:13055},{begin:13056,end:13311},{begin:44032,end:55215},{begin:55296,end:57343},{begin:67840,end:67871},{begin:19968,end:40959},{begin:57344,end:63743},{begin:12736,end:12783},{begin:64256,end:64335},{begin:64336,end:65023},{begin:65056,end:65071},{begin:65040,end:65055},{begin:65104,end:65135},{begin:65136,end:65279},{begin:65280,end:65519},{begin:65520,end:65535},{begin:3840,end:4095},{begin:1792,end:1871},{begin:1920,end:1983},{begin:3456,end:3583},{begin:4096,end:4255},{begin:4608,end:4991},{begin:5024,end:5119},{begin:5120,end:5759},{begin:5760,end:5791},{begin:5792,end:5887},{begin:6016,end:6143},{begin:6144,end:6319},{begin:10240,end:10495},{begin:40960,end:42127},{begin:5888,end:5919},{begin:66304,end:66351},{begin:66352,end:66383},{begin:66560,end:66639},{begin:118784,end:119039},{begin:119808,end:120831},{begin:1044480,end:1048573},{begin:65024,end:65039},{begin:917504,end:917631},{begin:6400,end:6479},{begin:6480,end:6527},{begin:6528,end:6623},{begin:6656,end:6687},{begin:11264,end:11359},{begin:11568,end:11647},{begin:19904,end:19967},{begin:43008,end:43055},{begin:65536,end:65663},{begin:65856,end:65935},{begin:66432,end:66463},{begin:66464,end:66527},{begin:66640,end:66687},{begin:66688,end:66735},{begin:67584, +end:67647},{begin:68096,end:68191},{begin:119552,end:119647},{begin:73728,end:74751},{begin:119648,end:119679},{begin:7040,end:7103},{begin:7168,end:7247},{begin:7248,end:7295},{begin:43136,end:43231},{begin:43264,end:43311},{begin:43312,end:43359},{begin:43520,end:43615},{begin:65936,end:65999},{begin:66e3,end:66047},{begin:66208,end:66271},{begin:127024,end:127135}];c.unicodeRanges=i,c.getUnicodeRange=d,c.parse=e,c.make=f},{"../parse":9,"../table":11}],24:[function(a,b,c){"use strict";function d(a,b){var c,d={},e=new g.Parser(a,b);switch(d.version=e.parseVersion(),d.italicAngle=e.parseFixed(),d.underlinePosition=e.parseShort(),d.underlineThickness=e.parseShort(),d.isFixedPitch=e.parseULong(),d.minMemType42=e.parseULong(),d.maxMemType42=e.parseULong(),d.minMemType1=e.parseULong(),d.maxMemType1=e.parseULong(),d.version){case 1:d.names=f.standardNames.slice();break;case 2:for(d.numberOfGlyphs=e.parseUShort(),d.glyphNameIndex=new Array(d.numberOfGlyphs),c=0;c=f.standardNames.length){var h=e.parseChar();d.names.push(e.parseString(h))}break;case 
2.5:for(d.numberOfGlyphs=e.parseUShort(),d.offset=new Array(d.numberOfGlyphs),c=0;cb.value.tag?1:-1}),b.fields=b.fields.concat(g),b.fields=b.fields.concat(h),b}function h(a,b,c){for(var d=0;d0){var f=a.glyphs.get(e);return f.getMetrics()}}return c}function i(a){for(var b=0,c=0;cD||null===b)&&(b=D),D>w&&(w=D);var E=t.getUnicodeRange(D);if(32>E)x|=1<E)y|=1<E)z|=1<E))throw new Error("Unicode ranges bits > 123 are reserved for internal usage");A|=1<=0&&255>=a,"Byte value should be between 0 and 255."),[a]},j.BYTE=d(1),i.CHAR=function(a){return[a.charCodeAt(0)]},j.BYTE=d(1),i.CHARARRAY=function(a){for(var b=[],c=0;c>8&255,255&a]},j.USHORT=d(2),i.SHORT=function(a){return a>=f&&(a=-(2*f-a)),[a>>8&255,255&a]},j.SHORT=d(2),i.UINT24=function(a){return[a>>16&255,a>>8&255,255&a]},j.UINT24=d(3),i.ULONG=function(a){return[a>>24&255,a>>16&255,a>>8&255,255&a]},j.ULONG=d(4),i.LONG=function(a){return a>=g&&(a=-(2*g-a)),[a>>24&255,a>>16&255,a>>8&255,255&a]},j.LONG=d(4),i.FIXED=i.ULONG,j.FIXED=j.ULONG,i.FWORD=i.SHORT,j.FWORD=j.SHORT,i.UFWORD=i.USHORT,j.UFWORD=j.USHORT,i.LONGDATETIME=function(){return[0,0,0,0,0,0,0,0]},j.LONGDATETIME=d(8),i.TAG=function(a){return e.argument(4===a.length,"Tag should be exactly 4 ASCII characters."),[a.charCodeAt(0),a.charCodeAt(1),a.charCodeAt(2),a.charCodeAt(3)]},j.TAG=d(4),i.Card8=i.BYTE,j.Card8=j.BYTE,i.Card16=i.USHORT,j.Card16=j.USHORT,i.OffSize=i.BYTE,j.OffSize=j.BYTE,i.SID=i.USHORT,j.SID=j.USHORT,i.NUMBER=function(a){return a>=-107&&107>=a?[a+139]:a>=108&&1131>=a?(a-=108,[(a>>8)+247,255&a]):a>=-1131&&-108>=a?(a=-a-108,[(a>>8)+251,255&a]):a>=-32768&&32767>=a?i.NUMBER16(a):i.NUMBER32(a)},j.NUMBER=function(a){return i.NUMBER(a).length},i.NUMBER16=function(a){return[28,a>>8&255,255&a]},j.NUMBER16=d(2),i.NUMBER32=function(a){return[29,a>>24&255,a>>16&255,a>>8&255,255&a]},j.NUMBER32=d(4),i.REAL=function(a){var b=a.toString(),c=/\.(\d*?)(?:9{5,20}|0{5,20})\d{0,2}(?:e(.+)|$)/.exec(b);if(c){var d=parseFloat("1e"+((c[2]?+c[2]:0)+c[1].length));b=(Math.round(a*d)/d).toString()}var e,f,g="";for(e=0,f=b.length;f>e;e+=1){var h=b[e];g+="e"===h?"-"===b[++e]?"c":"b":"."===h?"a":"-"===h?"e":h}g+=1&g.length?"f":"ff";var i=[30];for(e=0,f=g.length;f>e;e+=2)i.push(parseInt(g.substr(e,2),16));return i},j.REAL=function(a){return i.REAL(a).length},i.NAME=i.CHARARRAY,j.NAME=j.CHARARRAY,i.STRING=i.CHARARRAY,j.STRING=j.CHARARRAY,i.UTF16=function(a){for(var b=[],c=0;ce;e+=1){var f=parseInt(c[e],0),g=a[f];b=b.concat(i.OPERAND(g.value,g.type)),b=b.concat(i.OPERATOR(f))}return b},j.DICT=function(a){return i.DICT(a).length},i.OPERATOR=function(a){return 1200>a?[a]:[12,a-1200]},i.OPERAND=function(a,b){var c=[];if(Array.isArray(b))for(var d=0;dd;d+=1){var e=a[d];b=b.concat(i[e.type](e.value))}return k&&k.set(a,b),b},j.CHARSTRING=function(a){return i.CHARSTRING(a).length},i.OBJECT=function(a){var b=i[a.type];return e.argument(void 0!==b,"No encoding function for type "+a.type),b(a.value)},i.TABLE=function(a){for(var b=[],c=a.fields.length,d=0;c>d;d+=1){var f=a.fields[d],g=i[f.type];e.argument(void 0!==g,"No encoding function for field type "+f.type);var h=a[f.name];void 0===h&&(h=f.value);var j=g(h);b=b.concat(j)}return b},i.LITERAL=function(a){return a},j.LITERAL=function(a){return a.length},c.decode=h,c.encode=i,c.sizeOf=j},{"./check":2}],27:[function(_dereq_,module,exports){!function(a,b,c){"undefined"!=typeof module&&module.exports?module.exports=c():"function"==typeof define&&define.amd?define(c):b[a]=c()}("reqwest",this,function(){function succeed(a){var b=protocolRe.exec(a.url);return 
b=b&&b[1]||window.location.protocol,httpsRe.test(b)?twoHundo.test(a.request.status):!!a.request.response}function handleReadyState(a,b,c){return function(){return a._aborted?c(a.request):a._timedOut?c(a.request,"Request is aborted: timeout"):void(a.request&&4==a.request[readyState]&&(a.request.onreadystatechange=noop,succeed(a)?b(a.request):c(a.request)))}}function setHeaders(a,b){var c,d=b.headers||{};d.Accept=d.Accept||defaultHeaders.accept[b.type]||defaultHeaders.accept["*"];var e="function"==typeof FormData&&b.data instanceof FormData;b.crossOrigin||d[requestedWith]||(d[requestedWith]=defaultHeaders.requestedWith),d[contentType]||e||(d[contentType]=b.contentType||defaultHeaders.contentType);for(c in d)d.hasOwnProperty(c)&&"setRequestHeader"in a&&a.setRequestHeader(c,d[c])}function setCredentials(a,b){"undefined"!=typeof b.withCredentials&&"undefined"!=typeof a.withCredentials&&(a.withCredentials=!!b.withCredentials)}function generalCallback(a){lastValue=a}function urlappend(a,b){return a+(/\?/.test(a)?"&":"?")+b}function handleJsonp(a,b,c,d){var e=uniqid++,f=a.jsonpCallback||"callback",g=a.jsonpCallbackName||reqwest.getcallbackPrefix(e),h=new RegExp("((^|\\?|&)"+f+")=([^&]+)"),i=d.match(h),j=doc.createElement("script"),k=0,l=-1!==navigator.userAgent.indexOf("MSIE 10.0");return i?"?"===i[3]?d=d.replace(h,"$1="+g):g=i[3]:d=urlappend(d,f+"="+g),win[g]=generalCallback,j.type="text/javascript",j.src=d,j.async=!0,"undefined"==typeof j.onreadystatechange||l||(j.htmlFor=j.id="_reqwest_"+e),j.onload=j.onreadystatechange=function(){return j[readyState]&&"complete"!==j[readyState]&&"loaded"!==j[readyState]||k?!1:(j.onload=j.onreadystatechange=null,j.onclick&&j.onclick(),b(lastValue),lastValue=void 0,head.removeChild(j),void(k=1))},head.appendChild(j),{abort:function(){j.onload=j.onreadystatechange=null,c({},"Request is aborted: timeout",{}),lastValue=void 0,head.removeChild(j),k=1}}}function getRequest(a,b){var c,d=this.o,e=(d.method||"GET").toUpperCase(),f="string"==typeof d?d:d.url,g=d.processData!==!1&&d.data&&"string"!=typeof d.data?reqwest.toQueryString(d.data):d.data||null,h=!1;return"jsonp"!=d.type&&"GET"!=e||!g||(f=urlappend(f,g),g=null),"jsonp"==d.type?handleJsonp(d,a,b,f):(c=d.xhr&&d.xhr(d)||xhr(d),c.open(e,f,d.async===!1?!1:!0),setHeaders(c,d),setCredentials(c,d),win[xDomainRequest]&&c instanceof win[xDomainRequest]?(c.onload=a,c.onerror=b,c.onprogress=function(){},h=!0):c.onreadystatechange=handleReadyState(this,a,b),d.before&&d.before(c),h?setTimeout(function(){c.send(g)},200):c.send(g),c)}function Reqwest(a,b){this.o=a,this.fn=b,init.apply(this,arguments)}function setType(a){return a.match("json")?"json":a.match("javascript")?"js":a.match("text")?"html":a.match("xml")?"xml":void 0}function init(o,fn){function complete(a){for(o.timeout&&clearTimeout(self.timeout),self.timeout=null;self._completeHandlers.length>0;)self._completeHandlers.shift()(a)}function success(resp){var type=o.type||resp&&setType(resp.getResponseHeader("Content-Type"));resp="jsonp"!==type?self.request:resp;var filteredResponse=globalSetupOptions.dataFilter(resp.responseText,type),r=filteredResponse;try{resp.responseText=r}catch(e){}if(r)switch(type){case"json":try{resp=win.JSON?win.JSON.parse(r):eval("("+r+")")}catch(err){return error(resp,"Could not parse JSON in 
response",err)}break;case"js":resp=eval(r);break;case"html":resp=r;break;case"xml":resp=resp.responseXML&&resp.responseXML.parseError&&resp.responseXML.parseError.errorCode&&resp.responseXML.parseError.reason?null:resp.responseXML}for(self._responseArgs.resp=resp,self._fulfilled=!0,fn(resp),self._successHandler(resp);self._fulfillmentHandlers.length>0;)resp=self._fulfillmentHandlers.shift()(resp);complete(resp)}function timedOut(){self._timedOut=!0,self.request.abort()}function error(a,b,c){for(a=self.request,self._responseArgs.resp=a,self._responseArgs.msg=b,self._responseArgs.t=c,self._erred=!0;self._errorHandlers.length>0;)self._errorHandlers.shift()(a,b,c);complete(a)}this.url="string"==typeof o?o:o.url,this.timeout=null,this._fulfilled=!1,this._successHandler=function(){},this._fulfillmentHandlers=[],this._errorHandlers=[],this._completeHandlers=[],this._erred=!1,this._responseArgs={};var self=this;fn=fn||function(){},o.timeout&&(this.timeout=setTimeout(function(){timedOut()},o.timeout)),o.success&&(this._successHandler=function(){o.success.apply(o,arguments)}),o.error&&this._errorHandlers.push(function(){o.error.apply(o,arguments)}),o.complete&&this._completeHandlers.push(function(){o.complete.apply(o,arguments)}),this.request=getRequest.call(this,success,error)}function reqwest(a,b){return new Reqwest(a,b)}function normalize(a){return a?a.replace(/\r?\n/g,"\r\n"):""}function serial(a,b){var c,d,e,f,g=a.name,h=a.tagName.toLowerCase(),i=function(a){a&&!a.disabled&&b(g,normalize(a.attributes.value&&a.attributes.value.specified?a.value:a.text))};if(!a.disabled&&g)switch(h){case"input":/reset|button|image|file/i.test(a.type)||(c=/checkbox/i.test(a.type),d=/radio/i.test(a.type),e=a.value,(!(c||d)||a.checked)&&b(g,normalize(c&&""===e?"on":e)));break;case"textarea":b(g,normalize(a.value));break;case"select":if("select-one"===a.type.toLowerCase())i(a.selectedIndex>=0?a.options[a.selectedIndex]:null);else for(f=0;a.length&&fe?c/=2-c:c=c*d/(2-2*e)),[b,c,e,a[3]]},d.ColorConversion._hsbaToRGBA=function(a){var b=6*a[0],c=a[1],d=a[2],e=[];if(0===c)e=[d,d,d,a[3]];else{var f,g,h,i=Math.floor(b),j=d*(1-c),k=d*(1-c*(b-i)),l=d*(1-c*(1+i-b));1===i?(f=k,g=d,h=j):2===i?(f=j,g=d,h=l):3===i?(f=j,g=k,h=d):4===i?(f=l,g=j,h=d):5===i?(f=d,g=j,h=k):(f=d,g=l,h=j),e=[f,g,h,a[3]]}return e},d.ColorConversion._hslaToHSBA=function(a){var b,c=a[0],d=a[1],e=a[2];return b=.5>e?(1+d)*e:e+d-e*d,d=2*(b-e)/b,[c,d,b,a[3]]},d.ColorConversion._hslaToRGBA=function(a){var b=6*a[0],c=a[1],d=a[2],e=[];if(0===c)e=[d,d,d,a[3]];else{var f;f=.5>d?(1+c)*d:d+c-d*c;var g=2*d-f,h=function(a,b,c){return 0>a?a+=6:a>=6&&(a-=6),1>a?b+(c-b)*a:3>a?c:4>a?b+(c-b)*(4-a):b};e=[h(b+2,g,f),h(b,g,f),h(b-2,g,f),a[3]]}return e},d.ColorConversion._rgbaToHSBA=function(a){var b,c,d=a[0],e=a[1],f=a[2],g=Math.max(d,e,f),h=g-Math.min(d,e,f);return 0===h?(b=0,c=0):(c=h/g,d===g?b=(e-f)/h:e===g?b=2+(f-d)/h:f===g&&(b=4+(d-e)/h),0>b?b+=6:b>=6&&(b-=6)),[b/6,c,g,a[3]]},d.ColorConversion._rgbaToHSLA=function(a){var b,c,d=a[0],e=a[1],f=a[2],g=Math.max(d,e,f),h=Math.min(d,e,f),i=g+h,j=g-h;return 0===j?(b=0,c=0):(c=1>i?j/i:j/(2-j),d===g?b=(e-f)/j:e===g?b=2+(f-d)/j:f===g&&(b=4+(d-e)/j),0>b?b+=6:b>=6&&(b-=6)),[b/6,c,i/2,a[3]]},b.exports=d.ColorConversion},{"../core/core":37}],30:[function(a,b,c){"use strict";var d=a("../core/core"),e=a("../core/constants");a("./p5.Color"),d.prototype.alpha=function(a){if(a instanceof d.Color||a instanceof Array)return this.color(a)._getAlpha();throw new Error("Needs p5.Color or pixel array as argument.")},d.prototype.blue=function(a){if(a 
instanceof d.Color||a instanceof Array)return this.color(a)._getBlue();throw new Error("Needs p5.Color or pixel array as argument.")},d.prototype.brightness=function(a){if(a instanceof d.Color||a instanceof Array)return this.color(a)._getBrightness();throw new Error("Needs p5.Color or pixel array as argument.")},d.prototype.color=function(){return arguments[0]instanceof d.Color?arguments[0]:arguments[0]instanceof Array?this instanceof d.Renderer?new d.Color(this,arguments[0]):new d.Color(this._renderer,arguments[0]):this instanceof d.Renderer?new d.Color(this,arguments):new d.Color(this._renderer,arguments)},d.prototype.green=function(a){if(a instanceof d.Color||a instanceof Array)return this.color(a)._getGreen();throw new Error("Needs p5.Color or pixel array as argument.")},d.prototype.hue=function(a){if(a instanceof d.Color||a instanceof Array)return this.color(a)._getHue();throw new Error("Needs p5.Color or pixel array as argument.")},d.prototype.lerpColor=function(a,b,c){var d,f,g,h,i,j,k=this._renderer._colorMode,l=this._renderer._colorMaxes;if(k===e.RGB)i=a.levels.map(function(a){return a/255}),j=b.levels.map(function(a){return a/255});else if(k===e.HSB)a._getBrightness(),b._getBrightness(),i=a.hsba,j=b.hsba;else{if(k!==e.HSL)throw new Error(k+"cannot be used for interpolation.");a._getLightness(),b._getLightness(),i=a.hsla,j=b.hsla}return c=Math.max(Math.min(c,1),0),d=this.lerp(i[0],j[0],c),f=this.lerp(i[1],j[1],c),g=this.lerp(i[2],j[2],c),h=this.lerp(i[3],j[3],c),d*=l[k][0],f*=l[k][1],g*=l[k][2],h*=l[k][3],this.color(d,f,g,h)},d.prototype.lightness=function(a){if(a instanceof d.Color||a instanceof Array)return this.color(a)._getLightness();throw new Error("Needs p5.Color or pixel array as argument.")},d.prototype.red=function(a){if(a instanceof d.Color||a instanceof Array)return this.color(a)._getRed();throw new Error("Needs p5.Color or pixel array as argument.")},d.prototype.saturation=function(a){if(a instanceof d.Color||a instanceof Array)return this.color(a)._getSaturation();throw new Error("Needs p5.Color or pixel array as argument.")},b.exports=d},{"../core/constants":36,"../core/core":37,"./p5.Color":31}],31:[function(a,b,c){var d=a("../core/core"),e=a("../core/constants"),f=a("./color_conversion");d.Color=function(a,b){if(this.mode=a._colorMode,this.maxes=a._colorMaxes,this.mode!==e.RGB&&this.mode!==e.HSL&&this.mode!==e.HSB)throw new Error(this.mode+" is an invalid colorMode.");return this._array=d.Color._parseInputs.apply(a,b),this.levels=this._array.map(function(a){return Math.round(255*a)}),this},d.Color.prototype.toString=function(){var a=this.levels,b=this._array[3];return"rgba("+a[0]+","+a[1]+","+a[2]+","+b+")"},d.Color.prototype._getAlpha=function(){return this._array[3]*this.maxes[this.mode][3]},d.Color.prototype._getBlue=function(){return this._array[2]*this.maxes[e.RGB][2]},d.Color.prototype._getBrightness=function(){return this.hsba||(this.hsba=f._rgbaToHSBA(this._array)),this.hsba[2]*this.maxes[e.HSB][2]},d.Color.prototype._getGreen=function(){return this._array[1]*this.maxes[e.RGB][1]},d.Color.prototype._getHue=function(){return this.mode===e.HSB?(this.hsba||(this.hsba=f._rgbaToHSBA(this._array)),this.hsba[0]*this.maxes[e.HSB][0]):(this.hsla||(this.hsla=f._rgbaToHSLA(this._array)),this.hsla[0]*this.maxes[e.HSL][0])},d.Color.prototype._getLightness=function(){return this.hsla||(this.hsla=f._rgbaToHSLA(this._array)),this.hsla[2]*this.maxes[e.HSL][2]},d.Color.prototype._getRed=function(){return 
this._array[0]*this.maxes[e.RGB][0]},d.Color.prototype._getSaturation=function(){return this.mode===e.HSB?(this.hsba||(this.hsba=f._rgbaToHSBA(this._array)),this.hsba[1]*this.maxes[e.HSB][1]):(this.hsla||(this.hsla=f._rgbaToHSLA(this._array)),this.hsla[1]*this.maxes[e.HSL][1])};var g={aliceblue:"#f0f8ff",antiquewhite:"#faebd7",aqua:"#00ffff",aquamarine:"#7fffd4",azure:"#f0ffff",beige:"#f5f5dc",bisque:"#ffe4c4",black:"#000000",blanchedalmond:"#ffebcd",blue:"#0000ff",blueviolet:"#8a2be2",brown:"#a52a2a",burlywood:"#deb887",cadetblue:"#5f9ea0",chartreuse:"#7fff00",chocolate:"#d2691e",coral:"#ff7f50",cornflowerblue:"#6495ed",cornsilk:"#fff8dc",crimson:"#dc143c",cyan:"#00ffff",darkblue:"#00008b",darkcyan:"#008b8b",darkgoldenrod:"#b8860b",darkgray:"#a9a9a9",darkgreen:"#006400",darkgrey:"#a9a9a9",darkkhaki:"#bdb76b",darkmagenta:"#8b008b",darkolivegreen:"#556b2f",darkorange:"#ff8c00",darkorchid:"#9932cc",darkred:"#8b0000",darksalmon:"#e9967a",darkseagreen:"#8fbc8f",darkslateblue:"#483d8b",darkslategray:"#2f4f4f",darkslategrey:"#2f4f4f",darkturquoise:"#00ced1",darkviolet:"#9400d3",deeppink:"#ff1493",deepskyblue:"#00bfff",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1e90ff",firebrick:"#b22222",floralwhite:"#fffaf0",forestgreen:"#228b22",fuchsia:"#ff00ff",gainsboro:"#dcdcdc",ghostwhite:"#f8f8ff",gold:"#ffd700",goldenrod:"#daa520",gray:"#808080",green:"#008000",greenyellow:"#adff2f",grey:"#808080",honeydew:"#f0fff0",hotpink:"#ff69b4",indianred:"#cd5c5c",indigo:"#4b0082",ivory:"#fffff0",khaki:"#f0e68c",lavender:"#e6e6fa",lavenderblush:"#fff0f5",lawngreen:"#7cfc00",lemonchiffon:"#fffacd",lightblue:"#add8e6",lightcoral:"#f08080",lightcyan:"#e0ffff",lightgoldenrodyellow:"#fafad2",lightgray:"#d3d3d3",lightgreen:"#90ee90",lightgrey:"#d3d3d3",lightpink:"#ffb6c1",lightsalmon:"#ffa07a",lightseagreen:"#20b2aa",lightskyblue:"#87cefa",lightslategray:"#778899",lightslategrey:"#778899",lightsteelblue:"#b0c4de",lightyellow:"#ffffe0",lime:"#00ff00",limegreen:"#32cd32",linen:"#faf0e6",magenta:"#ff00ff",maroon:"#800000",mediumaquamarine:"#66cdaa",mediumblue:"#0000cd",mediumorchid:"#ba55d3",mediumpurple:"#9370db",mediumseagreen:"#3cb371",mediumslateblue:"#7b68ee",mediumspringgreen:"#00fa9a",mediumturquoise:"#48d1cc",mediumvioletred:"#c71585",midnightblue:"#191970",mintcream:"#f5fffa",mistyrose:"#ffe4e1",moccasin:"#ffe4b5",navajowhite:"#ffdead",navy:"#000080",oldlace:"#fdf5e6",olive:"#808000",olivedrab:"#6b8e23",orange:"#ffa500",orangered:"#ff4500",orchid:"#da70d6",palegoldenrod:"#eee8aa",palegreen:"#98fb98",paleturquoise:"#afeeee",palevioletred:"#db7093",papayawhip:"#ffefd5",peachpuff:"#ffdab9",peru:"#cd853f",pink:"#ffc0cb",plum:"#dda0dd",powderblue:"#b0e0e6",purple:"#800080",red:"#ff0000",rosybrown:"#bc8f8f",royalblue:"#4169e1",saddlebrown:"#8b4513",salmon:"#fa8072",sandybrown:"#f4a460",seagreen:"#2e8b57",seashell:"#fff5ee",sienna:"#a0522d",silver:"#c0c0c0",skyblue:"#87ceeb",slateblue:"#6a5acd",slategray:"#708090",slategrey:"#708090",snow:"#fffafa",springgreen:"#00ff7f",steelblue:"#4682b4",tan:"#d2b48c",teal:"#008080",thistle:"#d8bfd8",tomato:"#ff6347",turquoise:"#40e0d0",violet:"#ee82ee",wheat:"#f5deb3",white:"#ffffff",whitesmoke:"#f5f5f5",yellow:"#ffff00",yellowgreen:"#9acd32"},h=/\s*/,i=/(\d{1,3})/,j=/((?:\d+(?:\.\d+)?)|(?:\.\d+))/,k=new RegExp(j.source+"%"),l={HEX3:/^#([a-f0-9])([a-f0-9])([a-f0-9])$/i,HEX6:/^#([a-f0-9]{2})([a-f0-9]{2})([a-f0-9]{2})$/i,RGB:new RegExp(["^rgb\\(",i.source,",",i.source,",",i.source,"\\)$"].join(h.source),"i"),RGB_PERCENT:new 
RegExp(["^rgb\\(",k.source,",",k.source,",",k.source,"\\)$"].join(h.source),"i"),RGBA:new RegExp(["^rgba\\(",i.source,",",i.source,",",i.source,",",j.source,"\\)$"].join(h.source),"i"), +RGBA_PERCENT:new RegExp(["^rgba\\(",k.source,",",k.source,",",k.source,",",j.source,"\\)$"].join(h.source),"i"),HSL:new RegExp(["^hsl\\(",i.source,",",k.source,",",k.source,"\\)$"].join(h.source),"i"),HSLA:new RegExp(["^hsla\\(",i.source,",",k.source,",",k.source,",",j.source,"\\)$"].join(h.source),"i"),HSB:new RegExp(["^hsb\\(",i.source,",",k.source,",",k.source,"\\)$"].join(h.source),"i"),HSBA:new RegExp(["^hsba\\(",i.source,",",k.source,",",k.source,",",j.source,"\\)$"].join(h.source),"i")};d.Color._parseInputs=function(){var a=arguments.length,b=this._colorMode,c=this._colorMaxes,h=[];if(a>=3)return h[0]=arguments[0]/c[b][0],h[1]=arguments[1]/c[b][1],h[2]=arguments[2]/c[b][2],"number"==typeof arguments[3]?h[3]=arguments[3]/c[b][3]:h[3]=1,h=h.map(function(a){return Math.max(Math.min(a,1),0)}),b===e.HSL?f._hslaToRGBA(h):b===e.HSB?f._hsbaToRGBA(h):h;if(1===a&&"string"==typeof arguments[0]){var i=arguments[0].trim().toLowerCase();if(g[i])return d.Color._parseInputs.apply(this,[g[i]]);if(l.HEX3.test(i))return h=l.HEX3.exec(i).slice(1).map(function(a){return parseInt(a+a,16)/255}),h[3]=1,h;if(l.HEX6.test(i))return h=l.HEX6.exec(i).slice(1).map(function(a){return parseInt(a,16)/255}),h[3]=1,h;if(l.RGB.test(i))return h=l.RGB.exec(i).slice(1).map(function(a){return a/255}),h[3]=1,h;if(l.RGB_PERCENT.test(i))return h=l.RGB_PERCENT.exec(i).slice(1).map(function(a){return parseFloat(a)/100}),h[3]=1,h;if(l.RGBA.test(i))return h=l.RGBA.exec(i).slice(1).map(function(a,b){return 3===b?parseFloat(a):a/255});if(l.RGBA_PERCENT.test(i))return h=l.RGBA_PERCENT.exec(i).slice(1).map(function(a,b){return 3===b?parseFloat(a):parseFloat(a)/100});if(l.HSL.test(i)?(h=l.HSL.exec(i).slice(1).map(function(a,b){return 0===b?parseInt(a,10)/360:parseInt(a,10)/100}),h[3]=1):l.HSLA.test(i)&&(h=l.HSLA.exec(i).slice(1).map(function(a,b){return 0===b?parseInt(a,10)/360:3===b?parseFloat(a):parseInt(a,10)/100})),h.length)return f._hslaToRGBA(h);if(l.HSB.test(i)?(h=l.HSB.exec(i).slice(1).map(function(a,b){return 0===b?parseInt(a,10)/360:parseInt(a,10)/100}),h[3]=1):l.HSBA.test(i)&&(h=l.HSBA.exec(i).slice(1).map(function(a,b){return 0===b?parseInt(a,10)/360:3===b?parseFloat(a):parseInt(a,10)/100})),h.length)return f._hsbaToRGBA(h);h=[1,1,1,1]}else{if(1!==a&&2!==a||"number"!=typeof arguments[0])throw new Error(arguments+"is not a valid color representation.");h[0]=arguments[0]/c[b][2],h[1]=arguments[0]/c[b][2],h[2]=arguments[0]/c[b][2],"number"==typeof arguments[1]?h[3]=arguments[1]/c[b][3]:h[3]=1,h=h.map(function(a){return Math.max(Math.min(a,1),0)})}return h},b.exports=d.Color},{"../core/constants":36,"../core/core":37,"./color_conversion":29}],32:[function(a,b,c){"use strict";var d=a("../core/core"),e=a("../core/constants");a("./p5.Color"),d.prototype.background=function(){return arguments[0]instanceof d.Image?this.image(arguments[0],0,0,this.width,this.height):this._renderer.background.apply(this._renderer,arguments),this},d.prototype.clear=function(){return this._renderer.clear(),this},d.prototype.colorMode=function(){if(arguments[0]===e.RGB||arguments[0]===e.HSB||arguments[0]===e.HSL){this._renderer._colorMode=arguments[0];var 
a=this._renderer._colorMaxes[this._renderer._colorMode];2===arguments.length?(a[0]=arguments[1],a[1]=arguments[1],a[2]=arguments[1],a[3]=arguments[1]):4===arguments.length?(a[0]=arguments[1],a[1]=arguments[2],a[2]=arguments[3]):5===arguments.length&&(a[0]=arguments[1],a[1]=arguments[2],a[2]=arguments[3],a[3]=arguments[4])}return this},d.prototype.fill=function(){return this._renderer._setProperty("_fillSet",!0),this._renderer._setProperty("_doFill",!0),this._renderer.fill.apply(this._renderer,arguments),this},d.prototype.noFill=function(){return this._renderer._setProperty("_doFill",!1),this},d.prototype.noStroke=function(){return this._renderer._setProperty("_doStroke",!1),this},d.prototype.stroke=function(){return this._renderer._setProperty("_strokeSet",!0),this._renderer._setProperty("_doStroke",!0),this._renderer.stroke.apply(this._renderer,arguments),this},b.exports=d},{"../core/constants":36,"../core/core":37,"./p5.Color":31}],33:[function(a,b,c){"use strict";var d=a("./core"),e=a("./constants"),f=a("./canvas");a("./error_helpers"),d.prototype.arc=function(a,b,c,d,f,g,h){for(var i=new Array(arguments.length),j=0;jf;)f+=e.TWO_PI;for(;0>g;)g+=e.TWO_PI;return f%=e.TWO_PI,g%=e.TWO_PI,g===f&&(g+=e.TWO_PI),f=f<=e.HALF_PI?Math.atan(c/d*Math.tan(f)):f>e.HALF_PI&&f<=3*e.HALF_PI?Math.atan(c/d*Math.tan(f))+e.PI:Math.atan(c/d*Math.tan(f))+e.TWO_PI,g=g<=e.HALF_PI?Math.atan(c/d*Math.tan(g)):g>e.HALF_PI&&g<=3*e.HALF_PI?Math.atan(c/d*Math.tan(g))+e.PI:Math.atan(c/d*Math.tan(g))+e.TWO_PI,f>g&&(g+=e.TWO_PI),c=Math.abs(c),d=Math.abs(d),this._renderer.arc(a,b,c,d,f,g,h),this},d.prototype.ellipse=function(){for(var a=new Array(arguments.length),b=0;b=c-d)&&(this._setProperty("frameCount",this.frameCount+1),this.redraw(),this._updateMouseCoords(),this._updateTouchCoords(),this._frameRate=1e3/(a-this._lastFrameTime),this._lastFrameTime=a),this._loop&&(this._requestAnimId=window.requestAnimationFrame(this._draw))}.bind(this),this._runFrames=function(){this._updateInterval&&clearInterval(this._updateInterval)}.bind(this),this._setProperty=function(a,b){this[a]=b,this._isGlobal&&(window[a]=b)}.bind(this),this.remove=function(){if(this._curElement){this._loop=!1,this._requestAnimId&&window.cancelAnimationFrame(this._requestAnimId);for(var a in this._events)window.removeEventListener(a,this._events[a]);for(var b=0;b1)console.log.apply(console,arguments);else{var b=JSON.parse(JSON.stringify(a));console.log(b)}}catch(c){console.log(a)}}:h.prototype.println=function(){},h.prototype.frameCount=0,h.prototype.focused=document.hasFocus(),h.prototype.cursor=function(a,b,c){var d="auto",e=this._curElement.elt;if(j.indexOf(a)>-1)d=a;else if("string"==typeof a){var f="";b&&c&&"number"==typeof b&&"number"==typeof c&&(f=b+" "+c),d="http://"!==a.substring(0,6)?"url("+a+") "+f+", auto":/\.(cur|jpg|jpeg|gif|png|CUR|JPG|JPEG|GIF|PNG)$/.test(a)?"url("+a+") "+f+", auto":a}e.style.cursor=d},h.prototype.frameRate=function(a){return"number"!=typeof a||0>=a?this._frameRate:(this._setProperty("_targetFrameRate",a),this._runFrames(),this)},h.prototype.getFrameRate=function(){return this.frameRate()},h.prototype.setFrameRate=function(a){return this.frameRate(a)},h.prototype.noCursor=function(){this._curElement.elt.style.cursor="none"},h.prototype.displayWidth=screen.width,h.prototype.displayHeight=screen.height,h.prototype.windowWidth=d(),h.prototype.windowHeight=e(),h.prototype._onresize=function(a){this._setProperty("windowWidth",d()),this._setProperty("windowHeight",e());var b,c=this._isGlobal?window:this;"function"==typeof 
c.windowResized&&(b=c.windowResized(a),void 0===b||b||a.preventDefault())},h.prototype.width=0,h.prototype.height=0,h.prototype.fullscreen=function(a){return"undefined"==typeof a?document.fullscreenElement||document.webkitFullscreenElement||document.mozFullScreenElement||document.msFullscreenElement:void(a?f(document.documentElement):g())},h.prototype.pixelDensity=function(a){return"number"!=typeof a?this._pixelDensity:(this._pixelDensity=a,void this.resizeCanvas(this.width,this.height,!0))},h.prototype.displayDensity=function(){return window.devicePixelRatio},h.prototype.getURL=function(){return location.href},h.prototype.getURLPath=function(){return location.pathname.split("/").filter(function(a){return""!==a})},h.prototype.getURLParams=function(){for(var a,b=/[?&]([^&=]+)(?:[&=])([^&=]+)/gim,c={};null!=(a=b.exec(location.search));)a.index===b.lastIndex&&b.lastIndex++,c[a[1]]=a[2];return c},b.exports=h},{"./constants":36,"./core":37}],40:[function(a,b,c){"use strict";function d(a,b,c){if(a.match(/^p5\./)){var d=a.split(".");return c instanceof i[d[1]]}return"Boolean"===a||a.toLowerCase()===b||r.indexOf(a)>-1&&q(c)}function e(a,b,c){j&&(f(),j=!1),"undefined"===o(c)?c="#B40033":"number"===o(c)&&(c=w[c])}function f(){var a="transparent",b="#ED225D",c="#ED225D",d="white";console.log("%c _ \n /\\| |/\\ \n \\ ` ' / \n / , . \\ \n \\/|_|\\/ \n\n%c> p5.js says: Welcome! This is your friendly debugger. To turn me off switch to using “p5.min.js”.","background-color:"+a+";color:"+b+";","background-color:"+c+";color:"+d+";")}function g(){var b={},c=function(a){return Object.getOwnPropertyNames(a).filter(function(a){return"_"===a[0]?!1:a in b?!1:(b[a]=!0,!0)}).map(function(b){var c;return c="function"==typeof a[b]?"function":b===b.toUpperCase()?"constant":"variable",{name:b,type:c}})};y=[].concat(c(i.prototype),c(a("./constants"))),y.sort(function(a,b){return b.name.length-a.name.length})}function h(a,b){b||(b=console.log.bind(console)),y||g(),y.some(function(c){return a.message&&-1!==a.message.indexOf(c.name)?(b("%cDid you just try to use p5.js's "+c.name+("function"===c.type?"() ":" ")+c.type+"? If so, you may want to move it into your sketch's setup() function.\n\nFor more details, see: "+z,"color: #B40033"),!0):void 0})}for(var i=a("./core"),j=!1,k={},l=k.toString,m=["Boolean","Number","String","Function","Array","Date","RegExp","Object","Error"],n=0;n=0},r=["Number","Integer","Number/Constant"],s=0,t=1,u=2,v=3,w=["#2D7BB6","#EE9900","#4DB200","#C83C00"];i.prototype._validateParameters=function(a,b,c){p(c[0])||(c=[c]);for(var f,g=Math.abs(b.length-c[0].length),h=0,i=1,j=c.length;j>i;i++){var k=Math.abs(b.length-c[i].length);g>=k&&(h=i,g=k)}var l="X";g>0&&(f="You wrote "+a+"(",b.length>0&&(f+=l+Array(b.length).join(","+l)),f+="). "+a+" was expecting "+c[h].length+" parameters. Try "+a+"(",c[h].length>0&&(f+=l+Array(c[h].length).join(","+l)),f+=").",c.length>1&&(f+=" "+a+" takes different numbers of parameters depending on what you want to do. Click this link to learn more: "),e(f,a,s));for(var m=0;m1&&(f+=" "+a+" takes different numbers of parameters depending on what you want to do. 
Click this link to learn more:"),e(f,a,u))}},i.prototype._validateParameters=function(){return!0};var x={0:{fileType:"image",method:"loadImage",message:" hosting the image online,"},1:{fileType:"XML file",method:"loadXML"},2:{fileType:"table file",method:"loadTable"},3:{fileType:"text file",method:"loadStrings"}};i._friendlyFileLoadError=function(a,b){var c=x[a],d="It looks like there was a problem loading your "+c.fileType+". Try checking if the file path%c ["+b+"] %cis correct,"+(c.message||"")+" or running a local server.";e(d,c.method,v)};var y=null,z="https://github.com/processing/p5.js/wiki/Frequently-Asked-Questions#why-cant-i-assign-variables-using-p5-functions-and-variables-before-setup";i.prototype._helpForMisusedAtTopLevelCode=h,"complete"!==document.readyState&&(window.addEventListener("error",h,!1),window.addEventListener("load",function(){window.removeEventListener("error",h,!1)})),b.exports=i},{"./constants":36,"./core":37}],41:[function(a,b,c){function d(a,b,c){var d=b.bind(c);c.elt.addEventListener(a,d,!1),c._events[a]=d}var e=a("./core");e.Element=function(a,b){this.elt=a,this._pInst=b,this._events={},this.width=this.elt.offsetWidth,this.height=this.elt.offsetHeight},e.Element.prototype.parent=function(a){return 0===arguments.length?this.elt.parentNode:("string"==typeof a?("#"===a[0]&&(a=a.substring(1)),a=document.getElementById(a)):a instanceof e.Element&&(a=a.elt),a.appendChild(this.elt),this)},e.Element.prototype.id=function(a){return 0===arguments.length?this.elt.id:(this.elt.id=a,this.width=this.elt.offsetWidth,this.height=this.elt.offsetHeight,this)},e.Element.prototype["class"]=function(a){return 0===arguments.length?this.elt.className:(this.elt.className=a,this)},e.Element.prototype.mousePressed=function(a){return d("mousedown",a,this),d("touchstart",a,this),this},e.Element.prototype.mouseWheel=function(a){return d("wheel",a,this),this},e.Element.prototype.mouseReleased=function(a){return d("mouseup",a,this),d("touchend",a,this),this},e.Element.prototype.mouseClicked=function(a){return d("click",a,this),this},e.Element.prototype.mouseMoved=function(a){return d("mousemove",a,this),d("touchmove",a,this),this},e.Element.prototype.mouseOver=function(a){return d("mouseover",a,this),this},e.Element.prototype.changed=function(a){return d("change",a,this),this},e.Element.prototype.input=function(a){return d("input",a,this),this},e.Element.prototype.mouseOut=function(a){return d("mouseout",a,this),this},e.Element.prototype.touchStarted=function(a){return d("touchstart",a,this),d("mousedown",a,this),this},e.Element.prototype.touchMoved=function(a){return d("touchmove",a,this),d("mousemove",a,this),this},e.Element.prototype.touchEnded=function(a){return d("touchend",a,this),d("mouseup",a,this),this},e.Element.prototype.dragOver=function(a){return d("dragover",a,this),this},e.Element.prototype.dragLeave=function(a){return d("dragleave",a,this),this},e.Element.prototype.drop=function(a,b){function c(b){var c=new e.File(b);return function(b){c.data=b.target.result,a(c)}}return window.File&&window.FileReader&&window.FileList&&window.Blob?(d("dragover",function(a){a.stopPropagation(),a.preventDefault()},this),d("dragleave",function(a){a.stopPropagation(),a.preventDefault()},this),arguments.length>1&&d("drop",b,this),d("drop",function(a){a.stopPropagation(),a.preventDefault();for(var b=a.dataTransfer.files,d=0;d-1?f.readAsText(e):f.readAsDataURL(e)}},this)):console.log("The File APIs are not fully supported in this 
browser."),this},e.Element.prototype._setProperty=function(a,b){this[a]=b},b.exports=e.Element},{"./core":37}],42:[function(a,b,c){var d=a("./core"),e=a("./constants");d.Graphics=function(a,b,c,f){var g=c||e.P2D,h=document.createElement("canvas"),i=this._userNode||document.body;i.appendChild(h),d.Element.call(this,h,f,!1),this._styles=[],this.width=a,this.height=b,this._pixelDensity=f._pixelDensity,g===e.WEBGL?this._renderer=new d.RendererGL(h,this,!1):this._renderer=new d.Renderer2D(h,this,!1),this._renderer.resize(a,b),this._renderer._applyDefaults(),f._elements.push(this);for(var j in d.prototype)this[j]||("function"==typeof d.prototype[j]?this[j]=d.prototype[j].bind(this):this[j]=d.prototype[j]);return this},d.Graphics.prototype=Object.create(d.Element.prototype),b.exports=d.Graphics},{"./constants":36,"./core":37}],43:[function(a,b,c){function d(a){var b=0,c=0;if(a.offsetParent){do b+=a.offsetLeft,c+=a.offsetTop;while(a=a.offsetParent)}else b+=a.offsetLeft,c+=a.offsetTop;return[b,c]}var e=a("./core"),f=a("../core/constants");e.Renderer=function(a,b,c){e.Element.call(this,a,b),this.canvas=a,this._pInst=b,c?(this._isMainCanvas=!0,this._pInst._setProperty("_curElement",this),this._pInst._setProperty("canvas",this.canvas),this._pInst._setProperty("width",this.width),this._pInst._setProperty("height",this.height)):(this.canvas.style.display="none",this._styles=[]),this._textSize=12,this._textLeading=15,this._textFont="sans-serif",this._textStyle=f.NORMAL,this._textAscent=null,this._textDescent=null,this._rectMode=f.CORNER,this._ellipseMode=f.CENTER,this._curveTightness=0,this._imageMode=f.CORNER,this._tint=null,this._doStroke=!0,this._doFill=!0,this._strokeSet=!1,this._fillSet=!1,this._colorMode=f.RGB,this._colorMaxes={rgb:[255,255,255,255],hsb:[360,100,100,1],hsl:[360,100,100,1]}},e.Renderer.prototype=Object.create(e.Element.prototype),e.Renderer.prototype.resize=function(a,b){this.width=a,this.height=b,this.elt.width=a*this._pInst._pixelDensity,this.elt.height=b*this._pInst._pixelDensity,this.elt.style.width=a+"px",this.elt.style.height=b+"px",this._isMainCanvas&&(this._pInst._setProperty("width",this.width),this._pInst._setProperty("height",this.height))},e.Renderer.prototype.textLeading=function(a){return arguments.length&&arguments[0]?(this._setProperty("_textLeading",a),this):this._textLeading},e.Renderer.prototype.textSize=function(a){return arguments.length&&arguments[0]?(this._setProperty("_textSize",a),this._setProperty("_textLeading",a*f._DEFAULT_LEADMULT),this._applyTextProperties()):this._textSize},e.Renderer.prototype.textStyle=function(a){return arguments.length&&arguments[0]?((a===f.NORMAL||a===f.ITALIC||a===f.BOLD)&&this._setProperty("_textStyle",a),this._applyTextProperties()):this._textStyle},e.Renderer.prototype.textAscent=function(){return null===this._textAscent&&this._updateTextMetrics(),this._textAscent},e.Renderer.prototype.textDescent=function(){return null===this._textDescent&&this._updateTextMetrics(),this._textDescent},e.Renderer.prototype._applyDefaults=function(){return this},e.Renderer.prototype._isOpenType=function(a){return a=a||this._textFont,"object"==typeof a&&a.font&&a.font.supported},e.Renderer.prototype._updateTextMetrics=function(){if(this._isOpenType())return this._setProperty("_textAscent",this._textFont._textAscent()),this._setProperty("_textDescent",this._textFont._textDescent()),this;var a=document.createElement("span");a.style.fontFamily=this._textFont,a.style.fontSize=this._textSize+"px",a.innerHTML="ABCjgq|";var 
b=document.createElement("div");b.style.display="inline-block",b.style.width="1px",b.style.height="0px";var c=document.createElement("div");c.appendChild(a),c.appendChild(b),c.style.height="0px",c.style.overflow="hidden",document.body.appendChild(c),b.style.verticalAlign="baseline";var e=d(b),f=d(a),g=e[1]-f[1];b.style.verticalAlign="bottom",e=d(b),f=d(a);var h=e[1]-f[1],i=h-g;return document.body.removeChild(c),this._setProperty("_textAscent",g),this._setProperty("_textDescent",i),this},b.exports=e.Renderer},{"../core/constants":36, +"./core":37}],44:[function(a,b,c){var d=a("./core"),e=a("./canvas"),f=a("./constants"),g=a("../image/filters");a("./p5.Renderer");var h="rgba(0,0,0,0)";d.Renderer2D=function(a,b,c){return d.Renderer.call(this,a,b,c),this.drawingContext=this.canvas.getContext("2d"),this._pInst._setProperty("drawingContext",this.drawingContext),this},d.Renderer2D.prototype=Object.create(d.Renderer.prototype),d.Renderer2D.prototype._applyDefaults=function(){this.drawingContext.fillStyle=f._DEFAULT_FILL,this.drawingContext.strokeStyle=f._DEFAULT_STROKE,this.drawingContext.lineCap=f.ROUND,this.drawingContext.font="normal 12px sans-serif"},d.Renderer2D.prototype.resize=function(a,b){d.Renderer.prototype.resize.call(this,a,b),this.drawingContext.scale(this._pInst._pixelDensity,this._pInst._pixelDensity)},d.Renderer2D.prototype.background=function(){if(this.drawingContext.save(),this.drawingContext.setTransform(1,0,0,1,0,0),this.drawingContext.scale(this._pInst._pixelDensity,this._pInst._pixelDensity),arguments[0]instanceof d.Image)this._pInst.image(arguments[0],0,0,this.width,this.height);else{var a=this.drawingContext.fillStyle,b=this._pInst.color.apply(this,arguments),c=b.toString();this.drawingContext.fillStyle=c,this.drawingContext.fillRect(0,0,this.width,this.height),this.drawingContext.fillStyle=a}this.drawingContext.restore()},d.Renderer2D.prototype.clear=function(){this.drawingContext.clearRect(0,0,this.width,this.height)},d.Renderer2D.prototype.fill=function(){var a=this.drawingContext,b=this._pInst.color.apply(this,arguments);a.fillStyle=b.toString()},d.Renderer2D.prototype.stroke=function(){var a=this.drawingContext,b=this._pInst.color.apply(this,arguments);a.strokeStyle=b.toString()},d.Renderer2D.prototype.image=function(a,b,c,e,f,g,h,i,j){var k;try{this._tint&&(d.MediaElement&&a instanceof d.MediaElement&&a.loadPixels(),a.canvas&&(k=this._getTintedImageCanvas(a))),k||(k=a.canvas||a.elt),this.drawingContext.drawImage(k,b,c,e,f,g,h,i,j)}catch(l){if("NS_ERROR_NOT_AVAILABLE"!==l.name)throw l}},d.Renderer2D.prototype._getTintedImageCanvas=function(a){if(!a.canvas)return a;var b=g._toPixels(a.canvas),c=document.createElement("canvas");c.width=a.canvas.width,c.height=a.canvas.height;for(var d=c.getContext("2d"),e=d.createImageData(a.canvas.width,a.canvas.height),f=e.data,h=0;ha+c||0>b+e||a>this.width||b>this.height)return[0,0,0,255];var f=this._pInst||this,g=f._pixelDensity;a=Math.floor(a),b=Math.floor(b);var h=a*g,i=b*g;if(1===c&&1===e){var j=this.drawingContext.getImageData(h,i,1,1).data;return[j[0],j[1],j[2],j[3]]}var k=Math.min(c,f.width),l=Math.min(e,f.height),m=k*g,n=l*g,o=new d.Image(k,l);return o.canvas.getContext("2d").drawImage(this.canvas,h,i,m,n,0,0,k,l),o},d.Renderer2D.prototype.loadPixels=function(){var 
a=this._pixelDensity||this._pInst._pixelDensity,b=this.width*a,c=this.height*a,d=this.drawingContext.getImageData(0,0,b,c);this._pInst?(this._pInst._setProperty("imageData",d),this._pInst._setProperty("pixels",d.data)):(this._setProperty("imageData",d),this._setProperty("pixels",d.data))},d.Renderer2D.prototype.set=function(a,b,c){if(a=Math.floor(a),b=Math.floor(b),c instanceof d.Image)this.drawingContext.save(),this.drawingContext.setTransform(1,0,0,1,0,0),this.drawingContext.scale(this._pInst._pixelDensity,this._pInst._pixelDensity),this.drawingContext.drawImage(c.canvas,a,b),this.loadPixels.call(this._pInst),this.drawingContext.restore();else{var e=this._pInst||this,f=0,g=0,h=0,i=0,j=4*(b*e._pixelDensity*this.width*e._pixelDensity+a*e._pixelDensity);if(e.imageData||e.loadPixels.call(e),"number"==typeof c)jn;)o=Math.min(h-g,f.HALF_PI),p.push(this._acuteArcToBezier(g,o)),g+=o;return this._doFill&&(j.beginPath(),p.forEach(function(a,b){0===b&&j.moveTo(k.x+a.ax*l,k.y+a.ay*m),j.bezierCurveTo(k.x+a.bx*l,k.y+a.by*m,k.x+a.cx*l,k.y+a.cy*m,k.x+a.dx*l,k.y+a.dy*m)}),(i===f.PIE||null==i)&&j.lineTo(k.x,k.y),j.closePath(),j.fill()),this._doStroke&&(j.beginPath(),p.forEach(function(a,b){0===b&&j.moveTo(k.x+a.ax*l,k.y+a.ay*m),j.bezierCurveTo(k.x+a.bx*l,k.y+a.by*m,k.x+a.cx*l,k.y+a.cy*m,k.x+a.dx*l,k.y+a.dy*m)}),i===f.PIE?(j.lineTo(k.x,k.y),j.closePath()):i===f.CHORD&&j.closePath(),j.stroke()),this},d.Renderer2D.prototype.ellipse=function(a){var b=this.drawingContext,c=this._doFill,d=this._doStroke,e=a[0],f=a[1],g=a[2],i=a[3];if(c&&!d){if(b.fillStyle===h)return this}else if(!c&&d&&b.strokeStyle===h)return this;var j=.5522847498,k=g/2*j,l=i/2*j,m=e+g,n=f+i,o=e+g/2,p=f+i/2;b.beginPath(),b.moveTo(e,p),b.bezierCurveTo(e,p-l,o-k,f,o,f),b.bezierCurveTo(o+k,f,m,p-l,m,p),b.bezierCurveTo(m,p+l,o+k,n,o,n),b.bezierCurveTo(o-k,n,e,p+l,e,p),b.closePath(),c&&b.fill(),d&&b.stroke()},d.Renderer2D.prototype.line=function(a,b,c,d){var e=this.drawingContext;return this._doStroke?e.strokeStyle===h?this:(e.lineWidth%2===1&&e.translate(.5,.5),e.beginPath(),e.moveTo(a,b),e.lineTo(c,d),e.stroke(),e.lineWidth%2===1&&e.translate(-.5,-.5),this):this},d.Renderer2D.prototype.point=function(a,b){var c=this.drawingContext,d=c.strokeStyle,e=c.fillStyle;return this._doStroke?c.strokeStyle===h?this:(a=Math.round(a),b=Math.round(b),c.fillStyle=d,c.lineWidth>1?(c.beginPath(),c.arc(a,b,c.lineWidth/2,0,f.TWO_PI,!1),c.fill()):c.fillRect(a,b,1,1),void(c.fillStyle=e)):this},d.Renderer2D.prototype.quad=function(a,b,c,d,e,f,g,i){var j=this.drawingContext,k=this._doFill,l=this._doStroke;if(k&&!l){if(j.fillStyle===h)return this}else if(!k&&l&&j.strokeStyle===h)return this;return j.beginPath(),j.moveTo(a,b),j.lineTo(c,d),j.lineTo(e,f),j.lineTo(g,i),j.closePath(),k&&j.fill(),l&&j.stroke(),this},d.Renderer2D.prototype.rect=function(a){var b=a[0],c=a[1],d=a[2],e=a[3],f=a[4],g=a[5],i=a[6],j=a[7],k=this.drawingContext,l=this._doFill,m=this._doStroke;if(l&&!m){if(k.fillStyle===h)return this}else if(!l&&m&&k.strokeStyle===h)return this;if(this._doStroke&&k.lineWidth%2===1&&k.translate(.5,.5),k.beginPath(),"undefined"==typeof f)k.rect(b,c,d,e);else{"undefined"==typeof g&&(g=f),"undefined"==typeof i&&(i=g),"undefined"==typeof j&&(j=i);var n=d/2,o=e/2;2*f>d&&(f=n),2*f>e&&(f=o),2*g>d&&(g=n),2*g>e&&(g=o),2*i>d&&(i=n),2*i>e&&(i=o),2*j>d&&(j=n),2*j>e&&(j=o),k.beginPath(),k.moveTo(b+f,c),k.arcTo(b+d,c,b+d,c+e,g),k.arcTo(b+d,c+e,b,c+e,i),k.arcTo(b,c+e,b,c,j),k.arcTo(b,c,b+d,c,f),k.closePath()}return 
this._doFill&&k.fill(),this._doStroke&&k.stroke(),this._doStroke&&k.lineWidth%2===1&&k.translate(-.5,-.5),this},d.Renderer2D.prototype.triangle=function(a){var b=this.drawingContext,c=this._doFill,d=this._doStroke,e=a[0],f=a[1],g=a[2],i=a[3],j=a[4],k=a[5];if(c&&!d){if(b.fillStyle===h)return this}else if(!c&&d&&b.strokeStyle===h)return this;b.beginPath(),b.moveTo(e,f),b.lineTo(g,i),b.lineTo(j,k),b.closePath(),c&&b.fill(),d&&b.stroke()},d.Renderer2D.prototype.endShape=function(a,b,c,d,e,g,h){if(0===b.length)return this;if(!this._doStroke&&!this._doFill)return this;var i,j=a===f.CLOSE;j&&!g&&b.push(b[0]);var k,l,m=b.length;if(!c||h!==f.POLYGON&&null!==h)if(!d||h!==f.POLYGON&&null!==h)if(!e||h!==f.POLYGON&&null!==h)if(h===f.POINTS)for(k=0;m>k;k++)i=b[k],this._doStroke&&this._pInst.stroke(i[6]),this._pInst.point(i[0],i[1]);else if(h===f.LINES)for(k=0;m>k+1;k+=2)i=b[k],this._doStroke&&this._pInst.stroke(b[k+1][6]),this._pInst.line(i[0],i[1],b[k+1][0],b[k+1][1]);else if(h===f.TRIANGLES)for(k=0;m>k+2;k+=3)i=b[k],this.drawingContext.beginPath(),this.drawingContext.moveTo(i[0],i[1]),this.drawingContext.lineTo(b[k+1][0],b[k+1][1]),this.drawingContext.lineTo(b[k+2][0],b[k+2][1]),this.drawingContext.lineTo(i[0],i[1]),this._doFill&&(this._pInst.fill(b[k+2][5]),this.drawingContext.fill()),this._doStroke&&(this._pInst.stroke(b[k+2][6]),this.drawingContext.stroke()),this.drawingContext.closePath();else if(h===f.TRIANGLE_STRIP)for(k=0;m>k+1;k++)i=b[k],this.drawingContext.beginPath(),this.drawingContext.moveTo(b[k+1][0],b[k+1][1]),this.drawingContext.lineTo(i[0],i[1]),this._doStroke&&this._pInst.stroke(b[k+1][6]),this._doFill&&this._pInst.fill(b[k+1][5]),m>k+2&&(this.drawingContext.lineTo(b[k+2][0],b[k+2][1]),this._doStroke&&this._pInst.stroke(b[k+2][6]),this._doFill&&this._pInst.fill(b[k+2][5])),this._doFillStrokeClose();else if(h===f.TRIANGLE_FAN){if(m>2)for(this.drawingContext.beginPath(),this.drawingContext.moveTo(b[0][0],b[0][1]),this.drawingContext.lineTo(b[1][0],b[1][1]),this.drawingContext.lineTo(b[2][0],b[2][1]),this._doFill&&this._pInst.fill(b[2][5]),this._doStroke&&this._pInst.stroke(b[2][6]),this._doFillStrokeClose(),k=3;m>k;k++)i=b[k],this.drawingContext.beginPath(),this.drawingContext.moveTo(b[0][0],b[0][1]),this.drawingContext.lineTo(b[k-1][0],b[k-1][1]),this.drawingContext.lineTo(i[0],i[1]),this._doFill&&this._pInst.fill(i[5]),this._doStroke&&this._pInst.stroke(i[6]),this._doFillStrokeClose()}else if(h===f.QUADS)for(k=0;m>k+3;k+=4){for(i=b[k],this.drawingContext.beginPath(),this.drawingContext.moveTo(i[0],i[1]),l=1;4>l;l++)this.drawingContext.lineTo(b[k+l][0],b[k+l][1]);this.drawingContext.lineTo(i[0],i[1]),this._doFill&&this._pInst.fill(b[k+3][5]),this._doStroke&&this._pInst.stroke(b[k+3][6]),this._doFillStrokeClose()}else 
if(h===f.QUAD_STRIP){if(m>3)for(k=0;m>k+1;k+=2)i=b[k],this.drawingContext.beginPath(),m>k+3?(this.drawingContext.moveTo(b[k+2][0],b[k+2][1]),this.drawingContext.lineTo(i[0],i[1]),this.drawingContext.lineTo(b[k+1][0],b[k+1][1]),this.drawingContext.lineTo(b[k+3][0],b[k+3][1]),this._doFill&&this._pInst.fill(b[k+3][5]),this._doStroke&&this._pInst.stroke(b[k+3][6])):(this.drawingContext.moveTo(i[0],i[1]),this.drawingContext.lineTo(b[k+1][0],b[k+1][1])),this._doFillStrokeClose()}else{for(this.drawingContext.beginPath(),this.drawingContext.moveTo(b[0][0],b[0][1]),k=1;m>k;k++)i=b[k],i.isVert&&(i.moveTo?this.drawingContext.moveTo(i[0],i[1]):this.drawingContext.lineTo(i[0],i[1]));this._doFillStrokeClose()}else{for(this.drawingContext.beginPath(),k=0;m>k;k++)b[k].isVert?b[k].moveTo?this.drawingContext.moveTo([0],b[k][1]):this.drawingContext.lineTo(b[k][0],b[k][1]):this.drawingContext.quadraticCurveTo(b[k][0],b[k][1],b[k][2],b[k][3]);this._doFillStrokeClose()}else{for(this.drawingContext.beginPath(),k=0;m>k;k++)b[k].isVert?b[k].moveTo?this.drawingContext.moveTo(b[k][0],b[k][1]):this.drawingContext.lineTo(b[k][0],b[k][1]):this.drawingContext.bezierCurveTo(b[k][0],b[k][1],b[k][2],b[k][3],b[k][4],b[k][5]);this._doFillStrokeClose()}else if(m>3){var n=[],o=1-this._curveTightness;for(this.drawingContext.beginPath(),this.drawingContext.moveTo(b[1][0],b[1][1]),k=1;m>k+2;k++)i=b[k],n[0]=[i[0],i[1]],n[1]=[i[0]+(o*b[k+1][0]-o*b[k-1][0])/6,i[1]+(o*b[k+1][1]-o*b[k-1][1])/6],n[2]=[b[k+1][0]+(o*b[k][0]-o*b[k+2][0])/6,b[k+1][1]+(o*b[k][1]-o*b[k+2][1])/6],n[3]=[b[k+1][0],b[k+1][1]],this.drawingContext.bezierCurveTo(n[1][0],n[1][1],n[2][0],n[2][1],n[3][0],n[3][1]);j&&this.drawingContext.lineTo(b[k+1][0],b[k+1][1]),this._doFillStrokeClose()}return c=!1,d=!1,e=!1,g=!1,j&&b.pop(),this},d.Renderer2D.prototype.noSmooth=function(){return"imageSmoothingEnabled"in this.drawingContext?this.drawingContext.imageSmoothingEnabled=!1:"mozImageSmoothingEnabled"in this.drawingContext?this.drawingContext.mozImageSmoothingEnabled=!1:"webkitImageSmoothingEnabled"in this.drawingContext?this.drawingContext.webkitImageSmoothingEnabled=!1:"msImageSmoothingEnabled"in this.drawingContext&&(this.drawingContext.msImageSmoothingEnabled=!1),this},d.Renderer2D.prototype.smooth=function(){return"imageSmoothingEnabled"in this.drawingContext?this.drawingContext.imageSmoothingEnabled=!0:"mozImageSmoothingEnabled"in this.drawingContext?this.drawingContext.mozImageSmoothingEnabled=!0:"webkitImageSmoothingEnabled"in this.drawingContext?this.drawingContext.webkitImageSmoothingEnabled=!0:"msImageSmoothingEnabled"in this.drawingContext&&(this.drawingContext.msImageSmoothingEnabled=!0),this},d.Renderer2D.prototype.strokeCap=function(a){return(a===f.ROUND||a===f.SQUARE||a===f.PROJECT)&&(this.drawingContext.lineCap=a),this},d.Renderer2D.prototype.strokeJoin=function(a){return(a===f.ROUND||a===f.BEVEL||a===f.MITER)&&(this.drawingContext.lineJoin=a),this},d.Renderer2D.prototype.strokeWeight=function(a){return"undefined"==typeof a||0===a?this.drawingContext.lineWidth=1e-4:this.drawingContext.lineWidth=a,this},d.Renderer2D.prototype._getFill=function(){return this.drawingContext.fillStyle},d.Renderer2D.prototype._getStroke=function(){return this.drawingContext.strokeStyle},d.Renderer2D.prototype.bezier=function(a,b,c,d,e,f,g,h){return this._pInst.beginShape(),this._pInst.vertex(a,b),this._pInst.bezierVertex(c,d,e,f,g,h),this._pInst.endShape(),this},d.Renderer2D.prototype.curve=function(a,b,c,d,e,f,g,h){return 
this._pInst.beginShape(),this._pInst.curveVertex(a,b),this._pInst.curveVertex(c,d),this._pInst.curveVertex(e,f),this._pInst.curveVertex(g,h),this._pInst.endShape(),this},d.Renderer2D.prototype._doFillStrokeClose=function(){this._doFill&&this.drawingContext.fill(),this._doStroke&&this.drawingContext.stroke(),this.drawingContext.closePath()},d.Renderer2D.prototype.applyMatrix=function(a,b,c,d,e,f){this.drawingContext.transform(a,b,c,d,e,f)},d.Renderer2D.prototype.resetMatrix=function(){return this.drawingContext.setTransform(1,0,0,1,0,0),this.drawingContext.scale(this._pInst._pixelDensity,this._pInst._pixelDensity),this},d.Renderer2D.prototype.rotate=function(a){this.drawingContext.rotate(a)},d.Renderer2D.prototype.scale=function(a,b){return this.drawingContext.scale(a,b),this},d.Renderer2D.prototype.shearX=function(a){return this._pInst._angleMode===f.DEGREES&&(a=this._pInst.degrees(a)),this.drawingContext.transform(1,0,this._pInst.tan(a),1,0,0),this},d.Renderer2D.prototype.shearY=function(a){return this._pInst._angleMode===f.DEGREES&&(a=this._pInst.degrees(a)),this.drawingContext.transform(1,this._pInst.tan(a),0,1,0,0),this},d.Renderer2D.prototype.translate=function(a,b){return this.drawingContext.translate(a,b),this},d.Renderer2D.prototype.text=function(a,b,c,d,e){var g,h,i,j,k,l,m,n,o,p,q=this._pInst,r=Number.MAX_VALUE;if(this._doFill||this._doStroke){if("string"!=typeof a&&(a=a.toString()),a=a.replace(/(\t)/g," "),g=a.split("\n"),"undefined"!=typeof d){for(o=0,i=0;id?(k=n[h]+" ",o+=q.textLeading()):k=l;switch(this._rectMode===f.CENTER&&(b-=d/2,c-=e/2),this.drawingContext.textAlign){case f.CENTER:b+=d/2;break;case f.RIGHT:b+=d}if("undefined"!=typeof e){switch(this.drawingContext.textBaseline){case f.BOTTOM:c+=e-o;break;case f._CTX_MIDDLE:c+=(e-o)/2;break;case f.BASELINE:p=!0,this.drawingContext.textBaseline=f.TOP}r=c+e-q.textAscent()}for(i=0;id&&k.length>0?(this._renderText(q,k,b,c,r),k=n[h]+" ",c+=q.textLeading()):k=l;this._renderText(q,k,b,c,r),c+=q.textLeading()}}else{var s=0,t=q.textAlign().vertical;for(t===f.CENTER?s=(g.length-1)*q.textLeading()/2:t===f.BOTTOM&&(s=(g.length-1)*q.textLeading()),j=0;j=e?void 0:(a.push(),this._isOpenType()?this._textFont._renderPath(b,c,d,{renderer:this}):(this._doStroke&&this._strokeSet&&this.drawingContext.strokeText(b,c,d),this._doFill&&(this.drawingContext.fillStyle=this._fillSet?this.drawingContext.fillStyle:f._DEFAULT_TEXT_FILL,this.drawingContext.fillText(b,c,d))),a.pop(),a)},d.Renderer2D.prototype.textWidth=function(a){return this._isOpenType()?this._textFont._textWidth(a,this._textSize):this.drawingContext.measureText(a).width},d.Renderer2D.prototype.textAlign=function(a,b){if(arguments.length)return(a===f.LEFT||a===f.RIGHT||a===f.CENTER)&&(this.drawingContext.textAlign=a),(b===f.TOP||b===f.BOTTOM||b===f.CENTER||b===f.BASELINE)&&(b===f.CENTER?this.drawingContext.textBaseline=f._CTX_MIDDLE:this.drawingContext.textBaseline=b),this._pInst;var c=this.drawingContext.textBaseline;return c===f._CTX_MIDDLE&&(c=f.CENTER),{horizontal:this.drawingContext.textAlign,vertical:c}},d.Renderer2D.prototype._applyTextProperties=function(){var a,b=this._pInst;return this._setProperty("_textAscent",null),this._setProperty("_textDescent",null),a=this._textFont,this._isOpenType()&&(a=this._textFont.font.familyName,this._setProperty("_textStyle",this._textFont.font.styleName)),this.drawingContext.font=this._textStyle+" "+this._textSize+"px 
"+a,b},d.Renderer2D.prototype.push=function(){this.drawingContext.save()},d.Renderer2D.prototype.pop=function(){this.drawingContext.restore()},b.exports=d.Renderer2D},{"../image/filters":54,"./canvas":35,"./constants":36,"./core":37,"./p5.Renderer":43}],45:[function(a,b,c){var d=a("./core"),e=a("./constants");a("./p5.Graphics"),a("./p5.Renderer2D"),a("../webgl/p5.RendererGL");var f="defaultCanvas0";d.prototype.createCanvas=function(a,b,c){var g,h,i=c||e.P2D;if(arguments[3]&&(g="boolean"==typeof arguments[3]?arguments[3]:!1),i===e.WEBGL)h=document.getElementById(f),h&&h.parentNode.removeChild(h),h=document.createElement("canvas"),h.id=f;else if(g){h=document.createElement("canvas");for(var j=0;document.getElementById("defaultCanvas"+j);)j++;f="defaultCanvas"+j,h.id=f}else h=this.canvas;return this._setupDone||(h.dataset.hidden=!0,h.style.visibility="hidden"),this._userNode?this._userNode.appendChild(h):document.body.appendChild(h),i===e.WEBGL?(this._setProperty("_renderer",new d.RendererGL(h,this,!0)),this._isdefaultGraphics=!0):this._isdefaultGraphics||(this._setProperty("_renderer",new d.Renderer2D(h,this,!0)),this._isdefaultGraphics=!0),this._renderer.resize(a,b),this._renderer._applyDefaults(),g&&this._elements.push(this._renderer),this._renderer},d.prototype.resizeCanvas=function(a,b,c){if(this._renderer){var d={};for(var e in this.drawingContext){var f=this.drawingContext[e];"object"!=typeof f&&"function"!=typeof f&&(d[e]=f)}this._renderer.resize(a,b);for(var g in d)this.drawingContext[g]=d[g];c||this.redraw()}},d.prototype.noCanvas=function(){this.canvas&&this.canvas.parentNode.removeChild(this.canvas)},d.prototype.createGraphics=function(a,b,c){return new d.Graphics(a,b,c,this)},d.prototype.blendMode=function(a){if(a!==e.BLEND&&a!==e.DARKEST&&a!==e.LIGHTEST&&a!==e.DIFFERENCE&&a!==e.MULTIPLY&&a!==e.EXCLUSION&&a!==e.SCREEN&&a!==e.REPLACE&&a!==e.OVERLAY&&a!==e.HARD_LIGHT&&a!==e.SOFT_LIGHT&&a!==e.DODGE&&a!==e.BURN&&a!==e.ADD&&a!==e.NORMAL)throw new Error("Mode "+a+" not recognized.");this._renderer.blendMode(a)},b.exports=d},{"../webgl/p5.RendererGL":86,"./constants":36,"./core":37,"./p5.Graphics":42,"./p5.Renderer2D":44}],46:[function(a,b,c){window.requestAnimationFrame=function(){return window.requestAnimationFrame||window.webkitRequestAnimationFrame||window.mozRequestAnimationFrame||window.oRequestAnimationFrame||window.msRequestAnimationFrame||function(a,b){window.setTimeout(a,1e3/60)}}(),window.performance=window.performance||{},window.performance.now=function(){var a=Date.now();return window.performance.now||window.performance.mozNow||window.performance.msNow||window.performance.oNow||window.performance.webkitNow||function(){return Date.now()-a}}(),function(){"use strict";"undefined"==typeof Uint8ClampedArray||Uint8ClampedArray.prototype.slice||Object.defineProperty(Uint8ClampedArray.prototype,"slice",{value:Array.prototype.slice,writable:!0,configurable:!0,enumerable:!1})}()},{}],47:[function(a,b,c){"use strict";var d=a("./core");d.prototype.exit=function(){throw"exit() not implemented, see 
remove()"},d.prototype.noLoop=function(){this._loop=!1},d.prototype.loop=function(){this._loop=!0,this._draw()},d.prototype.push=function(){this._renderer.push(),this._styles.push({_doStroke:this._renderer._doStroke,_strokeSet:this._renderer._strokeSet,_doFill:this._renderer._doFill,_fillSet:this._renderer._fillSet,_tint:this._renderer._tint,_imageMode:this._renderer._imageMode,_rectMode:this._renderer._rectMode,_ellipseMode:this._renderer._ellipseMode,_colorMode:this._renderer._colorMode,_textFont:this._renderer._textFont,_textLeading:this._renderer._textLeading,_textSize:this._renderer._textSize,_textStyle:this._renderer._textStyle})},d.prototype.pop=function(){this._renderer.pop();var a=this._styles.pop();for(var b in a)this._renderer[b]=a[b]},d.prototype.pushStyle=function(){throw new Error("pushStyle() not used, see push()")},d.prototype.popStyle=function(){throw new Error("popStyle() not used, see pop()")},d.prototype.redraw=function(){this.resetMatrix(),this._renderer.isP3D&&this._renderer._update();var a=1;if(1===arguments.length)try{parseInt(arguments[0])>1&&(a=parseInt(arguments[0]))}catch(b){}var c=this.setup||window.setup,d=this.draw||window.draw;if("function"==typeof d){"undefined"==typeof c&&this.scale(this._pixelDensity,this._pixelDensity);for(var e=this,f=function(a){a.call(e)},g=0;a>g;g++)this._registeredMethods.pre.forEach(f),d(),this._registeredMethods.post.forEach(f)}},d.prototype.size=function(){var a="size() is not a valid p5 function, to set the size of the ";throw a+="drawing canvas, please use createCanvas() instead"},b.exports=d},{"./core":37}],48:[function(a,b,c){"use strict";var d=a("./core"),e=a("./constants");d.prototype.applyMatrix=function(a,b,c,d,e,f){return this._renderer.applyMatrix(a,b,c,d,e,f),this},d.prototype.popMatrix=function(){throw new Error("popMatrix() not used, see pop()")},d.prototype.printMatrix=function(){throw new Error("printMatrix() not implemented")},d.prototype.pushMatrix=function(){throw new Error("pushMatrix() not used, see push()")},d.prototype.resetMatrix=function(){return this._renderer.resetMatrix(),this},d.prototype.rotate=function(){for(var a,b=new Array(arguments.length),c=0;c1?this._renderer.rotate(a,b[1]):this._renderer.rotate(a),this},d.prototype.rotateX=function(a){for(var b=new Array(arguments.length),c=0;c0))throw"vertex() must be used once before calling quadraticVertex()";k=!0;for(var i=[],j=0;jn||Math.abs(this.accelerationY-this.pAccelerationY)>n||Math.abs(this.accelerationZ-this.pAccelerationZ)>n)&&a();var b=this.deviceTurned||window.deviceTurned;if("function"==typeof b){var c=this.rotationX+180,d=this.pRotationX+180,p=h+180;c-d>0&&270>c-d||-270>c-d?k="clockwise":(0>c-d||c-d>270)&&(k="counter-clockwise"),k!==e&&(p=c),Math.abs(c-p)>90&&Math.abs(c-p)<270&&(p=c,this._setProperty("turnAxis","X"),b()),e=k,h=p-180;var q=this.rotationY+180,r=this.pRotationY+180,s=i+180;q-r>0&&270>q-r||-270>q-r?l="clockwise":(0>q-r||q-this.pRotationY>270)&&(l="counter-clockwise"),l!==f&&(s=q),Math.abs(q-s)>90&&Math.abs(q-s)<270&&(s=q,this._setProperty("turnAxis","Y"),b()),f=l,i=s-180,this.rotationZ-this.pRotationZ>0&&this.rotationZ-this.pRotationZ<270||this.rotationZ-this.pRotationZ<-270?m="clockwise":(this.rotationZ-this.pRotationZ<0||this.rotationZ-this.pRotationZ>270)&&(m="counter-clockwise"),m!==g&&(j=this.rotationZ),Math.abs(this.rotationZ-j)>90&&Math.abs(this.rotationZ-j)<270&&(j=this.rotationZ,this._setProperty("turnAxis","Z"),b()),g=m,this._setProperty("turnAxis",void 0)}var 
t=this.deviceShaken||window.deviceShaken;if("function"==typeof t){var u,v;null!==this.pAccelerationX&&(u=Math.abs(this.accelerationX-this.pAccelerationX),v=Math.abs(this.accelerationY-this.pAccelerationY)),u+v>o&&t()}},b.exports=d},{"../core/core":37}],51:[function(a,b,c){"use strict";var d=a("../core/core"),e={};d.prototype.isKeyPressed=!1,d.prototype.keyIsPressed=!1,d.prototype.key="",d.prototype.keyCode=0,d.prototype._onkeydown=function(a){if(!e[a.which]){this._setProperty("isKeyPressed",!0),this._setProperty("keyIsPressed",!0), +this._setProperty("keyCode",a.which),e[a.which]=!0;var b=String.fromCharCode(a.which);b||(b=a.which),this._setProperty("key",b);var c=this.keyPressed||window.keyPressed;if("function"==typeof c&&!a.charCode){var d=c(a);d===!1&&a.preventDefault()}}},d.prototype._onkeyup=function(a){var b=this.keyReleased||window.keyReleased;this._setProperty("isKeyPressed",!1),this._setProperty("keyIsPressed",!1),this._setProperty("_lastKeyCodeTyped",null),e[a.which]=!1;var c=String.fromCharCode(a.which);if(c||(c=a.which),this._setProperty("key",c),this._setProperty("keyCode",a.which),"function"==typeof b){var d=b(a);d===!1&&a.preventDefault()}},d.prototype._onkeypress=function(a){if(a.which!==this._lastKeyCodeTyped){this._setProperty("keyCode",a.which),this._setProperty("_lastKeyCodeTyped",a.which),this._setProperty("key",String.fromCharCode(a.which));var b=this.keyTyped||window.keyTyped;if("function"==typeof b){var c=b(a);c===!1&&a.preventDefault()}}},d.prototype._onblur=function(a){e={}},d.prototype.keyIsDown=function(a){return e[a]},b.exports=d},{"../core/core":37}],52:[function(a,b,c){"use strict";function d(a,b){var c=a.getBoundingClientRect();return{x:b.clientX-c.left,y:b.clientY-c.top}}var e=a("../core/core"),f=a("../core/constants");e.prototype._hasMouseInteracted=!1,e.prototype.mouseX=0,e.prototype.mouseY=0,e.prototype.pmouseX=0,e.prototype.pmouseY=0,e.prototype.winMouseX=0,e.prototype.winMouseY=0,e.prototype.pwinMouseX=0,e.prototype.pwinMouseY=0,e.prototype.mouseButton=0,e.prototype.mouseIsPressed=!1,e.prototype.isMousePressed=!1,e.prototype._updateNextMouseCoords=function(a){var b=this.mouseX,c=this.mouseY;if("touchstart"===a.type||"touchmove"===a.type||"touchend"===a.type||a.touches)b=this.touchX,c=this.touchY;else if(null!==this._curElement){var e=d(this._curElement.elt,a);b=e.x,c=e.y}this._setProperty("mouseX",b),this._setProperty("mouseY",c),this._setProperty("winMouseX",a.pageX),this._setProperty("winMouseY",a.pageY),this._hasMouseInteracted||(this._updateMouseCoords(),this._setProperty("_hasMouseInteracted",!0))},e.prototype._updateMouseCoords=function(){this._setProperty("pmouseX",this.mouseX),this._setProperty("pmouseY",this.mouseY),this._setProperty("pwinMouseX",this.winMouseX),this._setProperty("pwinMouseY",this.winMouseY)},e.prototype._setMouseButton=function(a){1===a.button?this._setProperty("mouseButton",f.CENTER):2===a.button?this._setProperty("mouseButton",f.RIGHT):this._setProperty("mouseButton",f.LEFT)},e.prototype._onmousemove=function(a){var b,c=this._isGlobal?window:this;this._updateNextMouseCoords(a),this._updateNextTouchCoords(a),this.isMousePressed?"function"==typeof c.mouseDragged?(b=c.mouseDragged(a),b===!1&&a.preventDefault()):"function"==typeof c.touchMoved&&(b=c.touchMoved(a),b===!1&&a.preventDefault()):"function"==typeof c.mouseMoved&&(b=c.mouseMoved(a),b===!1&&a.preventDefault())},e.prototype._onmousedown=function(a){var 
b,c=this._isGlobal?window:this;this._setProperty("isMousePressed",!0),this._setProperty("mouseIsPressed",!0),this._setMouseButton(a),this._updateNextMouseCoords(a),this._updateNextTouchCoords(a),"function"==typeof c.mousePressed?(b=c.mousePressed(a),b===!1&&a.preventDefault()):"function"==typeof c.touchStarted&&(b=c.touchStarted(a),b===!1&&a.preventDefault())},e.prototype._onmouseup=function(a){var b,c=this._isGlobal?window:this;this._setProperty("isMousePressed",!1),this._setProperty("mouseIsPressed",!1),"function"==typeof c.mouseReleased?(b=c.mouseReleased(a),b===!1&&a.preventDefault()):"function"==typeof c.touchEnded&&(b=c.touchEnded(a),b===!1&&a.preventDefault())},e.prototype._ondragend=e.prototype._onmouseup,e.prototype._ondragover=e.prototype._onmousemove,e.prototype._onclick=function(a){var b=this._isGlobal?window:this;if("function"==typeof b.mouseClicked){var c=b.mouseClicked(a);c===!1&&a.preventDefault()}},e.prototype._onwheel=function(a){var b=this._isGlobal?window:this;if("function"==typeof b.mouseWheel){a.delta=a.deltaY;var c=b.mouseWheel(a);c===!1&&a.preventDefault()}},b.exports=e},{"../core/constants":36,"../core/core":37}],53:[function(a,b,c){"use strict";function d(a,b,c){c=c||0;var d=a.getBoundingClientRect(),e=b.touches[c]||b.changedTouches[c];return{x:e.clientX-d.left,y:e.clientY-d.top,id:e.identifier}}var e=a("../core/core");e.prototype._hasTouchInteracted=!1,e.prototype.touchX=0,e.prototype.touchY=0,e.prototype.ptouchX=0,e.prototype.ptouchY=0,e.prototype.touches=[],e.prototype.touchIsDown=!1,e.prototype._updateNextTouchCoords=function(a){var b=this.touchX,c=this.touchY;if("mousedown"!==a.type&&"mousemove"!==a.type&&"mouseup"!==a.type&&a.touches){if(null!==this._curElement){var e=d(this._curElement.elt,a,0);b=e.x,c=e.y;for(var f=[],g=0;gb?1:248>b?b:248,g!==b){g=b,h=1+g<<1,i=new Int32Array(h),j=new Array(h);for(var c=0;h>c;c++)j[c]=new Int32Array(256);for(var d,e,f,k,l=1,m=b-1;b>l;l++){i[b+l]=i[m]=e=m*m,f=j[b+l],k=j[m--];for(var n=0;256>n;n++)f[n]=k[n]=e*n}d=i[b]=b*b,f=j[b];for(var o=0;256>o;o++)f[o]=d*o}}function e(a,b){for(var c=f._toPixels(a),e=a.width,k=a.height,l=e*k,m=new Int32Array(l),n=0;l>n;n++)m[n]=f._getARGB(c,n);var o,p,q,r,s,t,u,v,w,x,y=new Int32Array(l),z=new Int32Array(l),A=new Int32Array(l),B=new Int32Array(l),C=0;d(b);var D,E,F,G;for(E=0;k>E;E++){for(D=0;e>D;D++){if(r=q=p=s=o=0,t=D-g,0>t)x=-t,t=0;else{if(t>=e)break;x=0}for(F=x;h>F&&!(t>=e);F++){var H=m[t+C];G=j[F],s+=G[(-16777216&H)>>>24],p+=G[(16711680&H)>>16],q+=G[(65280&H)>>8],r+=G[255&H],o+=i[F],t++}u=C+D,y[u]=s/o,z[u]=p/o,A[u]=q/o,B[u]=r/o}C+=e}for(C=0,v=-g,w=v*e,E=0;k>E;E++){for(D=0;e>D;D++){if(r=q=p=s=o=0,0>v)x=u=-v,t=D;else{if(v>=k)break;x=0,u=v,t=D+w}for(F=x;h>F&&!(u>=k);F++)G=j[F],s+=G[y[t]],p+=G[z[t]],q+=G[A[t]],r+=G[B[t]],o+=i[F],u++,t+=e;m[D+C]=s/o<<24|p/o<<16|q/o<<8|r/o}C+=e,w+=e,v++}f._setPixels(c,m)}var f={};f._toPixels=function(a){return a instanceof ImageData?a.data:a.getContext("2d").getImageData(0,0,a.width,a.height).data},f._getARGB=function(a,b){var c=4*b;return a[c+3]<<24&4278190080|a[c]<<16&16711680|a[c+1]<<8&65280|255&a[c+2]},f._setPixels=function(a,b){for(var c=0,d=0,e=a.length;e>d;d++)c=4*d,a[c+0]=(16711680&b[d])>>>16,a[c+1]=(65280&b[d])>>>8,a[c+2]=255&b[d],a[c+3]=(4278190080&b[d])>>>24},f._toImageData=function(a){return a instanceof ImageData?a:a.getContext("2d").getImageData(0,0,a.width,a.height)},f._createImageData=function(a,b){return 
f._tmpCanvas=document.createElement("canvas"),f._tmpCtx=f._tmpCanvas.getContext("2d"),this._tmpCtx.createImageData(a,b)},f.apply=function(a,b,c){var d=a.getContext("2d"),e=d.getImageData(0,0,a.width,a.height),f=b(e,c);f instanceof ImageData?d.putImageData(f,0,0,0,0,a.width,a.height):d.putImageData(e,0,0,0,0,a.width,a.height)},f.threshold=function(a,b){var c=f._toPixels(a);void 0===b&&(b=.5);for(var d=Math.floor(255*b),e=0;e=d?255:0,c[e]=c[e+1]=c[e+2]=g}},f.gray=function(a){for(var b=f._toPixels(a),c=0;cb||b>255)throw new Error("Level must be greater than 2 and less than 255 for posterize");for(var d=b-1,e=0;e>8)/d,c[e+1]=255*(h*b>>8)/d,c[e+2]=255*(i*b>>8)/d}},f.dilate=function(a){for(var b,c,d,e,g,h,i,j,k,l,m,n,o,p,q,r,s,t=f._toPixels(a),u=0,v=t.length?t.length/4:0,w=new Int32Array(v);v>u;)for(b=u,c=u+a.width;c>u;)d=e=f._getARGB(t,u),i=u-1,h=u+1,j=u-a.width,k=u+a.width,b>i&&(i=u),h>=c&&(h=u),0>j&&(j=0),k>=v&&(k=u),n=f._getARGB(t,j),m=f._getARGB(t,i),o=f._getARGB(t,k),l=f._getARGB(t,h),g=77*(d>>16&255)+151*(d>>8&255)+28*(255&d),q=77*(m>>16&255)+151*(m>>8&255)+28*(255&m),p=77*(l>>16&255)+151*(l>>8&255)+28*(255&l),r=77*(n>>16&255)+151*(n>>8&255)+28*(255&n),s=77*(o>>16&255)+151*(o>>8&255)+28*(255&o),q>g&&(e=m,g=q),p>g&&(e=l,g=p),r>g&&(e=n,g=r),s>g&&(e=o,g=s),w[u++]=e;f._setPixels(t,w)},f.erode=function(a){for(var b,c,d,e,g,h,i,j,k,l,m,n,o,p,q,r,s,t=f._toPixels(a),u=0,v=t.length?t.length/4:0,w=new Int32Array(v);v>u;)for(b=u,c=u+a.width;c>u;)d=e=f._getARGB(t,u),i=u-1,h=u+1,j=u-a.width,k=u+a.width,b>i&&(i=u),h>=c&&(h=u),0>j&&(j=0),k>=v&&(k=u),n=f._getARGB(t,j),m=f._getARGB(t,i),o=f._getARGB(t,k),l=f._getARGB(t,h),g=77*(d>>16&255)+151*(d>>8&255)+28*(255&d),q=77*(m>>16&255)+151*(m>>8&255)+28*(255&m),p=77*(l>>16&255)+151*(l>>8&255)+28*(255&l),r=77*(n>>16&255)+151*(n>>8&255)+28*(255&n),s=77*(o>>16&255)+151*(o>>8&255)+28*(255&o),g>q&&(e=m,g=q),g>p&&(e=l,g=p),g>r&&(e=n,g=r),g>s&&(e=o,g=s),w[u++]=e;f._setPixels(t,w)};var g,h,i,j;f.blur=function(a,b){e(a,b)},b.exports=f},{}],55:[function(a,b,c){"use strict";var d=a("../core/core"),e=[];d.prototype.createImage=function(a,b){return new d.Image(a,b)},d.prototype.saveCanvas=function(){var a,b,c;if(3===arguments.length?(a=arguments[0],b=arguments[1],c=arguments[2]):2===arguments.length?"object"==typeof arguments[0]?(a=arguments[0],b=arguments[1]):(b=arguments[0],c=arguments[1]):1===arguments.length&&("object"==typeof arguments[0]?a=arguments[0]:b=arguments[0]),a instanceof d.Element&&(a=a.elt),a instanceof HTMLCanvasElement||(a=null),c||(c=d.prototype._checkFileExtension(b,c)[1],""===c&&(c="png")),a||this._curElement&&this._curElement.elt&&(a=this._curElement.elt),d.prototype._isSafari()){var e="Hello, Safari user!\n";e+="Now capturing a screenshot...\n",e+="To save this image,\n",e+="go to File --> Save As.\n",alert(e),window.location.href=a.toDataURL()}else{var f;if("undefined"==typeof c)c="png",f="image/png";else switch(c){case"png":f="image/png";break;case"jpeg":f="image/jpeg";break;case"jpg":f="image/jpeg";break;default:f="image/png"}var g="image/octet-stream",h=a.toDataURL(f);h=h.replace(f,g),d.prototype.downloadFile(h,b,c)}},d.prototype.saveFrames=function(a,b,c,f,g){var h=c||3;h=d.prototype.constrain(h,0,15),h=1e3*h;var i=f||15;i=d.prototype.constrain(i,0,22);var j=0,k=d.prototype._makeFrame,l=this._curElement.elt,m=setInterval(function(){k(a+j,b,l),j++},1e3/i);setTimeout(function(){if(clearInterval(m),g)g(e);else for(var a=0;a0&&b>a?a:b}var 
e=a("../core/core"),f=a("./filters"),g=a("../core/canvas"),h=a("../core/constants");a("../core/error_helpers"),e.prototype.loadImage=function(a,b,c){var d=new Image,f=new e.Image(1,1,this),g=e._getDecrementPreload.apply(this,arguments);return d.onload=function(){f.width=f.canvas.width=d.width,f.height=f.canvas.height=d.height,f.drawingContext.drawImage(d,0,0),"function"==typeof b&&b(f),g&&b!==g&&g()},d.onerror=function(a){e._friendlyFileLoadError(0,d.src),"function"==typeof c&&c!==g&&c(a)},0!==a.indexOf("data:image/")&&(d.crossOrigin="Anonymous"),d.src=a,f},e.prototype.image=function(a,b,c,e,f,h,i,j,k){if(arguments.length<=5)if(h=b||0,i=c||0,b=0,c=0,a.elt&&a.elt.videoWidth&&!a.canvas){var l=a.elt.videoWidth,m=a.elt.videoHeight;j=e||a.elt.width,k=f||a.elt.width*m/l,e=l,f=m}else j=e||a.width,k=f||a.height,e=a.width,f=a.height;else{if(9!==arguments.length)throw"Wrong number of arguments to image()";b=b||0,c=c||0,e=d(e,a.width),f=d(f,a.height),h=h||0,i=i||0,j=j||a.width,k=k||a.height}var n=g.modeAdjust(h,i,j,k,this._renderer._imageMode);this._renderer.image(a,b,c,e,f,n.x,n.y,n.w,n.h)},e.prototype.tint=function(){var a=this.color.apply(this,arguments);this._renderer._tint=a.levels},e.prototype.noTint=function(){this._renderer._tint=null},e.prototype._getTintedImageCanvas=function(a){if(!a.canvas)return a;var b=f._toPixels(a.canvas),c=document.createElement("canvas");c.width=a.canvas.width,c.height=a.canvas.height;for(var d=c.getContext("2d"),e=d.createImageData(a.canvas.width,a.canvas.height),g=e.data,h=0;h0&&this.loadPixels()},d.Image.prototype.copy=function(){d.prototype.copy.apply(this,arguments)},d.Image.prototype.mask=function(a){void 0===a&&(a=this);var b=this.drawingContext.globalCompositeOperation,c=1;a instanceof d.Renderer&&(c=a._pInst._pixelDensity);var e=[a,0,0,c*a.width,c*a.height,0,0,this.width,this.height];this.drawingContext.globalCompositeOperation="destination-in",this.copy.apply(this,e),this.drawingContext.globalCompositeOperation=b},d.Image.prototype.filter=function(a,b){e.apply(this.canvas,e[a.toLowerCase()],b)},d.Image.prototype.blend=function(){d.prototype.blend.apply(this,arguments)},d.Image.prototype.save=function(a,b){var c;if(b)switch(b.toLowerCase()){case"png":c="image/png";break;case"jpeg":c="image/jpeg";break;case"jpg":c="image/jpeg";break;default:c="image/png"}else b="png",c="image/png";var e="image/octet-stream",f=this.canvas.toDataURL(c);f=f.replace(c,e),d.prototype.downloadFile(f,a,b)},b.exports=d.Image},{"../core/core":37,"./filters":54}],58:[function(a,b,c){"use strict";var d=a("../core/core"),e=a("./filters");a("../color/p5.Color"),d.prototype.pixels=[],d.prototype.blend=function(){this._renderer?this._renderer.blend.apply(this._renderer,arguments):d.Renderer2D.prototype.blend.apply(this,arguments)},d.prototype.copy=function(){d.Renderer2D._copyHelper.apply(this,arguments)},d.prototype.filter=function(a,b){e.apply(this.canvas,e[a.toLowerCase()],b)},d.prototype.get=function(a,b,c,d){return this._renderer.get(a,b,c,d)},d.prototype.loadPixels=function(){this._renderer.loadPixels()},d.prototype.set=function(a,b,c){this._renderer.set(a,b,c)},d.prototype.updatePixels=function(a,b,c,d){0!==this.pixels.length&&this._renderer.updatePixels(a,b,c,d)},b.exports=d},{"../color/p5.Color":31,"../core/core":37,"./filters":54}],59:[function(a,b,c){"use strict";function d(a,b){var c={};if(b=b||[],"undefined"==typeof b)for(var d=0;d/g,">").replace(/"/g,""").replace(/'/g,"'")}function f(a,b){b&&b!==!0&&"true"!==b||(b=""),a||(a="untitled");var c="";return 
a&&a.indexOf(".")>-1&&(c=a.split(".").pop()),b&&c!==b&&(c=b,a=a+"."+c),[a,c]}function g(a){document.body.removeChild(a.target)}var h=a("../core/core"),i=a("reqwest"),j=a("opentype.js");a("../core/error_helpers"),h._getDecrementPreload=function(){var a=arguments[arguments.length-1];return(window.preload||this&&this.preload)&&"function"==typeof a?a:null},h.prototype.loadFont=function(a,b,c){var d=new h.Font(this),e=h._getDecrementPreload.apply(this,arguments);return j.load(a,function(f,g){if(f){if("undefined"!=typeof c&&c!==e)return c(f);throw f}d.font=g,"undefined"!=typeof b&&b(d),e&&b!==e&&e();var h,i,j=["ttf","otf","woff","woff2"],k=a.split("\\").pop().split("/").pop(),l=k.lastIndexOf("."),m=1>l?null:k.substr(l+1);j.indexOf(m)>-1&&(h=k.substr(0,l),i=document.createElement("style"),i.appendChild(document.createTextNode("\n@font-face {\nfont-family: "+h+";\nsrc: url("+a+");\n}\n")),document.head.appendChild(i))}),d},h.prototype.createInput=function(){throw"not yet implemented"},h.prototype.createReader=function(){throw"not yet implemented"},h.prototype.loadBytes=function(){throw"not yet implemented"},h.prototype.loadJSON=function(){for(var a,b=arguments[0],c=arguments[1],d=h._getDecrementPreload.apply(this,arguments),e=[],f="json",g=2;g"),d.println("");var k=' "),d.println(""),d.println(" "),"0"!==f[0]){d.println(" ");for(var l=0;l"+m),d.println(" ")}d.println(" ")}for(var n=0;n");for(var o=0;o"+q),d.println(" ")}d.println(" ")}d.println("
"),d.println(""),d.print("")}d.close(),d.flush()},h.prototype.writeFile=function(a,b,c){var d="application/octet-stream";h.prototype._isSafari()&&(d="text/plain");var e=new Blob(a,{type:d}),f=window.URL.createObjectURL(e);h.prototype.downloadFile(f,b,c)},h.prototype.downloadFile=function(a,b,c){var d=f(b,c),e=d[0],i=d[1],j=document.createElement("a");if(j.href=a,j.download=e,j.onclick=g,j.style.display="none",document.body.appendChild(j),h.prototype._isSafari()){var k="Hello, Safari user! To download this file...\n";k+="1. Go to File --> Save As.\n",k+='2. Choose "Page Source" as the Format.\n',k+='3. Name it with this extension: ."'+i+'"',alert(k)}j.click(),a=null},h.prototype._checkFileExtension=f,h.prototype._isSafari=function(){var a=Object.prototype.toString.call(window.HTMLElement);return a.indexOf("Constructor")>0},b.exports=h},{"../core/core":37,"../core/error_helpers":40,"opentype.js":8,reqwest:27}],60:[function(a,b,c){"use strict";var d=a("../core/core");d.Table=function(a){this.columns=[],this.rows=[]},d.Table.prototype.addRow=function(a){var b=a||new d.TableRow;if("undefined"==typeof b.arr||"undefined"==typeof b.obj)throw"invalid TableRow: "+b;return b.table=this,this.rows.push(b),b},d.Table.prototype.removeRow=function(a){this.rows[a].table=null;var b=this.rows.splice(a+1,this.rows.length);this.rows.pop(),this.rows=this.rows.concat(b)},d.Table.prototype.getRow=function(a){return this.rows[a]},d.Table.prototype.getRows=function(){return this.rows},d.Table.prototype.findRow=function(a,b){if("string"==typeof b){for(var c=0;c=0))throw'This table has no column named "'+a+'"';d=b[a],e[d]=b}else e[f]=this.rows[f].obj;return e},d.Table.prototype.getArray=function(){for(var a=[],b=0;b=0))throw'This table has no column named "'+a+'"';this.obj[a]=b,this.arr[c]=b}else{if(!(a0},d.XML.prototype.listChildren=function(){return this.children.map(function(a){return a.name})},d.XML.prototype.getChildren=function(a){return a?this.children.filter(function(b){return b.name===a}):this.children},d.XML.prototype.getChild=function(a){return"string"==typeof a?this.children.find(function(b){return b.name===a}):this.children[a]},d.XML.prototype.addChild=function(a){a instanceof d.XML&&this.children.push(a)},d.XML.prototype.removeChild=function(a){var b=-1;if("string"==typeof a){for(var c=0;ce;e++)d[e]=Math.random()}0>a&&(a=-a),0>b&&(b=-b),0>c&&(c=-c);for(var n,o,p,q,r,s=Math.floor(a),t=Math.floor(b),u=Math.floor(c),v=a-s,w=b-t,x=c-u,y=0,z=.5,A=0;k>A;A++){var B=s+(t<=1&&(s++,v--),w>=1&&(t++,w--),x>=1&&(u++,x--)}return y},e.prototype.noiseDetail=function(a,b){a>0&&(k=a),b>0&&(l=b)},e.prototype.noiseSeed=function(a){var b=function(){var a,b,c=4294967296,d=1664525,e=1013904223;return{setSeed:function(d){b=a=(null==d?Math.random()*c:d)>>>0},getSeed:function(){return a},rand:function(){return b=(d*b+e)%c,b/c}}}();b.setSeed(a),d=new Array(j+1);for(var c=0;j+1>c;c++)d[c]=b.rand()},b.exports=e},{"../core/core":37}],66:[function(a,b,c){"use strict";var d=a("../core/core"),e=a("./polargeometry"),f=a("../core/constants");d.Vector=function(){var a,b,c;arguments[0]instanceof d?(this.p5=arguments[0],a=arguments[1][0]||0,b=arguments[1][1]||0,c=arguments[1][2]||0):(a=arguments[0]||0,b=arguments[1]||0,c=arguments[2]||0),this.x=a,this.y=b,this.z=c},d.Vector.prototype.toString=function(){return"p5.Vector Object : ["+this.x+", "+this.y+", "+this.z+"]"},d.Vector.prototype.set=function(a,b,c){return a instanceof d.Vector?(this.x=a.x||0,this.y=a.y||0,this.z=a.z||0,this):a instanceof 
Array?(this.x=a[0]||0,this.y=a[1]||0,this.z=a[2]||0,this):(this.x=a||0,this.y=b||0,this.z=c||0,this)},d.Vector.prototype.copy=function(){return this.p5?new d.Vector(this.p5,[this.x,this.y,this.z]):new d.Vector(this.x,this.y,this.z)},d.Vector.prototype.add=function(a,b,c){return a instanceof d.Vector?(this.x+=a.x||0,this.y+=a.y||0,this.z+=a.z||0,this):a instanceof Array?(this.x+=a[0]||0,this.y+=a[1]||0,this.z+=a[2]||0,this):(this.x+=a||0,this.y+=b||0,this.z+=c||0,this)},d.Vector.prototype.sub=function(a,b,c){return a instanceof d.Vector?(this.x-=a.x||0,this.y-=a.y||0,this.z-=a.z||0,this):a instanceof Array?(this.x-=a[0]||0,this.y-=a[1]||0,this.z-=a[2]||0,this):(this.x-=a||0,this.y-=b||0,this.z-=c||0,this)},d.Vector.prototype.mult=function(a){return this.x*=a||0,this.y*=a||0,this.z*=a||0,this},d.Vector.prototype.div=function(a){return this.x/=a,this.y/=a,this.z/=a,this},d.Vector.prototype.mag=function(){return Math.sqrt(this.magSq())},d.Vector.prototype.magSq=function(){var a=this.x,b=this.y,c=this.z;return a*a+b*b+c*c},d.Vector.prototype.dot=function(a,b,c){return a instanceof d.Vector?this.dot(a.x,a.y,a.z):this.x*(a||0)+this.y*(b||0)+this.z*(c||0)},d.Vector.prototype.cross=function(a){var b=this.y*a.z-this.z*a.y,c=this.z*a.x-this.x*a.z,e=this.x*a.y-this.y*a.x;return this.p5?new d.Vector(this.p5,[b,c,e]):new d.Vector(b,c,e)},d.Vector.prototype.dist=function(a){var b=a.copy().sub(this);return b.mag()},d.Vector.prototype.normalize=function(){return 0===this.mag()?this:this.div(this.mag())},d.Vector.prototype.limit=function(a){var b=this.magSq();return b>a*a&&(this.div(Math.sqrt(b)),this.mult(a)),this},d.Vector.prototype.setMag=function(a){return this.normalize().mult(a)},d.Vector.prototype.heading=function(){var a=Math.atan2(this.y,this.x);return this.p5?this.p5._angleMode===f.RADIANS?a:e.radiansToDegrees(a):a},d.Vector.prototype.rotate=function(a){this.p5&&this.p5._angleMode===f.DEGREES&&(a=e.degreesToRadians(a));var b=this.heading()+a,c=this.mag();return this.x=Math.cos(b)*c,this.y=Math.sin(b)*c,this},d.Vector.prototype.lerp=function(a,b,c,e){return a instanceof d.Vector?this.lerp(a.x,a.y,a.z,b):(this.x+=(a-this.x)*e||0,this.y+=(b-this.y)*e||0,this.z+=(c-this.z)*e||0,this)},d.Vector.prototype.array=function(){return[this.x||0,this.y||0,this.z||0]},d.Vector.prototype.equals=function(a,b,c){var e,f,g;return a instanceof d.Vector?(e=a.x||0,f=a.y||0,g=a.z||0):a instanceof Array?(e=a[0]||0,f=a[1]||0,g=a[2]||0):(e=a||0,f=b||0,g=c||0),this.x===e&&this.y===f&&this.z===g},d.Vector.fromAngle=function(a){return this.p5&&this.p5._angleMode===f.DEGREES&&(a=e.degreesToRadians(a)),this.p5?new d.Vector(this.p5,[Math.cos(a),Math.sin(a),0]):new d.Vector(Math.cos(a),Math.sin(a),0)},d.Vector.random2D=function(){var a;return a=this.p5?this.p5._angleMode===f.DEGREES?this.p5.random(360):this.p5.random(f.TWO_PI):Math.random()*Math.PI*2,this.fromAngle(a)},d.Vector.random3D=function(){var a,b;this.p5?(a=this.p5.random(0,f.TWO_PI),b=this.p5.random(-1,1)):(a=Math.random()*Math.PI*2,b=2*Math.random()-1);var c=Math.sqrt(1-b*b)*Math.cos(a),e=Math.sqrt(1-b*b)*Math.sin(a);return this.p5?new d.Vector(this.p5,[c,e,b]):new d.Vector(c,e,b)},d.Vector.add=function(a,b,c){return c?c.set(a):c=a.copy(),c.add(b),c},d.Vector.sub=function(a,b,c){return c?c.set(a):c=a.copy(),c.sub(b),c},d.Vector.mult=function(a,b,c){return c?c.set(a):c=a.copy(),c.mult(b),c},d.Vector.div=function(a,b,c){return c?c.set(a):c=a.copy(),c.div(b),c},d.Vector.dot=function(a,b){return a.dot(b)},d.Vector.cross=function(a,b){return 
a.cross(b)},d.Vector.dist=function(a,b){return a.dist(b)},d.Vector.lerp=function(a,b,c,d){return d?d.set(a):d=a.copy(),d.lerp(b,c),d},d.Vector.angleBetween=function(a,b){var c=Math.acos(a.dot(b)/(a.mag()*b.mag()));return this.p5&&this.p5._angleMode===f.DEGREES&&(c=e.radiansToDegrees(c)),c},d.Vector.mag=function(a){var b=a.x,c=a.y,d=a.z,e=b*b+c*c+d*d;return Math.sqrt(e)},b.exports=d.Vector},{"../core/constants":36,"../core/core":37,"./polargeometry":67}],67:[function(a,b,c){b.exports={degreesToRadians:function(a){return 2*Math.PI*a/360},radiansToDegrees:function(a){return 360*a/(2*Math.PI)}}},{}],68:[function(a,b,c){"use strict";var d=a("../core/core"),e=!1,f=function(){var a,b,c=4294967296,d=1664525,e=1013904223;return{setSeed:function(d){b=a=(null==d?Math.random()*c:d)>>>0},getSeed:function(){return a},rand:function(){return b=(d*b+e)%c,b/c}}}();d.prototype.randomSeed=function(a){f.setSeed(a),e=!0},d.prototype.random=function(a,b){var c;if(c=e?f.rand():Math.random(),"undefined"==typeof a)return c;if("undefined"==typeof b)return a instanceof Array?a[Math.floor(c*a.length)]:c*a;if(a>b){var d=a;a=b,b=d}return c*(b-a)+a};var g,h=!1;d.prototype.randomGaussian=function(a,b){var c,d,e,f;if(h)c=g,h=!1;else{do d=this.random(2)-1,e=this.random(2)-1,f=d*d+e*e;while(f>=1);f=Math.sqrt(-2*Math.log(f)/f),c=d*f,g=e*f,h=!0}var i=a||0,j=b||1;return c*j+i},b.exports=d},{"../core/core":37}],69:[function(a,b,c){"use strict";var d=a("../core/core"),e=a("./polargeometry"),f=a("../core/constants");d.prototype._angleMode=f.RADIANS,d.prototype.acos=function(a){return this._angleMode===f.RADIANS?Math.acos(a):e.radiansToDegrees(Math.acos(a))},d.prototype.asin=function(a){return this._angleMode===f.RADIANS?Math.asin(a):e.radiansToDegrees(Math.asin(a))},d.prototype.atan=function(a){return this._angleMode===f.RADIANS?Math.atan(a):e.radiansToDegrees(Math.atan(a))},d.prototype.atan2=function(a,b){return this._angleMode===f.RADIANS?Math.atan2(a,b):e.radiansToDegrees(Math.atan2(a,b))},d.prototype.cos=function(a){return this._angleMode===f.RADIANS?Math.cos(a):Math.cos(this.radians(a))},d.prototype.sin=function(a){return this._angleMode===f.RADIANS?Math.sin(a):Math.sin(this.radians(a))},d.prototype.tan=function(a){return this._angleMode===f.RADIANS?Math.tan(a):Math.tan(this.radians(a))},d.prototype.degrees=function(a){return e.radiansToDegrees(a)},d.prototype.radians=function(a){return e.degreesToRadians(a)},d.prototype.angleMode=function(a){(a===f.DEGREES||a===f.RADIANS)&&(this._angleMode=a)},b.exports=d},{"../core/constants":36,"../core/core":37,"./polargeometry":67}],70:[function(a,b,c){"use strict";var d=a("../core/core");d.prototype.textAlign=function(a,b){return this._renderer.textAlign.apply(this._renderer,arguments)},d.prototype.textLeading=function(a){return this._renderer.textLeading.apply(this._renderer,arguments)},d.prototype.textSize=function(a){return this._renderer.textSize.apply(this._renderer,arguments)},d.prototype.textStyle=function(a){return this._renderer.textStyle.apply(this._renderer,arguments)},d.prototype.textWidth=function(a){return this._renderer.textWidth.apply(this._renderer,arguments)},d.prototype.textAscent=function(){return this._renderer.textAscent()},d.prototype.textDescent=function(){return this._renderer.textDescent()},d.prototype._updateTextMetrics=function(){return this._renderer._updateTextMetrics()},b.exports=d},{"../core/core":37}],71:[function(a,b,c){"use strict";var d=a("../core/core"),e=a("../core/constants");a("../core/error_helpers"),d.prototype.text=function(a,b,c,d,e){for(var 
f=new Array(arguments.length),g=0;gi;i+=f)g.push(n(a,i));return c.simplifyThreshold&&e(g,c.simplifyThreshold),g}function e(a,b){b="undefined"==typeof b?0:b;for(var c=0,d=a.length-1;a.length>3&&d>=0;--d)j(i(a,d-1),i(a,d),i(a,d+1),b)&&(a.splice(d%a.length,1),c++);return c}function f(a){for(var b,c=[],d=0;db?b%c+c:b%c]}function j(a,b,c,d){if(!d)return 0===k(a,b,c);"undefined"==typeof j.tmpPoint1&&(j.tmpPoint1=[],j.tmpPoint2=[]);var e=j.tmpPoint1,f=j.tmpPoint2;e.x=b.x-a.x,e.y=b.y-a.y,f.x=c.x-b.x,f.y=c.y-b.y;var g=e.x*f.x+e.y*f.y,h=Math.sqrt(e.x*e.x+e.y*e.y),i=Math.sqrt(f.x*f.x+f.y*f.y),l=Math.acos(g/(h*i));return d>l}function k(a,b,c){return(b[0]-a[0])*(c[1]-a[1])-(c[0]-a[0])*(b[1]-a[1])}function l(a,b,c,d,e,f,g,h,i){var j=1-i,k=Math.pow(j,3),l=Math.pow(j,2),m=i*i,n=m*i,o=k*a+3*l*i*c+3*j*i*i*e+n*g,p=k*b+3*l*i*d+3*j*i*i*f+n*h,q=a+2*i*(c-a)+m*(e-2*c+a),r=b+2*i*(d-b)+m*(f-2*d+b),s=c+2*i*(e-c)+m*(g-2*e+c),t=d+2*i*(f-d)+m*(h-2*f+d),u=j*a+i*c,v=j*b+i*d,w=j*e+i*g,x=j*f+i*h,y=90-180*Math.atan2(q-s,r-t)/Math.PI;return(q>s||t>r)&&(y+=180),{x:o,y:p,m:{x:q,y:r},n:{x:s,y:t},start:{x:u,y:v},end:{x:w,y:x},alpha:y}}function m(a,b,c,d,e,f,g,h,i){return null==i?u(a,b,c,d,e,f,g,h):l(a,b,c,d,e,f,g,h,v(a,b,c,d,e,f,g,h,i))}function n(a,b,c){a=p(a);for(var d,e,f,g,h,i="",j={},k=0,n=0,o=a.length;o>n;n++){if(f=a[n],"M"===f[0])d=+f[1],e=+f[2];else{if(g=m(d,e,f[1],f[2],f[3],f[4],f[5],f[6]),k+g>b&&!c)return h=m(d,e,f[1],f[2],f[3],f[4],f[5],f[6],b-k),{x:h.x,y:h.y,alpha:h.alpha};k+=g,d=+f[5],e=+f[6]}i+=f.shift()+f}return j.end=i,h=c?k:l(d,e,f[0],f[1],f[2],f[3],f[4],f[5],1),h.alpha&&(h={x:h.x,y:h.y,alpha:h.alpha}),h}function o(a){var b=[],c=0,d=0,e=0,f=0,g=0;"M"===a[0][0]&&(c=+a[0][1],d=+a[0][2],e=c,f=d,g++,b[0]=["M",c,d]);for(var h,i,j,k=3===a.length&&"M"===a[0][0]&&"R"===a[1][0].toUpperCase()&&"Z"===a[2][0].toUpperCase(),l=g,m=a.length;m>l;l++){if(b.push(i=[]),j=a[l],j[0]!==String.prototype.toUpperCase.call(j[0]))switch(i[0]=String.prototype.toUpperCase.call(j[0]),i[0]){case"A":i[1]=j[1],i[2]=j[2],i[3]=j[3],i[4]=j[4],i[5]=j[5],i[6]=+(j[6]+c),i[7]=+(j[7]+d);break;case"V":i[1]=+j[1]+d;break;case"H":i[1]=+j[1]+c;break;case"R":h=[c,d].concat(j.slice(1));for(var n=2,o=h.length;o>n;n++)h[n]=+h[n]+c,h[++n]=+h[n]+d;b.pop(),b=b.concat(r(h,k));break;case"M":e=+j[1]+c,f=+j[2]+d;break;default:for(n=1,o=j.length;o>n;n++)i[n]=+j[n]+(n%2?c:d)}else if("R"===j[0])h=[c,d].concat(j.slice(1)),b.pop(),b=b.concat(r(h,k)),i=["R"].concat(j.slice(-2));else for(var p=0,q=j.length;q>p;p++)i[p]=j[p];switch(i[0]){case"Z":c=e,d=f;break;case"H":c=i[1];break;case"V":d=i[1];break;case"M":e=i[i.length-2],f=i[i.length-1];break;default:c=i[i.length-2],d=i[i.length-1]}}return b}function p(a,b){for(var c=o(a),d=b&&o(b),e={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},f={x:0,y:0,bx:0,by:0,X:0,Y:0,qx:null,qy:null},g=(function(a,b,c){var d,e,f={T:1,Q:1};if(!a)return["C",b.x,b.y,b.x,b.y,b.x,b.y];switch(a[0]in f||(b.qx=b.qy=null),a[0]){case"M":b.X=a[1],b.Y=a[2];break;case"A":a=["C"].concat(q.apply(0,[b.x,b.y].concat(a.slice(1))));break;case"S":"C"===c||"S"===c?(d=2*b.x-b.bx,e=2*b.y-b.by):(d=b.x,e=b.y),a=["C",d,e].concat(a.slice(1));break;case"T":"Q"===c||"T"===c?(b.qx=2*b.x-b.qx,b.qy=2*b.y-b.qy):(b.qx=b.x,b.qy=b.y),a=["C"].concat(t(b.x,b.y,b.qx,b.qy,a[1],a[2]));break;case"Q":b.qx=a[1],b.qy=a[2],a=["C"].concat(t(b.x,b.y,a[1],a[2],a[3],a[4]));break;case"L":a=["C"].concat(s(b.x,b.y,a[1],a[2]));break;case"H":a=["C"].concat(s(b.x,b.y,a[1],b.y));break;case"V":a=["C"].concat(s(b.x,b.y,b.x,a[1]));break;case"Z":a=["C"].concat(s(b.x,b.y,b.X,b.Y))}return 
a}),h=function(a,b){if(a[b].length>7){a[b].shift();for(var e=a[b];e.length;)j[b]="A",d&&(k[b]="A"),a.splice(b++,0,["C"].concat(e.splice(0,6)));a.splice(b,1),p=Math.max(c.length,d&&d.length||0)}},i=function(a,b,e,f,g){a&&b&&"M"===a[g][0]&&"M"!==b[g][0]&&(b.splice(g,0,["M",f.x,f.y]),e.bx=0,e.by=0,e.x=a[g][1],e.y=a[g][2],p=Math.max(c.length,d&&d.length||0))},j=[],k=[],l="",m="",n=0,p=Math.max(c.length,d&&d.length||0);p>n;n++){c[n]&&(l=c[n][0]),"C"!==l&&(j[n]=l,n&&(m=j[n-1])),c[n]=g(c[n],e,m),"A"!==j[n]&&"C"===l&&(j[n]="C"),h(c,n),d&&(d[n]&&(l=d[n][0]),"C"!==l&&(k[n]=l,n&&(m=k[n-1])),d[n]=g(d[n],f,m),"A"!==k[n]&&"C"===l&&(k[n]="C"),h(d,n)),i(c,d,e,f,n),i(d,c,f,e,n);var r=c[n],u=d&&d[n],v=r.length,w=d&&u.length;e.x=r[v-2],e.y=r[v-1],e.bx=parseFloat(r[v-4])||e.x,e.by=parseFloat(r[v-3])||e.y,f.bx=d&&(parseFloat(u[w-4])||f.x),f.by=d&&(parseFloat(u[w-3])||f.y),f.x=d&&u[w-2],f.y=d&&u[w-1]}return d?[c,d]:c}function q(a,b,c,d,e,f,g,h,i,j){var k,l,m,n,o,p=Math.PI,r=120*p/180,s=p/180*(+e||0),t=[],u=function(a,b,c){var d=a*Math.cos(c)-b*Math.sin(c),e=a*Math.sin(c)+b*Math.cos(c);return{x:d,y:e}};if(j)k=j[0],l=j[1],m=j[2],n=j[3];else{o=u(a,b,-s),a=o.x,b=o.y,o=u(h,i,-s),h=o.x,i=o.y;var v=(a-h)/2,w=(b-i)/2,x=v*v/(c*c)+w*w/(d*d);x>1&&(x=Math.sqrt(x),c=x*c,d=x*d);var y=c*c,z=d*d,A=(f===g?-1:1)*Math.sqrt(Math.abs((y*z-y*w*w-z*v*v)/(y*w*w+z*v*v)));m=A*c*w/d+(a+h)/2,n=A*-d*v/c+(b+i)/2,k=Math.asin(((b-n)/d).toFixed(9)),l=Math.asin(((i-n)/d).toFixed(9)),k=m>a?p-k:k,l=m>h?p-l:l,0>k&&(k=2*p+k),0>l&&(l=2*p+l),g&&k>l&&(k-=2*p),!g&&l>k&&(l-=2*p)}var B=l-k;if(Math.abs(B)>r){var C=l,D=h,E=i;l=k+r*(g&&l>k?1:-1),h=m+c*Math.cos(l),i=n+d*Math.sin(l),t=q(h,i,c,d,e,0,g,D,E,[l,C,m,n])}B=l-k;var F=Math.cos(k),G=Math.sin(k),H=Math.cos(l),I=Math.sin(l),J=Math.tan(B/4),K=4/3*c*J,L=4/3*d*J,M=[a,b],N=[a+K*G,b-L*F],O=[h+K*I,i-L*H],P=[h,i];if(N[0]=2*M[0]-N[0],N[1]=2*M[1]-N[1],j)return[N,O,P].concat(t);t=[N,O,P].concat(t).join().split(",");for(var Q=[],R=0,S=t.length;S>R;R++)Q[R]=R%2?u(t[R-1],t[R],s).y:u(t[R],t[R+1],s).x;return Q}function r(a,b){for(var c=[],d=0,e=a.length;e-2*!b>d;d+=2){var f=[{x:+a[d-2],y:+a[d-1]},{x:+a[d],y:+a[d+1]},{x:+a[d+2],y:+a[d+3]},{x:+a[d+4],y:+a[d+5]}];b?d?e-4===d?f[3]={x:+a[0],y:+a[1]}:e-2===d&&(f[2]={x:+a[0],y:+a[1]},f[3]={x:+a[2],y:+a[3]}):f[0]={x:+a[e-2],y:+a[e-1]}:e-4===d?f[3]=f[2]:d||(f[0]={x:+a[d],y:+a[d+1]}),c.push(["C",(-f[0].x+6*f[1].x+f[2].x)/6,(-f[0].y+6*f[1].y+f[2].y)/6,(f[1].x+6*f[2].x-f[3].x)/6,(f[1].y+6*f[2].y-f[3].y)/6,f[2].x,f[2].y])}return c}function s(a,b,c,d){return[a,b,c,d,c,d]}function t(a,b,c,d,e,f){var g=1/3,h=2/3;return[g*a+h*c,g*b+h*d,g*e+h*c,g*f+h*d,e,f]}function u(a,b,c,d,e,f,g,h,i){null==i&&(i=1),i=i>1?1:0>i?0:i;for(var j=i/2,k=12,l=[-.1252,.1252,-.3678,.3678,-.5873,.5873,-.7699,.7699,-.9041,.9041,-.9816,.9816],m=0,n=[.2491,.2491,.2335,.2335,.2032,.2032,.1601,.1601,.1069,.1069,.0472,.0472],o=0;k>o;o++){var p=j*l[o]+j,q=w(p,a,c,e,g),r=w(p,b,d,f,h),s=q*q+r*r;m+=n[o]*Math.sqrt(s)}return j*m}function v(a,b,c,d,e,f,g,h,i){if(!(0>i||u(a,b,c,d,e,f,g,h)n;)l/=2,m+=(i>j?1:-1)*l,j=u(a,b,c,d,e,f,g,h,m);return m}}function w(a,b,c,d,e){var f=-3*b+9*c-9*d+3*e,g=a*f+6*b-12*c+6*d;return a*g-3*b+3*c}function x(){for(var a=new Array(arguments.length),b=0;b2?a=this._getPath(a,b,c,d):"object"==typeof b&&(d=b),d&&"number"==typeof d.decimals&&(e=d.decimals),a.toPathData(e)},y.Font.prototype._getSVG=function(a,b,c,d){var e=3;return"string"==typeof a&&arguments.length>2?a=this._getPath(a,b,c,d):"object"==typeof b&&(d=b),d&&("number"==typeof d.decimals&&(e=d.decimals),"number"==typeof 
d.strokeWidth&&(a.strokeWidth=d.strokeWidth),"undefined"!=typeof d.fill&&(a.fill=d.fill),"undefined"!=typeof d.stroke&&(a.stroke=d.stroke)),a.toSVG(e)},y.Font.prototype._renderPath=function(a,b,c,d){var e,f=d&&d.renderer||this.parent._renderer,g=f.drawingContext;e="object"==typeof a&&a.commands?a.commands:this._getPath(a,b,c,d).commands,g.beginPath();for(var h=0;h1;)d=Math.random()*f|0,e=a[--f],a[f]=a[d],a[d]=e;return a},d.prototype.sort=function(a,b){var c=b?a.slice(0,Math.min(b,a.length)):a,d=b?a.slice(Math.min(b,a.length)):[];return c="string"==typeof c[0]?c.sort():c.sort(function(a,b){return a-b}),c.concat(d)},d.prototype.splice=function(a,b,c){return Array.prototype.splice.apply(a,[c,0].concat(b)),a},d.prototype.subset=function(a,b,c){return"undefined"!=typeof c?a.slice(b,b+c):a.slice(b,a.length)},b.exports=d},{"../core/core":37}],74:[function(a,b,c){"use strict";var d=a("../core/core");d.prototype["float"]=function(a){return parseFloat(a)},d.prototype["int"]=function(a,b){return"string"==typeof a?(b=b||10,parseInt(a,b)):"number"==typeof a?0|a:"boolean"==typeof a?a?1:0:a instanceof Array?a.map(function(a){return d.prototype["int"](a,b)}):void 0},d.prototype.str=function(a){return a instanceof Array?a.map(d.prototype.str):String(a)},d.prototype["boolean"]=function(a){return"number"==typeof a?0!==a:"string"==typeof a?"true"===a.toLowerCase():"boolean"==typeof a?a:a instanceof Array?a.map(d.prototype["boolean"]):void 0},d.prototype["byte"]=function(a){var b=d.prototype["int"](a,10);return"number"==typeof b?(b+128)%256-128:b instanceof Array?b.map(d.prototype["byte"]):void 0},d.prototype["char"]=function(a){return"number"!=typeof a||isNaN(a)?a instanceof Array?a.map(d.prototype["char"]):"string"==typeof a?d.prototype["char"](parseInt(a,10)):void 0:String.fromCharCode(a)},d.prototype.unchar=function(a){return"string"==typeof a&&1===a.length?a.charCodeAt(0):a instanceof Array?a.map(d.prototype.unchar):void 0},d.prototype.hex=function(a,b){if(b=void 0===b||null===b?b=8:b,a instanceof Array)return a.map(function(a){return d.prototype.hex(a,b)});if("number"==typeof a){0>a&&(a=4294967295+a+1);for(var c=Number(a).toString(16).toUpperCase();c.length=b&&(c=c.substring(c.length-b,c.length)),c}},d.prototype.unhex=function(a){return a instanceof Array?a.map(d.prototype.unhex):parseInt("0x"+a,16)},b.exports=d},{"../core/core":37}],75:[function(a,b,c){"use strict";function d(){var a=arguments[0],b=0>a,c=b?a.toString().substring(1):a.toString(),d=c.indexOf("."),e=-1!==d?c.substring(0,d):c,f=-1!==d?c.substring(d+1):"",g=b?"-":"";if(3===arguments.length){var h="";(-1!==d||arguments[2]-f.length>0)&&(h="."),f.length>arguments[2]&&(f=f.substring(0,arguments[2]));for(var i=0;ic.length){c+=-1===b?".":"";for(var e=arguments[1]-c.length+1,f=0;e>f;f++)c+="0"}else c=c.substring(0,arguments[1]+1);return d+c}function f(){return parseFloat(arguments[0])>0?"+"+arguments[0].toString():arguments[0].toString()}function g(){return parseFloat(arguments[0])>0?" 
"+arguments[0].toString():arguments[0].toString()}var h=a("../core/core");h.prototype.join=function(a,b){return a.join(b)},h.prototype.match=function(a,b){return a.match(b)},h.prototype.matchAll=function(a,b){for(var c=new RegExp(b,"g"),d=c.exec(a),e=[];null!==d;)e.push(d),d=c.exec(a);return e},h.prototype.nf=function(){if(arguments[0]instanceof Array){var a=arguments[1],b=arguments[2];return arguments[0].map(function(c){return d(c,a,b)})}var c=Object.prototype.toString.call(arguments[0]);return"[object Arguments]"===c?3===arguments[0].length?this.nf(arguments[0][0],arguments[0][1],arguments[0][2]):2===arguments[0].length?this.nf(arguments[0][0],arguments[0][1]):this.nf(arguments[0][0]):d.apply(this,arguments)},h.prototype.nfc=function(){if(arguments[0]instanceof Array){var a=arguments[1];return arguments[0].map(function(b){return e(b,a)})}return e.apply(this,arguments)},h.prototype.nfp=function(){var a=this.nf.apply(this,arguments);return a instanceof Array?a.map(f):f(a)},h.prototype.nfs=function(){var a=this.nf.apply(this,arguments);return a instanceof Array?a.map(g):g(a)},h.prototype.split=function(a,b){return a.split(b)},h.prototype.splitTokens=function(){var a,b,c,d;return d=arguments[1],arguments.length>1?(c=/\]/g.exec(d),b=/\[/g.exec(d),b&&c?(d=d.slice(0,c.index)+d.slice(c.index+1),b=/\[/g.exec(d),d=d.slice(0,b.index)+d.slice(b.index+1),a=new RegExp("[\\["+d+"\\]]","g")):c?(d=d.slice(0,c.index)+d.slice(c.index+1),a=new RegExp("["+d+"\\]]","g")):b?(d=d.slice(0,b.index)+d.slice(b.index+1),a=new RegExp("["+d+"\\[]","g")):a=new RegExp("["+d+"]","g")):a=/\s/g,arguments[0].split(a).filter(function(a){return a})},h.prototype.trim=function(a){return a instanceof Array?a.map(this.trim):a.trim()},b.exports=h},{"../core/core":37}],76:[function(a,b,c){"use strict";var d=a("../core/core");d.prototype.day=function(){return(new Date).getDate()},d.prototype.hour=function(){return(new Date).getHours()},d.prototype.minute=function(){return(new Date).getMinutes()},d.prototype.millis=function(){return window.performance.now()},d.prototype.month=function(){return(new Date).getMonth()+1},d.prototype.second=function(){return(new Date).getSeconds()},d.prototype.year=function(){return(new Date).getFullYear()},b.exports=d},{"../core/core":37}],77:[function(a,b,c){"use strict";var d=a("../core/core");d.prototype.camera=function(a,b,c){for(var d=new Array(arguments.length),e=0;e0)if("v"===g[0]||"vn"===g[0]){var h=new e.Vector(parseFloat(g[1]),parseFloat(g[2]),parseFloat(g[3]));c[g[0]].push(h)}else if("vt"===g[0]){var i=[parseFloat(g[1]),parseFloat(g[2])];c[g[0]].push(i)}else if("f"===g[0])for(var j=3;j0&&(this._renderer.geometryInHash(a.gid)||this._renderer.createBuffers(a.gid,a),this._renderer.drawBuffers(a.gid))},b.exports=e},{"../core/core":37,"./p5.Geometry":82}],81:[function(a,b,c){"use strict";var d=a("../core/core");d.prototype.normalMaterial=function(){return this._renderer._getShader("normalVert","normalFrag"),this},d.prototype.texture=function(){for(var a=new Array(arguments.length),b=0;bthis.vertices.length-1-this.detailX;b--)a.add(this.vertexNormals[b]);for(a=d.Vector.div(a,this.detailX),b=this.vertices.length-1;b>this.vertices.length-1-this.detailX;b--)this.vertexNormals[b]=a;return this},d.Geometry.prototype.normalize=function(){if(this.vertices.length>0){for(var a=this.vertices[0].copy(),b=this.vertices[0].copy(),c=0;c0?a.reduce(function(a,b){return a.concat(b)}):[]}function e(a){return d(a.map(function(a){return[a.x,a.y,a.z]}))}var 
f=a("../core/core"),g=0;f.RendererGL.prototype._initBufferDefaults=function(a){if(g++,g>1e3){var b=Object.keys(this.gHash)[0];delete this.gHash[b],g--}var c=this.GL;this.gHash[a]={},this.gHash[a].vertexBuffer=c.createBuffer(),this.gHash[a].normalBuffer=c.createBuffer(),this.gHash[a].uvBuffer=c.createBuffer(),this.gHash[a].indexBuffer=c.createBuffer()},f.RendererGL.prototype.createBuffers=function(a,b){var c=this.GL;this._setDefaultCamera(),this._initBufferDefaults(a);var f=this.mHash[this._getCurShaderId()];this.gHash[a].numberOfItems=3*b.faces.length,c.bindBuffer(c.ARRAY_BUFFER,this.gHash[a].vertexBuffer),c.bufferData(c.ARRAY_BUFFER,new Float32Array(e(b.vertices)),c.STATIC_DRAW),f.vertexPositionAttribute=c.getAttribLocation(f,"aPosition"),c.enableVertexAttribArray(f.vertexPositionAttribute),c.vertexAttribPointer(f.vertexPositionAttribute,3,c.FLOAT,!1,0,0),c.bindBuffer(c.ARRAY_BUFFER,this.gHash[a].normalBuffer),c.bufferData(c.ARRAY_BUFFER,new Float32Array(e(b.vertexNormals)),c.STATIC_DRAW),f.vertexNormalAttribute=c.getAttribLocation(f,"aNormal"),c.enableVertexAttribArray(f.vertexNormalAttribute),c.vertexAttribPointer(f.vertexNormalAttribute,3,c.FLOAT,!1,0,0),c.bindBuffer(c.ARRAY_BUFFER,this.gHash[a].uvBuffer),c.bufferData(c.ARRAY_BUFFER,new Float32Array(d(b.uvs)),c.STATIC_DRAW),f.textureCoordAttribute=c.getAttribLocation(f,"aTexCoord"),c.enableVertexAttribArray(f.textureCoordAttribute),c.vertexAttribPointer(f.textureCoordAttribute,2,c.FLOAT,!1,0,0),c.bindBuffer(c.ELEMENT_ARRAY_BUFFER,this.gHash[a].indexBuffer),c.bufferData(c.ELEMENT_ARRAY_BUFFER,new Uint16Array(d(b.faces)),c.STATIC_DRAW)},f.RendererGL.prototype.drawBuffers=function(a){this._setDefaultCamera();var b=this.GL,c=this._getCurShaderId(),d=this.mHash[c];return b.bindBuffer(b.ARRAY_BUFFER,this.gHash[a].vertexBuffer),b.vertexAttribPointer(d.vertexPositionAttribute,3,b.FLOAT,!1,0,0),b.bindBuffer(b.ARRAY_BUFFER,this.gHash[a].normalBuffer),b.vertexAttribPointer(d.vertexNormalAttribute,3,b.FLOAT,!1,0,0),b.bindBuffer(b.ARRAY_BUFFER,this.gHash[a].uvBuffer),b.vertexAttribPointer(d.textureCoordAttribute,2,b.FLOAT,!1,0,0),b.bindBuffer(b.ELEMENT_ARRAY_BUFFER,this.gHash[a].indexBuffer),this._setMatrixUniforms(c),b.drawElements(b.TRIANGLES,this.gHash[a].numberOfItems,b.UNSIGNED_SHORT,0),this},b.exports=f.RendererGL},{"../core/core":37}],86:[function(a,b,c){"use strict";var d=a("../core/core"),e=a("./shader");a("../core/p5.Renderer"),a("./p5.Matrix");var f=[],g=1e3,h={alpha:!0,depth:!0,stencil:!0,antialias:!1,premultipliedAlpha:!1,preserveDrawingBuffer:!1};d.RendererGL=function(a,b,c){return d.Renderer.call(this,a,b,c),this._initContext(),this.isP3D=!0,this.GL=this.drawingContext,this.ambientLightCount=0,this.directionalLightCount=0,this.pointLightCount=0,this._curCamera=null,this.uMVMatrix=new d.Matrix,this.uPMatrix=new d.Matrix,this.uNMatrix=new d.Matrix("mat3"),this.gHash={},this.mHash={},this.isImmediateDrawing=!1,this.immediateMode={},this.curFillColor=[.5,.5,.5,1],this.curStrokeColor=[.5,.5,.5,1],this.pointSize=5,this},d.RendererGL.prototype=Object.create(d.Renderer.prototype),d.RendererGL.prototype._initContext=function(){try{if(this.drawingContext=this.canvas.getContext("webgl",h)||this.canvas.getContext("experimental-webgl",h),null===this.drawingContext)throw new Error("Error creating webgl context");console.log("p5.RendererGL: enabled webgl context");var a=this.drawingContext;a.enable(a.DEPTH_TEST),a.depthFunc(a.LEQUAL),a.viewport(0,0,a.drawingBufferWidth,a.drawingBufferHeight)}catch(b){throw new 
Error(b)}},d.RendererGL.prototype._setDefaultCamera=function(){if(null===this._curCamera){var a=this.width,b=this.height;this.uPMatrix=d.Matrix.identity(),this.uPMatrix.perspective(60/180*Math.PI,a/b,.1,100),this._curCamera="default"}},d.RendererGL.prototype._update=function(){this.uMVMatrix=d.Matrix.identity(),this.translate(0,0,-(this.height/2)/Math.tan(30*Math.PI/180)),this.ambientLightCount=0,this.directionalLightCount=0,this.pointLightCount=0},d.RendererGL.prototype.background=function(){var a=this.GL,b=this._pInst.color.apply(this._pInst,arguments),c=b.levels[0]/255,d=b.levels[1]/255,e=b.levels[2]/255,f=b.levels[3]/255;a.clearColor(c,d,e,f),a.clear(a.COLOR_BUFFER_BIT|a.DEPTH_BUFFER_BIT)},d.RendererGL.prototype._initShaders=function(a,b,c){var d=this.GL,f=d.createShader(d.VERTEX_SHADER);if(d.shaderSource(f,e[a]),d.compileShader(f),!d.getShaderParameter(f,d.COMPILE_STATUS))return alert("Yikes! An error occurred compiling the shaders:"+d.getShaderInfoLog(f)),null;var g=d.createShader(d.FRAGMENT_SHADER);if(d.shaderSource(g,e[b]),d.compileShader(g),!d.getShaderParameter(g,d.COMPILE_STATUS))return alert("Darn! An error occurred compiling the shaders:"+d.getShaderInfoLog(g)),null;var h=d.createProgram();return d.attachShader(h,f),d.attachShader(h,g),d.linkProgram(h),d.getProgramParameter(h,d.LINK_STATUS)||alert("Snap! Error linking shader program"),this._getLocation(h,c),h},d.RendererGL.prototype._getLocation=function(a,b){var c=this.GL;c.useProgram(a),a.uResolution=c.getUniformLocation(a,"uResolution"),c.uniform1f(a.uResolution,g),a.uPMatrixUniform=c.getUniformLocation(a,"uProjectionMatrix"),a.uMVMatrixUniform=c.getUniformLocation(a,"uModelViewMatrix"),void 0===b&&(a.uNMatrixUniform=c.getUniformLocation(a,"uNormalMatrix"),a.samplerUniform=c.getUniformLocation(a,"uSampler"))},d.RendererGL.prototype._setUniform1f=function(a,b,c){var d=this.GL,e=this.mHash[a];return d.useProgram(e),e[b]=d.getUniformLocation(e,b),d.uniform1f(e[b],c),this},d.RendererGL.prototype._setMatrixUniforms=function(a){var b=this.GL,c=this.mHash[a];b.useProgram(c),b.uniformMatrix4fv(c.uPMatrixUniform,!1,this.uPMatrix.mat4),b.uniformMatrix4fv(c.uMVMatrixUniform,!1,this.uMVMatrix.mat4),this.uNMatrix.inverseTranspose(this.uMVMatrix),b.uniformMatrix3fv(c.uNMatrixUniform,!1,this.uNMatrix.mat3)},d.RendererGL.prototype._getShader=function(a,b,c){var d=a+"|"+b;if(!this.materialInHash(d)){var e=this._initShaders(a,b,c);this.mHash[d]=e}return this.curShaderId=d,this.mHash[this.curShaderId]},d.RendererGL.prototype._getCurShaderId=function(){var a,b;return"fill"!==this.drawMode&&void 0===this.curShaderId?(a="normalVert|normalFrag",b=this._initShaders("normalVert","normalFrag"),this.mHash[a]=b,this.curShaderId=a):this.isImmediateDrawing&&"fill"===this.drawMode&&(a="immediateVert|vertexColorFrag",b=this._initShaders("immediateVert","vertexColorFrag"),this.mHash[a]=b,this.curShaderId=a),this.curShaderId},d.RendererGL.prototype.fill=function(a,b,c,d){var e,f=this.GL,g=this._applyColorBlend.apply(this,arguments);return this.curFillColor=g,this.drawMode="fill",this.isImmediateDrawing?(e=this._getShader("immediateVert","vertexColorFrag"),f.useProgram(e)):(e=this._getShader("normalVert","basicFrag"),f.useProgram(e),e.uMaterialColor=f.getUniformLocation(e,"uMaterialColor"),f.uniform4f(e.uMaterialColor,g[0],g[1],g[2],g[3])),this},d.RendererGL.prototype.stroke=function(a,b,c,d){var e=this._pInst.color.apply(this._pInst,arguments),f=e._array;return 
this.curStrokeColor=f,this.drawMode="stroke",this},d.RendererGL.prototype._strokeCheck=function(){if("stroke"===this.drawMode)throw new Error("stroke for shapes in 3D not yet implemented, use fill for now :(")},d.RendererGL.prototype.strokeWeight=function(a){return this.pointSize=a,this},d.RendererGL.prototype.geometryInHash=function(a){return void 0!==this.gHash[a]},d.RendererGL.prototype.materialInHash=function(a){return void 0!==this.mHash[a]},d.RendererGL.prototype.resize=function(a,b){var c=this.GL;d.Renderer.prototype.resize.call(this,a,b),c.viewport(0,0,c.drawingBufferWidth,c.drawingBufferHeight),"default"===this._curCamera&&(this._curCamera=null,this._setDefaultCamera())},d.RendererGL.prototype.clear=function(){var a=this.GL;a.clearColor(arguments[0],arguments[1],arguments[2],arguments[3]),a.clear(a.COLOR_BUFFER_BIT|a.DEPTH_BUFFER_BIT)},d.RendererGL.prototype.translate=function(a,b,c){return a/=g,b=-b/g,c/=g,this.uMVMatrix.translate([a,b,c]),this},d.RendererGL.prototype.scale=function(a,b,c){return this.uMVMatrix.scale([a,b,c]),this},d.RendererGL.prototype.rotate=function(a,b){return this.uMVMatrix.rotate(a,b),this},d.RendererGL.prototype.rotateX=function(a){return this.rotate(a,[1,0,0]),this},d.RendererGL.prototype.rotateY=function(a){return this.rotate(a,[0,1,0]),this},d.RendererGL.prototype.rotateZ=function(a){return this.rotate(a,[0,0,1]),this},d.RendererGL.prototype.push=function(){f.push(this.uMVMatrix.copy())},d.RendererGL.prototype.pop=function(){if(0===f.length)throw new Error("Invalid popMatrix!");this.uMVMatrix=f.pop()},d.RendererGL.prototype.resetMatrix=function(){return this.uMVMatrix=d.Matrix.identity(),this.translate(0,0,-800),this},d.RendererGL.prototype._applyTextProperties=function(){console.error("text commands not yet implemented in webgl")},b.exports=d.RendererGL},{"../core/core":37,"../core/p5.Renderer":43,"./p5.Matrix":83,"./shader":88}],87:[function(a,b,c){"use strict";var d=a("../core/core");a("./p5.Geometry"),d.prototype.plane=function(){for(var a=new Array(arguments.length),b=0;bj;j++){var k=h[j],l=new d.Vector((2*(1&k)-1)*c/2,((2&k)-1)*e/2,((4&k)/2-1)*f/2);this.vertices.push(l),this.uvs.push([1&j,(2&j)/2]),b++}this.faces.push([i,i+1,i+2]),this.faces.push([i+2,i+1,i+3])}},k=new d.Geometry(g,h,j);k.computeNormals(),this._renderer.createBuffers(i,k)}return this._renderer.drawBuffers(i),this},d.prototype.sphere=function(){for(var a=new Array(arguments.length),b=0;be?3:e,f=1>f?1:f,g=void 0===g?!0:g,h=void 0===h?!0:h;var i,j,k=(g?2:0)+(h?2:0),l=e+1,m=Math.atan2(a-b,c),n=g?-2:0,o=f+(h?2:0);for(i=n;o>=i;++i){var p,q=i/f,r=c*q;for(0>i?(r=0,q=1,p=a):i>f?(r=c,q=1,p=b):p=a+(b-a)*(i/f),(-2===i||i===f+2)&&(p=0,q=0),r-=c/2,j=0;l>j;++j)this.vertices.push(new d.Vector(Math.sin(j*Math.PI*2/e)*p,r,Math.cos(j*Math.PI*2/e)*p)),this.vertexNormals.push(new d.Vector(0>i||i>f?0:Math.sin(j*Math.PI*2/e)*Math.cos(m),0>i?-1:i>f?1:Math.sin(m),0>i||i>f?0:Math.cos(j*Math.PI*2/e)*Math.cos(m))),this.uvs.push([j/e,q])}for(i=0;f+k>i;++i)for(j=0;e>j;++j)this.faces.push([l*(i+0)+0+j,l*(i+0)+1+j,l*(i+1)+1+j]),this.faces.push([l*(i+0)+0+j,l*(i+1)+1+j,l*(i+1)+0+j])};d.prototype.cylinder=function(){for(var a=new Array(arguments.length),b=0;b=e;e++)c[0]=Math.pow(1-e/b,3),c[1]=3*(e/b)*Math.pow(1-e/b,2),c[2]=3*Math.pow(e/b,2)*(1-e/b),c[3]=Math.pow(e/b,3),d[0]=a[0]*c[0]+a[3]*c[1]+a[6]*c[2]+a[9]*c[3],d[1]=a[1]*c[0]+a[4]*c[1]+a[7]*c[2]+a[10]*c[3],d[2]=a[2]*c[0]+a[5]*c[1]+a[8]*c[2]+a[11]*c[3],this.vertex(d[0],d[1],d[2]);return this.endShape(),this},d.RendererGL.prototype.curve=function(a){var 
b=a[12];this.beginShape();for(var c=[0,0,0,0],d=[0,0,0],e=0;b>=e;e++)c[0]=.5*Math.pow(e/b,3),c[1]=.5*Math.pow(e/b,2),c[2]=e/b*.5,c[3]=.5,d[0]=c[0]*(-a[0]+3*a[3]-3*a[6]+a[9])+c[1]*(2*a[0]-5*a[3]+4*a[6]-a[9])+c[2]*(-a[0]+a[6])+2*c[3]*a[3],d[1]=c[0]*(-a[1]+3*a[4]-3*a[7]+a[10])+c[1]*(2*a[1]-5*a[4]+4*a[7]-a[10])+c[2]*(-a[1]+a[7])+2*c[3]*a[4],d[2]=c[0]*(-a[2]+3*a[5]-3*a[8]+a[11])+c[1]*(2*a[2]-5*a[5]+4*a[8]-a[11])+c[2]*(-a[2]+a[8])+2*c[3]*a[5],this.vertex(d[0],d[1],d[2]);return this.endShape(),this},b.exports=d},{"../core/core":37,"./p5.Geometry":82}],88:[function(a,b,c){b.exports={immediateVert:"attribute vec3 aPosition;\nattribute vec4 aVertexColor;\n\nuniform mat4 uModelViewMatrix;\nuniform mat4 uProjectionMatrix;\nuniform float uResolution;\nuniform float uPointSize;\n\nvarying vec4 vColor;\nvoid main(void) {\n vec4 positionVec4 = vec4(aPosition / uResolution *vec3(1.0, -1.0, 1.0), 1.0);\n gl_Position = uProjectionMatrix * uModelViewMatrix * positionVec4;\n vColor = aVertexColor;\n gl_PointSize = uPointSize;\n}\n",vertexColorVert:"attribute vec3 aPosition;\nattribute vec4 aVertexColor;\n\nuniform mat4 uModelViewMatrix;\nuniform mat4 uProjectionMatrix;\nuniform float uResolution;\n\nvarying vec4 vColor;\n\nvoid main(void) {\n vec4 positionVec4 = vec4(aPosition / uResolution * vec3(1.0, -1.0, 1.0), 1.0);\n gl_Position = uProjectionMatrix * uModelViewMatrix * positionVec4;\n vColor = aVertexColor;\n}\n",vertexColorFrag:"precision mediump float;\nvarying vec4 vColor;\nvoid main(void) {\n gl_FragColor = vColor;\n}",normalVert:"attribute vec3 aPosition;\nattribute vec3 aNormal;\nattribute vec2 aTexCoord;\n\nuniform mat4 uModelViewMatrix;\nuniform mat4 uProjectionMatrix;\nuniform mat3 uNormalMatrix;\nuniform float uResolution;\n\nvarying vec3 vVertexNormal;\nvarying highp vec2 vVertTexCoord;\n\nvoid main(void) {\n vec4 positionVec4 = vec4(aPosition / uResolution * vec3(1.0, -1.0, 1.0), 1.0);\n gl_Position = uProjectionMatrix * uModelViewMatrix * positionVec4;\n vVertexNormal = vec3( uNormalMatrix * aNormal );\n vVertTexCoord = aTexCoord;\n}\n",normalFrag:"precision mediump float;\nvarying vec3 vVertexNormal;\nvoid main(void) {\n gl_FragColor = vec4(vVertexNormal, 1.0);\n}",basicFrag:"precision mediump float;\nvarying vec3 vVertexNormal;\nuniform vec4 uMaterialColor;\nvoid main(void) {\n gl_FragColor = uMaterialColor;\n}",lightVert:"attribute vec3 aPosition;\nattribute vec3 aNormal;\nattribute vec2 aTexCoord;\n\nuniform mat4 uModelViewMatrix;\nuniform mat4 uProjectionMatrix;\nuniform mat3 uNormalMatrix;\nuniform float uResolution;\nuniform int uAmbientLightCount;\nuniform int uDirectionalLightCount;\nuniform int uPointLightCount;\n\nuniform vec3 uAmbientColor[8];\nuniform vec3 uLightingDirection[8];\nuniform vec3 uDirectionalColor[8];\nuniform vec3 uPointLightLocation[8];\nuniform vec3 uPointLightColor[8];\nuniform bool uSpecular;\n\nvarying vec3 vVertexNormal;\nvarying vec2 vVertTexCoord;\nvarying vec3 vLightWeighting;\n\nvec3 ambientLightFactor = vec3(0.0, 0.0, 0.0);\nvec3 directionalLightFactor = vec3(0.0, 0.0, 0.0);\nvec3 pointLightFactor = vec3(0.0, 0.0, 0.0);\nvec3 pointLightFactor2 = vec3(0.0, 0.0, 0.0);\n\nvoid main(void){\n\n vec4 positionVec4 = vec4(aPosition / uResolution, 1.0);\n gl_Position = uProjectionMatrix * uModelViewMatrix * positionVec4;\n\n vec3 vertexNormal = vec3( uNormalMatrix * aNormal );\n vVertexNormal = vertexNormal;\n vVertTexCoord = aTexCoord;\n\n vec4 mvPosition = uModelViewMatrix * vec4(aPosition / uResolution, 1.0);\n vec3 eyeDirection = 
normalize(-mvPosition.xyz);\n\n float shininess = 32.0;\n float specularFactor = 2.0;\n float diffuseFactor = 0.3;\n\n for(int i = 0; i < 8; i++){\n if(uAmbientLightCount == i) break;\n ambientLightFactor += uAmbientColor[i];\n }\n\n for(int j = 0; j < 8; j++){\n if(uDirectionalLightCount == j) break;\n vec3 dir = uLightingDirection[j];\n float directionalLightWeighting = max(dot(vertexNormal, dir), 0.0);\n directionalLightFactor += uDirectionalColor[j] * directionalLightWeighting;\n }\n\n for(int k = 0; k < 8; k++){\n if(uPointLightCount == k) break;\n vec3 loc = uPointLightLocation[k];\n //loc = loc / uResolution;\n vec3 lightDirection = normalize(loc - mvPosition.xyz);\n\n float directionalLightWeighting = max(dot(vertexNormal, lightDirection), 0.0);\n pointLightFactor += uPointLightColor[k] * directionalLightWeighting;\n\n //factor2 for specular\n vec3 reflectionDirection = reflect(-lightDirection, vertexNormal);\n float specularLightWeighting = pow(max(dot(reflectionDirection, eyeDirection), 0.0), shininess);\n\n pointLightFactor2 += uPointLightColor[k] * (specularFactor * specularLightWeighting\n + directionalLightWeighting * diffuseFactor);\n }\n\n if(!uSpecular){\n vLightWeighting = ambientLightFactor + directionalLightFactor + pointLightFactor;\n }else{\n vLightWeighting = ambientLightFactor + directionalLightFactor + pointLightFactor2;\n }\n\n}\n",lightTextureFrag:"precision mediump float;\n\nuniform vec4 uMaterialColor;\nuniform sampler2D uSampler;\nuniform bool isTexture;\n\nvarying vec3 vLightWeighting;\nvarying highp vec2 vVertTexCoord;\n\nvoid main(void) {\n if(!isTexture){\n gl_FragColor = vec4(vec3(uMaterialColor.rgb * vLightWeighting), uMaterialColor.a);\n }else{\n vec4 textureColor = texture2D(uSampler, vVertTexCoord);\n if(vLightWeighting == vec3(0., 0., 0.)){\n gl_FragColor = textureColor;\n }else{\n gl_FragColor = vec4(vec3(textureColor.rgb * vLightWeighting), textureColor.a); \n }\n }\n}"}},{}]},{},[28])(28)}); \ No newline at end of file diff --git a/brendafernanda-anna/p5.sound.js b/brendafernanda-anna/p5.sound.js new file mode 100644 index 0000000..5f9e6b1 --- /dev/null +++ b/brendafernanda-anna/p5.sound.js @@ -0,0 +1,11441 @@ +/*! p5.sound.js v0.3.5 2017-08-14 */ +/** + * p5.sound extends p5 with Web Audio functionality including audio input, + * playback, analysis and synthesis. + *

+ * p5.SoundFile: Load and play sound files.
+ * p5.Amplitude: Get the current volume of a sound.
+ * p5.AudioIn: Get sound from an input source, typically + * a computer microphone.
+ * p5.FFT: Analyze the frequency of sound. Returns + * results from the frequency spectrum or time domain (waveform).
+ * p5.Oscillator: Generate Sine, + * Triangle, Square and Sawtooth waveforms. Base class of + * p5.Noise and p5.Pulse. + *
+ * p5.Env: An Envelope is a series + * of fades over time. Often used to control an object's + * output gain level as an "ADSR Envelope" (Attack, Decay, + * Sustain, Release). Can also modulate other parameters.
+ * p5.Delay: A delay effect with + * parameters for feedback, delayTime, and lowpass filter.
+ * p5.Filter: Filter the frequency range of a + * sound. + *
+ * p5.Reverb: Add reverb to a sound by specifying + * duration and decay.
+ * p5.Convolver: Extends + * p5.Reverb to simulate the sound of real + * physical spaces through convolution.
+ * p5.SoundRecorder: Record sound for playback or save it as a .wav file. + * p5.Phrase, p5.Part and + * p5.Score: Compose musical sequences. + *
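+ * Editor's note: the following is an illustrative global-mode sketch, not part of the
+ * original library docs, showing one of the classes listed above (p5.Oscillator);
+ * the 440 Hz frequency and 0.5 amplitude are arbitrary example values.
+ * var osc;
+ * function setup() {
+ *   createCanvas(100, 100);
+ *   osc = new p5.Oscillator('sine'); // 'triangle', 'square' and 'sawtooth' also work
+ *   osc.freq(440);
+ *   osc.amp(0.5);
+ *   osc.start();
+ * }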

+ * p5.sound is on GitHub. + * Download the latest version + * here. + * + * @module p5.sound + * @submodule p5.sound + * @for p5.sound + * @main + */ + +/** + * p5.sound + * https://p5js.org/reference/#/libraries/p5.sound + * + * From the Processing Foundation and contributors + * https://github.com/processing/p5.js-sound/graphs/contributors + * + * MIT License (MIT) + * https://github.com/processing/p5.js-sound/blob/master/LICENSE + * + * Some of the many audio libraries & resources that inspire p5.sound: + * - TONE.js (c) Yotam Mann. Licensed under The MIT License (MIT). https://github.com/TONEnoTONE/Tone.js + * - buzz.js (c) Jay Salvat. Licensed under The MIT License (MIT). http://buzz.jaysalvat.com/ + * - Boris Smus Web Audio API book, 2013. Licensed under the Apache License http://www.apache.org/licenses/LICENSE-2.0 + * - wavesurfer.js https://github.com/katspaugh/wavesurfer.js + * - Web Audio Components by Jordan Santell https://github.com/web-audio-components + * - Wilm Thoben's Sound library for Processing https://github.com/processing/processing/tree/master/java/libraries/sound + * + * Web Audio API: http://w3.org/TR/webaudio/ + */ + +(function (root, factory) { + if (typeof define === 'function' && define.amd) + define('p5.sound', ['p5'], function (p5) { (factory(p5));}); + else if (typeof exports === 'object') + factory(require('../p5')); + else + factory(root['p5']); +}(this, function (p5) { + +var sndcore; +'use strict'; +sndcore = function () { + /* AudioContext Monkeypatch + Copyright 2013 Chris Wilson + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + */ + (function () { + function fixSetTarget(param) { + if (!param) + // if NYI, just return + return; + if (!param.setTargetAtTime) + param.setTargetAtTime = param.setTargetValueAtTime; + } + if (window.hasOwnProperty('webkitAudioContext') && !window.hasOwnProperty('AudioContext')) { + window.AudioContext = window.webkitAudioContext; + if (typeof AudioContext.prototype.createGain !== 'function') + AudioContext.prototype.createGain = AudioContext.prototype.createGainNode; + if (typeof AudioContext.prototype.createDelay !== 'function') + AudioContext.prototype.createDelay = AudioContext.prototype.createDelayNode; + if (typeof AudioContext.prototype.createScriptProcessor !== 'function') + AudioContext.prototype.createScriptProcessor = AudioContext.prototype.createJavaScriptNode; + if (typeof AudioContext.prototype.createPeriodicWave !== 'function') + AudioContext.prototype.createPeriodicWave = AudioContext.prototype.createWaveTable; + AudioContext.prototype.internal_createGain = AudioContext.prototype.createGain; + AudioContext.prototype.createGain = function () { + var node = this.internal_createGain(); + fixSetTarget(node.gain); + return node; + }; + AudioContext.prototype.internal_createDelay = AudioContext.prototype.createDelay; + AudioContext.prototype.createDelay = function (maxDelayTime) { + var node = maxDelayTime ? 
this.internal_createDelay(maxDelayTime) : this.internal_createDelay(); + fixSetTarget(node.delayTime); + return node; + }; + AudioContext.prototype.internal_createBufferSource = AudioContext.prototype.createBufferSource; + AudioContext.prototype.createBufferSource = function () { + var node = this.internal_createBufferSource(); + if (!node.start) { + node.start = function (when, offset, duration) { + if (offset || duration) + this.noteGrainOn(when || 0, offset, duration); + else + this.noteOn(when || 0); + }; + } else { + node.internal_start = node.start; + node.start = function (when, offset, duration) { + if (typeof duration !== 'undefined') + node.internal_start(when || 0, offset, duration); + else + node.internal_start(when || 0, offset || 0); + }; + } + if (!node.stop) { + node.stop = function (when) { + this.noteOff(when || 0); + }; + } else { + node.internal_stop = node.stop; + node.stop = function (when) { + node.internal_stop(when || 0); + }; + } + fixSetTarget(node.playbackRate); + return node; + }; + AudioContext.prototype.internal_createDynamicsCompressor = AudioContext.prototype.createDynamicsCompressor; + AudioContext.prototype.createDynamicsCompressor = function () { + var node = this.internal_createDynamicsCompressor(); + fixSetTarget(node.threshold); + fixSetTarget(node.knee); + fixSetTarget(node.ratio); + fixSetTarget(node.reduction); + fixSetTarget(node.attack); + fixSetTarget(node.release); + return node; + }; + AudioContext.prototype.internal_createBiquadFilter = AudioContext.prototype.createBiquadFilter; + AudioContext.prototype.createBiquadFilter = function () { + var node = this.internal_createBiquadFilter(); + fixSetTarget(node.frequency); + fixSetTarget(node.detune); + fixSetTarget(node.Q); + fixSetTarget(node.gain); + return node; + }; + if (typeof AudioContext.prototype.createOscillator !== 'function') { + AudioContext.prototype.internal_createOscillator = AudioContext.prototype.createOscillator; + AudioContext.prototype.createOscillator = function () { + var node = this.internal_createOscillator(); + if (!node.start) { + node.start = function (when) { + this.noteOn(when || 0); + }; + } else { + node.internal_start = node.start; + node.start = function (when) { + node.internal_start(when || 0); + }; + } + if (!node.stop) { + node.stop = function (when) { + this.noteOff(when || 0); + }; + } else { + node.internal_stop = node.stop; + node.stop = function (when) { + node.internal_stop(when || 0); + }; + } + if (!node.setPeriodicWave) + node.setPeriodicWave = node.setWaveTable; + fixSetTarget(node.frequency); + fixSetTarget(node.detune); + return node; + }; + } + } + if (window.hasOwnProperty('webkitOfflineAudioContext') && !window.hasOwnProperty('OfflineAudioContext')) { + window.OfflineAudioContext = window.webkitOfflineAudioContext; + } + }(window)); + // <-- end MonkeyPatch. + // Create the Audio Context + var audiocontext = new window.AudioContext(); + /** + *

Returns the Audio Context for this sketch. Useful for users + * who would like to dig deeper into the Web Audio API.
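+ * Editor's note: a minimal illustrative use (not from the original docs), e.g. to
+ * inspect the context's sample rate from a global-mode sketch:
+ * function setup() {
+ *   var ctx = getAudioContext(); // the shared Web Audio AudioContext
+ *   console.log(ctx.sampleRate); // typically 44100 or 48000
+ * }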

+ * + * @method getAudioContext + * @return {Object} AudioContext for this sketch + */ + p5.prototype.getAudioContext = function () { + return audiocontext; + }; + // Polyfill for AudioIn, also handled by p5.dom createCapture + navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia; + /** + * Determine which filetypes are supported (inspired by buzz.js) + * The audio element (el) will only be used to test browser support for various audio formats + */ + var el = document.createElement('audio'); + p5.prototype.isSupported = function () { + return !!el.canPlayType; + }; + var isOGGSupported = function () { + return !!el.canPlayType && el.canPlayType('audio/ogg; codecs="vorbis"'); + }; + var isMP3Supported = function () { + return !!el.canPlayType && el.canPlayType('audio/mpeg;'); + }; + var isWAVSupported = function () { + return !!el.canPlayType && el.canPlayType('audio/wav; codecs="1"'); + }; + var isAACSupported = function () { + return !!el.canPlayType && (el.canPlayType('audio/x-m4a;') || el.canPlayType('audio/aac;')); + }; + var isAIFSupported = function () { + return !!el.canPlayType && el.canPlayType('audio/x-aiff;'); + }; + p5.prototype.isFileSupported = function (extension) { + switch (extension.toLowerCase()) { + case 'mp3': + return isMP3Supported(); + case 'wav': + return isWAVSupported(); + case 'ogg': + return isOGGSupported(); + case 'aac': + case 'm4a': + case 'mp4': + return isAACSupported(); + case 'aif': + case 'aiff': + return isAIFSupported(); + default: + return false; + } + }; + // if it is iOS, we have to have a user interaction to start Web Audio + // http://paulbakaus.com/tutorials/html5/web-audio-on-ios/ + var iOS = navigator.userAgent.match(/(iPad|iPhone|iPod)/g) ? true : false; + if (iOS) { + var iosStarted = false; + var startIOS = function () { + if (iosStarted) + return; + // create empty buffer + var buffer = audiocontext.createBuffer(1, 1, 22050); + var source = audiocontext.createBufferSource(); + source.buffer = buffer; + // connect to output (your speakers) + source.connect(audiocontext.destination); + // play the file + source.start(0); + console.log('start ios!'); + if (audiocontext.state === 'running') { + iosStarted = true; + } + }; + document.addEventListener('touchend', startIOS, false); + document.addEventListener('touchstart', startIOS, false); + } +}(); +var master; +'use strict'; +master = function () { + /** + * Master contains AudioContext and the master sound output. 
+ */ + var Master = function () { + var audiocontext = p5.prototype.getAudioContext(); + this.input = audiocontext.createGain(); + this.output = audiocontext.createGain(); + //put a hard limiter on the output + this.limiter = audiocontext.createDynamicsCompressor(); + this.limiter.threshold.value = 0; + this.limiter.ratio.value = 20; + this.audiocontext = audiocontext; + this.output.disconnect(); + // an array of input sources + this.inputSources = []; + // connect input to limiter + this.input.connect(this.limiter); + // connect limiter to output + this.limiter.connect(this.output); + // meter is just for global Amplitude / FFT analysis + this.meter = audiocontext.createGain(); + this.fftMeter = audiocontext.createGain(); + this.output.connect(this.meter); + this.output.connect(this.fftMeter); + // connect output to destination + this.output.connect(this.audiocontext.destination); + // an array of all sounds in the sketch + this.soundArray = []; + // an array of all musical parts in the sketch + this.parts = []; + // file extensions to search for + this.extensions = []; + }; + // create a single instance of the p5Sound / master output for use within this sketch + var p5sound = new Master(); + /** + * Returns a number representing the master amplitude (volume) for sound + * in this sketch. + * + * @method getMasterVolume + * @return {Number} Master amplitude (volume) for sound in this sketch. + * Should be between 0.0 (silence) and 1.0. + */ + p5.prototype.getMasterVolume = function () { + return p5sound.output.gain.value; + }; + /** + *

Scale the output of all sound in this sketch

+ * Scaled between 0.0 (silence) and 1.0 (full volume). + * 1.0 is the maximum amplitude of a digital sound, so multiplying + * by greater than 1.0 may cause digital distortion. To + * fade, provide a rampTime parameter. For more + * complex fades, see the Env class. + * + * Alternately, you can pass in a signal source such as an + * oscillator to modulate the amplitude with an audio signal. + * + *

How This Works: When you load the p5.sound module, it + * creates a single instance of p5sound. All sound objects in this + * module output to p5sound before reaching your computer's output. + * So if you change the amplitude of p5sound, it impacts all of the + * sound in this module.

+ * + *

If no value is provided, returns a Web Audio API Gain Node
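+ * Editor's note: a brief illustrative sketch (not part of the original docs); the
+ * 0.2 target volume and 1-second ramp are arbitrary example values.
+ * function mousePressed() {
+ *   // fade everything routed through p5.sound to 20% volume over 1 second
+ *   masterVolume(0.2, 1.0);
+ * }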

+ * + * @method masterVolume + * @param {Number|Object} volume Volume (amplitude) between 0.0 + * and 1.0 or modulating signal/oscillator + * @param {Number} [rampTime] Fade for t seconds + * @param {Number} [timeFromNow] Schedule this event to happen at + * t seconds in the future + */ + p5.prototype.masterVolume = function (vol, rampTime, tFromNow) { + if (typeof vol === 'number') { + var rampTime = rampTime || 0; + var tFromNow = tFromNow || 0; + var now = p5sound.audiocontext.currentTime; + var currentVol = p5sound.output.gain.value; + p5sound.output.gain.cancelScheduledValues(now + tFromNow); + p5sound.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow); + p5sound.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime); + } else if (vol) { + vol.connect(p5sound.output.gain); + } else { + // return the Gain Node + return p5sound.output.gain; + } + }; + /** + * `p5.soundOut` is the p5.sound master output. It sends output to + * the destination of this window's web audio context. It contains + * Web Audio API nodes including a dyanmicsCompressor (.limiter), + * and Gain Nodes for .input and .output. + * + * @property {Object} soundOut + */ + p5.prototype.soundOut = p5.soundOut = p5sound; + /** + * a silent connection to the DesinationNode + * which will ensure that anything connected to it + * will not be garbage collected + * + * @private + */ + p5.soundOut._silentNode = p5sound.audiocontext.createGain(); + p5.soundOut._silentNode.gain.value = 0; + p5.soundOut._silentNode.connect(p5sound.audiocontext.destination); + return p5sound; +}(); +var helpers; +'use strict'; +helpers = function () { + var p5sound = master; + /** + * @class p5 + */ + /** + * Returns a number representing the sample rate, in samples per second, + * of all sound objects in this audio context. It is determined by the + * sampling rate of your operating system's sound card, and it is not + * currently possile to change. + * It is often 44100, or twice the range of human hearing. + * + * @method sampleRate + * @return {Number} samplerate samples per second + */ + p5.prototype.sampleRate = function () { + return p5sound.audiocontext.sampleRate; + }; + /** + * Returns the closest MIDI note value for + * a given frequency. + * + * @param {Number} frequency A freqeuncy, for example, the "A" + * above Middle C is 440Hz + * @return {Number} MIDI note value + */ + p5.prototype.freqToMidi = function (f) { + var mathlog2 = Math.log(f / 440) / Math.log(2); + var m = Math.round(12 * mathlog2) + 69; + return m; + }; + /** + * Returns the frequency value of a MIDI note value. + * General MIDI treats notes as integers where middle C + * is 60, C# is 61, D is 62 etc. Useful for generating + * musical frequencies with oscillators. + * + * @method midiToFreq + * @param {Number} midiNote The number of a MIDI note + * @return {Number} Frequency value of the given MIDI note + * @example + *
+ * var notes = [60, 64, 67, 72]; + * var i = 0; + * + * function setup() { + * osc = new p5.Oscillator('Triangle'); + * osc.start(); + * frameRate(1); + * } + * + * function draw() { + * var freq = midiToFreq(notes[i]); + * osc.freq(freq); + * i++; + * if (i >= notes.length){ + * i = 0; + * } + * } + *
+ */ + p5.prototype.midiToFreq = function (m) { + return 440 * Math.pow(2, (m - 69) / 12); + }; + /** + * List the SoundFile formats that you will include. LoadSound + * will search your directory for these extensions, and will pick + * a format that is compatable with the client's web browser. + * Here is a free online file + * converter. + * + * @method soundFormats + * @param {String} [...formats] i.e. 'mp3', 'wav', 'ogg' + * @example + *
+ * function preload() { + * // set the global sound formats + * soundFormats('mp3', 'ogg'); + * + * // load either beatbox.mp3, or .ogg, depending on browser + * mySound = loadSound('assets/beatbox.mp3'); + * } + * + * function setup() { + * mySound.play(); + * } + *
+ */ + p5.prototype.soundFormats = function () { + // reset extensions array + p5sound.extensions = []; + // add extensions + for (var i = 0; i < arguments.length; i++) { + arguments[i] = arguments[i].toLowerCase(); + if ([ + 'mp3', + 'wav', + 'ogg', + 'm4a', + 'aac' + ].indexOf(arguments[i]) > -1) { + p5sound.extensions.push(arguments[i]); + } else { + throw arguments[i] + ' is not a valid sound format!'; + } + } + }; + p5.prototype.disposeSound = function () { + for (var i = 0; i < p5sound.soundArray.length; i++) { + p5sound.soundArray[i].dispose(); + } + }; + // register removeSound to dispose of p5sound SoundFiles, Convolvers, + // Oscillators etc when sketch ends + p5.prototype.registerMethod('remove', p5.prototype.disposeSound); + p5.prototype._checkFileFormats = function (paths) { + var path; + // if path is a single string, check to see if extension is provided + if (typeof paths === 'string') { + path = paths; + // see if extension is provided + var extTest = path.split('.').pop(); + // if an extension is provided... + if ([ + 'mp3', + 'wav', + 'ogg', + 'm4a', + 'aac' + ].indexOf(extTest) > -1) { + if (p5.prototype.isFileSupported(extTest)) { + path = path; + } else { + var pathSplit = path.split('.'); + var pathCore = pathSplit[pathSplit.length - 1]; + for (var i = 0; i < p5sound.extensions.length; i++) { + var extension = p5sound.extensions[i]; + var supported = p5.prototype.isFileSupported(extension); + if (supported) { + pathCore = ''; + if (pathSplit.length === 2) { + pathCore += pathSplit[0]; + } + for (var i = 1; i <= pathSplit.length - 2; i++) { + var p = pathSplit[i]; + pathCore += '.' + p; + } + path = pathCore += '.'; + path = path += extension; + break; + } + } + } + } else { + for (var i = 0; i < p5sound.extensions.length; i++) { + var extension = p5sound.extensions[i]; + var supported = p5.prototype.isFileSupported(extension); + if (supported) { + path = path + '.' + extension; + break; + } + } + } + } else if (typeof paths === 'object') { + for (var i = 0; i < paths.length; i++) { + var extension = paths[i].split('.').pop(); + var supported = p5.prototype.isFileSupported(extension); + if (supported) { + // console.log('.'+extension + ' is ' + supported + + // ' supported by your browser.'); + path = paths[i]; + break; + } + } + } + return path; + }; + /** + * Used by Osc and Env to chain signal math + */ + p5.prototype._mathChain = function (o, math, thisChain, nextChain, type) { + // if this type of math already exists in the chain, replace it + for (var i in o.mathOps) { + if (o.mathOps[i] instanceof type) { + o.mathOps[i].dispose(); + thisChain = i; + if (thisChain < o.mathOps.length - 1) { + nextChain = o.mathOps[i + 1]; + } + } + } + o.mathOps[thisChain - 1].disconnect(); + o.mathOps[thisChain - 1].connect(math); + math.connect(nextChain); + o.mathOps[thisChain] = math; + return o; + }; + return { midiToFreq: p5.prototype.midiToFreq }; +}(master); +var errorHandler; +'use strict'; +errorHandler = function () { + /* + Helper function to generate an error + with a custom stack trace that points to the sketch + and removes other parts of the stack trace. + + @private + @class customError + @constructor + @param {String} name custom error name + @param {String} errorTrace custom error trace + @param {String} failedPath path to the file that failed to load + @property {String} name custom error name + @property {String} message custom error message + @property {String} stack trace the error back to a line in the user's sketch. 
+ Note: this edits out stack trace within p5.js and p5.sound. + @property {String} originalStack unedited, original stack trace + @property {String} failedPath path to the file that failed to load + @return {Error} returns a custom Error object + */ + var CustomError = function (name, errorTrace, failedPath) { + var err = new Error(); + var tempStack, splitStack; + err.name = name; + err.originalStack = err.stack + errorTrace; + tempStack = err.stack + errorTrace; + err.failedPath = failedPath; + // only print the part of the stack trace that refers to the user code: + var splitStack = tempStack.split('\n'); + splitStack = splitStack.filter(function (ln) { + return !ln.match(/(p5.|native code|globalInit)/g); + }); + err.stack = splitStack.join('\n'); + return err; + }; + return CustomError; +}(); +var panner; +'use strict'; +panner = function () { + var p5sound = master; + var ac = p5sound.audiocontext; + // Stereo panner + // if there is a stereo panner node use it + if (typeof ac.createStereoPanner !== 'undefined') { + p5.Panner = function (input, output) { + this.stereoPanner = this.input = ac.createStereoPanner(); + input.connect(this.stereoPanner); + this.stereoPanner.connect(output); + }; + p5.Panner.prototype.pan = function (val, tFromNow) { + var time = tFromNow || 0; + var t = ac.currentTime + time; + this.stereoPanner.pan.linearRampToValueAtTime(val, t); + }; + //not implemented because stereopanner + //node does not require this and will automatically + //convert single channel or multichannel to stereo. + //tested with single and stereo, not with (>2) multichannel + p5.Panner.prototype.inputChannels = function () { + }; + p5.Panner.prototype.connect = function (obj) { + this.stereoPanner.connect(obj); + }; + p5.Panner.prototype.disconnect = function () { + this.stereoPanner.disconnect(); + }; + } else { + // if there is no createStereoPanner object + // such as in safari 7.1.7 at the time of writing this + // use this method to create the effect + p5.Panner = function (input, output, numInputChannels) { + this.input = ac.createGain(); + input.connect(this.input); + this.left = ac.createGain(); + this.right = ac.createGain(); + this.left.channelInterpretation = 'discrete'; + this.right.channelInterpretation = 'discrete'; + // if input is stereo + if (numInputChannels > 1) { + this.splitter = ac.createChannelSplitter(2); + this.input.connect(this.splitter); + this.splitter.connect(this.left, 1); + this.splitter.connect(this.right, 0); + } else { + this.input.connect(this.left); + this.input.connect(this.right); + } + this.output = ac.createChannelMerger(2); + this.left.connect(this.output, 0, 1); + this.right.connect(this.output, 0, 0); + this.output.connect(output); + }; + // -1 is left, +1 is right + p5.Panner.prototype.pan = function (val, tFromNow) { + var time = tFromNow || 0; + var t = ac.currentTime + time; + var v = (val + 1) / 2; + var rightVal = Math.cos(v * Math.PI / 2); + var leftVal = Math.sin(v * Math.PI / 2); + this.left.gain.linearRampToValueAtTime(leftVal, t); + this.right.gain.linearRampToValueAtTime(rightVal, t); + }; + p5.Panner.prototype.inputChannels = function (numChannels) { + if (numChannels === 1) { + this.input.disconnect(); + this.input.connect(this.left); + this.input.connect(this.right); + } else if (numChannels === 2) { + if (typeof (this.splitter === 'undefined')) { + this.splitter = ac.createChannelSplitter(2); + } + this.input.disconnect(); + this.input.connect(this.splitter); + this.splitter.connect(this.left, 1); + 
this.splitter.connect(this.right, 0); + } + }; + p5.Panner.prototype.connect = function (obj) { + this.output.connect(obj); + }; + p5.Panner.prototype.disconnect = function () { + this.output.disconnect(); + }; + } +}(master); +var soundfile; +'use strict'; +soundfile = function () { + var CustomError = errorHandler; + var p5sound = master; + var ac = p5sound.audiocontext; + var midiToFreq = helpers.midiToFreq; + /** + *

SoundFile object with a path to a file.

+ * + *

The p5.SoundFile may not be available immediately because + * it loads the file information asynchronously.

+ * + *

To do something with the sound as soon as it loads, + * pass the name of a function as the second parameter.

+ * + *

Only one file path is required. However, audio file formats + * (i.e. mp3, ogg, wav and m4a/aac) are not supported by all + * web browsers. If you want to ensure compatibility, instead of a single + * file path, you may include an Array of filepaths, and the browser will + * choose a format that works.
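+ * Editor's note: an illustrative preload (not part of the original docs; the asset
+ * file names are hypothetical) using the Array form described above, so the browser
+ * can pick whichever format it supports:
+ * var snd;
+ * function preload() {
+ *   snd = loadSound(['assets/tune.ogg', 'assets/tune.mp3']);
+ * }
+ * function setup() {
+ *   snd.play();
+ * }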

+ * + * @class p5.SoundFile + * @constructor + * @param {String|Array} path path to a sound file (String). Optionally, + * you may include multiple file formats in + * an array. Alternately, accepts an object + * from the HTML5 File API, or a p5.File. + * @param {Function} [successCallback] Name of a function to call once file loads + * @param {Function} [errorCallback] Name of a function to call if file fails to + * load. This function will receive an error or + * XMLHttpRequest object with information + * about what went wrong. + * @param {Function} [whileLoadingCallback] Name of a function to call while file + * is loading. That function will + * receive progress of the request to + * load the sound file + * (between 0 and 1) as its first + * parameter. This progress + * does not account for the additional + * time needed to decode the audio data. + * + * @example + *
+ * + * function preload() { + * soundFormats('mp3', 'ogg'); + * mySound = loadSound('assets/doorbell.mp3'); + * } + * + * function setup() { + * mySound.setVolume(0.1); + * mySound.play(); + * } + * + *
+ */ + p5.SoundFile = function (paths, onload, onerror, whileLoading) { + if (typeof paths !== 'undefined') { + if (typeof paths === 'string' || typeof paths[0] === 'string') { + var path = p5.prototype._checkFileFormats(paths); + this.url = path; + } else if (typeof paths === 'object') { + if (!(window.File && window.FileReader && window.FileList && window.Blob)) { + // The File API isn't supported in this browser + throw 'Unable to load file because the File API is not supported'; + } + } + // if type is a p5.File...get the actual file + if (paths.file) { + paths = paths.file; + } + this.file = paths; + } + // private _onended callback, set by the method: onended(callback) + this._onended = function () { + }; + this._looping = false; + this._playing = false; + this._paused = false; + this._pauseTime = 0; + // cues for scheduling events with addCue() removeCue() + this._cues = []; + // position of the most recently played sample + this._lastPos = 0; + this._counterNode = null; + this._scopeNode = null; + // array of sources so that they can all be stopped! + this.bufferSourceNodes = []; + // current source + this.bufferSourceNode = null; + this.buffer = null; + this.playbackRate = 1; + this.input = p5sound.audiocontext.createGain(); + this.output = p5sound.audiocontext.createGain(); + this.reversed = false; + // start and end of playback / loop + this.startTime = 0; + this.endTime = null; + this.pauseTime = 0; + // "restart" would stop playback before retriggering + this.mode = 'sustain'; + // time that playback was started, in millis + this.startMillis = null; + // stereo panning + this.panPosition = 0; + this.panner = new p5.Panner(this.output, p5sound.input, 2); + // it is possible to instantiate a soundfile with no path + if (this.url || this.file) { + this.load(onload, onerror); + } + // add this p5.SoundFile to the soundArray + p5sound.soundArray.push(this); + if (typeof whileLoading === 'function') { + this._whileLoading = whileLoading; + } else { + this._whileLoading = function () { + }; + } + }; + // register preload handling of loadSound + p5.prototype.registerPreloadMethod('loadSound', p5.prototype); + /** + * loadSound() returns a new p5.SoundFile from a specified + * path. If called during preload(), the p5.SoundFile will be ready + * to play in time for setup() and draw(). If called outside of + * preload, the p5.SoundFile will not be ready immediately, so + * loadSound accepts a callback as the second parameter. Using a + * + * local server is recommended when loading external files. + * + * @method loadSound + * @param {String|Array} path Path to the sound file, or an array with + * paths to soundfiles in multiple formats + * i.e. ['sound.ogg', 'sound.mp3']. + * Alternately, accepts an object: either + * from the HTML5 File API, or a p5.File. + * @param {Function} [successCallback] Name of a function to call once file loads + * @param {Function} [errorCallback] Name of a function to call if there is + * an error loading the file. + * @param {Function} [whileLoading] Name of a function to call while file is loading. + * This function will receive the percentage loaded + * so far, from 0.0 to 1.0. + * @return {SoundFile} Returns a p5.SoundFile + * @example + *
+ * function preload() { + * mySound = loadSound('assets/doorbell.mp3'); + * } + * + * function setup() { + * mySound.setVolume(0.1); + * mySound.play(); + * } + *
+ */ + p5.prototype.loadSound = function (path, callback, onerror, whileLoading) { + // if loading locally without a server + if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') { + window.alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS'); + } + var self = this; + var s = new p5.SoundFile(path, function () { + if (typeof callback === 'function') { + callback.apply(self, arguments); + } + self._decrementPreload(); + }, onerror, whileLoading); + return s; + }; + /** + * This is a helper function that the p5.SoundFile calls to load + * itself. Accepts a callback (the name of another function) + * as an optional parameter. + * + * @private + * @param {Function} [successCallback] Name of a function to call once file loads + * @param {Function} [errorCallback] Name of a function to call if there is an error + */ + p5.SoundFile.prototype.load = function (callback, errorCallback) { + var self = this; + var errorTrace = new Error().stack; + if (this.url !== undefined && this.url !== '') { + var request = new XMLHttpRequest(); + request.addEventListener('progress', function (evt) { + self._updateProgress(evt); + }, false); + request.open('GET', this.url, true); + request.responseType = 'arraybuffer'; + request.onload = function () { + if (request.status === 200) { + // on sucess loading file: + ac.decodeAudioData(request.response, // success decoding buffer: + function (buff) { + self.buffer = buff; + self.panner.inputChannels(buff.numberOfChannels); + if (callback) { + callback(self); + } + }, // error decoding buffer. "e" is undefined in Chrome 11/22/2015 + function () { + var err = new CustomError('decodeAudioData', errorTrace, self.url); + var msg = 'AudioContext error at decodeAudioData for ' + self.url; + if (errorCallback) { + err.msg = msg; + errorCallback(err); + } else { + console.error(msg + '\n The error stack trace includes: \n' + err.stack); + } + }); + } else { + var err = new CustomError('loadSound', errorTrace, self.url); + var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')'; + if (errorCallback) { + err.message = msg; + errorCallback(err); + } else { + console.error(msg + '\n The error stack trace includes: \n' + err.stack); + } + } + }; + // if there is another error, aside from 404... + request.onerror = function () { + var err = new CustomError('loadSound', errorTrace, self.url); + var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.'; + if (errorCallback) { + err.message = msg; + errorCallback(err); + } else { + console.error(msg + '\n The error stack trace includes: \n' + err.stack); + } + }; + request.send(); + } else if (this.file !== undefined) { + var reader = new FileReader(); + reader.onload = function () { + ac.decodeAudioData(reader.result, function (buff) { + self.buffer = buff; + self.panner.inputChannels(buff.numberOfChannels); + if (callback) { + callback(self); + } + }); + }; + reader.onerror = function (e) { + if (onerror) { + onerror(e); + } + }; + reader.readAsArrayBuffer(this.file); + } + }; + // TO DO: use this method to create a loading bar that shows progress during file upload/decode. 
+ p5.SoundFile.prototype._updateProgress = function (evt) { + if (evt.lengthComputable) { + var percentComplete = evt.loaded / evt.total * 0.99; + this._whileLoading(percentComplete, evt); + } else { + // Unable to compute progress information since the total size is unknown + this._whileLoading('size unknown'); + } + }; + /** + * Returns true if the sound file finished loading successfully. + * + * @method isLoaded + * @return {Boolean} + */ + p5.SoundFile.prototype.isLoaded = function () { + if (this.buffer) { + return true; + } else { + return false; + } + }; + /** + * Play the p5.SoundFile + * + * @method play + * @param {Number} [startTime] (optional) schedule playback to start (in seconds from now). + * @param {Number} [rate] (optional) playback rate + * @param {Number} [amp] (optional) amplitude (volume) + * of playback + * @param {Number} [cueStart] (optional) cue start time in seconds + * @param {Number} [duration] (optional) duration of playback in seconds + */ + p5.SoundFile.prototype.play = function (startTime, rate, amp, _cueStart, duration) { + var self = this; + var now = p5sound.audiocontext.currentTime; + var cueStart, cueEnd; + var time = startTime || 0; + if (time < 0) { + time = 0; + } + time = time + now; + this.rate(rate); + this.setVolume(amp); + // TO DO: if already playing, create array of buffers for easy stop() + if (this.buffer) { + // reset the pause time (if it was paused) + this._pauseTime = 0; + // handle restart playmode + if (this.mode === 'restart' && this.buffer && this.bufferSourceNode) { + this.bufferSourceNode.stop(time); + this._counterNode.stop(time); + } + // make a new source and counter. They are automatically assigned playbackRate and buffer + this.bufferSourceNode = this._initSourceNode(); + // garbage collect counterNode and create a new one + if (this._counterNode) + this._counterNode = undefined; + this._counterNode = this._initCounterNode(); + if (_cueStart) { + if (_cueStart >= 0 && _cueStart < this.buffer.duration) { + // this.startTime = cueStart; + cueStart = _cueStart; + } else { + throw 'start time out of range'; + } + } else { + cueStart = 0; + } + if (duration) { + // if duration is greater than buffer.duration, just play entire file anyway rather than throw an error + duration = duration <= this.buffer.duration - cueStart ? 
duration : this.buffer.duration; + } else { + duration = this.buffer.duration - cueStart; + } + this.bufferSourceNode.connect(this.output); + // if it was paused, play at the pause position + if (this._paused) { + this.bufferSourceNode.start(time, this.pauseTime, duration); + this._counterNode.start(time, this.pauseTime, duration); + } else { + this.bufferSourceNode.start(time, cueStart, duration); + this._counterNode.start(time, cueStart, duration); + } + this._playing = true; + this._paused = false; + // add source to sources array, which is used in stopAll() + this.bufferSourceNodes.push(this.bufferSourceNode); + this.bufferSourceNode._arrayIndex = this.bufferSourceNodes.length - 1; + // delete this.bufferSourceNode from the sources array when it is done playing: + var clearOnEnd = function () { + this._playing = false; + this.removeEventListener('ended', clearOnEnd, false); + // call the onended callback + self._onended(self); + self.bufferSourceNodes.forEach(function (n, i) { + if (n._playing === false) { + self.bufferSourceNodes.splice(i); + } + }); + if (self.bufferSourceNodes.length === 0) { + self._playing = false; + } + }; + this.bufferSourceNode.onended = clearOnEnd; + } else { + throw 'not ready to play file, buffer has yet to load. Try preload()'; + } + // if looping, will restart at original time + this.bufferSourceNode.loop = this._looping; + this._counterNode.loop = this._looping; + if (this._looping === true) { + cueEnd = cueStart + duration; + this.bufferSourceNode.loopStart = cueStart; + this.bufferSourceNode.loopEnd = cueEnd; + this._counterNode.loopStart = cueStart; + this._counterNode.loopEnd = cueEnd; + } + }; + /** + * p5.SoundFile has two play modes: restart and + * sustain. Play Mode determines what happens to a + * p5.SoundFile if it is triggered while in the middle of playback. + * In sustain mode, playback will continue simultaneous to the + * new playback. In restart mode, play() will stop playback + * and start over. Sustain is the default mode. + * + * @method playMode + * @param {String} str 'restart' or 'sustain' + * @example + *
+ * function setup(){ + * mySound = loadSound('assets/Damscray_DancingTiger.mp3'); + * } + * function mouseClicked() { + * mySound.playMode('sustain'); + * mySound.play(); + * } + * function keyPressed() { + * mySound.playMode('restart'); + * mySound.play(); + * } + * + *
+ */ + p5.SoundFile.prototype.playMode = function (str) { + var s = str.toLowerCase(); + // if restart, stop all other sounds from playing + if (s === 'restart' && this.buffer && this.bufferSourceNode) { + for (var i = 0; i < this.bufferSourceNodes.length - 1; i++) { + var now = p5sound.audiocontext.currentTime; + this.bufferSourceNodes[i].stop(now); + } + } + // set play mode to effect future playback + if (s === 'restart' || s === 'sustain') { + this.mode = s; + } else { + throw 'Invalid play mode. Must be either "restart" or "sustain"'; + } + }; + /** + * Pauses a file that is currently playing. If the file is not + * playing, then nothing will happen. + * + * After pausing, .play() will resume from the paused + * position. + * If p5.SoundFile had been set to loop before it was paused, + * it will continue to loop after it is unpaused with .play(). + * + * @method pause + * @param {Number} [startTime] (optional) schedule event to occur + * seconds from now + * @example + *
+ * var soundFile; + * + * function preload() { + * soundFormats('ogg', 'mp3'); + * soundFile = loadSound('assets/Damscray_-_Dancing_Tiger_02.mp3'); + * } + * function setup() { + * background(0, 255, 0); + * soundFile.setVolume(0.1); + * soundFile.loop(); + * } + * function keyTyped() { + * if (key == 'p') { + * soundFile.pause(); + * background(255, 0, 0); + * } + * } + * + * function keyReleased() { + * if (key == 'p') { + * soundFile.play(); + * background(0, 255, 0); + * } + * } + * + *
+ */ + p5.SoundFile.prototype.pause = function (startTime) { + var now = p5sound.audiocontext.currentTime; + var time = startTime || 0; + var pTime = time + now; + if (this.isPlaying() && this.buffer && this.bufferSourceNode) { + this.pauseTime = this.currentTime(); + this.bufferSourceNode.stop(pTime); + this._counterNode.stop(pTime); + this._paused = true; + this._playing = false; + this._pauseTime = this.currentTime(); + } else { + this._pauseTime = 0; + } + }; + /** + * Loop the p5.SoundFile. Accepts optional parameters to set the + * playback rate, playback volume, loopStart, loopEnd. + * + * @method loop + * @param {Number} [startTime] (optional) schedule event to occur + * seconds from now + * @param {Number} [rate] (optional) playback rate + * @param {Number} [amp] (optional) playback volume + * @param {Number} [cueLoopStart](optional) startTime in seconds + * @param {Number} [duration] (optional) loop duration in seconds + */ + p5.SoundFile.prototype.loop = function (startTime, rate, amp, loopStart, duration) { + this._looping = true; + this.play(startTime, rate, amp, loopStart, duration); + }; + /** + * Set a p5.SoundFile's looping flag to true or false. If the sound + * is currently playing, this change will take effect when it + * reaches the end of the current playback. + * + * @param {Boolean} Boolean set looping to true or false + */ + p5.SoundFile.prototype.setLoop = function (bool) { + if (bool === true) { + this._looping = true; + } else if (bool === false) { + this._looping = false; + } else { + throw 'Error: setLoop accepts either true or false'; + } + if (this.bufferSourceNode) { + this.bufferSourceNode.loop = this._looping; + this._counterNode.loop = this._looping; + } + }; + /** + * Returns 'true' if a p5.SoundFile is currently looping and playing, 'false' if not. + * + * @return {Boolean} + */ + p5.SoundFile.prototype.isLooping = function () { + if (!this.bufferSourceNode) { + return false; + } + if (this._looping === true && this.isPlaying() === true) { + return true; + } + return false; + }; + /** + * Returns true if a p5.SoundFile is playing, false if not (i.e. + * paused or stopped). + * + * @method isPlaying + * @return {Boolean} + */ + p5.SoundFile.prototype.isPlaying = function () { + return this._playing; + }; + /** + * Returns true if a p5.SoundFile is paused, false if not (i.e. + * playing or stopped). + * + * @method isPaused + * @return {Boolean} + */ + p5.SoundFile.prototype.isPaused = function () { + return this._paused; + }; + /** + * Stop soundfile playback. + * + * @method stop + * @param {Number} [startTime] (optional) schedule event to occur + * in seconds from now + */ + p5.SoundFile.prototype.stop = function (timeFromNow) { + var time = timeFromNow || 0; + if (this.mode === 'sustain') { + this.stopAll(time); + this._playing = false; + this.pauseTime = 0; + this._paused = false; + } else if (this.buffer && this.bufferSourceNode) { + var now = p5sound.audiocontext.currentTime; + var t = time || 0; + this.pauseTime = 0; + this.bufferSourceNode.stop(now + t); + this._counterNode.stop(now + t); + this._playing = false; + this._paused = false; + } + }; + /** + * Stop playback on all of this soundfile's sources. 
+ * @private + */ + p5.SoundFile.prototype.stopAll = function (_time) { + var now = p5sound.audiocontext.currentTime; + var time = _time || 0; + if (this.buffer && this.bufferSourceNode) { + for (var i = 0; i < this.bufferSourceNodes.length; i++) { + if (typeof this.bufferSourceNodes[i] !== undefined) { + try { + this.bufferSourceNodes[i].onended = function () { + }; + this.bufferSourceNodes[i].stop(now + time); + } catch (e) { + } + } + } + this._counterNode.stop(now + time); + this._onended(this); + } + }; + /** + * Multiply the output volume (amplitude) of a sound file + * between 0.0 (silence) and 1.0 (full volume). + * 1.0 is the maximum amplitude of a digital sound, so multiplying + * by greater than 1.0 may cause digital distortion. To + * fade, provide a rampTime parameter. For more + * complex fades, see the Env class. + * + * Alternately, you can pass in a signal source such as an + * oscillator to modulate the amplitude with an audio signal. + * + * @method setVolume + * @param {Number|Object} volume Volume (amplitude) between 0.0 + * and 1.0 or modulating signal/oscillator + * @param {Number} [rampTime] Fade for t seconds + * @param {Number} [timeFromNow] Schedule this event to happen at + * t seconds in the future + */ + p5.SoundFile.prototype.setVolume = function (vol, _rampTime, _tFromNow) { + if (typeof vol === 'number') { + var rampTime = _rampTime || 0; + var tFromNow = _tFromNow || 0; + var now = p5sound.audiocontext.currentTime; + var currentVol = this.output.gain.value; + this.output.gain.cancelScheduledValues(now + tFromNow); + this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow); + this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime); + } else if (vol) { + vol.connect(this.output.gain); + } else { + // return the Gain Node + return this.output.gain; + } + }; + // same as setVolume, to match Processing Sound + p5.SoundFile.prototype.amp = p5.SoundFile.prototype.setVolume; + // these are the same thing + p5.SoundFile.prototype.fade = p5.SoundFile.prototype.setVolume; + p5.SoundFile.prototype.getVolume = function () { + return this.output.gain.value; + }; + /** + * Set the stereo panning of a p5.sound object to + * a floating point number between -1.0 (left) and 1.0 (right). + * Default is 0.0 (center). + * + * @method pan + * @param {Number} [panValue] Set the stereo panner + * @param {Number} [timeFromNow] schedule this event to happen + * seconds from now + * @example + *
+ * + * var ball = {}; + * var soundFile; + * + * function setup() { + * soundFormats('ogg', 'mp3'); + * soundFile = loadSound('assets/beatbox.mp3'); + * } + * + * function draw() { + * background(0); + * ball.x = constrain(mouseX, 0, width); + * ellipse(ball.x, height/2, 20, 20) + * } + * + * function mousePressed(){ + * // map the ball's x location to a panning degree + * // between -1.0 (left) and 1.0 (right) + * var panning = map(ball.x, 0., width,-1.0, 1.0); + * soundFile.pan(panning); + * soundFile.play(); + * } + *
+ */ + p5.SoundFile.prototype.pan = function (pval, tFromNow) { + this.panPosition = pval; + this.panner.pan(pval, tFromNow); + }; + /** + * Returns the current stereo pan position (-1.0 to 1.0) + * + * @return {Number} Returns the stereo pan setting of the Oscillator + * as a number between -1.0 (left) and 1.0 (right). + * 0.0 is center and default. + */ + p5.SoundFile.prototype.getPan = function () { + return this.panPosition; + }; + /** + * Set the playback rate of a sound file. Will change the speed and the pitch. + * Values less than zero will reverse the audio buffer. + * + * @method rate + * @param {Number} [playbackRate] Set the playback rate. 1.0 is normal, + * .5 is half-speed, 2.0 is twice as fast. + * Values less than zero play backwards. + * @example + *
+ * var song; + * + * function preload() { + * song = loadSound('assets/Damscray_DancingTiger.mp3'); + * } + * + * function setup() { + * song.loop(); + * } + * + * function draw() { + * background(200); + * + * // Set the rate to a range between 0.1 and 4 + * // Changing the rate also alters the pitch + * var speed = map(mouseY, 0.1, height, 0, 2); + * speed = constrain(speed, 0.01, 4); + * song.rate(speed); + * + * // Draw a circle to show what is going on + * stroke(0); + * fill(51, 100); + * ellipse(mouseX, 100, 48, 48); + * } + * + * + *
+ * + */ + p5.SoundFile.prototype.rate = function (playbackRate) { + if (typeof playbackRate === 'undefined') { + return this.playbackRate; + } + if (playbackRate === 0) { + playbackRate = 1e-13; + } else if (playbackRate < 0 && !this.reversed) { + var cPos = this.currentTime(); + var newPos = (cPos - this.duration()) / playbackRate; + this.pauseTime = newPos; + this.reverseBuffer(); + playbackRate = Math.abs(playbackRate); + } else if (playbackRate > 0 && this.reversed) { + this.reverseBuffer(); + } + if (this.bufferSourceNode) { + var now = p5sound.audiocontext.currentTime; + this.bufferSourceNode.playbackRate.cancelScheduledValues(now); + this.bufferSourceNode.playbackRate.linearRampToValueAtTime(Math.abs(playbackRate), now); + this._counterNode.playbackRate.cancelScheduledValues(now); + this._counterNode.playbackRate.linearRampToValueAtTime(Math.abs(playbackRate), now); + } + this.playbackRate = playbackRate; + return this.playbackRate; + }; + // TO DO: document this + p5.SoundFile.prototype.setPitch = function (num) { + var newPlaybackRate = midiToFreq(num) / midiToFreq(60); + this.rate(newPlaybackRate); + }; + p5.SoundFile.prototype.getPlaybackRate = function () { + return this.playbackRate; + }; + /** + * Returns the duration of a sound file in seconds. + * + * @method duration + * @return {Number} The duration of the soundFile in seconds. + */ + p5.SoundFile.prototype.duration = function () { + // Return Duration + if (this.buffer) { + return this.buffer.duration; + } else { + return 0; + } + }; + /** + * Return the current position of the p5.SoundFile playhead, in seconds. + * Note that if you change the playbackRate while the p5.SoundFile is + * playing, the results may not be accurate. + * + * @method currentTime + * @return {Number} currentTime of the soundFile in seconds. + */ + p5.SoundFile.prototype.currentTime = function () { + // TO DO --> make reverse() flip these values appropriately + if (this._pauseTime > 0) { + return this._pauseTime; + } else { + return this._lastPos / ac.sampleRate; + } + }; + /** + * Move the playhead of the song to a position, in seconds. Start timing + * and playback duration. If none are given, will reset the file to play + * entire duration from start to finish. + * + * @method jump + * @param {Number} cueTime cueTime of the soundFile in seconds. + * @param {Number} duration duration in seconds. + */ + p5.SoundFile.prototype.jump = function (cueTime, duration) { + if (cueTime < 0 || cueTime > this.buffer.duration) { + throw 'jump time out of range'; + } + if (duration > this.buffer.duration - cueTime) { + throw 'end time out of range'; + } + var cTime = cueTime || 0; + var dur = duration || this.buffer.duration - cueTime; + if (this.isPlaying()) { + this.stop(); + } + this.play(0, this.playbackRate, this.output.gain.value, cTime, dur); + }; + /** + * Return the number of channels in a sound file. + * For example, Mono = 1, Stereo = 2. + * + * @method channels + * @return {Number} [channels] + */ + p5.SoundFile.prototype.channels = function () { + return this.buffer.numberOfChannels; + }; + /** + * Return the sample rate of the sound file. + * + * @method sampleRate + * @return {Number} [sampleRate] + */ + p5.SoundFile.prototype.sampleRate = function () { + return this.buffer.sampleRate; + }; + /** + * Return the number of samples in a sound file. + * Equal to sampleRate * duration. 
+ * + * @method frames + * @return {Number} [sampleCount] + */ + p5.SoundFile.prototype.frames = function () { + return this.buffer.length; + }; + /** + * Returns an array of amplitude peaks in a p5.SoundFile that can be + * used to draw a static waveform. Scans through the p5.SoundFile's + * audio buffer to find the greatest amplitudes. Accepts one + * parameter, 'length', which determines size of the array. + * Larger arrays result in more precise waveform visualizations. + * + * Inspired by Wavesurfer.js. + * + * @method getPeaks + * @params {Number} [length] length is the size of the returned array. + * Larger length results in more precision. + * Defaults to 5*width of the browser window. + * @returns {Float32Array} Array of peaks. + */ + p5.SoundFile.prototype.getPeaks = function (length) { + if (this.buffer) { + // set length to window's width if no length is provided + if (!length) { + length = window.width * 5; + } + if (this.buffer) { + var buffer = this.buffer; + var sampleSize = buffer.length / length; + var sampleStep = ~~(sampleSize / 10) || 1; + var channels = buffer.numberOfChannels; + var peaks = new Float32Array(Math.round(length)); + for (var c = 0; c < channels; c++) { + var chan = buffer.getChannelData(c); + for (var i = 0; i < length; i++) { + var start = ~~(i * sampleSize); + var end = ~~(start + sampleSize); + var max = 0; + for (var j = start; j < end; j += sampleStep) { + var value = chan[j]; + if (value > max) { + max = value; + } else if (-value > max) { + max = value; + } + } + if (c === 0 || Math.abs(max) > peaks[i]) { + peaks[i] = max; + } + } + } + return peaks; + } + } else { + throw 'Cannot load peaks yet, buffer is not loaded'; + } + }; + /** + * Reverses the p5.SoundFile's buffer source. + * Playback must be handled separately (see example). + * + * @method reverseBuffer + * @example + *
+ * var drum; + * + * function preload() { + * drum = loadSound('assets/drum.mp3'); + * } + * + * function setup() { + * drum.reverseBuffer(); + * drum.play(); + * } + * + * + *
+ */ + p5.SoundFile.prototype.reverseBuffer = function () { + var curVol = this.getVolume(); + this.setVolume(0, 0.01, 0); + if (this.buffer) { + for (var i = 0; i < this.buffer.numberOfChannels; i++) { + Array.prototype.reverse.call(this.buffer.getChannelData(i)); + } + // set reversed flag + this.reversed = !this.reversed; + } else { + throw 'SoundFile is not done loading'; + } + this.setVolume(curVol, 0.01, 0.0101); + }; + /** + * Schedule an event to be called when the soundfile + * reaches the end of a buffer. If the soundfile is + * playing through once, this will be called when it + * ends. If it is looping, it will be called when + * stop is called. + * + * @method onended + * @param {Function} callback function to call when the + * soundfile has ended. + */ + p5.SoundFile.prototype.onended = function (callback) { + this._onended = callback; + return this; + }; + p5.SoundFile.prototype.add = function () { + }; + p5.SoundFile.prototype.dispose = function () { + var now = p5sound.audiocontext.currentTime; + // remove reference to soundfile + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + this.stop(now); + if (this.buffer && this.bufferSourceNode) { + for (var i = 0; i < this.bufferSourceNodes.length - 1; i++) { + if (this.bufferSourceNodes[i] !== null) { + this.bufferSourceNodes[i].disconnect(); + try { + this.bufferSourceNodes[i].stop(now); + } catch (e) { + console.warning('no buffer source node to dispose'); + } + this.bufferSourceNodes[i] = null; + } + } + if (this.isPlaying()) { + try { + this._counterNode.stop(now); + } catch (e) { + console.log(e); + } + this._counterNode = null; + } + } + if (this.output) { + this.output.disconnect(); + this.output = null; + } + if (this.panner) { + this.panner.disconnect(); + this.panner = null; + } + }; + /** + * Connects the output of a p5sound object to input of another + * p5.sound object. For example, you may connect a p5.SoundFile to an + * FFT or an Effect. If no parameter is given, it will connect to + * the master output. Most p5sound objects connect to the master + * output when they are created. + * + * @method connect + * @param {Object} [object] Audio object that accepts an input + */ + p5.SoundFile.prototype.connect = function (unit) { + if (!unit) { + this.panner.connect(p5sound.input); + } else { + if (unit.hasOwnProperty('input')) { + this.panner.connect(unit.input); + } else { + this.panner.connect(unit); + } + } + }; + /** + * Disconnects the output of this p5sound object. + * + * @method disconnect + */ + p5.SoundFile.prototype.disconnect = function () { + this.panner.disconnect(); + }; + /** + */ + p5.SoundFile.prototype.getLevel = function () { + console.warn('p5.SoundFile.getLevel has been removed from the library. Use p5.Amplitude instead'); + }; + /** + * Reset the source for this SoundFile to a + * new path (URL). + * + * @method setPath + * @param {String} path path to audio file + * @param {Function} callback Callback + */ + p5.SoundFile.prototype.setPath = function (p, callback) { + var path = p5.prototype._checkFileFormats(p); + this.url = path; + this.load(callback); + }; + /** + * Replace the current Audio Buffer with a new Buffer. + * + * @param {Array} buf Array of Float32 Array(s). 2 Float32 Arrays + * will create a stereo source. 1 will create + * a mono source. 
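+ *
+ * A minimal sketch of one way this could be used, with placeholder
+ * names (sf, left, right) and a 44.1 kHz audio context assumed:
+ *
+ *   var sf, left, right;
+ *   function setup() {
+ *     sf = new p5.SoundFile();           // no path; the buffer is supplied below
+ *     var len = 44100;                   // one second at an assumed 44.1 kHz
+ *     left = new Float32Array(len);
+ *     right = new Float32Array(len);
+ *     for (var i = 0; i < len; i++) {
+ *       left[i] = Math.sin(2 * Math.PI * 220 * i / len);    // 220 Hz, left channel
+ *       right[i] = Math.sin(2 * Math.PI * 440 * i / len);   // 440 Hz, right channel
+ *     }
+ *     sf.setBuffer([left, right]);       // two Float32Arrays -> stereo source
+ *   }
+ *   function mousePressed() {
+ *     sf.play();
+ *   }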
+ */ + p5.SoundFile.prototype.setBuffer = function (buf) { + var numChannels = buf.length; + var size = buf[0].length; + var newBuffer = ac.createBuffer(numChannels, size, ac.sampleRate); + if (!(buf[0] instanceof Float32Array)) { + buf[0] = new Float32Array(buf[0]); + } + for (var channelNum = 0; channelNum < numChannels; channelNum++) { + var channel = newBuffer.getChannelData(channelNum); + channel.set(buf[channelNum]); + } + this.buffer = newBuffer; + // set numbers of channels on input to the panner + this.panner.inputChannels(numChannels); + }; + ////////////////////////////////////////////////// + // script processor node with an empty buffer to help + // keep a sample-accurate position in playback buffer. + // Inspired by Chinmay Pendharkar's technique for Sonoport --> http://bit.ly/1HwdCsV + // Copyright [2015] [Sonoport (Asia) Pte. Ltd.], + // Licensed under the Apache License http://apache.org/licenses/LICENSE-2.0 + //////////////////////////////////////////////////////////////////////////////////// + var _createCounterBuffer = function (buffer) { + var array = new Float32Array(buffer.length); + var audioBuf = ac.createBuffer(1, buffer.length, 44100); + for (var index = 0; index < buffer.length; index++) { + array[index] = index; + } + audioBuf.getChannelData(0).set(array); + return audioBuf; + }; + // initialize counterNode, set its initial buffer and playbackRate + p5.SoundFile.prototype._initCounterNode = function () { + var self = this; + var now = ac.currentTime; + var cNode = ac.createBufferSource(); + // dispose of scope node if it already exists + if (self._scopeNode) { + self._scopeNode.disconnect(); + self._scopeNode.onaudioprocess = undefined; + self._scopeNode = null; + } + self._scopeNode = ac.createScriptProcessor(256, 1, 1); + // create counter buffer of the same length as self.buffer + cNode.buffer = _createCounterBuffer(self.buffer); + cNode.playbackRate.setValueAtTime(self.playbackRate, now); + cNode.connect(self._scopeNode); + self._scopeNode.connect(p5.soundOut._silentNode); + self._scopeNode.onaudioprocess = function (processEvent) { + var inputBuffer = processEvent.inputBuffer.getChannelData(0); + // update the lastPos + self._lastPos = inputBuffer[inputBuffer.length - 1] || 0; + // do any callbacks that have been scheduled + self._onTimeUpdate(self._lastPos); + }; + return cNode; + }; + // initialize sourceNode, set its initial buffer and playbackRate + p5.SoundFile.prototype._initSourceNode = function () { + var bufferSourceNode = ac.createBufferSource(); + bufferSourceNode.buffer = this.buffer; + bufferSourceNode.playbackRate.value = this.playbackRate; + return bufferSourceNode; + }; + /** + * processPeaks returns an array of timestamps where it thinks there is a beat. + * + * This is an asynchronous function that processes the soundfile in an offline audio context, + * and sends the results to your callback function. + * + * The process involves running the soundfile through a lowpass filter, and finding all of the + * peaks above the initial threshold. If the total number of peaks are below the minimum number of peaks, + * it decreases the threshold and re-runs the analysis until either minPeaks or minThreshold are reached. 
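+ *
+ * One possible call pattern, sketched with placeholder names (song, onPeaks)
+ * and an asset path reused from the other examples in this file:
+ *
+ *   var song;
+ *   function preload() {
+ *     song = loadSound('assets/beat.mp3');
+ *   }
+ *   function setup() {
+ *     // use the default thresholds; the callback receives an array of
+ *     // timestamps (in seconds) where beats were detected
+ *     song.processPeaks(onPeaks);
+ *   }
+ *   function onPeaks(peaks) {
+ *     console.log(peaks);
+ *   }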
+ * + * @method processPeaks + * @param {Function} callback a function to call once this data is returned + * @param {Number} [initThreshold] initial threshold defaults to 0.9 + * @param {Number} [minThreshold] minimum threshold defaults to 0.22 + * @param {Number} [minPeaks] minimum number of peaks defaults to 200 + * @return {Array} Array of timestamped peaks + */ + p5.SoundFile.prototype.processPeaks = function (callback, _initThreshold, _minThreshold, _minPeaks) { + var bufLen = this.buffer.length; + var sampleRate = this.buffer.sampleRate; + var buffer = this.buffer; + var allPeaks = []; + var initialThreshold = _initThreshold || 0.9, threshold = initialThreshold, minThreshold = _minThreshold || 0.22, minPeaks = _minPeaks || 200; + // Create offline context + var offlineContext = new window.OfflineAudioContext(1, bufLen, sampleRate); + // create buffer source + var source = offlineContext.createBufferSource(); + source.buffer = buffer; + // Create filter. TO DO: allow custom setting of filter + var filter = offlineContext.createBiquadFilter(); + filter.type = 'lowpass'; + source.connect(filter); + filter.connect(offlineContext.destination); + // start playing at time:0 + source.start(0); + offlineContext.startRendering(); + // Render the song + // act on the result + offlineContext.oncomplete = function (e) { + var filteredBuffer = e.renderedBuffer; + var bufferData = filteredBuffer.getChannelData(0); + // step 1: + // create Peak instances, add them to array, with strength and sampleIndex + do { + allPeaks = getPeaksAtThreshold(bufferData, threshold); + threshold -= 0.005; + } while (Object.keys(allPeaks).length < minPeaks && threshold >= minThreshold); + // step 2: + // find intervals for each peak in the sampleIndex, add tempos array + var intervalCounts = countIntervalsBetweenNearbyPeaks(allPeaks); + // step 3: find top tempos + var groups = groupNeighborsByTempo(intervalCounts, filteredBuffer.sampleRate); + // sort top intervals + var topTempos = groups.sort(function (intA, intB) { + return intB.count - intA.count; + }).splice(0, 5); + // set this SoundFile's tempo to the top tempo ?? + this.tempo = topTempos[0].tempo; + // step 4: + // new array of peaks at top tempo within a bpmVariance + var bpmVariance = 5; + var tempoPeaks = getPeaksAtTopTempo(allPeaks, topTempos[0].tempo, filteredBuffer.sampleRate, bpmVariance); + callback(tempoPeaks); + }; + }; + // process peaks + var Peak = function (amp, i) { + this.sampleIndex = i; + this.amplitude = amp; + this.tempos = []; + this.intervals = []; + }; + // 1. for processPeaks() Function to identify peaks above a threshold + // returns an array of peak indexes as frames (samples) of the original soundfile + function getPeaksAtThreshold(data, threshold) { + var peaksObj = {}; + var length = data.length; + for (var i = 0; i < length; i++) { + if (data[i] > threshold) { + var amp = data[i]; + var peak = new Peak(amp, i); + peaksObj[i] = peak; + // Skip forward ~ 1/8s to get past this peak. + i += 6000; + } + i++; + } + return peaksObj; + } + // 2. 
for processPeaks() + function countIntervalsBetweenNearbyPeaks(peaksObj) { + var intervalCounts = []; + var peaksArray = Object.keys(peaksObj).sort(); + for (var index = 0; index < peaksArray.length; index++) { + // find intervals in comparison to nearby peaks + for (var i = 0; i < 10; i++) { + var startPeak = peaksObj[peaksArray[index]]; + var endPeak = peaksObj[peaksArray[index + i]]; + if (startPeak && endPeak) { + var startPos = startPeak.sampleIndex; + var endPos = endPeak.sampleIndex; + var interval = endPos - startPos; + // add a sample interval to the startPeak in the allPeaks array + if (interval > 0) { + startPeak.intervals.push(interval); + } + // tally the intervals and return interval counts + var foundInterval = intervalCounts.some(function (intervalCount) { + if (intervalCount.interval === interval) { + intervalCount.count++; + return intervalCount; + } + }); + // store with JSON like formatting + if (!foundInterval) { + intervalCounts.push({ + interval: interval, + count: 1 + }); + } + } + } + } + return intervalCounts; + } + // 3. for processPeaks --> find tempo + function groupNeighborsByTempo(intervalCounts, sampleRate) { + var tempoCounts = []; + intervalCounts.forEach(function (intervalCount) { + try { + // Convert an interval to tempo + var theoreticalTempo = Math.abs(60 / (intervalCount.interval / sampleRate)); + theoreticalTempo = mapTempo(theoreticalTempo); + var foundTempo = tempoCounts.some(function (tempoCount) { + if (tempoCount.tempo === theoreticalTempo) + return tempoCount.count += intervalCount.count; + }); + if (!foundTempo) { + if (isNaN(theoreticalTempo)) { + return; + } + tempoCounts.push({ + tempo: Math.round(theoreticalTempo), + count: intervalCount.count + }); + } + } catch (e) { + throw e; + } + }); + return tempoCounts; + } + // 4. 
for processPeaks - get peaks at top tempo + function getPeaksAtTopTempo(peaksObj, tempo, sampleRate, bpmVariance) { + var peaksAtTopTempo = []; + var peaksArray = Object.keys(peaksObj).sort(); + // TO DO: filter out peaks that have the tempo and return + for (var i = 0; i < peaksArray.length; i++) { + var key = peaksArray[i]; + var peak = peaksObj[key]; + for (var j = 0; j < peak.intervals.length; j++) { + var intervalBPM = Math.round(Math.abs(60 / (peak.intervals[j] / sampleRate))); + intervalBPM = mapTempo(intervalBPM); + if (Math.abs(intervalBPM - tempo) < bpmVariance) { + // convert sampleIndex to seconds + peaksAtTopTempo.push(peak.sampleIndex / 44100); + } + } + } + // filter out peaks that are very close to each other + peaksAtTopTempo = peaksAtTopTempo.filter(function (peakTime, index, arr) { + var dif = arr[index + 1] - peakTime; + if (dif > 0.01) { + return true; + } + }); + return peaksAtTopTempo; + } + // helper function for processPeaks + function mapTempo(theoreticalTempo) { + // these scenarios create infinite while loop + if (!isFinite(theoreticalTempo) || theoreticalTempo === 0) { + return; + } + // Adjust the tempo to fit within the 90-180 BPM range + while (theoreticalTempo < 90) + theoreticalTempo *= 2; + while (theoreticalTempo > 180 && theoreticalTempo > 90) + theoreticalTempo /= 2; + return theoreticalTempo; + } + /*** SCHEDULE EVENTS ***/ + // Cue inspired by JavaScript setTimeout, and the + // Tone.js Transport Timeline Event, MIT License Yotam Mann 2015 tonejs.org + var Cue = function (callback, time, id, val) { + this.callback = callback; + this.time = time; + this.id = id; + this.val = val; + }; + /** + * Schedule events to trigger every time a MediaElement + * (audio/video) reaches a playback cue point. + * + * Accepts a callback function, a time (in seconds) at which to trigger + * the callback, and an optional parameter for the callback. + * + * Time will be passed as the first parameter to the callback function, + * and param will be the second parameter. + * + * + * @method addCue + * @param {Number} time Time in seconds, relative to this media + * element's playback. For example, to trigger + * an event every time playback reaches two + * seconds, pass in the number 2. This will be + * passed as the first parameter to + * the callback function. + * @param {Function} callback Name of a function that will be + * called at the given time. The callback will + * receive time and (optionally) param as its + * two parameters. + * @param {Object} [value] An object to be passed as the + * second parameter to the + * callback function. + * @return {Number} id ID of this cue, + * useful for removeCue(id) + * @example + *
+ * function setup() { + * background(0); + * noStroke(); + * fill(255); + * textAlign(CENTER); + * text('click to play', width/2, height/2); + * + * mySound = loadSound('assets/beat.mp3'); + * + * // schedule calls to changeText + * mySound.addCue(0.50, changeText, "hello" ); + * mySound.addCue(1.00, changeText, "p5" ); + * mySound.addCue(1.50, changeText, "what" ); + * mySound.addCue(2.00, changeText, "do" ); + * mySound.addCue(2.50, changeText, "you" ); + * mySound.addCue(3.00, changeText, "want" ); + * mySound.addCue(4.00, changeText, "to" ); + * mySound.addCue(5.00, changeText, "make" ); + * mySound.addCue(6.00, changeText, "?" ); + * } + * + * function changeText(val) { + * background(0); + * text(val, width/2, height/2); + * } + * + * function mouseClicked() { + * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) { + * if (mySound.isPlaying() ) { + * mySound.stop(); + * } else { + * mySound.play(); + * } + * } + * } + *
+ */ + p5.SoundFile.prototype.addCue = function (time, callback, val) { + var id = this._cueIDCounter++; + var cue = new Cue(callback, time, id, val); + this._cues.push(cue); + // if (!this.elt.ontimeupdate) { + // this.elt.ontimeupdate = this._onTimeUpdate.bind(this); + // } + return id; + }; + /** + * Remove a callback based on its ID. The ID is returned by the + * addCue method. + * + * @method removeCue + * @param {Number} id ID of the cue, as returned by addCue + */ + p5.SoundFile.prototype.removeCue = function (id) { + var cueLength = this._cues.length; + for (var i = 0; i < cueLength; i++) { + var cue = this._cues[i]; + if (cue.id === id) { + this.cues.splice(i, 1); + } + } + if (this._cues.length === 0) { + } + }; + /** + * Remove all of the callbacks that had originally been scheduled + * via the addCue method. + * + * @method clearCues + */ + p5.SoundFile.prototype.clearCues = function () { + this._cues = []; + }; + // private method that checks for cues to be fired if events + // have been scheduled using addCue(callback, time). + p5.SoundFile.prototype._onTimeUpdate = function (position) { + var playbackTime = position / this.buffer.sampleRate; + var cueLength = this._cues.length; + for (var i = 0; i < cueLength; i++) { + var cue = this._cues[i]; + var callbackTime = cue.time; + var val = cue.val; + if (this._prevTime < callbackTime && callbackTime <= playbackTime) { + // pass the scheduled callbackTime as parameter to the callback + cue.callback(val); + } + } + this._prevTime = playbackTime; + }; +}(sndcore, errorHandler, master, helpers); +var amplitude; +'use strict'; +amplitude = function () { + var p5sound = master; + /** + * Amplitude measures volume between 0.0 and 1.0. + * Listens to all p5sound by default, or use setInput() + * to listen to a specific sound source. Accepts an optional + * smoothing value, which defaults to 0. + * + * @class p5.Amplitude + * @constructor + * @param {Number} [smoothing] between 0.0 and .999 to smooth + * amplitude readings (defaults to 0) + * @example + *
+ * var sound, amplitude, cnv; + * + * function preload(){ + * sound = loadSound('assets/beat.mp3'); + * } + * function setup() { + * cnv = createCanvas(100,100); + * amplitude = new p5.Amplitude(); + * + * // start / stop the sound when canvas is clicked + * cnv.mouseClicked(function() { + * if (sound.isPlaying() ){ + * sound.stop(); + * } else { + * sound.play(); + * } + * }); + * } + * function draw() { + * background(0); + * fill(255); + * var level = amplitude.getLevel(); + * var size = map(level, 0, 1, 0, 200); + * ellipse(width/2, height/2, size, size); + * } + * + *
+ */ + p5.Amplitude = function (smoothing) { + // Set to 2048 for now. In future iterations, this should be inherited or parsed from p5sound's default + this.bufferSize = 2048; + // set audio context + this.audiocontext = p5sound.audiocontext; + this.processor = this.audiocontext.createScriptProcessor(this.bufferSize, 2, 1); + // for connections + this.input = this.processor; + this.output = this.audiocontext.createGain(); + // smoothing defaults to 0 + this.smoothing = smoothing || 0; + // the variables to return + this.volume = 0; + this.average = 0; + this.stereoVol = [ + 0, + 0 + ]; + this.stereoAvg = [ + 0, + 0 + ]; + this.stereoVolNorm = [ + 0, + 0 + ]; + this.volMax = 0.001; + this.normalize = false; + this.processor.onaudioprocess = this._audioProcess.bind(this); + this.processor.connect(this.output); + this.output.gain.value = 0; + // this may only be necessary because of a Chrome bug + this.output.connect(this.audiocontext.destination); + // connect to p5sound master output by default, unless set by input() + p5sound.meter.connect(this.processor); + // add this p5.SoundFile to the soundArray + p5sound.soundArray.push(this); + }; + /** + * Connects to the p5sound instance (master output) by default. + * Optionally, you can pass in a specific source (i.e. a soundfile). + * + * @method setInput + * @param {soundObject|undefined} [snd] set the sound source + * (optional, defaults to + * master output) + * @param {Number|undefined} [smoothing] a range between 0.0 and 1.0 + * to smooth amplitude readings + * @example + *
+ * function preload(){ + * sound1 = loadSound('assets/beat.mp3'); + * sound2 = loadSound('assets/drum.mp3'); + * } + * function setup(){ + * amplitude = new p5.Amplitude(); + * sound1.play(); + * sound2.play(); + * amplitude.setInput(sound2); + * } + * function draw() { + * background(0); + * fill(255); + * var level = amplitude.getLevel(); + * var size = map(level, 0, 1, 0, 200); + * ellipse(width/2, height/2, size, size); + * } + * function mouseClicked(){ + * sound1.stop(); + * sound2.stop(); + * } + *
+ */ + p5.Amplitude.prototype.setInput = function (source, smoothing) { + p5sound.meter.disconnect(); + if (smoothing) { + this.smoothing = smoothing; + } + // connect to the master out of p5s instance if no snd is provided + if (source == null) { + console.log('Amplitude input source is not ready! Connecting to master output instead'); + p5sound.meter.connect(this.processor); + } else if (source instanceof p5.Signal) { + source.output.connect(this.processor); + } else if (source) { + source.connect(this.processor); + this.processor.disconnect(); + this.processor.connect(this.output); + } else { + p5sound.meter.connect(this.processor); + } + }; + p5.Amplitude.prototype.connect = function (unit) { + if (unit) { + if (unit.hasOwnProperty('input')) { + this.output.connect(unit.input); + } else { + this.output.connect(unit); + } + } else { + this.output.connect(this.panner.connect(p5sound.input)); + } + }; + p5.Amplitude.prototype.disconnect = function () { + this.output.disconnect(); + }; + // TO DO make this stereo / dependent on # of audio channels + p5.Amplitude.prototype._audioProcess = function (event) { + for (var channel = 0; channel < event.inputBuffer.numberOfChannels; channel++) { + var inputBuffer = event.inputBuffer.getChannelData(channel); + var bufLength = inputBuffer.length; + var total = 0; + var sum = 0; + var x; + for (var i = 0; i < bufLength; i++) { + x = inputBuffer[i]; + if (this.normalize) { + total += Math.max(Math.min(x / this.volMax, 1), -1); + sum += Math.max(Math.min(x / this.volMax, 1), -1) * Math.max(Math.min(x / this.volMax, 1), -1); + } else { + total += x; + sum += x * x; + } + } + var average = total / bufLength; + // ... then take the square root of the sum. + var rms = Math.sqrt(sum / bufLength); + this.stereoVol[channel] = Math.max(rms, this.stereoVol[channel] * this.smoothing); + this.stereoAvg[channel] = Math.max(average, this.stereoVol[channel] * this.smoothing); + this.volMax = Math.max(this.stereoVol[channel], this.volMax); + } + // add volume from all channels together + var self = this; + var volSum = this.stereoVol.reduce(function (previousValue, currentValue, index) { + self.stereoVolNorm[index - 1] = Math.max(Math.min(self.stereoVol[index - 1] / self.volMax, 1), 0); + self.stereoVolNorm[index] = Math.max(Math.min(self.stereoVol[index] / self.volMax, 1), 0); + return previousValue + currentValue; + }); + // volume is average of channels + this.volume = volSum / this.stereoVol.length; + // normalized value + this.volNorm = Math.max(Math.min(this.volume / this.volMax, 1), 0); + }; + /** + * Returns a single Amplitude reading at the moment it is called. + * For continuous readings, run in the draw loop. + * + * @method getLevel + * @param {Number} [channel] Optionally return only channel 0 (left) or 1 (right) + * @return {Number} Amplitude as a number between 0.0 and 1.0 + * @example + *
+ * function preload(){ + * sound = loadSound('assets/beat.mp3'); + * } + * function setup() { + * amplitude = new p5.Amplitude(); + * sound.play(); + * } + * function draw() { + * background(0); + * fill(255); + * var level = amplitude.getLevel(); + * var size = map(level, 0, 1, 0, 200); + * ellipse(width/2, height/2, size, size); + * } + * function mouseClicked(){ + * sound.stop(); + * } + *
+ */ + p5.Amplitude.prototype.getLevel = function (channel) { + if (typeof channel !== 'undefined') { + if (this.normalize) { + return this.stereoVolNorm[channel]; + } else { + return this.stereoVol[channel]; + } + } else if (this.normalize) { + return this.volNorm; + } else { + return this.volume; + } + }; + /** + * Determines whether the results of Amplitude.process() will be + * Normalized. To normalize, Amplitude finds the difference the + * loudest reading it has processed and the maximum amplitude of + * 1.0. Amplitude adds this difference to all values to produce + * results that will reliably map between 0.0 and 1.0. However, + * if a louder moment occurs, the amount that Normalize adds to + * all the values will change. Accepts an optional boolean parameter + * (true or false). Normalizing is off by default. + * + * @method toggleNormalize + * @param {boolean} [boolean] set normalize to true (1) or false (0) + */ + p5.Amplitude.prototype.toggleNormalize = function (bool) { + if (typeof bool === 'boolean') { + this.normalize = bool; + } else { + this.normalize = !this.normalize; + } + }; + /** + * Smooth Amplitude analysis by averaging with the last analysis + * frame. Off by default. + * + * @method smooth + * @param {Number} set smoothing from 0.0 <= 1 + */ + p5.Amplitude.prototype.smooth = function (s) { + if (s >= 0 && s < 1) { + this.smoothing = s; + } else { + console.log('Error: smoothing must be between 0 and 1'); + } + }; + p5.Amplitude.prototype.dispose = function () { + // remove reference from soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + this.input.disconnect(); + this.output.disconnect(); + this.input = this.processor = undefined; + this.output = undefined; + }; +}(master); +var fft; +'use strict'; +fft = function () { + var p5sound = master; + /** + *

+ * FFT (Fast Fourier Transform) is an analysis algorithm that
+ * isolates individual audio frequencies within a waveform.
+ *
+ * Once instantiated, a p5.FFT object can return an array based on
+ * two types of analyses:
+ * • FFT.waveform() computes amplitude values along the time
+ * domain. The array indices correspond to samples across a brief
+ * moment in time. Each value represents amplitude of the waveform
+ * at that sample of time.
+ * • FFT.analyze() computes amplitude values along the
+ * frequency domain. The array indices correspond to frequencies (i.e.
+ * pitches), from the lowest to the highest that humans can hear. Each
+ * value represents amplitude at that slice of the frequency spectrum.
+ * Use with getEnergy() to measure amplitude at specific
+ * frequencies, or within a range of frequencies.
+ *
+ * FFT analyzes a very short snapshot of sound called a sample
+ * buffer. It returns an array of amplitude measurements, referred
+ * to as bins. The array is 1024 bins long by default.
+ * You can change the bin array length, but it must be a power of 2
+ * between 16 and 1024 in order for the FFT algorithm to function
+ * correctly. The actual size of the FFT buffer is twice the
+ * number of bins, so given a standard sample rate, the buffer is
+ * 2048/44100 seconds long.

+ * + * + * @class p5.FFT + * @constructor + * @param {Number} [smoothing] Smooth results of Freq Spectrum. + * 0.0 < smoothing < 1.0. + * Defaults to 0.8. + * @param {Number} [bins] Length of resulting array. + * Must be a power of two between + * 16 and 1024. Defaults to 1024. + * @example + *
+ * function preload(){ + * sound = loadSound('assets/Damscray_DancingTiger.mp3'); + * } + * + * function setup(){ + * var cnv = createCanvas(100,100); + * cnv.mouseClicked(togglePlay); + * fft = new p5.FFT(); + * sound.amp(0.2); + * } + * + * function draw(){ + * background(0); + * + * var spectrum = fft.analyze(); + * noStroke(); + * fill(0,255,0); // spectrum is green + * for (var i = 0; i< spectrum.length; i++){ + * var x = map(i, 0, spectrum.length, 0, width); + * var h = -height + map(spectrum[i], 0, 255, height, 0); + * rect(x, height, width / spectrum.length, h ) + * } + * + * var waveform = fft.waveform(); + * noFill(); + * beginShape(); + * stroke(255,0,0); // waveform is red + * strokeWeight(1); + * for (var i = 0; i< waveform.length; i++){ + * var x = map(i, 0, waveform.length, 0, width); + * var y = map( waveform[i], -1, 1, 0, height); + * vertex(x,y); + * } + * endShape(); + * + * text('click to play/pause', 4, 10); + * } + * + * // fade sound if mouse is over canvas + * function togglePlay() { + * if (sound.isPlaying()) { + * sound.pause(); + * } else { + * sound.loop(); + * } + * } + *
+ */ + p5.FFT = function (smoothing, bins) { + this.input = this.analyser = p5sound.audiocontext.createAnalyser(); + Object.defineProperties(this, { + 'bins': { + get: function () { + return this.analyser.fftSize / 2; + }, + set: function (b) { + this.analyser.fftSize = b * 2; + }, + configurable: true, + enumerable: true + }, + 'smoothing': { + get: function () { + return this.analyser.smoothingTimeConstant; + }, + set: function (s) { + this.analyser.smoothingTimeConstant = s; + }, + configurable: true, + enumerable: true + } + }); + // set default smoothing and bins + this.smooth(smoothing); + this.bins = bins || 1024; + // default connections to p5sound fftMeter + p5sound.fftMeter.connect(this.analyser); + this.freqDomain = new Uint8Array(this.analyser.frequencyBinCount); + this.timeDomain = new Uint8Array(this.analyser.frequencyBinCount); + // predefined frequency ranges, these will be tweakable + this.bass = [ + 20, + 140 + ]; + this.lowMid = [ + 140, + 400 + ]; + this.mid = [ + 400, + 2600 + ]; + this.highMid = [ + 2600, + 5200 + ]; + this.treble = [ + 5200, + 14000 + ]; + // add this p5.SoundFile to the soundArray + p5sound.soundArray.push(this); + }; + /** + * Set the input source for the FFT analysis. If no source is + * provided, FFT will analyze all sound in the sketch. + * + * @method setInput + * @param {Object} [source] p5.sound object (or web audio API source node) + */ + p5.FFT.prototype.setInput = function (source) { + if (!source) { + p5sound.fftMeter.connect(this.analyser); + } else { + if (source.output) { + source.output.connect(this.analyser); + } else if (source.connect) { + source.connect(this.analyser); + } + p5sound.fftMeter.disconnect(); + } + }; + /** + * Returns an array of amplitude values (between -1.0 and +1.0) that represent + * a snapshot of amplitude readings in a single buffer. Length will be + * equal to bins (defaults to 1024). Can be used to draw the waveform + * of a sound. + * + * @method waveform + * @param {Number} [bins] Must be a power of two between + * 16 and 1024. Defaults to 1024. + * @param {String} [precision] If any value is provided, will return results + * in a Float32 Array which is more precise + * than a regular array. + * @return {Array} Array Array of amplitude values (-1 to 1) + * over time. Array length = bins. + * + */ + p5.FFT.prototype.waveform = function () { + var bins, mode, normalArray; + for (var i = 0; i < arguments.length; i++) { + if (typeof arguments[i] === 'number') { + bins = arguments[i]; + this.analyser.fftSize = bins * 2; + } + if (typeof arguments[i] === 'string') { + mode = arguments[i]; + } + } + // getFloatFrequencyData doesnt work in Safari as of 5/2015 + if (mode && !p5.prototype._isSafari()) { + timeToFloat(this, this.timeDomain); + this.analyser.getFloatTimeDomainData(this.timeDomain); + return this.timeDomain; + } else { + timeToInt(this, this.timeDomain); + this.analyser.getByteTimeDomainData(this.timeDomain); + var normalArray = new Array(); + for (var j = 0; j < this.timeDomain.length; j++) { + var scaled = p5.prototype.map(this.timeDomain[j], 0, 255, -1, 1); + normalArray.push(scaled); + } + return normalArray; + } + }; + /** + * Returns an array of amplitude values (between 0 and 255) + * across the frequency spectrum. Length is equal to FFT bins + * (1024 by default). The array indices correspond to frequencies + * (i.e. pitches), from the lowest to the highest that humans can + * hear. Each value represents amplitude at that slice of the + * frequency spectrum. 
Must be called prior to using + * getEnergy(). + * + * @method analyze + * @param {Number} [bins] Must be a power of two between + * 16 and 1024. Defaults to 1024. + * @param {Number} [scale] If "dB," returns decibel + * float measurements between + * -140 and 0 (max). + * Otherwise returns integers from 0-255. + * @return {Array} spectrum Array of energy (amplitude/volume) + * values across the frequency spectrum. + * Lowest energy (silence) = 0, highest + * possible is 255. + * @example + *
+ * var osc; + * var fft; + * + * function setup(){ + * createCanvas(100,100); + * osc = new p5.Oscillator(); + * osc.amp(0); + * osc.start(); + * fft = new p5.FFT(); + * } + * + * function draw(){ + * background(0); + * + * var freq = map(mouseX, 0, 800, 20, 15000); + * freq = constrain(freq, 1, 20000); + * osc.freq(freq); + * + * var spectrum = fft.analyze(); + * noStroke(); + * fill(0,255,0); // spectrum is green + * for (var i = 0; i< spectrum.length; i++){ + * var x = map(i, 0, spectrum.length, 0, width); + * var h = -height + map(spectrum[i], 0, 255, height, 0); + * rect(x, height, width / spectrum.length, h ); + * } + * + * stroke(255); + * text('Freq: ' + round(freq)+'Hz', 10, 10); + * + * isMouseOverCanvas(); + * } + * + * // only play sound when mouse is over canvas + * function isMouseOverCanvas() { + * var mX = mouseX, mY = mouseY; + * if (mX > 0 && mX < width && mY < height && mY > 0) { + * osc.amp(0.5, 0.2); + * } else { + * osc.amp(0, 0.2); + * } + * } + *
+ * + * + */ + p5.FFT.prototype.analyze = function () { + var mode; + for (var i = 0; i < arguments.length; i++) { + if (typeof arguments[i] === 'number') { + this.bins = arguments[i]; + this.analyser.fftSize = this.bins * 2; + } + if (typeof arguments[i] === 'string') { + mode = arguments[i]; + } + } + if (mode && mode.toLowerCase() === 'db') { + freqToFloat(this); + this.analyser.getFloatFrequencyData(this.freqDomain); + return this.freqDomain; + } else { + freqToInt(this, this.freqDomain); + this.analyser.getByteFrequencyData(this.freqDomain); + var normalArray = Array.apply([], this.freqDomain); + normalArray.length === this.analyser.fftSize; + normalArray.constructor === Array; + return normalArray; + } + }; + /** + * Returns the amount of energy (volume) at a specific + * + * frequency, or the average amount of energy between two + * frequencies. Accepts Number(s) corresponding + * to frequency (in Hz), or a String corresponding to predefined + * frequency ranges ("bass", "lowMid", "mid", "highMid", "treble"). + * Returns a range between 0 (no energy/volume at that frequency) and + * 255 (maximum energy). + * NOTE: analyze() must be called prior to getEnergy(). Analyze() + * tells the FFT to analyze frequency data, and getEnergy() uses + * the results determine the value at a specific frequency or + * range of frequencies.
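+ *
+ * A small sketch of both call styles, using placeholder names (sound, fft)
+ * and an asset path reused from the other examples in this file:
+ *
+ *   var sound, fft;
+ *   function preload() {
+ *     sound = loadSound('assets/beat.mp3');
+ *   }
+ *   function setup() {
+ *     createCanvas(100, 100);
+ *     fft = new p5.FFT();
+ *     sound.loop();
+ *   }
+ *   function draw() {
+ *     background(0);
+ *     fft.analyze();                            // must run before getEnergy()
+ *     var bass = fft.getEnergy('bass');         // predefined 20 - 140 Hz range
+ *     var treble = fft.getEnergy(5200, 14000);  // explicit range in Hz
+ *     fill(255);
+ *     rect(20, height, 20, -map(bass, 0, 255, 0, height));
+ *     rect(60, height, 20, -map(treble, 0, 255, 0, height));
+ *   }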

+ * + * @method getEnergy + * @param {Number|String} frequency1 Will return a value representing + * energy at this frequency. Alternately, + * the strings "bass", "lowMid" "mid", + * "highMid", and "treble" will return + * predefined frequency ranges. + * @param {Number} [frequency2] If a second frequency is given, + * will return average amount of + * energy that exists between the + * two frequencies. + * @return {Number} Energy Energy (volume/amplitude) from + * 0 and 255. + * + */ + p5.FFT.prototype.getEnergy = function (frequency1, frequency2) { + var nyquist = p5sound.audiocontext.sampleRate / 2; + if (frequency1 === 'bass') { + frequency1 = this.bass[0]; + frequency2 = this.bass[1]; + } else if (frequency1 === 'lowMid') { + frequency1 = this.lowMid[0]; + frequency2 = this.lowMid[1]; + } else if (frequency1 === 'mid') { + frequency1 = this.mid[0]; + frequency2 = this.mid[1]; + } else if (frequency1 === 'highMid') { + frequency1 = this.highMid[0]; + frequency2 = this.highMid[1]; + } else if (frequency1 === 'treble') { + frequency1 = this.treble[0]; + frequency2 = this.treble[1]; + } + if (typeof frequency1 !== 'number') { + throw 'invalid input for getEnergy()'; + } else if (!frequency2) { + var index = Math.round(frequency1 / nyquist * this.freqDomain.length); + return this.freqDomain[index]; + } else if (frequency1 && frequency2) { + // if second is higher than first + if (frequency1 > frequency2) { + var swap = frequency2; + frequency2 = frequency1; + frequency1 = swap; + } + var lowIndex = Math.round(frequency1 / nyquist * this.freqDomain.length); + var highIndex = Math.round(frequency2 / nyquist * this.freqDomain.length); + var total = 0; + var numFrequencies = 0; + // add up all of the values for the frequencies + for (var i = lowIndex; i <= highIndex; i++) { + total += this.freqDomain[i]; + numFrequencies += 1; + } + // divide by total number of frequencies + var toReturn = total / numFrequencies; + return toReturn; + } else { + throw 'invalid input for getEnergy()'; + } + }; + // compatability with v.012, changed to getEnergy in v.0121. Will be deprecated... + p5.FFT.prototype.getFreq = function (freq1, freq2) { + console.log('getFreq() is deprecated. Please use getEnergy() instead.'); + var x = this.getEnergy(freq1, freq2); + return x; + }; + /** + * Returns the + * + * spectral centroid of the input signal. + * NOTE: analyze() must be called prior to getCentroid(). Analyze() + * tells the FFT to analyze frequency data, and getCentroid() uses + * the results determine the spectral centroid.

+ * + * @method getCentroid + * @return {Number} Spectral Centroid Frequency Frequency of the spectral centroid in Hz. + * + * + * @example + *
+ * + * + *function setup(){ + * cnv = createCanvas(800,400); + * sound = new p5.AudioIn(); + * sound.start(); + * fft = new p5.FFT(); + * sound.connect(fft); + *} + * + * + *function draw(){ + * + * var centroidplot = 0.0; + * var spectralCentroid = 0; + * + * + * background(0); + * stroke(0,255,0); + * var spectrum = fft.analyze(); + * fill(0,255,0); // spectrum is green + * + * //draw the spectrum + * + * for (var i = 0; i< spectrum.length; i++){ + * var x = map(log(i), 0, log(spectrum.length), 0, width); + * var h = map(spectrum[i], 0, 255, 0, height); + * var rectangle_width = (log(i+1)-log(i))*(width/log(spectrum.length)); + * rect(x, height, rectangle_width, -h ) + * } + + * var nyquist = 22050; + * + * // get the centroid + * spectralCentroid = fft.getCentroid(); + * + * // the mean_freq_index calculation is for the display. + * var mean_freq_index = spectralCentroid/(nyquist/spectrum.length); + * + * centroidplot = map(log(mean_freq_index), 0, log(spectrum.length), 0, width); + * + * + * stroke(255,0,0); // the line showing where the centroid is will be red + * + * rect(centroidplot, 0, width / spectrum.length, height) + * noStroke(); + * fill(255,255,255); // text is white + * textSize(40); + * text("centroid: "+round(spectralCentroid)+" Hz", 10, 40); + *} + *
+ */ + p5.FFT.prototype.getCentroid = function () { + var nyquist = p5sound.audiocontext.sampleRate / 2; + var cumulative_sum = 0; + var centroid_normalization = 0; + for (var i = 0; i < this.freqDomain.length; i++) { + cumulative_sum += i * this.freqDomain[i]; + centroid_normalization += this.freqDomain[i]; + } + var mean_freq_index = 0; + if (centroid_normalization !== 0) { + mean_freq_index = cumulative_sum / centroid_normalization; + } + var spec_centroid_freq = mean_freq_index * (nyquist / this.freqDomain.length); + return spec_centroid_freq; + }; + /** + * Smooth FFT analysis by averaging with the last analysis frame. + * + * @method smooth + * @param {Number} smoothing 0.0 < smoothing < 1.0. + * Defaults to 0.8. + */ + p5.FFT.prototype.smooth = function (s) { + if (typeof s !== 'undefined') { + this.smoothing = s; + } + return this.smoothing; + }; + p5.FFT.prototype.dispose = function () { + // remove reference from soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + this.analyser.disconnect(); + this.analyser = undefined; + }; + /** + * Returns an array of average amplitude values for a given number + * of frequency bands split equally. N defaults to 16. + * NOTE: analyze() must be called prior to linAverages(). Analyze() + * tells the FFT to analyze frequency data, and linAverages() uses + * the results to group them into a smaller set of averages.

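A minimal usage sketch (editorial illustration, assuming the same mic/fft setup shown in the getCentroid example above) of the linAverages() grouping described here:

function draw() {
  background(0);
  fft.analyze();                        // analyze() must run before linAverages()
  var averages = fft.linAverages(16);   // 16 equally sized frequency groups
  var w = width / averages.length;
  fill(0, 255, 0);
  for (var i = 0; i < averages.length; i++) {
    var h = map(averages[i], 0, 255, 0, height);
    rect(i * w, height, w, -h);         // one bar per averaged group
  }
}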
+ * + * @method linAverages + * @param {Number} N Number of returned frequency groups + * @return {Array} linearAverages Array of average amplitude values for each group + */ + p5.FFT.prototype.linAverages = function (N) { + var N = N || 16; + // This prevents undefined, null or 0 values of N + var spectrum = this.freqDomain; + var spectrumLength = spectrum.length; + var spectrumStep = Math.floor(spectrumLength / N); + var linearAverages = new Array(N); + // Keep a second index for the current average group and place the values accordingly + // with only one loop in the spectrum data + var groupIndex = 0; + for (var specIndex = 0; specIndex < spectrumLength; specIndex++) { + linearAverages[groupIndex] = linearAverages[groupIndex] !== undefined ? (linearAverages[groupIndex] + spectrum[specIndex]) / 2 : spectrum[specIndex]; + // Increase the group index when the last element of the group is processed + if (specIndex % spectrumStep === spectrumStep - 1) { + groupIndex++; + } + } + return linearAverages; + }; + /** + * Returns an array of average amplitude values of the spectrum, for a given + * set of + * Octave Bands + * NOTE: analyze() must be called prior to logAverages(). Analyze() + * tells the FFT to analyze frequency data, and logAverages() uses + * the results to group them into a smaller set of averages.

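A minimal usage sketch (editorial illustration, same assumed mic/fft setup as above) combining getOctaveBands(), defined further down in this file, with logAverages():

function draw() {
  background(0);
  fft.analyze();                          // analyze() must run before logAverages()
  var bands = fft.getOctaveBands(3);      // 1/3 octave bands (~31 bands)
  var averages = fft.logAverages(bands);  // one averaged amplitude per band
  var w = width / averages.length;
  fill(0, 255, 0);
  for (var i = 0; i < averages.length; i++) {
    var h = map(averages[i], 0, 255, 0, height);
    rect(i * w, height, w, -h);
  }
}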
+ * + * @method logAverages + * @param {Array} octaveBands Array of Octave Bands objects for grouping + * @return {Array} logAverages Array of average amplitude values for each group + */ + p5.FFT.prototype.logAverages = function (octaveBands) { + var nyquist = p5sound.audiocontext.sampleRate / 2; + var spectrum = this.freqDomain; + var spectrumLength = spectrum.length; + var logAverages = new Array(octaveBands.length); + // Keep a second index for the current average group and place the values accordingly + // With only one loop in the spectrum data + var octaveIndex = 0; + for (var specIndex = 0; specIndex < spectrumLength; specIndex++) { + var specIndexFrequency = Math.round(specIndex * nyquist / this.freqDomain.length); + // Increase the group index if the current frequency exceeds the limits of the band + if (specIndexFrequency > octaveBands[octaveIndex].hi) { + octaveIndex++; + } + logAverages[octaveIndex] = logAverages[octaveIndex] !== undefined ? (logAverages[octaveIndex] + spectrum[specIndex]) / 2 : spectrum[specIndex]; + } + return logAverages; + }; + /** + * Calculates and Returns the 1/N + * Octave Bands + * N defaults to 3 and minimum central frequency to 15.625Hz. + * (1/3 Octave Bands ~= 31 Frequency Bands) + * Setting fCtr0 to a central value of a higher octave will ignore the lower bands + * and produce less frequency groups. + * + * @method getOctaveBands + * @param {Number} N Specifies the 1/N type of generated octave bands + * @param {Number} fCtr0 Minimum central frequency for the lowest band + * @return {Array} octaveBands Array of octave band objects with their bounds + */ + p5.FFT.prototype.getOctaveBands = function (N, fCtr0) { + var N = N || 3; + // Default to 1/3 Octave Bands + var fCtr0 = fCtr0 || 15.625; + // Minimum central frequency, defaults to 15.625Hz + var octaveBands = []; + var lastFrequencyBand = { + lo: fCtr0 / Math.pow(2, 1 / (2 * N)), + ctr: fCtr0, + hi: fCtr0 * Math.pow(2, 1 / (2 * N)) + }; + octaveBands.push(lastFrequencyBand); + var nyquist = p5sound.audiocontext.sampleRate / 2; + while (lastFrequencyBand.hi < nyquist) { + var newFrequencyBand = {}; + newFrequencyBand.lo = lastFrequencyBand.hi; + newFrequencyBand.ctr = lastFrequencyBand.ctr * Math.pow(2, 1 / N); + newFrequencyBand.hi = newFrequencyBand.ctr * Math.pow(2, 1 / (2 * N)); + octaveBands.push(newFrequencyBand); + lastFrequencyBand = newFrequencyBand; + } + return octaveBands; + }; + // helper methods to convert type from float (dB) to int (0-255) + var freqToFloat = function (fft) { + if (fft.freqDomain instanceof Float32Array === false) { + fft.freqDomain = new Float32Array(fft.analyser.frequencyBinCount); + } + }; + var freqToInt = function (fft) { + if (fft.freqDomain instanceof Uint8Array === false) { + fft.freqDomain = new Uint8Array(fft.analyser.frequencyBinCount); + } + }; + var timeToFloat = function (fft) { + if (fft.timeDomain instanceof Float32Array === false) { + fft.timeDomain = new Float32Array(fft.analyser.frequencyBinCount); + } + }; + var timeToInt = function (fft) { + if (fft.timeDomain instanceof Uint8Array === false) { + fft.timeDomain = new Uint8Array(fft.analyser.frequencyBinCount); + } + }; +}(master); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_core_Tone; +Tone_core_Tone = function () { + 'use strict'; + var Tone = function (inputs, outputs) { + if (this.isUndef(inputs) || inputs === 1) { + this.input = this.context.createGain(); + } else if (inputs > 1) { + this.input = new Array(inputs); + } + if 
(this.isUndef(outputs) || outputs === 1) { + this.output = this.context.createGain(); + } else if (outputs > 1) { + this.output = new Array(inputs); + } + }; + Tone.prototype.set = function (params, value, rampTime) { + if (this.isObject(params)) { + rampTime = value; + } else if (this.isString(params)) { + var tmpObj = {}; + tmpObj[params] = value; + params = tmpObj; + } + paramLoop: + for (var attr in params) { + value = params[attr]; + var parent = this; + if (attr.indexOf('.') !== -1) { + var attrSplit = attr.split('.'); + for (var i = 0; i < attrSplit.length - 1; i++) { + parent = parent[attrSplit[i]]; + if (parent instanceof Tone) { + attrSplit.splice(0, i + 1); + var innerParam = attrSplit.join('.'); + parent.set(innerParam, value); + continue paramLoop; + } + } + attr = attrSplit[attrSplit.length - 1]; + } + var param = parent[attr]; + if (this.isUndef(param)) { + continue; + } + if (Tone.Signal && param instanceof Tone.Signal || Tone.Param && param instanceof Tone.Param) { + if (param.value !== value) { + if (this.isUndef(rampTime)) { + param.value = value; + } else { + param.rampTo(value, rampTime); + } + } + } else if (param instanceof AudioParam) { + if (param.value !== value) { + param.value = value; + } + } else if (param instanceof Tone) { + param.set(value); + } else if (param !== value) { + parent[attr] = value; + } + } + return this; + }; + Tone.prototype.get = function (params) { + if (this.isUndef(params)) { + params = this._collectDefaults(this.constructor); + } else if (this.isString(params)) { + params = [params]; + } + var ret = {}; + for (var i = 0; i < params.length; i++) { + var attr = params[i]; + var parent = this; + var subRet = ret; + if (attr.indexOf('.') !== -1) { + var attrSplit = attr.split('.'); + for (var j = 0; j < attrSplit.length - 1; j++) { + var subAttr = attrSplit[j]; + subRet[subAttr] = subRet[subAttr] || {}; + subRet = subRet[subAttr]; + parent = parent[subAttr]; + } + attr = attrSplit[attrSplit.length - 1]; + } + var param = parent[attr]; + if (this.isObject(params[attr])) { + subRet[attr] = param.get(); + } else if (Tone.Signal && param instanceof Tone.Signal) { + subRet[attr] = param.value; + } else if (Tone.Param && param instanceof Tone.Param) { + subRet[attr] = param.value; + } else if (param instanceof AudioParam) { + subRet[attr] = param.value; + } else if (param instanceof Tone) { + subRet[attr] = param.get(); + } else if (!this.isFunction(param) && !this.isUndef(param)) { + subRet[attr] = param; + } + } + return ret; + }; + Tone.prototype._collectDefaults = function (constr) { + var ret = []; + if (!this.isUndef(constr.defaults)) { + ret = Object.keys(constr.defaults); + } + if (!this.isUndef(constr._super)) { + var superDefs = this._collectDefaults(constr._super); + for (var i = 0; i < superDefs.length; i++) { + if (ret.indexOf(superDefs[i]) === -1) { + ret.push(superDefs[i]); + } + } + } + return ret; + }; + Tone.prototype.toString = function () { + for (var className in Tone) { + var isLetter = className[0].match(/^[A-Z]$/); + var sameConstructor = Tone[className] === this.constructor; + if (this.isFunction(Tone[className]) && isLetter && sameConstructor) { + return className; + } + } + return 'Tone'; + }; + Object.defineProperty(Tone.prototype, 'numberOfInputs', { + get: function () { + if (this.input) { + if (this.isArray(this.input)) { + return this.input.length; + } else { + return 1; + } + } else { + return 0; + } + } + }); + Object.defineProperty(Tone.prototype, 'numberOfOutputs', { + get: function () { + if (this.output) { + 
if (this.isArray(this.output)) { + return this.output.length; + } else { + return 1; + } + } else { + return 0; + } + } + }); + Tone.prototype.dispose = function () { + if (!this.isUndef(this.input)) { + if (this.input instanceof AudioNode) { + this.input.disconnect(); + } + this.input = null; + } + if (!this.isUndef(this.output)) { + if (this.output instanceof AudioNode) { + this.output.disconnect(); + } + this.output = null; + } + return this; + }; + Tone.prototype.connect = function (unit, outputNum, inputNum) { + if (Array.isArray(this.output)) { + outputNum = this.defaultArg(outputNum, 0); + this.output[outputNum].connect(unit, 0, inputNum); + } else { + this.output.connect(unit, outputNum, inputNum); + } + return this; + }; + Tone.prototype.disconnect = function (destination, outputNum, inputNum) { + if (this.isArray(this.output)) { + if (this.isNumber(destination)) { + this.output[destination].disconnect(); + } else { + outputNum = this.defaultArg(outputNum, 0); + this.output[outputNum].disconnect(destination, 0, inputNum); + } + } else { + this.output.disconnect.apply(this.output, arguments); + } + }; + Tone.prototype.connectSeries = function () { + if (arguments.length > 1) { + var currentUnit = arguments[0]; + for (var i = 1; i < arguments.length; i++) { + var toUnit = arguments[i]; + currentUnit.connect(toUnit); + currentUnit = toUnit; + } + } + return this; + }; + Tone.prototype.chain = function () { + if (arguments.length > 0) { + var currentUnit = this; + for (var i = 0; i < arguments.length; i++) { + var toUnit = arguments[i]; + currentUnit.connect(toUnit); + currentUnit = toUnit; + } + } + return this; + }; + Tone.prototype.fan = function () { + if (arguments.length > 0) { + for (var i = 0; i < arguments.length; i++) { + this.connect(arguments[i]); + } + } + return this; + }; + AudioNode.prototype.chain = Tone.prototype.chain; + AudioNode.prototype.fan = Tone.prototype.fan; + Tone.prototype.defaultArg = function (given, fallback) { + if (this.isObject(given) && this.isObject(fallback)) { + var ret = {}; + for (var givenProp in given) { + ret[givenProp] = this.defaultArg(fallback[givenProp], given[givenProp]); + } + for (var fallbackProp in fallback) { + ret[fallbackProp] = this.defaultArg(given[fallbackProp], fallback[fallbackProp]); + } + return ret; + } else { + return this.isUndef(given) ? 
fallback : given; + } + }; + Tone.prototype.optionsObject = function (values, keys, defaults) { + var options = {}; + if (values.length === 1 && this.isObject(values[0])) { + options = values[0]; + } else { + for (var i = 0; i < keys.length; i++) { + options[keys[i]] = values[i]; + } + } + if (!this.isUndef(defaults)) { + return this.defaultArg(options, defaults); + } else { + return options; + } + }; + Tone.prototype.isUndef = function (val) { + return typeof val === 'undefined'; + }; + Tone.prototype.isFunction = function (val) { + return typeof val === 'function'; + }; + Tone.prototype.isNumber = function (arg) { + return typeof arg === 'number'; + }; + Tone.prototype.isObject = function (arg) { + return Object.prototype.toString.call(arg) === '[object Object]' && arg.constructor === Object; + }; + Tone.prototype.isBoolean = function (arg) { + return typeof arg === 'boolean'; + }; + Tone.prototype.isArray = function (arg) { + return Array.isArray(arg); + }; + Tone.prototype.isString = function (arg) { + return typeof arg === 'string'; + }; + Tone.noOp = function () { + }; + Tone.prototype._readOnly = function (property) { + if (Array.isArray(property)) { + for (var i = 0; i < property.length; i++) { + this._readOnly(property[i]); + } + } else { + Object.defineProperty(this, property, { + writable: false, + enumerable: true + }); + } + }; + Tone.prototype._writable = function (property) { + if (Array.isArray(property)) { + for (var i = 0; i < property.length; i++) { + this._writable(property[i]); + } + } else { + Object.defineProperty(this, property, { writable: true }); + } + }; + Tone.State = { + Started: 'started', + Stopped: 'stopped', + Paused: 'paused' + }; + Tone.prototype.equalPowerScale = function (percent) { + var piFactor = 0.5 * Math.PI; + return Math.sin(percent * piFactor); + }; + Tone.prototype.dbToGain = function (db) { + return Math.pow(2, db / 6); + }; + Tone.prototype.gainToDb = function (gain) { + return 20 * (Math.log(gain) / Math.LN10); + }; + Tone.prototype.intervalToFrequencyRatio = function (interval) { + return Math.pow(2, interval / 12); + }; + Tone.prototype.now = function () { + return Tone.context.now(); + }; + Tone.now = function () { + return Tone.context.now(); + }; + Tone.extend = function (child, parent) { + if (Tone.prototype.isUndef(parent)) { + parent = Tone; + } + function TempConstructor() { + } + TempConstructor.prototype = parent.prototype; + child.prototype = new TempConstructor(); + child.prototype.constructor = child; + child._super = parent; + }; + var audioContext; + Object.defineProperty(Tone, 'context', { + get: function () { + return audioContext; + }, + set: function (context) { + if (Tone.Context && context instanceof Tone.Context) { + audioContext = context; + } else { + audioContext = new Tone.Context(context); + } + if (Tone.Context) { + Tone.Context.emit('init', audioContext); + } + } + }); + Object.defineProperty(Tone.prototype, 'context', { + get: function () { + return Tone.context; + } + }); + Tone.setContext = function (ctx) { + Tone.context = ctx; + }; + Object.defineProperty(Tone.prototype, 'blockTime', { + get: function () { + return 128 / this.context.sampleRate; + } + }); + Object.defineProperty(Tone.prototype, 'sampleTime', { + get: function () { + return 1 / this.context.sampleRate; + } + }); + Object.defineProperty(Tone, 'supported', { + get: function () { + var hasAudioContext = window.hasOwnProperty('AudioContext') || window.hasOwnProperty('webkitAudioContext'); + var hasPromises = window.hasOwnProperty('Promise'); + 
var hasWorkers = window.hasOwnProperty('Worker'); + return hasAudioContext && hasPromises && hasWorkers; + } + }); + Tone.version = 'r10'; + if (!window.TONE_SILENCE_VERSION_LOGGING) { + } + return Tone; +}(); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_SignalBase; +Tone_signal_SignalBase = function (Tone) { + 'use strict'; + Tone.SignalBase = function () { + }; + Tone.extend(Tone.SignalBase); + Tone.SignalBase.prototype.connect = function (node, outputNumber, inputNumber) { + if (Tone.Signal && Tone.Signal === node.constructor || Tone.Param && Tone.Param === node.constructor || Tone.TimelineSignal && Tone.TimelineSignal === node.constructor) { + node._param.cancelScheduledValues(0); + node._param.value = 0; + node.overridden = true; + } else if (node instanceof AudioParam) { + node.cancelScheduledValues(0); + node.value = 0; + } + Tone.prototype.connect.call(this, node, outputNumber, inputNumber); + return this; + }; + return Tone.SignalBase; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_WaveShaper; +Tone_signal_WaveShaper = function (Tone) { + 'use strict'; + Tone.WaveShaper = function (mapping, bufferLen) { + this._shaper = this.input = this.output = this.context.createWaveShaper(); + this._curve = null; + if (Array.isArray(mapping)) { + this.curve = mapping; + } else if (isFinite(mapping) || this.isUndef(mapping)) { + this._curve = new Float32Array(this.defaultArg(mapping, 1024)); + } else if (this.isFunction(mapping)) { + this._curve = new Float32Array(this.defaultArg(bufferLen, 1024)); + this.setMap(mapping); + } + }; + Tone.extend(Tone.WaveShaper, Tone.SignalBase); + Tone.WaveShaper.prototype.setMap = function (mapping) { + for (var i = 0, len = this._curve.length; i < len; i++) { + var normalized = i / (len - 1) * 2 - 1; + this._curve[i] = mapping(normalized, i); + } + this._shaper.curve = this._curve; + return this; + }; + Object.defineProperty(Tone.WaveShaper.prototype, 'curve', { + get: function () { + return this._shaper.curve; + }, + set: function (mapping) { + this._curve = new Float32Array(mapping); + this._shaper.curve = this._curve; + } + }); + Object.defineProperty(Tone.WaveShaper.prototype, 'oversample', { + get: function () { + return this._shaper.oversample; + }, + set: function (oversampling) { + if ([ + 'none', + '2x', + '4x' + ].indexOf(oversampling) !== -1) { + this._shaper.oversample = oversampling; + } else { + throw new RangeError('Tone.WaveShaper: oversampling must be either \'none\', \'2x\', or \'4x\''); + } + } + }); + Tone.WaveShaper.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._shaper.disconnect(); + this._shaper = null; + this._curve = null; + return this; + }; + return Tone.WaveShaper; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_type_TimeBase; +Tone_type_TimeBase = function (Tone) { + Tone.TimeBase = function (val, units) { + if (this instanceof Tone.TimeBase) { + this._expr = this._noOp; + if (val instanceof Tone.TimeBase) { + this.copy(val); + } else if (!this.isUndef(units) || this.isNumber(val)) { + units = this.defaultArg(units, this._defaultUnits); + var method = this._primaryExpressions[units].method; + this._expr = method.bind(this, val); + } else if (this.isString(val)) { + this.set(val); + } else if (this.isUndef(val)) { + this._expr = this._defaultExpr(); + } + } else { + return new 
Tone.TimeBase(val, units); + } + }; + Tone.extend(Tone.TimeBase); + Tone.TimeBase.prototype.set = function (exprString) { + this._expr = this._parseExprString(exprString); + return this; + }; + Tone.TimeBase.prototype.clone = function () { + var instance = new this.constructor(); + instance.copy(this); + return instance; + }; + Tone.TimeBase.prototype.copy = function (time) { + var val = time._expr(); + return this.set(val); + }; + Tone.TimeBase.prototype._primaryExpressions = { + 'n': { + regexp: /^(\d+)n/i, + method: function (value) { + value = parseInt(value); + if (value === 1) { + return this._beatsToUnits(this._timeSignature()); + } else { + return this._beatsToUnits(4 / value); + } + } + }, + 't': { + regexp: /^(\d+)t/i, + method: function (value) { + value = parseInt(value); + return this._beatsToUnits(8 / (parseInt(value) * 3)); + } + }, + 'm': { + regexp: /^(\d+)m/i, + method: function (value) { + return this._beatsToUnits(parseInt(value) * this._timeSignature()); + } + }, + 'i': { + regexp: /^(\d+)i/i, + method: function (value) { + return this._ticksToUnits(parseInt(value)); + } + }, + 'hz': { + regexp: /^(\d+(?:\.\d+)?)hz/i, + method: function (value) { + return this._frequencyToUnits(parseFloat(value)); + } + }, + 'tr': { + regexp: /^(\d+(?:\.\d+)?):(\d+(?:\.\d+)?):?(\d+(?:\.\d+)?)?/, + method: function (m, q, s) { + var total = 0; + if (m && m !== '0') { + total += this._beatsToUnits(this._timeSignature() * parseFloat(m)); + } + if (q && q !== '0') { + total += this._beatsToUnits(parseFloat(q)); + } + if (s && s !== '0') { + total += this._beatsToUnits(parseFloat(s) / 4); + } + return total; + } + }, + 's': { + regexp: /^(\d+(?:\.\d+)?s)/, + method: function (value) { + return this._secondsToUnits(parseFloat(value)); + } + }, + 'samples': { + regexp: /^(\d+)samples/, + method: function (value) { + return parseInt(value) / this.context.sampleRate; + } + }, + 'default': { + regexp: /^(\d+(?:\.\d+)?)/, + method: function (value) { + return this._primaryExpressions[this._defaultUnits].method.call(this, value); + } + } + }; + Tone.TimeBase.prototype._binaryExpressions = { + '+': { + regexp: /^\+/, + precedence: 2, + method: function (lh, rh) { + return lh() + rh(); + } + }, + '-': { + regexp: /^\-/, + precedence: 2, + method: function (lh, rh) { + return lh() - rh(); + } + }, + '*': { + regexp: /^\*/, + precedence: 1, + method: function (lh, rh) { + return lh() * rh(); + } + }, + '/': { + regexp: /^\//, + precedence: 1, + method: function (lh, rh) { + return lh() / rh(); + } + } + }; + Tone.TimeBase.prototype._unaryExpressions = { + 'neg': { + regexp: /^\-/, + method: function (lh) { + return -lh(); + } + } + }; + Tone.TimeBase.prototype._syntaxGlue = { + '(': { regexp: /^\(/ }, + ')': { regexp: /^\)/ } + }; + Tone.TimeBase.prototype._tokenize = function (expr) { + var position = -1; + var tokens = []; + while (expr.length > 0) { + expr = expr.trim(); + var token = getNextToken(expr, this); + tokens.push(token); + expr = expr.substr(token.value.length); + } + function getNextToken(expr, context) { + var expressions = [ + '_binaryExpressions', + '_unaryExpressions', + '_primaryExpressions', + '_syntaxGlue' + ]; + for (var i = 0; i < expressions.length; i++) { + var group = context[expressions[i]]; + for (var opName in group) { + var op = group[opName]; + var reg = op.regexp; + var match = expr.match(reg); + if (match !== null) { + return { + method: op.method, + precedence: op.precedence, + regexp: op.regexp, + value: match[0] + }; + } + } + } + throw new 
SyntaxError('Tone.TimeBase: Unexpected token ' + expr); + } + return { + next: function () { + return tokens[++position]; + }, + peek: function () { + return tokens[position + 1]; + } + }; + }; + Tone.TimeBase.prototype._matchGroup = function (token, group, prec) { + var ret = false; + if (!this.isUndef(token)) { + for (var opName in group) { + var op = group[opName]; + if (op.regexp.test(token.value)) { + if (!this.isUndef(prec)) { + if (op.precedence === prec) { + return op; + } + } else { + return op; + } + } + } + } + return ret; + }; + Tone.TimeBase.prototype._parseBinary = function (lexer, precedence) { + if (this.isUndef(precedence)) { + precedence = 2; + } + var expr; + if (precedence < 0) { + expr = this._parseUnary(lexer); + } else { + expr = this._parseBinary(lexer, precedence - 1); + } + var token = lexer.peek(); + while (token && this._matchGroup(token, this._binaryExpressions, precedence)) { + token = lexer.next(); + expr = token.method.bind(this, expr, this._parseBinary(lexer, precedence - 1)); + token = lexer.peek(); + } + return expr; + }; + Tone.TimeBase.prototype._parseUnary = function (lexer) { + var token, expr; + token = lexer.peek(); + var op = this._matchGroup(token, this._unaryExpressions); + if (op) { + token = lexer.next(); + expr = this._parseUnary(lexer); + return op.method.bind(this, expr); + } + return this._parsePrimary(lexer); + }; + Tone.TimeBase.prototype._parsePrimary = function (lexer) { + var token, expr; + token = lexer.peek(); + if (this.isUndef(token)) { + throw new SyntaxError('Tone.TimeBase: Unexpected end of expression'); + } + if (this._matchGroup(token, this._primaryExpressions)) { + token = lexer.next(); + var matching = token.value.match(token.regexp); + return token.method.bind(this, matching[1], matching[2], matching[3]); + } + if (token && token.value === '(') { + lexer.next(); + expr = this._parseBinary(lexer); + token = lexer.next(); + if (!(token && token.value === ')')) { + throw new SyntaxError('Expected )'); + } + return expr; + } + throw new SyntaxError('Tone.TimeBase: Cannot process token ' + token.value); + }; + Tone.TimeBase.prototype._parseExprString = function (exprString) { + if (!this.isString(exprString)) { + exprString = exprString.toString(); + } + var lexer = this._tokenize(exprString); + var tree = this._parseBinary(lexer); + return tree; + }; + Tone.TimeBase.prototype._noOp = function () { + return 0; + }; + Tone.TimeBase.prototype._defaultExpr = function () { + return this._noOp; + }; + Tone.TimeBase.prototype._defaultUnits = 's'; + Tone.TimeBase.prototype._frequencyToUnits = function (freq) { + return 1 / freq; + }; + Tone.TimeBase.prototype._beatsToUnits = function (beats) { + return 60 / Tone.Transport.bpm.value * beats; + }; + Tone.TimeBase.prototype._secondsToUnits = function (seconds) { + return seconds; + }; + Tone.TimeBase.prototype._ticksToUnits = function (ticks) { + return ticks * (this._beatsToUnits(1) / Tone.Transport.PPQ); + }; + Tone.TimeBase.prototype._timeSignature = function () { + return Tone.Transport.timeSignature; + }; + Tone.TimeBase.prototype._pushExpr = function (val, name, units) { + if (!(val instanceof Tone.TimeBase)) { + val = new this.constructor(val, units); + } + this._expr = this._binaryExpressions[name].method.bind(this, this._expr, val._expr); + return this; + }; + Tone.TimeBase.prototype.add = function (val, units) { + return this._pushExpr(val, '+', units); + }; + Tone.TimeBase.prototype.sub = function (val, units) { + return this._pushExpr(val, '-', units); + }; + 
Tone.TimeBase.prototype.mult = function (val, units) { + return this._pushExpr(val, '*', units); + }; + Tone.TimeBase.prototype.div = function (val, units) { + return this._pushExpr(val, '/', units); + }; + Tone.TimeBase.prototype.valueOf = function () { + return this._expr(); + }; + Tone.TimeBase.prototype.dispose = function () { + this._expr = null; + }; + return Tone.TimeBase; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_type_Time; +Tone_type_Time = function (Tone) { + Tone.Time = function (val, units) { + if (this instanceof Tone.Time) { + this._plusNow = false; + Tone.TimeBase.call(this, val, units); + } else { + return new Tone.Time(val, units); + } + }; + Tone.extend(Tone.Time, Tone.TimeBase); + Tone.Time.prototype._unaryExpressions = Object.create(Tone.TimeBase.prototype._unaryExpressions); + Tone.Time.prototype._unaryExpressions.quantize = { + regexp: /^@/, + method: function (rh) { + return Tone.Transport.nextSubdivision(rh()); + } + }; + Tone.Time.prototype._unaryExpressions.now = { + regexp: /^\+/, + method: function (lh) { + this._plusNow = true; + return lh(); + } + }; + Tone.Time.prototype.quantize = function (subdiv, percent) { + percent = this.defaultArg(percent, 1); + this._expr = function (expr, subdivision, percent) { + expr = expr(); + subdivision = subdivision.toSeconds(); + var multiple = Math.round(expr / subdivision); + var ideal = multiple * subdivision; + var diff = ideal - expr; + return expr + diff * percent; + }.bind(this, this._expr, new this.constructor(subdiv), percent); + return this; + }; + Tone.Time.prototype.addNow = function () { + this._plusNow = true; + return this; + }; + Tone.Time.prototype._defaultExpr = function () { + this._plusNow = true; + return this._noOp; + }; + Tone.Time.prototype.copy = function (time) { + Tone.TimeBase.prototype.copy.call(this, time); + this._plusNow = time._plusNow; + return this; + }; + Tone.Time.prototype.toNotation = function () { + var time = this.toSeconds(); + var testNotations = [ + '1m', + '2n', + '4n', + '8n', + '16n', + '32n', + '64n', + '128n' + ]; + var retNotation = this._toNotationHelper(time, testNotations); + var testTripletNotations = [ + '1m', + '2n', + '2t', + '4n', + '4t', + '8n', + '8t', + '16n', + '16t', + '32n', + '32t', + '64n', + '64t', + '128n' + ]; + var retTripletNotation = this._toNotationHelper(time, testTripletNotations); + if (retTripletNotation.split('+').length < retNotation.split('+').length) { + return retTripletNotation; + } else { + return retNotation; + } + }; + Tone.Time.prototype._toNotationHelper = function (units, testNotations) { + var threshold = this._notationToUnits(testNotations[testNotations.length - 1]); + var retNotation = ''; + for (var i = 0; i < testNotations.length; i++) { + var notationTime = this._notationToUnits(testNotations[i]); + var multiple = units / notationTime; + var floatingPointError = 0.000001; + if (1 - multiple % 1 < floatingPointError) { + multiple += floatingPointError; + } + multiple = Math.floor(multiple); + if (multiple > 0) { + if (multiple === 1) { + retNotation += testNotations[i]; + } else { + retNotation += multiple.toString() + '*' + testNotations[i]; + } + units -= multiple * notationTime; + if (units < threshold) { + break; + } else { + retNotation += ' + '; + } + } + } + if (retNotation === '') { + retNotation = '0'; + } + return retNotation; + }; + Tone.Time.prototype._notationToUnits = function (notation) { + var primaryExprs = this._primaryExpressions; + var 
notationExprs = [ + primaryExprs.n, + primaryExprs.t, + primaryExprs.m + ]; + for (var i = 0; i < notationExprs.length; i++) { + var expr = notationExprs[i]; + var match = notation.match(expr.regexp); + if (match) { + return expr.method.call(this, match[1]); + } + } + }; + Tone.Time.prototype.toBarsBeatsSixteenths = function () { + var quarterTime = this._beatsToUnits(1); + var quarters = this.toSeconds() / quarterTime; + var measures = Math.floor(quarters / this._timeSignature()); + var sixteenths = quarters % 1 * 4; + quarters = Math.floor(quarters) % this._timeSignature(); + sixteenths = sixteenths.toString(); + if (sixteenths.length > 3) { + sixteenths = parseFloat(sixteenths).toFixed(3); + } + var progress = [ + measures, + quarters, + sixteenths + ]; + return progress.join(':'); + }; + Tone.Time.prototype.toTicks = function () { + var quarterTime = this._beatsToUnits(1); + var quarters = this.valueOf() / quarterTime; + return Math.floor(quarters * Tone.Transport.PPQ); + }; + Tone.Time.prototype.toSamples = function () { + return this.toSeconds() * this.context.sampleRate; + }; + Tone.Time.prototype.toFrequency = function () { + return 1 / this.toSeconds(); + }; + Tone.Time.prototype.toSeconds = function () { + return this.valueOf(); + }; + Tone.Time.prototype.toMilliseconds = function () { + return this.toSeconds() * 1000; + }; + Tone.Time.prototype.valueOf = function () { + var val = this._expr(); + return val + (this._plusNow ? this.now() : 0); + }; + return Tone.Time; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_type_Frequency; +Tone_type_Frequency = function (Tone) { + Tone.Frequency = function (val, units) { + if (this instanceof Tone.Frequency) { + Tone.TimeBase.call(this, val, units); + } else { + return new Tone.Frequency(val, units); + } + }; + Tone.extend(Tone.Frequency, Tone.TimeBase); + Tone.Frequency.prototype._primaryExpressions = Object.create(Tone.TimeBase.prototype._primaryExpressions); + Tone.Frequency.prototype._primaryExpressions.midi = { + regexp: /^(\d+(?:\.\d+)?midi)/, + method: function (value) { + return this.midiToFrequency(value); + } + }; + Tone.Frequency.prototype._primaryExpressions.note = { + regexp: /^([a-g]{1}(?:b|#|x|bb)?)(-?[0-9]+)/i, + method: function (pitch, octave) { + var index = noteToScaleIndex[pitch.toLowerCase()]; + var noteNumber = index + (parseInt(octave) + 1) * 12; + return this.midiToFrequency(noteNumber); + } + }; + Tone.Frequency.prototype._primaryExpressions.tr = { + regexp: /^(\d+(?:\.\d+)?):(\d+(?:\.\d+)?):?(\d+(?:\.\d+)?)?/, + method: function (m, q, s) { + var total = 1; + if (m && m !== '0') { + total *= this._beatsToUnits(this._timeSignature() * parseFloat(m)); + } + if (q && q !== '0') { + total *= this._beatsToUnits(parseFloat(q)); + } + if (s && s !== '0') { + total *= this._beatsToUnits(parseFloat(s) / 4); + } + return total; + } + }; + Tone.Frequency.prototype.transpose = function (interval) { + this._expr = function (expr, interval) { + var val = expr(); + return val * this.intervalToFrequencyRatio(interval); + }.bind(this, this._expr, interval); + return this; + }; + Tone.Frequency.prototype.harmonize = function (intervals) { + this._expr = function (expr, intervals) { + var val = expr(); + var ret = []; + for (var i = 0; i < intervals.length; i++) { + ret[i] = val * this.intervalToFrequencyRatio(intervals[i]); + } + return ret; + }.bind(this, this._expr, intervals); + return this; + }; + Tone.Frequency.prototype.toMidi = function () { + return 
this.frequencyToMidi(this.valueOf()); + }; + Tone.Frequency.prototype.toNote = function () { + var freq = this.valueOf(); + var log = Math.log(freq / Tone.Frequency.A4) / Math.LN2; + var noteNumber = Math.round(12 * log) + 57; + var octave = Math.floor(noteNumber / 12); + if (octave < 0) { + noteNumber += -12 * octave; + } + var noteName = scaleIndexToNote[noteNumber % 12]; + return noteName + octave.toString(); + }; + Tone.Frequency.prototype.toSeconds = function () { + return 1 / this.valueOf(); + }; + Tone.Frequency.prototype.toFrequency = function () { + return this.valueOf(); + }; + Tone.Frequency.prototype.toTicks = function () { + var quarterTime = this._beatsToUnits(1); + var quarters = this.valueOf() / quarterTime; + return Math.floor(quarters * Tone.Transport.PPQ); + }; + Tone.Frequency.prototype._frequencyToUnits = function (freq) { + return freq; + }; + Tone.Frequency.prototype._ticksToUnits = function (ticks) { + return 1 / (ticks * 60 / (Tone.Transport.bpm.value * Tone.Transport.PPQ)); + }; + Tone.Frequency.prototype._beatsToUnits = function (beats) { + return 1 / Tone.TimeBase.prototype._beatsToUnits.call(this, beats); + }; + Tone.Frequency.prototype._secondsToUnits = function (seconds) { + return 1 / seconds; + }; + Tone.Frequency.prototype._defaultUnits = 'hz'; + var noteToScaleIndex = { + 'cbb': -2, + 'cb': -1, + 'c': 0, + 'c#': 1, + 'cx': 2, + 'dbb': 0, + 'db': 1, + 'd': 2, + 'd#': 3, + 'dx': 4, + 'ebb': 2, + 'eb': 3, + 'e': 4, + 'e#': 5, + 'ex': 6, + 'fbb': 3, + 'fb': 4, + 'f': 5, + 'f#': 6, + 'fx': 7, + 'gbb': 5, + 'gb': 6, + 'g': 7, + 'g#': 8, + 'gx': 9, + 'abb': 7, + 'ab': 8, + 'a': 9, + 'a#': 10, + 'ax': 11, + 'bbb': 9, + 'bb': 10, + 'b': 11, + 'b#': 12, + 'bx': 13 + }; + var scaleIndexToNote = [ + 'C', + 'C#', + 'D', + 'D#', + 'E', + 'F', + 'F#', + 'G', + 'G#', + 'A', + 'A#', + 'B' + ]; + Tone.Frequency.A4 = 440; + Tone.Frequency.prototype.midiToFrequency = function (midi) { + return Tone.Frequency.A4 * Math.pow(2, (midi - 69) / 12); + }; + Tone.Frequency.prototype.frequencyToMidi = function (frequency) { + return 69 + 12 * Math.log(frequency / Tone.Frequency.A4) / Math.LN2; + }; + return Tone.Frequency; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_type_TransportTime; +Tone_type_TransportTime = function (Tone) { + Tone.TransportTime = function (val, units) { + if (this instanceof Tone.TransportTime) { + Tone.Time.call(this, val, units); + } else { + return new Tone.TransportTime(val, units); + } + }; + Tone.extend(Tone.TransportTime, Tone.Time); + Tone.TransportTime.prototype._unaryExpressions = Object.create(Tone.Time.prototype._unaryExpressions); + Tone.TransportTime.prototype._unaryExpressions.quantize = { + regexp: /^@/, + method: function (rh) { + var subdivision = this._secondsToTicks(rh()); + var multiple = Math.ceil(Tone.Transport.ticks / subdivision); + return this._ticksToUnits(multiple * subdivision); + } + }; + Tone.TransportTime.prototype._secondsToTicks = function (seconds) { + var quarterTime = this._beatsToUnits(1); + var quarters = seconds / quarterTime; + return Math.round(quarters * Tone.Transport.PPQ); + }; + Tone.TransportTime.prototype.valueOf = function () { + var val = this._secondsToTicks(this._expr()); + return val + (this._plusNow ? Tone.Transport.ticks : 0); + }; + Tone.TransportTime.prototype.toTicks = function () { + return this.valueOf(); + }; + Tone.TransportTime.prototype.toSeconds = function () { + var val = this._expr(); + return val + (this._plusNow ? 
Tone.Transport.seconds : 0); + }; + Tone.TransportTime.prototype.toFrequency = function () { + return 1 / this.toSeconds(); + }; + return Tone.TransportTime; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_core_Emitter; +Tone_core_Emitter = function (Tone) { + 'use strict'; + Tone.Emitter = function () { + this._events = {}; + }; + Tone.extend(Tone.Emitter); + Tone.Emitter.prototype.on = function (event, callback) { + var events = event.split(/\W+/); + for (var i = 0; i < events.length; i++) { + var eventName = events[i]; + if (!this._events.hasOwnProperty(eventName)) { + this._events[eventName] = []; + } + this._events[eventName].push(callback); + } + return this; + }; + Tone.Emitter.prototype.off = function (event, callback) { + var events = event.split(/\W+/); + for (var ev = 0; ev < events.length; ev++) { + event = events[ev]; + if (this._events.hasOwnProperty(event)) { + if (Tone.prototype.isUndef(callback)) { + this._events[event] = []; + } else { + var eventList = this._events[event]; + for (var i = 0; i < eventList.length; i++) { + if (eventList[i] === callback) { + eventList.splice(i, 1); + } + } + } + } + } + return this; + }; + Tone.Emitter.prototype.emit = function (event) { + if (this._events) { + var args = Array.apply(null, arguments).slice(1); + if (this._events.hasOwnProperty(event)) { + var eventList = this._events[event]; + for (var i = 0, len = eventList.length; i < len; i++) { + eventList[i].apply(this, args); + } + } + } + return this; + }; + Tone.Emitter.mixin = function (object) { + var functions = [ + 'on', + 'off', + 'emit' + ]; + object._events = {}; + for (var i = 0; i < functions.length; i++) { + var func = functions[i]; + var emitterFunc = Tone.Emitter.prototype[func]; + object[func] = emitterFunc; + } + }; + Tone.Emitter.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._events = null; + return this; + }; + return Tone.Emitter; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_core_Context; +Tone_core_Context = function (Tone) { + if (!window.hasOwnProperty('AudioContext') && window.hasOwnProperty('webkitAudioContext')) { + window.AudioContext = window.webkitAudioContext; + } + Tone.Context = function (context) { + Tone.Emitter.call(this); + if (!context) { + context = new window.AudioContext(); + } + this._context = context; + for (var prop in this._context) { + this._defineProperty(this._context, prop); + } + this._latencyHint = 'interactive'; + this._lookAhead = 0.1; + this._updateInterval = this._lookAhead / 3; + this._computedUpdateInterval = 0; + this._worker = this._createWorker(); + this._constants = {}; + }; + Tone.extend(Tone.Context, Tone.Emitter); + Tone.Emitter.mixin(Tone.Context); + Tone.Context.prototype._defineProperty = function (context, prop) { + if (this.isUndef(this[prop])) { + Object.defineProperty(this, prop, { + get: function () { + if (typeof context[prop] === 'function') { + return context[prop].bind(context); + } else { + return context[prop]; + } + }, + set: function (val) { + context[prop] = val; + } + }); + } + }; + Tone.Context.prototype.now = function () { + return this._context.currentTime; + }; + Tone.Context.prototype._createWorker = function () { + window.URL = window.URL || window.webkitURL; + var blob = new Blob(['var timeoutTime = ' + (this._updateInterval * 1000).toFixed(1) + ';' + 'self.onmessage = function(msg){' + '\ttimeoutTime = 
parseInt(msg.data);' + '};' + 'function tick(){' + '\tsetTimeout(tick, timeoutTime);' + '\tself.postMessage(\'tick\');' + '}' + 'tick();']); + var blobUrl = URL.createObjectURL(blob); + var worker = new Worker(blobUrl); + worker.addEventListener('message', function () { + this.emit('tick'); + }.bind(this)); + worker.addEventListener('message', function () { + var now = this.now(); + if (this.isNumber(this._lastUpdate)) { + var diff = now - this._lastUpdate; + this._computedUpdateInterval = Math.max(diff, this._computedUpdateInterval * 0.97); + } + this._lastUpdate = now; + }.bind(this)); + return worker; + }; + Tone.Context.prototype.getConstant = function (val) { + if (this._constants[val]) { + return this._constants[val]; + } else { + var buffer = this._context.createBuffer(1, 128, this._context.sampleRate); + var arr = buffer.getChannelData(0); + for (var i = 0; i < arr.length; i++) { + arr[i] = val; + } + var constant = this._context.createBufferSource(); + constant.channelCount = 1; + constant.channelCountMode = 'explicit'; + constant.buffer = buffer; + constant.loop = true; + constant.start(0); + this._constants[val] = constant; + return constant; + } + }; + Object.defineProperty(Tone.Context.prototype, 'lag', { + get: function () { + var diff = this._computedUpdateInterval - this._updateInterval; + diff = Math.max(diff, 0); + return diff; + } + }); + Object.defineProperty(Tone.Context.prototype, 'lookAhead', { + get: function () { + return this._lookAhead; + }, + set: function (lA) { + this._lookAhead = lA; + } + }); + Object.defineProperty(Tone.Context.prototype, 'updateInterval', { + get: function () { + return this._updateInterval; + }, + set: function (interval) { + this._updateInterval = Math.max(interval, Tone.prototype.blockTime); + this._worker.postMessage(Math.max(interval * 1000, 1)); + } + }); + Object.defineProperty(Tone.Context.prototype, 'latencyHint', { + get: function () { + return this._latencyHint; + }, + set: function (hint) { + var lookAhead = hint; + this._latencyHint = hint; + if (this.isString(hint)) { + switch (hint) { + case 'interactive': + lookAhead = 0.1; + this._context.latencyHint = hint; + break; + case 'playback': + lookAhead = 0.8; + this._context.latencyHint = hint; + break; + case 'balanced': + lookAhead = 0.25; + this._context.latencyHint = hint; + break; + case 'fastest': + lookAhead = 0.01; + break; + } + } + this.lookAhead = lookAhead; + this.updateInterval = lookAhead / 3; + } + }); + function shimConnect() { + var nativeConnect = AudioNode.prototype.connect; + var nativeDisconnect = AudioNode.prototype.disconnect; + function toneConnect(B, outNum, inNum) { + if (B.input) { + if (Array.isArray(B.input)) { + if (Tone.prototype.isUndef(inNum)) { + inNum = 0; + } + this.connect(B.input[inNum]); + } else { + this.connect(B.input, outNum, inNum); + } + } else { + try { + if (B instanceof AudioNode) { + nativeConnect.call(this, B, outNum, inNum); + } else { + nativeConnect.call(this, B, outNum); + } + } catch (e) { + throw new Error('error connecting to node: ' + B + '\n' + e); + } + } + } + function toneDisconnect(B, outNum, inNum) { + if (B && B.input && Array.isArray(B.input)) { + if (Tone.prototype.isUndef(inNum)) { + inNum = 0; + } + this.disconnect(B.input[inNum], outNum, inNum); + } else if (B && B.input) { + this.disconnect(B.input, outNum, inNum); + } else { + try { + nativeDisconnect.apply(this, arguments); + } catch (e) { + throw new Error('error disconnecting node: ' + B + '\n' + e); + } + } + } + if (AudioNode.prototype.connect !== 
toneConnect) { + AudioNode.prototype.connect = toneConnect; + AudioNode.prototype.disconnect = toneDisconnect; + } + } + if (Tone.supported) { + shimConnect(); + Tone.context = new Tone.Context(); + } else { + console.warn('This browser does not support Tone.js'); + } + return Tone.Context; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_type_Type; +Tone_type_Type = function (Tone) { + Tone.Type = { + Default: 'number', + Time: 'time', + Frequency: 'frequency', + TransportTime: 'transportTime', + Ticks: 'ticks', + NormalRange: 'normalRange', + AudioRange: 'audioRange', + Decibels: 'db', + Interval: 'interval', + BPM: 'bpm', + Positive: 'positive', + Cents: 'cents', + Degrees: 'degrees', + MIDI: 'midi', + BarsBeatsSixteenths: 'barsBeatsSixteenths', + Samples: 'samples', + Hertz: 'hertz', + Note: 'note', + Milliseconds: 'milliseconds', + Seconds: 'seconds', + Notation: 'notation' + }; + Tone.prototype.toSeconds = function (time) { + if (this.isNumber(time)) { + return time; + } else if (this.isUndef(time)) { + return this.now(); + } else if (this.isString(time)) { + return new Tone.Time(time).toSeconds(); + } else if (time instanceof Tone.TimeBase) { + return time.toSeconds(); + } + }; + Tone.prototype.toFrequency = function (freq) { + if (this.isNumber(freq)) { + return freq; + } else if (this.isString(freq) || this.isUndef(freq)) { + return new Tone.Frequency(freq).valueOf(); + } else if (freq instanceof Tone.TimeBase) { + return freq.toFrequency(); + } + }; + Tone.prototype.toTicks = function (time) { + if (this.isNumber(time) || this.isString(time)) { + return new Tone.TransportTime(time).toTicks(); + } else if (this.isUndef(time)) { + return Tone.Transport.ticks; + } else if (time instanceof Tone.TimeBase) { + return time.toTicks(); + } + }; + return Tone; +}(Tone_core_Tone, Tone_type_Time, Tone_type_Frequency, Tone_type_TransportTime); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_core_Param; +Tone_core_Param = function (Tone) { + 'use strict'; + Tone.Param = function () { + var options = this.optionsObject(arguments, [ + 'param', + 'units', + 'convert' + ], Tone.Param.defaults); + this._param = this.input = options.param; + this.units = options.units; + this.convert = options.convert; + this.overridden = false; + this._lfo = null; + if (this.isObject(options.lfo)) { + this.value = options.lfo; + } else if (!this.isUndef(options.value)) { + this.value = options.value; + } + }; + Tone.extend(Tone.Param); + Tone.Param.defaults = { + 'units': Tone.Type.Default, + 'convert': true, + 'param': undefined + }; + Object.defineProperty(Tone.Param.prototype, 'value', { + get: function () { + return this._toUnits(this._param.value); + }, + set: function (value) { + if (this.isObject(value)) { + if (this.isUndef(Tone.LFO)) { + throw new Error('Include \'Tone.LFO\' to use an LFO as a Param value.'); + } + if (this._lfo) { + this._lfo.dispose(); + } + this._lfo = new Tone.LFO(value).start(); + this._lfo.connect(this.input); + } else { + var convertedVal = this._fromUnits(value); + this._param.cancelScheduledValues(0); + this._param.value = convertedVal; + } + } + }); + Tone.Param.prototype._fromUnits = function (val) { + if (this.convert || this.isUndef(this.convert)) { + switch (this.units) { + case Tone.Type.Time: + return this.toSeconds(val); + case Tone.Type.Frequency: + return this.toFrequency(val); + case Tone.Type.Decibels: + return this.dbToGain(val); + case 
Tone.Type.NormalRange: + return Math.min(Math.max(val, 0), 1); + case Tone.Type.AudioRange: + return Math.min(Math.max(val, -1), 1); + case Tone.Type.Positive: + return Math.max(val, 0); + default: + return val; + } + } else { + return val; + } + }; + Tone.Param.prototype._toUnits = function (val) { + if (this.convert || this.isUndef(this.convert)) { + switch (this.units) { + case Tone.Type.Decibels: + return this.gainToDb(val); + default: + return val; + } + } else { + return val; + } + }; + Tone.Param.prototype._minOutput = 0.00001; + Tone.Param.prototype.setValueAtTime = function (value, time) { + value = this._fromUnits(value); + time = this.toSeconds(time); + if (time <= this.now() + this.blockTime) { + this._param.value = value; + } else { + this._param.setValueAtTime(value, time); + } + return this; + }; + Tone.Param.prototype.setRampPoint = function (now) { + now = this.defaultArg(now, this.now()); + var currentVal = this._param.value; + if (currentVal === 0) { + currentVal = this._minOutput; + } + this._param.setValueAtTime(currentVal, now); + return this; + }; + Tone.Param.prototype.linearRampToValueAtTime = function (value, endTime) { + value = this._fromUnits(value); + this._param.linearRampToValueAtTime(value, this.toSeconds(endTime)); + return this; + }; + Tone.Param.prototype.exponentialRampToValueAtTime = function (value, endTime) { + value = this._fromUnits(value); + value = Math.max(this._minOutput, value); + this._param.exponentialRampToValueAtTime(value, this.toSeconds(endTime)); + return this; + }; + Tone.Param.prototype.exponentialRampToValue = function (value, rampTime, startTime) { + startTime = this.toSeconds(startTime); + this.setRampPoint(startTime); + this.exponentialRampToValueAtTime(value, startTime + this.toSeconds(rampTime)); + return this; + }; + Tone.Param.prototype.linearRampToValue = function (value, rampTime, startTime) { + startTime = this.toSeconds(startTime); + this.setRampPoint(startTime); + this.linearRampToValueAtTime(value, startTime + this.toSeconds(rampTime)); + return this; + }; + Tone.Param.prototype.setTargetAtTime = function (value, startTime, timeConstant) { + value = this._fromUnits(value); + value = Math.max(this._minOutput, value); + timeConstant = Math.max(this._minOutput, timeConstant); + this._param.setTargetAtTime(value, this.toSeconds(startTime), timeConstant); + return this; + }; + Tone.Param.prototype.setValueCurveAtTime = function (values, startTime, duration) { + for (var i = 0; i < values.length; i++) { + values[i] = this._fromUnits(values[i]); + } + this._param.setValueCurveAtTime(values, this.toSeconds(startTime), this.toSeconds(duration)); + return this; + }; + Tone.Param.prototype.cancelScheduledValues = function (startTime) { + this._param.cancelScheduledValues(this.toSeconds(startTime)); + return this; + }; + Tone.Param.prototype.rampTo = function (value, rampTime, startTime) { + rampTime = this.defaultArg(rampTime, 0); + if (this.units === Tone.Type.Frequency || this.units === Tone.Type.BPM || this.units === Tone.Type.Decibels) { + this.exponentialRampToValue(value, rampTime, startTime); + } else { + this.linearRampToValue(value, rampTime, startTime); + } + return this; + }; + Object.defineProperty(Tone.Param.prototype, 'lfo', { + get: function () { + return this._lfo; + } + }); + Tone.Param.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._param = null; + if (this._lfo) { + this._lfo.dispose(); + this._lfo = null; + } + return this; + }; + return Tone.Param; +}(Tone_core_Tone); +/** Tone.js 
module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_core_Gain; +Tone_core_Gain = function (Tone) { + 'use strict'; + if (window.GainNode && !AudioContext.prototype.createGain) { + AudioContext.prototype.createGain = AudioContext.prototype.createGainNode; + } + Tone.Gain = function () { + var options = this.optionsObject(arguments, [ + 'gain', + 'units' + ], Tone.Gain.defaults); + this.input = this.output = this._gainNode = this.context.createGain(); + this.gain = new Tone.Param({ + 'param': this._gainNode.gain, + 'units': options.units, + 'value': options.gain, + 'convert': options.convert + }); + this._readOnly('gain'); + }; + Tone.extend(Tone.Gain); + Tone.Gain.defaults = { + 'gain': 1, + 'convert': true + }; + Tone.Gain.prototype.dispose = function () { + Tone.Param.prototype.dispose.call(this); + this._gainNode.disconnect(); + this._gainNode = null; + this._writable('gain'); + this.gain.dispose(); + this.gain = null; + }; + Tone.prototype.createInsOuts = function (inputs, outputs) { + if (inputs === 1) { + this.input = new Tone.Gain(); + } else if (inputs > 1) { + this.input = new Array(inputs); + } + if (outputs === 1) { + this.output = new Tone.Gain(); + } else if (outputs > 1) { + this.output = new Array(inputs); + } + }; + return Tone.Gain; +}(Tone_core_Tone, Tone_core_Param); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_Signal; +Tone_signal_Signal = function (Tone) { + 'use strict'; + Tone.Signal = function () { + var options = this.optionsObject(arguments, [ + 'value', + 'units' + ], Tone.Signal.defaults); + this.output = this._gain = this.context.createGain(); + options.param = this._gain.gain; + Tone.Param.call(this, options); + this.input = this._param = this._gain.gain; + this.context.getConstant(1).chain(this._gain); + }; + Tone.extend(Tone.Signal, Tone.Param); + Tone.Signal.defaults = { + 'value': 0, + 'units': Tone.Type.Default, + 'convert': true + }; + Tone.Signal.prototype.connect = Tone.SignalBase.prototype.connect; + Tone.Signal.prototype.dispose = function () { + Tone.Param.prototype.dispose.call(this); + this._param = null; + this._gain.disconnect(); + this._gain = null; + return this; + }; + return Tone.Signal; +}(Tone_core_Tone, Tone_signal_WaveShaper, Tone_type_Type, Tone_core_Param); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_Add; +Tone_signal_Add = function (Tone) { + 'use strict'; + Tone.Add = function (value) { + this.createInsOuts(2, 0); + this._sum = this.input[0] = this.input[1] = this.output = new Tone.Gain(); + this._param = this.input[1] = new Tone.Signal(value); + this._param.connect(this._sum); + }; + Tone.extend(Tone.Add, Tone.Signal); + Tone.Add.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._sum.dispose(); + this._sum = null; + this._param.dispose(); + this._param = null; + return this; + }; + return Tone.Add; +}(Tone_core_Tone, Tone_signal_Signal); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_Multiply; +Tone_signal_Multiply = function (Tone) { + 'use strict'; + Tone.Multiply = function (value) { + this.createInsOuts(2, 0); + this._mult = this.input[0] = this.output = new Tone.Gain(); + this._param = this.input[1] = this.output.gain; + this._param.value = this.defaultArg(value, 0); + }; + Tone.extend(Tone.Multiply, Tone.Signal); + Tone.Multiply.prototype.dispose = function () { + 
Tone.prototype.dispose.call(this); + this._mult.dispose(); + this._mult = null; + this._param = null; + return this; + }; + return Tone.Multiply; +}(Tone_core_Tone, Tone_signal_Signal); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_Scale; +Tone_signal_Scale = function (Tone) { + 'use strict'; + Tone.Scale = function (outputMin, outputMax) { + this._outputMin = this.defaultArg(outputMin, 0); + this._outputMax = this.defaultArg(outputMax, 1); + this._scale = this.input = new Tone.Multiply(1); + this._add = this.output = new Tone.Add(0); + this._scale.connect(this._add); + this._setRange(); + }; + Tone.extend(Tone.Scale, Tone.SignalBase); + Object.defineProperty(Tone.Scale.prototype, 'min', { + get: function () { + return this._outputMin; + }, + set: function (min) { + this._outputMin = min; + this._setRange(); + } + }); + Object.defineProperty(Tone.Scale.prototype, 'max', { + get: function () { + return this._outputMax; + }, + set: function (max) { + this._outputMax = max; + this._setRange(); + } + }); + Tone.Scale.prototype._setRange = function () { + this._add.value = this._outputMin; + this._scale.value = this._outputMax - this._outputMin; + }; + Tone.Scale.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._add.dispose(); + this._add = null; + this._scale.dispose(); + this._scale = null; + return this; + }; + return Tone.Scale; +}(Tone_core_Tone, Tone_signal_Add, Tone_signal_Multiply); +var signal; +'use strict'; +signal = function () { + // Signal is built with the Tone.js signal by Yotam Mann + // https://github.com/TONEnoTONE/Tone.js/ + var Signal = Tone_signal_Signal; + var Add = Tone_signal_Add; + var Mult = Tone_signal_Multiply; + var Scale = Tone_signal_Scale; + var Tone = Tone_core_Tone; + var p5sound = master; + Tone.setContext(p5sound.audiocontext); + /** + *

p5.Signal is a constant audio-rate signal used by p5.Oscillator + * and p5.Envelope for modulation math.

+ * + *

This is necessary because Web Audio is processed on a separate clock. + * For example, the p5 draw loop runs about 60 times per second. But + * the audio clock must process samples 44100 times per second. If we + * want to add a value to each of those samples, we can't do it in the + * draw loop, but we can do it by adding a constant-rate audio signal. This class mostly functions behind the scenes in p5.sound, and returns + * a Tone.Signal from the Tone.js library by Yotam Mann. + * If you want to work directly with audio signals for modular + * synthesis, check out + * tone.js.

+ * + * @class p5.Signal + * @constructor + * @return {Tone.Signal} A Signal object from the Tone.js library + * @example + *
+ * function setup() { + * carrier = new p5.Oscillator('sine'); + * carrier.amp(1); // set amplitude + * carrier.freq(220); // set frequency + * carrier.start(); // start oscillating + * + * modulator = new p5.Oscillator('sawtooth'); + * modulator.disconnect(); + * modulator.amp(1); + * modulator.freq(4); + * modulator.start(); + * + * // Modulator's default amplitude range is -1 to 1. + * // Multiply it by -200, so the range is -200 to 200 + * // then add 220 so the range is 20 to 420 + * carrier.freq( modulator.mult(-200).add(220) ); + * } + *
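+ * // An added, approximate sketch (not part of the original example): a
+ * // p5.Signal can also be used on its own and faded with the fade()
+ * // method documented below.
+ * // var sig = new p5.Signal(0.2); // constant audio-rate signal at 0.2
+ * // sig.fade(0.8, 2);             // ramp toward 0.8, about 2 seconds from now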
+ */ + p5.Signal = function (value) { + var s = new Signal(value); + // p5sound.soundArray.push(s); + return s; + }; + /** + * Fade to value, for smooth transitions + * + * @method fade + * @param {Number} value Value to set this signal + * @param {Number} [secondsFromNow] Length of fade, in seconds from now + */ + Signal.prototype.fade = Signal.prototype.linearRampToValueAtTime; + Mult.prototype.fade = Signal.prototype.fade; + Add.prototype.fade = Signal.prototype.fade; + Scale.prototype.fade = Signal.prototype.fade; + /** + * Connect a p5.sound object or Web Audio node to this + * p5.Signal so that its amplitude values can be scaled. + * + * @param {Object} input + */ + Signal.prototype.setInput = function (_input) { + _input.connect(this); + }; + Mult.prototype.setInput = Signal.prototype.setInput; + Add.prototype.setInput = Signal.prototype.setInput; + Scale.prototype.setInput = Signal.prototype.setInput; + // signals can add / mult / scale themselves + /** + * Add a constant value to this audio signal, + * and return the resulting audio signal. Does + * not change the value of the original signal, + * instead it returns a new p5.SignalAdd. + * + * @method add + * @param {Number} number + * @return {p5.SignalAdd} object + */ + Signal.prototype.add = function (num) { + var add = new Add(num); + // add.setInput(this); + this.connect(add); + return add; + }; + Mult.prototype.add = Signal.prototype.add; + Add.prototype.add = Signal.prototype.add; + Scale.prototype.add = Signal.prototype.add; + /** + * Multiply this signal by a constant value, + * and return the resulting audio signal. Does + * not change the value of the original signal, + * instead it returns a new p5.SignalMult. + * + * @method mult + * @param {Number} number to multiply + * @return {Tone.Multiply} object + */ + Signal.prototype.mult = function (num) { + var mult = new Mult(num); + // mult.setInput(this); + this.connect(mult); + return mult; + }; + Mult.prototype.mult = Signal.prototype.mult; + Add.prototype.mult = Signal.prototype.mult; + Scale.prototype.mult = Signal.prototype.mult; + /** + * Scale this signal value to a given range, + * and return the result as an audio signal. Does + * not change the value of the original signal, + * instead it returns a new p5.SignalScale. + * + * @method scale + * @param {Number} number to multiply + * @param {Number} inMin input range minumum + * @param {Number} inMax input range maximum + * @param {Number} outMin input range minumum + * @param {Number} outMax input range maximum + * @return {p5.SignalScale} object + */ + Signal.prototype.scale = function (inMin, inMax, outMin, outMax) { + var mapOutMin, mapOutMax; + if (arguments.length === 4) { + mapOutMin = p5.prototype.map(outMin, inMin, inMax, 0, 1) - 0.5; + mapOutMax = p5.prototype.map(outMax, inMin, inMax, 0, 1) - 0.5; + } else { + mapOutMin = arguments[0]; + mapOutMax = arguments[1]; + } + var scale = new Scale(mapOutMin, mapOutMax); + this.connect(scale); + return scale; + }; + Mult.prototype.scale = Signal.prototype.scale; + Add.prototype.scale = Signal.prototype.scale; + Scale.prototype.scale = Signal.prototype.scale; +}(Tone_signal_Signal, Tone_signal_Add, Tone_signal_Multiply, Tone_signal_Scale, Tone_core_Tone, master); +var oscillator; +'use strict'; +oscillator = function () { + var p5sound = master; + var Add = Tone_signal_Add; + var Mult = Tone_signal_Multiply; + var Scale = Tone_signal_Scale; + /** + *

Creates a signal that oscillates between -1.0 and 1.0. + * By default, the oscillation takes the form of a sinusoidal + * shape ('sine'). Additional types include 'triangle', + * 'sawtooth' and 'square'. The frequency defaults to + * 440 oscillations per second (440Hz, equal to the pitch of an + * 'A' note).

+ * + *

Set the type of oscillation with setType(), or by instantiating a + * specific oscillator: p5.SinOsc, p5.TriOsc, p5.SqrOsc, or p5.SawOsc. + *

+ * + * @class p5.Oscillator + * @constructor + * @param {Number} [freq] frequency defaults to 440Hz + * @param {String} [type] type of oscillator. Options: + * 'sine' (default), 'triangle', + * 'sawtooth', 'square' + * @example + *
+ * var osc; + * var playing = false; + * + * function setup() { + * backgroundColor = color(255,0,255); + * textAlign(CENTER); + * + * osc = new p5.Oscillator(); + * osc.setType('sine'); + * osc.freq(240); + * osc.amp(0); + * osc.start(); + * } + * + * function draw() { + * background(backgroundColor) + * text('click to play', width/2, height/2); + * } + * + * function mouseClicked() { + * if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) { + * if (!playing) { + * // ramp amplitude to 0.5 over 0.1 seconds + * osc.amp(0.5, 0.05); + * playing = true; + * backgroundColor = color(0,255,255); + * } else { + * // ramp amplitude to 0 over 0.5 seconds + * osc.amp(0, 0.5); + * playing = false; + * backgroundColor = color(255,0,255); + * } + * } + * } + *
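+ * // An added, approximate sketch (not part of the original example): the
+ * // same oscillator can also be retuned and panned after it has started.
+ * // osc.setType('triangle'); // switch the waveform
+ * // osc.freq(330);           // jump to 330 Hz
+ * // osc.pan(-1);             // pan hard left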
+ */ + p5.Oscillator = function (freq, type) { + if (typeof freq === 'string') { + var f = type; + type = freq; + freq = f; + } + if (typeof type === 'number') { + var f = type; + type = freq; + freq = f; + } + this.started = false; + // components + this.phaseAmount = undefined; + this.oscillator = p5sound.audiocontext.createOscillator(); + this.f = freq || 440; + // frequency + this.oscillator.type = type || 'sine'; + this.oscillator.frequency.setValueAtTime(this.f, p5sound.audiocontext.currentTime); + // connections + this.output = p5sound.audiocontext.createGain(); + this._freqMods = []; + // modulators connected to this oscillator's frequency + // set default output gain to 0.5 + this.output.gain.value = 0.5; + this.output.gain.setValueAtTime(0.5, p5sound.audiocontext.currentTime); + this.oscillator.connect(this.output); + // stereo panning + this.panPosition = 0; + this.connection = p5sound.input; + // connect to p5sound by default + this.panner = new p5.Panner(this.output, this.connection, 1); + //array of math operation signal chaining + this.mathOps = [this.output]; + // add to the soundArray so we can dispose of the osc later + p5sound.soundArray.push(this); + }; + /** + * Start an oscillator. Accepts an optional parameter to + * determine how long (in seconds from now) until the + * oscillator starts. + * + * @method start + * @param {Number} [time] startTime in seconds from now. + * @param {Number} [frequency] frequency in Hz. + */ + p5.Oscillator.prototype.start = function (time, f) { + if (this.started) { + var now = p5sound.audiocontext.currentTime; + this.stop(now); + } + if (!this.started) { + var freq = f || this.f; + var type = this.oscillator.type; + // set old osc free to be garbage collected (memory) + if (this.oscillator) { + this.oscillator.disconnect(); + this.oscillator = undefined; + } + // var detune = this.oscillator.frequency.value; + this.oscillator = p5sound.audiocontext.createOscillator(); + this.oscillator.frequency.value = Math.abs(freq); + this.oscillator.type = type; + // this.oscillator.detune.value = detune; + this.oscillator.connect(this.output); + time = time || 0; + this.oscillator.start(time + p5sound.audiocontext.currentTime); + this.freqNode = this.oscillator.frequency; + // if other oscillators are already connected to this osc's freq + for (var i in this._freqMods) { + if (typeof this._freqMods[i].connect !== 'undefined') { + this._freqMods[i].connect(this.oscillator.frequency); + } + } + this.started = true; + } + }; + /** + * Stop an oscillator. Accepts an optional parameter + * to determine how long (in seconds from now) until the + * oscillator stops. + * + * @method stop + * @param {Number} secondsFromNow Time, in seconds from now. + */ + p5.Oscillator.prototype.stop = function (time) { + if (this.started) { + var t = time || 0; + var now = p5sound.audiocontext.currentTime; + this.oscillator.stop(t + now); + this.started = false; + } + }; + /** + * Set the amplitude between 0 and 1.0. Or, pass in an object + * such as an oscillator to modulate amplitude with an audio signal. 
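+ * For example (a rough illustration, not from the original docs),
+ * osc.amp(0.8, 0.2) ramps this oscillator's output level to 0.8 over
+ * roughly 0.2 seconds, while osc.amp(lfo) lets another (disconnected)
+ * oscillator modulate the amplitude instead.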
+ * + * @method amp + * @param {Number|Object} vol between 0 and 1.0 + * or a modulating signal/oscillator + * @param {Number} [rampTime] create a fade that lasts rampTime + * @param {Number} [timeFromNow] schedule this event to happen + * seconds from now + * @return {AudioParam} gain If no value is provided, + * returns the Web Audio API + * AudioParam that controls + * this oscillator's + * gain/amplitude/volume) + */ + p5.Oscillator.prototype.amp = function (vol, rampTime, tFromNow) { + var self = this; + if (typeof vol === 'number') { + var rampTime = rampTime || 0; + var tFromNow = tFromNow || 0; + var now = p5sound.audiocontext.currentTime; + this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime); + } else if (vol) { + vol.connect(self.output.gain); + } else { + // return the Gain Node + return this.output.gain; + } + }; + // these are now the same thing + p5.Oscillator.prototype.fade = p5.Oscillator.prototype.amp; + p5.Oscillator.prototype.getAmp = function () { + return this.output.gain.value; + }; + /** + * Set frequency of an oscillator to a value. Or, pass in an object + * such as an oscillator to modulate the frequency with an audio signal. + * + * @method freq + * @param {Number|Object} Frequency Frequency in Hz + * or modulating signal/oscillator + * @param {Number} [rampTime] Ramp time (in seconds) + * @param {Number} [timeFromNow] Schedule this event to happen + * at x seconds from now + * @return {AudioParam} Frequency If no value is provided, + * returns the Web Audio API + * AudioParam that controls + * this oscillator's frequency + * @example + *
+ * var osc = new p5.Oscillator(300); + * osc.start(); + * osc.freq(40, 10); + *
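+ * // An added, approximate sketch: frequency can also be modulated by
+ * // another signal, as described above (lfo is an illustrative helper).
+ * // var lfo = new p5.Oscillator(4, 'sine');
+ * // lfo.disconnect(); // keep the LFO out of the main output
+ * // lfo.start();
+ * // osc.freq(lfo);    // the LFO now wobbles osc's frequency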
+ */ + p5.Oscillator.prototype.freq = function (val, rampTime, tFromNow) { + if (typeof val === 'number' && !isNaN(val)) { + this.f = val; + var now = p5sound.audiocontext.currentTime; + var rampTime = rampTime || 0; + var tFromNow = tFromNow || 0; + // var currentFreq = this.oscillator.frequency.value; + // this.oscillator.frequency.cancelScheduledValues(now); + if (rampTime === 0) { + this.oscillator.frequency.cancelScheduledValues(now); + this.oscillator.frequency.setValueAtTime(val, tFromNow + now); + } else { + if (val > 0) { + this.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now); + } else { + this.oscillator.frequency.linearRampToValueAtTime(val, tFromNow + rampTime + now); + } + } + // reset phase if oscillator has a phase + if (this.phaseAmount) { + this.phase(this.phaseAmount); + } + } else if (val) { + if (val.output) { + val = val.output; + } + val.connect(this.oscillator.frequency); + // keep track of what is modulating this param + // so it can be re-connected if + this._freqMods.push(val); + } else { + // return the Frequency Node + return this.oscillator.frequency; + } + }; + p5.Oscillator.prototype.getFreq = function () { + return this.oscillator.frequency.value; + }; + /** + * Set type to 'sine', 'triangle', 'sawtooth' or 'square'. + * + * @method setType + * @param {String} type 'sine', 'triangle', 'sawtooth' or 'square'. + */ + p5.Oscillator.prototype.setType = function (type) { + this.oscillator.type = type; + }; + p5.Oscillator.prototype.getType = function () { + return this.oscillator.type; + }; + /** + * Connect to a p5.sound / Web Audio object. + * + * @method connect + * @param {Object} unit A p5.sound or Web Audio object + */ + p5.Oscillator.prototype.connect = function (unit) { + if (!unit) { + this.panner.connect(p5sound.input); + } else if (unit.hasOwnProperty('input')) { + this.panner.connect(unit.input); + this.connection = unit.input; + } else { + this.panner.connect(unit); + this.connection = unit; + } + }; + /** + * Disconnect all outputs + * + * @method disconnect + */ + p5.Oscillator.prototype.disconnect = function () { + this.output.disconnect(); + this.panner.disconnect(); + this.output.connect(this.panner); + this.oscMods = []; + }; + /** + * Pan between Left (-1) and Right (1) + * + * @method pan + * @param {Number} panning Number between -1 and 1 + * @param {Number} timeFromNow schedule this event to happen + * seconds from now + */ + p5.Oscillator.prototype.pan = function (pval, tFromNow) { + this.panPosition = pval; + this.panner.pan(pval, tFromNow); + }; + p5.Oscillator.prototype.getPan = function () { + return this.panPosition; + }; + // get rid of the oscillator + p5.Oscillator.prototype.dispose = function () { + // remove reference from soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + if (this.oscillator) { + var now = p5sound.audiocontext.currentTime; + this.stop(now); + this.disconnect(); + this.panner = null; + this.oscillator = null; + } + // if it is a Pulse + if (this.osc2) { + this.osc2.dispose(); + } + }; + /** + * Set the phase of an oscillator between 0.0 and 1.0. + * In this implementation, phase is a delay time + * based on the oscillator's current frequency. 
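+ * For example (a rough illustration, not from the original docs),
+ * osc.phase(0.5) delays the waveform by half a period at the
+ * oscillator's current frequency.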
+ * + * @method phase + * @param {Number} phase float between 0.0 and 1.0 + */ + p5.Oscillator.prototype.phase = function (p) { + var delayAmt = p5.prototype.map(p, 0, 1, 0, 1 / this.f); + var now = p5sound.audiocontext.currentTime; + this.phaseAmount = p; + if (!this.dNode) { + // create a delay node + this.dNode = p5sound.audiocontext.createDelay(); + // put the delay node in between output and panner + this.oscillator.disconnect(); + this.oscillator.connect(this.dNode); + this.dNode.connect(this.output); + } + // set delay time to match phase: + this.dNode.delayTime.setValueAtTime(delayAmt, now); + }; + // ========================== // + // SIGNAL MATH FOR MODULATION // + // ========================== // + // return sigChain(this, scale, thisChain, nextChain, Scale); + var sigChain = function (o, mathObj, thisChain, nextChain, type) { + var chainSource = o.oscillator; + // if this type of math already exists in the chain, replace it + for (var i in o.mathOps) { + if (o.mathOps[i] instanceof type) { + chainSource.disconnect(); + o.mathOps[i].dispose(); + thisChain = i; + // assume nextChain is output gain node unless... + if (thisChain < o.mathOps.length - 2) { + nextChain = o.mathOps[i + 1]; + } + } + } + if (thisChain === o.mathOps.length - 1) { + o.mathOps.push(nextChain); + } + // assume source is the oscillator unless i > 0 + if (i > 0) { + chainSource = o.mathOps[i - 1]; + } + chainSource.disconnect(); + chainSource.connect(mathObj); + mathObj.connect(nextChain); + o.mathOps[thisChain] = mathObj; + return o; + }; + /** + * Add a value to the p5.Oscillator's output amplitude, + * and return the oscillator. Calling this method again + * will override the initial add() with a new value. + * + * @method add + * @param {Number} number Constant number to add + * @return {p5.Oscillator} Oscillator Returns this oscillator + * with scaled output + * + */ + p5.Oscillator.prototype.add = function (num) { + var add = new Add(num); + var thisChain = this.mathOps.length - 1; + var nextChain = this.output; + return sigChain(this, add, thisChain, nextChain, Add); + }; + /** + * Multiply the p5.Oscillator's output amplitude + * by a fixed value (i.e. turn it up!). Calling this method + * again will override the initial mult() with a new value. + * + * @method mult + * @param {Number} number Constant number to multiply + * @return {p5.Oscillator} Oscillator Returns this oscillator + * with multiplied output + */ + p5.Oscillator.prototype.mult = function (num) { + var mult = new Mult(num); + var thisChain = this.mathOps.length - 1; + var nextChain = this.output; + return sigChain(this, mult, thisChain, nextChain, Mult); + }; + /** + * Scale this oscillator's amplitude values to a given + * range, and return the oscillator. Calling this method + * again will override the initial scale() with new values. 
+ * + * @method scale + * @param {Number} inMin input range minumum + * @param {Number} inMax input range maximum + * @param {Number} outMin input range minumum + * @param {Number} outMax input range maximum + * @return {p5.Oscillator} Oscillator Returns this oscillator + * with scaled output + */ + p5.Oscillator.prototype.scale = function (inMin, inMax, outMin, outMax) { + var mapOutMin, mapOutMax; + if (arguments.length === 4) { + mapOutMin = p5.prototype.map(outMin, inMin, inMax, 0, 1) - 0.5; + mapOutMax = p5.prototype.map(outMax, inMin, inMax, 0, 1) - 0.5; + } else { + mapOutMin = arguments[0]; + mapOutMax = arguments[1]; + } + var scale = new Scale(mapOutMin, mapOutMax); + var thisChain = this.mathOps.length - 1; + var nextChain = this.output; + return sigChain(this, scale, thisChain, nextChain, Scale); + }; + // ============================== // + // SinOsc, TriOsc, SqrOsc, SawOsc // + // ============================== // + /** + * Constructor: new p5.SinOsc(). + * This creates a Sine Wave Oscillator and is + * equivalent to new p5.Oscillator('sine') + * or creating a p5.Oscillator and then calling + * its method setType('sine'). + * See p5.Oscillator for methods. + * + * @class p5.SinOsc + * @constructor + * @extends {p5.Oscillator} + * @param {Number} [freq] Set the frequency + */ + p5.SinOsc = function (freq) { + p5.Oscillator.call(this, freq, 'sine'); + }; + p5.SinOsc.prototype = Object.create(p5.Oscillator.prototype); + /** + * Constructor: new p5.TriOsc(). + * This creates a Triangle Wave Oscillator and is + * equivalent to new p5.Oscillator('triangle') + * or creating a p5.Oscillator and then calling + * its method setType('triangle'). + * See p5.Oscillator for methods. + * + * @class p5.TriOsc + * @constructor + * @extends {p5.Oscillator} + * @param {Number} [freq] Set the frequency + */ + p5.TriOsc = function (freq) { + p5.Oscillator.call(this, freq, 'triangle'); + }; + p5.TriOsc.prototype = Object.create(p5.Oscillator.prototype); + /** + * Constructor: new p5.SawOsc(). + * This creates a SawTooth Wave Oscillator and is + * equivalent to new p5.Oscillator('sawtooth') + * or creating a p5.Oscillator and then calling + * its method setType('sawtooth'). + * See p5.Oscillator for methods. + * + * @class p5.SawOsc + * @constructor + * @extends {p5.Oscillator} + * @param {Number} [freq] Set the frequency + */ + p5.SawOsc = function (freq) { + p5.Oscillator.call(this, freq, 'sawtooth'); + }; + p5.SawOsc.prototype = Object.create(p5.Oscillator.prototype); + /** + * Constructor: new p5.SqrOsc(). + * This creates a Square Wave Oscillator and is + * equivalent to new p5.Oscillator('square') + * or creating a p5.Oscillator and then calling + * its method setType('square'). + * See p5.Oscillator for methods. 
+ * + * @class p5.SqrOsc + * @constructor + * @extends {p5.Oscillator} + * @param {Number} [freq] Set the frequency + */ + p5.SqrOsc = function (freq) { + p5.Oscillator.call(this, freq, 'square'); + }; + p5.SqrOsc.prototype = Object.create(p5.Oscillator.prototype); +}(master, Tone_signal_Add, Tone_signal_Multiply, Tone_signal_Scale); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_core_Timeline; +Tone_core_Timeline = function (Tone) { + 'use strict'; + Tone.Timeline = function () { + var options = this.optionsObject(arguments, ['memory'], Tone.Timeline.defaults); + this._timeline = []; + this._toRemove = []; + this._iterating = false; + this.memory = options.memory; + }; + Tone.extend(Tone.Timeline); + Tone.Timeline.defaults = { 'memory': Infinity }; + Object.defineProperty(Tone.Timeline.prototype, 'length', { + get: function () { + return this._timeline.length; + } + }); + Tone.Timeline.prototype.add = function (event) { + if (this.isUndef(event.time)) { + throw new Error('Tone.Timeline: events must have a time attribute'); + } + if (this._timeline.length) { + var index = this._search(event.time); + this._timeline.splice(index + 1, 0, event); + } else { + this._timeline.push(event); + } + if (this.length > this.memory) { + var diff = this.length - this.memory; + this._timeline.splice(0, diff); + } + return this; + }; + Tone.Timeline.prototype.remove = function (event) { + if (this._iterating) { + this._toRemove.push(event); + } else { + var index = this._timeline.indexOf(event); + if (index !== -1) { + this._timeline.splice(index, 1); + } + } + return this; + }; + Tone.Timeline.prototype.get = function (time) { + var index = this._search(time); + if (index !== -1) { + return this._timeline[index]; + } else { + return null; + } + }; + Tone.Timeline.prototype.peek = function () { + return this._timeline[0]; + }; + Tone.Timeline.prototype.shift = function () { + return this._timeline.shift(); + }; + Tone.Timeline.prototype.getAfter = function (time) { + var index = this._search(time); + if (index + 1 < this._timeline.length) { + return this._timeline[index + 1]; + } else { + return null; + } + }; + Tone.Timeline.prototype.getBefore = function (time) { + var len = this._timeline.length; + if (len > 0 && this._timeline[len - 1].time < time) { + return this._timeline[len - 1]; + } + var index = this._search(time); + if (index - 1 >= 0) { + return this._timeline[index - 1]; + } else { + return null; + } + }; + Tone.Timeline.prototype.cancel = function (after) { + if (this._timeline.length > 1) { + var index = this._search(after); + if (index >= 0) { + if (this._timeline[index].time === after) { + for (var i = index; i >= 0; i--) { + if (this._timeline[i].time === after) { + index = i; + } else { + break; + } + } + this._timeline = this._timeline.slice(0, index); + } else { + this._timeline = this._timeline.slice(0, index + 1); + } + } else { + this._timeline = []; + } + } else if (this._timeline.length === 1) { + if (this._timeline[0].time >= after) { + this._timeline = []; + } + } + return this; + }; + Tone.Timeline.prototype.cancelBefore = function (time) { + if (this._timeline.length) { + var index = this._search(time); + if (index >= 0) { + this._timeline = this._timeline.slice(index + 1); + } + } + return this; + }; + Tone.Timeline.prototype._search = function (time) { + var beginning = 0; + var len = this._timeline.length; + var end = len; + if (len > 0 && this._timeline[len - 1].time <= time) { + return len - 1; + } + while (beginning < 
end) { + var midPoint = Math.floor(beginning + (end - beginning) / 2); + var event = this._timeline[midPoint]; + var nextEvent = this._timeline[midPoint + 1]; + if (event.time === time) { + for (var i = midPoint; i < this._timeline.length; i++) { + var testEvent = this._timeline[i]; + if (testEvent.time === time) { + midPoint = i; + } + } + return midPoint; + } else if (event.time < time && nextEvent.time > time) { + return midPoint; + } else if (event.time > time) { + end = midPoint; + } else if (event.time < time) { + beginning = midPoint + 1; + } + } + return -1; + }; + Tone.Timeline.prototype._iterate = function (callback, lowerBound, upperBound) { + this._iterating = true; + lowerBound = this.defaultArg(lowerBound, 0); + upperBound = this.defaultArg(upperBound, this._timeline.length - 1); + for (var i = lowerBound; i <= upperBound; i++) { + callback(this._timeline[i]); + } + this._iterating = false; + if (this._toRemove.length > 0) { + for (var j = 0; j < this._toRemove.length; j++) { + var index = this._timeline.indexOf(this._toRemove[j]); + if (index !== -1) { + this._timeline.splice(index, 1); + } + } + this._toRemove = []; + } + }; + Tone.Timeline.prototype.forEach = function (callback) { + this._iterate(callback); + return this; + }; + Tone.Timeline.prototype.forEachBefore = function (time, callback) { + var upperBound = this._search(time); + if (upperBound !== -1) { + this._iterate(callback, 0, upperBound); + } + return this; + }; + Tone.Timeline.prototype.forEachAfter = function (time, callback) { + var lowerBound = this._search(time); + this._iterate(callback, lowerBound + 1); + return this; + }; + Tone.Timeline.prototype.forEachFrom = function (time, callback) { + var lowerBound = this._search(time); + while (lowerBound >= 0 && this._timeline[lowerBound].time >= time) { + lowerBound--; + } + this._iterate(callback, lowerBound + 1); + return this; + }; + Tone.Timeline.prototype.forEachAtTime = function (time, callback) { + var upperBound = this._search(time); + if (upperBound !== -1) { + this._iterate(function (event) { + if (event.time === time) { + callback(event); + } + }, 0, upperBound); + } + return this; + }; + Tone.Timeline.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._timeline = null; + this._toRemove = null; + }; + return Tone.Timeline; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_TimelineSignal; +Tone_signal_TimelineSignal = function (Tone) { + 'use strict'; + Tone.TimelineSignal = function () { + var options = this.optionsObject(arguments, [ + 'value', + 'units' + ], Tone.Signal.defaults); + this._events = new Tone.Timeline(10); + Tone.Signal.apply(this, options); + options.param = this._param; + Tone.Param.call(this, options); + this._initial = this._fromUnits(this._param.value); + }; + Tone.extend(Tone.TimelineSignal, Tone.Param); + Tone.TimelineSignal.Type = { + Linear: 'linear', + Exponential: 'exponential', + Target: 'target', + Curve: 'curve', + Set: 'set' + }; + Object.defineProperty(Tone.TimelineSignal.prototype, 'value', { + get: function () { + var now = this.now(); + var val = this.getValueAtTime(now); + return this._toUnits(val); + }, + set: function (value) { + var convertedVal = this._fromUnits(value); + this._initial = convertedVal; + this.cancelScheduledValues(); + this._param.value = convertedVal; + } + }); + Tone.TimelineSignal.prototype.setValueAtTime = function (value, startTime) { + value = this._fromUnits(value); + startTime = 
this.toSeconds(startTime); + this._events.add({ + 'type': Tone.TimelineSignal.Type.Set, + 'value': value, + 'time': startTime + }); + this._param.setValueAtTime(value, startTime); + return this; + }; + Tone.TimelineSignal.prototype.linearRampToValueAtTime = function (value, endTime) { + value = this._fromUnits(value); + endTime = this.toSeconds(endTime); + this._events.add({ + 'type': Tone.TimelineSignal.Type.Linear, + 'value': value, + 'time': endTime + }); + this._param.linearRampToValueAtTime(value, endTime); + return this; + }; + Tone.TimelineSignal.prototype.exponentialRampToValueAtTime = function (value, endTime) { + endTime = this.toSeconds(endTime); + var beforeEvent = this._searchBefore(endTime); + if (beforeEvent && beforeEvent.value === 0) { + this.setValueAtTime(this._minOutput, beforeEvent.time); + } + value = this._fromUnits(value); + var setValue = Math.max(value, this._minOutput); + this._events.add({ + 'type': Tone.TimelineSignal.Type.Exponential, + 'value': setValue, + 'time': endTime + }); + if (value < this._minOutput) { + this._param.exponentialRampToValueAtTime(this._minOutput, endTime - this.sampleTime); + this.setValueAtTime(0, endTime); + } else { + this._param.exponentialRampToValueAtTime(value, endTime); + } + return this; + }; + Tone.TimelineSignal.prototype.setTargetAtTime = function (value, startTime, timeConstant) { + value = this._fromUnits(value); + value = Math.max(this._minOutput, value); + timeConstant = Math.max(this._minOutput, timeConstant); + startTime = this.toSeconds(startTime); + this._events.add({ + 'type': Tone.TimelineSignal.Type.Target, + 'value': value, + 'time': startTime, + 'constant': timeConstant + }); + this._param.setTargetAtTime(value, startTime, timeConstant); + return this; + }; + Tone.TimelineSignal.prototype.setValueCurveAtTime = function (values, startTime, duration, scaling) { + scaling = this.defaultArg(scaling, 1); + var floats = new Array(values.length); + for (var i = 0; i < floats.length; i++) { + floats[i] = this._fromUnits(values[i]) * scaling; + } + startTime = this.toSeconds(startTime); + duration = this.toSeconds(duration); + this._events.add({ + 'type': Tone.TimelineSignal.Type.Curve, + 'value': floats, + 'time': startTime, + 'duration': duration + }); + this._param.setValueAtTime(floats[0], startTime); + for (var j = 1; j < floats.length; j++) { + var segmentTime = startTime + j / (floats.length - 1) * duration; + this._param.linearRampToValueAtTime(floats[j], segmentTime); + } + return this; + }; + Tone.TimelineSignal.prototype.cancelScheduledValues = function (after) { + after = this.toSeconds(after); + this._events.cancel(after); + this._param.cancelScheduledValues(after); + return this; + }; + Tone.TimelineSignal.prototype.setRampPoint = function (time) { + time = this.toSeconds(time); + var val = this._toUnits(this.getValueAtTime(time)); + var before = this._searchBefore(time); + if (before && before.time === time) { + this.cancelScheduledValues(time + this.sampleTime); + } else if (before && before.type === Tone.TimelineSignal.Type.Curve && before.time + before.duration > time) { + this.cancelScheduledValues(time); + this.linearRampToValueAtTime(val, time); + } else { + var after = this._searchAfter(time); + if (after) { + this.cancelScheduledValues(time); + if (after.type === Tone.TimelineSignal.Type.Linear) { + this.linearRampToValueAtTime(val, time); + } else if (after.type === Tone.TimelineSignal.Type.Exponential) { + this.exponentialRampToValueAtTime(val, time); + } + } + this.setValueAtTime(val, time); + } 
+ return this; + }; + Tone.TimelineSignal.prototype.linearRampToValueBetween = function (value, start, finish) { + this.setRampPoint(start); + this.linearRampToValueAtTime(value, finish); + return this; + }; + Tone.TimelineSignal.prototype.exponentialRampToValueBetween = function (value, start, finish) { + this.setRampPoint(start); + this.exponentialRampToValueAtTime(value, finish); + return this; + }; + Tone.TimelineSignal.prototype._searchBefore = function (time) { + return this._events.get(time); + }; + Tone.TimelineSignal.prototype._searchAfter = function (time) { + return this._events.getAfter(time); + }; + Tone.TimelineSignal.prototype.getValueAtTime = function (time) { + time = this.toSeconds(time); + var after = this._searchAfter(time); + var before = this._searchBefore(time); + var value = this._initial; + if (before === null) { + value = this._initial; + } else if (before.type === Tone.TimelineSignal.Type.Target) { + var previous = this._events.getBefore(before.time); + var previouVal; + if (previous === null) { + previouVal = this._initial; + } else { + previouVal = previous.value; + } + value = this._exponentialApproach(before.time, previouVal, before.value, before.constant, time); + } else if (before.type === Tone.TimelineSignal.Type.Curve) { + value = this._curveInterpolate(before.time, before.value, before.duration, time); + } else if (after === null) { + value = before.value; + } else if (after.type === Tone.TimelineSignal.Type.Linear) { + value = this._linearInterpolate(before.time, before.value, after.time, after.value, time); + } else if (after.type === Tone.TimelineSignal.Type.Exponential) { + value = this._exponentialInterpolate(before.time, before.value, after.time, after.value, time); + } else { + value = before.value; + } + return value; + }; + Tone.TimelineSignal.prototype.connect = Tone.SignalBase.prototype.connect; + Tone.TimelineSignal.prototype._exponentialApproach = function (t0, v0, v1, timeConstant, t) { + return v1 + (v0 - v1) * Math.exp(-(t - t0) / timeConstant); + }; + Tone.TimelineSignal.prototype._linearInterpolate = function (t0, v0, t1, v1, t) { + return v0 + (v1 - v0) * ((t - t0) / (t1 - t0)); + }; + Tone.TimelineSignal.prototype._exponentialInterpolate = function (t0, v0, t1, v1, t) { + v0 = Math.max(this._minOutput, v0); + return v0 * Math.pow(v1 / v0, (t - t0) / (t1 - t0)); + }; + Tone.TimelineSignal.prototype._curveInterpolate = function (start, curve, duration, time) { + var len = curve.length; + if (time >= start + duration) { + return curve[len - 1]; + } else if (time <= start) { + return curve[0]; + } else { + var progress = (time - start) / duration; + var lowerIndex = Math.floor((len - 1) * progress); + var upperIndex = Math.ceil((len - 1) * progress); + var lowerVal = curve[lowerIndex]; + var upperVal = curve[upperIndex]; + if (upperIndex === lowerIndex) { + return lowerVal; + } else { + return this._linearInterpolate(lowerIndex, lowerVal, upperIndex, upperVal, progress * (len - 1)); + } + } + }; + Tone.TimelineSignal.prototype.dispose = function () { + Tone.Signal.prototype.dispose.call(this); + Tone.Param.prototype.dispose.call(this); + this._events.dispose(); + this._events = null; + }; + return Tone.TimelineSignal; +}(Tone_core_Tone, Tone_signal_Signal); +var env; +'use strict'; +env = function () { + var p5sound = master; + var Add = Tone_signal_Add; + var Mult = Tone_signal_Multiply; + var Scale = Tone_signal_Scale; + var TimelineSignal = Tone_signal_TimelineSignal; + var Tone = Tone_core_Tone; + 
Tone.setContext(p5sound.audiocontext); + /** + *

Envelopes are pre-defined amplitude distributions over time. + * Typically, envelopes are used to control the output volume + * of an object, a series of fades referred to as Attack, Decay, + * Sustain and Release ( + * ADSR + * ). Envelopes can also control other Web Audio Parameters. For example, a p5.Env can + * control an Oscillator's frequency like this: osc.freq(env).

+ *

Use setRange to change the attack/release level. + * Use setADSR to change attackTime, decayTime, sustainPercent and releaseTime.

+ *

Use the play method to play the entire envelope, + * the ramp method for a pingable trigger, + * or triggerAttack/ + * triggerRelease to trigger noteOn/noteOff.

+ * + * @class p5.Env + * @constructor + * @example + *
+ * var attackLevel = 1.0; + * var releaseLevel = 0; + * + * var attackTime = 0.001 + * var decayTime = 0.2; + * var susPercent = 0.2; + * var releaseTime = 0.5; + * + * var env, triOsc; + * + * function setup() { + * var cnv = createCanvas(100, 100); + * + * textAlign(CENTER); + * text('click to play', width/2, height/2); + * + * env = new p5.Env(); + * env.setADSR(attackTime, decayTime, susPercent, releaseTime); + * env.setRange(attackLevel, releaseLevel); + * + * triOsc = new p5.Oscillator('triangle'); + * triOsc.amp(env); + * triOsc.start(); + * triOsc.freq(220); + * + * cnv.mousePressed(playEnv); + * } + * + * function playEnv(){ + * env.play(); + * } + *
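+ * // An added, approximate sketch following the osc.freq(env) idea
+ * // mentioned above (names and values here are illustrative only):
+ * // var pitchEnv = new p5.Env(0.01, 880, 0.3, 440); // attack/decay time & level
+ * // var osc2 = new p5.Oscillator('sine');
+ * // osc2.start();
+ * // osc2.freq(pitchEnv); // let the envelope drive the frequency
+ * // pitchEnv.play();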
+ */ + p5.Env = function (t1, l1, t2, l2, t3, l3) { + /** + * Time until envelope reaches attackLevel + * @property attackTime + */ + this.aTime = t1 || 0.1; + /** + * Level once attack is complete. + * @property attackLevel + */ + this.aLevel = l1 || 1; + /** + * Time until envelope reaches decayLevel. + * @property decayTime + */ + this.dTime = t2 || 0.5; + /** + * Level after decay. The envelope will sustain here until it is released. + * @property decayLevel + */ + this.dLevel = l2 || 0; + /** + * Duration of the release portion of the envelope. + * @property releaseTime + */ + this.rTime = t3 || 0; + /** + * Level at the end of the release. + * @property releaseLevel + */ + this.rLevel = l3 || 0; + this._rampHighPercentage = 0.98; + this._rampLowPercentage = 0.02; + this.output = p5sound.audiocontext.createGain(); + this.control = new TimelineSignal(); + this._init(); + // this makes sure the envelope starts at zero + this.control.connect(this.output); + // connect to the output + this.connection = null; + // store connection + //array of math operation signal chaining + this.mathOps = [this.control]; + //whether envelope should be linear or exponential curve + this.isExponential = false; + // oscillator or buffer source to clear on env complete + // to save resources if/when it is retriggered + this.sourceToClear = null; + // set to true if attack is set, then false on release + this.wasTriggered = false; + // add to the soundArray so we can dispose of the env later + p5sound.soundArray.push(this); + }; + // this init function just smooths the starting value to zero and gives a start point for the timeline + // - it was necessary to remove glitches at the beginning. + p5.Env.prototype._init = function () { + var now = p5sound.audiocontext.currentTime; + var t = now; + this.control.setTargetAtTime(0.00001, t, 0.001); + //also, compute the correct time constants + this._setRampAD(this.aTime, this.dTime); + }; + /** + * Reset the envelope with a series of time/value pairs. + * + * @method set + * @param {Number} attackTime Time (in seconds) before level + * reaches attackLevel + * @param {Number} attackLevel Typically an amplitude between + * 0.0 and 1.0 + * @param {Number} decayTime Time + * @param {Number} decayLevel Amplitude (In a standard ADSR envelope, + * decayLevel = sustainLevel) + * @param {Number} releaseTime Release Time (in seconds) + * @param {Number} releaseLevel Amplitude + * @example + *
+ * var t1 = 0.1; // attack time in seconds + * var l1 = 0.7; // attack level 0.0 to 1.0 + * var t2 = 0.3; // decay time in seconds + * var l2 = 0.1; // decay level 0.0 to 1.0 + * var t3 = 0.2; // sustain time in seconds + * var l3 = dL; // sustain level 0.0 to 1.0 + * // release level defaults to zero + * + * var env; + * var triOsc; + * + * function setup() { + * background(0); + * noStroke(); + * fill(255); + * textAlign(CENTER); + * text('click to play', width/2, height/2); + * + * env = new p5.Env(t1, l1, t2, l2, t3, l3); + * triOsc = new p5.Oscillator('triangle'); + * triOsc.amp(env); // give the env control of the triOsc's amp + * triOsc.start(); + * } + * + * // mouseClick triggers envelope if over canvas + * function mouseClicked() { + * // is mouse over canvas? + * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) { + * env.play(triOsc); + * } + * } + *
+ * + */ + p5.Env.prototype.set = function (t1, l1, t2, l2, t3, l3) { + this.aTime = t1; + this.aLevel = l1; + this.dTime = t2 || 0; + this.dLevel = l2 || 0; + this.rTime = t3 || 0; + this.rLevel = l3 || 0; + // set time constants for ramp + this._setRampAD(t1, t2); + }; + /** + * Set values like a traditional + * + * ADSR envelope + * . + * + * @method setADSR + * @param {Number} attackTime Time (in seconds before envelope + * reaches Attack Level + * @param {Number} [decayTime] Time (in seconds) before envelope + * reaches Decay/Sustain Level + * @param {Number} [susRatio] Ratio between attackLevel and releaseLevel, on a scale from 0 to 1, + * where 1.0 = attackLevel, 0.0 = releaseLevel. + * The susRatio determines the decayLevel and the level at which the + * sustain portion of the envelope will sustain. + * For example, if attackLevel is 0.4, releaseLevel is 0, + * and susAmt is 0.5, the decayLevel would be 0.2. If attackLevel is + * increased to 1.0 (using setRange), + * then decayLevel would increase proportionally, to become 0.5. + * @param {Number} [releaseTime] Time in seconds from now (defaults to 0) + * @example + *
+ * var attackLevel = 1.0; + * var releaseLevel = 0; + * + * var attackTime = 0.001 + * var decayTime = 0.2; + * var susPercent = 0.2; + * var releaseTime = 0.5; + * + * var env, triOsc; + * + * function setup() { + * var cnv = createCanvas(100, 100); + * + * textAlign(CENTER); + * text('click to play', width/2, height/2); + * + * env = new p5.Env(); + * env.setADSR(attackTime, decayTime, susPercent, releaseTime); + * env.setRange(attackLevel, releaseLevel); + * + * triOsc = new p5.Oscillator('triangle'); + * triOsc.amp(env); + * triOsc.start(); + * triOsc.freq(220); + * + * cnv.mousePressed(playEnv); + * } + * + * function playEnv(){ + * env.play(); + * } + *
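+ * // Worked instance of the susRatio math described above (added note):
+ * // with attackLevel = 1.0, releaseLevel = 0 and susPercent = 0.2,
+ * // decayLevel = susPercent * (attackLevel - releaseLevel) + releaseLevel = 0.2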
+ */ + p5.Env.prototype.setADSR = function (aTime, dTime, sPercent, rTime) { + this.aTime = aTime; + this.dTime = dTime || 0; + // lerp + this.sPercent = sPercent || 0; + this.dLevel = typeof sPercent !== 'undefined' ? sPercent * (this.aLevel - this.rLevel) + this.rLevel : 0; + this.rTime = rTime || 0; + // also set time constants for ramp + this._setRampAD(aTime, dTime); + }; + /** + * Set max (attackLevel) and min (releaseLevel) of envelope. + * + * @method setRange + * @param {Number} aLevel attack level (defaults to 1) + * @param {Number} rLevel release level (defaults to 0) + * @example + *
+ * var attackLevel = 1.0; + * var releaseLevel = 0; + * + * var attackTime = 0.001 + * var decayTime = 0.2; + * var susPercent = 0.2; + * var releaseTime = 0.5; + * + * var env, triOsc; + * + * function setup() { + * var cnv = createCanvas(100, 100); + * + * textAlign(CENTER); + * text('click to play', width/2, height/2); + * + * env = new p5.Env(); + * env.setADSR(attackTime, decayTime, susPercent, releaseTime); + * env.setRange(attackLevel, releaseLevel); + * + * triOsc = new p5.Oscillator('triangle'); + * triOsc.amp(env); + * triOsc.start(); + * triOsc.freq(220); + * + * cnv.mousePressed(playEnv); + * } + * + * function playEnv(){ + * env.play(); + * } + *
+ */ + p5.Env.prototype.setRange = function (aLevel, rLevel) { + this.aLevel = aLevel || 1; + this.rLevel = rLevel || 0; + }; + // private (undocumented) method called when ADSR is set to set time constants for ramp + // + // Set the + // time constants for simple exponential ramps. + // The larger the time constant value, the slower the + // transition will be. + // + // method _setRampAD + // param {Number} attackTimeConstant attack time constant + // param {Number} decayTimeConstant decay time constant + // + p5.Env.prototype._setRampAD = function (t1, t2) { + this._rampAttackTime = this.checkExpInput(t1); + this._rampDecayTime = this.checkExpInput(t2); + var TCDenominator = 1; + /// Aatish Bhatia's calculation for time constant for rise(to adjust 1/1-e calculation to any percentage) + TCDenominator = Math.log(1 / this.checkExpInput(1 - this._rampHighPercentage)); + this._rampAttackTC = t1 / this.checkExpInput(TCDenominator); + TCDenominator = Math.log(1 / this._rampLowPercentage); + this._rampDecayTC = t2 / this.checkExpInput(TCDenominator); + }; + // private method + p5.Env.prototype.setRampPercentages = function (p1, p2) { + //set the percentages that the simple exponential ramps go to + this._rampHighPercentage = this.checkExpInput(p1); + this._rampLowPercentage = this.checkExpInput(p2); + var TCDenominator = 1; + //now re-compute the time constants based on those percentages + /// Aatish Bhatia's calculation for time constant for rise(to adjust 1/1-e calculation to any percentage) + TCDenominator = Math.log(1 / this.checkExpInput(1 - this._rampHighPercentage)); + this._rampAttackTC = this._rampAttackTime / this.checkExpInput(TCDenominator); + TCDenominator = Math.log(1 / this._rampLowPercentage); + this._rampDecayTC = this._rampDecayTime / this.checkExpInput(TCDenominator); + }; + /** + * Assign a parameter to be controlled by this envelope. + * If a p5.Sound object is given, then the p5.Env will control its + * output gain. If multiple inputs are provided, the env will + * control all of them. + * + * @method setInput + * @param {Object} [...inputs] A p5.sound object or + * Web Audio Param. + */ + p5.Env.prototype.setInput = function () { + for (var i = 0; i < arguments.length; i++) { + this.connect(arguments[i]); + } + }; + /** + * Set whether the envelope ramp is linear (default) or exponential. + * Exponential ramps can be useful because we perceive amplitude + * and frequency logarithmically. + * + * @method setExp + * @param {Boolean} isExp true is exponential, false is linear + */ + p5.Env.prototype.setExp = function (isExp) { + this.isExponential = isExp; + }; + //helper method to protect against zero values being sent to exponential functions + p5.Env.prototype.checkExpInput = function (value) { + if (value <= 0) { + value = 1e-8; + } + return value; + }; + /** + * Play tells the envelope to start acting on a given input. + * If the input is a p5.sound object (i.e. AudioIn, Oscillator, + * SoundFile), then Env will control its output volume. + * Envelopes can also be used to control any + * Web Audio Audio Param. + * + * @method play + * @param {Object} unit A p5.sound object or + * Web Audio Param. + * @param {Number} [startTime] time from now (in seconds) at which to play + * @param {Number} [sustainTime] time to sustain before releasing the envelope + * @example + *
+ * var attackLevel = 1.0; + * var releaseLevel = 0; + * + * var attackTime = 0.001 + * var decayTime = 0.2; + * var susPercent = 0.2; + * var releaseTime = 0.5; + * + * var env, triOsc; + * + * function setup() { + * var cnv = createCanvas(100, 100); + * + * textAlign(CENTER); + * text('click to play', width/2, height/2); + * + * env = new p5.Env(); + * env.setADSR(attackTime, decayTime, susPercent, releaseTime); + * env.setRange(attackLevel, releaseLevel); + * + * triOsc = new p5.Oscillator('triangle'); + * triOsc.amp(env); + * triOsc.start(); + * triOsc.freq(220); + * + * cnv.mousePressed(playEnv); + * } + * + * function playEnv(){ + * // trigger env on triOsc, 0 seconds from now + * // After decay, sustain for 0.2 seconds before release + * env.play(triOsc, 0, 0.2); + * } + *
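+ * // Timing note (an added, approximate remark): play(unit, startTime, sustainTime)
+ * // schedules the attack at startTime and the release at
+ * // startTime + attackTime + decayTime + sustainTime, so the gesture above
+ * // lasts roughly 0.001 + 0.2 + 0.2 + 0.5 seconds in total.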
+ */ + p5.Env.prototype.play = function (unit, secondsFromNow, susTime) { + var tFromNow = secondsFromNow || 0; + var susTime = susTime || 0; + if (unit) { + if (this.connection !== unit) { + this.connect(unit); + } + } + this.triggerAttack(unit, tFromNow); + this.triggerRelease(unit, tFromNow + this.aTime + this.dTime + susTime); + }; + /** + * Trigger the Attack, and Decay portion of the Envelope. + * Similar to holding down a key on a piano, but it will + * hold the sustain level until you let go. Input can be + * any p5.sound object, or a + * Web Audio Param. + * + * @method triggerAttack + * @param {Object} unit p5.sound Object or Web Audio Param + * @param {Number} secondsFromNow time from now (in seconds) + * @example + *
+ * + * var attackLevel = 1.0; + * var releaseLevel = 0; + * + * var attackTime = 0.001 + * var decayTime = 0.3; + * var susPercent = 0.4; + * var releaseTime = 0.5; + * + * var env, triOsc; + * + * function setup() { + * var cnv = createCanvas(100, 100); + * background(200); + * textAlign(CENTER); + * text('click to play', width/2, height/2); + * + * env = new p5.Env(); + * env.setADSR(attackTime, decayTime, susPercent, releaseTime); + * env.setRange(attackLevel, releaseLevel); + * + * triOsc = new p5.Oscillator('triangle'); + * triOsc.amp(env); + * triOsc.start(); + * triOsc.freq(220); + * + * cnv.mousePressed(envAttack); + * } + * + * function envAttack(){ + * console.log('trigger attack'); + * env.triggerAttack(); + * + * background(0,255,0); + * text('attack!', width/2, height/2); + * } + * + * function mouseReleased() { + * env.triggerRelease(); + * + * background(200); + * text('click to play', width/2, height/2); + * } + *
+ */ + p5.Env.prototype.triggerAttack = function (unit, secondsFromNow) { + var now = p5sound.audiocontext.currentTime; + var tFromNow = secondsFromNow || 0; + var t = now + tFromNow; + this.lastAttack = t; + this.wasTriggered = true; + if (unit) { + if (this.connection !== unit) { + this.connect(unit); + } + } + // get and set value (with linear ramp) to anchor automation + var valToSet = this.control.getValueAtTime(t); + this.control.cancelScheduledValues(t); + // not sure if this is necessary + if (this.isExponential === true) { + this.control.exponentialRampToValueAtTime(this.checkExpInput(valToSet), t); + } else { + this.control.linearRampToValueAtTime(valToSet, t); + } + // after each ramp completes, cancel scheduled values + // (so they can be overridden in case env has been re-triggered) + // then, set current value (with linearRamp to avoid click) + // then, schedule the next automation... + // attack + t += this.aTime; + if (this.isExponential === true) { + this.control.exponentialRampToValueAtTime(this.checkExpInput(this.aLevel), t); + valToSet = this.checkExpInput(this.control.getValueAtTime(t)); + this.control.cancelScheduledValues(t); + this.control.exponentialRampToValueAtTime(valToSet, t); + } else { + this.control.linearRampToValueAtTime(this.aLevel, t); + valToSet = this.control.getValueAtTime(t); + this.control.cancelScheduledValues(t); + this.control.linearRampToValueAtTime(valToSet, t); + } + // decay to decay level (if using ADSR, then decay level == sustain level) + t += this.dTime; + if (this.isExponential === true) { + this.control.exponentialRampToValueAtTime(this.checkExpInput(this.dLevel), t); + valToSet = this.checkExpInput(this.control.getValueAtTime(t)); + this.control.cancelScheduledValues(t); + this.control.exponentialRampToValueAtTime(valToSet, t); + } else { + this.control.linearRampToValueAtTime(this.dLevel, t); + valToSet = this.control.getValueAtTime(t); + this.control.cancelScheduledValues(t); + this.control.linearRampToValueAtTime(valToSet, t); + } + }; + /** + * Trigger the Release of the Envelope. This is similar to releasing + * the key on a piano and letting the sound fade according to the + * release level and release time. + * + * @method triggerRelease + * @param {Object} unit p5.sound Object or Web Audio Param + * @param {Number} secondsFromNow time to trigger the release + * @example + *
+ * + * var attackLevel = 1.0; + * var releaseLevel = 0; + * + * var attackTime = 0.001 + * var decayTime = 0.3; + * var susPercent = 0.4; + * var releaseTime = 0.5; + * + * var env, triOsc; + * + * function setup() { + * var cnv = createCanvas(100, 100); + * background(200); + * textAlign(CENTER); + * text('click to play', width/2, height/2); + * + * env = new p5.Env(); + * env.setADSR(attackTime, decayTime, susPercent, releaseTime); + * env.setRange(attackLevel, releaseLevel); + * + * triOsc = new p5.Oscillator('triangle'); + * triOsc.amp(env); + * triOsc.start(); + * triOsc.freq(220); + * + * cnv.mousePressed(envAttack); + * } + * + * function envAttack(){ + * console.log('trigger attack'); + * env.triggerAttack(); + * + * background(0,255,0); + * text('attack!', width/2, height/2); + * } + * + * function mouseReleased() { + * env.triggerRelease(); + * + * background(200); + * text('click to play', width/2, height/2); + * } + *
+ */ + p5.Env.prototype.triggerRelease = function (unit, secondsFromNow) { + // only trigger a release if an attack was triggered + if (!this.wasTriggered) { + // this currently causes a bit of trouble: + // if a later release has been scheduled (via the play function) + // a new earlier release won't interrupt it, because + // this.wasTriggered has already been set to false. + // If we want new earlier releases to override, then we need to + // keep track of the last release time, and if the new release time is + // earlier, then use it. + return; + } + var now = p5sound.audiocontext.currentTime; + var tFromNow = secondsFromNow || 0; + var t = now + tFromNow; + if (unit) { + if (this.connection !== unit) { + this.connect(unit); + } + } + // get and set value (with linear or exponential ramp) to anchor automation + var valToSet = this.control.getValueAtTime(t); + this.control.cancelScheduledValues(t); + // not sure if this is necessary + if (this.isExponential === true) { + this.control.exponentialRampToValueAtTime(this.checkExpInput(valToSet), t); + } else { + this.control.linearRampToValueAtTime(valToSet, t); + } + // release + t += this.rTime; + if (this.isExponential === true) { + this.control.exponentialRampToValueAtTime(this.checkExpInput(this.rLevel), t); + valToSet = this.checkExpInput(this.control.getValueAtTime(t)); + this.control.cancelScheduledValues(t); + this.control.exponentialRampToValueAtTime(valToSet, t); + } else { + this.control.linearRampToValueAtTime(this.rLevel, t); + valToSet = this.control.getValueAtTime(t); + this.control.cancelScheduledValues(t); + this.control.linearRampToValueAtTime(valToSet, t); + } + this.wasTriggered = false; + }; + /** + * Exponentially ramp to a value using the first two + * values from setADSR(attackTime, decayTime) + * as + * time constants for simple exponential ramps. + * If the value is higher than current value, it uses attackTime, + * while a decrease uses decayTime. + * + * @method ramp + * @param {Object} unit p5.sound Object or Web Audio Param + * @param {Number} secondsFromNow When to trigger the ramp + * @param {Number} v Target value + * @param {Number} [v2] Second target value (optional) + * @example + *
+ * var env, osc, amp, cnv; + * + * var attackTime = 0.001; + * var decayTime = 0.2; + * var attackLevel = 1; + * var decayLevel = 0; + * + * function setup() { + * cnv = createCanvas(100, 100); + * fill(0,255,0); + * noStroke(); + * + * env = new p5.Env(); + * env.setADSR(attackTime, decayTime); + * + * osc = new p5.Oscillator(); + * osc.amp(env); + * osc.start(); + * + * amp = new p5.Amplitude(); + * + * cnv.mousePressed(triggerRamp); + * } + * + * function triggerRamp() { + * env.ramp(osc, 0, attackLevel, decayLevel); + * } + * + * function draw() { + * background(20,20,20); + * text('click me', 10, 20); + * var h = map(amp.getLevel(), 0, 0.4, 0, height);; + * + * rect(0, height, width, -h); + * } + *
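+ * // An added, approximate note: with a single target, e.g.
+ * // env.ramp(osc, 0, attackLevel), the ramp rises toward that value using
+ * // the attack time constant and stops; the optional second value (as
+ * // above) then falls away using the decay time constant.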
+ */ + p5.Env.prototype.ramp = function (unit, secondsFromNow, v1, v2) { + var now = p5sound.audiocontext.currentTime; + var tFromNow = secondsFromNow || 0; + var t = now + tFromNow; + var destination1 = this.checkExpInput(v1); + var destination2 = typeof v2 !== 'undefined' ? this.checkExpInput(v2) : undefined; + // connect env to unit if not already connected + if (unit) { + if (this.connection !== unit) { + this.connect(unit); + } + } + //get current value + var currentVal = this.checkExpInput(this.control.getValueAtTime(t)); + this.control.cancelScheduledValues(t); + //if it's going up + if (destination1 > currentVal) { + this.control.setTargetAtTime(destination1, t, this._rampAttackTC); + t += this._rampAttackTime; + } else if (destination1 < currentVal) { + this.control.setTargetAtTime(destination1, t, this._rampDecayTC); + t += this._rampDecayTime; + } + // Now the second part of envelope begins + if (destination2 === undefined) + return; + //if it's going up + if (destination2 > destination1) { + this.control.setTargetAtTime(destination2, t, this._rampAttackTC); + } else if (destination2 < destination1) { + this.control.setTargetAtTime(destination2, t, this._rampDecayTC); + } + }; + p5.Env.prototype.connect = function (unit) { + this.connection = unit; + // assume we're talking about output gain + // unless given a different audio param + if (unit instanceof p5.Oscillator || unit instanceof p5.SoundFile || unit instanceof p5.AudioIn || unit instanceof p5.Reverb || unit instanceof p5.Noise || unit instanceof p5.Filter || unit instanceof p5.Delay) { + unit = unit.output.gain; + } + if (unit instanceof AudioParam) { + //set the initial value + unit.setValueAtTime(0, p5sound.audiocontext.currentTime); + } + if (unit instanceof p5.Signal) { + unit.setValue(0); + } + this.output.connect(unit); + }; + p5.Env.prototype.disconnect = function () { + this.output.disconnect(); + }; + // Signal Math + /** + * Add a value to the p5.Oscillator's output amplitude, + * and return the oscillator. Calling this method + * again will override the initial add() with new values. + * + * @method add + * @param {Number} number Constant number to add + * @return {p5.Env} Envelope Returns this envelope + * with scaled output + */ + p5.Env.prototype.add = function (num) { + var add = new Add(num); + var thisChain = this.mathOps.length; + var nextChain = this.output; + return p5.prototype._mathChain(this, add, thisChain, nextChain, Add); + }; + /** + * Multiply the p5.Env's output amplitude + * by a fixed value. Calling this method + * again will override the initial mult() with new values. + * + * @method mult + * @param {Number} number Constant number to multiply + * @return {p5.Env} Envelope Returns this envelope + * with scaled output + */ + p5.Env.prototype.mult = function (num) { + var mult = new Mult(num); + var thisChain = this.mathOps.length; + var nextChain = this.output; + return p5.prototype._mathChain(this, mult, thisChain, nextChain, Mult); + }; + /** + * Scale this envelope's amplitude values to a given + * range, and return the envelope. Calling this method + * again will override the initial scale() with new values. 
+ * + * @method scale + * @param {Number} inMin input range minumum + * @param {Number} inMax input range maximum + * @param {Number} outMin input range minumum + * @param {Number} outMax input range maximum + * @return {p5.Env} Envelope Returns this envelope + * with scaled output + */ + p5.Env.prototype.scale = function (inMin, inMax, outMin, outMax) { + var scale = new Scale(inMin, inMax, outMin, outMax); + var thisChain = this.mathOps.length; + var nextChain = this.output; + return p5.prototype._mathChain(this, scale, thisChain, nextChain, Scale); + }; + // get rid of the oscillator + p5.Env.prototype.dispose = function () { + // remove reference from soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + this.disconnect(); + try { + this.control.dispose(); + this.control = null; + } catch (e) { + console.warn(e, 'already disposed p5.Env'); + } + for (var i = 1; i < this.mathOps.length; i++) { + this.mathOps[i].dispose(); + } + }; +}(master, Tone_signal_Add, Tone_signal_Multiply, Tone_signal_Scale, Tone_signal_TimelineSignal, Tone_core_Tone); +var pulse; +'use strict'; +pulse = function () { + var p5sound = master; + /** + * Creates a Pulse object, an oscillator that implements + * Pulse Width Modulation. + * The pulse is created with two oscillators. + * Accepts a parameter for frequency, and to set the + * width between the pulses. See + * p5.Oscillator for a full list of methods. + * + * @class p5.Pulse + * @extends p5.Oscillator + * @constructor + * @param {Number} [freq] Frequency in oscillations per second (Hz) + * @param {Number} [w] Width between the pulses (0 to 1.0, + * defaults to 0) + * @example + *
+ * var pulse; + * function setup() { + * background(0); + * + * // Create and start the pulse wave oscillator + * pulse = new p5.Pulse(); + * pulse.amp(0.5); + * pulse.freq(220); + * pulse.start(); + * } + * + * function draw() { + * var w = map(mouseX, 0, width, 0, 1); + * w = constrain(w, 0, 1); + * pulse.width(w) + * } + *
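+ *
+ * A shorter sketch (not part of the official example): the width can also
+ * be passed to the constructor as the second argument, e.g.
+ *   // var pwm = new p5.Pulse(220, 0.4); // 220 Hz, width 0.4
+ *   // pwm.amp(0.5);
+ *   // pwm.start();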
+ */ + p5.Pulse = function (freq, w) { + p5.Oscillator.call(this, freq, 'sawtooth'); + // width of PWM, should be betw 0 to 1.0 + this.w = w || 0; + // create a second oscillator with inverse frequency + this.osc2 = new p5.SawOsc(freq); + // create a delay node + this.dNode = p5sound.audiocontext.createDelay(); + // dc offset + this.dcOffset = createDCOffset(); + this.dcGain = p5sound.audiocontext.createGain(); + this.dcOffset.connect(this.dcGain); + this.dcGain.connect(this.output); + // set delay time based on PWM width + this.f = freq || 440; + var mW = this.w / this.oscillator.frequency.value; + this.dNode.delayTime.value = mW; + this.dcGain.gain.value = 1.7 * (0.5 - this.w); + // disconnect osc2 and connect it to delay, which is connected to output + this.osc2.disconnect(); + this.osc2.panner.disconnect(); + this.osc2.amp(-1); + // inverted amplitude + this.osc2.output.connect(this.dNode); + this.dNode.connect(this.output); + this.output.gain.value = 1; + this.output.connect(this.panner); + }; + p5.Pulse.prototype = Object.create(p5.Oscillator.prototype); + /** + * Set the width of a Pulse object (an oscillator that implements + * Pulse Width Modulation). + * + * @method width + * @param {Number} [width] Width between the pulses (0 to 1.0, + * defaults to 0) + */ + p5.Pulse.prototype.width = function (w) { + if (typeof w === 'number') { + if (w <= 1 && w >= 0) { + this.w = w; + // set delay time based on PWM width + // var mW = map(this.w, 0, 1.0, 0, 1/this.f); + var mW = this.w / this.oscillator.frequency.value; + this.dNode.delayTime.value = mW; + } + this.dcGain.gain.value = 1.7 * (0.5 - this.w); + } else { + w.connect(this.dNode.delayTime); + var sig = new p5.SignalAdd(-0.5); + sig.setInput(w); + sig = sig.mult(-1); + sig = sig.mult(1.7); + sig.connect(this.dcGain.gain); + } + }; + p5.Pulse.prototype.start = function (f, time) { + var now = p5sound.audiocontext.currentTime; + var t = time || 0; + if (!this.started) { + var freq = f || this.f; + var type = this.oscillator.type; + this.oscillator = p5sound.audiocontext.createOscillator(); + this.oscillator.frequency.setValueAtTime(freq, now); + this.oscillator.type = type; + this.oscillator.connect(this.output); + this.oscillator.start(t + now); + // set up osc2 + this.osc2.oscillator = p5sound.audiocontext.createOscillator(); + this.osc2.oscillator.frequency.setValueAtTime(freq, t + now); + this.osc2.oscillator.type = type; + this.osc2.oscillator.connect(this.osc2.output); + this.osc2.start(t + now); + this.freqNode = [ + this.oscillator.frequency, + this.osc2.oscillator.frequency + ]; + // start dcOffset, too + this.dcOffset = createDCOffset(); + this.dcOffset.connect(this.dcGain); + this.dcOffset.start(t + now); + // if LFO connections depend on these oscillators + if (this.mods !== undefined && this.mods.frequency !== undefined) { + this.mods.frequency.connect(this.freqNode[0]); + this.mods.frequency.connect(this.freqNode[1]); + } + this.started = true; + this.osc2.started = true; + } + }; + p5.Pulse.prototype.stop = function (time) { + if (this.started) { + var t = time || 0; + var now = p5sound.audiocontext.currentTime; + this.oscillator.stop(t + now); + this.osc2.oscillator.stop(t + now); + this.dcOffset.stop(t + now); + this.started = false; + this.osc2.started = false; + } + }; + p5.Pulse.prototype.freq = function (val, rampTime, tFromNow) { + if (typeof val === 'number') { + this.f = val; + var now = p5sound.audiocontext.currentTime; + var rampTime = rampTime || 0; + var tFromNow = tFromNow || 0; + var currentFreq = 
this.oscillator.frequency.value; + this.oscillator.frequency.cancelScheduledValues(now); + this.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow); + this.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now); + this.osc2.oscillator.frequency.cancelScheduledValues(now); + this.osc2.oscillator.frequency.setValueAtTime(currentFreq, now + tFromNow); + this.osc2.oscillator.frequency.exponentialRampToValueAtTime(val, tFromNow + rampTime + now); + if (this.freqMod) { + this.freqMod.output.disconnect(); + this.freqMod = null; + } + } else if (val.output) { + val.output.disconnect(); + val.output.connect(this.oscillator.frequency); + val.output.connect(this.osc2.oscillator.frequency); + this.freqMod = val; + } + }; + // inspiration: http://webaudiodemos.appspot.com/oscilloscope/ + function createDCOffset() { + var ac = p5sound.audiocontext; + var buffer = ac.createBuffer(1, 2048, ac.sampleRate); + var data = buffer.getChannelData(0); + for (var i = 0; i < 2048; i++) + data[i] = 1; + var bufferSource = ac.createBufferSource(); + bufferSource.buffer = buffer; + bufferSource.loop = true; + return bufferSource; + } +}(master, oscillator); +var noise; +'use strict'; +noise = function () { + var p5sound = master; + /** + * Noise is a type of oscillator that generates a buffer with random values. + * + * @class p5.Noise + * @extends p5.Oscillator + * @constructor + * @param {String} type Type of noise can be 'white' (default), + * 'brown' or 'pink'. + */ + p5.Noise = function (type) { + var assignType; + p5.Oscillator.call(this); + delete this.f; + delete this.freq; + delete this.oscillator; + if (type === 'brown') { + assignType = _brownNoise; + } else if (type === 'pink') { + assignType = _pinkNoise; + } else { + assignType = _whiteNoise; + } + this.buffer = assignType; + }; + p5.Noise.prototype = Object.create(p5.Oscillator.prototype); + // generate noise buffers + var _whiteNoise = function () { + var bufferSize = 2 * p5sound.audiocontext.sampleRate; + var whiteBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate); + var noiseData = whiteBuffer.getChannelData(0); + for (var i = 0; i < bufferSize; i++) { + noiseData[i] = Math.random() * 2 - 1; + } + whiteBuffer.type = 'white'; + return whiteBuffer; + }(); + var _pinkNoise = function () { + var bufferSize = 2 * p5sound.audiocontext.sampleRate; + var pinkBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate); + var noiseData = pinkBuffer.getChannelData(0); + var b0, b1, b2, b3, b4, b5, b6; + b0 = b1 = b2 = b3 = b4 = b5 = b6 = 0; + for (var i = 0; i < bufferSize; i++) { + var white = Math.random() * 2 - 1; + b0 = 0.99886 * b0 + white * 0.0555179; + b1 = 0.99332 * b1 + white * 0.0750759; + b2 = 0.969 * b2 + white * 0.153852; + b3 = 0.8665 * b3 + white * 0.3104856; + b4 = 0.55 * b4 + white * 0.5329522; + b5 = -0.7616 * b5 - white * 0.016898; + noiseData[i] = b0 + b1 + b2 + b3 + b4 + b5 + b6 + white * 0.5362; + noiseData[i] *= 0.11; + // (roughly) compensate for gain + b6 = white * 0.115926; + } + pinkBuffer.type = 'pink'; + return pinkBuffer; + }(); + var _brownNoise = function () { + var bufferSize = 2 * p5sound.audiocontext.sampleRate; + var brownBuffer = p5sound.audiocontext.createBuffer(1, bufferSize, p5sound.audiocontext.sampleRate); + var noiseData = brownBuffer.getChannelData(0); + var lastOut = 0; + for (var i = 0; i < bufferSize; i++) { + var white = Math.random() * 2 - 1; + noiseData[i] = (lastOut + 0.02 * white) / 1.02; + lastOut = 
noiseData[i]; + noiseData[i] *= 3.5; + } + brownBuffer.type = 'brown'; + return brownBuffer; + }(); + /** + * Set type of noise to 'white', 'pink' or 'brown'. + * White is the default. + * + * @method setType + * @param {String} [type] 'white', 'pink' or 'brown' + */ + p5.Noise.prototype.setType = function (type) { + switch (type) { + case 'white': + this.buffer = _whiteNoise; + break; + case 'pink': + this.buffer = _pinkNoise; + break; + case 'brown': + this.buffer = _brownNoise; + break; + default: + this.buffer = _whiteNoise; + } + if (this.started) { + var now = p5sound.audiocontext.currentTime; + this.stop(now); + this.start(now + 0.01); + } + }; + p5.Noise.prototype.getType = function () { + return this.buffer.type; + }; + /** + * Start the noise + * + * @method start + */ + p5.Noise.prototype.start = function () { + if (this.started) { + this.stop(); + } + this.noise = p5sound.audiocontext.createBufferSource(); + this.noise.buffer = this.buffer; + this.noise.loop = true; + this.noise.connect(this.output); + var now = p5sound.audiocontext.currentTime; + this.noise.start(now); + this.started = true; + }; + /** + * Stop the noise. + * + * @method stop + */ + p5.Noise.prototype.stop = function () { + var now = p5sound.audiocontext.currentTime; + if (this.noise) { + this.noise.stop(now); + this.started = false; + } + }; + /** + * Pan the noise. + * + * @method pan + * @param {Number} panning Number between -1 (left) + * and 1 (right) + * @param {Number} timeFromNow schedule this event to happen + * seconds from now + */ + /** + * Set the amplitude of the noise between 0 and 1.0. Or, + * modulate amplitude with an audio signal such as an oscillator. + * + * @param {Number|Object} volume amplitude between 0 and 1.0 + * or modulating signal/oscillator + * @param {Number} [rampTime] create a fade that lasts rampTime + * @param {Number} [timeFromNow] schedule this event to happen + * seconds from now + */ + /** + * Send output to a p5.sound or web audio object + * + * @method connect + * @param {Object} unit + */ + /** + * Disconnect all output. + * + * @method disconnect + */ + p5.Noise.prototype.dispose = function () { + var now = p5sound.audiocontext.currentTime; + // remove reference from soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + if (this.noise) { + this.noise.disconnect(); + this.stop(now); + } + if (this.output) { + this.output.disconnect(); + } + if (this.panner) { + this.panner.disconnect(); + } + this.output = null; + this.panner = null; + this.buffer = null; + this.noise = null; + }; +}(master); +var audioin; +'use strict'; +audioin = function () { + var p5sound = master; + /** + *

+ * Get audio from an input, i.e. your computer's microphone.
+ *
+ * Turn the mic on/off with the start() and stop() methods. When the mic
+ * is on, its volume can be measured with getLevel or by connecting an
+ * FFT object.
+ *
+ * If you want to hear the AudioIn, use the .connect() method.
+ * AudioIn does not connect to p5.sound output by default to prevent
+ * feedback.
+ *
+ * Note: This uses the getUserMedia/
+ * Stream API, which is not supported by certain browsers. Access in Chrome browser
+ * is limited to localhost and https, but access over http may be limited.
+ *
+ * @class p5.AudioIn
+ * @constructor
+ * @param {Function} [errorCallback] A function to call if there is an error
+ *                                   accessing the AudioIn. For example,
+ *                                   Safari and iOS devices do not
+ *                                   currently allow microphone access.
+ * @example
+ *
+ * var mic; + * function setup(){ + * mic = new p5.AudioIn() + * mic.start(); + * } + * function draw(){ + * background(0); + * micLevel = mic.getLevel(); + * ellipse(width/2, constrain(height-micLevel*height*5, 0, height), 10, 10); + * } + *
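+ *
+ * A minimal sketch of the optional errorCallback (not part of the official
+ * example); it runs if the browser cannot provide an audio input:
+ *   // mic = new p5.AudioIn(function() {
+ *   //   console.log('AudioIn is not available in this browser');
+ *   // });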
+ */ + p5.AudioIn = function (errorCallback) { + // set up audio input + this.input = p5sound.audiocontext.createGain(); + this.output = p5sound.audiocontext.createGain(); + this.stream = null; + this.mediaStream = null; + this.currentSource = 0; + /** + * Client must allow browser to access their microphone / audioin source. + * Default: false. Will become true when the client enables acces. + * + * @property {Boolean} enabled + */ + this.enabled = false; + // create an amplitude, connect to it by default but not to master out + this.amplitude = new p5.Amplitude(); + this.output.connect(this.amplitude.input); + // Some browsers let developer determine their input sources + if (typeof window.MediaStreamTrack === 'undefined') { + if (errorCallback) { + errorCallback(); + } else { + window.alert('This browser does not support AudioIn'); + } + } else if (typeof window.MediaDevices.enumerateDevices === 'function') { + // Chrome supports getSources to list inputs. Dev picks default + window.MediaDevices.enumerateDevices(this._gotSources); + } else { + } + // add to soundArray so we can dispose on close + p5sound.soundArray.push(this); + }; + /** + * Start processing audio input. This enables the use of other + * AudioIn methods like getLevel(). Note that by default, AudioIn + * is not connected to p5.sound's output. So you won't hear + * anything unless you use the connect() method.
+ * + * Certain browsers limit access to the user's microphone. For example, + * Chrome only allows access from localhost and over https. For this reason, + * you may want to include an errorCallback—a function that is called in case + * the browser won't provide mic access. + * + * @method start + * @param {Function} [successCallback] Name of a function to call on + * success. + * @param {Function} [errorCallback] Name of a function to call if + * there was an error. For example, + * some browsers do not support + * getUserMedia. + */ + p5.AudioIn.prototype.start = function (successCallback, errorCallback) { + var self = this; + // if stream was already started... + // if _gotSources() i.e. developers determine which source to use + if (p5sound.inputSources[self.currentSource]) { + // set the audio source + var audioSource = p5sound.inputSources[self.currentSource].id; + var constraints = { audio: { optional: [{ sourceId: audioSource }] } }; + window.navigator.getUserMedia(constraints, this._onStream = function (stream) { + self.stream = stream; + self.enabled = true; + // Wrap a MediaStreamSourceNode around the live input + self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream); + self.mediaStream.connect(self.output); + if (successCallback) + successCallback(); + // only send to the Amplitude reader, so we can see it but not hear it. + self.amplitude.setInput(self.output); + }, this._onStreamError = function (e) { + if (errorCallback) + errorCallback(e); + else + console.error(e); + }); + } else { + // if Firefox where users select their source via browser + // if (typeof MediaStreamTrack.getSources === 'undefined') { + // Only get the audio stream. + window.navigator.getUserMedia({ 'audio': true }, this._onStream = function (stream) { + self.stream = stream; + self.enabled = true; + // Wrap a MediaStreamSourceNode around the live input + self.mediaStream = p5sound.audiocontext.createMediaStreamSource(stream); + self.mediaStream.connect(self.output); + // only send to the Amplitude reader, so we can see it but not hear it. + self.amplitude.setInput(self.output); + if (successCallback) + successCallback(); + }, this._onStreamError = function (e) { + if (errorCallback) + errorCallback(e); + else + console.error(e); + }); + } + }; + /** + * Turn the AudioIn off. If the AudioIn is stopped, it cannot getLevel(). + * If re-starting, the user may be prompted for permission access. + * + * @method stop + */ + p5.AudioIn.prototype.stop = function () { + if (this.stream) { + // assume only one track + this.stream.getTracks()[0].stop(); + } + }; + /** + * Connect to an audio unit. If no parameter is provided, will + * connect to the master output (i.e. your speakers).
+ * + * @method connect + * @param {Object} [unit] An object that accepts audio input, + * such as an FFT + */ + p5.AudioIn.prototype.connect = function (unit) { + if (unit) { + if (unit.hasOwnProperty('input')) { + this.output.connect(unit.input); + } else if (unit.hasOwnProperty('analyser')) { + this.output.connect(unit.analyser); + } else { + this.output.connect(unit); + } + } else { + this.output.connect(p5sound.input); + } + }; + /** + * Disconnect the AudioIn from all audio units. For example, if + * connect() had been called, disconnect() will stop sending + * signal to your speakers.
+ * + * @method disconnect + */ + p5.AudioIn.prototype.disconnect = function () { + this.output.disconnect(); + // stay connected to amplitude even if not outputting to p5 + this.output.connect(this.amplitude.input); + }; + /** + * Read the Amplitude (volume level) of an AudioIn. The AudioIn + * class contains its own instance of the Amplitude class to help + * make it easy to get a microphone's volume level. Accepts an + * optional smoothing value (0.0 < 1.0). NOTE: AudioIn must + * .start() before using .getLevel().
+ * + * @method getLevel + * @param {Number} [smoothing] Smoothing is 0.0 by default. + * Smooths values based on previous values. + * @return {Number} Volume level (between 0.0 and 1.0) + */ + p5.AudioIn.prototype.getLevel = function (smoothing) { + if (smoothing) { + this.amplitude.smoothing = smoothing; + } + return this.amplitude.getLevel(); + }; + /** + * Add input sources to the list of available sources. + * + * @private + */ + p5.AudioIn.prototype._gotSources = function (sourceInfos) { + for (var i = 0; i < sourceInfos.length; i++) { + var sourceInfo = sourceInfos[i]; + if (sourceInfo.kind === 'audio') { + // add the inputs to inputSources + //p5sound.inputSources.push(sourceInfo); + return sourceInfo; + } + } + }; + /** + * Set amplitude (volume) of a mic input between 0 and 1.0.
+ * + * @method amp + * @param {Number} vol between 0 and 1.0 + * @param {Number} [time] ramp time (optional) + */ + p5.AudioIn.prototype.amp = function (vol, t) { + if (t) { + var rampTime = t || 0; + var currentVol = this.output.gain.value; + this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime); + this.output.gain.setValueAtTime(currentVol, p5sound.audiocontext.currentTime); + this.output.gain.linearRampToValueAtTime(vol, rampTime + p5sound.audiocontext.currentTime); + } else { + this.output.gain.cancelScheduledValues(p5sound.audiocontext.currentTime); + this.output.gain.setValueAtTime(vol, p5sound.audiocontext.currentTime); + } + }; + p5.AudioIn.prototype.listSources = function () { + console.log('listSources is deprecated - please use AudioIn.getSources'); + console.log('input sources: '); + if (p5sound.inputSources.length > 0) { + return p5sound.inputSources; + } else { + return 'This browser does not support MediaStreamTrack.getSources()'; + } + }; + /** + * Chrome only. Returns a list of available input sources + * and allows the user to set the media source. Firefox allows + * the user to choose from input sources in the permissions dialogue + * instead of enumerating available sources and selecting one. + * Note: in order to have descriptive media names your page must be + * served over a secure (HTTPS) connection and the page should + * request user media before enumerating devices. Otherwise device + * ID will be a long device ID number and does not specify device + * type. For example see + * https://simpl.info/getusermedia/sources/index.html vs. + * http://simpl.info/getusermedia/sources/index.html + * + * @method getSources + * @param {Function} callback a callback to handle the sources + * when they have been enumerated + * @example + *
+ * var audioGrab; + * + * function setup(){ + * //new audioIn + * audioGrab = new p5.AudioIn(); + * + * audioGrab.getSources(function(sourceList) { + * //print out the array of available sources + * console.log(sourceList); + * //set the source to the first item in the inputSources array + * audioGrab.setSource(0); + * }); + * } + *
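+ *
+ * A defensive variant (not part of the official example): getSources relies
+ * on MediaStreamTrack.getSources, so a sketch may guard the call the same
+ * way the method itself does:
+ *   // if (typeof window.MediaStreamTrack.getSources === 'function') {
+ *   //   audioGrab.getSources(function(sourceList) { audioGrab.setSource(0); });
+ *   // }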
+ */ + p5.AudioIn.prototype.getSources = function (callback) { + if (typeof window.MediaStreamTrack.getSources === 'function') { + window.MediaStreamTrack.getSources(function (data) { + for (var i = 0, max = data.length; i < max; i++) { + var sourceInfo = data[i]; + if (sourceInfo.kind === 'audio') { + // add the inputs to inputSources + p5sound.inputSources.push(sourceInfo); + } + } + callback(p5sound.inputSources); + }); + } else { + console.log('This browser does not support MediaStreamTrack.getSources()'); + } + }; + /** + * Set the input source. Accepts a number representing a + * position in the array returned by listSources(). + * This is only available in browsers that support + * MediaStreamTrack.getSources(). Instead, some browsers + * give users the option to set their own media source.
+ * + * @method setSource + * @param {number} num position of input source in the array + */ + p5.AudioIn.prototype.setSource = function (num) { + // TO DO - set input by string or # (array position) + var self = this; + if (p5sound.inputSources.length > 0 && num < p5sound.inputSources.length) { + // set the current source + self.currentSource = num; + console.log('set source to ' + p5sound.inputSources[self.currentSource].id); + } else { + console.log('unable to set input source'); + } + }; + // private method + p5.AudioIn.prototype.dispose = function () { + // remove reference from soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + this.stop(); + if (this.output) { + this.output.disconnect(); + } + if (this.amplitude) { + this.amplitude.disconnect(); + } + this.amplitude = null; + this.output = null; + }; +}(master); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_Negate; +Tone_signal_Negate = function (Tone) { + 'use strict'; + Tone.Negate = function () { + this._multiply = this.input = this.output = new Tone.Multiply(-1); + }; + Tone.extend(Tone.Negate, Tone.SignalBase); + Tone.Negate.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._multiply.dispose(); + this._multiply = null; + return this; + }; + return Tone.Negate; +}(Tone_core_Tone, Tone_signal_Multiply); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_Subtract; +Tone_signal_Subtract = function (Tone) { + 'use strict'; + Tone.Subtract = function (value) { + this.createInsOuts(2, 0); + this._sum = this.input[0] = this.output = new Tone.Gain(); + this._neg = new Tone.Negate(); + this._param = this.input[1] = new Tone.Signal(value); + this._param.chain(this._neg, this._sum); + }; + Tone.extend(Tone.Subtract, Tone.Signal); + Tone.Subtract.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._neg.dispose(); + this._neg = null; + this._sum.disconnect(); + this._sum = null; + this._param.dispose(); + this._param = null; + return this; + }; + return Tone.Subtract; +}(Tone_core_Tone, Tone_signal_Add, Tone_signal_Negate, Tone_signal_Signal); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_GreaterThanZero; +Tone_signal_GreaterThanZero = function (Tone) { + 'use strict'; + Tone.GreaterThanZero = function () { + this._thresh = this.output = new Tone.WaveShaper(function (val) { + if (val <= 0) { + return 0; + } else { + return 1; + } + }, 127); + this._scale = this.input = new Tone.Multiply(10000); + this._scale.connect(this._thresh); + }; + Tone.extend(Tone.GreaterThanZero, Tone.SignalBase); + Tone.GreaterThanZero.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._scale.dispose(); + this._scale = null; + this._thresh.dispose(); + this._thresh = null; + return this; + }; + return Tone.GreaterThanZero; +}(Tone_core_Tone, Tone_signal_Signal, Tone_signal_Multiply); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_GreaterThan; +Tone_signal_GreaterThan = function (Tone) { + 'use strict'; + Tone.GreaterThan = function (value) { + this.createInsOuts(2, 0); + this._param = this.input[0] = new Tone.Subtract(value); + this.input[1] = this._param.input[1]; + this._gtz = this.output = new Tone.GreaterThanZero(); + this._param.connect(this._gtz); + }; + Tone.extend(Tone.GreaterThan, 
Tone.Signal); + Tone.GreaterThan.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._param.dispose(); + this._param = null; + this._gtz.dispose(); + this._gtz = null; + return this; + }; + return Tone.GreaterThan; +}(Tone_core_Tone, Tone_signal_GreaterThanZero, Tone_signal_Subtract); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_Abs; +Tone_signal_Abs = function (Tone) { + 'use strict'; + Tone.Abs = function () { + this._abs = this.input = this.output = new Tone.WaveShaper(function (val) { + if (val === 0) { + return 0; + } else { + return Math.abs(val); + } + }, 127); + }; + Tone.extend(Tone.Abs, Tone.SignalBase); + Tone.Abs.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._abs.dispose(); + this._abs = null; + return this; + }; + return Tone.Abs; +}(Tone_core_Tone, Tone_signal_WaveShaper); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_Modulo; +Tone_signal_Modulo = function (Tone) { + 'use strict'; + Tone.Modulo = function (modulus) { + this.createInsOuts(1, 0); + this._shaper = new Tone.WaveShaper(Math.pow(2, 16)); + this._multiply = new Tone.Multiply(); + this._subtract = this.output = new Tone.Subtract(); + this._modSignal = new Tone.Signal(modulus); + this.input.fan(this._shaper, this._subtract); + this._modSignal.connect(this._multiply, 0, 0); + this._shaper.connect(this._multiply, 0, 1); + this._multiply.connect(this._subtract, 0, 1); + this._setWaveShaper(modulus); + }; + Tone.extend(Tone.Modulo, Tone.SignalBase); + Tone.Modulo.prototype._setWaveShaper = function (mod) { + this._shaper.setMap(function (val) { + var multiple = Math.floor((val + 0.0001) / mod); + return multiple; + }); + }; + Object.defineProperty(Tone.Modulo.prototype, 'value', { + get: function () { + return this._modSignal.value; + }, + set: function (mod) { + this._modSignal.value = mod; + this._setWaveShaper(mod); + } + }); + Tone.Modulo.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._shaper.dispose(); + this._shaper = null; + this._multiply.dispose(); + this._multiply = null; + this._subtract.dispose(); + this._subtract = null; + this._modSignal.dispose(); + this._modSignal = null; + return this; + }; + return Tone.Modulo; +}(Tone_core_Tone, Tone_signal_WaveShaper, Tone_signal_Multiply); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_Pow; +Tone_signal_Pow = function (Tone) { + 'use strict'; + Tone.Pow = function (exp) { + this._exp = this.defaultArg(exp, 1); + this._expScaler = this.input = this.output = new Tone.WaveShaper(this._expFunc(this._exp), 8192); + }; + Tone.extend(Tone.Pow, Tone.SignalBase); + Object.defineProperty(Tone.Pow.prototype, 'value', { + get: function () { + return this._exp; + }, + set: function (exp) { + this._exp = exp; + this._expScaler.setMap(this._expFunc(this._exp)); + } + }); + Tone.Pow.prototype._expFunc = function (exp) { + return function (val) { + return Math.pow(Math.abs(val), exp); + }; + }; + Tone.Pow.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._expScaler.dispose(); + this._expScaler = null; + return this; + }; + return Tone.Pow; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_AudioToGain; +Tone_signal_AudioToGain = function (Tone) { + 'use strict'; + Tone.AudioToGain = function () { + this._norm = 
this.input = this.output = new Tone.WaveShaper(function (x) { + return (x + 1) / 2; + }); + }; + Tone.extend(Tone.AudioToGain, Tone.SignalBase); + Tone.AudioToGain.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._norm.dispose(); + this._norm = null; + return this; + }; + return Tone.AudioToGain; +}(Tone_core_Tone, Tone_signal_WaveShaper); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_Expr; +Tone_signal_Expr = function (Tone) { + 'use strict'; + Tone.Expr = function () { + var expr = this._replacements(Array.prototype.slice.call(arguments)); + var inputCount = this._parseInputs(expr); + this._nodes = []; + this.input = new Array(inputCount); + for (var i = 0; i < inputCount; i++) { + this.input[i] = this.context.createGain(); + } + var tree = this._parseTree(expr); + var result; + try { + result = this._eval(tree); + } catch (e) { + this._disposeNodes(); + throw new Error('Tone.Expr: Could evaluate expression: ' + expr); + } + this.output = result; + }; + Tone.extend(Tone.Expr, Tone.SignalBase); + function applyBinary(Constructor, args, self) { + var op = new Constructor(); + self._eval(args[0]).connect(op, 0, 0); + self._eval(args[1]).connect(op, 0, 1); + return op; + } + function applyUnary(Constructor, args, self) { + var op = new Constructor(); + self._eval(args[0]).connect(op, 0, 0); + return op; + } + function getNumber(arg) { + return arg ? parseFloat(arg) : undefined; + } + function literalNumber(arg) { + return arg && arg.args ? parseFloat(arg.args) : undefined; + } + Tone.Expr._Expressions = { + 'value': { + 'signal': { + regexp: /^\d+\.\d+|^\d+/, + method: function (arg) { + var sig = new Tone.Signal(getNumber(arg)); + return sig; + } + }, + 'input': { + regexp: /^\$\d/, + method: function (arg, self) { + return self.input[getNumber(arg.substr(1))]; + } + } + }, + 'glue': { + '(': { regexp: /^\(/ }, + ')': { regexp: /^\)/ }, + ',': { regexp: /^,/ } + }, + 'func': { + 'abs': { + regexp: /^abs/, + method: applyUnary.bind(this, Tone.Abs) + }, + 'mod': { + regexp: /^mod/, + method: function (args, self) { + var modulus = literalNumber(args[1]); + var op = new Tone.Modulo(modulus); + self._eval(args[0]).connect(op); + return op; + } + }, + 'pow': { + regexp: /^pow/, + method: function (args, self) { + var exp = literalNumber(args[1]); + var op = new Tone.Pow(exp); + self._eval(args[0]).connect(op); + return op; + } + }, + 'a2g': { + regexp: /^a2g/, + method: function (args, self) { + var op = new Tone.AudioToGain(); + self._eval(args[0]).connect(op); + return op; + } + } + }, + 'binary': { + '+': { + regexp: /^\+/, + precedence: 1, + method: applyBinary.bind(this, Tone.Add) + }, + '-': { + regexp: /^\-/, + precedence: 1, + method: function (args, self) { + if (args.length === 1) { + return applyUnary(Tone.Negate, args, self); + } else { + return applyBinary(Tone.Subtract, args, self); + } + } + }, + '*': { + regexp: /^\*/, + precedence: 0, + method: applyBinary.bind(this, Tone.Multiply) + } + }, + 'unary': { + '-': { + regexp: /^\-/, + method: applyUnary.bind(this, Tone.Negate) + }, + '!': { + regexp: /^\!/, + method: applyUnary.bind(this, Tone.NOT) + } + } + }; + Tone.Expr.prototype._parseInputs = function (expr) { + var inputArray = expr.match(/\$\d/g); + var inputMax = 0; + if (inputArray !== null) { + for (var i = 0; i < inputArray.length; i++) { + var inputNum = parseInt(inputArray[i].substr(1)) + 1; + inputMax = Math.max(inputMax, inputNum); + } + } + return inputMax; + }; + 
Tone.Expr.prototype._replacements = function (args) { + var expr = args.shift(); + for (var i = 0; i < args.length; i++) { + expr = expr.replace(/\%/i, args[i]); + } + return expr; + }; + Tone.Expr.prototype._tokenize = function (expr) { + var position = -1; + var tokens = []; + while (expr.length > 0) { + expr = expr.trim(); + var token = getNextToken(expr); + tokens.push(token); + expr = expr.substr(token.value.length); + } + function getNextToken(expr) { + for (var type in Tone.Expr._Expressions) { + var group = Tone.Expr._Expressions[type]; + for (var opName in group) { + var op = group[opName]; + var reg = op.regexp; + var match = expr.match(reg); + if (match !== null) { + return { + type: type, + value: match[0], + method: op.method + }; + } + } + } + throw new SyntaxError('Tone.Expr: Unexpected token ' + expr); + } + return { + next: function () { + return tokens[++position]; + }, + peek: function () { + return tokens[position + 1]; + } + }; + }; + Tone.Expr.prototype._parseTree = function (expr) { + var lexer = this._tokenize(expr); + var isUndef = this.isUndef.bind(this); + function matchSyntax(token, syn) { + return !isUndef(token) && token.type === 'glue' && token.value === syn; + } + function matchGroup(token, groupName, prec) { + var ret = false; + var group = Tone.Expr._Expressions[groupName]; + if (!isUndef(token)) { + for (var opName in group) { + var op = group[opName]; + if (op.regexp.test(token.value)) { + if (!isUndef(prec)) { + if (op.precedence === prec) { + return true; + } + } else { + return true; + } + } + } + } + return ret; + } + function parseExpression(precedence) { + if (isUndef(precedence)) { + precedence = 5; + } + var expr; + if (precedence < 0) { + expr = parseUnary(); + } else { + expr = parseExpression(precedence - 1); + } + var token = lexer.peek(); + while (matchGroup(token, 'binary', precedence)) { + token = lexer.next(); + expr = { + operator: token.value, + method: token.method, + args: [ + expr, + parseExpression(precedence - 1) + ] + }; + token = lexer.peek(); + } + return expr; + } + function parseUnary() { + var token, expr; + token = lexer.peek(); + if (matchGroup(token, 'unary')) { + token = lexer.next(); + expr = parseUnary(); + return { + operator: token.value, + method: token.method, + args: [expr] + }; + } + return parsePrimary(); + } + function parsePrimary() { + var token, expr; + token = lexer.peek(); + if (isUndef(token)) { + throw new SyntaxError('Tone.Expr: Unexpected termination of expression'); + } + if (token.type === 'func') { + token = lexer.next(); + return parseFunctionCall(token); + } + if (token.type === 'value') { + token = lexer.next(); + return { + method: token.method, + args: token.value + }; + } + if (matchSyntax(token, '(')) { + lexer.next(); + expr = parseExpression(); + token = lexer.next(); + if (!matchSyntax(token, ')')) { + throw new SyntaxError('Expected )'); + } + return expr; + } + throw new SyntaxError('Tone.Expr: Parse error, cannot process token ' + token.value); + } + function parseFunctionCall(func) { + var token, args = []; + token = lexer.next(); + if (!matchSyntax(token, '(')) { + throw new SyntaxError('Tone.Expr: Expected ( in a function call "' + func.value + '"'); + } + token = lexer.peek(); + if (!matchSyntax(token, ')')) { + args = parseArgumentList(); + } + token = lexer.next(); + if (!matchSyntax(token, ')')) { + throw new SyntaxError('Tone.Expr: Expected ) in a function call "' + func.value + '"'); + } + return { + method: func.method, + args: args, + name: name + }; + } + function 
parseArgumentList() { + var token, expr, args = []; + while (true) { + expr = parseExpression(); + if (isUndef(expr)) { + break; + } + args.push(expr); + token = lexer.peek(); + if (!matchSyntax(token, ',')) { + break; + } + lexer.next(); + } + return args; + } + return parseExpression(); + }; + Tone.Expr.prototype._eval = function (tree) { + if (!this.isUndef(tree)) { + var node = tree.method(tree.args, this); + this._nodes.push(node); + return node; + } + }; + Tone.Expr.prototype._disposeNodes = function () { + for (var i = 0; i < this._nodes.length; i++) { + var node = this._nodes[i]; + if (this.isFunction(node.dispose)) { + node.dispose(); + } else if (this.isFunction(node.disconnect)) { + node.disconnect(); + } + node = null; + this._nodes[i] = null; + } + this._nodes = null; + }; + Tone.Expr.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._disposeNodes(); + }; + return Tone.Expr; +}(Tone_core_Tone, Tone_signal_Add, Tone_signal_Subtract, Tone_signal_Multiply, Tone_signal_GreaterThan, Tone_signal_GreaterThanZero, Tone_signal_Abs, Tone_signal_Negate, Tone_signal_Modulo, Tone_signal_Pow); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_signal_EqualPowerGain; +Tone_signal_EqualPowerGain = function (Tone) { + 'use strict'; + Tone.EqualPowerGain = function () { + this._eqPower = this.input = this.output = new Tone.WaveShaper(function (val) { + if (Math.abs(val) < 0.001) { + return 0; + } else { + return this.equalPowerScale(val); + } + }.bind(this), 4096); + }; + Tone.extend(Tone.EqualPowerGain, Tone.SignalBase); + Tone.EqualPowerGain.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._eqPower.dispose(); + this._eqPower = null; + return this; + }; + return Tone.EqualPowerGain; +}(Tone_core_Tone); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_component_CrossFade; +Tone_component_CrossFade = function (Tone) { + 'use strict'; + Tone.CrossFade = function (initialFade) { + this.createInsOuts(2, 1); + this.a = this.input[0] = new Tone.Gain(); + this.b = this.input[1] = new Tone.Gain(); + this.fade = new Tone.Signal(this.defaultArg(initialFade, 0.5), Tone.Type.NormalRange); + this._equalPowerA = new Tone.EqualPowerGain(); + this._equalPowerB = new Tone.EqualPowerGain(); + this._invert = new Tone.Expr('1 - $0'); + this.a.connect(this.output); + this.b.connect(this.output); + this.fade.chain(this._equalPowerB, this.b.gain); + this.fade.chain(this._invert, this._equalPowerA, this.a.gain); + this._readOnly('fade'); + }; + Tone.extend(Tone.CrossFade); + Tone.CrossFade.prototype.dispose = function () { + Tone.prototype.dispose.call(this); + this._writable('fade'); + this._equalPowerA.dispose(); + this._equalPowerA = null; + this._equalPowerB.dispose(); + this._equalPowerB = null; + this.fade.dispose(); + this.fade = null; + this._invert.dispose(); + this._invert = null; + this.a.dispose(); + this.a = null; + this.b.dispose(); + this.b = null; + return this; + }; + return Tone.CrossFade; +}(Tone_core_Tone, Tone_signal_Signal, Tone_signal_Expr, Tone_signal_EqualPowerGain); +var effect; +'use strict'; +effect = function () { + var p5sound = master; + var CrossFade = Tone_component_CrossFade; + /** + * Effect is a base class for audio effects in p5.
+ * This module handles the nodes and methods that are + * common and useful for current and future effects. + * + * + * This class is extended by p5.Distortion, + * p5.Compressor, + * p5.Delay, + * p5.Filter, + * p5.Reverb. + * + * @class p5.Effect + * @constructor + * + * @param {Object} [ac] Reference to the audio context of the p5 object + * @param {WebAudioNode} [input] Gain Node effect wrapper + * @param {WebAudioNode} [output] Gain Node effect wrapper + * @param {Object} [_drywet] Tone.JS CrossFade node (defaults to value: 1) + * @param {WebAudioNode} [wet] Effects that extend this class should connect + * to the wet signal to this gain node, so that dry and wet + * signals are mixed properly. + */ + p5.Effect = function () { + this.ac = p5sound.audiocontext; + this.input = this.ac.createGain(); + this.output = this.ac.createGain(); + /** + * The p5.Effect class is built + * using Tone.js CrossFade + * @private + */ + this._drywet = new CrossFade(1); + /** + * In classes that extend + * p5.Effect, connect effect nodes + * to the wet parameter + */ + this.wet = this.ac.createGain(); + this.input.connect(this._drywet.a); + this.wet.connect(this._drywet.b); + this._drywet.connect(this.output); + this.connect(); + //Add to the soundArray + p5sound.soundArray.push(this); + }; + /** + * Set the output volume of the filter. + * + * @method amp + * @param {Number} [vol] amplitude between 0 and 1.0 + * @param {Number} [rampTime] create a fade that lasts until rampTime + * @param {Number} [tFromNow] schedule this event to happen in tFromNow seconds + */ + p5.Effect.prototype.amp = function (vol, rampTime, tFromNow) { + var rampTime = rampTime || 0; + var tFromNow = tFromNow || 0; + var now = p5sound.audiocontext.currentTime; + var currentVol = this.output.gain.value; + this.output.gain.cancelScheduledValues(now); + this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow + 0.001); + this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime + 0.001); + }; + /** + * Link effects together in a chain + * Example usage: filter.chain(reverb, delay, panner); + * May be used with an open-ended number of arguments + * + * @method chain + * @param {Object} [arguments] Chain together multiple sound objects + */ + p5.Effect.prototype.chain = function () { + if (arguments.length > 0) { + this.connect(arguments[0]); + for (var i = 1; i < arguments.length; i += 1) { + arguments[i - 1].connect(arguments[i]); + } + } + return this; + }; + /** + * Adjust the dry/wet value. + * + * @method drywet + * @param {Number} [fade] The desired drywet value (0 - 1.0) + */ + p5.Effect.prototype.drywet = function (fade) { + if (typeof fade !== 'undefined') { + this._drywet.fade.value = fade; + } + return this._drywet.fade.value; + }; + /** + * Send output to a p5.js-sound, Web Audio Node, or use signal to + * control an AudioParam + * + * @method connect + * @param {Object} unit + */ + p5.Effect.prototype.connect = function (unit) { + var u = unit || p5.soundOut.input; + this.output.connect(u.input ? u.input : u); + }; + /** + * Disconnect all output. 
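+ *
+ * A typical re-routing sketch (assumes `reverb` and `filter` are existing
+ * effects; not from the official docs):
+ *   // reverb.disconnect();    // detach from the master output
+ *   // reverb.connect(filter); // send the reverb into another unit instead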
+ * + * @method disconnect + */ + p5.Effect.prototype.disconnect = function () { + this.output.disconnect(); + }; + p5.Effect.prototype.dispose = function () { + // remove refernce form soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + this.input.disconnect(); + this.input = undefined; + this.output.disconnect(); + this.output = undefined; + this._drywet.disconnect(); + delete this._drywet; + this.wet.disconnect(); + delete this.wet; + this.ac = undefined; + }; + return p5.Effect; +}(master, Tone_component_CrossFade); +var filter; +'use strict'; +filter = function () { + var p5sound = master; + var Effect = effect; + /** + *

+ * A p5.Filter uses a Web Audio Biquad Filter to filter
+ * the frequency response of an input source. Subclasses
+ * include:
+ * * p5.LowPass: + * Allows frequencies below the cutoff frequency to pass through, + * and attenuates frequencies above the cutoff.
+ * * p5.HighPass: + * The opposite of a lowpass filter.
+ * * p5.BandPass: + * Allows a range of frequencies to pass through and attenuates + * the frequencies below and above this frequency range.
+ * + * The .res() method controls either width of the + * bandpass, or resonance of the low/highpass cutoff frequency. + * + * This class extends p5.Effect. + * Methods amp(), chain(), + * drywet(), connect(), and + * disconnect() are available. + * + * @class p5.Filter + * @extends p5.Effect + * @constructor + * @param {String} [type] 'lowpass' (default), 'highpass', 'bandpass' + * @example + *
+ * var fft, noise, filter; + * + * function setup() { + * fill(255, 40, 255); + * + * filter = new p5.BandPass(); + * + * noise = new p5.Noise(); + * // disconnect unfiltered noise, + * // and connect to filter + * noise.disconnect(); + * noise.connect(filter); + * noise.start(); + * + * fft = new p5.FFT(); + * } + * + * function draw() { + * background(30); + * + * // set the BandPass frequency based on mouseX + * var freq = map(mouseX, 0, width, 20, 10000); + * filter.freq(freq); + * // give the filter a narrow band (lower res = wider bandpass) + * filter.res(50); + * + * // draw filtered spectrum + * var spectrum = fft.analyze(); + * noStroke(); + * for (var i = 0; i < spectrum.length; i++) { + * var x = map(i, 0, spectrum.length, 0, width); + * var h = -height + map(spectrum[i], 0, 255, height, 0); + * rect(x, height, width/spectrum.length, h); + * } + * + * isMouseOverCanvas(); + * } + * + * function isMouseOverCanvas() { + * var mX = mouseX, mY = mouseY; + * if (mX > 0 && mX < width && mY < height && mY > 0) { + * noise.amp(0.5, 0.2); + * } else { + * noise.amp(0, 0.2); + * } + * } + *
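+ *
+ * A smaller sketch (assumes `osc` is an existing sound source; not part of
+ * the official example):
+ *   // var lowPass = new p5.LowPass();
+ *   // osc.disconnect();     // detach the source from the master output
+ *   // osc.connect(lowPass); // route it through the filter instead
+ *   // lowPass.freq(800);    // cutoff frequency in Hz
+ *   // lowPass.res(5);       // resonance at the cutoff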
+ */ + //constructor with inheritance + p5.Filter = function (type) { + Effect.call(this); + //add extend Effect by adding a Biquad Filter + /** + * The p5.Filter is built with a + * + * Web Audio BiquadFilter Node. + * + * @property {DelayNode} biquadFilter + */ + this.biquad = this.ac.createBiquadFilter(); + this.input.connect(this.biquad); + this.biquad.connect(this.wet); + if (type) { + this.setType(type); + } + //Properties useful for the toggle method. + this._on = true; + this._untoggledType = this.biquad.type; + }; + p5.Filter.prototype = Object.create(Effect.prototype); + /** + * Filter an audio signal according to a set + * of filter parameters. + * + * @method process + * @param {Object} Signal An object that outputs audio + * @param {Number} [freq] Frequency in Hz, from 10 to 22050 + * @param {Number} [res] Resonance/Width of the filter frequency + * from 0.001 to 1000 + */ + p5.Filter.prototype.process = function (src, freq, res, time) { + src.connect(this.input); + this.set(freq, res, time); + }; + /** + * Set the frequency and the resonance of the filter. + * + * @method set + * @param {Number} [freq] Frequency in Hz, from 10 to 22050 + * @param {Number} [res] Resonance (Q) from 0.001 to 1000 + * @param {Number} [timeFromNow] schedule this event to happen + * seconds from now + */ + p5.Filter.prototype.set = function (freq, res, time) { + if (freq) { + this.freq(freq, time); + } + if (res) { + this.res(res, time); + } + }; + /** + * Set the filter frequency, in Hz, from 10 to 22050 (the range of + * human hearing, although in reality most people hear in a narrower + * range). + * + * @method freq + * @param {Number} freq Filter Frequency + * @param {Number} [timeFromNow] schedule this event to happen + * seconds from now + * @return {Number} value Returns the current frequency value + */ + p5.Filter.prototype.freq = function (freq, time) { + var t = time || 0; + if (freq <= 0) { + freq = 1; + } + if (typeof freq === 'number') { + this.biquad.frequency.value = freq; + this.biquad.frequency.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.biquad.frequency.exponentialRampToValueAtTime(freq, this.ac.currentTime + 0.02 + t); + } else if (freq) { + freq.connect(this.biquad.frequency); + } + return this.biquad.frequency.value; + }; + /** + * Controls either width of a bandpass frequency, + * or the resonance of a low/highpass cutoff frequency. + * + * @method res + * @param {Number} res Resonance/Width of filter freq + * from 0.001 to 1000 + * @param {Number} [timeFromNow] schedule this event to happen + * seconds from now + * @return {Number} value Returns the current res value + */ + p5.Filter.prototype.res = function (res, time) { + var t = time || 0; + if (typeof res === 'number') { + this.biquad.Q.value = res; + this.biquad.Q.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.biquad.Q.linearRampToValueAtTime(res, this.ac.currentTime + 0.02 + t); + } else if (res) { + res.connect(this.biquad.Q); + } + return this.biquad.Q.value; + }; + /** + * Controls the gain attribute of a Biquad Filter. + * This is distinctly different from .amp() which is inherited from p5.Effect + * .amp() controls the volume via the output gain node + * p5.Filter.gain() controls the gain parameter of a Biquad Filter node. 
+ * + * @param {Number} gain + * @return {Number} Returns the current or updated gain value + */ + p5.Filter.prototype.gain = function (gain, time) { + var t = time || 0; + if (typeof gain === 'number') { + this.biquad.gain.value = gain; + this.biquad.gain.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.biquad.gain.linearRampToValueAtTime(gain, this.ac.currentTime + 0.02 + t); + } else if (gain) { + gain.connect(this.biquad.gain); + } + return this.biquad.gain.value; + }; + /** + * Toggle function. Switches between the specified type and allpass + * @return {boolean} [Toggle value] + */ + p5.Filter.prototype.toggle = function () { + this._on = !this._on; + if (this._on === true) { + this.biquad.type = this._untoggledType; + } else if (this._on === false) { + this.biquad.type = 'allpass'; + } + return this._on; + }; + /** + * Set the type of a p5.Filter. Possible types include: + * "lowpass" (default), "highpass", "bandpass", + * "lowshelf", "highshelf", "peaking", "notch", + * "allpass". + * + * @method setType + * @param {String} t + */ + p5.Filter.prototype.setType = function (t) { + this.biquad.type = t; + this._untoggledType = this.biquad.type; + }; + p5.Filter.prototype.dispose = function () { + // remove reference from soundArray + Effect.prototype.dispose.apply(this); + this.biquad.disconnect(); + this.biquad = undefined; + }; + /** + * Constructor: new p5.LowPass() Filter. + * This is the same as creating a p5.Filter and then calling + * its method setType('lowpass'). + * See p5.Filter for methods. + * + * @class p5.LowPass + * @constructor + * @extends {p5.Filter} + */ + p5.LowPass = function () { + p5.Filter.call(this, 'lowpass'); + }; + p5.LowPass.prototype = Object.create(p5.Filter.prototype); + /** + * Constructor: new p5.HighPass() Filter. + * This is the same as creating a p5.Filter and then calling + * its method setType('highpass'). + * See p5.Filter for methods. + * + * @class p5.HighPass + * @constructor + * @extends {p5.Filter} + */ + p5.HighPass = function () { + p5.Filter.call(this, 'highpass'); + }; + p5.HighPass.prototype = Object.create(p5.Filter.prototype); + /** + * Constructor: new p5.BandPass() Filter. + * This is the same as creating a p5.Filter and then calling + * its method setType('bandpass'). + * See p5.Filter for methods. + * + * @class BandPass + * @constructor + * @extends {p5.Filter} + */ + p5.BandPass = function () { + p5.Filter.call(this, 'bandpass'); + }; + p5.BandPass.prototype = Object.create(p5.Filter.prototype); + return p5.Filter; +}(master, effect); +var src_eqFilter; +'use strict'; +src_eqFilter = function () { + var Filter = filter; + var p5sound = master; + /** + * EQFilter extends p5.Filter with constraints + * necessary for the p5.EQ + * + * @private + */ + var EQFilter = function (freq, res) { + Filter.call(this, 'peaking'); + this.disconnect(); + this.set(freq, res); + this.biquad.gain.value = 0; + delete this.input; + delete this.output; + delete this._drywet; + delete this.wet; + }; + EQFilter.prototype = Object.create(Filter.prototype); + EQFilter.prototype.amp = function () { + console.warn('`amp()` is not available for p5.EQ bands. Use `.gain()`'); + }; + EQFilter.prototype.drywet = function () { + console.warn('`drywet()` is not available for p5.EQ bands.'); + }; + EQFilter.prototype.connect = function (unit) { + var u = unit || p5.soundOut.input; + if (this.biquad) { + this.biquad.connect(u.input ? u.input : u); + } else { + this.output.connect(u.input ? 
u.input : u); + } + }; + EQFilter.prototype.disconnect = function () { + this.biquad.disconnect(); + }; + EQFilter.prototype.dispose = function () { + // remove reference form soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + this.disconnect(); + delete this.biquad; + }; + return EQFilter; +}(filter, master); +var eq; +'use strict'; +eq = function () { + var Effect = effect; + var EQFilter = src_eqFilter; + /** + * p5.EQ is an audio effect that performs the function of a multiband + * audio equalizer. Equalization is used to adjust the balance of + * frequency compoenents of an audio signal. This process is commonly used + * in sound production and recording to change the waveform before it reaches + * a sound output device. EQ can also be used as an audio effect to create + * interesting distortions by filtering out parts of the spectrum. p5.EQ is + * built using a chain of Web Audio Biquad Filter Nodes and can be + * instantiated with 3 or 8 bands. Bands can be added or removed from + * the EQ by directly modifying p5.EQ.bands (the array that stores filters). + * + * This class extends p5.Effect. + * Methods amp(), chain(), + * drywet(), connect(), and + * disconnect() are available. + * + * @class p5.EQ + * @constructor + * @extends p5.Effect + * @param {Number} [_eqsize] Constructor will accept 3 or 8, defaults to 3 + * @return {Object} p5.EQ object + * + * @example + *
+ * var eq; + * var band_names; + * var band_index; + * + * var soundFile, play; + * + * function preload() { + * soundFormats('mp3', 'ogg'); + * soundFile = loadSound('assets/beat'); + * } + * + * function setup() { + * eq = new p5.EQ(3); + * soundFile.disconnect(); + * eq.process(soundFile); + * + * band_names = ['lows','mids','highs']; + * band_index = 0; + * play = false; + * textAlign(CENTER); + * } + * + * function draw() { + * background(30); + * noStroke(); + * fill(255); + * text('click to kill',50,25); + * + * fill(255, 40, 255); + * textSize(26); + * text(band_names[band_index],50,55); + * + * fill(255); + * textSize(9); + * text('space = play/pause',50,80); + * } + * + * //If mouse is over canvas, cycle to the next band and kill the frequency + * function mouseClicked() { + * for (var i = 0; i < eq.bands.length; i++) { + * eq.bands[i].gain(0); + * } + * eq.bands[band_index].gain(-40); + * if (mouseX > 0 && mouseX < width && mouseY < height && mouseY > 0) { + * band_index === 2 ? band_index = 0 : band_index++; + * } + * } + * + * //use space bar to trigger play / pause + * function keyPressed() { + * if (key===' ') { + * play = !play + * play ? soundFile.loop() : soundFile.pause(); + * } + * } + *
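+ *
+ * A compact alternative (not part of the official example): all bands of a
+ * 3-band EQ can be set at once with frequency/gain pairs via set():
+ *   // eq.set(100, -12, 1000, 0, 8000, 6); // cut lows, leave mids, boost highs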
+ */ + p5.EQ = function (_eqsize) { + Effect.call(this); + //p5.EQ can be of size (3) or (8), defaults to 3 + _eqsize = _eqsize === 3 || _eqsize === 8 ? _eqsize : 3; + var factor; + _eqsize === 3 ? factor = Math.pow(2, 3) : factor = 2; + /** + * The p5.EQ is built with abstracted p5.Filter objects. + * To modify any bands, use methods of the + * p5.Filter API, especially `gain` and `freq`. + * Bands are stored in an array, with indices 0 - 3, or 0 - 7 + * @property {Array} bands + * + */ + this.bands = []; + var freq, res; + for (var i = 0; i < _eqsize; i++) { + if (i === _eqsize - 1) { + freq = 21000; + res = 0.01; + } else if (i === 0) { + freq = 100; + res = 0.1; + } else if (i === 1) { + freq = _eqsize === 3 ? 360 * factor : 360; + res = 1; + } else { + freq = this.bands[i - 1].freq() * factor; + res = 1; + } + this.bands[i] = this._newBand(freq, res); + if (i > 0) { + this.bands[i - 1].connect(this.bands[i].biquad); + } else { + this.input.connect(this.bands[i].biquad); + } + } + this.bands[_eqsize - 1].connect(this.output); + }; + p5.EQ.prototype = Object.create(Effect.prototype); + /** + * Process an input by connecting it to the EQ + * @method process + * @param {Object} src Audio source + */ + p5.EQ.prototype.process = function (src) { + src.connect(this.input); + }; + // /** + // * Set the frequency and gain of each band in the EQ. This method should be + // * called with 3 or 8 frequency and gain pairs, depending on the size of the EQ. + // * ex. eq.set(freq0, gain0, freq1, gain1, freq2, gain2); + // * + // * @method set + // * @param {Number} [freq0] Frequency value for band with index 0 + // * @param {Number} [gain0] Gain value for band with index 0 + // * @param {Number} [freq1] Frequency value for band with index 1 + // * @param {Number} [gain1] Gain value for band with index 1 + // * @param {Number} [freq2] Frequency value for band with index 2 + // * @param {Number} [gain2] Gain value for band with index 2 + // * @param {Number} [freq3] Frequency value for band with index 3 + // * @param {Number} [gain3] Gain value for band with index 3 + // * @param {Number} [freq4] Frequency value for band with index 4 + // * @param {Number} [gain4] Gain value for band with index 4 + // * @param {Number} [freq5] Frequency value for band with index 5 + // * @param {Number} [gain5] Gain value for band with index 5 + // * @param {Number} [freq6] Frequency value for band with index 6 + // * @param {Number} [gain6] Gain value for band with index 6 + // * @param {Number} [freq7] Frequency value for band with index 7 + // * @param {Number} [gain7] Gain value for band with index 7 + // */ + p5.EQ.prototype.set = function () { + if (arguments.length === this.bands.length * 2) { + for (var i = 0; i < arguments.length; i += 2) { + this.bands[i / 2].freq(arguments[i]); + this.bands[i / 2].gain(arguments[i + 1]); + } + } else { + console.error('Argument mismatch. .set() should be called with ' + this.bands.length * 2 + ' arguments. (one frequency and gain value pair for each band of the eq)'); + } + }; + /** + * Add a new band. Creates a p5.Filter and strips away everything but + * the raw biquad filter. This method returns an abstracted p5.Filter, + * which can be added to p5.EQ.bands, in order to create new EQ bands. 
+ * @private + * @method _newBand + * @param {Number} freq + * @param {Number} res + * @return {Obect} Abstracted Filter + */ + p5.EQ.prototype._newBand = function (freq, res) { + return new EQFilter(freq, res); + }; + p5.EQ.prototype.dispose = function () { + Effect.prototype.dispose.apply(this); + while (this.bands.length > 0) { + delete this.bands.pop().dispose(); + } + delete this.bands; + }; + return p5.EQ; +}(effect, src_eqFilter); +var panner3d; +'use strict'; +panner3d = function () { + var p5sound = master; + var Effect = effect; + /** + * Spatializer is a class that can construct both a Spatial Panner + * and a Spatial Listener. The panner is based on the + * Web Audio Spatial Panner Node + * https://www.w3.org/TR/webaudio/#the-spatializernode-interface + * This panner is a spatial processing node that allows audio to be positioned + * and oriented in 3D space. + * + * The Listener modifies the properties of the Audio Context Listener. + * Both objects types use the same methods. The default is a spatial panner. + * + * p5.Panner3D - Constructs a Spatial Panner
+ * p5.Listener3D - Constructs a Spatial Listener
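+ *
+ * A minimal usage sketch for the panner (hedged: 'sound' stands in for any
+ * p5.sound source, such as a p5.SoundFile or p5.Oscillator, created elsewhere):
+ *
+ *   var panner3d = new p5.Panner3D();
+ *   panner3d.process(sound);        // route the source through the panner
+ *   panner3d.position(1, 0, -2);    // one unit to the right, two units behind
+ *   panner3d.orient(0, 0, 1);       // point the source along +Z
+ *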
+ * + * @class Spatializer + * @constructor + * @return {Object} p5.Panner3D Object + * + * @param {Web Audio Node} spatializer Web Audio Spatial Panning Node + * @param {AudioParam} spatializer.panningModel "equal power" or "HRTF" + * @param {AudioParam} spatializer.distanceModel "linear", "inverse", or "exponential" + * @param {String} [type] [Specify construction of a spatial panner or listener] + */ + p5.Panner3D = function (type) { + Effect.call(this); + this.panner = this.ac.createPanner(); + this.panner.panningModel = 'HRTF'; + this.panner.distanceModel = 'linear'; + this.panner.connect(this.output); + this.input.connect(this.panner); + }; + p5.Panner3D.prototype = Object.create(Effect.prototype); + /** + * Connect an audio sorce + * @param {Object} src Input source + */ + p5.Panner3D.prototype.process = function (src) { + src.connect(this.input); + }; + /** + * Set the X,Y,Z position of the Panner + * @param {[Number]} xVal + * @param {[Number]} yVal + * @param {[Number]} zVal + * @param {[Number]} time + * @return {[Array]} [Updated x, y, z values as an array] + */ + p5.Panner3D.prototype.position = function (xVal, yVal, zVal, time) { + this.positionX(xVal, time); + this.positionY(yVal, time); + this.positionZ(zVal, time); + return [ + this.panner.positionX.value, + this.panner.positionY.value, + this.panner.positionZ.value + ]; + }; + /** + * Getter and setter methods for position coordinates + * @return {Number} [updated coordinate value] + */ + p5.Panner3D.prototype.positionX = function (xVal, time) { + var t = time || 0; + if (typeof xVal === 'number') { + this.panner.positionX.value = xVal; + this.panner.positionX.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.panner.positionX.linearRampToValueAtTime(xVal, this.ac.currentTime + 0.02 + t); + } else if (xVal) { + xVal.connect(this.panner.positionX); + } + return this.panner.positionX.value; + }; + p5.Panner3D.prototype.positionY = function (yVal, time) { + var t = time || 0; + if (typeof yVal === 'number') { + this.panner.positionY.value = yVal; + this.panner.positionY.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.panner.positionY.linearRampToValueAtTime(yVal, this.ac.currentTime + 0.02 + t); + } else if (yVal) { + yVal.connect(this.panner.positionY); + } + return this.panner.positionY.value; + }; + p5.Panner3D.prototype.positionZ = function (zVal, time) { + var t = time || 0; + if (typeof zVal === 'number') { + this.panner.positionZ.value = zVal; + this.panner.positionZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.panner.positionZ.linearRampToValueAtTime(zVal, this.ac.currentTime + 0.02 + t); + } else if (zVal) { + zVal.connect(this.panner.positionZ); + } + return this.panner.positionZ.value; + }; + p5.Panner3D.prototype.orient = function (xVal, yVal, zVal, time) { + this.orientX(xVal, time); + this.orientY(yVal, time); + this.orientZ(zVal, time); + return [ + this.panner.orientationX.value, + this.panner.orientationY.value, + this.panner.orientationZ.value + ]; + }; + /** + * Getter and setter methods for orient coordinates + * @return {Number} [updated coordinate value] + */ + p5.Panner3D.prototype.orientX = function (xVal, time) { + var t = time || 0; + if (typeof xVal === 'number') { + this.panner.orientationX.value = xVal; + this.panner.orientationX.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.panner.orientationX.linearRampToValueAtTime(xVal, this.ac.currentTime + 0.02 + t); + } else if (xVal) { + xVal.connect(this.panner.orientationX); + } + return 
this.panner.orientationX.value; + }; + p5.Panner3D.prototype.orientY = function (yVal, time) { + var t = time || 0; + if (typeof yVal === 'number') { + this.panner.orientationY.value = yVal; + this.panner.orientationY.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.panner.orientationY.linearRampToValueAtTime(yVal, this.ac.currentTime + 0.02 + t); + } else if (yVal) { + yVal.connect(this.panner.orientationY); + } + return this.panner.orientationY.value; + }; + p5.Panner3D.prototype.orientZ = function (zVal, time) { + var t = time || 0; + if (typeof zVal === 'number') { + this.panner.orientationZ.value = zVal; + this.panner.orientationZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.panner.orientationZ.linearRampToValueAtTime(zVal, this.ac.currentTime + 0.02 + t); + } else if (zVal) { + zVal.connect(this.panner.orientationY); + } + return this.panner.orientationZ.value; + }; + return p5.Panner3D; +}(master, effect); +var listener3d; +'use strict'; +listener3d = function () { + var p5sound = master; + var Effect = effect; + /** + * Spatializer is a class that can construct both a Spatial Panner + * and a Spatial Listener. The panner is based on the + * Web Audio Spatial Panner Node + * https://www.w3.org/TR/webaudio/#the-spatializernode-interface + * This panner is a spatial processing node that allows audio to be positioned + * and oriented in 3D space. + * + * The Listener modifies the properties of the Audio Context Listener. + * Both objects types use the same methods. The default is a spatial panner. + * + * p5.Panner3D - Constructs a Spatial Panner
+ * p5.Listener3D - Constructs a Spatial Listener
+ * + * @class Spatializer + * @constructor + * @return {Object} p5.Listener3D Object + * + * @param {Web Audio Node} spatializer Web Audio Spatial Panning Node + * @param {AudioParam} spatializer.panningModel "equal power" or "HRTF" + * @param {AudioParam} spatializer.distanceModel "linear", "inverse", or "exponential" + * @param {String} [type] [Specify construction of a spatial panner or listener] + */ + p5.Listener3D = function (type) { + // if (type==="listener") { + this.ac = p5sound.audiocontext; + this.listener = this.ac.listener; + }; + /** + * Connect an audio sorce + * @param {Object} src Input source + */ + p5.Listener3D.prototype.process = function (src) { + src.connect(this.input); + }; + /** + * Set the X,Y,Z position of the Panner + * @param {[Number]} xVal + * @param {[Number]} yVal + * @param {[Number]} zVal + * @param {[Number]} time + * @return {[Array]} [Updated x, y, z values as an array] + */ + p5.Listener3D.prototype.position = function (xVal, yVal, zVal, time) { + this.positionX(xVal, time); + this.positionY(yVal, time); + this.positionZ(zVal, time); + return [ + this.spatializer.positionX.value, + this.spatializer.positionY.value, + this.spatializer.positionZ.value + ]; + }; + /** + * Getter and setter methods for position coordinates + * @return {Number} [updated coordinate value] + */ + p5.Listener3D.prototype.positionX = function (xVal, time) { + var t = time || 0; + if (typeof xVal === 'number') { + this.spatializer.positionX.value = xVal; + this.spatializer.positionX.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.spatializer.positionX.linearRampToValueAtTime(xVal, this.ac.currentTime + 0.02 + t); + } else if (xVal) { + xVal.connect(this.spatializer.positionX); + } + return this.spatializer.positionX.value; + }; + p5.Listener3D.prototype.positionY = function (yVal, time) { + var t = time || 0; + if (typeof yVal === 'number') { + this.spatializer.positionY.value = yVal; + this.spatializer.positionY.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.spatializer.positionY.linearRampToValueAtTime(yVal, this.ac.currentTime + 0.02 + t); + } else if (yVal) { + yVal.connect(this.spatializer.positionY); + } + return this.spatializer.positionY.value; + }; + p5.Listener3D.prototype.positionZ = function (zVal, time) { + var t = time || 0; + if (typeof zVal === 'number') { + this.spatializer.positionZ.value = zVal; + this.spatializer.positionZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.spatializer.positionZ.linearRampToValueAtTime(zVal, this.ac.currentTime + 0.02 + t); + } else if (zVal) { + zVal.connect(this.spatializer.positionZ); + } + return this.spatializer.positionZ.value; + }; + /** + * [Overrides the Spatializer orient() method because Listener has slightly + * different params. In human terms, Forward vectors are the direction the + * nose is pointing. Up vectors are the direction of the top of the head. 
+ * + * @param {[Number]} xValF [Forward vector X direction] + * @param {[Number]} yValF [Forward vector Y direction] + * @param {[Number]} zValF [Forward vector Z direction] + * @param {[Number]} xValU [Up vector X direction] + * @param {[Number]} yValU [Up vector Y direction] + * @param {[Number]} zValU [Up vector Z direction] + * @param {[Number]} time + * @return {[Array]} [All orienation params] + */ + p5.Listener3D.prototype.orient = function (xValF, yValF, zValF, xValU, yValU, zValU, time) { + if (arguments.length === 3 || arguments.length === 4) { + time = arguments[3]; + this.orientForward(xValF, yValF, zValF, time); + } else if (arguments.length === 6 || arguments === 7) { + this.orientForward(xValF, yValF, zValF); + this.orientUp(xValU, yValU, zValU, time); + } + return [ + this.spatializer.forwardX.value, + this.spatializer.forwardY.value, + this.spatializer.forwardZ.value, + this.spatializer.upX.value, + this.spatializer.upY.value, + this.spatializer.upZ.value + ]; + }; + p5.Listener3D.prototype.orientForward = function (xValF, yValF, zValF, time) { + this.forwardX(xValF, time); + this.forwardY(yValF, time); + this.forwardZ(zValF, time); + return [ + this.spatializer.forwardX, + this.spatializer.forwardY, + this.spatializer.forwardZ + ]; + }; + p5.Listener3D.prototype.orientUp = function (xValU, yValU, zValU, time) { + this.upX(xValU, time); + this.upY(yValU, time); + this.upZ(zValU, time); + return [ + this.spatializer.upX, + this.spatializer.upY, + this.spatializer.upZ + ]; + }; + /** + * Getter and setter methods for orient coordinates + * @return {Number} [updated coordinate value] + */ + p5.Listener3D.prototype.forwardX = function (xVal, time) { + var t = time || 0; + if (typeof xVal === 'number') { + this.spatializer.forwardX.value = xVal; + this.spatializer.forwardX.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.spatializer.forwardX.linearRampToValueAtTime(xVal, this.ac.currentTime + 0.02 + t); + } else if (xVal) { + xVal.connect(this.spatializer.forwardX); + } + return this.spatializer.forwardX.value; + }; + p5.Listener3D.prototype.forwardY = function (yVal, time) { + var t = time || 0; + if (typeof yVal === 'number') { + this.spatializer.forwardY.value = yVal; + this.spatializer.forwardY.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.spatializer.forwardY.linearRampToValueAtTime(yVal, this.ac.currentTime + 0.02 + t); + } else if (yVal) { + yVal.connect(this.spatializer.forwardY); + } + return this.spatializer.forwardY.value; + }; + p5.Listener3D.prototype.forwardZ = function (zVal, time) { + var t = time || 0; + if (typeof zVal === 'number') { + this.spatializer.forwardZ.value = zVal; + this.spatializer.forwardZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.spatializer.forwardZ.linearRampToValueAtTime(zVal, this.ac.currentTime + 0.02 + t); + } else if (zVal) { + zVal.connect(this.spatializer.forwardZ); + } + return this.spatializer.forwardZ.value; + }; + p5.Listener3D.prototype.upX = function (xVal, time) { + var t = time || 0; + if (typeof xVal === 'number') { + this.spatializer.upX.value = xVal; + this.spatializer.upX.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.spatializer.upX.linearRampToValueAtTime(xVal, this.ac.currentTime + 0.02 + t); + } else if (xVal) { + xVal.connect(this.spatializer.upX); + } + return this.spatializer.upX.value; + }; + p5.Listener3D.prototype.upY = function (yVal, time) { + var t = time || 0; + if (typeof yVal === 'number') { + this.spatializer.upY.value = yVal; + 
this.spatializer.upY.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.spatializer.upY.linearRampToValueAtTime(yVal, this.ac.currentTime + 0.02 + t); + } else if (yVal) { + yVal.connect(this.spatializer.upY); + } + return this.spatializer.upY.value; + }; + p5.Listener3D.prototype.upZ = function (zVal, time) { + var t = time || 0; + if (typeof zVal === 'number') { + this.spatializer.upZ.value = zVal; + this.spatializer.upZ.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.spatializer.upZ.linearRampToValueAtTime(zVal, this.ac.currentTime + 0.02 + t); + } else if (zVal) { + zVal.connect(this.spatializer.upZ); + } + return this.spatializer.upZ.value; + }; + return p5.Listener3D; +}(master, effect); +var delay; +'use strict'; +delay = function () { + var Filter = filter; + var Effect = effect; + /** + * Delay is an echo effect. It processes an existing sound source, + * and outputs a delayed version of that sound. The p5.Delay can + * produce different effects depending on the delayTime, feedback, + * filter, and type. In the example below, a feedback of 0.5 (the + * defaul value) will produce a looping delay that decreases in + * volume by 50% each repeat. A filter will cut out the high + * frequencies so that the delay does not sound as piercing as the + * original source. + * + * + * This class extends p5.Effect. + * Methods amp(), chain(), + * drywet(), connect(), and + * disconnect() are available. + * @class p5.Delay + * @extends p5.Effect + * @constructor + * @example + *
+ * var noise, env, delay; + * + * function setup() { + * background(0); + * noStroke(); + * fill(255); + * textAlign(CENTER); + * text('click to play', width/2, height/2); + * + * noise = new p5.Noise('brown'); + * noise.amp(0); + * noise.start(); + * + * delay = new p5.Delay(); + * + * // delay.process() accepts 4 parameters: + * // source, delayTime, feedback, filter frequency + * // play with these numbers!! + * delay.process(noise, .12, .7, 2300); + * + * // play the noise with an envelope, + * // a series of fades ( time / value pairs ) + * env = new p5.Env(.01, 0.2, .2, .1); + * } + * + * // mouseClick triggers envelope + * function mouseClicked() { + * // is mouse over canvas? + * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) { + * env.play(noise); + * } + * } + *
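+ *
+ * The values passed to process() can also be adjusted individually later on
+ * (a hedged sketch of the setters defined below, reusing 'delay' from above):
+ *
+ *   delay.delayTime(0.2);   // echo time in seconds
+ *   delay.feedback(0.6);    // loop gain; must stay below 1.0
+ *   delay.filter(1500);     // lowpass cutoff applied to the echoes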
+ */ + p5.Delay = function () { + Effect.call(this); + this._split = this.ac.createChannelSplitter(2); + this._merge = this.ac.createChannelMerger(2); + this._leftGain = this.ac.createGain(); + this._rightGain = this.ac.createGain(); + /** + * The p5.Delay is built with two + * + * Web Audio Delay Nodes, one for each stereo channel. + * + * @property {DelayNode} leftDelay + */ + this.leftDelay = this.ac.createDelay(); + /** + * The p5.Delay is built with two + * + * Web Audio Delay Nodes, one for each stereo channel. + * + * @property {DelayNode} rightDelay + */ + this.rightDelay = this.ac.createDelay(); + this._leftFilter = new Filter(); + this._rightFilter = new Filter(); + this._leftFilter.disconnect(); + this._rightFilter.disconnect(); + this._leftFilter.biquad.frequency.setValueAtTime(1200, this.ac.currentTime); + this._rightFilter.biquad.frequency.setValueAtTime(1200, this.ac.currentTime); + this._leftFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime); + this._rightFilter.biquad.Q.setValueAtTime(0.3, this.ac.currentTime); + // graph routing + this.input.connect(this._split); + this.leftDelay.connect(this._leftGain); + this.rightDelay.connect(this._rightGain); + this._leftGain.connect(this._leftFilter.input); + this._rightGain.connect(this._rightFilter.input); + this._merge.connect(this.wet); + this._leftFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime); + this._rightFilter.biquad.gain.setValueAtTime(1, this.ac.currentTime); + // default routing + this.setType(0); + this._maxDelay = this.leftDelay.delayTime.maxValue; + // set initial feedback to 0.5 + this.feedback(0.5); + }; + p5.Delay.prototype = Object.create(Effect.prototype); + /** + * Add delay to an audio signal according to a set + * of delay parameters. + * + * @method process + * @param {Object} Signal An object that outputs audio + * @param {Number} [delayTime] Time (in seconds) of the delay/echo. + * Some browsers limit delayTime to + * 1 second. + * @param {Number} [feedback] sends the delay back through itself + * in a loop that decreases in volume + * each time. + * @param {Number} [lowPass] Cutoff frequency. Only frequencies + * below the lowPass will be part of the + * delay. + */ + p5.Delay.prototype.process = function (src, _delayTime, _feedback, _filter) { + var feedback = _feedback || 0; + var delayTime = _delayTime || 0; + if (feedback >= 1) { + throw new Error('Feedback value will force a positive feedback loop.'); + } + if (delayTime >= this._maxDelay) { + throw new Error('Delay Time exceeds maximum delay time of ' + this._maxDelay + ' second.'); + } + src.connect(this.input); + this.leftDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime); + this.rightDelay.delayTime.setValueAtTime(delayTime, this.ac.currentTime); + this._leftGain.gain.value = feedback; + this._rightGain.gain.value = feedback; + if (_filter) { + this._leftFilter.freq(_filter); + this._rightFilter.freq(_filter); + } + }; + /** + * Set the delay (echo) time, in seconds. Usually this value will be + * a floating point number between 0.0 and 1.0. + * + * @method delayTime + * @param {Number} delayTime Time (in seconds) of the delay + */ + p5.Delay.prototype.delayTime = function (t) { + // if t is an audio node... 
+ if (typeof t !== 'number') { + t.connect(this.leftDelay.delayTime); + t.connect(this.rightDelay.delayTime); + } else { + this.leftDelay.delayTime.cancelScheduledValues(this.ac.currentTime); + this.rightDelay.delayTime.cancelScheduledValues(this.ac.currentTime); + this.leftDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime); + this.rightDelay.delayTime.linearRampToValueAtTime(t, this.ac.currentTime); + } + }; + /** + * Feedback occurs when Delay sends its signal back through its input + * in a loop. The feedback amount determines how much signal to send each + * time through the loop. A feedback greater than 1.0 is not desirable because + * it will increase the overall output each time through the loop, + * creating an infinite feedback loop. The default value is 0.5 + * + * @method feedback + * @param {Number|Object} feedback 0.0 to 1.0, or an object such as an + * Oscillator that can be used to + * modulate this param + * @returns {Number} Feedback value + * + */ + p5.Delay.prototype.feedback = function (f) { + // if f is an audio node... + if (f && typeof f !== 'number') { + f.connect(this._leftGain.gain); + f.connect(this._rightGain.gain); + } else if (f >= 1) { + throw new Error('Feedback value will force a positive feedback loop.'); + } else if (typeof f === 'number') { + this._leftGain.gain.value = f; + this._rightGain.gain.value = f; + } + // return value of feedback + return this._leftGain.gain.value; + }; + /** + * Set a lowpass filter frequency for the delay. A lowpass filter + * will cut off any frequencies higher than the filter frequency. + * + * @method filter + * @param {Number|Object} cutoffFreq A lowpass filter will cut off any + * frequencies higher than the filter frequency. + * @param {Number|Object} res Resonance of the filter frequency + * cutoff, or an object (i.e. a p5.Oscillator) + * that can be used to modulate this parameter. + * High numbers (i.e. 15) will produce a resonance, + * low numbers (i.e. .2) will produce a slope. + */ + p5.Delay.prototype.filter = function (freq, q) { + this._leftFilter.set(freq, q); + this._rightFilter.set(freq, q); + }; + /** + * Choose a preset type of delay. 'pingPong' bounces the signal + * from the left to the right channel to produce a stereo effect. + * Any other parameter will revert to the default delay setting. + * + * @method setType + * @param {String|Number} type 'pingPong' (1) or 'default' (0) + */ + p5.Delay.prototype.setType = function (t) { + if (t === 1) { + t = 'pingPong'; + } + this._split.disconnect(); + this._leftFilter.disconnect(); + this._rightFilter.disconnect(); + this._split.connect(this.leftDelay, 0); + this._split.connect(this.rightDelay, 1); + switch (t) { + case 'pingPong': + this._rightFilter.setType(this._leftFilter.biquad.type); + this._leftFilter.output.connect(this._merge, 0, 0); + this._rightFilter.output.connect(this._merge, 0, 1); + this._leftFilter.output.connect(this.rightDelay); + this._rightFilter.output.connect(this.leftDelay); + break; + default: + this._leftFilter.output.connect(this._merge, 0, 0); + this._rightFilter.output.connect(this._merge, 0, 1); + this._leftFilter.output.connect(this.leftDelay); + this._rightFilter.output.connect(this.rightDelay); + } + }; + // DocBlocks for methods inherited from p5.Effect + /** + * Set the output level of the delay effect. 
+ * + * @method amp + * @param {Number} volume amplitude between 0 and 1.0 + * @param {Number} [rampTime] create a fade that lasts rampTime + * @param {Number} [timeFromNow] schedule this event to happen + * seconds from now + */ + /** + * Send output to a p5.sound or web audio object + * + * @method connect + * @param {Object} unit + */ + /** + * Disconnect all output. + * + * @method disconnect + */ + p5.Delay.prototype.dispose = function () { + Effect.prototype.dispose.apply(this); + this._split.disconnect(); + this._leftFilter.dispose(); + this._rightFilter.dispose(); + this._merge.disconnect(); + this._leftGain.disconnect(); + this._rightGain.disconnect(); + this.leftDelay.disconnect(); + this.rightDelay.disconnect(); + this._split = undefined; + this._leftFilter = undefined; + this._rightFilter = undefined; + this._merge = undefined; + this._leftGain = undefined; + this._rightGain = undefined; + this.leftDelay = undefined; + this.rightDelay = undefined; + }; +}(filter, effect); +var reverb; +'use strict'; +reverb = function () { + var CustomError = errorHandler; + var Effect = effect; + /** + * Reverb adds depth to a sound through a large number of decaying + * echoes. It creates the perception that sound is occurring in a + * physical space. The p5.Reverb has paramters for Time (how long does the + * reverb last) and decayRate (how much the sound decays with each echo) + * that can be set with the .set() or .process() methods. The p5.Convolver + * extends p5.Reverb allowing you to recreate the sound of actual physical + * spaces through convolution. + * + * This class extends p5.Effect. + * Methods amp(), chain(), + * drywet(), connect(), and + * disconnect() are available. + * + * @class p5.Reverb + * @extends p5.Effect + * @constructor + * @example + *
+ * var soundFile, reverb; + * function preload() { + * soundFile = loadSound('assets/Damscray_DancingTiger.mp3'); + * } + * + * function setup() { + * reverb = new p5.Reverb(); + * soundFile.disconnect(); // so we'll only hear reverb... + * + * // connect soundFile to reverb, process w/ + * // 3 second reverbTime, decayRate of 2% + * reverb.process(soundFile, 3, 2); + * soundFile.play(); + * } + *
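+ *
+ * Settings can also be changed later without reconnecting the source
+ * (a hedged sketch reusing 'reverb' from above):
+ *
+ *   reverb.set(6, 10);     // 6 second tail, 10% decay per echo
+ *   reverb.drywet(0.5);    // blend dry and wet signal (inherited from p5.Effect)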
+ */ + p5.Reverb = function () { + Effect.call(this); + this.convolverNode = this.ac.createConvolver(); + // otherwise, Safari distorts + this.input.gain.value = 0.5; + this.input.connect(this.convolverNode); + this.convolverNode.connect(this.wet); + // default params + this._seconds = 3; + this._decay = 2; + this._reverse = false; + this._buildImpulse(); + }; + p5.Reverb.prototype = Object.create(Effect.prototype); + /** + * Connect a source to the reverb, and assign reverb parameters. + * + * @method process + * @param {Object} src p5.sound / Web Audio object with a sound + * output. + * @param {Number} [seconds] Duration of the reverb, in seconds. + * Min: 0, Max: 10. Defaults to 3. + * @param {Number} [decayRate] Percentage of decay with each echo. + * Min: 0, Max: 100. Defaults to 2. + * @param {Boolean} [reverse] Play the reverb backwards or forwards. + */ + p5.Reverb.prototype.process = function (src, seconds, decayRate, reverse) { + src.connect(this.input); + var rebuild = false; + if (seconds) { + this._seconds = seconds; + rebuild = true; + } + if (decayRate) { + this._decay = decayRate; + } + if (reverse) { + this._reverse = reverse; + } + if (rebuild) { + this._buildImpulse(); + } + }; + /** + * Set the reverb settings. Similar to .process(), but without + * assigning a new input. + * + * @method set + * @param {Number} [seconds] Duration of the reverb, in seconds. + * Min: 0, Max: 10. Defaults to 3. + * @param {Number} [decayRate] Percentage of decay with each echo. + * Min: 0, Max: 100. Defaults to 2. + * @param {Boolean} [reverse] Play the reverb backwards or forwards. + */ + p5.Reverb.prototype.set = function (seconds, decayRate, reverse) { + var rebuild = false; + if (seconds) { + this._seconds = seconds; + rebuild = true; + } + if (decayRate) { + this._decay = decayRate; + } + if (reverse) { + this._reverse = reverse; + } + if (rebuild) { + this._buildImpulse(); + } + }; + // DocBlocks for methods inherited from p5.Effect + /** + * Set the output level of the reverb effect. + * + * @method amp + * @param {Number} volume amplitude between 0 and 1.0 + * @param {Number} [rampTime] create a fade that lasts rampTime + * @param {Number} [timeFromNow] schedule this event to happen + * seconds from now + */ + /** + * Send output to a p5.sound or web audio object + * + * @method connect + * @param {Object} unit + */ + /** + * Disconnect all output. + * + * @method disconnect + */ + /** + * Inspired by Simple Reverb by Jordan Santell + * https://github.com/web-audio-components/simple-reverb/blob/master/index.js + * + * Utility function for building an impulse response + * based on the module parameters. + * + * @private + */ + p5.Reverb.prototype._buildImpulse = function () { + var rate = this.ac.sampleRate; + var length = rate * this._seconds; + var decay = this._decay; + var impulse = this.ac.createBuffer(2, length, rate); + var impulseL = impulse.getChannelData(0); + var impulseR = impulse.getChannelData(1); + var n, i; + for (i = 0; i < length; i++) { + n = this._reverse ? 
length - i : i; + impulseL[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay); + impulseR[i] = (Math.random() * 2 - 1) * Math.pow(1 - n / length, decay); + } + this.convolverNode.buffer = impulse; + }; + p5.Reverb.prototype.dispose = function () { + Effect.prototype.dispose.apply(this); + if (this.convolverNode) { + this.convolverNode.buffer = null; + this.convolverNode = null; + } + }; + // ======================================================================= + // *** p5.Convolver *** + // ======================================================================= + /** + *

p5.Convolver extends p5.Reverb. It can emulate the sound of real + * physical spaces through a process called + * convolution.

+ * + *

Convolution multiplies any audio input by an "impulse response" + * to simulate the dispersion of sound over time. The impulse response is + * generated from an audio file that you provide. One way to + * generate an impulse response is to pop a balloon in a reverberant space + * and record the echo. Convolution can also be used to experiment with + * sound.

+ * + *

Use the method createConvolver(path) to instantiate a + * p5.Convolver with a path to your impulse response audio file.

+ * + * @class p5.Convolver + * @extends p5.Effect + * @constructor + * @param {String} path path to a sound file + * @param {Function} [callback] function to call when loading succeeds + * @param {Function} [errorCallback] function to call if loading fails. + * This function will receive an error or + * XMLHttpRequest object with information + * about what went wrong. + * @example + *
+ * var cVerb, sound; + * function preload() { + * // We have both MP3 and OGG versions of all sound assets + * soundFormats('ogg', 'mp3'); + * + * // Try replacing 'bx-spring' with other soundfiles like + * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox' + * cVerb = createConvolver('assets/bx-spring.mp3'); + * + * // Try replacing 'Damscray_DancingTiger' with + * // 'beat', 'doorbell', lucky_dragons_-_power_melody' + * sound = loadSound('assets/Damscray_DancingTiger.mp3'); + * } + * + * function setup() { + * // disconnect from master output... + * sound.disconnect(); + * + * // ...and process with cVerb + * // so that we only hear the convolution + * cVerb.process(sound); + * + * sound.play(); + * } + *
+ */ + p5.Convolver = function (path, callback, errorCallback) { + Effect.call(this); + /** + * Internally, the p5.Convolver uses the a + * + * Web Audio Convolver Node. + * + * @property {ConvolverNode} convolverNod + */ + this.convolverNode = this.ac.createConvolver(); + // otherwise, Safari distorts + this.input.gain.value = 0.5; + this.input.connect(this.convolverNode); + this.convolverNode.connect(this.wet); + if (path) { + this.impulses = []; + this._loadBuffer(path, callback, errorCallback); + } else { + // parameters + this._seconds = 3; + this._decay = 2; + this._reverse = false; + this._buildImpulse(); + } + }; + p5.Convolver.prototype = Object.create(p5.Reverb.prototype); + p5.prototype.registerPreloadMethod('createConvolver', p5.prototype); + /** + * Create a p5.Convolver. Accepts a path to a soundfile + * that will be used to generate an impulse response. + * + * @method createConvolver + * @param {String} path path to a sound file + * @param {Function} [callback] function to call if loading is successful. + * The object will be passed in as the argument + * to the callback function. + * @param {Function} [errorCallback] function to call if loading is not successful. + * A custom error will be passed in as the argument + * to the callback function. + * @return {p5.Convolver} + * @example + *
+ * var cVerb, sound; + * function preload() { + * // We have both MP3 and OGG versions of all sound assets + * soundFormats('ogg', 'mp3'); + * + * // Try replacing 'bx-spring' with other soundfiles like + * // 'concrete-tunnel' 'small-plate' 'drum' 'beatbox' + * cVerb = createConvolver('assets/bx-spring.mp3'); + * + * // Try replacing 'Damscray_DancingTiger' with + * // 'beat', 'doorbell', lucky_dragons_-_power_melody' + * sound = loadSound('assets/Damscray_DancingTiger.mp3'); + * } + * + * function setup() { + * // disconnect from master output... + * sound.disconnect(); + * + * // ...and process with cVerb + * // so that we only hear the convolution + * cVerb.process(sound); + * + * sound.play(); + * } + *
+ */ + p5.prototype.createConvolver = function (path, callback, errorCallback) { + // if loading locally without a server + if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') { + alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS'); + } + var cReverb = new p5.Convolver(path, callback, errorCallback); + cReverb.impulses = []; + return cReverb; + }; + /** + * Private method to load a buffer as an Impulse Response, + * assign it to the convolverNode, and add to the Array of .impulses. + * + * @param {String} path + * @param {Function} callback + * @param {Function} errorCallback + * @private + */ + p5.Convolver.prototype._loadBuffer = function (path, callback, errorCallback) { + var path = p5.prototype._checkFileFormats(path); + var self = this; + var errorTrace = new Error().stack; + var ac = p5.prototype.getAudioContext(); + var request = new XMLHttpRequest(); + request.open('GET', path, true); + request.responseType = 'arraybuffer'; + request.onload = function () { + if (request.status === 200) { + // on success loading file: + ac.decodeAudioData(request.response, function (buff) { + var buffer = {}; + var chunks = path.split('/'); + buffer.name = chunks[chunks.length - 1]; + buffer.audioBuffer = buff; + self.impulses.push(buffer); + self.convolverNode.buffer = buffer.audioBuffer; + if (callback) { + callback(buffer); + } + }, // error decoding buffer. "e" is undefined in Chrome 11/22/2015 + function () { + var err = new CustomError('decodeAudioData', errorTrace, self.url); + var msg = 'AudioContext error at decodeAudioData for ' + self.url; + if (errorCallback) { + err.msg = msg; + errorCallback(err); + } else { + console.error(msg + '\n The error stack trace includes: \n' + err.stack); + } + }); + } else { + var err = new CustomError('loadConvolver', errorTrace, self.url); + var msg = 'Unable to load ' + self.url + '. The request status was: ' + request.status + ' (' + request.statusText + ')'; + if (errorCallback) { + err.message = msg; + errorCallback(err); + } else { + console.error(msg + '\n The error stack trace includes: \n' + err.stack); + } + } + }; + // if there is another error, aside from 404... + request.onerror = function () { + var err = new CustomError('loadConvolver', errorTrace, self.url); + var msg = 'There was no response from the server at ' + self.url + '. Check the url and internet connectivity.'; + if (errorCallback) { + err.message = msg; + errorCallback(err); + } else { + console.error(msg + '\n The error stack trace includes: \n' + err.stack); + } + }; + request.send(); + }; + p5.Convolver.prototype.set = null; + /** + * Connect a source to the reverb, and assign reverb parameters. + * + * @method process + * @param {Object} src p5.sound / Web Audio object with a sound + * output. + * @example + *
+ * var cVerb, sound; + * function preload() { + * soundFormats('ogg', 'mp3'); + * + * cVerb = createConvolver('assets/concrete-tunnel.mp3'); + * + * sound = loadSound('assets/beat.mp3'); + * } + * + * function setup() { + * // disconnect from master output... + * sound.disconnect(); + * + * // ...and process with (i.e. connect to) cVerb + * // so that we only hear the convolution + * cVerb.process(sound); + * + * sound.play(); + * } + *
+ */ + p5.Convolver.prototype.process = function (src) { + src.connect(this.input); + }; + /** + * If you load multiple impulse files using the .addImpulse method, + * they will be stored as Objects in this Array. Toggle between them + * with the toggleImpulse(id) method. + * + * @property {Array} impulses + */ + p5.Convolver.prototype.impulses = []; + /** + * Load and assign a new Impulse Response to the p5.Convolver. + * The impulse is added to the .impulses array. Previous + * impulses can be accessed with the .toggleImpulse(id) + * method. + * + * @method addImpulse + * @param {String} path path to a sound file + * @param {Function} callback function (optional) + * @param {Function} errorCallback function (optional) + */ + p5.Convolver.prototype.addImpulse = function (path, callback, errorCallback) { + // if loading locally without a server + if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') { + alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS'); + } + this._loadBuffer(path, callback, errorCallback); + }; + /** + * Similar to .addImpulse, except that the .impulses + * Array is reset to save memory. A new .impulses + * array is created with this impulse as the only item. + * + * @method resetImpulse + * @param {String} path path to a sound file + * @param {Function} callback function (optional) + * @param {Function} errorCallback function (optional) + */ + p5.Convolver.prototype.resetImpulse = function (path, callback, errorCallback) { + // if loading locally without a server + if (window.location.origin.indexOf('file://') > -1 && window.cordova === 'undefined') { + alert('This sketch may require a server to load external files. Please see http://bit.ly/1qcInwS'); + } + this.impulses = []; + this._loadBuffer(path, callback, errorCallback); + }; + /** + * If you have used .addImpulse() to add multiple impulses + * to a p5.Convolver, then you can use this method to toggle between + * the items in the .impulses Array. Accepts a parameter + * to identify which impulse you wish to use, identified either by its + * original filename (String) or by its position in the .impulses + * Array (Number).
+ * You can access the objects in the .impulses Array directly. Each + * Object has two attributes: an .audioBuffer (type: + * Web Audio + * AudioBuffer) and a .name, a String that corresponds + * with the original filename. + * + * @method toggleImpulse + * @param {String|Number} id Identify the impulse by its original filename + * (String), or by its position in the + * .impulses Array (Number). + */ + p5.Convolver.prototype.toggleImpulse = function (id) { + if (typeof id === 'number' && id < this.impulses.length) { + this.convolverNode.buffer = this.impulses[id].audioBuffer; + } + if (typeof id === 'string') { + for (var i = 0; i < this.impulses.length; i++) { + if (this.impulses[i].name === id) { + this.convolverNode.buffer = this.impulses[i].audioBuffer; + break; + } + } + } + }; + p5.Convolver.prototype.dispose = function () { + Effect.prototype.dispose.apply(this); + // remove all the Impulse Response buffers + for (var i in this.impulses) { + if (this.impulses[i]) { + this.impulses[i] = null; + } + } + this.convolverNode.disconnect(); + this.concolverNode = null; + }; +}(errorHandler, effect, sndcore); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_core_TimelineState; +Tone_core_TimelineState = function (Tone) { + 'use strict'; + Tone.TimelineState = function (initial) { + Tone.Timeline.call(this); + this._initial = initial; + }; + Tone.extend(Tone.TimelineState, Tone.Timeline); + Tone.TimelineState.prototype.getValueAtTime = function (time) { + var event = this.get(time); + if (event !== null) { + return event.state; + } else { + return this._initial; + } + }; + Tone.TimelineState.prototype.setStateAtTime = function (state, time) { + this.add({ + 'state': state, + 'time': time + }); + }; + return Tone.TimelineState; +}(Tone_core_Tone, Tone_core_Timeline); +/** Tone.js module by Yotam Mann, MIT License 2016 http://opensource.org/licenses/MIT **/ +var Tone_core_Clock; +Tone_core_Clock = function (Tone) { + 'use strict'; + Tone.Clock = function () { + Tone.Emitter.call(this); + var options = this.optionsObject(arguments, [ + 'callback', + 'frequency' + ], Tone.Clock.defaults); + this.callback = options.callback; + this._nextTick = 0; + this._lastState = Tone.State.Stopped; + this.frequency = new Tone.TimelineSignal(options.frequency, Tone.Type.Frequency); + this._readOnly('frequency'); + this.ticks = 0; + this._state = new Tone.TimelineState(Tone.State.Stopped); + this._boundLoop = this._loop.bind(this); + this.context.on('tick', this._boundLoop); + }; + Tone.extend(Tone.Clock, Tone.Emitter); + Tone.Clock.defaults = { + 'callback': Tone.noOp, + 'frequency': 1, + 'lookAhead': 'auto' + }; + Object.defineProperty(Tone.Clock.prototype, 'state', { + get: function () { + return this._state.getValueAtTime(this.now()); + } + }); + Tone.Clock.prototype.start = function (time, offset) { + time = this.toSeconds(time); + if (this._state.getValueAtTime(time) !== Tone.State.Started) { + this._state.add({ + 'state': Tone.State.Started, + 'time': time, + 'offset': offset + }); + } + return this; + }; + Tone.Clock.prototype.stop = function (time) { + time = this.toSeconds(time); + this._state.cancel(time); + this._state.setStateAtTime(Tone.State.Stopped, time); + return this; + }; + Tone.Clock.prototype.pause = function (time) { + time = this.toSeconds(time); + if (this._state.getValueAtTime(time) === Tone.State.Started) { + this._state.setStateAtTime(Tone.State.Paused, time); + } + return this; + }; + Tone.Clock.prototype._loop = function () { + 
var now = this.now(); + var lookAhead = this.context.lookAhead; + var updateInterval = this.context.updateInterval; + var lagCompensation = this.context.lag * 2; + var loopInterval = now + lookAhead + updateInterval + lagCompensation; + while (loopInterval > this._nextTick && this._state) { + var currentState = this._state.getValueAtTime(this._nextTick); + if (currentState !== this._lastState) { + this._lastState = currentState; + var event = this._state.get(this._nextTick); + if (currentState === Tone.State.Started) { + this._nextTick = event.time; + if (!this.isUndef(event.offset)) { + this.ticks = event.offset; + } + this.emit('start', event.time, this.ticks); + } else if (currentState === Tone.State.Stopped) { + this.ticks = 0; + this.emit('stop', event.time); + } else if (currentState === Tone.State.Paused) { + this.emit('pause', event.time); + } + } + var tickTime = this._nextTick; + if (this.frequency) { + this._nextTick += 1 / this.frequency.getValueAtTime(this._nextTick); + if (currentState === Tone.State.Started) { + this.callback(tickTime); + this.ticks++; + } + } + } + }; + Tone.Clock.prototype.getStateAtTime = function (time) { + time = this.toSeconds(time); + return this._state.getValueAtTime(time); + }; + Tone.Clock.prototype.dispose = function () { + Tone.Emitter.prototype.dispose.call(this); + this.context.off('tick', this._boundLoop); + this._writable('frequency'); + this.frequency.dispose(); + this.frequency = null; + this._boundLoop = null; + this._nextTick = Infinity; + this.callback = null; + this._state.dispose(); + this._state = null; + }; + return Tone.Clock; +}(Tone_core_Tone, Tone_signal_TimelineSignal, Tone_core_TimelineState, Tone_core_Emitter); +var metro; +'use strict'; +metro = function () { + var p5sound = master; + // requires the Tone.js library's Clock (MIT license, Yotam Mann) + // https://github.com/TONEnoTONE/Tone.js/ + var Clock = Tone_core_Clock; + p5.Metro = function () { + this.clock = new Clock({ 'callback': this.ontick.bind(this) }); + this.syncedParts = []; + this.bpm = 120; + // gets overridden by p5.Part + this._init(); + this.prevTick = 0; + this.tatumTime = 0; + this.tickCallback = function () { + }; + }; + p5.Metro.prototype.ontick = function (tickTime) { + var elapsedTime = tickTime - this.prevTick; + var secondsFromNow = tickTime - p5sound.audiocontext.currentTime; + if (elapsedTime - this.tatumTime <= -0.02) { + return; + } else { + // console.log('ok', this.syncedParts[0].phrases[0].name); + this.prevTick = tickTime; + // for all of the active things on the metro: + var self = this; + this.syncedParts.forEach(function (thisPart) { + if (!thisPart.isPlaying) + return; + thisPart.incrementStep(secondsFromNow); + // each synced source keeps track of its own beat number + thisPart.phrases.forEach(function (thisPhrase) { + var phraseArray = thisPhrase.sequence; + var bNum = self.metroTicks % phraseArray.length; + if (phraseArray[bNum] !== 0 && (self.metroTicks < phraseArray.length || !thisPhrase.looping)) { + thisPhrase.callback(secondsFromNow, phraseArray[bNum]); + } + }); + }); + this.metroTicks += 1; + this.tickCallback(secondsFromNow); + } + }; + p5.Metro.prototype.setBPM = function (bpm, rampTime) { + var beatTime = 60 / (bpm * this.tatums); + var now = p5sound.audiocontext.currentTime; + this.tatumTime = beatTime; + var rampTime = rampTime || 0; + this.clock.frequency.setValueAtTime(this.clock.frequency.value, now); + this.clock.frequency.linearRampToValueAtTime(bpm, now + rampTime); + this.bpm = bpm; + }; + p5.Metro.prototype.getBPM 
= function () { + return this.clock.getRate() / this.tatums * 60; + }; + p5.Metro.prototype._init = function () { + this.metroTicks = 0; + }; + // clear existing synced parts, add only this one + p5.Metro.prototype.resetSync = function (part) { + this.syncedParts = [part]; + }; + // push a new synced part to the array + p5.Metro.prototype.pushSync = function (part) { + this.syncedParts.push(part); + }; + p5.Metro.prototype.start = function (timeFromNow) { + var t = timeFromNow || 0; + var now = p5sound.audiocontext.currentTime; + this.clock.start(now + t); + this.setBPM(this.bpm); + }; + p5.Metro.prototype.stop = function (timeFromNow) { + var t = timeFromNow || 0; + var now = p5sound.audiocontext.currentTime; + this.clock.stop(now + t); + }; + p5.Metro.prototype.beatLength = function (tatums) { + this.tatums = 1 / tatums / 4; + }; +}(master, Tone_core_Clock); +var looper; +'use strict'; +looper = function () { + var p5sound = master; + var BPM = 120; + /** + * Set the global tempo, in beats per minute, for all + * p5.Parts. This method will impact all active p5.Parts. + * + * @param {Number} BPM Beats Per Minute + * @param {Number} rampTime Seconds from now + */ + p5.prototype.setBPM = function (bpm, rampTime) { + BPM = bpm; + for (var i in p5sound.parts) { + if (p5sound.parts[i]) { + p5sound.parts[i].setBPM(bpm, rampTime); + } + } + }; + /** + *

A phrase is a pattern of musical events over time, i.e. + * a series of notes and rests.

+ * + *

Phrases must be added to a p5.Part for playback, and + * each part can play multiple phrases at the same time. + * For example, one Phrase might be a kick drum, another + * could be a snare, and another could be the bassline.

+ * + *

The first parameter is a name so that the phrase can be + * modified or deleted later. The callback is a function that + * this phrase will call at every step, for example it might be + * called playNote(value){}. The array determines + * which value is passed into the callback at each step of the + * phrase. It can be numbers, an object with multiple numbers, + * or a zero (0), which indicates a rest (the callback won't be called for that step).

+ * + * @class p5.Phrase + * @constructor + * @param {String} name Name so that you can access the Phrase. + * @param {Function} callback The name of a function that this phrase + * will call. Typically it will play a sound, + * and accept two parameters: a time at which + * to play the sound (in seconds from now), + * and a value from the sequence array. The + * time should be passed into the play() or + * start() method to ensure precision. + * @param {Array} sequence Array of values to pass into the callback + * at each step of the phrase. + * @example + *
+ * var mySound, myPhrase, myPart; + * var pattern = [1,0,0,2,0,2,0,0]; + * var msg = 'click to play'; + * + * function preload() { + * mySound = loadSound('assets/beatbox.mp3'); + * } + * + * function setup() { + * noStroke(); + * fill(255); + * textAlign(CENTER); + * masterVolume(0.1); + * + * myPhrase = new p5.Phrase('bbox', makeSound, pattern); + * myPart = new p5.Part(); + * myPart.addPhrase(myPhrase); + * myPart.setBPM(60); + * } + * + * function draw() { + * background(0); + * text(msg, width/2, height/2); + * } + * + * function makeSound(time, playbackRate) { + * mySound.rate(playbackRate); + * mySound.play(time); + * } + * + * function mouseClicked() { + * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) { + * myPart.start(); + * msg = 'playing pattern'; + * } + * } + * + *
+ */ + p5.Phrase = function (name, callback, sequence) { + this.phraseStep = 0; + this.name = name; + this.callback = callback; + /** + * Array of values to pass into the callback + * at each step of the phrase. Depending on the callback + * function's requirements, these values may be numbers, + * strings, or an object with multiple parameters. + * Zero (0) indicates a rest. + * + * @property {Array} sequence + */ + this.sequence = sequence; + }; + /** + *

A p5.Part plays back one or more p5.Phrases. Instantiate a part + * with steps and tatums. By default, each step represents 1/16th note.

+ * + *

See p5.Phrase for more about musical timing.

+ * + * @class p5.Part + * @constructor + * @param {Number} [steps] Steps in the part + * @param {Number} [tatums] Divisions of a beat (default is 1/16, i.e. a sixteenth note) + * @example + *
+ * var box, drum, myPart; + * var boxPat = [1,0,0,2,0,2,0,0]; + * var drumPat = [0,1,1,0,2,0,1,0]; + * var msg = 'click to play'; + * + * function preload() { + * box = loadSound('assets/beatbox.mp3'); + * drum = loadSound('assets/drum.mp3'); + * } + * + * function setup() { + * noStroke(); + * fill(255); + * textAlign(CENTER); + * masterVolume(0.1); + * + * var boxPhrase = new p5.Phrase('box', playBox, boxPat); + * var drumPhrase = new p5.Phrase('drum', playDrum, drumPat); + * myPart = new p5.Part(); + * myPart.addPhrase(boxPhrase); + * myPart.addPhrase(drumPhrase); + * myPart.setBPM(60); + * masterVolume(0.1); + * } + * + * function draw() { + * background(0); + * text(msg, width/2, height/2); + * } + * + * function playBox(time, playbackRate) { + * box.rate(playbackRate); + * box.play(time); + * } + * + * function playDrum(time, playbackRate) { + * drum.rate(playbackRate); + * drum.play(time); + * } + * + * function mouseClicked() { + * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) { + * myPart.start(); + * msg = 'playing part'; + * } + * } + *
+ */ + p5.Part = function (steps, bLength) { + this.length = steps || 0; + // how many beats + this.partStep = 0; + this.phrases = []; + this.isPlaying = false; + this.noLoop(); + this.tatums = bLength || 0.0625; + // defaults to quarter note + this.metro = new p5.Metro(); + this.metro._init(); + this.metro.beatLength(this.tatums); + this.metro.setBPM(BPM); + p5sound.parts.push(this); + this.callback = function () { + }; + }; + /** + * Set the tempo of this part, in Beats Per Minute. + * + * @method setBPM + * @param {Number} BPM Beats Per Minute + * @param {Number} [rampTime] Seconds from now + */ + p5.Part.prototype.setBPM = function (tempo, rampTime) { + this.metro.setBPM(tempo, rampTime); + }; + /** + * Returns the Beats Per Minute of this currently part. + * + * @method getBPM + * @return {Number} + */ + p5.Part.prototype.getBPM = function () { + return this.metro.getBPM(); + }; + /** + * Start playback of this part. It will play + * through all of its phrases at a speed + * determined by setBPM. + * + * @method start + * @param {Number} [time] seconds from now + */ + p5.Part.prototype.start = function (time) { + if (!this.isPlaying) { + this.isPlaying = true; + this.metro.resetSync(this); + var t = time || 0; + this.metro.start(t); + } + }; + /** + * Loop playback of this part. It will begin + * looping through all of its phrases at a speed + * determined by setBPM. + * + * @method loop + * @param {Number} [time] seconds from now + */ + p5.Part.prototype.loop = function (time) { + this.looping = true; + // rest onended function + this.onended = function () { + this.partStep = 0; + }; + var t = time || 0; + this.start(t); + }; + /** + * Tell the part to stop looping. + * + * @method noLoop + */ + p5.Part.prototype.noLoop = function () { + this.looping = false; + // rest onended function + this.onended = function () { + this.stop(); + }; + }; + /** + * Stop the part and cue it to step 0. + * + * @method stop + * @param {Number} [time] seconds from now + */ + p5.Part.prototype.stop = function (time) { + this.partStep = 0; + this.pause(time); + }; + /** + * Pause the part. Playback will resume + * from the current step. + * + * @method pause + * @param {Number} time seconds from now + */ + p5.Part.prototype.pause = function (time) { + this.isPlaying = false; + var t = time || 0; + this.metro.stop(t); + }; + /** + * Add a p5.Phrase to this Part. + * + * @method addPhrase + * @param {p5.Phrase} phrase reference to a p5.Phrase + */ + p5.Part.prototype.addPhrase = function (name, callback, array) { + var p; + if (arguments.length === 3) { + p = new p5.Phrase(name, callback, array); + } else if (arguments[0] instanceof p5.Phrase) { + p = arguments[0]; + } else { + throw 'invalid input. addPhrase accepts name, callback, array or a p5.Phrase'; + } + this.phrases.push(p); + // reset the length if phrase is longer than part's existing length + if (p.sequence.length > this.length) { + this.length = p.sequence.length; + } + }; + /** + * Remove a phrase from this part, based on the name it was + * given when it was created. + * + * @method removePhrase + * @param {String} phraseName + */ + p5.Part.prototype.removePhrase = function (name) { + for (var i in this.phrases) { + if (this.phrases[i].name === name) { + this.phrases.splice(i, 1); + } + } + }; + /** + * Get a phrase from this part, based on the name it was + * given when it was created. Now you can modify its array. 
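+ *
+ * For example, reusing 'myPart' and the 'box' phrase from the p5.Part
+ * example above (a hedged sketch):
+ *
+ *   myPart.getPhrase('box').sequence[1] = 2;       // edit a single step
+ *   myPart.replaceSequence('box', [1, 0, 1, 0]);   // swap in a new pattern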
+ * + * @method getPhrase + * @param {String} phraseName + */ + p5.Part.prototype.getPhrase = function (name) { + for (var i in this.phrases) { + if (this.phrases[i].name === name) { + return this.phrases[i]; + } + } + }; + /** + * Get a phrase from this part, based on the name it was + * given when it was created. Now you can modify its array. + * + * @method replaceSequence + * @param {String} phraseName + * @param {Array} sequence Array of values to pass into the callback + * at each step of the phrase. + */ + p5.Part.prototype.replaceSequence = function (name, array) { + for (var i in this.phrases) { + if (this.phrases[i].name === name) { + this.phrases[i].sequence = array; + } + } + }; + p5.Part.prototype.incrementStep = function (time) { + if (this.partStep < this.length - 1) { + this.callback(time); + this.partStep += 1; + } else { + if (!this.looping && this.partStep === this.length - 1) { + console.log('done'); + // this.callback(time); + this.onended(); + } + } + }; + /** + * Fire a callback function at every step. + * + * @method onStep + * @param {Function} callback The name of the callback + * you want to fire + * on every beat/tatum. + */ + p5.Part.prototype.onStep = function (callback) { + this.callback = callback; + }; + // =============== + // p5.Score + // =============== + /** + * A Score consists of a series of Parts. The parts will + * be played back in order. For example, you could have an + * A part, a B part, and a C part, and play them back in this order + * new p5.Score(a, a, b, a, c) + * + * @class p5.Score + * @constructor + * @param {p5.Part} [...parts] One or multiple parts, to be played in sequence. + */ + p5.Score = function () { + // for all of the arguments + this.parts = []; + this.currentPart = 0; + var thisScore = this; + for (var i in arguments) { + if (arguments[i] && this.parts[i]) { + this.parts[i] = arguments[i]; + this.parts[i].nextPart = this.parts[i + 1]; + this.parts[i].onended = function () { + thisScore.resetPart(i); + playNextPart(thisScore); + }; + } + } + this.looping = false; + }; + p5.Score.prototype.onended = function () { + if (this.looping) { + // this.resetParts(); + this.parts[0].start(); + } else { + this.parts[this.parts.length - 1].onended = function () { + this.stop(); + this.resetParts(); + }; + } + this.currentPart = 0; + }; + /** + * Start playback of the score. + * + * @method start + */ + p5.Score.prototype.start = function () { + this.parts[this.currentPart].start(); + this.scoreStep = 0; + }; + /** + * Stop playback of the score. + * + * @method stop + */ + p5.Score.prototype.stop = function () { + this.parts[this.currentPart].stop(); + this.currentPart = 0; + this.scoreStep = 0; + }; + /** + * Pause playback of the score. + * + * @method pause + */ + p5.Score.prototype.pause = function () { + this.parts[this.currentPart].stop(); + }; + /** + * Loop playback of the score. + * + * @method loop + */ + p5.Score.prototype.loop = function () { + this.looping = true; + this.start(); + }; + /** + * Stop looping playback of the score. If it + * is currently playing, this will go into effect + * after the current round of playback completes. 
+ * + * @method noLoop + */ + p5.Score.prototype.noLoop = function () { + this.looping = false; + }; + p5.Score.prototype.resetParts = function () { + var self = this; + this.parts.forEach(function (part) { + self.resetParts[part]; + }); + }; + p5.Score.prototype.resetPart = function (i) { + this.parts[i].stop(); + this.parts[i].partStep = 0; + for (var p in this.parts[i].phrases) { + if (this.parts[i]) { + this.parts[i].phrases[p].phraseStep = 0; + } + } + }; + /** + * Set the tempo for all parts in the score + * + * @param {Number} BPM Beats Per Minute + * @param {Number} rampTime Seconds from now + */ + p5.Score.prototype.setBPM = function (bpm, rampTime) { + for (var i in this.parts) { + if (this.parts[i]) { + this.parts[i].setBPM(bpm, rampTime); + } + } + }; + function playNextPart(aScore) { + aScore.currentPart++; + if (aScore.currentPart >= aScore.parts.length) { + aScore.scoreStep = 0; + aScore.onended(); + } else { + aScore.scoreStep = 0; + aScore.parts[aScore.currentPart - 1].stop(); + aScore.parts[aScore.currentPart].start(); + } + } +}(master); +var compressor; +compressor = function () { + 'use strict'; + var p5sound = master; + var Effect = effect; + var CustomError = errorHandler; + /** + * Compressor is an audio effect class that performs dynamics compression + * on an audio input source. This is a very commonly used technique in music + * and sound production. Compression creates an overall louder, richer, + * and fuller sound by lowering the volume of louds and raising that of softs. + * Compression can be used to avoid clipping (sound distortion due to + * peaks in volume) and is especially useful when many sounds are played + * at once. Compression can be used on indivudal sound sources in addition + * to the master output. + * + * This class extends p5.Effect. + * Methods amp(), chain(), + * drywet(), connect(), and + * disconnect() are available. + * + * @class p5.Compressor + * @constructor + * @extends p5.Effect + * + * + */ + p5.Compressor = function () { + Effect.call(this); + /** + * The p5.Compressor is built with a Web Audio Dynamics Compressor Node + * + * @property {WebAudioNode} compressor + */ + this.compressor = this.ac.createDynamicsCompressor(); + this.input.connect(this.compressor); + this.compressor.connect(this.wet); + }; + p5.Compressor.prototype = Object.create(Effect.prototype); + /** + * Performs the same function as .connect, but also accepts + * optional parameters to set compressor's audioParams + * @method process + * + * @param {Object} src Sound source to be connected + * + * @param {Number} [attack] The amount of time (in seconds) to reduce the gain by 10dB, + * default = .003, range 0 - 1 + * @param {Number} [knee] A decibel value representing the range above the + * threshold where the curve smoothly transitions to the "ratio" portion. + * default = 30, range 0 - 40 + * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output + * default = 12, range 1 - 20 + * @param {Number} [threshold] The decibel value above which the compression will start taking effect + * default = -24, range -100 - 0 + * @param {Number} [release] The amount of time (in seconds) to increase the gain by 10dB + * default = .25, range 0 - 1 + */ + p5.Compressor.prototype.process = function (src, attack, knee, ratio, threshold, release) { + src.connect(this.input); + this.set(attack, knee, ratio, threshold, release); + }; + /** + * Set the paramters of a compressor. 
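+ *
+ * A minimal usage sketch (hedged: 'soundFile' stands in for any p5.sound
+ * source, e.g. a p5.SoundFile created elsewhere):
+ *
+ *   var comp = new p5.Compressor();
+ *   comp.process(soundFile);                // route the source through the compressor
+ *   comp.set(0.005, 30, 12, -40, 0.25);     // attack, knee, ratio, threshold, release
+ *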
+ * @method set + * @param {Number} attack The amount of time (in seconds) to reduce the gain by 10dB, + * default = .003, range 0 - 1 + * @param {Number} knee A decibel value representing the range above the + * threshold where the curve smoothly transitions to the "ratio" portion. + * default = 30, range 0 - 40 + * @param {Number} ratio The amount of dB change in input for a 1 dB change in output + * default = 12, range 1 - 20 + * @param {Number} threshold The decibel value above which the compression will start taking effect + * default = -24, range -100 - 0 + * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB + * default = .25, range 0 - 1 + */ + p5.Compressor.prototype.set = function (attack, knee, ratio, threshold, release) { + if (typeof attack !== 'undefined') { + this.attack(attack); + } + if (typeof knee !== 'undefined') { + this.knee(knee); + } + if (typeof ratio !== 'undefined') { + this.ratio(ratio); + } + if (typeof threshold !== 'undefined') { + this.threshold(threshold); + } + if (typeof release !== 'undefined') { + this.release(release); + } + }; + /** + * Get current attack or set value w/ time ramp + * + * + * @method attack + * @param {Number} [attack] Attack is the amount of time (in seconds) to reduce the gain by 10dB, + * default = .003, range 0 - 1 + * @param {Number} [time] Assign time value to schedule the change in value + */ + p5.Compressor.prototype.attack = function (attack, time) { + var t = time || 0; + if (typeof attack == 'number') { + this.compressor.attack.value = attack; + this.compressor.attack.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.compressor.attack.linearRampToValueAtTime(attack, this.ac.currentTime + 0.02 + t); + } else if (typeof attack !== 'undefined') { + attack.connect(this.compressor.attack); + } + return this.compressor.attack.value; + }; + /** + * Get current knee or set value w/ time ramp + * + * @method knee + * @param {Number} [knee] A decibel value representing the range above the + * threshold where the curve smoothly transitions to the "ratio" portion. 
+ * default = 30, range 0 - 40 + * @param {Number} [time] Assign time value to schedule the change in value + */ + p5.Compressor.prototype.knee = function (knee, time) { + var t = time || 0; + if (typeof knee == 'number') { + this.compressor.knee.value = knee; + this.compressor.knee.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.compressor.knee.linearRampToValueAtTime(knee, this.ac.currentTime + 0.02 + t); + } else if (typeof knee !== 'undefined') { + knee.connect(this.compressor.knee); + } + return this.compressor.knee.value; + }; + /** + * Get current ratio or set value w/ time ramp + * @method ratio + * + * @param {Number} [ratio] The amount of dB change in input for a 1 dB change in output + * default = 12, range 1 - 20 + * @param {Number} [time] Assign time value to schedule the change in value + */ + p5.Compressor.prototype.ratio = function (ratio, time) { + var t = time || 0; + if (typeof ratio == 'number') { + this.compressor.ratio.value = ratio; + this.compressor.ratio.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.compressor.ratio.linearRampToValueAtTime(ratio, this.ac.currentTime + 0.02 + t); + } else if (typeof ratio !== 'undefined') { + ratio.connect(this.compressor.ratio); + } + return this.compressor.ratio.value; + }; + /** + * Get current threshold or set value w/ time ramp + * @method threshold + * + * @param {Number} threshold The decibel value above which the compression will start taking effect + * default = -24, range -100 - 0 + * @param {Number} [time] Assign time value to schedule the change in value + */ + p5.Compressor.prototype.threshold = function (threshold, time) { + var t = time || 0; + if (typeof threshold == 'number') { + this.compressor.threshold.value = threshold; + this.compressor.threshold.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.compressor.threshold.linearRampToValueAtTime(threshold, this.ac.currentTime + 0.02 + t); + } else if (typeof threshold !== 'undefined') { + threshold.connect(this.compressor.threshold); + } + return this.compressor.threshold.value; + }; + /** + * Get current release or set value w/ time ramp + * @method release + * + * @param {Number} release The amount of time (in seconds) to increase the gain by 10dB + * default = .25, range 0 - 1 + * + * @param {Number} [time] Assign time value to schedule the change in value + */ + p5.Compressor.prototype.release = function (release, time) { + var t = time || 0; + if (typeof release == 'number') { + this.compressor.release.value = release; + this.compressor.release.cancelScheduledValues(this.ac.currentTime + 0.01 + t); + this.compressor.release.linearRampToValueAtTime(release, this.ac.currentTime + 0.02 + t); + } else if (typeof number !== 'undefined') { + release.connect(this.compressor.release); + } + return this.compressor.release.value; + }; + /** + * Return the current reduction value + * @return {Number} Value of the amount of gain reduction that is applied to the signal + */ + p5.Compressor.prototype.reduction = function () { + return this.compressor.reduction.value; + }; + p5.Compressor.prototype.dispose = function () { + Effect.prototype.dispose.apply(this); + this.compressor.disconnect(); + this.compressor = undefined; + }; + return p5.Compressor; +}(master, effect, errorHandler); +var soundRecorder; +'use strict'; +soundRecorder = function () { + var p5sound = master; + var ac = p5sound.audiocontext; + /** + *

Record sounds for playback and/or to save as a .wav file. + * The p5.SoundRecorder records all sound output from your sketch, + * or can be assigned a specific source with setInput(). + *
+ * The record() method accepts a p5.SoundFile as a parameter. + * When playback is stopped (either after the given amount of time, + * or with the stop() method), the p5.SoundRecorder will send its + * recording to that p5.SoundFile for playback.
+ * + * @class p5.SoundRecorder + * @constructor + * @example + *
+ * var mic, recorder, soundFile; + * var state = 0; + * + * function setup() { + * background(200); + * // create an audio in + * mic = new p5.AudioIn(); + * + * // prompts user to enable their browser mic + * mic.start(); + * + * // create a sound recorder + * recorder = new p5.SoundRecorder(); + * + * // connect the mic to the recorder + * recorder.setInput(mic); + * + * // this sound file will be used to + * // playback & save the recording + * soundFile = new p5.SoundFile(); + * + * text('keyPress to record', 20, 20); + * } + * + * function keyPressed() { + * // make sure user enabled the mic + * if (state === 0 && mic.enabled) { + * + * // record to our p5.SoundFile + * recorder.record(soundFile); + * + * background(255,0,0); + * text('Recording!', 20, 20); + * state++; + * } + * else if (state === 1) { + * background(0,255,0); + * + * // stop recorder and + * // send result to soundFile + * recorder.stop(); + * + * text('Stopped', 20, 20); + * state++; + * } + * + * else if (state === 2) { + * soundFile.play(); // play the result! + * save(soundFile, 'mySound.wav'); + * state++; + * } + * } + *
+ */ + p5.SoundRecorder = function () { + this.input = ac.createGain(); + this.output = ac.createGain(); + this.recording = false; + this.bufferSize = 1024; + this._channels = 2; + // stereo (default) + this._clear(); + // initialize variables + this._jsNode = ac.createScriptProcessor(this.bufferSize, this._channels, 2); + this._jsNode.onaudioprocess = this._audioprocess.bind(this); + /** + * callback invoked when the recording is over + * @private + * @type Function(Float32Array) + */ + this._callback = function () { + }; + // connections + this._jsNode.connect(p5.soundOut._silentNode); + this.setInput(); + // add this p5.SoundFile to the soundArray + p5sound.soundArray.push(this); + }; + /** + * Connect a specific device to the p5.SoundRecorder. + * If no parameter is given, p5.SoundRecorer will record + * all audible p5.sound from your sketch. + * + * @method setInput + * @param {Object} [unit] p5.sound object or a web audio unit + * that outputs sound + */ + p5.SoundRecorder.prototype.setInput = function (unit) { + this.input.disconnect(); + this.input = null; + this.input = ac.createGain(); + this.input.connect(this._jsNode); + this.input.connect(this.output); + if (unit) { + unit.connect(this.input); + } else { + p5.soundOut.output.connect(this.input); + } + }; + /** + * Start recording. To access the recording, provide + * a p5.SoundFile as the first parameter. The p5.SoundRecorder + * will send its recording to that p5.SoundFile for playback once + * recording is complete. Optional parameters include duration + * (in seconds) of the recording, and a callback function that + * will be called once the complete recording has been + * transfered to the p5.SoundFile. + * + * @method record + * @param {p5.SoundFile} soundFile p5.SoundFile + * @param {Number} [duration] Time (in seconds) + * @param {Function} [callback] The name of a function that will be + * called once the recording completes + */ + p5.SoundRecorder.prototype.record = function (sFile, duration, callback) { + this.recording = true; + if (duration) { + this.sampleLimit = Math.round(duration * ac.sampleRate); + } + if (sFile && callback) { + this._callback = function () { + this.buffer = this._getBuffer(); + sFile.setBuffer(this.buffer); + callback(); + }; + } else if (sFile) { + this._callback = function () { + this.buffer = this._getBuffer(); + sFile.setBuffer(this.buffer); + }; + } + }; + /** + * Stop the recording. Once the recording is stopped, + * the results will be sent to the p5.SoundFile that + * was given on .record(), and if a callback function + * was provided on record, that function will be called. + * + * @method stop + */ + p5.SoundRecorder.prototype.stop = function () { + this.recording = false; + this._callback(); + this._clear(); + }; + p5.SoundRecorder.prototype._clear = function () { + this._leftBuffers = []; + this._rightBuffers = []; + this.recordedSamples = 0; + this.sampleLimit = null; + }; + /** + * internal method called on audio process + * + * @private + * @param {AudioProcessorEvent} event + */ + p5.SoundRecorder.prototype._audioprocess = function (event) { + if (this.recording === false) { + return; + } else if (this.recording === true) { + // if we are past the duration, then stop... 
else: + if (this.sampleLimit && this.recordedSamples >= this.sampleLimit) { + this.stop(); + } else { + // get channel data + var left = event.inputBuffer.getChannelData(0); + var right = event.inputBuffer.getChannelData(1); + // clone the samples + this._leftBuffers.push(new Float32Array(left)); + this._rightBuffers.push(new Float32Array(right)); + this.recordedSamples += this.bufferSize; + } + } + }; + p5.SoundRecorder.prototype._getBuffer = function () { + var buffers = []; + buffers.push(this._mergeBuffers(this._leftBuffers)); + buffers.push(this._mergeBuffers(this._rightBuffers)); + return buffers; + }; + p5.SoundRecorder.prototype._mergeBuffers = function (channelBuffer) { + var result = new Float32Array(this.recordedSamples); + var offset = 0; + var lng = channelBuffer.length; + for (var i = 0; i < lng; i++) { + var buffer = channelBuffer[i]; + result.set(buffer, offset); + offset += buffer.length; + } + return result; + }; + p5.SoundRecorder.prototype.dispose = function () { + this._clear(); + // remove reference from soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + this._callback = function () { + }; + if (this.input) { + this.input.disconnect(); + } + this.input = null; + this._jsNode = null; + }; + /** + * Save a p5.SoundFile as a .wav audio file. + * + * @method saveSound + * @param {p5.SoundFile} soundFile p5.SoundFile that you wish to save + * @param {String} name name of the resulting .wav file. + */ + p5.prototype.saveSound = function (soundFile, name) { + var leftChannel, rightChannel; + leftChannel = soundFile.buffer.getChannelData(0); + // handle mono files + if (soundFile.buffer.numberOfChannels > 1) { + rightChannel = soundFile.buffer.getChannelData(1); + } else { + rightChannel = leftChannel; + } + var interleaved = interleave(leftChannel, rightChannel); + // create the buffer and view to create the .WAV file + var buffer = new window.ArrayBuffer(44 + interleaved.length * 2); + var view = new window.DataView(buffer); + // write the WAV container, + // check spec at: https://ccrma.stanford.edu/courses/422/projects/WaveFormat/ + // RIFF chunk descriptor + writeUTFBytes(view, 0, 'RIFF'); + view.setUint32(4, 36 + interleaved.length * 2, true); + writeUTFBytes(view, 8, 'WAVE'); + // FMT sub-chunk + writeUTFBytes(view, 12, 'fmt '); + view.setUint32(16, 16, true); + view.setUint16(20, 1, true); + // stereo (2 channels) + view.setUint16(22, 2, true); + view.setUint32(24, 44100, true); + view.setUint32(28, 44100 * 4, true); + view.setUint16(32, 4, true); + view.setUint16(34, 16, true); + // data sub-chunk + writeUTFBytes(view, 36, 'data'); + view.setUint32(40, interleaved.length * 2, true); + // write the PCM samples + var lng = interleaved.length; + var index = 44; + var volume = 1; + for (var i = 0; i < lng; i++) { + view.setInt16(index, interleaved[i] * (32767 * volume), true); + index += 2; + } + p5.prototype.writeFile([view], name, 'wav'); + }; + // helper methods to save waves + function interleave(leftChannel, rightChannel) { + var length = leftChannel.length + rightChannel.length; + var result = new Float32Array(length); + var inputIndex = 0; + for (var index = 0; index < length;) { + result[index++] = leftChannel[inputIndex]; + result[index++] = rightChannel[inputIndex]; + inputIndex++; + } + return result; + } + function writeUTFBytes(view, offset, string) { + var lng = string.length; + for (var i = 0; i < lng; i++) { + view.setUint8(offset + i, string.charCodeAt(i)); + } + } +}(sndcore, master); +var peakdetect; 
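+ /* A compact variation on the recorder example above (illustrative only; it
+    reuses that example's `recorder` and `soundFile` variables): record for a
+    fixed duration, then write the result as a 44.1 kHz, 16-bit stereo .wav.
+
+    recorder.record(soundFile, 5, function () {
+      saveSound(soundFile, 'take1.wav'); // 'take1.wav' is an arbitrary name
+    });
+ */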
+'use strict'; +peakdetect = function () { + /** + *

PeakDetect works in conjunction with p5.FFT to + * look for onsets in some or all of the frequency spectrum. + *
+ * To use p5.PeakDetect, call update in the draw loop + * and pass in a p5.FFT object. + *
+ * You can listen for a specific part of the frequency spectrum by + * setting the range between freq1 and freq2. + *
+ * threshold is the threshold for detecting a peak, + * scaled between 0 and 1. It is logarithmic, so 0.1 is half as loud + * as 1.0. + *
+ * The update method is meant to be run in the draw loop, and + * frames determines how many loops must pass before + * another peak can be detected. + * For example, if the frameRate() = 60, you could detect the beat of a + * 120 beat-per-minute song with this equation: + * framesPerPeak = 60 / (estimatedBPM / 60 ); + * (for 120 BPM at 60 fps this gives 60 / 2 = 30 frames). + *
+ * Based on an example contributed by @b2renger, and a simple beat detection + * explanation by Felix Turner. + *
+ * + * @class p5.PeakDetect + * @constructor + * @param {Number} [freq1] lowFrequency - defaults to 20Hz + * @param {Number} [freq2] highFrequency - defaults to 20000 Hz + * @param {Number} [threshold] Threshold for detecting a beat between 0 and 1 + * scaled logarithmically where 0.1 is 1/2 the loudness + * of 1.0. Defaults to 0.35. + * @param {Number} [framesPerPeak] Defaults to 20. + * @example + *
+ * + * var cnv, soundFile, fft, peakDetect; + * var ellipseWidth = 10; + * + * function setup() { + * background(0); + * noStroke(); + * fill(255); + * textAlign(CENTER); + * + * soundFile = loadSound('assets/beat.mp3'); + * + * // p5.PeakDetect requires a p5.FFT + * fft = new p5.FFT(); + * peakDetect = new p5.PeakDetect(); + * + * } + * + * function draw() { + * background(0); + * text('click to play/pause', width/2, height/2); + * + * // peakDetect accepts an fft post-analysis + * fft.analyze(); + * peakDetect.update(fft); + * + * if ( peakDetect.isDetected ) { + * ellipseWidth = 50; + * } else { + * ellipseWidth *= 0.95; + * } + * + * ellipse(width/2, height/2, ellipseWidth, ellipseWidth); + * } + * + * // toggle play/stop when canvas is clicked + * function mouseClicked() { + * if (mouseX > 0 && mouseX < width && mouseY > 0 && mouseY < height) { + * if (soundFile.isPlaying() ) { + * soundFile.stop(); + * } else { + * soundFile.play(); + * } + * } + * } + *
+ */ + p5.PeakDetect = function (freq1, freq2, threshold, _framesPerPeak) { + // framesPerPeak determines how often to look for a beat. + // If a beat is provided, try to look for a beat based on bpm + this.framesPerPeak = _framesPerPeak || 20; + this.framesSinceLastPeak = 0; + this.decayRate = 0.95; + this.threshold = threshold || 0.35; + this.cutoff = 0; + // how much to increase the cutoff + // TO DO: document this / figure out how to make it accessible + this.cutoffMult = 1.5; + this.energy = 0; + this.penergy = 0; + // TO DO: document this property / figure out how to make it accessible + this.currentValue = 0; + /** + * isDetected is set to true when a peak is detected. + * + * @attribute isDetected {Boolean} + * @default false + */ + this.isDetected = false; + this.f1 = freq1 || 40; + this.f2 = freq2 || 20000; + // function to call when a peak is detected + this._onPeak = function () { + }; + }; + /** + * The update method is run in the draw loop. + * + * Accepts an FFT object. You must call .analyze() + * on the FFT object prior to updating the peakDetect + * because it relies on a completed FFT analysis. + * + * @method update + * @param {p5.FFT} fftObject A p5.FFT object + */ + p5.PeakDetect.prototype.update = function (fftObject) { + var nrg = this.energy = fftObject.getEnergy(this.f1, this.f2) / 255; + if (nrg > this.cutoff && nrg > this.threshold && nrg - this.penergy > 0) { + // trigger callback + this._onPeak(); + this.isDetected = true; + // debounce + this.cutoff = nrg * this.cutoffMult; + this.framesSinceLastPeak = 0; + } else { + this.isDetected = false; + if (this.framesSinceLastPeak <= this.framesPerPeak) { + this.framesSinceLastPeak++; + } else { + this.cutoff *= this.decayRate; + this.cutoff = Math.max(this.cutoff, this.threshold); + } + } + this.currentValue = nrg; + this.penergy = nrg; + }; + /** + * onPeak accepts two arguments: a function to call when + * a peak is detected. The value of the peak, + * between 0.0 and 1.0, is passed to the callback. + * + * @method onPeak + * @param {Function} callback Name of a function that will + * be called when a peak is + * detected. + * @param {Object} [val] Optional value to pass + * into the function when + * a peak is detected. + * @example + *
+ * var cnv, soundFile, fft, peakDetect; + * var ellipseWidth = 0; + * + * function setup() { + * cnv = createCanvas(100,100); + * textAlign(CENTER); + * + * soundFile = loadSound('assets/beat.mp3'); + * fft = new p5.FFT(); + * peakDetect = new p5.PeakDetect(); + * + * setupSound(); + * + * // when a beat is detected, call triggerBeat() + * peakDetect.onPeak(triggerBeat); + * } + * + * function draw() { + * background(0); + * fill(255); + * text('click to play', width/2, height/2); + * + * fft.analyze(); + * peakDetect.update(fft); + * + * ellipseWidth *= 0.95; + * ellipse(width/2, height/2, ellipseWidth, ellipseWidth); + * } + * + * // this function is called by peakDetect.onPeak + * function triggerBeat() { + * ellipseWidth = 50; + * } + * + * // mouseclick starts/stops sound + * function setupSound() { + * cnv.mouseClicked( function() { + * if (soundFile.isPlaying() ) { + * soundFile.stop(); + * } else { + * soundFile.play(); + * } + * }); + * } + *
+ */ + p5.PeakDetect.prototype.onPeak = function (callback, val) { + var self = this; + self._onPeak = function () { + callback(self.energy, val); + }; + }; +}(); +var gain; +'use strict'; +gain = function () { + var p5sound = master; + /** + * A gain node is usefull to set the relative volume of sound. + * It's typically used to build mixers. + * + * @class p5.Gain + * @constructor + * @example + *
+ * + * // load two soundfile and crossfade beetween them + * var sound1,sound2; + * var gain1, gain2, gain3; + * + * function preload(){ + * soundFormats('ogg', 'mp3'); + * sound1 = loadSound('../_files/Damscray_-_Dancing_Tiger_01'); + * sound2 = loadSound('../_files/beat.mp3'); + * } + * + * function setup() { + * createCanvas(400,200); + * + * // create a 'master' gain to which we will connect both soundfiles + * gain3 = new p5.Gain(); + * gain3.connect(); + * + * // setup first sound for playing + * sound1.rate(1); + * sound1.loop(); + * sound1.disconnect(); // diconnect from p5 output + * + * gain1 = new p5.Gain(); // setup a gain node + * gain1.setInput(sound1); // connect the first sound to its input + * gain1.connect(gain3); // connect its output to the 'master' + * + * sound2.rate(1); + * sound2.disconnect(); + * sound2.loop(); + * + * gain2 = new p5.Gain(); + * gain2.setInput(sound2); + * gain2.connect(gain3); + * + * } + * + * function draw(){ + * background(180); + * + * // calculate the horizontal distance beetween the mouse and the right of the screen + * var d = dist(mouseX,0,width,0); + * + * // map the horizontal position of the mouse to values useable for volume control of sound1 + * var vol1 = map(mouseX,0,width,0,1); + * var vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa + * + * gain1.amp(vol1,0.5,0); + * gain2.amp(vol2,0.5,0); + * + * // map the vertical position of the mouse to values useable for 'master volume control' + * var vol3 = map(mouseY,0,height,0,1); + * gain3.amp(vol3,0.5,0); + * } + *
+ * + */ + p5.Gain = function () { + this.ac = p5sound.audiocontext; + this.input = this.ac.createGain(); + this.output = this.ac.createGain(); + // otherwise, Safari distorts + this.input.gain.value = 0.5; + this.input.connect(this.output); + // add to the soundArray + p5sound.soundArray.push(this); + }; + /** + * Connect a source to the gain node. + * + * @method setInput + * @param {Object} src p5.sound / Web Audio object with a sound + * output. + */ + p5.Gain.prototype.setInput = function (src) { + src.connect(this.input); + }; + /** + * Send output to a p5.sound or web audio object + * + * @method connect + * @param {Object} unit + */ + p5.Gain.prototype.connect = function (unit) { + var u = unit || p5.soundOut.input; + this.output.connect(u.input ? u.input : u); + }; + /** + * Disconnect all output. + * + * @method disconnect + */ + p5.Gain.prototype.disconnect = function () { + this.output.disconnect(); + }; + /** + * Set the output level of the gain node. + * + * @method amp + * @param {Number} volume amplitude between 0 and 1.0 + * @param {Number} [rampTime] create a fade that lasts rampTime + * @param {Number} [timeFromNow] schedule this event to happen + * seconds from now + */ + p5.Gain.prototype.amp = function (vol, rampTime, tFromNow) { + var rampTime = rampTime || 0; + var tFromNow = tFromNow || 0; + var now = p5sound.audiocontext.currentTime; + var currentVol = this.output.gain.value; + this.output.gain.cancelScheduledValues(now); + this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow); + this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime); + }; + p5.Gain.prototype.dispose = function () { + // remove reference from soundArray + var index = p5sound.soundArray.indexOf(this); + p5sound.soundArray.splice(index, 1); + this.output.disconnect(); + this.input.disconnect(); + this.output = undefined; + this.input = undefined; + }; +}(master, sndcore); +var distortion; +'use strict'; +distortion = function () { + var Effect = effect; + /* + * Adapted from [Kevin Ennis on StackOverflow](http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion) + */ + function makeDistortionCurve(amount) { + var k = typeof amount === 'number' ? amount : 50; + var numSamples = 44100; + var curve = new Float32Array(numSamples); + var deg = Math.PI / 180; + var i = 0; + var x; + for (; i < numSamples; ++i) { + x = i * 2 / numSamples - 1; + curve[i] = (3 + k) * x * 20 * deg / (Math.PI + k * Math.abs(x)); + } + return curve; + } + /** + * A Distortion effect created with a Waveshaper Node, + * with an approach adapted from + * [Kevin Ennis](http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion) + * + * This class extends p5.Effect. + * Methods amp(), chain(), + * drywet(), connect(), and + * disconnect() are available. + * + * @class p5.Distortion + * @extends p5.Effect + * @constructor + * @param {Number} [amount=0.25] Unbounded distortion amount. + * Normal values range from 0-1. + * @param {String} [oversample='none'] 'none', '2x', or '4x'. 
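+ * + * A brief usage sketch (illustrative only, not from the original docs;
+ * mySound stands in for a loaded p5.SoundFile):
+ *
+ * var dist = new p5.Distortion(0.1, '2x');
+ * mySound.disconnect();  // detach from the master output
+ * dist.process(mySound); // run the sound through the waveshaper curve
+ * dist.set(0.4);         // increase the amount later if desired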
+ * + */ + p5.Distortion = function (amount, oversample) { + Effect.call(this); + if (typeof amount === 'undefined') { + amount = 0.25; + } + if (typeof amount !== 'number') { + throw new Error('amount must be a number'); + } + if (typeof oversample === 'undefined') { + oversample = '2x'; + } + if (typeof oversample !== 'string') { + throw new Error('oversample must be a String'); + } + var curveAmount = p5.prototype.map(amount, 0, 1, 0, 2000); + /** + * The p5.Distortion is built with a + * + * Web Audio WaveShaper Node. + * + * @property {AudioNode} WaveShaperNode + */ + this.waveShaperNode = this.ac.createWaveShaper(); + this.amount = curveAmount; + this.waveShaperNode.curve = makeDistortionCurve(curveAmount); + this.waveShaperNode.oversample = oversample; + this.input.connect(this.waveShaperNode); + this.waveShaperNode.connect(this.wet); + }; + p5.Distortion.prototype = Object.create(Effect.prototype); + /** + * Process a sound source, optionally specify amount and oversample values. + * + * @method process + * @param {Number} [amount=0.25] Unbounded distortion amount. + * Normal values range from 0-1. + * @param {String} [oversample='none'] 'none', '2x', or '4x'. + */ + p5.Distortion.prototype.process = function (src, amount, oversample) { + src.connect(this.input); + this.set(amount, oversample); + }; + /** + * Set the amount and oversample of the waveshaper distortion. + * + * @method set + * @param {Number} [amount=0.25] Unbounded distortion amount. + * Normal values range from 0-1. + * @param {String} [oversample='none'] 'none', '2x', or '4x'. + */ + p5.Distortion.prototype.set = function (amount, oversample) { + if (amount) { + var curveAmount = p5.prototype.map(amount, 0, 1, 0, 2000); + this.amount = curveAmount; + this.waveShaperNode.curve = makeDistortionCurve(curveAmount); + } + if (oversample) { + this.waveShaperNode.oversample = oversample; + } + }; + /** + * Return the distortion amount, typically between 0-1. + * + * @method getAmount + * @return {Number} Unbounded distortion amount. + * Normal values range from 0-1. + */ + p5.Distortion.prototype.getAmount = function () { + return this.amount; + }; + /** + * Return the oversampling. + * + * @method getOversample + * + * @return {String} Oversample can either be 'none', '2x', or '4x'. 
+ */ + p5.Distortion.prototype.getOversample = function () { + return this.waveShaperNode.oversample; + }; + p5.Distortion.prototype.dispose = function () { + Effect.prototype.dispose.apply(this); + this.waveShaperNode.disconnect(); + this.waveShaperNode = null; + }; +}(effect); +var src_app; +'use strict'; +src_app = function () { + var p5SOUND = sndcore; + return p5SOUND; +}(sndcore, master, helpers, errorHandler, panner, soundfile, amplitude, fft, signal, oscillator, env, pulse, noise, audioin, filter, eq, panner3d, listener3d, delay, reverb, metro, looper, compressor, soundRecorder, peakdetect, gain, distortion); +})); \ No newline at end of file diff --git a/brendafernanda-anna/pinheiro.png b/brendafernanda-anna/pinheiro.png new file mode 100644 index 0000000..f62710d Binary files /dev/null and b/brendafernanda-anna/pinheiro.png differ diff --git a/brendafernanda-anna/script.js b/brendafernanda-anna/script.js new file mode 100644 index 0000000..217e195 --- /dev/null +++ b/brendafernanda-anna/script.js @@ -0,0 +1,259 @@ +/** + * p5js boilerplate + * + * + */ + +var neve;//Declara as variaveis +var boneco; +var cadente = null; +var pinheiro; +var rosaClaro; +var rosa; +var lilas; +var vermelho; +var estrelinha; + +var floco_1, + pos_rosaClaro, + pos_rosa, + pos_lilas, + pos_vermelho; + +var musica; + +var x, y; + +var pos_mouse_anterior = 0; +var pos_mouse_atual = 0; + +var lista_flocos = []; + +//Carrega arquivos externos para dentro do codigo +function preload() { + neve = loadImage('ceu.jpg');//Carrega a imagem de fundo + boneco = loadImage('bonecodeneve.png');//Carrega a imagem do boneco de neve + cadente = loadImage('estrela.png');//Carrega a imagem da estrela + pinheiro = loadImage('pinheiro.png');//Carrega a imagem da arvore + rosaClaro = loadImage('floco1.png');//Carrega a imagem do floco 1 + estrelinha = loadImage("estrelinha.png"); + rosa = loadImage('floco2.png');//Carrega a imagem do floco 2 + lilas = loadImage('floco3.png');//Carreg a imagem do floco 3 + vermelho = loadImage('floco4.png');//Carrega a imagem do floco 4 + soundFormats('mp3');//Define o formato do som + musica = loadSound('musica.mp3');//Carrega a musica que vai tocar +} + +// chamada no inicio do programa +function setup() { + // cria o quadro, com dimensoes 900 x 400 + createCanvas(1335, 620); + + x = 40;//Posicao do x da estrela + y = 10;//Posicao do y da estrela + + //Cria o vetor + //Escreve as coordenadas para desenhar o floco + pos_rosaCalaro = createVector(325, 125); + // floco_1 = new FlocoUm(325, 125, 55, -1); // -1 gera um erro + floco_1 = new FlocoUm(425, 85, 55, 3, random(1, 3));//Posicao x e y, tamanho e velocidade em que o floco vai girar + outroFlocoUm = new FlocoUm(135, 35, 85, 8, random(1, 3));//Desenha outro floco; posicao x e y, tamanho e velocidade + + pos_rosa = createVector(525, 135); + flocoDois = new FlocoDois(65, 90, 50, 9, random(1, 3));//Posicao x e y, tamanho e velocidade + outroFlocoDois = new FlocoDois(495, 50, 80, 2, random(1, 3));//Desenha outro floco; posicao x e y, tamanho e velocidade + + pos_lilas = createVector(435, 145); + flocoTres = new FlocoTres(600, 95, 35, 5, random(1, 3)); + outroFlocoTres = new FlocoTres(330, 55, 65, 7, random(1, 3)); + + pos_vermelho = createVector(675, 155); + flocoQuatro = new FlocoQuatro(245, 85, 45, 4, random(1, 3)); + outroFlocoQuatro = new FlocoQuatro(680, 55, 75, 6, random(1, 3)); + + musica.setVolume(1.0);//Define o volume da musica + musica.loop();//Faz a musica repitir, quando terminar + + var floco; + for(var i = 0; i < 100; i++) { + if 
(i % 4 == 0) floco = new FlocoUm(random(0, width), random(0, height), random(20, 80), random(0, 10), random(1, 3)); + if (i % 4 == 1) floco = new FlocoDois(random(0, width), random(0, height), random(20, 80), random(0, 10), random(1, 3)); + if (i % 4 == 2) floco = new FlocoTres(random(0, width), random(0, height), random(20, 80), random(0, 10), random(1, 3)); + if (i % 4 == 3) floco = new FlocoQuatro(random(0, width), random(0, height), random(20, 80), random(0, 10), random(1, 3)); + lista_flocos.push(floco); + } +} + +// chamada toda vez que o quadro for redesenhado +// ou seja, a cada frame da animacao +function draw() { + pos_mouse_atual = mouseX; + + if (pos_mouse_atual > pos_mouse_anterior) velocidade_vento = 1; + else if (pos_mouse_atual == pos_mouse_anterior) velocidade_vento = 0; + else velocidade_vento = -1; + + // pinta o fundo de preto + background(neve);//Coloca a imagem de fundo + + //Desenha as imagens, do boneco do pinheiro e da estrela e as posicoes x e y e o tamanho + image(boneco, 200, 440, 150, 150); + + image(cadente, x, y, 100, 100); + + x = x + 8; + //Velocidade + y = y + 2; + + //Faz a estrela ficar repetindo + if (x < 0) { + x = height; + } + + image(pinheiro, 1000, 277, 300, 320); + image(estrelinha, 1130, 265, 50, 50); + + //Declara a funcao para desenhar os flocos + floco_1.desenha(); + floco_1.mover(velocidade_vento); + outroFlocoUm.desenha(); + outroFlocoUm.mover(velocidade_vento); + + flocoDois.desenha();//image(rosa, pos_rosa.x, pos_rosa.y, 50,50); + flocoDois.mover(velocidade_vento); + outroFlocoDois.desenha(); + outroFlocoDois.mover(velocidade_vento); + + flocoTres.desenha();//image(lilas, pos_lilas.x, pos_lilas.y, 35,35); + flocoTres.mover(velocidade_vento); + outroFlocoTres.desenha(); + outroFlocoTres.mover(velocidade_vento); + + flocoQuatro.desenha();//image(vermelho, pos_vermelho.x, pos_vermelho.y, 45,45); + flocoQuatro.mover(velocidade_vento); + outroFlocoQuatro.desenha(); + outroFlocoQuatro.mover(velocidade_vento); + + if (frameCount % 60 == 0) pos_mouse_anterior = mouseX; + + for(var i = 0; i < lista_flocos.length; i++) { + lista_flocos[i].desenha(); + lista_flocos[i].mover(velocidade_vento); + } + +} + +//Cria a funcao para desenhar o floco +//usa as variaveis com a posicao, tamanho e velocidade do giro, que ja estao armazenadas +function FlocoUm(x, y, tamanho, velocidade_giro, velocidade_queda) { + if (velocidade_giro < 0 || velocidade_giro > 10) throw "velocidade_giro deve estar entre 0 e 10";//Envia um erro em relacao a velocidade do giro + this.pos = createVector(x, y);//Determina a posicao do floco + this.angulo = 0;//Determina o angulo em que o floco vai girar + this.tamanho = tamanho;//Determina o tamanho do floco + this.velocidade_giro = velocidade_giro;//Determina a velocidade com que o floco vai girar + this.velocidade_queda = velocidade_queda; + return this;//Retorna o floco, com a posicao, angulo, tamanho e velocidade do giro +} + +//Cria a classe para desenhar o floco +FlocoUm.prototype.desenha = function () { + push();//Inicia a transformacao + translate(this.pos.x + this.tamanho / 2, this.pos.y + this.tamanho / 2);//Muda o eixo do floco em relacao a posicao e ao tamanho + rotate(this.angulo);//Define o giro do floco em relacao ao angulo + image(rosaClaro, -this.tamanho / 2, -this.tamanho / 2, this.tamanho, this.tamanho);//Desenha e define o local em que o floco vai estar + this.angulo += 0.01 * this.velocidade_giro/5;//Faz o floco girar + pop();//Descarta as alteracoes feitas anteriormente pela transformacao +} + +//Cria a classe para 
desenhar o floco +FlocoUm.prototype.mover = function (velocidade_vento) { + this.pos.y += this.velocidade_queda; + if (this.pos.y > height) this.pos.y = -this.tamanho; + this.pos.x += velocidade_vento; + if (this.pos.x > width + this.tamanho) this.pos.x = -this.tamanho; + if (this.pos.x < -this.tamanho) this.pos.x = width + this.tamanho; +} + +function FlocoDois(x, y, tamanho, velocidade_giro, velocidade_queda) { + this.pos = createVector(x, y); + this.angulo = 0; + this.tamanho = tamanho; + this.velocidade_giro = velocidade_giro; + this.velocidade_queda = velocidade_queda; + return this; +} + +FlocoDois.prototype.desenha = function () { + push(); + translate(this.pos.x + this.tamanho / 2, this.pos.y + this.tamanho / 2); + rotate(this.angulo); + image(rosa, -this.tamanho / 2, -this.tamanho / 2, this.tamanho, this.tamanho); + this.angulo += 0.01 * this.velocidade_giro/5; + pop(); +} + +FlocoDois.prototype.mover = function (velocidade_vento) { + this.pos.y += this.velocidade_queda; + if (this.pos.y > height) this.pos.y = -this.tamanho; + this.pos.x += velocidade_vento; + if (this.pos.x > width + this.tamanho) this.pos.x = -this.tamanho; + if (this.pos.x < -this.tamanho) this.pos.x = width + this.tamanho; +} + +function FlocoTres(x, y, tamanho, velocidade_giro, velocidade_queda) { + this.pos = createVector(x, y); + this.angulo = 0; + this.tamanho = tamanho; + this.velocidade_giro = velocidade_giro; + this.velocidade_queda = velocidade_queda; + return this; +} + +FlocoTres.prototype.desenha = function () { + push(); + translate(this.pos.x + this.tamanho / 2, this.pos.y + this.tamanho / 2); + rotate(this.angulo); + image(lilas, -this.tamanho / 2, -this.tamanho / 2, this.tamanho, this.tamanho); + this.angulo += 0.01 * this.velocidade_giro/5; + pop(); +} + +FlocoTres.prototype.mover = function (velocidade_vento) { + this.pos.y += this.velocidade_queda; + if (this.pos.y > height) this.pos.y = -this.tamanho; + this.pos.x += velocidade_vento; + if (this.pos.x > width + this.tamanho) this.pos.x = -this.tamanho; + if (this.pos.x < -this.tamanho) this.pos.x = width + this.tamanho; +} + +function FlocoQuatro(x, y, tamanho, velocidade_giro, velocidade_queda) { + this.pos = createVector(x, y); + this.angulo = 0; + this.tamanho = tamanho; + this.velocidade_giro = velocidade_giro; + this.velocidade_queda = velocidade_queda; + return this; +} + +FlocoQuatro.prototype.desenha = function () { + push(); + translate(this.pos.x + this.tamanho / 2, this.pos.y + this.tamanho / 2); + rotate(this.angulo); + image(vermelho, -this.tamanho / 2, -this.tamanho / 2, this.tamanho, this.tamanho); + this.angulo += 0.01 * this.velocidade_giro/5; + pop(); +} + +FlocoQuatro.prototype.mover = function (velocidade_vento) { + this.pos.y += this.velocidade_queda; + if (this.pos.y > height) this.pos.y = -this.tamanho; + this.pos.x += velocidade_vento; + if (this.pos.x > width + this.tamanho) this.pos.x = -this.tamanho; + if (this.pos.x < -this.tamanho) this.pos.x = width + this.tamanho; +} + +//Funcao acionada toda vez que o mouse for pressionado +function mousePressed() { + x = 40;//Cada vez que pressiona o mouse posicao x recebe o valor + y = 10;//Cada vez que pressiona o mouse posicao y recebe o valor +} \ No newline at end of file
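// The four Floco* constructors above differ only in the image they draw; a
// single constructor that takes the image as a parameter expresses the same
// falling/spinning behaviour with less repetition. Illustrative sketch only,
// not part of the submitted patch:

function Floco(img, x, y, tamanho, velocidade_giro, velocidade_queda) {
  this.img = img;                         // which snowflake image to draw
  this.pos = createVector(x, y);
  this.angulo = 0;
  this.tamanho = tamanho;
  this.velocidade_giro = velocidade_giro;
  this.velocidade_queda = velocidade_queda;
}

Floco.prototype.desenha = function () {
  push();
  translate(this.pos.x + this.tamanho / 2, this.pos.y + this.tamanho / 2);
  rotate(this.angulo);
  image(this.img, -this.tamanho / 2, -this.tamanho / 2, this.tamanho, this.tamanho);
  this.angulo += 0.01 * this.velocidade_giro / 5;
  pop();
};

Floco.prototype.mover = function (velocidade_vento) {
  this.pos.y += this.velocidade_queda;
  if (this.pos.y > height) this.pos.y = -this.tamanho;
  this.pos.x += velocidade_vento;
  if (this.pos.x > width + this.tamanho) this.pos.x = -this.tamanho;
  if (this.pos.x < -this.tamanho) this.pos.x = width + this.tamanho;
};

// Usage inside setup(), replacing the i % 4 branches:
// lista_flocos.push(new Floco(random([rosaClaro, rosa, lilas, vermelho]),
//   random(0, width), random(0, height), random(20, 80), random(0, 10), random(1, 3)));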