Merge branch 'dev'

Yotam Mann 2017-09-16 15:35:09 -04:00
commit e0411838b1
57 changed files with 1654 additions and 24345 deletions

53
.gitignore vendored
View file

@ -1,56 +1,23 @@
.DS_Store
*.asd
*.scssc
examples/scratch.html
*.sublime-workspace
*.sublime-project
# grunt modules
node_modules
gulp/description
TODO.txt
# all the npm stuff
utils/npm/Tone
utils/npm/build/Tone.js
utils/npm/build/Tone.min.js
utils/npm/build/Tone.Preset.js
utils/npm/README.md
utils/jsdoc/*.json
examples/deps/FileSaver.js
examples/oscilloscope.html
.idea
wiki
test/performance
examples/crashes.html
examples/style/examples.css.map
examples/deps/Tone.dat.gui.js
examples/deps/dat.gui.js
test/mainTest.js
test/Main.js
build/p5.Tone.min.js
build/p5.Tone.js
.DS_Store
examples/scratch.html
examples/deps/FileSaver.js
examples/oscilloscope.html
examples/graph.html
*.asd
test/performance
test/mainTest.js
test/Main.js
test/supports.html
test/coverage/
build/*

View file

@ -2,17 +2,45 @@ sudo: false
dist: trusty
language: node_js
node_js:
- "8"
- '8'
addons:
chrome: stable
before_script:
- cd gulp
- npm install -g karma
- npm install -g gulp
- npm install
- git config --global user.email "travis@travis-ci.org"
- git config --global user.name "Travis CI"
- cd gulp
- npm install -g jsdoc
- npm install -g karma
- npm install -g gulp
- npm install
- git config --global user.email "travis@travis-ci.org"
- git config --global user.name "Travis CI"
script: gulp travis-test
after_success:
- sh success.sh
- sh success.sh
before_deploy:
- node increment_version.js
- cd ../
deploy:
- provider: npm
skip_cleanup: true
email: yotammann@gmail.com
api_key: $NPM_TOKEN
tag: next
on:
repo: Tonejs/Tone.js
branch: dev
# publish without @next when pushing on master
- provider: npm
skip_cleanup: true
email: yotammann@gmail.com
api_key: $NPM_TOKEN
on:
repo: Tonejs/Tone.js
branch: master
# publish build files for releases
- provider: releases
api-key: $GH_TOKEN
file_glob: true
file: build/*
skip_cleanup: true
on:
tags: true

View file

@ -3,12 +3,16 @@
* [Code coverage](https://coveralls.io/github/Tonejs/Tone.js) analysis
* [Dev build](https://tonejs.github.io/build/dev/Tone.js) with each successful commit
* [Versioned docs](https://tonejs.github.io/docs/Tone) plus a [dev build of the docs](https://tonejs.github.io/docs/dev/Tone) on successful commits
* Tone.AudioNode is base class for all classes which generate or process audio
* [Tone.AudioNode](https://tonejs.github.io/docs/AudioNode) is the base class for all classes which generate or process audio
* [Tone.Sampler](https://tonejs.github.io/docs/Sampler) simplifies creating multisampled instruments
* [Tone.Solo](https://tonejs.github.io/docs/Solo) makes it easier to mute/solo audio
* [Mixer](https://tonejs.github.io/examples/#mixer) and [sampler](https://tonejs.github.io/examples/#sampler) examples
* Making type-checking methods static
* [Tone.TransportTimelineSignal](https://tonejs.github.io/docs/TransportTimelineSignal) is a signal which can be scheduled along the Transport
* [Tone.FFT](https://tonejs.github.io/docs/FFT) and [Tone.Waveform](https://tonejs.github.io/docs/Waveform) abstract Tone.Analyser (see the sketch below)
* [Tone.Meter](https://tonejs.github.io/docs/Meter) returns decibels
* [Tone.Envelope](https://tonejs.github.io/docs/Envelope) uses an exponential approach instead of an exponential ramp for the decay and release curves
* [Tone.BufferSource](https://tonejs.github.io/docs/BufferSource) fadeIn/fadeOut curve can be either "linear" or "exponential"
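For orientation, a minimal sketch of the new analysis API listed above (plain JavaScript; the oscillator is just a stand-in for any audio source):

var osc = new Tone.Oscillator().toMaster().start();
var fft = new Tone.FFT(32); //frequency bins, values in decibels
var waveform = new Tone.Waveform(1024); //time-domain samples between -1 and 1
var meter = new Tone.Meter();
osc.connect(fft);
osc.connect(waveform);
osc.connect(meter);
//read the values, typically inside a requestAnimationFrame loop
var bins = fft.getValue(); //Float32Array of decibel values
var samples = waveform.getValue(); //Float32Array in the range -1 to 1
var level = meter.getLevel(); //a single decibel value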
### r10

View file

@ -15,6 +15,7 @@ Tone.js is a Web Audio framework for creating interactive music in the browser.
* download [full](https://tonejs.github.io/build/Tone.js) | [min](https://tonejs.github.io/build/Tone.min.js)
* `npm install tone`
* dev -> `npm install tone@next`
[Full Installation Instructions](https://github.com/Tonejs/Tone.js/wiki/Installation).
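Once installed, the usual first test is a single synth note; a sketch assuming the r11 API in this build:

//via npm: var Tone = require("tone"); or include build/Tone.js with a script tag
var synth = new Tone.Synth().toMaster();
//play middle C for an eighth note
synth.triggerAttackRelease("C4", "8n");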

View file

@ -72,7 +72,7 @@ define(["Tone/core/Tone", "Tone/core/AudioNode"], function (Tone) {
};
/**
* Possible return types of Tone.Analyser.analyse()
* Possible return types of analyser.getValue()
* @enum {String}
*/
Tone.Analyser.Type = {
@ -85,7 +85,7 @@ define(["Tone/core/Tone", "Tone/core/AudioNode"], function (Tone) {
* result as a TypedArray.
* @returns {TypedArray}
*/
Tone.Analyser.prototype.analyse = function(){
Tone.Analyser.prototype.getValue = function(){
if (this._type === Tone.Analyser.Type.FFT){
this._analyser.getFloatFrequencyData(this._buffer);
} else if (this._type === Tone.Analyser.Type.Waveform){
@ -111,7 +111,7 @@ define(["Tone/core/Tone", "Tone/core/AudioNode"], function (Tone) {
});
/**
* The analysis function returned by Tone.Analyser.analyse(), either "fft" or "waveform".
* The analysis function returned by analyser.getValue(), either "fft" or "waveform".
* @memberOf Tone.Analyser#
* @type {String}
* @name type

View file

@ -245,9 +245,9 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
}
//attack
if (this._attackCurve === "linear"){
this._sig.linearRampToValue(velocity, attack, time);
this._sig.linearRampTo(velocity, attack, time);
} else if (this._attackCurve === "exponential"){
this._sig.exponentialRampToValue(velocity, attack, time);
this._sig.targetRampTo(velocity, attack, time);
} else if (attack > 0){
this._sig.setRampPoint(time);
var curve = this._attackCurve;
@ -262,7 +262,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
this._sig.setValueCurveAtTime(curve, time, attack, velocity);
}
//decay
this._sig.exponentialRampToValue(velocity * this.sustain, decay, attack + time);
this._sig.targetRampTo(velocity * this.sustain, decay, attack + time);
return this;
};
@ -280,9 +280,9 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
if (currentValue > 0){
var release = this.toSeconds(this.release);
if (this._releaseCurve === "linear"){
this._sig.linearRampToValue(0, release, time);
this._sig.linearRampTo(0, release, time);
} else if (this._releaseCurve === "exponential"){
this._sig.exponentialRampToValue(0, release, time);
this._sig.targetRampTo(0, release, time);
} else{
var curve = this._releaseCurve;
if (Tone.isArray(curve)){

69
Tone/component/FFT.js Normal file
View file

@ -0,0 +1,69 @@
define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], function (Tone) {
/**
* @class Get the current frequency data of the connected audio source.
* @extends {Tone.AudioNode}
* @param {Number=} size The size of the FFT. Value must be a power of
* two in the range 32 to 32768.
*/
Tone.FFT = function(){
var options = Tone.defaults(arguments, ["size"], Tone.FFT);
options.type = Tone.Analyser.Type.FFT;
Tone.AudioNode.call(this);
/**
* The analyser node.
* @private
* @type {Tone.Analyser}
*/
this._analyser = this.input = this.output = new Tone.Analyser(options);
};
Tone.extend(Tone.FFT, Tone.AudioNode);
/**
* The default values.
* @type {Object}
* @const
*/
Tone.FFT.defaults = {
"size" : 1024
};
/**
* Gets the frequency data of the audio source. Returns the frequency data
* of length [size](#size) as a Float32Array of decibel values.
* @returns {TypedArray}
*/
Tone.FFT.prototype.getValue = function(){
return this._analyser.getValue();
};
/**
* The size of analysis. This must be a power of two in the range 32 to 32768.
* @memberOf Tone.FFT#
* @type {Number}
* @name size
*/
Object.defineProperty(Tone.FFT.prototype, "size", {
get : function(){
return this._analyser.size;
},
set : function(size){
this._analyser.size = size;
}
});
/**
* Clean up.
* @return {Tone.FFT} this
*/
Tone.FFT.prototype.dispose = function(){
Tone.AudioNode.prototype.dispose.call(this);
this._analyser.dispose();
this._analyser = null;
return this;
};
return Tone.FFT;
});

View file

@ -9,36 +9,26 @@ define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], fun
*
* @constructor
* @extends {Tone.AudioNode}
* @param {String} type Either "level" or "signal".
* @param {Number} smoothing The amount of smoothing applied between frames.
* @example
* var meter = new Tone.Meter();
* var mic = new Tone.UserMedia().open();
* //connect mic to the meter
* mic.connect(meter);
* //the current level of the mic input
* var level = meter.value;
* //the current level of the mic input in decibels
* var level = meter.getValue();
*/
Tone.Meter = function(){
var options = Tone.defaults(arguments, ["type", "smoothing"], Tone.Meter);
var options = Tone.defaults(arguments, ["smoothing"], Tone.Meter);
Tone.AudioNode.call(this);
/**
* The type of the meter, either "level" or "signal".
* A "level" meter will return the volume level (rms) of the
* input signal and a "signal" meter will return
* the signal value of the input.
* @type {String}
*/
this.type = options.type;
/**
* The analyser node which computes the levels.
* @private
* @type {Tone.Analyser}
*/
this.input = this.output = this._analyser = new Tone.Analyser("waveform", 512);
this.input = this.output = this._analyser = new Tone.Analyser("waveform", 1024);
/**
* The amount of carryover between the current and last frame.
@ -46,26 +36,10 @@ define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], fun
* @type {Number}
*/
this.smoothing = options.smoothing;
/**
* The last computed value
* @type {Number}
* @private
*/
this._lastValue = 0;
};
Tone.extend(Tone.Meter, Tone.AudioNode);
/**
* @private
* @enum {String}
*/
Tone.Meter.Type = {
Level : "level",
Signal : "signal"
};
/**
* The defaults
* @type {Object}
@ -73,39 +47,44 @@ define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], fun
* @const
*/
Tone.Meter.defaults = {
"smoothing" : 0.8,
"type" : Tone.Meter.Type.Level
"smoothing" : 0.8
};
/**
* The current value of the meter. A value of 1 is
* "unity".
* Get the current decibel value of the incoming signal
* @returns {Decibels}
*/
Tone.Meter.prototype.getLevel = function(){
this._analyser.type = "fft";
var values = this._analyser.getValue();
var offset = 28; // normalizes most signal levels
// TODO: compute loudness from FFT
return Math.max.apply(this, values) + offset;
};
/**
* Get the signal value of the incoming signal
* @returns {Number}
*/
Tone.Meter.prototype.getValue = function(){
this._analyser.type = "waveform";
var value = this._analyser.getValue();
return value[0];
};
/**
* A value from 0 -> 1 where 0 represents no time averaging with the last analysis frame.
* @memberOf Tone.Meter#
* @type {Number}
* @name value
* @name smoothing
* @readOnly
*/
Object.defineProperty(Tone.Meter.prototype, "value", {
Object.defineProperty(Tone.Meter.prototype, "smoothing", {
get : function(){
var signal = this._analyser.analyse();
if (this.type === Tone.Meter.Type.Level){
//rms
var sum = 0;
for (var i = 0; i < signal.length; i++){
sum += Math.pow(signal[i], 2);
}
var rms = Math.sqrt(sum / signal.length);
//smooth it
rms = Math.max(rms, this._lastValue * this.smoothing);
this._lastValue = rms;
//scale it
var unity = 0.35;
var val = rms / unity;
//scale the output curve
return Math.sqrt(val);
} else {
return signal[0];
}
return this._analyser.smoothing;
},
set : function(val){
this._analyser.smoothing = val;
},
});
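A short usage sketch of the reworked Meter (the `source` node here is a placeholder for whatever is feeding the meter):

var meter = new Tone.Meter();
source.connect(meter); //any Tone source or native AudioNode
var db = meter.getLevel(); //level in decibels, e.g. -12
var gain = Tone.dbToGain(db); //convert back to a 0-1 gain value if needed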

View file

@ -0,0 +1,68 @@
define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], function (Tone) {
/**
* @class Get the current waveform data of the connected audio source.
* @extends {Tone.AudioNode}
* @param {Number=} size The size of the FFT. Value must be a power of
* two in the range 32 to 32768.
*/
Tone.Waveform = function(){
var options = Tone.defaults(arguments, ["size"], Tone.Waveform);
options.type = Tone.Analyser.Type.Waveform;
Tone.AudioNode.call(this);
/**
* The analyser node.
* @private
* @type {Tone.Analyser}
*/
this._analyser = this.input = this.output = new Tone.Analyser(options);
};
Tone.extend(Tone.Waveform, Tone.AudioNode);
/**
* The default values.
* @type {Object}
* @const
*/
Tone.Waveform.defaults = {
"size" : 1024
};
/**
* Gets the waveform of the audio source. Returns the waveform data
* of length [size](#size) as a Float32Array with values between -1 and 1.
* @returns {TypedArray}
*/
Tone.Waveform.prototype.getValue = function(){
return this._analyser.getValue();
};
/**
* The size of analysis. This must be a power of two in the range 32 to 32768.
* @memberOf Tone.Waveform#
* @type {Number}
* @name size
*/
Object.defineProperty(Tone.Waveform.prototype, "size", {
get : function(){
return this._analyser.size;
},
set : function(size){
this._analyser.size = size;
}
});
/**
* Clean up.
* @return {Tone.Waveform} this
*/
Tone.Waveform.prototype.dispose = function(){
Tone.AudioNode.prototype.dispose.call(this);
this._analyser.dispose();
this._analyser = null;
return this;
};
return Tone.Waveform;
});

View file

@ -1,7 +1,7 @@
define(["Tone/core/Tone", "Tone/core/Context"], function (Tone) {
/**
* @class Tone.AudioNode is a base class for classes which process audio.
* @class Tone.AudioNode is the base class for classes which process audio.
* AudioNodes have inputs and outputs.
* @param {AudioContext=} context The audio context to use with the class
* @extends {Tone}
@ -26,7 +26,7 @@ define(["Tone/core/Tone", "Tone/core/Context"], function (Tone) {
/**
* Get the audio context belonging to this instance.
* @type {AudioNode}
* @type {Tone.Context}
* @memberOf Tone.AudioNode#
* @name context
* @readOnly
@ -144,6 +144,7 @@ define(["Tone/core/Tone", "Tone/core/Context"], function (Tone) {
* node.chain(effect, panVol, Tone.Master);
* @param {...AudioParam|Tone|AudioNode} nodes
* @returns {Tone.AudioNode} this
* @private
*/
Tone.AudioNode.prototype.chain = function(){
var currentUnit = this;
@ -159,6 +160,7 @@ define(["Tone/core/Tone", "Tone/core/Context"], function (Tone) {
* connect the output of this node to the rest of the nodes in parallel.
* @param {...AudioParam|Tone|AudioNode} nodes
* @returns {Tone.AudioNode} this
* @private
*/
Tone.AudioNode.prototype.fan = function(){
for (var i = 0; i < arguments.length; i++){

View file

@ -168,7 +168,9 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* freq.setValueAtTime("G4", "+1");
*/
Tone.Param.prototype.setValueAtTime = function(value, time){
this._param.setValueAtTime(this._fromUnits(value), this.toSeconds(time));
time = this.toSeconds(time);
Tone.isPast(time);
this._param.setValueAtTime(this._fromUnits(value), time);
return this;
};
@ -182,12 +184,12 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
*/
Tone.Param.prototype.setRampPoint = function(now){
now = Tone.defaultArg(now, this.now());
this.cancelAndHoldAtTime(this.context.currentTime);
var currentVal = this._param.value;
// exponentialRampToValueAt cannot ever ramp from or to 0
// More info: https://bugzilla.mozilla.org/show_bug.cgi?id=1125600#c2
if (currentVal === 0){
currentVal = this._minOutput;
}
// cancel and hold at the given time
this._param.setValueAtTime(currentVal, now);
return this;
};
@ -202,7 +204,9 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
*/
Tone.Param.prototype.linearRampToValueAtTime = function(value, endTime){
value = this._fromUnits(value);
this._param.linearRampToValueAtTime(value, this.toSeconds(endTime));
endTime = this.toSeconds(endTime);
Tone.isPast(endTime);
this._param.linearRampToValueAtTime(value, endTime);
return this;
};
@ -217,7 +221,9 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
Tone.Param.prototype.exponentialRampToValueAtTime = function(value, endTime){
value = this._fromUnits(value);
value = Math.max(this._minOutput, value);
this._param.exponentialRampToValueAtTime(value, this.toSeconds(endTime));
endTime = this.toSeconds(endTime);
Tone.isPast(endTime);
this._param.exponentialRampToValueAtTime(value, endTime);
return this;
};
@ -233,9 +239,9 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* @returns {Tone.Param} this
* @example
* //exponentially ramp to the value 2 over 4 seconds.
* signal.exponentialRampToValue(2, 4);
* signal.exponentialRampTo(2, 4);
*/
Tone.Param.prototype.exponentialRampToValue = function(value, rampTime, startTime){
Tone.Param.prototype.exponentialRampTo = function(value, rampTime, startTime){
startTime = this.toSeconds(startTime);
this.setRampPoint(startTime);
this.exponentialRampToValueAtTime(value, startTime + this.toSeconds(rampTime));
@ -254,15 +260,46 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* @returns {Tone.Param} this
* @example
* //linearly ramp to the value 4 over 3 seconds.
* signal.linearRampToValue(4, 3);
* signal.linearRampTo(4, 3);
*/
Tone.Param.prototype.linearRampToValue = function(value, rampTime, startTime){
Tone.Param.prototype.linearRampTo = function(value, rampTime, startTime){
startTime = this.toSeconds(startTime);
this.setRampPoint(startTime);
this.linearRampToValueAtTime(value, startTime + this.toSeconds(rampTime));
return this;
};
/**
* Convert between Time and time constant. The time
* constant returned can be used in setTargetAtTime.
* @param {Time} time The time to convert
* @return {Number} The time constant which produces an exponential approach
* that reaches over 99% of the way towards the target value within the given time.
*/
Tone.Param.prototype.getTimeConstant = function(time){
return Math.log(this.toSeconds(time)+1)/Math.log(200);
};
/**
* Start exponentially approaching the target value at the given time. Since it
* is an exponential approach it will continue approaching after the ramp duration. The
* rampTime is the time that it takes to reach over 99% of the way towards the value.
* @param {number} value The value to ramp to.
* @param {Time} rampTime The time it takes the value to ramp from
* its current value most of the way towards the target value.
* @param {Time} [startTime=now] When the ramp should start.
* @returns {Tone.Param} this
* @example
* //approach the value 2 over 4 seconds.
* signal.targetRampTo(2, 4);
*/
Tone.Param.prototype.targetRampTo = function(value, rampTime, startTime){
startTime = this.toSeconds(startTime);
this.setRampPoint(startTime);
this.setTargetAtTime(value, startTime, this.getTimeConstant(rampTime));
return this;
};
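To see why the "over 99%" claim holds: setTargetAtTime follows v(t) = target + (start - target) * exp(-(t - t0) / timeConstant), so the fraction of the distance still remaining after rampTime seconds is exp(-rampTime / timeConstant). A quick check of the formula used by getTimeConstant (plain JavaScript, values approximate):

//fraction of the ramp still remaining after rampTime seconds
function remainingFraction(rampTime){
	var timeConstant = Math.log(rampTime + 1) / Math.log(200); //same formula as getTimeConstant
	return Math.exp(-rampTime / timeConstant);
}
remainingFraction(0.1); //~0.004 -> more than 99% of the way to the target
remainingFraction(1); //~0.0005
remainingFraction(10); //effectively 0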
/**
* Start exponentially approaching the target value at the given time with
* a rate having the given time constant.
@ -314,6 +351,31 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
return this;
};
/**
* This is similar to [cancelScheduledValues](#cancelScheduledValues) except
* it holds the automated value at cancelTime until the next automated event.
* @param {Time} cancelTime
* @returns {Tone.Param} this
*/
Tone.Param.prototype.cancelAndHoldAtTime = function(cancelTime){
cancelTime = this.toSeconds(cancelTime);
if (this._param.cancelAndHoldAtTime){
this._param.cancelAndHoldAtTime(cancelTime);
} else {
//fallback for unsupported browsers
//can't cancel and hold at any time in the future
//just do it immediately for gapless automation curves
var now = this.context.currentTime;
this._param.cancelScheduledValues(now);
var currentVal = this._param.value;
if (currentVal === 0){
currentVal = this._minOutput;
}
this._param.setValueAtTime(currentVal, now + this.sampleTime);
}
return this;
};
/**
* Ramps to the given value over the duration of the rampTime.
* Automatically selects the best ramp type (exponential or linear)
@ -333,11 +395,11 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* signal.rampTo(0, 10, 5)
*/
Tone.Param.prototype.rampTo = function(value, rampTime, startTime){
rampTime = Tone.defaultArg(rampTime, 0);
rampTime = Tone.defaultArg(rampTime, 0.1);
if (this.units === Tone.Type.Frequency || this.units === Tone.Type.BPM || this.units === Tone.Type.Decibels){
this.exponentialRampToValue(value, rampTime, startTime);
this.exponentialRampTo(value, rampTime, startTime);
} else {
this.linearRampToValue(value, rampTime, startTime);
this.linearRampTo(value, rampTime, startTime);
}
return this;
};

View file

@ -29,6 +29,13 @@ define(["Tone/core/Tone"], function (Tone) {
*/
this._toRemove = [];
/**
* An array of items to add to the list (once it's done iterating)
* @type {Array}
* @private
*/
this._toAdd = [];
/**
* Flag if the timeline is mid iteration
* @private
@ -79,17 +86,17 @@ define(["Tone/core/Tone"], function (Tone) {
if (Tone.isUndef(event.time)){
throw new Error("Tone.Timeline: events must have a time attribute");
}
if (this._timeline.length){
if (this._iterating){
this._toAdd.push(event);
} else {
var index = this._search(event.time);
this._timeline.splice(index + 1, 0, event);
} else {
this._timeline.push(event);
}
//if the length is more than the memory, remove the previous ones
if (this.length > this.memory){
var diff = this.length - this.memory;
this._timeline.splice(0, diff);
}
}
return this;
};
@ -113,12 +120,12 @@ define(["Tone/core/Tone"], function (Tone) {
/**
* Get the nearest event whose time is less than or equal to the given time.
* @param {Number} time The time to query.
* @param {String} comparitor Which value in the object to compare
* @param {String} comparator Which value in the object to compare
* @returns {Object} The event object set after that time.
*/
Tone.Timeline.prototype.get = function(time, comparitor){
comparitor = Tone.defaultArg(comparitor, "time");
var index = this._search(time, comparitor);
Tone.Timeline.prototype.get = function(time, comparator){
comparator = Tone.defaultArg(comparator, "time");
var index = this._search(time, comparator);
if (index !== -1){
return this._timeline[index];
} else {
@ -145,12 +152,12 @@ define(["Tone/core/Tone"], function (Tone) {
/**
* Get the event which is scheduled after the given time.
* @param {Number} time The time to query.
* @param {String} comparitor Which value in the object to compare
* @param {String} comparator Which value in the object to compare
* @returns {Object} The event object after the given time
*/
Tone.Timeline.prototype.getAfter = function(time, comparitor){
comparitor = Tone.defaultArg(comparitor, "time");
var index = this._search(time, comparitor);
Tone.Timeline.prototype.getAfter = function(time, comparator){
comparator = Tone.defaultArg(comparator, "time");
var index = this._search(time, comparator);
if (index + 1 < this._timeline.length){
return this._timeline[index + 1];
} else {
@ -161,17 +168,17 @@ define(["Tone/core/Tone"], function (Tone) {
/**
* Get the event before the event at the given time.
* @param {Number} time The time to query.
* @param {String} comparitor Which value in the object to compare
* @param {String} comparator Which value in the object to compare
* @returns {Object} The event object before the given time
*/
Tone.Timeline.prototype.getBefore = function(time, comparitor){
comparitor = Tone.defaultArg(comparitor, "time");
Tone.Timeline.prototype.getBefore = function(time, comparator){
comparator = Tone.defaultArg(comparator, "time");
var len = this._timeline.length;
//if it's after the last item, return the last item
if (len > 0 && this._timeline[len - 1][comparitor] < time){
if (len > 0 && this._timeline[len - 1][comparator] < time){
return this._timeline[len - 1];
}
var index = this._search(time, comparitor);
var index = this._search(time, comparator);
if (index - 1 >= 0){
return this._timeline[index - 1];
} else {
@ -219,12 +226,10 @@ define(["Tone/core/Tone"], function (Tone) {
* @returns {Tone.Timeline} this
*/
Tone.Timeline.prototype.cancelBefore = function(time){
if (this._timeline.length){
var index = this._search(time);
if (index >= 0){
this._timeline = this._timeline.slice(index + 1);
}
}
return this;
};
@ -243,21 +248,24 @@ define(["Tone/core/Tone"], function (Tone) {
};
/**
* Does a binary serach on the timeline array and returns the
* Does a binary search on the timeline array and returns the
* nearest event index whose time is after or equal to the given time.
* If a time is searched before the first index in the timeline, -1 is returned.
* If the time is after the end, the index of the last item is returned.
* @param {Number} time
* @param {String} comparitor Which value in the object to compare
* @param {String} comparator Which value in the object to compare
* @return {Number} the index in the timeline array
* @private
*/
Tone.Timeline.prototype._search = function(time, comparitor){
comparitor = Tone.defaultArg(comparitor, "time");
Tone.Timeline.prototype._search = function(time, comparator){
if (this._timeline.length === 0){
return -1;
}
comparator = Tone.defaultArg(comparator, "time");
var beginning = 0;
var len = this._timeline.length;
var end = len;
if (len > 0 && this._timeline[len - 1][comparitor] <= time){
if (len > 0 && this._timeline[len - 1][comparator] <= time){
return len - 1;
}
while (beginning < end){
@ -265,18 +273,18 @@ define(["Tone/core/Tone"], function (Tone) {
var midPoint = Math.floor(beginning + (end - beginning) / 2);
var event = this._timeline[midPoint];
var nextEvent = this._timeline[midPoint + 1];
if (event[comparitor] === time){
if (event[comparator] === time){
//choose the last one that has the same time
for (var i = midPoint; i < this._timeline.length; i++){
var testEvent = this._timeline[i];
if (testEvent[comparitor] === time){
if (testEvent[comparator] === time){
midPoint = i;
}
}
return midPoint;
} else if (event[comparitor] < time && nextEvent[comparitor] > time){
} else if (event[comparator] < time && nextEvent[comparator] > time){
return midPoint;
} else if (event[comparitor] > time){
} else if (event[comparator] > time){
//search lower
end = midPoint;
} else {
@ -303,15 +311,14 @@ define(["Tone/core/Tone"], function (Tone) {
callback.call(this, this._timeline[i]);
}
this._iterating = false;
if (this._toRemove.length > 0){
for (var j = 0; j < this._toRemove.length; j++){
var index = this._timeline.indexOf(this._toRemove[j]);
if (index !== -1){
this._timeline.splice(index, 1);
}
}
this._toRemove.forEach(function(event){
this.remove(event);
}.bind(this));
this._toRemove = [];
}
this._toAdd.forEach(function(event){
this.add(event);
}.bind(this));
this._toAdd = [];
};
/**
@ -397,6 +404,7 @@ define(["Tone/core/Tone"], function (Tone) {
Tone.prototype.dispose.call(this);
this._timeline = null;
this._toRemove = null;
this._toAdd = null;
return this;
};

View file

@ -508,6 +508,16 @@ define(function(){
return Tone.context.now();
};
/**
* Logs a warning in the console if the scheduled time has already passed.
* @param {Time} time
*/
Tone.isPast = function(time){
if (time < Tone.context.currentTime){
console.warn("Time '" + time + "' is in the past. Scheduled time must be ≥ AudioContext.currentTime");
}
};
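Tone.isPast only warns; it does not throw or clamp the time. A trivial illustration:

Tone.isPast(Tone.context.currentTime - 1); //logs the "is in the past" warning
Tone.isPast(Tone.context.currentTime + 1); //no output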
///////////////////////////////////////////////////////////////////////////
// INHERITANCE
///////////////////////////////////////////////////////////////////////////
@ -680,7 +690,7 @@ define(function(){
* @type {String}
* @static
*/
Tone.version = "r11";
Tone.version = "r11-dev";
// allow optional silencing of this log
if (!window.TONE_SILENCE_VERSION_LOGGING) {

View file

@ -1,5 +1,6 @@
define(["Tone/core/Tone", "Tone/core/Clock", "Tone/type/Type", "Tone/core/Timeline",
"Tone/core/Emitter", "Tone/core/Gain", "Tone/core/IntervalTimeline"],
"Tone/core/Emitter", "Tone/core/Gain", "Tone/core/IntervalTimeline",
"Tone/core/TransportRepeatEvent", "Tone/core/TransportEvent"],
function(Tone){
"use strict";
@ -117,13 +118,6 @@ function(Tone){
*/
this._scheduledEvents = {};
/**
* The event ID counter
* @type {Number}
* @private
*/
this._eventID = 0;
/**
* The scheduled events.
* @type {Tone.Timeline}
@ -138,13 +132,6 @@ function(Tone){
*/
this._repeatedEvents = new Tone.IntervalTimeline();
/**
* Events that occur once
* @type {Array}
* @private
*/
this._onceEvents = new Tone.Timeline();
/**
* All of the synced Signals
* @private
@ -171,7 +158,6 @@ function(Tone){
this._swingAmount = 0;
}.bind(this));
};
Tone.extend(Tone.Transport, Tone.Emitter);
@ -222,23 +208,9 @@ function(Tone){
this.emit("loop", tickTime);
}
}
//process the single occurrence events
this._onceEvents.forEachBefore(ticks, function(event){
event.callback(tickTime);
//remove the event
delete this._scheduledEvents[event.id.toString()];
}.bind(this));
//and clear the single occurrence timeline
this._onceEvents.cancelBefore(ticks);
//fire the next tick events if their time has come
//invoke the timeline events scheduled on this tick
this._timeline.forEachAtTime(ticks, function(event){
event.callback(tickTime);
});
//process the repeated events
this._repeatedEvents.forEachAtTime(ticks, function(event){
if ((ticks - event.time) % event.interval === 0){
event.callback(tickTime);
}
event.invoke(tickTime);
});
};
@ -258,17 +230,11 @@ function(Tone){
* }, "128i");
*/
Tone.Transport.prototype.schedule = function(callback, time){
var event = {
var event = new Tone.TransportEvent(this, {
"time" : this.toTicks(time),
"callback" : callback
};
var id = this._eventID++;
this._scheduledEvents[id.toString()] = {
"event" : event,
"timeline" : this._timeline
};
this._timeline.add(event);
return id;
});
return this._addEvent(event, this._timeline);
};
/**
@ -277,7 +243,7 @@ function(Tone){
* `duration`.
* @param {Function} callback The callback to invoke.
* @param {Time} interval The duration between successive
* callbacks.
* callbacks. Must be a positive number.
* @param {TimelinePosition=} startTime When along the timeline the events should
* start being invoked.
* @param {Time} [duration=Infinity] How long the event should repeat.
@ -288,22 +254,14 @@ function(Tone){
* Tone.Transport.scheduleRepeat(callback, "8n", "1m");
*/
Tone.Transport.prototype.scheduleRepeat = function(callback, interval, startTime, duration){
if (interval <= 0){
throw new Error("Tone.Transport: repeat events must have an interval larger than 0");
}
var event = {
var event = new Tone.TransportRepeatEvent(this, {
"callback" : callback,
"interval" : this.toTicks(interval),
"time" : this.toTicks(startTime),
"duration" : this.toTicks(Tone.defaultArg(duration, Infinity)),
"interval" : this.toTicks(interval),
"callback" : callback
};
var id = this._eventID++;
this._scheduledEvents[id.toString()] = {
"event" : event,
"timeline" : this._repeatedEvents
};
this._repeatedEvents.add(event);
return id;
});
//kick it off if the Transport is started
return this._addEvent(event, this._repeatedEvents);
};
/**
@ -315,18 +273,12 @@ function(Tone){
* @returns {Number} The ID of the scheduled event.
*/
Tone.Transport.prototype.scheduleOnce = function(callback, time){
var id = this._eventID++;
var event = {
var event = new Tone.TransportEvent(this, {
"time" : this.toTicks(time),
"callback" : callback,
"id" : id
};
this._scheduledEvents[id.toString()] = {
"event" : event,
"timeline" : this._onceEvents
};
this._onceEvents.add(event);
return id;
"once" : true
});
return this._addEvent(event, this._timeline);
};
/**
@ -338,11 +290,29 @@ function(Tone){
if (this._scheduledEvents.hasOwnProperty(eventId)){
var item = this._scheduledEvents[eventId.toString()];
item.timeline.remove(item.event);
item.event.dispose();
delete this._scheduledEvents[eventId.toString()];
}
return this;
};
/**
* Add an event to the correct timeline. Keep track of the
* timeline it was added to.
* @param {Tone.TransportEvent} event
* @param {Tone.Timeline} timeline
* @returns {Number} the event id which was just added
* @private
*/
Tone.Transport.prototype._addEvent = function(event, timeline){
this._scheduledEvents[event.id.toString()] = {
"event" : event,
"timeline" : timeline
};
timeline.add(event);
return event.id;
};
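The public scheduling API is unchanged by the move to TransportEvent objects; a minimal sketch of how the returned ids are used:

//schedule a one-off and a repeating callback
var id = Tone.Transport.schedule(function(time){
	//invoked when the Transport reaches the 1 measure mark
	console.log("at 1m", time);
}, "1m");
var loopId = Tone.Transport.scheduleRepeat(function(time){
	console.log("every 8th note", time);
}, "8n");
Tone.Transport.start();
//later, remove them by id
Tone.Transport.clear(id);
Tone.Transport.clear(loopId);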
/**
* Remove scheduled events from the timeline after
* the given time. Repeated events will be removed
@ -355,7 +325,6 @@ function(Tone){
after = Tone.defaultArg(after, 0);
after = this.toTicks(after);
this._timeline.cancel(after);
this._onceEvents.cancel(after);
this._repeatedEvents.cancel(after);
return this;
};
@ -777,8 +746,6 @@ function(Tone){
this.bpm = null;
this._timeline.dispose();
this._timeline = null;
this._onceEvents.dispose();
this._onceEvents = null;
this._repeatedEvents.dispose();
this._repeatedEvents = null;
return this;

View file

@ -0,0 +1,92 @@
define(["Tone/core/Tone"], function(Tone){
/**
* @class Tone.TransportEvent is an internal class used by [Tone.Transport](Transport)
* to schedule events. Do not invoke this class directly; it is
* handled from within Tone.Transport.
* @extends {Tone}
* @param {Object} options
*/
Tone.TransportEvent = function(Transport, options){
options = Tone.defaultArg(options, Tone.TransportEvent.defaults);
Tone.call(this);
/**
* Reference to the Transport that created it
* @type {Tone.Transport}
*/
this.Transport = Transport;
/**
* The unique id of the event
* @type {Number}
*/
this.id = Tone.TransportEvent._eventId++;
/**
* The time the event starts
* @type {Ticks}
*/
this.time = options.time;
/**
* The callback to invoke
* @type {Function}
*/
this.callback = options.callback;
/**
* If the event should be removed after being invoked.
* @type {Boolean}
* @private
*/
this._once = options.once;
};
Tone.extend(Tone.TransportEvent);
/**
* The defaults
* @static
* @type {Object}
*/
Tone.TransportEvent.defaults = {
"once" : false,
"callback" : Tone.noOp,
};
/**
* Current ID counter
* @private
* @static
* @type {Number}
*/
Tone.TransportEvent._eventId = 0;
/**
* Invoke the event's callback.
* @param {Time} time The AudioContext time in seconds of the event
*/
Tone.TransportEvent.prototype.invoke = function(time){
if (this.callback){
this.callback(time);
if (this._once && this.Transport){
this.Transport.clear(this.id);
}
}
};
/**
* Clean up
* @return {Tone.TransportEvent} this
*/
Tone.TransportEvent.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
this.Transport = null;
this.callback = null;
return this;
};
return Tone.TransportEvent;
});

View file

@ -0,0 +1,130 @@
define(["Tone/core/Tone", "Tone/core/TransportEvent"], function(Tone){
/**
* @class Tone.TransportRepeatEvent is an internal class used by Tone.Transport
* to schedule repeat events. This class should not be instantiated directly.
* @extends {Tone.TransportEvent}
* @param {Object} options
*/
Tone.TransportRepeatEvent = function(Transport, options){
Tone.TransportEvent.call(this, Transport, options);
options = Tone.defaultArg(options, Tone.TransportRepeatEvent.defaults);
/**
* When the event should stop repeating
* @type {Ticks}
* @private
*/
this.duration = options.duration;
/**
* The interval of the repeated event
* @type {Ticks}
* @private
*/
this._interval = options.interval;
/**
* The ID of the current timeline event
* @type {Number}
* @private
*/
this._currentId = -1;
/**
* The ID of the next timeline event
* @type {Number}
* @private
*/
this._nextId = -1;
/**
* The time of the next event
* @type {Ticks}
* @private
*/
this._nextTick = this.time;
/**
* a reference to the bound _restart method
* @type {Function}
* @private
*/
this._boundRestart = this._restart.bind(this);
this.Transport.on("start loopStart", this._boundRestart);
this._restart();
};
Tone.extend(Tone.TransportRepeatEvent, Tone.TransportEvent);
/**
* The defaults
* @static
* @type {Object}
*/
Tone.TransportRepeatEvent.defaults = {
"duration" : Infinity,
"interval" : 1
};
/**
* Invoke the callback and schedule the next repetition of the event.
* @param {Number} time The AudioContext time in seconds of the event
*/
Tone.TransportRepeatEvent.prototype.invoke = function(time){
//create more events if necessary
this._createEvents();
//call the super class
Tone.TransportEvent.prototype.invoke.call(this, time);
};
/**
* Push more events onto the timeline to keep up with the position of the timeline
* @private
*/
Tone.TransportRepeatEvent.prototype._createEvents = function(){
// schedule the next event
var ticks = this.Transport.ticks;
if (ticks >= this.time && ticks >= this._nextTick &&
this._nextTick + this._interval < this.time + this.duration){
this._nextTick += this._interval;
this._currentId = this._nextId;
this._nextId = this.Transport.scheduleOnce(this.invoke.bind(this), Tone.TransportTime(this._nextTick, "i"));
}
};
/**
* Reschedule the repeat events when the Transport is (re)started or loops.
* @private
*/
Tone.TransportRepeatEvent.prototype._restart = function(){
this.Transport.clear(this._currentId);
this.Transport.clear(this._nextId);
var ticks = this.Transport.ticks;
this._nextTick = this.time;
if (ticks > this.time){
this._nextTick = this.time + Math.ceil((ticks - this.time) / this._interval) * this._interval;
}
this._currentId = this.Transport.scheduleOnce(this.invoke.bind(this), Tone.TransportTime(this._nextTick, "i"));
this._nextTick += this._interval;
this._nextId = this.Transport.scheduleOnce(this.invoke.bind(this), Tone.TransportTime(this._nextTick, "i"));
};
/**
* Clean up
* @return {Tone.TransportRepeatEvent} this
*/
Tone.TransportRepeatEvent.prototype.dispose = function(){
this.Transport.clear(this._currentId);
this.Transport.clear(this._nextId);
this.Transport.off("start loopStart", this._boundRestart);
this._boundRestart = null;
Tone.TransportEvent.prototype.dispose.call(this);
return this;
};
return Tone.TransportRepeatEvent;
});

View file

@ -102,7 +102,7 @@ define(["Tone/core/Tone", "Tone/instrument/Instrument", "Tone/source/FMOscillato
"harmonicity" : options.harmonicity,
"modulationIndex" : options.modulationIndex
});
osc.connect(this._highpass).start(0);
osc.connect(this._highpass).start();
this._oscillators[i] = osc;
var mult = new Tone.Multiply(inharmRatios[i]);

View file

@ -121,7 +121,8 @@ define(["Tone/core/Tone", "Tone/instrument/Instrument", "Tone/core/Buffers", "To
"buffer" : buffer,
"playbackRate" : Tone.intervalToFrequencyRatio(difference),
"fadeIn" : this.attack,
"fadeOut" : this.release
"fadeOut" : this.release,
"curve" : "exponential",
}).connect(this.output);
source.start(time, 0, buffer.duration, velocity);
// add it to the active sources

View file

@ -194,7 +194,6 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
/**
* Cancels all scheduled parameter changes with times greater than or
* equal to startTime.
*
* @param {Time} startTime
* @returns {Tone.TimelineSignal} this
*/
@ -205,6 +204,18 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
return this;
};
/**
* Cancels all scheduled parameter changes with times greater than or
* equal to cancelTime and sets the output of the signal to be the value
* at cancelTime. Similar to [cancelScheduledValues](#cancelscheduledvalues).
* @param {Time} cancelTime
* @returns {Tone.TimelineSignal} this
*/
Tone.TimelineSignal.prototype.cancelAndHoldAtTime = function (cancelTime) {
this.setRampPoint(this.toSeconds(cancelTime));
return this;
};
/**
* Sets the computed value at the given time. This provides
* a point from which a linear or exponential curve
@ -237,8 +248,8 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
this.exponentialRampToValueAtTime(val, time);
}
}
this.setValueAtTime(val, time);
}
this.setValueAtTime(val, time);
return this;
};
@ -310,13 +321,13 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
value = this._initial;
} else if (before.type === Tone.TimelineSignal.Type.Target){
var previous = this._events.getBefore(before.time);
var previouVal;
var previousVal;
if (previous === null){
previouVal = this._initial;
previousVal = this._initial;
} else {
previouVal = previous.value;
previousVal = previous.value;
}
value = this._exponentialApproach(before.time, previouVal, before.value, before.constant, time);
value = this._exponentialApproach(before.time, previousVal, before.value, before.constant, time);
} else if (after === null){
value = before.value;
} else if (after.type === Tone.TimelineSignal.Type.Linear){

View file

@ -82,6 +82,12 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
*/
this.fadeOut = options.fadeOut;
/**
* The curve applied to the fades, either "linear" or "exponential"
* @type {String}
*/
this.curve = options.curve;
/**
* The value that the buffer ramps to
* @type {Gain}
@ -96,6 +102,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
*/
this._onendedTimeout = -1;
//set some values initially
this.loop = options.loop;
this.loopStart = options.loopStart;
this.loopEnd = options.loopEnd;
@ -117,6 +124,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
"loopEnd" : 0,
"fadeIn" : 0,
"fadeOut" : 0,
"curve" : "linear",
"playbackRate" : 1
};
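A sketch of the new fade curve option (assuming `buffer` is an already-loaded Tone.Buffer):

var source = new Tone.BufferSource({
	"buffer" : buffer,
	"fadeIn" : 0.1,
	"fadeOut" : 0.5,
	"curve" : "exponential" //new option; "linear" remains the default
}).connect(Tone.Master);
source.start();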
@ -165,30 +173,27 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
offset = Tone.defaultArg(offset, 0);
}
offset = this.toSeconds(offset);
//the values in seconds
time = this.toSeconds(time);
gain = Tone.defaultArg(gain, 1);
this._gain = gain;
//the fadeIn time
if (Tone.isUndef(fadeInTime)){
fadeInTime = this.toSeconds(this.fadeIn);
} else {
fadeInTime = this.toSeconds(fadeInTime);
}
fadeInTime = this.toSeconds(Tone.defaultArg(fadeInTime, this.fadeIn));
this.fadeIn = fadeInTime;
if (fadeInTime > 0){
this._gainNode.gain.setValueAtTime(0, time);
if (this.curve === "linear"){
this._gainNode.gain.linearRampToValueAtTime(this._gain, time + fadeInTime);
} else {
this._gainNode.gain.setTargetAtTime(this._gain, time, this._gainNode.gain.getTimeConstant(fadeInTime));
}
} else {
this._gainNode.gain.setValueAtTime(gain, time);
}
this._startTime = time + fadeInTime;
this._startTime = time;
var computedDur = Tone.defaultArg(duration, this.buffer.duration - offset);
computedDur = this.toSeconds(computedDur);
var computedDur = this.toSeconds(Tone.defaultArg(duration, this.buffer.duration - offset));
computedDur = Math.max(computedDur, 0);
if (!this.loop || (this.loop && !Tone.isUndef(duration))){
@ -196,7 +201,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
if (!this.loop){
computedDur = Math.min(computedDur, this.buffer.duration - offset);
}
this.stop(time + computedDur + fadeInTime, this.fadeOut);
this.stop(time + computedDur, this.fadeOut);
}
//start the buffer source
@ -212,6 +217,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
}
this._source.buffer = this.buffer.get();
this._source.loopEnd = this.loopEnd || this.buffer.duration;
Tone.isPast(time);
this._source.start(time, offset);
} else {
throw new Error("Tone.BufferSource: buffer is either not set or not loaded.");
@ -232,26 +238,35 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
time = this.toSeconds(time);
//the fadeOut time
if (Tone.isUndef(fadeOutTime)){
fadeOutTime = this.toSeconds(this.fadeOut);
} else {
fadeOutTime = this.toSeconds(fadeOutTime);
//if this is before the previous stop
if (this._stopTime === -1 || this._stopTime > time){
//stop if it's scheduled before the start time
if (time <= this._startTime){
this._gainNode.gain.cancelScheduledValues(time);
this._gainNode.gain.value = 0;
return this;
}
//only stop if the last stop was scheduled later
if (this._stopTime === -1 || this._stopTime > time){
time = Math.max(this._startTime + this.fadeIn + this.sampleTime, time);
//cancel the previous curve
this._gainNode.gain.cancelScheduledValues(time);
this._stopTime = time;
//cancel the end curve
this._gainNode.gain.cancelScheduledValues(this._startTime + this.sampleTime);
time = Math.max(this._startTime, time);
//the fadeOut time
fadeOutTime = this.toSeconds(Tone.defaultArg(fadeOutTime, this.fadeOut));
//set a new one
if (fadeOutTime > 0){
var startFade = Math.max(this._startTime, time - fadeOutTime);
var heldDuration = Math.min(time - this._startTime - this.fadeIn - this.sampleTime, this.buffer.duration);
fadeOutTime = Math.min(heldDuration, fadeOutTime);
var startFade = time - fadeOutTime;
if (fadeOutTime > this.sampleTime){
this._gainNode.gain.setValueAtTime(this._gain, startFade);
if (this.curve === "linear"){
this._gainNode.gain.linearRampToValueAtTime(0, time);
} else {
this._gainNode.gain.setTargetAtTime(0, startFade, this._gainNode.gain.getTimeConstant(fadeOutTime));
}
} else {
this._gainNode.gain.setValueAtTime(0, time);
}

View file

@ -132,7 +132,9 @@ function(Tone){
this.frequency.connect(this._oscillator.frequency);
this.detune.connect(this._oscillator.detune);
//start the oscillator
this._oscillator.start(this.toSeconds(time));
time = this.toSeconds(time);
Tone.isPast(time);
this._oscillator.start(time);
};
/**
@ -143,7 +145,9 @@ function(Tone){
*/
Tone.Oscillator.prototype._stop = function(time){
if (this._oscillator){
this._oscillator.stop(this.toSeconds(time));
time = this.toSeconds(time);
Tone.isPast(time);
this._oscillator.stop(time);
this._oscillator = null;
}
return this;

File diff suppressed because it is too large

14
build/Tone.min.js vendored

File diff suppressed because one or more lines are too long

View file

@ -42,10 +42,10 @@
<script>
//analyse the frequency/amplitude of the incoming signal
var fft = new Tone.Analyser("fft", 32);
var fft = new Tone.FFT(32);
//get the waveform data for the audio
var waveform = new Tone.Analyser("waveform", 1024);
var waveform = new Tone.Waveform(1024);
var player = new Tone.Player({
"url" : "./audio/FWDL.[mp3|ogg]",
@ -77,10 +77,9 @@
fftContext.clearRect(0, 0, canvasWidth, canvasHeight);
var barWidth = canvasWidth / fft.size;
for (var i = 0, len = values.length; i < len; i++){
var val = values[i] / 255;
var x = canvasWidth * (i / len);
var y = val * canvasHeight;
fftContext.fillStyle = "rgba(0, 0, 0, " + val + ")";
var y = (values[i] + 140) * 2;
fftContext.fillStyle = "rgba(0, 0, 0, " + i/len + ")";
fftContext.fillRect(x, canvasHeight - y, barWidth, canvasHeight);
}
}
@ -94,14 +93,13 @@
function drawWaveform(values){
//draw the waveform
waveContext.clearRect(0, 0, canvasWidth, canvasHeight);
var values = waveform.analyse();
waveContext.beginPath();
waveContext.lineJoin = "round";
waveContext.lineWidth = 6;
waveContext.strokeStyle = waveformGradient;
waveContext.moveTo(0, (values[0] / 255) * canvasHeight);
for (var i = 1, len = values.length; i < len; i++){
var val = values[i] / 255;
var val = (values[i] + 1) / 2;
var x = canvasWidth * (i / len);
var y = val * canvasHeight;
waveContext.lineTo(x, y);
@ -132,10 +130,10 @@
function loop(){
requestAnimationFrame(loop);
//get the fft data and draw it
var fftValues = fft.analyse();
var fftValues = fft.getValue();
drawFFT(fftValues);
//get the waveform values and draw it
var waveformValues = waveform.analyse();
var waveformValues = waveform.getValue();
drawWaveform(waveformValues);
}
loop();

View file

@ -33,15 +33,14 @@
<div id="Title">Meter</div>
<div id="Explanation">
<a href="https://tonejs.github.io/docs/#Meter" target="_blank">Tone.Meter</a>
gives you the level of the incoming signal (between 0-1). Values above 1
are clipping.
gives you the level of the incoming signal in decibels.
</div>
</div>
<script>
//create a level meter
var meter = new Tone.Meter("level");
var meter = new Tone.Meter();
var player = new Tone.Player({
"url" : "./audio/FWDL.[mp3|ogg]",
@ -72,7 +71,8 @@
var meterGraident;
function drawMeter(){
var level = meter.value * 0.8; //scale it since values go above 1 when clipping
var level = meter.getLevel();
level = Tone.dbToGain(level); //scale it between 0 - 1
meterContext.clearRect(0, 0, canvasWidth, canvasHeight);
meterContext.fillStyle = meterGraident;
meterContext.fillRect(0, 0, canvasWidth, canvasHeight);

View file

@ -44,10 +44,7 @@
//directly to the master output because of feedback.
var mic = new Tone.UserMedia();
var analyser = new Tone.Analyser({
"type" : "waveform",
"size" : 256
});
var analyser = new Tone.Waveform(256);
mic.connect(analyser);
@ -74,14 +71,14 @@
requestAnimationFrame(drawLoop);
//draw the waveform
context.clearRect(0, 0, canvasWidth, canvasHeight);
var values = analyser.analyse();
var values = analyser.getValue();
context.beginPath();
context.lineJoin = "round";
context.lineWidth = 6;
context.strokeStyle = "white";
context.moveTo(0, (values[0] / 255) * canvasHeight);
context.moveTo(0, (values[0] + 1) / 2 * canvasHeight);
for (var i = 1, len = values.length; i < len; i++){
var val = values[i] / 255;
var val = (values[i] + 1) / 2;
var x = canvasWidth * (i / (len - 1));
var y = val * canvasHeight;
context.lineTo(x, y);

View file

@ -13,7 +13,6 @@
<script src="https://tonejs.github.io/Logo/build/Logo.js"></script>
<script src="./scripts/StartAudioContext.js"></script>
<script src="./scripts/Interface.js"></script>
<script src="./scripts/nexusUI.js"></script>
<link rel="stylesheet" type="text/css" href="./style/examples.css">
@ -38,80 +37,33 @@
which allows for sample-accurate scheduling and ramping. <code>.rampTo(value, rampTime)</code>
smoothly changes the signal from the current value to the target value over the duration of the rampTime.
This example uses <code>.rampTo</code> to smooth out changes in volume and frequency.
<br><br>
As the large dot gets closer to each of the smaller dots, a different harmonic is heard depending
on the distance to that smaller dot. The "harmony" slider adjusts each of the oscillators frequencies'
distance from the fundamental frequency.
</div>
<canvas nx="joints"></canvas>
</div>
<script>
Tone.Master.volume.value = -Infinity;
var oscillators = {};
var oscillators = [];
var bassFreq = 32;
var reverb = new Tone.JCReverb().toMaster();
for (var i = 0; i < 8; i++){
oscillators["node" + i] = new Tone.Oscillator({
oscillators.push(new Tone.Oscillator({
"frequency" : bassFreq * i,
"type" : "sawtooth10",
"volume" : -Infinity,
"detune" : Math.random() * 30 - 15,
}).connect(reverb).start();
}).start().toMaster());
}
// GUI //
nx.onload = function(){
nx.colorize("#7F33ED");
joints1.nodeSize = 25;
joints1.val.x = Math.random();
joints1.val.y = Math.random();
joints1.resize($("#Content").width(), 250);
joints1.animate("bounce");
var width = joints1.width;
var height = joints1.height;
joints1.threshold = Math.max($("#Content").width() / 1.5, 60);
joints1.init();
joints1.draw();
$(window).on("resize", function(){
joints1.resize($("#Content").width(), 250);
joints1.threshold = Math.max($("#Content").width() / 1.5, 60);
joints1.draw();
});
function setValues(data){
for (var n in oscillators){
oscillators[n].volume.cancelScheduledValues();
if (data.hasOwnProperty(n)){
oscillators[n].volume.rampTo((1 - Math.pow(data[n], 0.5)) * -60, 0.3);
} else {
oscillators[n].volume.rampTo(-Infinity, 0.4);
}
}
}
joints1.on("*", setValues);
Interface.Slider({
name : "harmony",
min : 0.5,
max : 2,
value : 1,
drag : function(value){
var i = 0;
for (var n in oscillators){
var osc = oscillators[n];
osc.volume.cancelScheduledValues();
oscillators.forEach(function(osc, i){
osc.frequency.rampTo(bassFreq * i * value, 0.4);
i++;
}
});
},
});
@ -122,13 +74,16 @@
type : "toggle",
key : 32, //spacebar
start : function(){
Tone.Master.volume.rampTo(-20, 0.5);
oscillators.forEach(function(osc){
osc.volume.rampTo(-20, 1);
});
},
end : function(){
Tone.Master.volume.rampTo(-Infinity, 0.5);
oscillators.forEach(function(osc){
osc.volume.rampTo(-Infinity, 1);
});
},
});
}
</script>
</body>

View file

@ -79,7 +79,7 @@
'A7' : 'A7.[mp3|ogg]',
'C8' : 'C8.[mp3|ogg]'
}, {
'release' : 0.1,
'release' : 1,
'baseUrl' : './audio/salamander/'
}).toMaster();

View file

@ -143,19 +143,6 @@ gulp.task("example", function() {
gulp.watch(["../examples/style/examples.scss"], ["sass"]);
});
/**
* THE WEBSERVER
*/
gulp.task("server", function(){
gulp.src("../")
.pipe(webserver({
// livereload: false,
directoryListing: true,
port : 3000,
open: false
}));
});
/**
* LINTING
*/
@ -231,45 +218,6 @@ gulp.task("collectTests", function(done){
*/
gulp.task("travis-test", ["lint", "karma-test"]);
/**
* COMMIT BUILD
*/
gulp.task("cloneBuild", function(done) {
var gitUser = "";
if (process.env.TRAVIS && process.env.GH_TOKEN){
gitUser = process.env.GH_TOKEN+"@";
}
git.clone("https://"+gitUser+"github.com/Tonejs/build", {args: `${TMP_FOLDER}/build`}, done);
});
gulp.task("moveToDev", ["build", "cloneBuild"], function(){
// move files to 'dev' folder
return gulp.src("../build/*.js")
.pipe(gulp.dest(`${TMP_FOLDER}/build/dev/`));
});
gulp.task("commitDev", ["moveToDev"], function(){
process.chdir(`${TMP_FOLDER}/build`);
return gulp.src("./dev/*")
.pipe(git.add())
.pipe(git.commit(`${VERSION} build #${process.env.TRAVIS_BUILD_NUMBER}: ${process.env.TRAVIS_COMMIT_MESSAGE}`));
});
gulp.task("pushBuild", ["commitDev"], function(done){
if (process.env.TRAVIS && process.env.GH_TOKEN){
git.push("origin", "gh-pages", {args: " -f"}, function (err) {
if (err) throw err;
done();
});
} else {
done();
}
});
gulp.task("commitDevBuild", ["pushBuild"], function(){
return del([`${TMP_FOLDER}/build`], { force : true});
});
/**
* COVERALLS
*/
@ -277,74 +225,3 @@ gulp.task("coveralls", function(){
return gulp.src("../test/coverage/**/lcov.info")
.pipe(coveralls());
});
/**
* JS DOC ATTRIBUTES
*/
gulp.task("cloneSite", function(done){
var gitUser = "";
if (process.env.TRAVIS && process.env.GH_TOKEN){
gitUser = process.env.GH_TOKEN+"@";
}
git.clone("https://"+gitUser+"github.com/Tonejs/tonejs.github.io", {args: `${TMP_FOLDER}/Site`}, done);
});
gulp.task("commitSite", ["buildJsdocs"], function(){
process.chdir(`${TMP_FOLDER}/Site`);
return gulp.src("*")
.pipe(git.add())
.pipe(git.commit(`${VERSION} build #${process.env.TRAVIS_BUILD_NUMBER}: ${process.env.TRAVIS_COMMIT_MESSAGE}`));
});
gulp.task("pushJSDocs", ["commitSite"], function(done){
if (process.env.TRAVIS && process.env.GH_TOKEN){
git.push("origin", "master", {args: " -f"}, function (err) {
if (err) throw err;
done();
});
} else {
done();
}
});
gulp.task("empty.md", ["cloneSite"], function(){
return gulp.src("../Tone/*/*.js")
.pipe(tap(function(file){
var className = path.basename(file.path, ".js");
var pathSplit = file.path.split("/");
var category = pathSplit[pathSplit.length-2];
file.contents = Buffer.from(`---\ntitle: ${className}\nlayout: ${className === "Type" ? "type" : "doc"}\nversion: ${VERSION}\n---`);
}))
.pipe(rename({extname: ".md"}))
.pipe(flatten())
.pipe(gulp.dest(`${TMP_FOLDER}/Site/_documentation/${VERSION.includes("dev") ? "dev" : VERSION}`))
.pipe(tap(function(file){
// and another one which just forwards
var className = path.basename(file.path, ".md");
file.contents = Buffer.from(`---\ntitle: ${className}\nlayout: forward\n---`);
}))
.pipe(gulp.dest(`${TMP_FOLDER}/Site/_documentation/`));
});
gulp.task("buildJsdocs", ["empty.md"], function(done){
glob("../Tone/*/*.js", function(err, files){
var docs = child_process.execSync(`./node_modules/.bin/jsdoc -X -a public ${files.join(" ")}`);
docs = JSON.parse(docs)
//filter out some stuff
docs = docs.filter(function(datum){
//is public
return datum.access !== "private" &&
//doesnt inherit
(!datum.hasOwnProperty('inherits') || !datum.inherits.startsWith('Tone#')) &&
//isnt undocumented (or a default value)
(!datum.undocumented || datum.longname.includes('defaults'))
});
var dest = `${TMP_FOLDER}/Site/_data/jsdocs-${VERSION}.json`;
fs.writeFile(dest, JSON.stringify(docs, undefined, '\t'), done);
});
});
gulp.task("commitJSDocs", ["pushJSDocs"], function(){
return del([`${TMP_FOLDER}/Site`], { force : true});
});

25
gulp/increment_version.js Normal file
View file

@ -0,0 +1,25 @@
const fs = require('fs')
const semver = require('semver')
const child_process = require('child_process')
const devVersion = child_process.execSync('npm show tone@next version').toString()
const masterVersion = child_process.execSync('npm show tone version').toString()
//go with whichever is the latest version
let version = masterVersion
if (semver.gt(devVersion, masterVersion)){
version = devVersion
}
version = version.split('.')
//increment the patch
version[2] = parseInt(version[2]) + 1
//put it back in semver
version = version.join('.')
console.log(`incrementing to version ${version}`)
//write it to the package.json
const packageFile = '../package.json'
const package = JSON.parse(fs.readFileSync(packageFile, 'utf-8'))
package.version = version
fs.writeFileSync(packageFile, JSON.stringify(package, undefined, ' '))

123
gulp/package-lock.json generated
View file

@ -218,11 +218,6 @@
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz",
"integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4="
},
"babylon": {
"version": "7.0.0-beta.19",
"resolved": "https://registry.npmjs.org/babylon/-/babylon-7.0.0-beta.19.tgz",
"integrity": "sha512-Vg0C9s/REX6/WIXN37UKpv5ZhRi6A4pjHlpkE34+8/a6c2W1Q692n3hmc+SZG5lKRnaExLUbxtJ1SVT+KaCQ/A=="
},
"backo2": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz",
@ -389,14 +384,6 @@
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz",
"integrity": "sha1-cVuW6phBWTzDMGeSP17GDr2k99c="
},
"catharsis": {
"version": "0.8.9",
"resolved": "https://registry.npmjs.org/catharsis/-/catharsis-0.8.9.tgz",
"integrity": "sha1-mMyJDKZS3S7w5ws3klMQ/56Q/Is=",
"requires": {
"underscore-contrib": "0.3.0"
}
},
"center-align": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz",
@ -2441,6 +2428,11 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ="
},
"semver": {
"version": "4.3.6",
"resolved": "https://registry.npmjs.org/semver/-/semver-4.3.6.tgz",
"integrity": "sha1-MAvG4OhjdPe6YQaLWx7NV/xlMto="
}
}
},
@ -3858,39 +3850,12 @@
"esprima": "2.7.3"
}
},
"js2xmlparser": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/js2xmlparser/-/js2xmlparser-3.0.0.tgz",
"integrity": "sha1-P7YOqgicVED5MZ9RdgzNB+JJlzM=",
"requires": {
"xmlcreate": "1.0.2"
}
},
"jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=",
"optional": true
},
"jsdoc": {
"version": "3.5.4",
"resolved": "https://registry.npmjs.org/jsdoc/-/jsdoc-3.5.4.tgz",
"integrity": "sha512-VmTw0J+2L16IxAe0JSDSAcH0F+DbZxaj8wN1AjHtKMQU/hO0ciIl5ZE93XqrrFIbknobuqHKJCXZj6+Hk57MjA==",
"requires": {
"babylon": "7.0.0-beta.19",
"bluebird": "3.5.0",
"catharsis": "0.8.9",
"escape-string-regexp": "1.0.5",
"js2xmlparser": "3.0.0",
"klaw": "2.0.0",
"marked": "0.3.6",
"mkdirp": "0.5.1",
"requizzle": "0.2.1",
"strip-json-comments": "2.0.1",
"taffydb": "2.6.2",
"underscore": "1.8.3"
}
},
"jshint": {
"version": "2.9.5",
"resolved": "https://registry.npmjs.org/jshint/-/jshint-2.9.5.tgz",
@ -4101,14 +4066,6 @@
"is-buffer": "1.1.5"
}
},
"klaw": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/klaw/-/klaw-2.0.0.tgz",
"integrity": "sha1-WcEo4Nxc5BAgEVEZTuucv4WGUPY=",
"requires": {
"graceful-fs": "4.1.11"
}
},
"lazy-cache": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz",
@ -4462,6 +4419,13 @@
"requires": {
"readable-stream": "1.0.34",
"semver": "4.3.6"
},
"dependencies": {
"semver": {
"version": "4.3.6",
"resolved": "https://registry.npmjs.org/semver/-/semver-4.3.6.tgz",
"integrity": "sha1-MAvG4OhjdPe6YQaLWx7NV/xlMto="
}
}
},
"longest": {
@ -4511,11 +4475,6 @@
"resolved": "https://registry.npmjs.org/map-stream/-/map-stream-0.1.0.tgz",
"integrity": "sha1-5WqpTEyAVaFkBKBnS3jyFffI4ZQ="
},
"marked": {
"version": "0.3.6",
"resolved": "https://registry.npmjs.org/marked/-/marked-0.3.6.tgz",
"integrity": "sha1-ssbGGPzOzk74bE/Gy4p8v1rtqNc="
},
"md5-hex": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/md5-hex/-/md5-hex-1.3.0.tgz",
@ -4726,7 +4685,7 @@
"requires": {
"hosted-git-info": "2.5.0",
"is-builtin-module": "1.0.0",
"semver": "4.3.6",
"semver": "5.4.1",
"validate-npm-package-license": "3.0.1"
}
},
@ -5439,21 +5398,6 @@
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
},
"requizzle": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/requizzle/-/requizzle-0.2.1.tgz",
"integrity": "sha1-aUPDUwxNmn5G8c3dUcFY/GcM294=",
"requires": {
"underscore": "1.6.0"
},
"dependencies": {
"underscore": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz",
"integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag="
}
}
},
"resolve": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.4.0.tgz",
@ -5574,9 +5518,9 @@
"integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg=="
},
"semver": {
"version": "4.3.6",
"resolved": "https://registry.npmjs.org/semver/-/semver-4.3.6.tgz",
"integrity": "sha1-MAvG4OhjdPe6YQaLWx7NV/xlMto="
"version": "5.4.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.4.1.tgz",
"integrity": "sha512-WfG/X9+oATh81XtllIo/I8gOiY9EXRdv1cQdyykeXK17YcUW3EXUAi2To4pcH6nZtJPr7ZOpM5OMyWJZm+8Rsg=="
},
"sequencify": {
"version": "0.0.7",
@ -5964,21 +5908,11 @@
"get-stdin": "4.0.1"
}
},
"strip-json-comments": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
"integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo="
},
"supports-color": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
"integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc="
},
"taffydb": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/taffydb/-/taffydb-2.6.2.tgz",
"integrity": "sha1-fLy2S1oUG2ou/CxdLGe04VCyomg="
},
"textextensions": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/textextensions/-/textextensions-1.0.2.tgz",
@ -6119,26 +6053,6 @@
"resolved": "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz",
"integrity": "sha1-5z3T17DXxe2G+6xrCufYxqadUPo="
},
"underscore": {
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz",
"integrity": "sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI="
},
"underscore-contrib": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/underscore-contrib/-/underscore-contrib-0.3.0.tgz",
"integrity": "sha1-ZltmwkeD+PorGMn4y7Dix9SMJsc=",
"requires": {
"underscore": "1.6.0"
},
"dependencies": {
"underscore": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz",
"integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag="
}
}
},
"unique-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/unique-stream/-/unique-stream-1.0.0.tgz",
@ -6367,11 +6281,6 @@
"resolved": "https://registry.npmjs.org/wtf-8/-/wtf-8-1.0.0.tgz",
"integrity": "sha1-OS2LotDxw00e4tYw8V0O+2jhBIo="
},
"xmlcreate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/xmlcreate/-/xmlcreate-1.0.2.tgz",
"integrity": "sha1-+mv3YqYKQT+z3Y9LA8WyaSONMI8="
},
"xmlhttprequest-ssl": {
"version": "1.5.3",
"resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.3.tgz",

View file

@ -25,7 +25,6 @@
"gulp-tap": "^0.1.3",
"gulp-uglify": "^2.0.0",
"gulp-util": "^3.0.7",
"jsdoc": "^3.4.3",
"jshint": "^2.9.4",
"karma": "^1.7.0",
"karma-chrome-launcher": "^2.2.0",
@ -35,6 +34,7 @@
"karma-requirejs": "^1.1.0",
"mocha": "^3.0.2",
"requirejs": "^2.1.22",
"semver": "^5.4.1",
"yargs": "^7.0.2"
},
"scripts": {

51
gulp/push_build.sh Executable file
View file

@ -0,0 +1,51 @@
#!/bin/bash
TMP_DIR=$(pwd)/tmp
mkdir $TMP_DIR
TONE_DIR=$(pwd)/..
BUILD_DIR=$TMP_DIR/build
# clone the build repo
if [ "$TRAVIS" = "true" ]; then
GITHUB_USER=${GH_TOKEN}@
fi
git clone https://${GITHUB_USER}github.com/Tonejs/build $BUILD_DIR > /dev/null 2>&1
cd $BUILD_DIR
git checkout gh-pages
# generate a new build
gulp build
# push to the appropriate location
if [ "$TRAVIS" = "true" ]; then
if [ "$TRAVIS_BRANCH" = "dev" ]; then
# dev builds go into the dev folder
cp -a $TONE_DIR/build/. $BUILD_DIR/dev/
elif [ "$TRAVIS_BRANCH" = "master" ]; then
# master builds are on the root level folder
cp -a $TONE_DIR/build/. $BUILD_DIR/
# and also in a folder with the version name
VERSION=$(node $TONE_DIR/gulp/version.js $TONE_DIR)
mkdir $BUILD_DIR/$VERSION
cp -a $TONE_DIR/build/. $BUILD_DIR/$VERSION
fi
fi
# push the build
git add .
git commit -m "build #$TRAVIS_BUILD_NUMBER: $TRAVIS_COMMIT_MESSAGE"
git push -f
rm -rf $TMP_DIR

View file

@ -2,11 +2,13 @@
if [ "${TRAVIS_PULL_REQUEST}" = "false" ]; then
# only commit the builds when not a PR
gulp commitDevBuild
gulp commitJSDocs
# commit the build
sh push_build.sh
# update the site
sh update_site.sh
fi
# do coveralls either way
# upload coveralls
gulp coveralls

18
gulp/update_site.sh Executable file
View file

@ -0,0 +1,18 @@
#!/bin/bash
TMP_DIR=$(pwd)/tmp/
mkdir $TMP_DIR
SITE_DIR=$TMP_DIR/Site
# clone the tonejs.github.io site
if [ "$TRAVIS" = "true" ]; then
GITHUB_USER=${GH_TOKEN}@
fi
git clone https://${GITHUB_USER}github.com/Tonejs/tonejs.github.io $SITE_DIR > /dev/null 2>&1
cd $SITE_DIR
# run the update script
sh update.sh
rm -rf $TMP_DIR

7
gulp/version.js Executable file
View file

@ -0,0 +1,7 @@
const fs = require('fs')
var VERSION = fs.readFileSync(`${process.argv[2]}/Tone/core/Tone.js`, 'utf-8')
.match(/(?:Tone\.version\s*=\s*)(?:'|")(.*)(?:'|");/m)[1];
console.log(VERSION)
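A quick sketch of what that regex extracts, given a hypothetical line from Tone/core/Tone.js:

const line = 'Tone.version = "0.11.0";'
const match = line.match(/(?:Tone\.version\s*=\s*)(?:'|")(.*)(?:'|");/m)
console.log(match[1]) // -> 0.11.0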

View file

@ -1,6 +1,6 @@
{
"name": "tone",
"version": "0.10.0",
"version": "0.11.0",
"description": "A Web Audio framework for making interactive music in the browser.",
"main": "build/Tone.js",
"files": [

View file

@ -1,5 +1,5 @@
define(["Tone/component/Analyser", "Test", "helper/Basic", "helper/Supports"],
function (Analyser, Test, Basic, Supports) {
define(["Tone/component/Analyser", "Test", "helper/Basic", "helper/Supports", "Tone/source/Noise"],
function (Analyser, Test, Basic, Supports, Noise) {
describe("Analyser", function(){
@ -33,7 +33,7 @@ define(["Tone/component/Analyser", "Test", "helper/Basic", "helper/Supports"],
it("can run fft analysis", function(){
var anl = new Analyser("fft", 512);
analysis = anl.analyse();
analysis = anl.getValue();
expect(analysis.length).to.equal(512);
for (i = 0; i < analysis.length; i++){
expect(analysis[i]).is.lessThan(0);
@ -41,14 +41,22 @@ define(["Tone/component/Analyser", "Test", "helper/Basic", "helper/Supports"],
anl.dispose();
});
it("can run waveform analysis", function(){
it("can run waveform analysis", function(done){
var noise = new Noise();
var anl = new Analyser("waveform", 256);
analysis = anl.analyse();
noise.connect(anl);
noise.start();
setTimeout(function(){
analysis = anl.getValue();
expect(analysis.length).to.equal(256);
for (i = 0; i < analysis.length; i++){
expect(analysis[i]).is.within(0, 1);
expect(analysis[i]).is.within(-1, 1);
}
anl.dispose();
noise.dispose();
done()
}, 300);
});
it("throws an error if an invalid type is set", function(){

View file

@ -228,13 +228,8 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
env.triggerAttack(attackTime);
env.triggerRelease(releaseTime);
}, 0.6).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < attackTime - 0.001){
expect(sample).to.equal(0);
} else if (time > e.attack + e.decay + releaseTime + e.release){
expect(sample).to.equal(0);
}
});
expect(buffer.getValueAtTime(attackTime - 0.001)).to.equal(0);
expect(buffer.getValueAtTime(e.attack + e.decay + releaseTime + e.release)).to.be.below(0.01);
});
});
@ -253,11 +248,8 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
env.triggerAttack(attackTime);
}, 0.4).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < attackTime - 0.001){
expect(sample).to.equal(0);
} else if (time > attackTime + e.attack + e.decay){
expect(sample).to.equal(0);
}
expect(buffer.getValueAtTime(attackTime - 0.001)).to.equal(0);
expect(buffer.getValueAtTime(attackTime + e.attack + e.decay)).to.be.below(0.01);
});
});
});
@ -317,7 +309,7 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
} else if (time < duration + e.release){
expect(sample).to.be.within(0, e.sustain + 0.01);
} else {
expect(sample).to.be.below(0.001);
expect(sample).to.be.below(0.0015);
}
});
});
@ -575,7 +567,7 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
env.triggerAttackRelease(0.4, 0.1, 0.5);
}, 0.8).then(function(buffer){
buffer.forEach(function(sample){
expect(sample).to.be.lte(0.5);
expect(sample).to.be.at.most(0.51);
});
});
});

52
test/component/FFT.js Normal file
View file

@ -0,0 +1,52 @@
define(["Tone/component/FFT", "Test", "helper/Basic", "helper/Supports", "Tone/source/Noise"],
function (FFT, Test, Basic, Supports, Noise) {
describe("FFT", function(){
Basic(FFT);
it("handles input connection", function(){
var fft = new FFT();
Test.connect(fft);
fft.dispose();
});
it("can get and set properties", function(){
var fft = new FFT();
fft.set({
"size" : 128
});
var values = fft.get();
expect(values.size).to.equal(128);
fft.dispose();
});
it("can correctly set the size", function(){
var fft = new FFT(512);
expect(fft.size).to.equal(512);
fft.size = 1024;
expect(fft.size).to.equal(1024);
fft.dispose();
});
it("can run waveform analysis", function(done){
var noise = new Noise();
var fft = new FFT(256);
noise.connect(fft);
noise.start();
setTimeout(function(){
analysis = fft.getValue();
expect(analysis.length).to.equal(256);
for (i = 0; i < analysis.length; i++){
expect(analysis[i]).is.within(-200, 0);
}
fft.dispose();
noise.dispose();
done()
}, 300);
});
});
});
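Outside the test harness, the pattern this spec exercises looks roughly like the following sketch (assuming the usual `Tone.*` globals in a browser):

var noise = new Tone.Noise();
var fft = new Tone.FFT(256);
noise.connect(fft);
noise.start();
// poll later, e.g. in a draw loop:
var bins = fft.getValue(); // 256 values in decibels; the test above expects them within -200..0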

View file

@ -70,7 +70,7 @@ function (FrequencyEnvelope, Basic, Offline, Test, Envelope) {
if (time < e.attack){
expect(sample).to.be.within(200, 1600);
} else if (time < e.attack + e.decay){
expect(sample).to.be.closeTo(1600, 2);
expect(sample).to.be.closeTo(1600, 10);
}
});
});

View file

@ -18,11 +18,9 @@ function (Meter, Basic, Offline, Test, Signal, PassAudio, Tone, Merge, Oscillato
it("handles getter/setter as Object", function(){
var meter = new Meter();
var values = {
"type" : "signal",
"smoothing" : 0.2
};
meter.set(values);
expect(meter.get().type).to.equal("signal");
expect(meter.get().smoothing).to.equal(0.2);
meter.dispose();
});
@ -44,10 +42,10 @@ function (Meter, Basic, Offline, Test, Signal, PassAudio, Tone, Merge, Oscillato
});
it("measures the incoming signal", function(done){
var meter = new Meter("signal");
var meter = new Meter();
var signal = new Signal(1).connect(meter);
setTimeout(function(){
expect(meter.value).to.be.closeTo(1, 0.05);
expect(meter.getValue()).to.be.closeTo(1, 0.05);
meter.dispose();
signal.dispose();
done();
@ -59,7 +57,7 @@ function (Meter, Basic, Offline, Test, Signal, PassAudio, Tone, Merge, Oscillato
var osc = new Oscillator().connect(meter).start();
osc.volume.value = -6;
setTimeout(function(){
expect(meter.value).to.be.closeTo(1, 0.1);
expect(meter.getLevel()).to.be.closeTo(-6, 1);
meter.dispose();
osc.dispose();
done();
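Taken together, these changes reflect the reworked Meter API; a rough sketch assuming the `Tone.*` globals:

var meter = new Tone.Meter();
var osc = new Tone.Oscillator().connect(meter).start();
osc.volume.value = -6;
setTimeout(function(){
	console.log(meter.getLevel()); // close to -6 (decibels)
	console.log(meter.getValue()); // the incoming signal level (the Signal(1) test above expects ~1)
}, 100);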

View file

@ -0,0 +1,52 @@
define(["Tone/component/Waveform", "Test", "helper/Basic", "helper/Supports", "Tone/source/Noise"],
function (Waveform, Test, Basic, Supports, Noise) {
describe("Waveform", function(){
Basic(Waveform);
it("handles input connection", function(){
var anl = new Waveform();
Test.connect(anl);
anl.dispose();
});
it("can get and set properties", function(){
var anl = new Waveform();
anl.set({
"size" : 128
});
var values = anl.get();
expect(values.size).to.equal(128);
anl.dispose();
});
it("can correctly set the size", function(){
var anl = new Waveform(512);
expect(anl.size).to.equal(512);
anl.size = 1024;
expect(anl.size).to.equal(1024);
anl.dispose();
});
it("can run waveform analysis", function(done){
var noise = new Noise();
var anl = new Waveform(256);
noise.connect(anl);
noise.start();
setTimeout(function(){
analysis = anl.getValue();
expect(analysis.length).to.equal(256);
for (i = 0; i < analysis.length; i++){
expect(analysis[i]).is.within(-1, 1);
}
anl.dispose();
noise.dispose();
done()
}, 300);
});
});
});
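A similar sketch for the time-domain analyser tested here (again assuming the `Tone.*` globals):

var noise = new Tone.Noise();
var waveform = new Tone.Waveform(256);
noise.connect(waveform);
noise.start();
var samples = waveform.getValue(); // 256 samples, each within -1..1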

View file

@ -141,14 +141,14 @@ define(["helper/Basic", "Test", "Tone/core/Param", "Tone/type/Type", "Tone/signa
it ("can schedule an exponential ramp", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.exponentialRampToValueAtTime(3, 1);
param.exponentialRampToValueAtTime(3, "+1");
param.dispose();
});
it ("can approach a target value", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.setTargetAtTime(0.2, 1, 2);
param.setTargetAtTime(0.2, "+1", 2);
param.dispose();
});
@ -162,30 +162,45 @@ define(["helper/Basic", "Test", "Tone/core/Param", "Tone/type/Type", "Tone/signa
it ("can schedule multiple automations", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.linearRampToValueAtTime(0.5, 0.5);
param.linearRampToValueAtTime(0, 1);
param.linearRampToValueAtTime(0.5, "+0.5");
param.linearRampToValueAtTime(0, "+1");
param.dispose();
});
it ("can cancel an automation", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.linearRampToValueAtTime(0.5, 0.5);
param.linearRampToValueAtTime(0.5, "+0.5");
param.cancelScheduledValues(0);
param.dispose();
});
it ("can cancelAndHold an automation", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.linearRampToValueAtTime(0.5, "+0.5");
param.cancelAndHoldAtTime(0);
param.dispose();
});
it ("can set a linear ramp from the current time", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.linearRampToValue(0.5, 0.5);
param.linearRampTo(0.5, 0.5);
param.dispose();
});
it ("can set an exponential ramp from the current time", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.exponentialRampToValue(0.5, 0.5);
param.exponentialRampTo(0.5, 0.5);
param.dispose();
});
it ("can set an exponential approach ramp from the current time", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.targetRampTo(0.5, 0.5);
param.dispose();
});
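In application code, the renamed ramp helpers covered by these tests look roughly like this sketch (values are arbitrary; `Tone.Param` stands in for the AMD module used in the test):

var param = new Tone.Param(Tone.context.createGain().gain);
param.linearRampTo(0.5, 0.5);      // was linearRampToValue
param.exponentialRampTo(0.5, 0.5); // was exponentialRampToValue
param.targetRampTo(0.5, 0.5);      // exponential approach toward the value
param.cancelAndHoldAtTime(0);      // cancel pending automation but hold the current value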

View file

@ -528,6 +528,26 @@ define(["Test", "Tone/core/Timeline"], function (Test, Timeline) {
expect(sched.length).to.equal(0);
sched.dispose();
});
it("can add items during iteration", function(){
var sched = new Timeline();
for (var i = 0; i < 1000; i++){
sched.add({"time" : i});
}
var added = false;
sched.forEach(function(event){
if (!added){
added = true;
sched.add({
"time" : 10,
"added" : true,
});
}
});
expect(sched.length).to.equal(1001);
sched.dispose();
});
});
});
});
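The new case above pins down that a Timeline may be appended to while it is being iterated; in isolation the guarded pattern is simply (a sketch):

var timeline = new Tone.Timeline();
timeline.add({ "time" : 0 });
var added = false;
timeline.forEach(function(event){
	if (!added){
		added = true;
		timeline.add({ "time" : 10 });
	}
});
// timeline.length is now 2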

View file

@ -178,7 +178,7 @@ define(["Test", "Tone/core/Tone", "helper/PassAudio", "Tone/source/Oscillator",
});
context("Tone.setContext", function(){
context("Tone.context", function(){
it ("can set a new context", function(){
var origCtx = Tone.context;
@ -207,6 +207,19 @@ define(["Test", "Tone/core/Tone", "helper/PassAudio", "Tone/source/Oscillator",
return ctx.close();
});
it ("tests if the audio context time has passed", function(){
// overwrite warn to throw errors
var originalWarn = console.warn;
console.warn = function(warning){
throw new Error(warning);
};
var currentTime = Tone.context.currentTime;
expect(function(){
Tone.isPast(currentTime-1);
}).to.throw(Error);
console.warn = originalWarn;
});
});
context("Tone.prototype.set / get", function(){

View file

@ -376,24 +376,32 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
it ("scheduled event gets invoked with the time of the event", function(){
var wasCalled = false;
return Offline(function(Transport){
var startTime = 0.1;
Transport.schedule(function(time){
expect(time).to.be.closeTo(startTime, 0.01);
wasCalled = true;
}, 0);
Transport.start(startTime);
}, 0.2);
}, 0.2).then(function(){
expect(wasCalled).to.be.true;
});
});
it ("can schedule events with TransportTime", function(){
var wasCalled = false;
return Offline(function(Transport){
var startTime = 0.1;
var eighth = Transport.toSeconds("8n");
Transport.schedule(function(time){
expect(time).to.be.closeTo(startTime + eighth, 0.01);
wasCalled = true;
}, TransportTime("8n"));
Transport.start(startTime);
}, 0.5);
}, 0.5).then(function(){
expect(wasCalled).to.be.true;
});
});
it ("can cancel a scheduled event", function(){
@ -401,7 +409,7 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
var eventID = Transport.schedule(function(){
throw new Error("should not call this function");
}, 0);
Transport.cancel(eventID);
Transport.clear(eventID);
Transport.start();
});
});
@ -424,25 +432,30 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
Transport.scheduleOnce(Tone.noOp, 0);
Transport.scheduleOnce(Tone.noOp, 1);
Transport.scheduleOnce(Tone.noOp, 2);
expect(Transport._onceEvents.length).to.equal(3);
expect(Transport._timeline.length).to.equal(3);
Transport.cancel(2);
expect(Transport._onceEvents.length).to.equal(2);
expect(Transport._timeline.length).to.equal(2);
Transport.cancel(0);
expect(Transport._onceEvents.length).to.equal(0);
expect(Transport._timeline.length).to.equal(0);
});
});
it ("scheduled event anywhere along the timeline", function(){
var wasCalled = false;
return Offline(function(Transport){
var startTime = Transport.now();
Transport.schedule(function(time){
expect(time).to.be.closeTo(startTime + 0.5, 0.001);
wasCalled = true;
}, 0.5);
Transport.start(startTime);
}, 0.6);
}, 0.6).then(function(){
expect(wasCalled).to.be.true;
});
});
it ("can schedule multiple events and invoke them in the right order", function(){
var wasCalled = false;
return Offline(function(Transport){
var first = false;
Transport.schedule(function(){
@ -450,9 +463,12 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
}, 0.1);
Transport.schedule(function(){
expect(first).to.be.true;
wasCalled = true;
}, 0.11);
Tone.Transport.start();
}, 0.2);
}, 0.2).then(function(){
expect(wasCalled).to.be.true;
});
});
it ("invokes the event again if the timeline is restarted", function(){
@ -467,6 +483,24 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
});
it ("can add an event after the Transport is started", function(){
var wasCalled = false;
return Offline(function(Transport){
Transport.start(0);
var wasScheduled = false;
return function(time){
if (time > 0.1 && !wasScheduled){
wasScheduled = true;
Transport.schedule(function(){
wasCalled = true;
}, 0.15);
}
}
}, 0.3).then(function(){
expect(wasCalled).to.be.true;
});
});
});
context("scheduleRepeat", function(){
@ -483,7 +517,6 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
return Offline(function(Transport){
var startTime = 0.1;
var eventID = Transport.scheduleRepeat(function(time){
Transport.clear(eventID);
expect(time).to.be.closeTo(startTime, 0.01);
invoked = true;
}, 1, 0);
@ -546,6 +579,7 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
it ("repeats at the repeat interval", function(){
var wasCalled = false;
return Offline(function(Transport){
var repeatTime = -1;
Transport.scheduleRepeat(function(time){
@ -553,9 +587,12 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
expect(time - repeatTime).to.be.closeTo(0.1, 0.01);
}
repeatTime = time;
wasCalled = true;
}, 0.1, 0);
Transport.start();
}, 0.5);
}, 0.5).then(function(){
expect(wasCalled).to.be.true;
});
});
it ("can schedule multiple events and invoke them in the right order", function(){
@ -578,18 +615,10 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
});
it ("cannot schedule an event with an interval of 0", function(){
return Offline(function(Transport){
expect(function(){
Transport.scheduleRepeat(function(){}, 0, 10);
}).to.throw(Error);
});
});
it ("repeats for the given interval", function(){
var repeatCount = 0;
return Offline(function(Transport){
Transport.scheduleRepeat(function(){
Transport.scheduleRepeat(function(time){
repeatCount++;
}, 0.1, 0, 0.5);
Transport.start();
@ -598,6 +627,46 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
});
it ("can add an event after the Transport is started", function(){
var invocations = 0;
return Offline(function(Transport){
Transport.start(0);
var wasScheduled = false;
var times = [0.15, 0.3]
return function(time){
if (time > 0.1 && !wasScheduled){
wasScheduled = true;
Transport.scheduleRepeat(function(time){
expect(time).to.be.closeTo(times[invocations], 0.01);
invocations++;
}, 0.15, 0.15);
}
}
}, 0.31).then(function(){
expect(invocations).to.equal(2);
});
});
it ("can add an event to the past after the Transport is started", function(){
var invocations = 0;
return Offline(function(Transport){
Transport.start(0);
var wasScheduled = false;
var times = [0.15, 0.25]
return function(time){
if (time >= 0.12 && !wasScheduled){
wasScheduled = true;
Transport.scheduleRepeat(function(time){
expect(time).to.be.closeTo(times[invocations], 0.01);
invocations++;
}, 0.1, 0.05);
}
}
}, 0.3).then(function(){
expect(invocations).to.equal(2);
});
});
});
context("scheduleOnce", function(){
@ -683,25 +752,29 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
context("events", function(){
it("invokes start/stop/pause events", function(){
var invokations = 0;
var invocations = 0;
return Offline(function(Transport){
Tone.Transport.on("start pause stop", function(){
invokations++;
invocations++;
});
Transport.start().stop(0.1).start(0.2);
}, 0.5).then(function(){
expect(invokations).to.equal(3);
expect(invocations).to.equal(3);
});
});
it("invokes start event with correct offset", function(){
var wasCalled = false;
return Offline(function(Transport){
Transport.on("start", function(time, offset){
expect(time).to.be.closeTo(0.2, 0.01);
expect(offset).to.be.closeTo(0.5, 0.001);
wasCalled = true;
});
Transport.start(0.2, "4n");
}, 0.3);
}, 0.3).then(function(){
expect(wasCalled).to.be.true;
});
});
it("invokes the event just before the scheduled time", function(){
@ -719,20 +792,20 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
it("passes in the time argument to the events", function(){
var invokations = 0;
var invocations = 0;
return Offline(function(Transport){
var now = Transport.now();
Transport.on("start", function(time){
invokations++;
invocations++;
expect(time).to.be.closeTo(now + 0.1, 0.01);
});
Transport.on("stop", function(time){
invokations++;
invocations++;
expect(time).to.be.closeTo(now + 0.2, 0.01);
});
Transport.start("+0.1").stop("+0.2");
}, 0.3).then(function(){
expect(invokations).to.equal(2);
expect(invocations).to.equal(2);
});
});
@ -778,34 +851,34 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
it("can swing", function(){
var invokations = 0;
var invocations = 0;
return Offline(function(Transport){
Transport.swing = 1;
Transport.swingSubdivision = "8n";
var eightNote = Transport.toSeconds("8n");
//downbeat, no swing
Transport.schedule(function(time){
invokations++;
invocations++;
expect(time).is.closeTo(0, 0.001);
}, 0);
//eighth note has swing
Transport.schedule(function(time){
invokations++;
invocations++;
expect(time).is.closeTo(eightNote * 5/3, 0.001);
}, "8n");
//sixteenth note is also swung
Transport.schedule(function(time){
invokations++;
invocations++;
expect(time).is.closeTo(eightNote, 0.05);
}, "16n");
//no swing on the quarter
Transport.schedule(function(time){
invokations++;
invocations++;
expect(time).is.closeTo(eightNote * 2, 0.001);
}, "4n");
Transport.start(0).stop(0.7);
}, 0.7).then(function(){
expect(invokations).to.equal(4);
expect(invocations).to.equal(4);
});
});
});
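Summarizing the Transport scheduling API these tests exercise, roughly (a sketch assuming the global `Tone.Transport`):

// one-off event along the Transport timeline; the callback receives the scheduled time
var id = Tone.Transport.schedule(function(time){ /* ... */ }, "8n");
Tone.Transport.clear(id); // remove a single event by id (previously Transport.cancel(id))
Tone.Transport.cancel(0); // remove every event scheduled at or after the given transport time
// repeated event: callback, interval, start time, optional total duration
Tone.Transport.scheduleRepeat(function(time){ /* ... */ }, 0.1, 0, 0.5);
Tone.Transport.start("+0.1").stop("+0.2");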

View file

@ -0,0 +1,26 @@
define(["Test", "Tone/core/TransportEvent", "Tone/core/Tone", "helper/Offline", "helper/PassAudio", "Tone/source/Oscillator", "Tone/core/AudioNode"],
function (Test, TransportEvent, Tone, Offline, PassAudio, Oscillator, AudioNode) {
describe("TransportEvent", function(){
it ("can be created and disposed", function(){
return Offline(function(Transport){
var event = new TransportEvent(Transport, {
"time" : 0
});
event.dispose();
Test.wasDisposed(event);
})
});
it ("generates a unique event ID", function(){
return Offline(function(Transport){
var event = new TransportEvent(Transport, {
"time" : 0
});
expect(event.id).to.be.a('number');
event.dispose();
})
});
});
});

View file

@ -0,0 +1,37 @@
define(["Test", "Tone/core/TransportRepeatEvent", "Tone/core/Tone", "helper/Offline", "helper/PassAudio", "Tone/source/Oscillator", "Tone/core/AudioNode"],
function (Test, TransportRepeatEvent, Tone, Offline, PassAudio, Oscillator, AudioNode) {
describe("TransportRepeatEvent", function(){
it ("can be created and disposed", function(){
return Offline(function(Transport){
var event = new TransportRepeatEvent(Transport, {
"time" : 0
});
event.dispose();
Test.wasDisposed(event);
})
});
it ("generates a unique event ID", function(){
return Offline(function(Transport){
var event = new TransportRepeatEvent(Transport, {
"time" : 0
});
expect(event.id).to.be.a('number');
event.dispose();
})
});
it ("is removed from the Transport when disposed", function(){
return Offline(function(Transport){
var event = new TransportRepeatEvent(Transport, {
"time" : 0
});
event.dispose();
expect(Transport._timeline.length).to.equal(0);
})
});
});
});

View file

@ -462,7 +462,7 @@ define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
"loopEnd" : 0.5,
"loop" : true,
"callback" : function(time, value){
if (value === 1){
if (value === 1 && !switched){
switched = true;
part.loopEnd = 0.2;
} else if (switched){
@ -582,7 +582,7 @@ define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
part.loopEnd = 0.3;
Transport.start(0.2).stop(0.61).start(0.8);
}, 2).then(function(){
expect(eventTimeIndex).to.equal(7);
expect(eventTimeIndex).to.equal(8);
});
});

View file

@ -67,20 +67,30 @@ function (Test, Type, Transport, Time, Frequency) {
}
}
function silenceWarning(cb){
var warning = console.warn;
console.warn = function(){};
cb();
console.warn = warning;
}
return {
method : function(constructor, fn, args, consArgs){
it (fn+" ("+args.join(", ") + ")", function(){
silenceWarning(function(){
var permutations = generateArgs(args);
for (var i = 0; i < permutations.length; i++){
var instance = new constructor(consArgs);
instance[fn].apply(instance, permutations[i]);
instance.dispose();
}
})
});
},
member : function(constructor, member, param, consArgs){
it (member+" = "+param, function(){
silenceWarning(function(){
var permutations = generateArgs([param]);
for (var i = 0; i < permutations.length; i++){
var instance = new constructor(consArgs);
@ -88,6 +98,7 @@ function (Test, Type, Transport, Time, Frequency) {
instance.dispose();
}
});
});
},
constructor : function(constructor, args){
@ -99,6 +110,7 @@ function (Test, Type, Transport, Time, Frequency) {
}
it ("constructor ( "+ argString + " )", function(){
silenceWarning(function(){
var permutations = generateArgs(args);
for (var i = 0; i < permutations.length; i++){
var Temp = function(){}; // temporary constructor
@ -108,6 +120,7 @@ function (Test, Type, Transport, Time, Frequency) {
tmpInst.dispose();
}
});
});
},
};
});

View file

@ -78,8 +78,8 @@ define(["helper/OutputAudio", "Tone/effect/Effect", "helper/PassAudio",
var signalL = new Signal(-1).connect(merge.left);
var signalR = new Signal(1).connect(merge.right);
//make the signals ramp
signalL.linearRampToValue(1, 1);
signalR.linearRampToValue(-1, 1);
signalL.linearRampTo(1, 1);
signalR.linearRampTo(-1, 1);
instance.wet.value = 0;
}, 0.5, 2).then(function(buffer){
buffer.forEach(function(L, R, time){
@ -101,8 +101,8 @@ define(["helper/OutputAudio", "Tone/effect/Effect", "helper/PassAudio",
var signalL = new Signal(-1).connect(merge.left);
var signalR = new Signal(1).connect(merge.right);
//make the signals ramp
signalL.linearRampToValue(1, 1);
signalR.linearRampToValue(-1, 1);
signalL.linearRampTo(1, 1);
signalR.linearRampTo(-1, 1);
if (instance.start){
instance.start();
}

View file

@ -38,7 +38,7 @@ function (PolySynth, Basic, InstrumentTests, OutputAudioStereo, Instrument, Test
polySynth.triggerRelease("C4", 0.1);
}, 0.3).then(function(buffer){
expect(buffer.getFirstSoundTime()).to.be.closeTo(0, 0.01);
expect(buffer.getLastSoundTime()).to.be.closeTo(0.2, 0.01);
expect(buffer.getValueAtTime(0.2)).to.be.closeTo(0, 0.01);
});
});
@ -51,7 +51,7 @@ function (PolySynth, Basic, InstrumentTests, OutputAudioStereo, Instrument, Test
polySynth.releaseAll(0.1);
}, 0.3).then(function(buffer){
expect(buffer.getFirstSoundTime()).to.be.closeTo(0, 0.01);
expect(buffer.getLastSoundTime()).to.be.closeTo(0.2, 0.01);
expect(buffer.getValueAtTime(0.2)).to.be.closeTo(0, 0.01);
});
});

View file

@ -77,13 +77,13 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
it ("can schedule an exponential ramp", function(){
var sig = new Signal(1);
sig.exponentialRampToValueAtTime(3, 1);
sig.exponentialRampToValueAtTime(3, "+1");
sig.dispose();
});
it ("can approach a target value", function(){
var sig = new Signal(1);
sig.setTargetAtTime(0.2, 1, 2);
sig.setTargetAtTime(0.2, "+1", 2);
sig.dispose();
});
@ -119,10 +119,23 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
}, 1);
});
it ("can cancel and hold an automation curve", function(){
return Offline(function(){
var sig = new Signal(0).toMaster();
sig.linearRampTo(2, 1);
sig.cancelAndHoldAtTime(0.5)
}, 1).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.1);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.5, 0.1);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(1, 0.1);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(1, 0.1);
});
});
it ("can set a linear ramp from the current time", function(){
return Offline(function(){
var sig = new Signal(0).toMaster();
sig.linearRampToValue(2, 0.3);
sig.linearRampTo(2, 0.3);
}, 0.5).then(function(buffer){
buffer.forEach(function(sample, time){
if (time > 0.3){
@ -135,7 +148,7 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
it ("can set an linear ramp in the future", function(){
return Offline(function(){
var sig = new Signal(1).toMaster();
sig.linearRampToValue(50, 0.3, 0.2);
sig.linearRampTo(50, 0.3, 0.2);
}, 0.6).then(function(buffer){
buffer.forEach(function(sample, time){
if (time >= 0.6){
@ -147,11 +160,31 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
});
});
it ("can set a exponential approach ramp from the current time", function(){
return Offline(function(){
var sig = new Signal(0).toMaster();
sig.targetRampTo(1, 0.3);
}, 0.5).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.be.below(0.0001);
expect(buffer.getValueAtTime(0.3)).to.be.closeTo(1, 0.02);
});
});
it ("can set an exponential approach ramp in the future", function(){
return Offline(function(){
var sig = new Signal(1).toMaster();
sig.targetRampTo(50, 0.3, 0.2);
}, 0.7).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.be.closeTo(1, 0.0001);
expect(buffer.getValueAtTime(0.2)).to.be.closeTo(1, 0.0001);
expect(buffer.getValueAtTime(0.6)).to.be.closeTo(50, 0.5);
});
});
it ("can set an exponential ramp from the current time", function(){
return Offline(function(){
var sig = new Signal(1).toMaster();
sig.exponentialRampToValue(50, 0.4);
sig.exponentialRampTo(50, 0.4);
}, 0.6).then(function(buffer){
buffer.forEach(function(sample, time){
if (time >= 0.4){
@ -166,7 +199,7 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
it ("can set an exponential ramp in the future", function(){
return Offline(function(){
var sig = new Signal(1).toMaster();
sig.exponentialRampToValue(50, 0.3, 0.2);
sig.exponentialRampTo(50, 0.3, 0.2);
}, 0.8).then(function(buffer){
buffer.forEach(function(sample, time){
if (time >= 0.6){

View file

@ -103,6 +103,19 @@ define(["Test", "Tone/signal/TimelineSignal", "helper/Offline", "Tone/type/Type"
});
});
it ("can cancel and hold an automation curve", function(){
return Offline(function(){
var sig = new TimelineSignal(0).toMaster();
sig.linearRampTo(2, 1);
sig.cancelAndHoldAtTime(0.5)
}, 1).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.1);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.5, 0.1);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(1, 0.1);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(1, 0.1);
});
});
if (Supports.ACCURATE_SIGNAL_SCHEDULING){
it("can match a complex scheduled curve", function(){

View file

@ -145,6 +145,22 @@ define(["Test", "Tone/signal/TransportTimelineSignal", "helper/Offline", "Tone/t
});
});
it("can cancel a scheduled value", function(){
var sched;
return Offline(function(Transport){
sched = new TransportTimelineSignal(0).toMaster();
sched.setValueAtTime(0, 0);
sched.linearRampToValueAtTime(1, 1);
sched.cancelAndHoldAtTime(0.5);
Transport.start(0);
}, 1).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.1);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.25, 0.1);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0.5, 0.1);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(0.5, 0.1);
});
});
it("can automate values with different units", function(){
var sched;
return Offline(function(Transport){

View file

@ -423,14 +423,14 @@ define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
});
});
it("fades from the end", function(){
it("fades from the end when passed into the stop call", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.start(0).stop(0.2, 0.1)
}, 0.3).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < 0.1){
expect(sample).to.equal(1);
if (time < 0.101){
expect(sample).to.be.closeTo(1, 0.01);
} else if (time < 0.2){
expect(sample).to.be.lessThan(1);
} else {
@ -440,14 +440,32 @@ define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
});
});
it("fades at the end of the file at the files duration", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.fadeOut = 0.1;
player.start(0);
}, 0.6).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < 0.401){
expect(sample).to.be.closeTo(1, 0.01);
} else if (time < 0.5){
expect(sample).to.be.lessThan(1);
} else {
expect(sample).to.equal(0);
}
});
});
});
it("cant fade for shorter than the fade in time", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.fadeIn = 0.15
player.start(0).stop(0.2, 0.1)
player.fadeIn = 0.15;
player.start(0).stop(0.2, 0.1);
}, 0.3).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < 0.149){
if (time < 0.14){
expect(sample).to.be.lessThan(1);
} else if (Math.abs(time - 0.15) < 1e-4){
expect(sample).to.be.closeTo(1, 0.05);
@ -458,22 +476,54 @@ define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
});
});
it("fades at the end of the file", function(){
it("the fade out can shorten to fit the duration of the sample", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.fadeOut = 0.1;
player.start(0);
}, 0.6).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < 0.4){
expect(sample).to.equal(1);
} else if (time < 0.5){
expect(sample).to.be.lessThan(1);
} else {
expect(sample).to.equal(0);
}
player.fadeOut = 1;
player.start(0).stop(0.5);
}, 0.51).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.equal(1);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.5, 0.01);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0, 0.01);
});
});
it("the fade out will only start after the fade in", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.fadeIn = 0.1;
player.fadeOut = 1;
player.start(0).stop(0.5);
}, 0.51).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.equal(0);
expect(buffer.getValueAtTime(0.05)).to.be.closeTo(0.5, 0.01);
expect(buffer.getValueAtTime(0.1)).to.be.closeTo(1, 0.01);
expect(buffer.getValueAtTime(0.3)).to.be.closeTo(0.5, 0.01);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0, 0.01);
});
});
it("can fade with an exponential curve", function(){
var player = new BufferSource(onesBuffer).toMaster();
player.curve = "exponential";
expect(player.curve).to.equal("exponential");
player.dispose();
});
it("fades in and out exponentially", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.curve = "exponential";
player.fadeIn = 0.1;
player.fadeOut = 1;
player.start(0).stop(0.5);
}, 0.51).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.equal(0);
expect(buffer.getValueAtTime(0.05)).to.be.closeTo(0.93, 0.01);
expect(buffer.getValueAtTime(0.1)).to.be.closeTo(1, 0.01);
expect(buffer.getValueAtTime(0.3)).to.be.closeTo(0.05, 0.01);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0, 0.01);
});
});
it("can be scheduled to start at a lower gain", function(){
@ -519,6 +569,26 @@ define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
});
});
it("does not play if the stop time is at the start time", function(){
return Offline(function(){
var player = new BufferSource(buffer);
player.toMaster();
player.start(0).stop(0);
}, 0.3).then(function(buffer){
expect(buffer.isSilent()).to.be.true;
});
});
it("does not play if the stop time is at before start time", function(){
return Offline(function(){
var player = new BufferSource(buffer);
player.toMaster();
player.start(0.1).stop(0);
}, 0.3).then(function(buffer){
expect(buffer.isSilent()).to.be.true;
});
});
it("stops playing at the earlier time if invoked with 'stop' at a later time", function(){
return Offline(function(){
var player = new BufferSource(buffer);