Merge branch 'dev'

This commit is contained in:
Yotam Mann 2017-09-16 15:35:09 -04:00
commit e0411838b1
57 changed files with 1654 additions and 24345 deletions

53
.gitignore vendored
View file

@ -1,56 +1,23 @@
.DS_Store
*.asd
*.scssc
examples/scratch.html
*.sublime-workspace
*.sublime-project
# grunt modules
node_modules
gulp/description
TODO.txt
# all the npm stuff
utils/npm/Tone
utils/npm/build/Tone.js
utils/npm/build/Tone.min.js
utils/npm/build/Tone.Preset.js
utils/npm/README.md
utils/jsdoc/*.json
examples/deps/FileSaver.js
examples/oscilloscope.html
.idea
wiki
test/performance
examples/crashes.html
examples/style/examples.css.map
examples/deps/Tone.dat.gui.js
examples/deps/dat.gui.js
test/mainTest.js
test/Main.js
build/p5.Tone.min.js
build/p5.Tone.js
.DS_Store
examples/scratch.html
examples/deps/FileSaver.js
examples/oscilloscope.html
examples/graph.html
*.asd
test/performance
test/mainTest.js
test/Main.js
test/supports.html
test/coverage/
build/*

View file

@ -1,18 +1,46 @@
sudo: false
dist: trusty
language: node_js
node_js:
- "8"
sudo: false
dist: trusty
language: node_js
node_js:
- '8'
addons:
chrome: stable
before_script:
- cd gulp
- npm install -g karma
- npm install -g gulp
- npm install
- git config --global user.email "travis@travis-ci.org"
- git config --global user.name "Travis CI"
- cd gulp
- npm install -g jsdoc
- npm install -g karma
- npm install -g gulp
- npm install
- git config --global user.email "travis@travis-ci.org"
- git config --global user.name "Travis CI"
script: gulp travis-test
after_success:
- sh success.sh
after_success:
- sh success.sh
before_deploy:
- node increment_version.js
- cd ../
deploy:
- provider: npm
skip_cleanup: true
email: yotammann@gmail.com
api_key: $NPM_TOKEN
tag: next
on:
repo: Tonejs/Tone.js
branch: dev
# publish without @next when pushing on master
- provider: npm
skip_cleanup: true
email: yotammann@gmail.com
api_key: $NPM_TOKEN
on:
repo: Tonejs/Tone.js
branch: master
# publish build files for releases
- provider: releases
api-key: $GH_TOKEN
file_glob: true
file: build/*
skip_cleanup: true
on:
tags: true

View file

@ -3,12 +3,16 @@
* [Code coverage](https://coveralls.io/github/Tonejs/Tone.js) analysis
* [Dev build](https://tonejs.github.io/build/dev/Tone.js) with each successful commit
* [Versioned docs](https://tonejs.github.io/docs/Tone) plus a [dev build of the docs](https://tonejs.github.io/docs/dev/Tone) on successful commits
* Tone.AudioNode is base class for all classes which generate or process audio
* [Tone.AudioNode](https://tonejs.github.io/docs/AudioNode) is the base class for all classes which generate or process audio
* [Tone.Sampler](https://tonejs.github.io/docs/Sampler) simplifies creating multisampled instruments
* [Tone.Solo](https://tonejs.github.io/docs/Solo) makes it easier to mute/solo audio
* [Mixer](https://tonejs.github.io/examples/#mixer) and [sampler](https://tonejs.github.io/examples/#sampler) examples
* Making type-checking methods static
* [Tone.TransportTimelineSignal](https://tonejs.github.io/docs/TransportTimelineSignal) is a signal which can be scheduled along the Transport
* [Tone.FFT](https://tonejs.github.io/docs/FFT) and [Tone.Waveform](https://tonejs.github.io/docs/Waveform) abstract Tone.Analyser
* [Tone.Meter](https://tonejs.github.io/docs/Meter) returns decibels
* [Tone.Envelope](https://tonejs.github.io/docs/Envelope) uses exponential approach instead of exponential curve for decay and release curves
* [Tone.BufferSource](https://tonejs.github.io/docs/BufferSource) fadeIn/Out can be either "linear" or "exponential" curve
### r10

View file

@ -15,6 +15,7 @@ Tone.js is a Web Audio framework for creating interactive music in the browser.
* download [full](https://tonejs.github.io/build/Tone.js) | [min](https://tonejs.github.io/build/Tone.min.js)
* `npm install tone`
* dev -> `npm install tone@next`
[Full Installation Instructions](https://github.com/Tonejs/Tone.js/wiki/Installation).
@ -30,7 +31,7 @@ synth.triggerAttackRelease("C4", "8n");
#### Tone.Synth
[Tone.Synth](https://tonejs.github.io/docs/#Synth) is a basic synthesizer with a single [oscillator](https://tonejs.github.io/docs/#OmniOscillator) and an [ADSR envelope](https://en.wikipedia.org/wiki/Synthesizer#ADSR_envelope).
[Tone.Synth](https://tonejs.github.io/docs/#Synth) is a basic synthesizer with a single [oscillator](https://tonejs.github.io/docs/#OmniOscillator) and an [ADSR envelope](https://en.wikipedia.org/wiki/Synthesizer#ADSR_envelope).
#### triggerAttackRelease
@ -48,7 +49,7 @@ Tone.js abstracts away the AudioContext time. Instead of defining all values in
### Transport
[Tone.Transport](https://tonejs.github.io/docs/#Transport) is the master timekeeper, allowing for application-wide synchronization and scheduling of sources, signals and events along a shared timeline. Time expressions (like the ones above) are evaluated against the Transport's BPM which can be set like this: `Tone.Transport.bpm.value = 120`.
[Tone.Transport](https://tonejs.github.io/docs/#Transport) is the master timekeeper, allowing for application-wide synchronization and scheduling of sources, signals and events along a shared timeline. Time expressions (like the ones above) are evaluated against the Transport's BPM which can be set like this: `Tone.Transport.bpm.value = 120`.
### Loops
@ -101,7 +102,7 @@ var synth = new Tone.Synth({
synth.triggerAttack("D3", "+1");
```
All instruments are monophonic (one voice) but can be made polyphonic when the constructor is passed in as the second argument to [Tone.PolySynth](https://tonejs.github.io/docs/#PolySynth).
All instruments are monophonic (one voice) but can be made polyphonic when the constructor is passed in as the second argument to [Tone.PolySynth](https://tonejs.github.io/docs/#PolySynth).
```javascript
//a 4 voice Synth
@ -114,7 +115,7 @@ polySynth.triggerAttackRelease(["C4", "E4", "G4", "B4"], "2n");
# Effects
In the above examples, the synthesizer was always connected directly to the [master output](https://tonejs.github.io/docs/#Master), but the output of the synth could also be routed through one (or more) effects before going to the speakers.
In the above examples, the synthesizer was always connected directly to the [master output](https://tonejs.github.io/docs/#Master), but the output of the synth could also be routed through one (or more) effects before going to the speakers.
```javascript
//create a distortion effect
@ -138,7 +139,7 @@ var pwm = new Tone.PWMOscillator("Bb3").toMaster().start();
# Signals
Like the underlying Web Audio API, Tone.js is built with audio-rate signal control over nearly everything. This is a powerful feature which allows for sample-accurate synchronization and scheduling of parameters.
Like the underlying Web Audio API, Tone.js is built with audio-rate signal control over nearly everything. This is a powerful feature which allows for sample-accurate synchronization and scheduling of parameters.
[Read more](https://github.com/Tonejs/Tone.js/wiki/Signals).
@ -158,7 +159,7 @@ Tone.js makes extensive use of the native Web Audio Nodes such as the GainNode a
# Contributing
There are many ways to contribute to Tone.js. Check out [this wiki](https://github.com/Tonejs/Tone.js/wiki/Contributing) if you're interested.
There are many ways to contribute to Tone.js. Check out [this wiki](https://github.com/Tonejs/Tone.js/wiki/Contributing) if you're interested.
If you have questions (or answers) that are not necessarily bugs/issues, please post them to the [forum](https://groups.google.com/forum/#!forum/tonejs).

View file

@ -72,7 +72,7 @@ define(["Tone/core/Tone", "Tone/core/AudioNode"], function (Tone) {
};
/**
* Possible return types of Tone.Analyser.analyse()
* Possible return types of analyser.getValue()
* @enum {String}
*/
Tone.Analyser.Type = {
@ -85,7 +85,7 @@ define(["Tone/core/Tone", "Tone/core/AudioNode"], function (Tone) {
* result as a TypedArray.
* @returns {TypedArray}
*/
Tone.Analyser.prototype.analyse = function(){
Tone.Analyser.prototype.getValue = function(){
if (this._type === Tone.Analyser.Type.FFT){
this._analyser.getFloatFrequencyData(this._buffer);
} else if (this._type === Tone.Analyser.Type.Waveform){
@ -111,7 +111,7 @@ define(["Tone/core/Tone", "Tone/core/AudioNode"], function (Tone) {
});
/**
* The analysis function returned by Tone.Analyser.analyse(), either "fft" or "waveform".
* The analysis function returned by analyser.getValue(), either "fft" or "waveform".
* @memberOf Tone.Analyser#
* @type {String}
* @name type

View file

@ -245,9 +245,9 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
}
//attack
if (this._attackCurve === "linear"){
this._sig.linearRampToValue(velocity, attack, time);
this._sig.linearRampTo(velocity, attack, time);
} else if (this._attackCurve === "exponential"){
this._sig.exponentialRampToValue(velocity, attack, time);
this._sig.targetRampTo(velocity, attack, time);
} else if (attack > 0){
this._sig.setRampPoint(time);
var curve = this._attackCurve;
@ -262,7 +262,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
this._sig.setValueCurveAtTime(curve, time, attack, velocity);
}
//decay
this._sig.exponentialRampToValue(velocity * this.sustain, decay, attack + time);
this._sig.targetRampTo(velocity * this.sustain, decay, attack + time);
return this;
};
@ -280,9 +280,9 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
if (currentValue > 0){
var release = this.toSeconds(this.release);
if (this._releaseCurve === "linear"){
this._sig.linearRampToValue(0, release, time);
this._sig.linearRampTo(0, release, time);
} else if (this._releaseCurve === "exponential"){
this._sig.exponentialRampToValue(0, release, time);
this._sig.targetRampTo(0, release, time);
} else{
var curve = this._releaseCurve;
if (Tone.isArray(curve)){

69
Tone/component/FFT.js Normal file
View file

@ -0,0 +1,69 @@
define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], function (Tone) {
/**
* @class Get the current frequency data of the connected audio source.
* @extends {Tone.AudioNode}
* @param {Number=} size The size of the FFT. Value must be a power of
* two in the range 32 to 32768.
*/
Tone.FFT = function(){
var options = Tone.defaults(arguments, ["size"], Tone.FFT);
options.type = Tone.Analyser.Type.FFT;
Tone.AudioNode.call(this);
/**
* The analyser node.
* @private
* @type {Tone.Analyser}
*/
this._analyser = this.input = this.output = new Tone.Analyser(options);
};
Tone.extend(Tone.FFT, Tone.AudioNode);
/**
* The default values.
* @type {Object}
* @const
*/
Tone.FFT.defaults = {
"size" : 1024
};
/**
* Gets the frequency data of the audio source. Returns the frequency data
* of length [size](#size) as a Float32Array of decibel values.
* @returns {TypedArray}
*/
Tone.FFT.prototype.getValue = function(){
return this._analyser.getValue();
};
/**
* The size of analysis. This must be a power of two in the range 32 to 32768.
* @memberOf Tone.FFT#
* @type {Number}
* @name size
*/
Object.defineProperty(Tone.FFT.prototype, "size", {
get : function(){
return this._analyser.size;
},
set : function(size){
this._analyser.size = size;
}
});
/**
* Clean up.
* @return {Tone.FFT} this
*/
Tone.FFT.prototype.dispose = function(){
Tone.AudioNode.prototype.dispose.call(this);
this._analyser.dispose();
this._analyser = null;
};
return Tone.FFT;
});
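
A minimal sketch of using the new Tone.FFT above (assuming Tone.js is loaded globally via a script tag): `getValue()` returns one decibel value per frequency bin, so the loop below simply finds the loudest bin.

```javascript
//route an oscillator through the FFT and on to the speakers
var fft = new Tone.FFT(1024); //size must be a power of two between 32 and 32768
var osc = new Tone.Oscillator(440).connect(fft).toMaster().start();

function report(){
	//a Float32Array of decibel values, one per frequency bin
	var values = fft.getValue();
	var loudestBin = 0;
	for (var i = 1; i < values.length; i++){
		if (values[i] > values[loudestBin]){
			loudestBin = i;
		}
	}
	console.log("loudest bin: " + loudestBin + " (" + values[loudestBin].toFixed(1) + " dB)");
	requestAnimationFrame(report);
}
requestAnimationFrame(report);
```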

View file

@ -9,36 +9,26 @@ define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], fun
*
* @constructor
* @extends {Tone.AudioNode}
* @param {String} type Either "level" or "signal".
* @param {Number} smoothing The amount of smoothing applied between frames.
* @example
* var meter = new Tone.Meter();
* var mic = new Tone.UserMedia().open();
* //connect mic to the meter
* mic.connect(meter);
* //the current level of the mic input
* var level = meter.value;
* //the current level of the mic input in decibels
* var level = meter.getLevel();
*/
Tone.Meter = function(){
var options = Tone.defaults(arguments, ["type", "smoothing"], Tone.Meter);
var options = Tone.defaults(arguments, ["smoothing"], Tone.Meter);
Tone.AudioNode.call(this);
/**
* The type of the meter, either "level" or "signal".
* A "level" meter will return the volume level (rms) of the
* input signal and a "signal" meter will return
* the signal value of the input.
* @type {String}
*/
this.type = options.type;
/**
* The analyser node which computes the levels.
* @private
* @type {Tone.Analyser}
*/
this.input = this.output = this._analyser = new Tone.Analyser("waveform", 512);
this.input = this.output = this._analyser = new Tone.Analyser("waveform", 1024);
/**
* The amount of carryover between the current and last frame.
@ -46,26 +36,10 @@ define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], fun
* @type {Number}
*/
this.smoothing = options.smoothing;
/**
* The last computed value
* @type {Number}
* @private
*/
this._lastValue = 0;
};
Tone.extend(Tone.Meter, Tone.AudioNode);
/**
* @private
* @enum {String}
*/
Tone.Meter.Type = {
Level : "level",
Signal : "signal"
};
/**
* The defaults
* @type {Object}
@ -73,39 +47,44 @@ define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], fun
* @const
*/
Tone.Meter.defaults = {
"smoothing" : 0.8,
"type" : Tone.Meter.Type.Level
"smoothing" : 0.8
};
/**
* The current value of the meter. A value of 1 is
* "unity".
* Get the current decibel value of the incoming signal
* @returns {Decibels}
*/
Tone.Meter.prototype.getLevel = function(){
this._analyser.type = "fft";
var values = this._analyser.getValue();
var offset = 28; // normalizes most signal levels
// TODO: compute loudness from FFT
return Math.max.apply(this, values) + offset;
};
/**
* Get the signal value of the incoming signal
* @returns {Number}
*/
Tone.Meter.prototype.getValue = function(){
this._analyser.type = "waveform";
var value = this._analyser.getValue();
return value[0];
};
/**
* A value from 0 -> 1 where 0 represents no time averaging with the last analysis frame.
* @memberOf Tone.Meter#
* @type {Number}
* @name value
* @name smoothing
* @readOnly
*/
Object.defineProperty(Tone.Meter.prototype, "value", {
Object.defineProperty(Tone.Meter.prototype, "smoothing", {
get : function(){
var signal = this._analyser.analyse();
if (this.type === Tone.Meter.Type.Level){
//rms
var sum = 0;
for (var i = 0; i < signal.length; i++){
sum += Math.pow(signal[i], 2);
}
var rms = Math.sqrt(sum / signal.length);
//smooth it
rms = Math.max(rms, this._lastValue * this.smoothing);
this._lastValue = rms;
//scale it
var unity = 0.35;
var val = rms / unity;
//scale the output curve
return Math.sqrt(val);
} else {
return signal[0];
}
return this._analyser.smoothing;
},
set : function(val){
this._analyser.smoothing = val;
},
});
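
A short sketch of the reworked Meter API above (assuming Tone.js is loaded and the user grants microphone access): `getLevel()` switches the internal analyser to "fft" and returns decibels, while `getValue()` returns the instantaneous signal value.

```javascript
var meter = new Tone.Meter();
var mic = new Tone.UserMedia();
mic.connect(meter);
//prompts the user for microphone access
mic.open();

setInterval(function(){
	//the level of the incoming signal in decibels
	console.log("level (dB): " + meter.getLevel());
	//the instantaneous sample value of the incoming signal
	console.log("signal: " + meter.getValue());
}, 100);
```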

View file

@ -0,0 +1,68 @@
define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], function (Tone) {
/**
* @class Get the current waveform data of the connected audio source.
* @extends {Tone.AudioNode}
* @param {Number=} size The size of the FFT. Value must be a power of
* two in the range 32 to 32768.
*/
Tone.Waveform = function(){
var options = Tone.defaults(arguments, ["size"], Tone.Waveform);
options.type = Tone.Analyser.Type.Waveform;
Tone.AudioNode.call(this);
/**
* The analyser node.
* @private
* @type {Tone.Analyser}
*/
this._analyser = this.input = this.output = new Tone.Analyser(options);
};
Tone.extend(Tone.Waveform, Tone.AudioNode);
/**
* The default values.
* @type {Object}
* @const
*/
Tone.Waveform.defaults = {
"size" : 1024
};
/**
* Gets the waveform of the audio source. Returns the waveform data
* of length [size](#size) as a Float32Array with values between -1 and 1.
* @returns {TypedArray}
*/
Tone.Waveform.prototype.getValue = function(){
return this._analyser.getValue();
};
/**
* The size of analysis. This must be a power of two in the range 32 to 32768.
* @memberOf Tone.Waveform#
* @type {Number}
* @name size
*/
Object.defineProperty(Tone.Waveform.prototype, "size", {
get : function(){
return this._analyser.size;
},
set : function(size){
this._analyser.size = size;
}
});
/**
* Clean up.
* @return {Tone.Waveform} this
*/
Tone.Waveform.prototype.dispose = function(){
Tone.AudioNode.prototype.dispose.call(this);
this._analyser.dispose();
this._analyser = null;
};
return Tone.Waveform;
});
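
A companion sketch for Tone.Waveform (assuming Tone.js is loaded; the `<canvas id="scope">` element is hypothetical): the time-domain samples from `getValue()` are drawn as a simple oscilloscope.

```javascript
var waveform = new Tone.Waveform(1024);
Tone.Master.connect(waveform); //tap whatever is routed to the speakers

var canvas = document.getElementById("scope"); //hypothetical <canvas id="scope">
var context = canvas.getContext("2d");

function draw(){
	requestAnimationFrame(draw);
	//a Float32Array of samples between -1 and 1
	var values = waveform.getValue();
	context.clearRect(0, 0, canvas.width, canvas.height);
	context.beginPath();
	for (var i = 0; i < values.length; i++){
		var x = (i / values.length) * canvas.width;
		var y = (0.5 - values[i] / 2) * canvas.height;
		if (i === 0){
			context.moveTo(x, y);
		} else {
			context.lineTo(x, y);
		}
	}
	context.stroke();
}
draw();
```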

View file

@ -1,7 +1,7 @@
define(["Tone/core/Tone", "Tone/core/Context"], function (Tone) {
/**
* @class Tone.AudioNode is a base class for classes which process audio.
* @class Tone.AudioNode is the base class for classes which process audio.
* AudioNodes have inputs and outputs.
* @param {AudioContext=} context The audio context to use with the class
* @extends {Tone}
@ -26,7 +26,7 @@ define(["Tone/core/Tone", "Tone/core/Context"], function (Tone) {
/**
* Get the audio context belonging to this instance.
* @type {AudioNode}
* @type {Tone.Context}
* @memberOf Tone.AudioNode#
* @name context
* @readOnly
@ -144,6 +144,7 @@ define(["Tone/core/Tone", "Tone/core/Context"], function (Tone) {
* node.chain(effect, panVol, Tone.Master);
* @param {...AudioParam|Tone|AudioNode} nodes
* @returns {Tone.AudioNode} this
* @private
*/
Tone.AudioNode.prototype.chain = function(){
var currentUnit = this;
@ -159,6 +160,7 @@ define(["Tone/core/Tone", "Tone/core/Context"], function (Tone) {
* connect the output of this node to the rest of the nodes in parallel.
* @param {...AudioParam|Tone|AudioNode} nodes
* @returns {Tone.AudioNode} this
* @private
*/
Tone.AudioNode.prototype.fan = function(){
for (var i = 0; i < arguments.length; i++){

View file

@ -168,7 +168,9 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* freq.setValueAtTime("G4", "+1");
*/
Tone.Param.prototype.setValueAtTime = function(value, time){
this._param.setValueAtTime(this._fromUnits(value), this.toSeconds(time));
time = this.toSeconds(time);
Tone.isPast(time);
this._param.setValueAtTime(this._fromUnits(value), time);
return this;
};
@ -182,12 +184,12 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
*/
Tone.Param.prototype.setRampPoint = function(now){
now = Tone.defaultArg(now, this.now());
this.cancelAndHoldAtTime(this.context.currentTime);
var currentVal = this._param.value;
// exponentialRampToValueAtTime cannot ever ramp from or to 0
// More info: https://bugzilla.mozilla.org/show_bug.cgi?id=1125600#c2
if (currentVal === 0){
currentVal = this._minOutput;
}
// cancel and hold at the given time
this._param.setValueAtTime(currentVal, now);
return this;
};
@ -202,7 +204,9 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
*/
Tone.Param.prototype.linearRampToValueAtTime = function(value, endTime){
value = this._fromUnits(value);
this._param.linearRampToValueAtTime(value, this.toSeconds(endTime));
endTime = this.toSeconds(endTime);
Tone.isPast(endTime);
this._param.linearRampToValueAtTime(value, endTime);
return this;
};
@ -217,7 +221,9 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
Tone.Param.prototype.exponentialRampToValueAtTime = function(value, endTime){
value = this._fromUnits(value);
value = Math.max(this._minOutput, value);
this._param.exponentialRampToValueAtTime(value, this.toSeconds(endTime));
endTime = this.toSeconds(endTime);
Tone.isPast(endTime);
this._param.exponentialRampToValueAtTime(value, endTime);
return this;
};
@ -233,9 +239,9 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* @returns {Tone.Param} this
* @example
* //exponentially ramp to the value 2 over 4 seconds.
* signal.exponentialRampToValue(2, 4);
* signal.exponentialRampTo(2, 4);
*/
Tone.Param.prototype.exponentialRampToValue = function(value, rampTime, startTime){
Tone.Param.prototype.exponentialRampTo = function(value, rampTime, startTime){
startTime = this.toSeconds(startTime);
this.setRampPoint(startTime);
this.exponentialRampToValueAtTime(value, startTime + this.toSeconds(rampTime));
@ -254,15 +260,46 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* @returns {Tone.Param} this
* @example
* //linearly ramp to the value 4 over 3 seconds.
* signal.linearRampToValue(4, 3);
* signal.linearRampTo(4, 3);
*/
Tone.Param.prototype.linearRampToValue = function(value, rampTime, startTime){
Tone.Param.prototype.linearRampTo = function(value, rampTime, startTime){
startTime = this.toSeconds(startTime);
this.setRampPoint(startTime);
this.linearRampToValueAtTime(value, startTime + this.toSeconds(rampTime));
return this;
};
/**
* Convert between Time and time constant. The time
* constant returned can be used in setTargetAtTime.
* @param {Time} time The time to convert
* @return {Number} The time constant which reaches over 99% of
* the way towards the target value in the given time.
*/
Tone.Param.prototype.getTimeConstant = function(time){
return Math.log(this.toSeconds(time)+1)/Math.log(200);
};
/**
* Start exponentially approaching the target value at the given time. Since it
* is an exponential approach it will continue approaching after the ramp duration. The
* rampTime is the time that it takes to reach over 99% of the way towards the value.
* @param {number} value The value to ramp to.
* @param {Time} rampTime The time it takes the
* value to ramp from its current value to over 99% of the target.
* @param {Time} [startTime=now] When the ramp should start.
* @returns {Tone.Param} this
* @example
* //exponentially approach the value 2 over 4 seconds.
* signal.targetRampTo(2, 4);
*/
Tone.Param.prototype.targetRampTo = function(value, rampTime, startTime){
startTime = this.toSeconds(startTime);
this.setRampPoint(startTime);
this.setTargetAtTime(value, startTime, this.getTimeConstant(rampTime));
return this;
};
/**
* Start exponentially approaching the target value at the given time with
* a rate having the given time constant.
@ -314,6 +351,31 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
return this;
};
/**
* This is similar to [cancelScheduledValues](#cancelScheduledValues) except
* it holds the automated value at cancelTime until the next automated event.
* @param {Time} cancelTime
* @returns {Tone.Param} this
*/
Tone.Param.prototype.cancelAndHoldAtTime = function(cancelTime){
cancelTime = this.toSeconds(cancelTime);
if (this._param.cancelAndHoldAtTime){
this._param.cancelAndHoldAtTime(cancelTime);
} else {
//fallback for unsupported browsers
//can't cancel and hold at any time in the future
//just do it immediately for gapless automation curves
var now = this.context.currentTime;
this._param.cancelScheduledValues(now);
var currentVal = this._param.value;
if (currentVal === 0){
currentVal = this._minOutput;
}
this._param.setValueAtTime(currentVal, now + this.sampleTime);
}
return this;
};
/**
* Ramps to the given value over the duration of the rampTime.
* Automatically selects the best ramp type (exponential or linear)
@ -333,11 +395,11 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* signal.rampTo(0, 10, 5)
*/
Tone.Param.prototype.rampTo = function(value, rampTime, startTime){
rampTime = Tone.defaultArg(rampTime, 0);
rampTime = Tone.defaultArg(rampTime, 0.1);
if (this.units === Tone.Type.Frequency || this.units === Tone.Type.BPM || this.units === Tone.Type.Decibels){
this.exponentialRampToValue(value, rampTime, startTime);
this.exponentialRampTo(value, rampTime, startTime);
} else {
this.linearRampToValue(value, rampTime, startTime);
this.linearRampTo(value, rampTime, startTime);
}
return this;
};
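
A sketch of the renamed ramp helpers above, applied to an oscillator's frequency signal (Tone.Signal shares the Tone.Param interface; assumes Tone.js is loaded).

```javascript
var osc = new Tone.Oscillator(220, "sine").toMaster().start();

//linear ramp: reach 440 Hz over 2 seconds, starting now
osc.frequency.linearRampTo(440, 2);

//exponential approach: within ~3 seconds the value is over 99% of the way to 880 Hz,
//starting 4 seconds from now
osc.frequency.targetRampTo(880, 3, "+4");

//rampTo picks linear or exponential automatically based on the signal's units
osc.frequency.rampTo(110, 1, "+8");
```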

View file

@ -4,8 +4,8 @@ define(["Tone/core/Tone"], function (Tone) {
/**
* @class A Timeline class for scheduling and maintaining state
* along a timeline. All events must have a "time" property.
* Internally, events are stored in time order for fast
* along a timeline. All events must have a "time" property.
* Internally, events are stored in time order for fast
* retrieval.
* @extends {Tone}
* @param {Positive} [memory=Infinity] The number of previous events that are retained.
@ -23,12 +23,19 @@ define(["Tone/core/Tone"], function (Tone) {
this._timeline = [];
/**
* An array of items to remove from the list.
* An array of items to remove from the list.
* @type {Array}
* @private
*/
this._toRemove = [];
/**
* An array of items to add from the list (once it's done iterating)
* @type {Array}
* @private
*/
this._toAdd = [];
/**
* Flag if the timeline is mid iteration
* @private
@ -70,8 +77,8 @@ define(["Tone/core/Tone"], function (Tone) {
/**
* Insert an event object onto the timeline. Events must have a "time" attribute.
* @param {Object} event The event object to insert into the
* timeline.
* @param {Object} event The event object to insert into the
* timeline.
* @returns {Tone.Timeline} this
*/
Tone.Timeline.prototype.add = function(event){
@ -79,16 +86,16 @@ define(["Tone/core/Tone"], function (Tone) {
if (Tone.isUndef(event.time)){
throw new Error("Tone.Timeline: events must have a time attribute");
}
if (this._timeline.length){
if (this._iterating){
this._toAdd.push(event);
} else {
var index = this._search(event.time);
this._timeline.splice(index + 1, 0, event);
} else {
this._timeline.push(event);
}
//if the length is more than the memory, remove the previous ones
if (this.length > this.memory){
var diff = this.length - this.memory;
this._timeline.splice(0, diff);
//if the length is more than the memory, remove the previous ones
if (this.length > this.memory){
var diff = this.length - this.memory;
this._timeline.splice(0, diff);
}
}
return this;
};
@ -113,12 +120,12 @@ define(["Tone/core/Tone"], function (Tone) {
/**
* Get the nearest event whose time is less than or equal to the given time.
* @param {Number} time The time to query.
* @param {String} comparitor Which value in the object to compare
* @param {String} comparator Which value in the object to compare
* @returns {Object} The event object set after that time.
*/
Tone.Timeline.prototype.get = function(time, comparitor){
comparitor = Tone.defaultArg(comparitor, "time");
var index = this._search(time, comparitor);
Tone.Timeline.prototype.get = function(time, comparator){
comparator = Tone.defaultArg(comparator, "time");
var index = this._search(time, comparator);
if (index !== -1){
return this._timeline[index];
} else {
@ -145,12 +152,12 @@ define(["Tone/core/Tone"], function (Tone) {
/**
* Get the event which is scheduled after the given time.
* @param {Number} time The time to query.
* @param {String} comparitor Which value in the object to compare
* @param {String} comparator Which value in the object to compare
* @returns {Object} The event object after the given time
*/
Tone.Timeline.prototype.getAfter = function(time, comparitor){
comparitor = Tone.defaultArg(comparitor, "time");
var index = this._search(time, comparitor);
Tone.Timeline.prototype.getAfter = function(time, comparator){
comparator = Tone.defaultArg(comparator, "time");
var index = this._search(time, comparator);
if (index + 1 < this._timeline.length){
return this._timeline[index + 1];
} else {
@ -161,17 +168,17 @@ define(["Tone/core/Tone"], function (Tone) {
/**
* Get the event before the event at the given time.
* @param {Number} time The time to query.
* @param {String} comparitor Which value in the object to compare
* @param {String} comparator Which value in the object to compare
* @returns {Object} The event object before the given time
*/
Tone.Timeline.prototype.getBefore = function(time, comparitor){
comparitor = Tone.defaultArg(comparitor, "time");
Tone.Timeline.prototype.getBefore = function(time, comparator){
comparator = Tone.defaultArg(comparator, "time");
var len = this._timeline.length;
//if it's after the last item, return the last item
if (len > 0 && this._timeline[len - 1][comparitor] < time){
if (len > 0 && this._timeline[len - 1][comparator] < time){
return this._timeline[len - 1];
}
var index = this._search(time, comparitor);
var index = this._search(time, comparator);
if (index - 1 >= 0){
return this._timeline[index - 1];
} else {
@ -219,11 +226,9 @@ define(["Tone/core/Tone"], function (Tone) {
* @returns {Tone.Timeline} this
*/
Tone.Timeline.prototype.cancelBefore = function(time){
if (this._timeline.length){
var index = this._search(time);
if (index >= 0){
this._timeline = this._timeline.slice(index + 1);
}
var index = this._search(time);
if (index >= 0){
this._timeline = this._timeline.slice(index + 1);
}
return this;
};
@ -243,21 +248,24 @@ define(["Tone/core/Tone"], function (Tone) {
};
/**
* Does a binary serach on the timeline array and returns the
* Does a binary search on the timeline array and returns the
* nearest event index whose time is after or equal to the given time.
* If a time is searched before the first index in the timeline, -1 is returned.
* If the time is after the end, the index of the last item is returned.
* @param {Number} time
* @param {String} comparitor Which value in the object to compare
* @return {Number} the index in the timeline array
* @param {Number} time
* @param {String} comparator Which value in the object to compare
* @return {Number} the index in the timeline array
* @private
*/
Tone.Timeline.prototype._search = function(time, comparitor){
comparitor = Tone.defaultArg(comparitor, "time");
Tone.Timeline.prototype._search = function(time, comparator){
if (this._timeline.length === 0){
return -1;
}
comparator = Tone.defaultArg(comparator, "time");
var beginning = 0;
var len = this._timeline.length;
var end = len;
if (len > 0 && this._timeline[len - 1][comparitor] <= time){
if (len > 0 && this._timeline[len - 1][comparator] <= time){
return len - 1;
}
while (beginning < end){
@ -265,34 +273,34 @@ define(["Tone/core/Tone"], function (Tone) {
var midPoint = Math.floor(beginning + (end - beginning) / 2);
var event = this._timeline[midPoint];
var nextEvent = this._timeline[midPoint + 1];
if (event[comparitor] === time){
if (event[comparator] === time){
//choose the last one that has the same time
for (var i = midPoint; i < this._timeline.length; i++){
var testEvent = this._timeline[i];
if (testEvent[comparitor] === time){
if (testEvent[comparator] === time){
midPoint = i;
}
}
return midPoint;
} else if (event[comparitor] < time && nextEvent[comparitor] > time){
} else if (event[comparator] < time && nextEvent[comparator] > time){
return midPoint;
} else if (event[comparitor] > time){
} else if (event[comparator] > time){
//search lower
end = midPoint;
} else {
//search upper
beginning = midPoint + 1;
}
}
}
return -1;
};
/**
* Internal iterator. Applies extra safety checks for
* removing items from the array.
* @param {Function} callback
* @param {Number=} lowerBound
* @param {Number=} upperBound
* Internal iterator. Applies extra safety checks for
* removing items from the array.
* @param {Function} callback
* @param {Number=} lowerBound
* @param {Number=} upperBound
* @private
*/
Tone.Timeline.prototype._iterate = function(callback, lowerBound, upperBound){
@ -303,15 +311,14 @@ define(["Tone/core/Tone"], function (Tone) {
callback.call(this, this._timeline[i]);
}
this._iterating = false;
if (this._toRemove.length > 0){
for (var j = 0; j < this._toRemove.length; j++){
var index = this._timeline.indexOf(this._toRemove[j]);
if (index !== -1){
this._timeline.splice(index, 1);
}
}
this._toRemove = [];
}
this._toRemove.forEach(function(event){
this.remove(event);
}.bind(this));
this._toRemove = [];
this._toAdd.forEach(function(event){
this.add(event);
}.bind(this));
this._toAdd = [];
};
/**
@ -353,7 +360,7 @@ define(["Tone/core/Tone"], function (Tone) {
};
/**
* Iterate over everything in the array at or after the given time. Similar to
* Iterate over everything in the array at or after the given time. Similar to
* forEachAfter, but includes the item(s) at the given time.
* @param {Number} time The time to check if items are before
* @param {Function} callback The callback to invoke with every item
@ -383,7 +390,7 @@ define(["Tone/core/Tone"], function (Tone) {
this._iterate(function(event){
if (event.time === time){
callback.call(this, event);
}
}
}, 0, upperBound);
}
return this;
@ -397,8 +404,9 @@ define(["Tone/core/Tone"], function (Tone) {
Tone.prototype.dispose.call(this);
this._timeline = null;
this._toRemove = null;
this._toAdd = null;
return this;
};
return Tone.Timeline;
});
});
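
A small standalone sketch of the Tone.Timeline API shown above (assuming Tone.js is loaded): events are plain objects with a "time" attribute, and the optional comparator argument selects which property the searches compare against.

```javascript
var timeline = new Tone.Timeline();

timeline.add({ "time" : 0, "note" : "C4" });
timeline.add({ "time" : 0.5, "note" : "E4" });
timeline.add({ "time" : 1, "note" : "G4" });

//the nearest event at or before 0.7
console.log(timeline.get(0.7).note); //=> "E4"

//the event scheduled after 0.5
console.log(timeline.getAfter(0.5).note); //=> "G4"

//every event at exactly time 1
timeline.forEachAtTime(1, function(event){
	console.log(event.note); //=> "G4"
});
```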

View file

@ -508,6 +508,16 @@ define(function(){
return Tone.context.now();
};
/**
* Logs a warning in the console if the scheduled time has already passed.
* @param {Time} time The time to check
*/
Tone.isPast = function(time){
if (time < Tone.context.currentTime){
console.warn("Time '" + time + "' is in the past. Scheduled time must be ≥ AudioContext.currentTime");
}
};
///////////////////////////////////////////////////////////////////////////
// INHERITANCE
///////////////////////////////////////////////////////////////////////////
@ -680,7 +690,7 @@ define(function(){
* @type {String}
* @static
*/
Tone.version = "r11";
Tone.version = "r11-dev";
// allow optional silencing of this log
if (!window.TONE_SILENCE_VERSION_LOGGING) {

View file

@ -1,5 +1,6 @@
define(["Tone/core/Tone", "Tone/core/Clock", "Tone/type/Type", "Tone/core/Timeline",
"Tone/core/Emitter", "Tone/core/Gain", "Tone/core/IntervalTimeline"],
define(["Tone/core/Tone", "Tone/core/Clock", "Tone/type/Type", "Tone/core/Timeline",
"Tone/core/Emitter", "Tone/core/Gain", "Tone/core/IntervalTimeline",
"Tone/core/TransportRepeatEvent", "Tone/core/TransportEvent"],
function(Tone){
"use strict";
@ -10,10 +11,10 @@ function(Tone){
* Tone.Transport timing events pass in the exact time of the scheduled event
* in the argument of the callback function. Pass that time value to the object
* you're scheduling. <br><br>
* A single transport is created for you when the library is initialized.
* A single transport is created for you when the library is initialized.
* <br><br>
* The transport emits the events: "start", "stop", "pause", and "loop" which are
* called with the time of that event as the argument.
* called with the time of that event as the argument.
*
* @extends {Tone.Emitter}
* @singleton
@ -38,20 +39,20 @@ function(Tone){
// LOOPING
//////////////////////////////////////////////////////////////////////
/**
/**
* If the transport loops or not.
* @type {boolean}
*/
this.loop = false;
/**
/**
* The loop start position in ticks
* @type {Ticks}
* @private
*/
this._loopStart = 0;
/**
/**
* The loop end position in ticks
* @type {Ticks}
* @private
@ -76,14 +77,14 @@ function(Tone){
* @type {Tone.Clock}
*/
this._clock = new Tone.Clock({
"callback" : this._processTick.bind(this),
"callback" : this._processTick.bind(this),
"frequency" : 0,
});
this._bindClockEvents();
/**
* The Beats Per Minute of the Transport.
* The Beats Per Minute of the Transport.
* @type {BPM}
* @signal
* @example
@ -100,7 +101,7 @@ function(Tone){
/**
* The time signature, or more accurately the numerator
* of the time signature over a denominator of 4.
* of the time signature over a denominator of 4.
* @type {Number}
* @private
*/
@ -117,13 +118,6 @@ function(Tone){
*/
this._scheduledEvents = {};
/**
* The event ID counter
* @type {Number}
* @private
*/
this._eventID = 0;
/**
* The scheduled events.
* @type {Tone.Timeline}
@ -139,15 +133,8 @@ function(Tone){
this._repeatedEvents = new Tone.IntervalTimeline();
/**
* Events that occur once
* @type {Array}
* @private
*/
this._onceEvents = new Tone.Timeline();
/**
* All of the synced Signals
* @private
* @private
* @type {Array}
*/
this._syncedSignals = [];
@ -171,7 +158,6 @@ function(Tone){
this._swingAmount = 0;
}.bind(this));
};
Tone.extend(Tone.Transport, Tone.Emitter);
@ -204,14 +190,14 @@ function(Tone){
Tone.Transport.prototype._processTick = function(tickTime){
var ticks = this._clock.ticks;
//handle swing
if (this._swingAmount > 0 &&
if (this._swingAmount > 0 &&
ticks % this._ppq !== 0 && //not on a downbeat
ticks % (this._swingTicks * 2) !== 0){
//add some swing
var progress = (ticks % (this._swingTicks * 2)) / (this._swingTicks * 2);
var amount = Math.sin((progress) * Math.PI) * this._swingAmount;
tickTime += Tone.Time(this._swingTicks * 2/3, "i") * amount;
}
}
//do the loop test
if (this.loop){
if (ticks >= this._loopEnd){
@ -222,23 +208,9 @@ function(Tone){
this.emit("loop", tickTime);
}
}
//process the single occurrence events
this._onceEvents.forEachBefore(ticks, function(event){
event.callback(tickTime);
//remove the event
delete this._scheduledEvents[event.id.toString()];
}.bind(this));
//and clear the single occurrence timeline
this._onceEvents.cancelBefore(ticks);
//fire the next tick events if their time has come
//invoke the timeline events scheduled on this tick
this._timeline.forEachAtTime(ticks, function(event){
event.callback(tickTime);
});
//process the repeated events
this._repeatedEvents.forEachAtTime(ticks, function(event){
if ((ticks - event.time) % event.interval === 0){
event.callback(tickTime);
}
event.invoke(tickTime);
});
};
@ -250,7 +222,7 @@ function(Tone){
* Schedule an event along the timeline.
* @param {Function} callback The callback to be invoked at the time.
* @param {TransportTime} time The time to invoke the callback at.
* @return {Number} The id of the event which can be used for canceling the event.
* @return {Number} The id of the event which can be used for canceling the event.
* @example
* //trigger the callback when the Transport reaches the desired time
* Tone.Transport.schedule(function(time){
@ -258,75 +230,55 @@ function(Tone){
* }, "128i");
*/
Tone.Transport.prototype.schedule = function(callback, time){
var event = {
var event = new Tone.TransportEvent(this, {
"time" : this.toTicks(time),
"callback" : callback
};
var id = this._eventID++;
this._scheduledEvents[id.toString()] = {
"event" : event,
"timeline" : this._timeline
};
this._timeline.add(event);
return id;
});
return this._addEvent(event, this._timeline);
};
/**
* Schedule a repeated event along the timeline. The event will fire
* at the `interval` starting at the `startTime` and for the specified
* `duration`.
* `duration`.
* @param {Function} callback The callback to invoke.
* @param {Time} interval The duration between successive
* callbacks.
* callbacks. Must be a positive number.
* @param {TimelinePosition=} startTime When along the timeline the events should
* start being invoked.
* @param {Time} [duration=Infinity] How long the event should repeat.
* @param {Time} [duration=Infinity] How long the event should repeat.
* @return {Number} The ID of the scheduled event. Use this to cancel
* the event.
* the event.
* @example
* //a callback invoked every eighth note after the first measure
* Tone.Transport.scheduleRepeat(callback, "8n", "1m");
*/
Tone.Transport.prototype.scheduleRepeat = function(callback, interval, startTime, duration){
if (interval <= 0){
throw new Error("Tone.Transport: repeat events must have an interval larger than 0");
}
var event = {
var event = new Tone.TransportRepeatEvent(this, {
"callback" : callback,
"interval" : this.toTicks(interval),
"time" : this.toTicks(startTime),
"duration" : this.toTicks(Tone.defaultArg(duration, Infinity)),
"interval" : this.toTicks(interval),
"callback" : callback
};
var id = this._eventID++;
this._scheduledEvents[id.toString()] = {
"event" : event,
"timeline" : this._repeatedEvents
};
this._repeatedEvents.add(event);
return id;
});
//kick it off if the Transport is started
return this._addEvent(event, this._repeatedEvents);
};
/**
* Schedule an event that will be removed after it is invoked.
* Note that if the given time is less than the current transport time,
* the event will be invoked immediately.
* Schedule an event that will be removed after it is invoked.
* Note that if the given time is less than the current transport time,
* the event will be invoked immediately.
* @param {Function} callback The callback to invoke once.
* @param {TransportTime} time The time the callback should be invoked.
* @returns {Number} The ID of the scheduled event.
* @returns {Number} The ID of the scheduled event.
*/
Tone.Transport.prototype.scheduleOnce = function(callback, time){
var id = this._eventID++;
var event = {
var event = new Tone.TransportEvent(this, {
"time" : this.toTicks(time),
"callback" : callback,
"id" : id
};
this._scheduledEvents[id.toString()] = {
"event" : event,
"timeline" : this._onceEvents
};
this._onceEvents.add(event);
return id;
"once" : true
});
return this._addEvent(event, this._timeline);
};
/**
@ -338,24 +290,41 @@ function(Tone){
if (this._scheduledEvents.hasOwnProperty(eventId)){
var item = this._scheduledEvents[eventId.toString()];
item.timeline.remove(item.event);
item.event.dispose();
delete this._scheduledEvents[eventId.toString()];
}
return this;
};
/**
* Add an event to the correct timeline. Keep track of the
* timeline it was added to.
* @param {Tone.TransportEvent} event
* @param {Tone.Timeline} timeline
* @returns {Number} the event id which was just added
* @private
*/
Tone.Transport.prototype._addEvent = function(event, timeline){
this._scheduledEvents[event.id.toString()] = {
"event" : event,
"timeline" : timeline
};
timeline.add(event);
return event.id;
};
/**
* Remove scheduled events from the timeline after
* the given time. Repeated events will be removed
* if their startTime is after the given time
* @param {TransportTime} [after=0] Clear all events after
* this time.
* this time.
* @returns {Tone.Transport} this
*/
Tone.Transport.prototype.cancel = function(after){
after = Tone.defaultArg(after, 0);
after = this.toTicks(after);
this._timeline.cancel(after);
this._onceEvents.cancel(after);
this._repeatedEvents.cancel(after);
return this;
};
@ -402,7 +371,7 @@ function(Tone){
* @param {TransportTime=} offset The timeline offset to start the transport.
* @returns {Tone.Transport} this
* @example
* //start the transport in one second starting at beginning of the 5th measure.
* //start the transport in one second starting at the beginning of the 5th measure.
* Tone.Transport.start("+1", "4:0:0");
*/
Tone.Transport.prototype.start = function(time, offset){
@ -416,7 +385,7 @@ function(Tone){
/**
* Stop the transport and all sources synced to the transport.
* @param {Time} [time=now] The time when the transport should stop.
* @param {Time} [time=now] The time when the transport should stop.
* @returns {Tone.Transport} this
* @example
* Tone.Transport.stop();
@ -457,7 +426,7 @@ function(Tone){
///////////////////////////////////////////////////////////////////////////////
/**
* The time signature as just the numerator over 4.
* The time signature as just the numerator over 4.
* For example 4/4 would be just 4 and 6/8 would be 3.
* @memberOf Tone.Transport#
* @type {Number|Array}
@ -514,9 +483,9 @@ function(Tone){
});
/**
* Set the loop start and stop at the same time.
* @param {TransportTime} startPosition
* @param {TransportTime} endPosition
* Set the loop start and stop at the same time.
* @param {TransportTime} startPosition
* @param {TransportTime} endPosition
* @returns {Tone.Transport} this
* @example
* //loop over the first measure
@ -530,7 +499,7 @@ function(Tone){
};
/**
* The swing value. Between 0-1 where 1 equal to
* The swing value. Between 0-1 where 1 equal to
* the note + half the subdivision.
* @memberOf Tone.Transport#
* @type {NormalRange}
@ -547,10 +516,10 @@ function(Tone){
});
/**
* Set the subdivision which the swing will be applied to.
* The default value is an 8th note. Value must be less
* Set the subdivision which the swing will be applied to.
* The default value is an 8th note. Value must be less
* than a quarter note.
*
*
* @memberOf Tone.Transport#
* @type {Time}
* @name swingSubdivision
@ -566,7 +535,7 @@ function(Tone){
/**
* The Transport's position in Bars:Beats:Sixteenths.
* Setting the value will jump to that position right away.
* Setting the value will jump to that position right away.
* @memberOf Tone.Transport#
* @type {BarsBeatsSixteenths}
* @name position
@ -583,7 +552,7 @@ function(Tone){
/**
* The Transport's position in seconds
* Setting the value will jump to that position right away.
* Setting the value will jump to that position right away.
* @memberOf Tone.Transport#
* @type {Seconds}
* @name seconds
@ -600,7 +569,7 @@ function(Tone){
/**
* The Transport's loop position as a normalized value. Always
* returns 0 if the transport if loop is not true.
* returns 0 if the transport's loop is not true.
* @memberOf Tone.Transport#
* @name progress
* @type {NormalRange}
@ -617,7 +586,7 @@ function(Tone){
/**
* The transports current tick position.
*
*
* @memberOf Tone.Transport#
* @type {Ticks}
* @name ticks
@ -645,9 +614,9 @@ function(Tone){
/**
* Pulses Per Quarter note. This is the smallest resolution
* the Transport timing supports. This should be set once
* on initialization and not set again. Changing this value
* after other objects have been created can cause problems.
*
* on initialization and not set again. Changing this value
* after other objects have been created can cause problems.
*
* @memberOf Tone.Transport#
* @type {Number}
* @name PPQ
@ -716,14 +685,14 @@ function(Tone){
};
/**
* Attaches the signal to the tempo control signal so that
* Attaches the signal to the tempo control signal so that
* any changes in the tempo will change the signal in the same
* ratio.
*
* @param {Tone.Signal} signal
* ratio.
*
* @param {Tone.Signal} signal
* @param {number=} ratio Optionally pass in the ratio between
* the two signals. Otherwise it will be computed
* based on their current values.
* based on their current values.
* @returns {Tone.Transport} this
*/
Tone.Transport.prototype.syncSignal = function(signal, ratio){
@ -747,9 +716,9 @@ function(Tone){
};
/**
* Unsyncs a previously synced signal from the transport's control.
* Unsyncs a previously synced signal from the transport's control.
* See Tone.Transport.syncSignal.
* @param {Tone.Signal} signal
* @param {Tone.Signal} signal
* @returns {Tone.Transport} this
*/
Tone.Transport.prototype.unsyncSignal = function(signal){
@ -765,7 +734,7 @@ function(Tone){
};
/**
* Clean up.
* Clean up.
* @returns {Tone.Transport} this
* @private
*/
@ -777,8 +746,6 @@ function(Tone){
this.bpm = null;
this._timeline.dispose();
this._timeline = null;
this._onceEvents.dispose();
this._onceEvents = null;
this._repeatedEvents.dispose();
this._repeatedEvents = null;
return this;
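
A sketch of the scheduling API whose internals changed above (assuming Tone.js is loaded): schedule, scheduleRepeat and scheduleOnce now create TransportEvent objects under the hood, but the public usage and the returned ids are unchanged.

```javascript
//invoke a callback every quarter note, starting at the second measure
var loopId = Tone.Transport.scheduleRepeat(function(time){
	console.log("quarter note at " + time);
}, "4n", "1m");

//a one-shot event which removes itself after it fires
Tone.Transport.scheduleOnce(function(time){
	console.log("downbeat of the fifth measure at " + time);
}, "4m");

Tone.Transport.start();

//later: remove the repeated event by its id
Tone.Transport.clear(loopId);
```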

View file

@ -0,0 +1,92 @@
define(["Tone/core/Tone"], function(Tone){
/**
* @class Tone.TransportEvent is an internal class used by [Tone.Transport](Transport)
* to schedule events. Do not invoke this class directly, it is
* handled from within Tone.Transport.
* @extends {Tone}
* @param {Object} options
*/
Tone.TransportEvent = function(Transport, options){
options = Tone.defaultArg(options, Tone.TransportEvent.defaults);
Tone.call(this);
/**
* Reference to the Transport that created it
* @type {Tone.Transport}
*/
this.Transport = Transport;
/**
* The unique id of the event
* @type {Number}
*/
this.id = Tone.TransportEvent._eventId++;
/**
* The time the event starts
* @type {Ticks}
*/
this.time = options.time;
/**
* The callback to invoke
* @type {Function}
*/
this.callback = options.callback;
/**
* If the event should be removed after being invoked.
* @type {Boolean}
* @private
*/
this._once = options.once;
};
Tone.extend(Tone.TransportEvent);
/**
* The defaults
* @static
* @type {Object}
*/
Tone.TransportEvent.defaults = {
"once" : false,
"callback" : Tone.noOp,
};
/**
* Current ID counter
* @private
* @static
* @type {Number}
*/
Tone.TransportEvent._eventId = 0;
/**
* Invoke the event's callback.
* @param {Time} time The AudioContext time in seconds of the event
*/
Tone.TransportEvent.prototype.invoke = function(time){
if (this.callback){
this.callback(time);
if (this._once && this.Transport){
this.Transport.clear(this.id);
}
}
};
/**
* Clean up
* @return {Tone.TransportEvent} this
*/
Tone.TransportEvent.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
this.Transport = null;
this.callback = null;
return this;
};
return Tone.TransportEvent;
});

View file

@ -0,0 +1,130 @@
define(["Tone/core/Tone", "Tone/core/TransportEvent"], function(Tone){
/**
* @class Tone.TransportRepeatEvent is an internal class used by Tone.Transport
* to schedule repeat events. This class should not be instantiated directly.
* @extends {Tone.TransportEvent}
* @param {Object} options
*/
Tone.TransportRepeatEvent = function(Transport, options){
Tone.TransportEvent.call(this, Transport, options);
options = Tone.defaultArg(options, Tone.TransportRepeatEvent.defaults);
/**
* When the event should stop repeating
* @type {Ticks}
* @private
*/
this.duration = options.duration;
/**
* The interval of the repeated event
* @type {Ticks}
* @private
*/
this._interval = options.interval;
/**
* The ID of the current timeline event
* @type {Number}
* @private
*/
this._currentId = -1;
/**
* The ID of the next timeline event
* @type {Number}
* @private
*/
this._nextId = -1;
/**
* The time of the next event
* @type {Ticks}
* @private
*/
this._nextTick = this.time;
/**
* a reference to the bound restart method
* @type {Function}
* @private
*/
this._boundRestart = this._restart.bind(this);
this.Transport.on("start loopStart", this._boundRestart);
this._restart();
};
Tone.extend(Tone.TransportRepeatEvent, Tone.TransportEvent);
/**
* The defaults
* @static
* @type {Object}
*/
Tone.TransportRepeatEvent.defaults = {
"duration" : Infinity,
"interval" : 1
};
/**
* Invoke the callback and schedule the next
* repetition of the event if necessary.
* @param {Number} time The AudioContext time in seconds of the event
*/
Tone.TransportRepeatEvent.prototype.invoke = function(time){
//create more events if necessary
this._createEvents();
//call the super class
Tone.TransportEvent.prototype.invoke.call(this, time);
};
/**
* Push more events onto the timeline to keep up with the position of the timeline
* @private
*/
Tone.TransportRepeatEvent.prototype._createEvents = function(){
// schedule the next event
var ticks = this.Transport.ticks;
if (ticks >= this.time && ticks >= this._nextTick &&
this._nextTick + this._interval < this.time + this.duration){
this._nextTick += this._interval;
this._currentId = this._nextId;
this._nextId = this.Transport.scheduleOnce(this.invoke.bind(this), Tone.TransportTime(this._nextTick, "i"));
}
};
/**
* Reschedule the events when the Transport restarts or loops
* @private
*/
Tone.TransportRepeatEvent.prototype._restart = function(){
this.Transport.clear(this._currentId);
this.Transport.clear(this._nextId);
var ticks = this.Transport.ticks;
this._nextTick = this.time;
if (ticks > this.time){
this._nextTick = this.time + Math.ceil((ticks - this.time) / this._interval) * this._interval;
}
this._currentId = this.Transport.scheduleOnce(this.invoke.bind(this), Tone.TransportTime(this._nextTick, "i"));
this._nextTick += this._interval;
this._nextId = this.Transport.scheduleOnce(this.invoke.bind(this), Tone.TransportTime(this._nextTick, "i"));
};
/**
* Clean up
* @return {Tone.TransportRepeatEvent} this
*/
Tone.TransportRepeatEvent.prototype.dispose = function(){
this.Transport.clear(this._currentId);
this.Transport.clear(this._nextId);
this.Transport.off("start loopStart", this._boundRestart);
this._boundRestart = null;
Tone.TransportEvent.prototype.dispose.call(this);
return this;
};
return Tone.TransportRepeatEvent;
});

View file

@ -102,7 +102,7 @@ define(["Tone/core/Tone", "Tone/instrument/Instrument", "Tone/source/FMOscillato
"harmonicity" : options.harmonicity,
"modulationIndex" : options.modulationIndex
});
osc.connect(this._highpass).start(0);
osc.connect(this._highpass).start();
this._oscillators[i] = osc;
var mult = new Tone.Multiply(inharmRatios[i]);

View file

@ -121,7 +121,8 @@ define(["Tone/core/Tone", "Tone/instrument/Instrument", "Tone/core/Buffers", "To
"buffer" : buffer,
"playbackRate" : Tone.intervalToFrequencyRatio(difference),
"fadeIn" : this.attack,
"fadeOut" : this.release
"fadeOut" : this.release,
"curve" : "exponential",
}).connect(this.output);
source.start(time, 0, buffer.duration, velocity);
// add it to the active sources

View file

@ -3,7 +3,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
"use strict";
/**
* @class A signal which adds the method getValueAtTime.
* @class A signal which adds the method getValueAtTime.
* Code and inspiration from https://github.com/jsantell/web-audio-automation-timeline
* @extends {Tone.Signal}
* @param {Number=} value The initial value of the signal
@ -13,7 +13,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
var options = Tone.defaults(arguments, ["value", "units"], Tone.Signal);
Tone.Signal.call(this, options);
/**
* The scheduled events
* @type {Tone.Timeline}
@ -48,7 +48,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
};
/**
* The current value of the signal.
* The current value of the signal.
* @memberOf Tone.TimelineSignal#
* @type {Number}
* @name value
@ -79,7 +79,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
* @param {Time} time The time when the change should occur.
* @returns {Tone.TimelineSignal} this
* @example
* //set the frequency to "G4" in exactly 1 second from now.
* //set the frequency to "G4" in exactly 1 second from now.
* freq.setValueAtTime("G4", "+1");
*/
Tone.TimelineSignal.prototype.setValueAtTime = function (value, startTime) {
@ -96,11 +96,11 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
};
/**
* Schedules a linear continuous change in parameter value from the
* Schedules a linear continuous change in parameter value from the
* previous scheduled parameter value to the given value.
*
* @param {number} value
* @param {Time} endTime
*
* @param {number} value
* @param {Time} endTime
* @returns {Tone.TimelineSignal} this
*/
Tone.TimelineSignal.prototype.linearRampToValueAtTime = function (value, endTime) {
@ -116,11 +116,11 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
};
/**
* Schedules an exponential continuous change in parameter value from
* Schedules an exponential continuous change in parameter value from
* the previous scheduled parameter value to the given value.
*
* @param {number} value
* @param {Time} endTime
*
* @param {number} value
* @param {Time} endTime
* @returns {Tone.TimelineSignal} this
*/
Tone.TimelineSignal.prototype.exponentialRampToValueAtTime = function (value, endTime) {
@ -151,10 +151,10 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
/**
* Start exponentially approaching the target value at the given time with
* a rate having the given time constant.
* @param {number} value
* @param {Time} startTime
* @param {number} timeConstant
* @returns {Tone.TimelineSignal} this
* @param {number} value
* @param {Time} startTime
* @param {number} timeConstant
* @returns {Tone.TimelineSignal} this
*/
Tone.TimelineSignal.prototype.setTargetAtTime = function (value, startTime, timeConstant) {
value = this._fromUnits(value);
@ -173,11 +173,11 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
/**
* Set an array of arbitrary values starting at the given time for the given duration.
* @param {Float32Array} values
* @param {Time} startTime
* @param {Float32Array} values
* @param {Time} startTime
* @param {Time} duration
* @param {NormalRange} [scaling=1] If the values in the curve should be scaled by some value
* @returns {Tone.TimelineSignal} this
* @returns {Tone.TimelineSignal} this
*/
Tone.TimelineSignal.prototype.setValueCurveAtTime = function (values, startTime, duration, scaling) {
scaling = Tone.defaultArg(scaling, 1);
@ -192,9 +192,8 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
};
/**
* Cancels all scheduled parameter changes with times greater than or
* Cancels all scheduled parameter changes with times greater than or
* equal to startTime.
*
* @param {Time} startTime
* @returns {Tone.TimelineSignal} this
*/
@ -205,13 +204,25 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
return this;
};
/**
* Cancels all scheduled parameter changes with times greater than or
* equal to cancelTime and sets the output of the signal to be the value
* at cancelTime. Similar to [cancelScheduledValues](#cancelscheduledvalues).
* @param {Time} cancelTime
* @returns {Tone.TimelineSignal} this
*/
Tone.TimelineSignal.prototype.cancelAndHoldAtTime = function (cancelTime) {
this.setRampPoint(this.toSeconds(cancelTime));
return this;
};
/**
* Sets the computed value at the given time. This provides
* a point from which a linear or exponential curve
* can be scheduled after. Will cancel events after
* can be scheduled after. Will cancel events after
* the given time and shorten the currently scheduled
* linear or exponential ramp so that it ends at `time` .
* This is to avoid discontinuities and clicks in envelopes.
* This is to avoid discontinuities and clicks in envelopes.
* @param {Time} time When to set the ramp point
* @returns {Tone.TimelineSignal} this
*/
@ -237,8 +248,8 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
this.exponentialRampToValueAtTime(val, time);
}
}
this.setValueAtTime(val, time);
}
this.setValueAtTime(val, time);
return this;
};
@ -310,13 +321,13 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
value = this._initial;
} else if (before.type === Tone.TimelineSignal.Type.Target){
var previous = this._events.getBefore(before.time);
var previouVal;
var previousVal;
if (previous === null){
previouVal = this._initial;
previousVal = this._initial;
} else {
previouVal = previous.value;
previousVal = previous.value;
}
value = this._exponentialApproach(before.time, previouVal, before.value, before.constant, time);
value = this._exponentialApproach(before.time, previousVal, before.value, before.constant, time);
} else if (after === null){
value = before.value;
} else if (after.type === Tone.TimelineSignal.Type.Linear){
@ -330,12 +341,12 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
};
/**
* When signals connect to other signals or AudioParams,
* they take over the output value of that signal or AudioParam.
* For all other nodes, the behavior is the same as a default <code>connect</code>.
* When signals connect to other signals or AudioParams,
* they take over the output value of that signal or AudioParam.
* For all other nodes, the behavior is the same as a default <code>connect</code>.
*
* @override
* @param {AudioParam|AudioNode|Tone.Signal|Tone} node
* @param {AudioParam|AudioNode|Tone.Signal|Tone} node
* @param {number} [outputNumber=0] The output number to connect from.
* @param {number} [inputNumber=0] The input number to connect to.
* @returns {Tone.TimelineSignal} this
@ -385,4 +396,4 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Timeline"], function
};
return Tone.TimelineSignal;
});
});
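A hypothetical usage sketch of the Tone.TimelineSignal scheduling API documented above, including the new cancelAndHoldAtTime (values are placeholders; assumes Tone.js is loaded as the global Tone):

var sig = new Tone.TimelineSignal(0);
var now = sig.now();
sig.setValueAtTime(0, now);
sig.linearRampToValueAtTime(1, now + 0.5);        //ramp up over half a second
sig.exponentialRampToValueAtTime(0.001, now + 1); //then fall away exponentially
sig.cancelAndHoldAtTime(now + 0.75);              //cancel later events and hold the value the signal has at 0.75s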


@ -82,6 +82,12 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
*/
this.fadeOut = options.fadeOut;
/**
* The curve applied to the fades, either "linear" or "exponential"
* @type {String}
*/
this.curve = options.curve;
/**
* The value that the buffer ramps to
* @type {Gain}
@ -96,6 +102,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
*/
this._onendedTimeout = -1;
//set some values initially
this.loop = options.loop;
this.loopStart = options.loopStart;
this.loopEnd = options.loopEnd;
@ -117,6 +124,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
"loopEnd" : 0,
"fadeIn" : 0,
"fadeOut" : 0,
"curve" : "linear",
"playbackRate" : 1
};
@ -165,30 +173,27 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
offset = Tone.defaultArg(offset, 0);
}
offset = this.toSeconds(offset);
//the values in seconds
time = this.toSeconds(time);
gain = Tone.defaultArg(gain, 1);
this._gain = gain;
//the fadeIn time
if (Tone.isUndef(fadeInTime)){
fadeInTime = this.toSeconds(this.fadeIn);
} else {
fadeInTime = this.toSeconds(fadeInTime);
}
fadeInTime = this.toSeconds(Tone.defaultArg(fadeInTime, this.fadeIn));
this.fadeIn = fadeInTime;
if (fadeInTime > 0){
this._gainNode.gain.setValueAtTime(0, time);
this._gainNode.gain.linearRampToValueAtTime(this._gain, time + fadeInTime);
if (this.curve === "linear"){
this._gainNode.gain.linearRampToValueAtTime(this._gain, time + fadeInTime);
} else {
this._gainNode.gain.setTargetAtTime(this._gain, time, this._gainNode.gain.getTimeConstant(fadeInTime));
}
} else {
this._gainNode.gain.setValueAtTime(gain, time);
}
this._startTime = time + fadeInTime;
this._startTime = time;
var computedDur = Tone.defaultArg(duration, this.buffer.duration - offset);
computedDur = this.toSeconds(computedDur);
var computedDur = this.toSeconds(Tone.defaultArg(duration, this.buffer.duration - offset));
computedDur = Math.max(computedDur, 0);
if (!this.loop || (this.loop && !Tone.isUndef(duration))){
@ -196,7 +201,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
if (!this.loop){
computedDur = Math.min(computedDur, this.buffer.duration - offset);
}
this.stop(time + computedDur + fadeInTime, this.fadeOut);
this.stop(time + computedDur, this.fadeOut);
}
//start the buffer source
@ -212,6 +217,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
}
this._source.buffer = this.buffer.get();
this._source.loopEnd = this.loopEnd || this.buffer.duration;
Tone.isPast(time);
this._source.start(time, offset);
} else {
throw new Error("Tone.BufferSource: buffer is either not set or not loaded.");
@ -232,26 +238,35 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
time = this.toSeconds(time);
//the fadeOut time
if (Tone.isUndef(fadeOutTime)){
fadeOutTime = this.toSeconds(this.fadeOut);
} else {
fadeOutTime = this.toSeconds(fadeOutTime);
}
//only stop if the last stop was scheduled later
//if this is before the previous stop
if (this._stopTime === -1 || this._stopTime > time){
//stop if it's schedule before the start time
if (time <= this._startTime){
this._gainNode.gain.cancelScheduledValues(time);
this._gainNode.gain.value = 0;
return this;
}
time = Math.max(this._startTime + this.fadeIn + this.sampleTime, time);
//cancel the previous curve
this._gainNode.gain.cancelScheduledValues(time);
this._stopTime = time;
//cancel the end curve
this._gainNode.gain.cancelScheduledValues(this._startTime + this.sampleTime);
time = Math.max(this._startTime, time);
//the fadeOut time
fadeOutTime = this.toSeconds(Tone.defaultArg(fadeOutTime, this.fadeOut));
//set a new one
if (fadeOutTime > 0){
var startFade = Math.max(this._startTime, time - fadeOutTime);
var heldDuration = Math.min(time - this._startTime - this.fadeIn - this.sampleTime, this.buffer.duration);
fadeOutTime = Math.min(heldDuration, fadeOutTime);
var startFade = time - fadeOutTime;
if (fadeOutTime > this.sampleTime){
this._gainNode.gain.setValueAtTime(this._gain, startFade);
this._gainNode.gain.linearRampToValueAtTime(0, time);
if (this.curve === "linear"){
this._gainNode.gain.linearRampToValueAtTime(0, time);
} else {
this._gainNode.gain.setTargetAtTime(0, startFade, this._gainNode.gain.getTimeConstant(fadeOutTime));
}
} else {
this._gainNode.gain.setValueAtTime(0, time);
}
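A short, hedged sketch of how the new fade curve option above might be used (the audio URL is a placeholder):

var buffer = new Tone.Buffer("./audio/loop.mp3", function(){
	var source = new Tone.BufferSource(buffer).toMaster();
	source.fadeIn = 0.05;
	source.fadeOut = 0.2;
	source.curve = "exponential"; //or "linear", the default
	source.start();
});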


@ -1,4 +1,4 @@
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/source/Source", "Tone/core/Transport"],
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/source/Source", "Tone/core/Transport"],
function(Tone){
"use strict";
@ -9,7 +9,7 @@ function(Tone){
*/
if (window.OscillatorNode && !OscillatorNode.prototype.start){
OscillatorNode.prototype.start = OscillatorNode.prototype.noteOn;
OscillatorNode.prototype.stop = OscillatorNode.prototype.noteOff;
OscillatorNode.prototype.stop = OscillatorNode.prototype.noteOff;
if (!OscillatorNode.prototype.setPeriodicWave){
OscillatorNode.prototype.setPeriodicWave = OscillatorNode.prototype.setWaveTable;
}
@ -20,7 +20,7 @@ function(Tone){
/**
* @class Tone.Oscillator supports a number of features including
* phase rotation, multiple oscillator types (see Tone.Oscillator.type),
* phase rotation, multiple oscillator types (see Tone.Oscillator.type),
* and Transport syncing (see Tone.Oscillator.syncFrequency).
*
* @constructor
@ -32,7 +32,7 @@ function(Tone){
* var osc = new Tone.Oscillator(440, "sine").toMaster().start();
*/
Tone.Oscillator = function(){
var options = Tone.defaults(arguments, ["frequency", "type"], Tone.Oscillator);
Tone.Source.call(this, options);
@ -42,7 +42,7 @@ function(Tone){
* @private
*/
this._oscillator = null;
/**
* The frequency control.
* @type {Frequency}
@ -85,7 +85,7 @@ function(Tone){
* @private
*/
this._type = null;
//setup
this.type = options.type;
this.phase = this._phase;
@ -120,7 +120,7 @@ function(Tone){
/**
* start the oscillator
* @param {Time} [time=now]
* @param {Time} [time=now]
* @private
*/
Tone.Oscillator.prototype._start = function(time){
@ -132,7 +132,9 @@ function(Tone){
this.frequency.connect(this._oscillator.frequency);
this.detune.connect(this._oscillator.detune);
//start the oscillator
this._oscillator.start(this.toSeconds(time));
time = this.toSeconds(time);
Tone.isPast(time);
this._oscillator.start(time);
};
/**
@ -143,7 +145,9 @@ function(Tone){
*/
Tone.Oscillator.prototype._stop = function(time){
if (this._oscillator){
this._oscillator.stop(this.toSeconds(time));
time = this.toSeconds(time);
Tone.isPast(time);
this._oscillator.stop(time);
this._oscillator = null;
}
return this;
@ -151,14 +155,14 @@ function(Tone){
/**
* Sync the signal to the Transport's bpm. Any changes to the transport's bpm
* will also affect the oscillator's frequency.
* will also affect the oscillator's frequency.
* @returns {Tone.Oscillator} this
* @example
* Tone.Transport.bpm.value = 120;
* osc.frequency.value = 440;
* //the ratio between the bpm and the frequency will be maintained
* osc.syncFrequency();
* Tone.Transport.bpm.value = 240;
* Tone.Transport.bpm.value = 240;
* // the frequency of the oscillator is doubled to 880
*/
Tone.Oscillator.prototype.syncFrequency = function(){
@ -167,7 +171,7 @@ function(Tone){
};
/**
* Unsync the oscillator's frequency from the Transport.
* Unsync the oscillator's frequency from the Transport.
* See Tone.Oscillator.syncFrequency
* @returns {Tone.Oscillator} this
*/
@ -181,11 +185,11 @@ function(Tone){
* setting the first x number of partials of the oscillator. For example: "sine4" would
* set be the first 4 partials of the sine wave and "triangle8" would set the first
* 8 partials of the triangle wave.
* <br><br>
* Uses PeriodicWave internally even for native types so that it can set the phase.
* PeriodicWave equations are from the
* <br><br>
* Uses PeriodicWave internally even for native types so that it can set the phase.
* PeriodicWave equations are from the
* [Webkit Web Audio implementation](https://code.google.com/p/chromium/codesearch#chromium/src/third_party/WebKit/Source/modules/webaudio/PeriodicWave.cpp&sq=package:chromium).
*
*
* @memberOf Tone.Oscillator#
* @type {string}
* @name type
@ -212,7 +216,7 @@ function(Tone){
});
/**
* Returns the real and imaginary components based
* Returns the real and imaginary components based
* on the oscillator type.
* @returns {Array} [real, imaginary]
* @private
@ -223,7 +227,7 @@ function(Tone){
var real = new Float32Array(periodicWaveSize);
var imag = new Float32Array(periodicWaveSize);
var partialCount = 1;
if (type === Tone.Oscillator.Type.Custom){
partialCount = this._partials.length + 1;
@ -240,9 +244,9 @@ function(Tone){
for (var n = 1; n < periodicWaveSize; ++n) {
var piFactor = 2 / (n * Math.PI);
var b;
var b;
switch (type) {
case Tone.Oscillator.Type.Sine:
case Tone.Oscillator.Type.Sine:
b = (n <= partialCount) ? 1 : 0;
break;
case Tone.Oscillator.Type.Square:
@ -258,7 +262,7 @@ function(Tone){
b = 0;
}
break;
case Tone.Oscillator.Type.Custom:
case Tone.Oscillator.Type.Custom:
b = this._partials[n - 1];
break;
default:
@ -276,10 +280,10 @@ function(Tone){
};
/**
* Compute the inverse FFT for a given phase.
* Compute the inverse FFT for a given phase.
* @param {Float32Array} real
* @param {Float32Array} imag
* @param {NormalRange} phase
* @param {Float32Array} imag
* @param {NormalRange} phase
* @return {AudioRange}
* @private
*/
@ -311,12 +315,12 @@ function(Tone){
};
/**
* The partials of the waveform. A partial represents
* the amplitude at a harmonic. The first harmonic is the
* The partials of the waveform. A partial represents
* the amplitude at a harmonic. The first harmonic is the
* fundamental frequency, the second is the octave and so on
* following the harmonic series.
* Setting this value will automatically set the type to "custom".
* The value is an empty array when the type is not "custom".
* following the harmonic series.
* Setting this value will automatically set the type to "custom".
* The value is an empty array when the type is not "custom".
* @memberOf Tone.Oscillator#
* @type {Array}
* @name partials
@ -330,7 +334,7 @@ function(Tone){
} else {
return this._partials;
}
},
},
set : function(partials){
this._partials = partials;
this.type = Tone.Oscillator.Type.Custom;
@ -338,7 +342,7 @@ function(Tone){
});
/**
* The phase of the oscillator in degrees.
* The phase of the oscillator in degrees.
* @memberOf Tone.Oscillator#
* @type {Degrees}
* @name phase
@ -348,7 +352,7 @@ function(Tone){
Object.defineProperty(Tone.Oscillator.prototype, "phase", {
get : function(){
return this._phase * (180 / Math.PI);
},
},
set : function(phase){
this._phase = phase * Math.PI / 180;
//reset the type
@ -377,4 +381,4 @@ function(Tone){
};
return Tone.Oscillator;
});
});
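A quick illustration of the type/partials/phase features documented above (hypothetical values):

var osc = new Tone.Oscillator(220, "sine4").toMaster(); //only the first 4 partials of a sine
osc.phase = 90;                                         //phase offset in degrees
osc.partials = [1, 0.5, 0.25];                          //setting partials switches the type to "custom"
osc.start();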

File diff suppressed because it is too large
build/Tone.min.js (vendored, 14 changed lines): diff suppressed because one or more lines are too long

@ -6,7 +6,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
<link rel="icon" type="image/png" sizes="174x174" href="./style/favicon.png">
<script src="../build/Tone.js"></script>
<script src="./scripts/jquery.min.js"></script>
<script src="./scripts/draggabilly.js"></script>
@ -30,22 +30,22 @@
<body>
<div id="Content" class="FullScreen">
<div id="Title">Analyser</div>
<div id="Explanation">
<div id="Explanation">
<a href="https://tonejs.github.io/docs/#Analyser" target="_blank">Tone.Analyser</a>
analyses the incoming audio to produce a TypedArray of either the
<a href="https://en.wikipedia.org/wiki/Fast_Fourier_transform" target="_blank">FFT data</a>
<a href="https://en.wikipedia.org/wiki/Fast_Fourier_transform" target="_blank">FFT data</a>
or the waveform. The default <code>returnType</code> is "byte" which returns values
in the range 0-255.
</div>
</div>
<script>
//analyse the frequency/amplitude of the incoming signal
var fft = new Tone.Analyser("fft", 32);
<script>
//analyse the frequency/amplitude of the incoming signal
var fft = new Tone.FFT(32);
//get the waveform data for the audio
var waveform = new Tone.Analyser("waveform", 1024);
var waveform = new Tone.Waveform(1024);
var player = new Tone.Player({
"url" : "./audio/FWDL.[mp3|ogg]",
@ -77,10 +77,9 @@
fftContext.clearRect(0, 0, canvasWidth, canvasHeight);
var barWidth = canvasWidth / fft.size;
for (var i = 0, len = values.length; i < len; i++){
var val = values[i] / 255;
var x = canvasWidth * (i / len);
var y = val * canvasHeight;
fftContext.fillStyle = "rgba(0, 0, 0, " + val + ")";
var y = (values[i] + 140) * 2;
fftContext.fillStyle = "rgba(0, 0, 0, " + i/len + ")";
fftContext.fillRect(x, canvasHeight - y, barWidth, canvasHeight);
}
}
@ -94,14 +93,13 @@
function drawWaveform(values){
//draw the waveform
waveContext.clearRect(0, 0, canvasWidth, canvasHeight);
var values = waveform.analyse();
waveContext.beginPath();
waveContext.lineJoin = "round";
waveContext.lineWidth = 6;
waveContext.strokeStyle = waveformGradient;
waveContext.moveTo(0, (values[0] / 255) * canvasHeight);
for (var i = 1, len = values.length; i < len; i++){
var val = values[i] / 255;
var val = (values[i] + 1) / 2;
var x = canvasWidth * (i / len);
var y = val * canvasHeight;
waveContext.lineTo(x, y);
@ -123,7 +121,7 @@
//make the gradient
waveformGradient = waveContext.createLinearGradient(0, 0, canvasWidth, canvasHeight);
waveformGradient.addColorStop(0, "#ddd");
waveformGradient.addColorStop(1, "#000");
waveformGradient.addColorStop(1, "#000");
}
sizeCanvases();
@ -132,10 +130,10 @@
function loop(){
requestAnimationFrame(loop);
//get the fft data and draw it
var fftValues = fft.analyse();
var fftValues = fft.getValue();
drawFFT(fftValues);
//get the waveform valeus and draw it
var waveformValues = waveform.analyse();
var waveformValues = waveform.getValue();
drawWaveform(waveformValues);
}
loop();
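For reference, a condensed sketch of the new analysis classes used in this example (player is the Tone.Player created above):

var fft = new Tone.FFT(32);             //getValue() returns decibel values
var waveform = new Tone.Waveform(1024); //getValue() returns samples in the range -1 to 1
player.connect(fft);
player.connect(waveform);
function loop(){
	requestAnimationFrame(loop);
	var spectrum = fft.getValue();
	var samples = waveform.getValue();
	//...draw spectrum and samples to a canvas...
}
loop();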


@ -6,7 +6,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
<link rel="icon" type="image/png" sizes="174x174" href="./style/favicon.png">
<script src="../build/Tone.js"></script>
<script src="./scripts/jquery.min.js"></script>
<script src="./scripts/draggabilly.js"></script>
@ -31,23 +31,22 @@
<body>
<div id="Content" class="FullScreen">
<div id="Title">Meter</div>
<div id="Explanation">
<div id="Explanation">
<a href="https://tonejs.github.io/docs/#Meter" target="_blank">Tone.Meter</a>
gives you the level of the incoming signal (between 0-1). Values above 1
are clipping.
gives you the level of the incoming signal in decibels.
</div>
</div>
<script>
<script>
//create a level meter
var meter = new Tone.Meter("level");
var meter = new Tone.Meter();
var player = new Tone.Player({
"url" : "./audio/FWDL.[mp3|ogg]",
"loop" : true
}).connect(meter).toMaster();
// GUI //
//start button
@ -72,7 +71,8 @@
var meterGraident;
function drawMeter(){
var level = meter.value * 0.8; //scale it since values go above 1 when clipping
var level = meter.getLevel();
level = Tone.dbToGain(level); //scale it between 0 - 1
meterContext.clearRect(0, 0, canvasWidth, canvasHeight);
meterContext.fillStyle = meterGraident;
meterContext.fillRect(0, 0, canvasWidth, canvasHeight);
@ -91,8 +91,8 @@
//make the gradient
meterGraident = meterContext.createLinearGradient(0, 0, canvasWidth, canvasHeight);
meterGraident.addColorStop(0, "#BFFF02");
meterGraident.addColorStop(0.8, "#02FF24");
meterGraident.addColorStop(1, "#FF0202");
meterGraident.addColorStop(0.8, "#02FF24");
meterGraident.addColorStop(1, "#FF0202");
}
sizeCanvases();
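A minimal sketch of the decibel-based Meter API shown above, using an oscillator as a stand-in source:

var meter = new Tone.Meter();
var osc = new Tone.Oscillator().connect(meter).toMaster().start();
setInterval(function(){
	var db = meter.getLevel();     //level in decibels
	var level = Tone.dbToGain(db); //roughly 0-1, convenient for drawing
	console.log(db, level);
}, 100);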


@ -6,7 +6,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
<link rel="icon" type="image/png" sizes="174x174" href="./style/favicon.png">
<script src="../build/Tone.js"></script>
<script src="./scripts/jquery.min.js"></script>
<script src="./scripts/draggabilly.js"></script>
@ -44,10 +44,7 @@
//directly to the master output because of feedback.
var mic = new Tone.UserMedia();
var analyser = new Tone.Analyser({
"type" : "waveform",
"size" : 256
});
var analyser = new Tone.Waveform(256);
mic.connect(analyser);
@ -74,14 +71,14 @@
requestAnimationFrame(drawLoop);
//draw the waveform
context.clearRect(0, 0, canvasWidth, canvasHeight);
var values = analyser.analyse();
var values = analyser.getValue();
context.beginPath();
context.lineJoin = "round";
context.lineWidth = 6;
context.strokeStyle = "white";
context.moveTo(0, (values[0] / 255) * canvasHeight);
context.moveTo(0, (values[0] + 1) / 2 * canvasHeight);
for (var i = 1, len = values.length; i < len; i++){
var val = values[i] / 255;
var val = (values[i] + 1) / 2;
var x = canvasWidth * (i / (len - 1));
var y = val * canvasHeight;
context.lineTo(x, y);
@ -89,7 +86,7 @@
context.stroke();
}
drawLoop();
Interface.Button({
type : "toggle",
text : "Open Mic",
@ -105,6 +102,6 @@
}
</script>
</body>
</html>


@ -13,7 +13,6 @@
<script src="https://tonejs.github.io/Logo/build/Logo.js"></script>
<script src="./scripts/StartAudioContext.js"></script>
<script src="./scripts/Interface.js"></script>
<script src="./scripts/nexusUI.js"></script>
<link rel="stylesheet" type="text/css" href="./style/examples.css">
@ -28,108 +27,64 @@
margin-top: 3px;
}
</style>
<div id="Content" class="FullScreen">
<div id="Title">rampTo</div>
<div id="Explanation">
In Tone.js, many of a class' members are <a href="https://tonejs.github.io/docs/#Signal">Tone.Signals</a>.
Working with signals is different than working with numbers or strings:
In Tone.js, many of a class' members are <a href="https://tonejs.github.io/docs/#Signal">Tone.Signals</a>.
Working with signals is different than working with numbers or strings:
Signals are values which are updated at audio rate,
which allows for sample-accurate scheduling and ramping. <code>.rampTo(value, rampTime)</code>
smoothly changes the signal from the current value to the target value over the duration of the rampTime.
This example uses <code>.rampTo</code> to smooth out changes in volume and frequency.
<br><br>
As the large dot gets closer to each of the smaller dots, a different harmonic is heard depending
on the distance to that smaller dot. The "harmony" slider adjusts each of the oscillators frequencies'
distance from the fundamental frequency.
smoothly changes the signal from the current value to the target value over the duration of the rampTime.
This example uses <code>.rampTo</code> to smooth out changes in volume and frequency.
</div>
<canvas nx="joints"></canvas>
</div>
<script>
Tone.Master.volume.value = -Infinity;
var oscillators = {};
var oscillators = [];
var bassFreq = 32;
var reverb = new Tone.JCReverb().toMaster();
for (var i = 0; i < 8; i++){
oscillators["node" + i] = new Tone.Oscillator({
oscillators.push(new Tone.Oscillator({
"frequency" : bassFreq * i,
"type" : "sawtooth10",
"volume" : -Infinity,
"detune" : Math.random() * 30 - 15,
}).connect(reverb).start();
}).start().toMaster());
}
// GUI //
nx.onload = function(){
nx.colorize("#7F33ED");
joints1.nodeSize = 25;
joints1.val.x = Math.random();
joints1.val.y = Math.random();
joints1.resize($("#Content").width(), 250);
joints1.animate("bounce");
var width = joints1.width;
var height = joints1.height;
joints1.threshold = Math.max($("#Content").width() / 1.5, 60);
joints1.init();
joints1.draw();
$(window).on("resize", function(){
joints1.resize($("#Content").width(), 250);
joints1.threshold = Math.max($("#Content").width() / 1.5, 60);
joints1.draw();
});
function setValues(data){
for (var n in oscillators){
oscillators[n].volume.cancelScheduledValues();
if (data.hasOwnProperty(n)){
oscillators[n].volume.rampTo((1 - Math.pow(data[n], 0.5)) * -60, 0.3);
} else {
oscillators[n].volume.rampTo(-Infinity, 0.4);
}
}
}
joints1.on("*", setValues);
Interface.Slider({
name : "harmony",
min : 0.5,
max : 2,
value : 1,
drag : function(value){
var i = 0;
for (var n in oscillators){
var osc = oscillators[n];
osc.volume.cancelScheduledValues();
osc.frequency.rampTo(bassFreq * i * value, 0.4);
i++;
}
},
});
Interface.Slider({
name : "harmony",
min : 0.5,
max : 2,
value : 1,
drag : function(value){
oscillators.forEach(function(osc, i){
osc.frequency.rampTo(bassFreq * i * value, 0.4);
});
},
});
Interface.Button({
text : "Unmute",
activeText : "Mute",
type : "toggle",
key : 32, //spacebar
start : function(){
Tone.Master.volume.rampTo(-20, 0.5);
},
end : function(){
Tone.Master.volume.rampTo(-Infinity, 0.5);
},
});
}
Interface.Button({
text : "Unmute",
activeText : "Mute",
type : "toggle",
key : 32, //spacebar
start : function(){
oscillators.forEach(function(osc){
osc.volume.rampTo(-20, 1);
});
},
end : function(){
oscillators.forEach(function(osc){
osc.volume.rampTo(-Infinity, 1);
});
},
});
</script>
</body>
</html>
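A stripped-down sketch of the .rampTo calls this example relies on (hypothetical values):

var osc = new Tone.Oscillator(32, "sawtooth").toMaster().start();
osc.frequency.rampTo(64, 0.4); //glide the frequency over 0.4 seconds
osc.volume.rampTo(-20, 1);     //fade the volume to -20 dB over 1 second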


@ -79,7 +79,7 @@
'A7' : 'A7.[mp3|ogg]',
'C8' : 'C8.[mp3|ogg]'
}, {
'release' : 0.1,
'release' : 1,
'baseUrl' : './audio/salamander/'
}).toMaster();


@ -143,19 +143,6 @@ gulp.task("example", function() {
gulp.watch(["../examples/style/examples.scss"], ["sass"]);
});
/**
* THE WEBSERVER
*/
gulp.task("server", function(){
gulp.src("../")
.pipe(webserver({
// livereload: false,
directoryListing: true,
port : 3000,
open: false
}));
});
/**
* LINTING
*/
@ -177,7 +164,7 @@ gulp.task("collectTests", function(done){
var tests = ["../test/*/*.js", "!../test/helper/*.js", "!../test/tests/*.js"];
if (argv.file){
tests = ["../test/*/"+argv.file+".js"];
} else if (argv.signal || argv.core || argv.component || argv.instrument ||
} else if (argv.signal || argv.core || argv.component || argv.instrument ||
argv.source || argv.effect || argv.event || argv.type || argv.examples){
tests = [];
if (argv.signal){
@ -207,7 +194,7 @@ gulp.task("collectTests", function(done){
if (argv.examples){
tests.push("../test/examples/*.js");
}
}
}
// console.log(argv.signal === undefined);
var allFiles = [];
var task = gulp.src(tests)
@ -231,45 +218,6 @@ gulp.task("collectTests", function(done){
*/
gulp.task("travis-test", ["lint", "karma-test"]);
/**
* COMMIT BUILD
*/
gulp.task("cloneBuild", function(done) {
var gitUser = "";
if (process.env.TRAVIS && process.env.GH_TOKEN){
gitUser = process.env.GH_TOKEN+"@";
}
git.clone("https://"+gitUser+"github.com/Tonejs/build", {args: `${TMP_FOLDER}/build`}, done);
});
gulp.task("moveToDev", ["build", "cloneBuild"], function(){
// move files to 'dev' folder
return gulp.src("../build/*.js")
.pipe(gulp.dest(`${TMP_FOLDER}/build/dev/`));
});
gulp.task("commitDev", ["moveToDev"], function(){
process.chdir(`${TMP_FOLDER}/build`);
return gulp.src("./dev/*")
.pipe(git.add())
.pipe(git.commit(`${VERSION} build #${process.env.TRAVIS_BUILD_NUMBER}: ${process.env.TRAVIS_COMMIT_MESSAGE}`));
});
gulp.task("pushBuild", ["commitDev"], function(done){
if (process.env.TRAVIS && process.env.GH_TOKEN){
git.push("origin", "gh-pages", {args: " -f"}, function (err) {
if (err) throw err;
done();
});
} else {
done();
}
});
gulp.task("commitDevBuild", ["pushBuild"], function(){
return del([`${TMP_FOLDER}/build`], { force : true});
});
/**
* COVERALLS
*/
@ -277,74 +225,3 @@ gulp.task("coveralls", function(){
return gulp.src("../test/coverage/**/lcov.info")
.pipe(coveralls());
});
/**
* JS DOC ATTRIBUTES
*/
gulp.task("cloneSite", function(done){
var gitUser = "";
if (process.env.TRAVIS && process.env.GH_TOKEN){
gitUser = process.env.GH_TOKEN+"@";
}
git.clone("https://"+gitUser+"github.com/Tonejs/tonejs.github.io", {args: `${TMP_FOLDER}/Site`}, done);
});
gulp.task("commitSite", ["buildJsdocs"], function(){
process.chdir(`${TMP_FOLDER}/Site`);
return gulp.src("*")
.pipe(git.add())
.pipe(git.commit(`${VERSION} build #${process.env.TRAVIS_BUILD_NUMBER}: ${process.env.TRAVIS_COMMIT_MESSAGE}`));
});
gulp.task("pushJSDocs", ["commitSite"], function(done){
if (process.env.TRAVIS && process.env.GH_TOKEN){
git.push("origin", "master", {args: " -f"}, function (err) {
if (err) throw err;
done();
});
} else {
done();
}
});
gulp.task("empty.md", ["cloneSite"], function(){
return gulp.src("../Tone/*/*.js")
.pipe(tap(function(file){
var className = path.basename(file.path, ".js");
var pathSplit = file.path.split("/");
var category = pathSplit[pathSplit.length-2];
file.contents = Buffer.from(`---\ntitle: ${className}\nlayout: ${className === "Type" ? "type" : "doc"}\nversion: ${VERSION}\n---`);
}))
.pipe(rename({extname: ".md"}))
.pipe(flatten())
.pipe(gulp.dest(`${TMP_FOLDER}/Site/_documentation/${VERSION.includes("dev") ? "dev" : VERSION}`))
.pipe(tap(function(file){
// and another one which just forwards
var className = path.basename(file.path, ".md");
file.contents = Buffer.from(`---\ntitle: ${className}\nlayout: forward\n---`);
}))
.pipe(gulp.dest(`${TMP_FOLDER}/Site/_documentation/`));
});
gulp.task("buildJsdocs", ["empty.md"], function(done){
glob("../Tone/*/*.js", function(err, files){
var docs = child_process.execSync(`./node_modules/.bin/jsdoc -X -a public ${files.join(" ")}`);
docs = JSON.parse(docs)
//filter out some stuff
docs = docs.filter(function(datum){
//is public
return datum.access !== "private" &&
//doesnt inherit
(!datum.hasOwnProperty('inherits') || !datum.inherits.startsWith('Tone#')) &&
//isnt undocumented (or a default value)
(!datum.undocumented || datum.longname.includes('defaults'))
});
var dest = `${TMP_FOLDER}/Site/_data/jsdocs-${VERSION}.json`;
fs.writeFile(dest, JSON.stringify(docs, undefined, '\t'), done);
});
});
gulp.task("commitJSDocs", ["pushJSDocs"], function(){
return del([`${TMP_FOLDER}/Site`], { force : true});
});

gulp/increment_version.js (new file, 25 lines)

@ -0,0 +1,25 @@
const fs = require('fs')
const semver = require('semver')
const child_process = require('child_process')
const devVersion = child_process.execSync('npm show tone@next version').toString()
const masterVersion = child_process.execSync('npm show tone version').toString()
//go with whichever is the latest version
let version = masterVersion
if (semver.gt(devVersion, masterVersion)){
version = devVersion
}
version = version.split('.')
//increment the patch
version[2] = parseInt(version[2]) + 1
//put it back in semver
version = version.join('.')
console.log(`incrementing to version ${version}`)
//write it to the package.json
const packageFile = '../package.json'
const package = JSON.parse(fs.readFileSync(packageFile, 'utf-8'))
package.version = version
fs.writeFileSync(packageFile, JSON.stringify(package, undefined, ' '))
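To make the version-bump logic above concrete, a hypothetical walk-through with made-up version strings (requires the semver package):

const semver = require('semver')
const devVersion = '0.11.1'    //e.g. what `npm show tone@next version` could return
const masterVersion = '0.11.0' //e.g. what `npm show tone version` could return
let version = semver.gt(devVersion, masterVersion) ? devVersion : masterVersion
version = version.split('.')
version[2] = parseInt(version[2]) + 1
console.log(version.join('.')) //-> 0.11.2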

gulp/package-lock.json (generated, 123 changed lines)

@ -218,11 +218,6 @@
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz",
"integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4="
},
"babylon": {
"version": "7.0.0-beta.19",
"resolved": "https://registry.npmjs.org/babylon/-/babylon-7.0.0-beta.19.tgz",
"integrity": "sha512-Vg0C9s/REX6/WIXN37UKpv5ZhRi6A4pjHlpkE34+8/a6c2W1Q692n3hmc+SZG5lKRnaExLUbxtJ1SVT+KaCQ/A=="
},
"backo2": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz",
@ -389,14 +384,6 @@
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.11.0.tgz",
"integrity": "sha1-cVuW6phBWTzDMGeSP17GDr2k99c="
},
"catharsis": {
"version": "0.8.9",
"resolved": "https://registry.npmjs.org/catharsis/-/catharsis-0.8.9.tgz",
"integrity": "sha1-mMyJDKZS3S7w5ws3klMQ/56Q/Is=",
"requires": {
"underscore-contrib": "0.3.0"
}
},
"center-align": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz",
@ -2441,6 +2428,11 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ="
},
"semver": {
"version": "4.3.6",
"resolved": "https://registry.npmjs.org/semver/-/semver-4.3.6.tgz",
"integrity": "sha1-MAvG4OhjdPe6YQaLWx7NV/xlMto="
}
}
},
@ -3858,39 +3850,12 @@
"esprima": "2.7.3"
}
},
"js2xmlparser": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/js2xmlparser/-/js2xmlparser-3.0.0.tgz",
"integrity": "sha1-P7YOqgicVED5MZ9RdgzNB+JJlzM=",
"requires": {
"xmlcreate": "1.0.2"
}
},
"jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=",
"optional": true
},
"jsdoc": {
"version": "3.5.4",
"resolved": "https://registry.npmjs.org/jsdoc/-/jsdoc-3.5.4.tgz",
"integrity": "sha512-VmTw0J+2L16IxAe0JSDSAcH0F+DbZxaj8wN1AjHtKMQU/hO0ciIl5ZE93XqrrFIbknobuqHKJCXZj6+Hk57MjA==",
"requires": {
"babylon": "7.0.0-beta.19",
"bluebird": "3.5.0",
"catharsis": "0.8.9",
"escape-string-regexp": "1.0.5",
"js2xmlparser": "3.0.0",
"klaw": "2.0.0",
"marked": "0.3.6",
"mkdirp": "0.5.1",
"requizzle": "0.2.1",
"strip-json-comments": "2.0.1",
"taffydb": "2.6.2",
"underscore": "1.8.3"
}
},
"jshint": {
"version": "2.9.5",
"resolved": "https://registry.npmjs.org/jshint/-/jshint-2.9.5.tgz",
@ -4101,14 +4066,6 @@
"is-buffer": "1.1.5"
}
},
"klaw": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/klaw/-/klaw-2.0.0.tgz",
"integrity": "sha1-WcEo4Nxc5BAgEVEZTuucv4WGUPY=",
"requires": {
"graceful-fs": "4.1.11"
}
},
"lazy-cache": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz",
@ -4462,6 +4419,13 @@
"requires": {
"readable-stream": "1.0.34",
"semver": "4.3.6"
},
"dependencies": {
"semver": {
"version": "4.3.6",
"resolved": "https://registry.npmjs.org/semver/-/semver-4.3.6.tgz",
"integrity": "sha1-MAvG4OhjdPe6YQaLWx7NV/xlMto="
}
}
},
"longest": {
@ -4511,11 +4475,6 @@
"resolved": "https://registry.npmjs.org/map-stream/-/map-stream-0.1.0.tgz",
"integrity": "sha1-5WqpTEyAVaFkBKBnS3jyFffI4ZQ="
},
"marked": {
"version": "0.3.6",
"resolved": "https://registry.npmjs.org/marked/-/marked-0.3.6.tgz",
"integrity": "sha1-ssbGGPzOzk74bE/Gy4p8v1rtqNc="
},
"md5-hex": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/md5-hex/-/md5-hex-1.3.0.tgz",
@ -4726,7 +4685,7 @@
"requires": {
"hosted-git-info": "2.5.0",
"is-builtin-module": "1.0.0",
"semver": "4.3.6",
"semver": "5.4.1",
"validate-npm-package-license": "3.0.1"
}
},
@ -5439,21 +5398,6 @@
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
},
"requizzle": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/requizzle/-/requizzle-0.2.1.tgz",
"integrity": "sha1-aUPDUwxNmn5G8c3dUcFY/GcM294=",
"requires": {
"underscore": "1.6.0"
},
"dependencies": {
"underscore": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz",
"integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag="
}
}
},
"resolve": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.4.0.tgz",
@ -5574,9 +5518,9 @@
"integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg=="
},
"semver": {
"version": "4.3.6",
"resolved": "https://registry.npmjs.org/semver/-/semver-4.3.6.tgz",
"integrity": "sha1-MAvG4OhjdPe6YQaLWx7NV/xlMto="
"version": "5.4.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.4.1.tgz",
"integrity": "sha512-WfG/X9+oATh81XtllIo/I8gOiY9EXRdv1cQdyykeXK17YcUW3EXUAi2To4pcH6nZtJPr7ZOpM5OMyWJZm+8Rsg=="
},
"sequencify": {
"version": "0.0.7",
@ -5964,21 +5908,11 @@
"get-stdin": "4.0.1"
}
},
"strip-json-comments": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
"integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo="
},
"supports-color": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
"integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc="
},
"taffydb": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/taffydb/-/taffydb-2.6.2.tgz",
"integrity": "sha1-fLy2S1oUG2ou/CxdLGe04VCyomg="
},
"textextensions": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/textextensions/-/textextensions-1.0.2.tgz",
@ -6119,26 +6053,6 @@
"resolved": "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz",
"integrity": "sha1-5z3T17DXxe2G+6xrCufYxqadUPo="
},
"underscore": {
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz",
"integrity": "sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI="
},
"underscore-contrib": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/underscore-contrib/-/underscore-contrib-0.3.0.tgz",
"integrity": "sha1-ZltmwkeD+PorGMn4y7Dix9SMJsc=",
"requires": {
"underscore": "1.6.0"
},
"dependencies": {
"underscore": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.6.0.tgz",
"integrity": "sha1-izixDKze9jM3uLJOT/htRa6lKag="
}
}
},
"unique-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/unique-stream/-/unique-stream-1.0.0.tgz",
@ -6367,11 +6281,6 @@
"resolved": "https://registry.npmjs.org/wtf-8/-/wtf-8-1.0.0.tgz",
"integrity": "sha1-OS2LotDxw00e4tYw8V0O+2jhBIo="
},
"xmlcreate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/xmlcreate/-/xmlcreate-1.0.2.tgz",
"integrity": "sha1-+mv3YqYKQT+z3Y9LA8WyaSONMI8="
},
"xmlhttprequest-ssl": {
"version": "1.5.3",
"resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.3.tgz",


@ -25,7 +25,6 @@
"gulp-tap": "^0.1.3",
"gulp-uglify": "^2.0.0",
"gulp-util": "^3.0.7",
"jsdoc": "^3.4.3",
"jshint": "^2.9.4",
"karma": "^1.7.0",
"karma-chrome-launcher": "^2.2.0",
@ -35,6 +34,7 @@
"karma-requirejs": "^1.1.0",
"mocha": "^3.0.2",
"requirejs": "^2.1.22",
"semver": "^5.4.1",
"yargs": "^7.0.2"
},
"scripts": {

gulp/push_build.sh (new executable file, 51 lines)

@ -0,0 +1,51 @@
#!/bin/bash
TMP_DIR=$(pwd)/tmp
mkdir $TMP_DIR
TONE_DIR=$(pwd)/..
BUILD_DIR=$TMP_DIR/build
# clone the build repo
if [ "$TRAVIS" = "true" ]; then
GITHUB_USER=${GH_TOKEN}@
fi
git clone https://${GITHUB_USER}github.com/Tonejs/build $BUILD_DIR > /dev/null 2>&1
cd $BUILD_DIR
git checkout gh-pages
# generate a new build
gulp build
# push to the appropriate location
if [ "$TRAVIS" = "true" ]; then
if [ "$TRAVIS_BRANCH" = "dev" ]; then
# dev builds go into the dev folder
cp -a $TONE_DIR/build/. $BUILD_DIR/dev/
elif [ "$TRAVIS_BRANCH" = "master" ]; then
# master builds are on the root level folder
cp -a $TONE_DIR/build/. $BUILD_DIR/
# and also in a folder with the version name
VERSION=$(node $TONE_DIR/gulp/version.js $TONE_DIR)
mkdir $BUILD_DIR/$VERSION
cp -a $TONE_DIR/build/. $BUILD_DIR/$VERSION
fi
fi
# push the build
git add .
git commit -m "build #$TRAVIS_BUILD_NUMBER: $TRAVIS_COMMIT_MESSAGE"
git push -f
rm -rf $TMP_DIR


@ -2,11 +2,13 @@
if [ "${TRAVIS_PULL_REQUEST}" = "false" ]; then
# only commit the builds when not a PR
gulp commitDevBuild
gulp commitJSDocs
# commit the build
sh push_build.sh
# update the site
sh update_site.sh
fi
# do coveralls either way
gulp coveralls
# upload coveralls
gulp coveralls

gulp/update_site.sh (new executable file, 18 lines)

@ -0,0 +1,18 @@
#!/bin/bash
TMP_DIR=$(pwd)/tmp/
mkdir $TMP_DIR
SITE_DIR=$TMP_DIR/Site
# clone the tonejs.github.io site
if [ "$TRAVIS" = "true" ]; then
GITHUB_USER=${GH_TOKEN}@
fi
git clone https://${GITHUB_USER}github.com/Tonejs/tonejs.github.io $SITE_DIR > /dev/null 2>&1
cd $SITE_DIR
# run the update script
sh update.sh
rm -rf $TMP_DIR

gulp/version.js (new executable file, 7 lines)

@ -0,0 +1,7 @@
const fs = require('fs')
var VERSION = fs.readFileSync(`${process.argv[2]}/Tone/core/Tone.js`, 'utf-8')
.match(/(?:Tone\.version\s*=\s*)(?:'|")(.*)(?:'|");/m)[1];
console.log(VERSION)
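A hypothetical illustration of what the regular expression above extracts:

const line = 'Tone.version = "0.11.0";'
const match = line.match(/(?:Tone\.version\s*=\s*)(?:'|")(.*)(?:'|");/m)
console.log(match[1]) //-> 0.11.0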


@ -1,6 +1,6 @@
{
"name": "tone",
"version": "0.10.0",
"version": "0.11.0",
"description": "A Web Audio framework for making interactive music in the browser.",
"main": "build/Tone.js",
"files": [


@ -1,5 +1,5 @@
define(["Tone/component/Analyser", "Test", "helper/Basic", "helper/Supports"],
function (Analyser, Test, Basic, Supports) {
define(["Tone/component/Analyser", "Test", "helper/Basic", "helper/Supports", "Tone/source/Noise"],
function (Analyser, Test, Basic, Supports, Noise) {
describe("Analyser", function(){
@ -33,7 +33,7 @@ define(["Tone/component/Analyser", "Test", "helper/Basic", "helper/Supports"],
it("can run fft analysis", function(){
var anl = new Analyser("fft", 512);
analysis = anl.analyse();
analysis = anl.getValue();
expect(analysis.length).to.equal(512);
for (i = 0; i < analysis.length; i++){
expect(analysis[i]).is.lessThan(0);
@ -41,14 +41,22 @@ define(["Tone/component/Analyser", "Test", "helper/Basic", "helper/Supports"],
anl.dispose();
});
it("can run waveform analysis", function(){
it("can run waveform analysis", function(done){
var noise = new Noise();
var anl = new Analyser("waveform", 256);
analysis = anl.analyse();
expect(analysis.length).to.equal(256);
for (i = 0; i < analysis.length; i++){
expect(analysis[i]).is.within(0, 1);
}
anl.dispose();
noise.connect(anl);
noise.start();
setTimeout(function(){
analysis = anl.getValue();
expect(analysis.length).to.equal(256);
for (i = 0; i < analysis.length; i++){
expect(analysis[i]).is.within(-1, 1);
}
anl.dispose();
noise.dispose();
done()
}, 300);
});
it("throws an error if an invalid type is set", function(){
@ -60,4 +68,4 @@ define(["Tone/component/Analyser", "Test", "helper/Basic", "helper/Supports"],
});
});
});
});


@ -1,5 +1,5 @@
define(["Tone/component/Envelope", "helper/Basic", "helper/Offline", "Test",
"helper/Supports", "helper/PassAudio", "helper/APITest"],
define(["Tone/component/Envelope", "helper/Basic", "helper/Offline", "Test",
"helper/Supports", "helper/PassAudio", "helper/APITest"],
function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
describe("Envelope", function(){
@ -152,10 +152,10 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
env.toMaster();
env.triggerAttack(0);
}, 0.7).then(function(buffer){
buffer.forEach(function(sample, time){
buffer.forEach(function(sample, time){
var target = 1 - (time - 0.2) * 10;
expect(sample).to.be.closeTo(target, 0.01);
}, 0.2, 0.2);
}, 0.2, 0.2);
});
});
@ -228,13 +228,8 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
env.triggerAttack(attackTime);
env.triggerRelease(releaseTime);
}, 0.6).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < attackTime - 0.001){
expect(sample).to.equal(0);
} else if (time > e.attack + e.decay + releaseTime + e.release){
expect(sample).to.equal(0);
}
});
expect(buffer.getValueAtTime(attackTime - 0.001)).to.equal(0);
expect(buffer.getValueAtTime(e.attack + e.decay + releaseTime + e.release)).to.be.below(0.01);
});
});
@ -253,11 +248,8 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
env.triggerAttack(attackTime);
}, 0.4).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < attackTime - 0.001){
expect(sample).to.equal(0);
} else if (time > attackTime + e.attack + e.decay){
expect(sample).to.equal(0);
}
expect(buffer.getValueAtTime(attackTime - 0.001)).to.equal(0);
expect(buffer.getValueAtTime(attackTime + e.attack + e.decay)).to.be.below(0.01);
});
});
});
@ -317,7 +309,7 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
} else if (time < duration + e.release){
expect(sample).to.be.within(0, e.sustain + 0.01);
} else {
expect(sample).to.be.below(0.001);
expect(sample).to.be.below(0.0015);
}
});
});
@ -375,7 +367,7 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
expect(sample).to.be.below(0.02);
} else if (time > 0.5 && time < 0.8){
expect(sample).to.be.above(0);
}
}
});
});
});
@ -575,7 +567,7 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
env.triggerAttackRelease(0.4, 0.1, 0.5);
}, 0.8).then(function(buffer){
buffer.forEach(function(sample){
expect(sample).to.be.lte(0.5);
expect(sample).to.be.at.most(0.51);
});
});
});
@ -605,4 +597,4 @@ function (Envelope, Basic, Offline, Test, Supports, PassAudio, APITest) {
});
});
});
});
});

test/component/FFT.js (new file, 52 lines)

@ -0,0 +1,52 @@
define(["Tone/component/FFT", "Test", "helper/Basic", "helper/Supports", "Tone/source/Noise"],
function (FFT, Test, Basic, Supports, Noise) {
describe("FFT", function(){
Basic(FFT);
it("handles input connection", function(){
var fft = new FFT();
Test.connect(fft);
fft.dispose();
});
it("can get and set properties", function(){
var fft = new FFT();
fft.set({
"size" : 128
});
var values = fft.get();
expect(values.size).to.equal(128);
fft.dispose();
});
it("can correctly set the size", function(){
var fft = new FFT(512);
expect(fft.size).to.equal(512);
fft.size = 1024;
expect(fft.size).to.equal(1024);
fft.dispose();
});
it("can run waveform analysis", function(done){
var noise = new Noise();
var fft = new FFT(256);
noise.connect(fft);
noise.start();
setTimeout(function(){
analysis = fft.getValue();
expect(analysis.length).to.equal(256);
for (i = 0; i < analysis.length; i++){
expect(analysis[i]).is.within(-200, 0);
}
fft.dispose();
noise.dispose();
done()
}, 300);
});
});
});


@ -1,4 +1,4 @@
define(["Tone/component/FrequencyEnvelope", "helper/Basic", "helper/Offline", "Test", "Tone/component/Envelope"],
define(["Tone/component/FrequencyEnvelope", "helper/Basic", "helper/Offline", "Test", "Tone/component/Envelope"],
function (FrequencyEnvelope, Basic, Offline, Test, Envelope) {
describe("FrequencyEnvelope", function(){
@ -70,11 +70,11 @@ function (FrequencyEnvelope, Basic, Offline, Test, Envelope) {
if (time < e.attack){
expect(sample).to.be.within(200, 1600);
} else if (time < e.attack + e.decay){
expect(sample).to.be.closeTo(1600, 2);
}
expect(sample).to.be.closeTo(1600, 10);
}
});
});
});
});
});
});
});


@ -1,6 +1,6 @@
define(["Tone/component/Meter", "helper/Basic", "helper/Offline", "Test",
"Tone/signal/Signal", "helper/PassAudio", "Tone/type/Type",
"Tone/component/Merge", "Tone/source/Oscillator"],
define(["Tone/component/Meter", "helper/Basic", "helper/Offline", "Test",
"Tone/signal/Signal", "helper/PassAudio", "Tone/type/Type",
"Tone/component/Merge", "Tone/source/Oscillator"],
function (Meter, Basic, Offline, Test, Signal, PassAudio, Tone, Merge, Oscillator) {
describe("Meter", function(){
@ -18,11 +18,9 @@ function (Meter, Basic, Offline, Test, Signal, PassAudio, Tone, Merge, Oscillato
it("handles getter/setter as Object", function(){
var meter = new Meter();
var values = {
"type" : "signal",
"smoothing" : 0.2
};
meter.set(values);
expect(meter.get().type).to.equal("signal");
expect(meter.get().smoothing).to.equal(0.2);
meter.dispose();
});
@ -40,14 +38,14 @@ function (Meter, Basic, Offline, Test, Signal, PassAudio, Tone, Merge, Oscillato
return PassAudio(function(input){
meter = new Meter();
input.chain(meter, Tone.Master);
});
});
});
it("measures the incoming signal", function(done){
var meter = new Meter("signal");
var meter = new Meter();
var signal = new Signal(1).connect(meter);
setTimeout(function(){
expect(meter.value).to.be.closeTo(1, 0.05);
expect(meter.getValue()).to.be.closeTo(1, 0.05);
meter.dispose();
signal.dispose();
done();
@ -59,7 +57,7 @@ function (Meter, Basic, Offline, Test, Signal, PassAudio, Tone, Merge, Oscillato
var osc = new Oscillator().connect(meter).start();
osc.volume.value = -6;
setTimeout(function(){
expect(meter.value).to.be.closeTo(1, 0.1);
expect(meter.getLevel()).to.be.closeTo(-6, 1);
meter.dispose();
osc.dispose();
done();
@ -67,4 +65,4 @@ function (Meter, Basic, Offline, Test, Signal, PassAudio, Tone, Merge, Oscillato
});
});
});
});
});


@ -0,0 +1,52 @@
define(["Tone/component/Waveform", "Test", "helper/Basic", "helper/Supports", "Tone/source/Noise"],
function (Waveform, Test, Basic, Supports, Noise) {
describe("Waveform", function(){
Basic(Waveform);
it("handles input connection", function(){
var anl = new Waveform();
Test.connect(anl);
anl.dispose();
});
it("can get and set properties", function(){
var anl = new Waveform();
anl.set({
"size" : 128
});
var values = anl.get();
expect(values.size).to.equal(128);
anl.dispose();
});
it("can correctly set the size", function(){
var anl = new Waveform(512);
expect(anl.size).to.equal(512);
anl.size = 1024;
expect(anl.size).to.equal(1024);
anl.dispose();
});
it("can run waveform analysis", function(done){
var noise = new Noise();
var anl = new Waveform(256);
noise.connect(anl);
noise.start();
setTimeout(function(){
analysis = anl.getValue();
expect(analysis.length).to.equal(256);
for (i = 0; i < analysis.length; i++){
expect(analysis[i]).is.within(-1, 1);
}
anl.dispose();
noise.dispose();
done()
}, 300);
});
});
});


@ -1,4 +1,4 @@
define(["helper/Basic", "Test", "Tone/core/Param", "Tone/type/Type", "Tone/signal/Signal", "Tone/core/Transport"],
define(["helper/Basic", "Test", "Tone/core/Param", "Tone/type/Type", "Tone/signal/Signal", "Tone/core/Transport"],
function (Basic, Test, Param, Tone, Signal, Transport) {
describe("Param", function(){
@ -114,7 +114,7 @@ define(["helper/Basic", "Test", "Tone/core/Param", "Tone/type/Type", "Tone/signa
expect(param.value).to.be.closeTo(0, 0.01);
param.dispose();
});
});
context("Scheduling API", function(){
@ -141,14 +141,14 @@ define(["helper/Basic", "Test", "Tone/core/Param", "Tone/type/Type", "Tone/signa
it ("can schedule an exponential ramp", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.exponentialRampToValueAtTime(3, 1);
param.exponentialRampToValueAtTime(3, "+1");
param.dispose();
});
it ("can approach a target value", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.setTargetAtTime(0.2, 1, 2);
param.setTargetAtTime(0.2, "+1", 2);
param.dispose();
});
@ -162,30 +162,45 @@ define(["helper/Basic", "Test", "Tone/core/Param", "Tone/type/Type", "Tone/signa
it ("can schedule multiple automations", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.linearRampToValueAtTime(0.5, 0.5);
param.linearRampToValueAtTime(0, 1);
param.linearRampToValueAtTime(0.5, "+0.5");
param.linearRampToValueAtTime(0, "+1");
param.dispose();
});
it ("can cancel an automation", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.linearRampToValueAtTime(0.5, 0.5);
param.linearRampToValueAtTime(0.5, "+0.5");
param.cancelScheduledValues(0);
param.dispose();
});
it ("can cancelAndHold an automation", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.linearRampToValueAtTime(0.5, "+0.5");
param.cancelAndHoldAtTime(0);
param.dispose();
});
it ("can set a linear ramp from the current time", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.linearRampToValue(0.5, 0.5);
param.linearRampTo(0.5, 0.5);
param.dispose();
});
it ("can set an exponential ramp from the current time", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.exponentialRampToValue(0.5, 0.5);
param.exponentialRampTo(0.5, 0.5);
param.dispose();
});
it ("can set an exponential approach ramp from the current time", function(){
var gain = Tone.context.createGain();
var param = new Param(gain.gain);
param.targetRampTo(0.5, 0.5);
param.dispose();
});
@ -209,4 +224,4 @@ define(["helper/Basic", "Test", "Tone/core/Param", "Tone/type/Type", "Tone/signa
});
});
});
});
});
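The renamed ramp helpers exercised by these tests, gathered in one hypothetical snippet:

var gain = Tone.context.createGain();
var param = new Tone.Param(gain.gain);
param.linearRampTo(0.5, 0.5);      //previously linearRampToValue
param.exponentialRampTo(0.5, 0.5); //previously exponentialRampToValue
param.targetRampTo(0.5, 0.5);      //new exponential-approach ramp
param.cancelAndHoldAtTime(0);      //new: cancel later events and hold the current value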


@ -126,7 +126,7 @@ define(["Test", "Tone/core/Timeline"], function (Test, Timeline) {
sched.dispose();
});
it ("can get the scheduled event at the given time", function(){
var sched = new Timeline();
sched.add({
@ -261,7 +261,7 @@ define(["Test", "Tone/core/Timeline"], function (Test, Timeline) {
sched.dispose();
});
it ("has no problem with many items", function(){
var sched = new Timeline();
@ -528,6 +528,26 @@ define(["Test", "Tone/core/Timeline"], function (Test, Timeline) {
expect(sched.length).to.equal(0);
sched.dispose();
});
it("can add items during iteration", function(){
var sched = new Timeline();
for (var i = 0; i < 1000; i++){
sched.add({"time" : i});
}
var added = false;
sched.forEach(function(event){
if (!added){
added = true;
sched.add({
"time" : 10,
"added" : true,
});
}
});
expect(sched.length).to.equal(1001);
sched.dispose();
});
});
});
});
});
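A small sketch of the Timeline behavior covered by the new test above, mirroring its guarded pattern; adding an event while iterating is expected not to break the loop:

var timeline = new Tone.Timeline();
for (var i = 0; i < 10; i++){
	timeline.add({ "time" : i });
}
var added = false;
timeline.forEach(function(event){
	if (!added){
		added = true;
		timeline.add({ "time" : 10 }); //adding during iteration no longer disrupts forEach
	}
});
//timeline.length is now 11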


@ -178,7 +178,7 @@ define(["Test", "Tone/core/Tone", "helper/PassAudio", "Tone/source/Oscillator",
});
context("Tone.setContext", function(){
context("Tone.context", function(){
it ("can set a new context", function(){
var origCtx = Tone.context;
@ -207,6 +207,19 @@ define(["Test", "Tone/core/Tone", "helper/PassAudio", "Tone/source/Oscillator",
return ctx.close();
});
it ("tests if the audio context time has passed", function(){
// overwrite warn to throw errors
var originalWarn = console.warn;
console.warn = function(warning){
throw new Error(warning);
};
var currentTime = Tone.context.currentTime;
expect(function(){
Tone.isPast(currentTime-1);
}).to.throw(Error);
console.warn = originalWarn;
});
});
context("Tone.prototype.set / get", function(){


@ -1,5 +1,5 @@
define(["Test", "Tone/core/Transport", "Tone/core/Tone", "helper/Offline",
"Tone/type/TransportTime", "Tone/signal/Signal", "helper/BufferTest"],
define(["Test", "Tone/core/Transport", "Tone/core/Tone", "helper/Offline",
"Tone/type/TransportTime", "Tone/signal/Signal", "helper/BufferTest"],
function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
describe("Transport", function(){
@ -230,7 +230,7 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
});
context("state", function(){
@ -287,7 +287,7 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
it("resets ticks on stop but not on pause", function(){
return Offline(function(Transport){
Transport.start(0).pause(0.1).stop(0.2);
var pausedTicks = 0;
return function(time){
@ -363,10 +363,10 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
}, 0.6);
});
});
});
context("schedule", function(){
context("schedule", function(){
it ("can schedule an event on the timeline", function(){
return Offline(function(Transport){
var eventID = Transport.schedule(function(){}, 0);
@ -376,24 +376,32 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
it ("scheduled event gets invoked with the time of the event", function(){
var wasCalled = false;
return Offline(function(Transport){
var startTime = 0.1;
Transport.schedule(function(time){
expect(time).to.be.closeTo(startTime, 0.01);
wasCalled = true;
}, 0);
Transport.start(startTime);
}, 0.2);
}, 0.2).then(function(){
expect(wasCalled).to.be.true;
});
});
it ("can schedule events with TransportTime", function(){
var wasCalled = false;
return Offline(function(Transport){
var startTime = 0.1;
var eighth = Transport.toSeconds("8n");
Transport.schedule(function(time){
expect(time).to.be.closeTo(startTime + eighth, 0.01);
wasCalled = true;
}, TransportTime("8n"));
Transport.start(startTime);
}, 0.5);
}, 0.5).then(function(){
expect(wasCalled).to.be.true;
});
});
it ("can cancel a scheduled event", function(){
@ -401,7 +409,7 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
var eventID = Transport.schedule(function(){
throw new Error("should not call this function");
}, 0);
Transport.cancel(eventID);
Transport.clear(eventID);
Transport.start();
});
});
@ -424,25 +432,30 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
Transport.scheduleOnce(Tone.noOp, 0);
Transport.scheduleOnce(Tone.noOp, 1);
Transport.scheduleOnce(Tone.noOp, 2);
expect(Transport._onceEvents.length).to.equal(3);
expect(Transport._timeline.length).to.equal(3);
Transport.cancel(2);
expect(Transport._onceEvents.length).to.equal(2);
expect(Transport._timeline.length).to.equal(2);
Transport.cancel(0);
expect(Transport._onceEvents.length).to.equal(0);
expect(Transport._timeline.length).to.equal(0);
});
});
it ("scheduled event anywhere along the timeline", function(){
var wasCalled = false;
return Offline(function(Transport){
var startTime = Transport.now();
Transport.schedule(function(time){
expect(time).to.be.closeTo(startTime + 0.5, 0.001);
wasCalled = true;
}, 0.5);
Transport.start(startTime);
}, 0.6);
}, 0.6).then(function(){
expect(wasCalled).to.be.true;
});
});
it ("can schedule multiple events and invoke them in the right order", function(){
var wasCalled = false;
return Offline(function(Transport){
var first = false;
Transport.schedule(function(){
@ -450,9 +463,12 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
}, 0.1);
Transport.schedule(function(){
expect(first).to.be.true;
wasCalled = true;
}, 0.11);
Tone.Transport.start();
}, 0.2);
}, 0.2).then(function(){
expect(wasCalled).to.be.true;
});
});
it ("invokes the event again if the timeline is restarted", function(){
@ -467,9 +483,27 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
});
it ("can add an event after the Transport is started", function(){
var wasCalled = false;
return Offline(function(Transport){
Transport.start(0);
var wasScheduled = false;
return function(time){
if (time > 0.1 && !wasScheduled){
wasScheduled = true;
Transport.schedule(function(){
wasCalled = true;
}, 0.15);
}
}
}, 0.3).then(function(){
expect(wasCalled).to.be.true;
});
});
});
context("scheduleRepeat", function(){
context("scheduleRepeat", function(){
it ("can schedule a repeated event", function(){
return Offline(function(Transport){
@ -483,7 +517,6 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
return Offline(function(Transport){
var startTime = 0.1;
var eventID = Transport.scheduleRepeat(function(time){
Transport.clear(eventID);
expect(time).to.be.closeTo(startTime, 0.01);
invoked = true;
}, 1, 0);
@ -546,6 +579,7 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
it ("repeats at the repeat interval", function(){
var wasCalled = false;
return Offline(function(Transport){
var repeatTime = -1;
Transport.scheduleRepeat(function(time){
@ -553,9 +587,12 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
expect(time - repeatTime).to.be.closeTo(0.1, 0.01);
}
repeatTime = time;
wasCalled = true;
}, 0.1, 0);
Transport.start();
}, 0.5);
}, 0.5).then(function(){
expect(wasCalled).to.be.true;
});
});
it ("can schedule multiple events and invoke them in the right order", function(){
@ -578,18 +615,10 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
});
it ("cannot schedule an event with an interval of 0", function(){
return Offline(function(Transport){
expect(function(){
Transport.scheduleRepeat(function(){}, 0, 10);
}).to.throw(Error);
});
});
it ("repeats for the given interval", function(){
var repeatCount = 0;
return Offline(function(Transport){
Transport.scheduleRepeat(function(){
Transport.scheduleRepeat(function(time){
repeatCount++;
}, 0.1, 0, 0.5);
Transport.start();
@ -598,9 +627,49 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
});
it ("can add an event after the Transport is started", function(){
var invocations = 0;
return Offline(function(Transport){
Transport.start(0);
var wasScheduled = false;
var times = [0.15, 0.3]
return function(time){
if (time > 0.1 && !wasScheduled){
wasScheduled = true;
Transport.scheduleRepeat(function(time){
expect(time).to.be.closeTo(times[invocations], 0.01);
invocations++;
}, 0.15, 0.15);
}
}
}, 0.31).then(function(){
expect(invocations).to.equal(2);
});
});
it ("can add an event to the past after the Transport is started", function(){
var invocations = 0;
return Offline(function(Transport){
Transport.start(0);
var wasScheduled = false;
var times = [0.15, 0.25]
return function(time){
if (time >= 0.12 && !wasScheduled){
wasScheduled = true;
Transport.scheduleRepeat(function(time){
expect(time).to.be.closeTo(times[invocations], 0.01);
invocations++;
}, 0.1, 0.05);
}
}
}, 0.3).then(function(){
expect(invocations).to.equal(2);
});
});
});
context("scheduleOnce", function(){
context("scheduleOnce", function(){
it ("can schedule a single event on the timeline", function(){
return Offline(function(Transport){
@ -683,25 +752,29 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
context("events", function(){
it("invokes start/stop/pause events", function(){
var invokations = 0;
var invocations = 0;
return Offline(function(Transport){
Tone.Transport.on("start pause stop", function(){
invokations++;
invocations++;
});
Transport.start().stop(0.1).start(0.2);
}, 0.5).then(function(){
expect(invokations).to.equal(3);
expect(invocations).to.equal(3);
});
});
it("invokes start event with correct offset", function(){
var wasCalled = false;
return Offline(function(Transport){
Transport.on("start", function(time, offset){
expect(time).to.be.closeTo(0.2, 0.01);
expect(offset).to.be.closeTo(0.5, 0.001);
wasCalled = true;
});
Transport.start(0.2, "4n");
}, 0.3);
}, 0.3).then(function(){
expect(wasCalled).to.be.true;
});
});
it("invokes the event just before the scheduled time", function(){
@ -719,20 +792,20 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
it("passes in the time argument to the events", function(){
var invokations = 0;
var invocations = 0;
return Offline(function(Transport){
var now = Transport.now();
Transport.on("start", function(time){
invokations++;
invocations++;
expect(time).to.be.closeTo(now + 0.1, 0.01);
});
Transport.on("stop", function(time){
invokations++;
invocations++;
expect(time).to.be.closeTo(now + 0.2, 0.01);
});
Transport.start("+0.1").stop("+0.2");
}, 0.3).then(function(){
expect(invokations).to.equal(2);
expect(invocations).to.equal(2);
});
});
@ -778,37 +851,37 @@ function (Test, Transport, Tone, Offline, TransportTime, Signal, BufferTest) {
});
it("can swing", function(){
var invokations = 0;
var invocations = 0;
return Offline(function(Transport){
Transport.swing = 1;
Transport.swingSubdivision = "8n";
var eightNote = Transport.toSeconds("8n");
//downbeat, no swing
Transport.schedule(function(time){
invokations++;
invocations++;
expect(time).is.closeTo(0, 0.001);
}, 0);
//eighth note has swing
Transport.schedule(function(time){
invokations++;
invocations++;
expect(time).is.closeTo(eightNote * 5/3, 0.001);
}, "8n");
//sixteenth note is also swung
Transport.schedule(function(time){
invokations++;
invocations++;
expect(time).is.closeTo(eightNote, 0.05);
}, "16n");
//no swing on the quarter
Transport.schedule(function(time){
invokations++;
invocations++;
expect(time).is.closeTo(eightNote * 2, 0.001);
}, "4n");
Transport.start(0).stop(0.7);
}, 0.7).then(function(){
expect(invokations).to.equal(4);
expect(invocations).to.equal(4);
});
});
});
});
});
});
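As a reading aid for the scheduling tests above, here is a minimal usage sketch of the Transport API they exercise; it assumes Tone.js is loaded and uses the global Tone.Transport directly rather than the Offline test helper.

// Schedule a one-off callback 0.5s along the Transport timeline; returns an event id.
var id = Tone.Transport.schedule(function(time){
	// `time` is the exact audio-clock time, to be used for sample-accurate scheduling
}, 0.5);
// Repeat every 0.25s, starting at 1s, for a 2s window.
Tone.Transport.scheduleRepeat(function(time){
	// invoked on every repeat
}, 0.25, 1, 2);
// scheduleOnce events remove themselves after firing once.
Tone.Transport.scheduleOnce(function(time){
	// fires a single time at 2s
}, 2);
Tone.Transport.start();
// Tone.Transport.clear(id) removes a single event; Tone.Transport.cancel(0) removes all events from time 0 onward.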

View file

@ -0,0 +1,26 @@
define(["Test", "Tone/core/TransportEvent", "Tone/core/Tone", "helper/Offline", "helper/PassAudio", "Tone/source/Oscillator", "Tone/core/AudioNode"],
function (Test, TransportEvent, Tone, Offline, PassAudio, Oscillator, AudioNode) {
describe("TransportEvent", function(){
it ("can be created and disposed", function(){
return Offline(function(Transport){
var event = new TransportEvent(Transport, {
"time" : 0
});
event.dispose();
Test.wasDisposed(event);
})
});
it ("generates a unique event ID", function(){
return Offline(function(Transport){
var event = new TransportEvent(Transport, {
"time" : 0
});
expect(event.id).to.be.a('number');
event.dispose();
})
});
});
});

View file

@ -0,0 +1,37 @@
define(["Test", "Tone/core/TransportRepeatEvent", "Tone/core/Tone", "helper/Offline", "helper/PassAudio", "Tone/source/Oscillator", "Tone/core/AudioNode"],
function (Test, TransportRepeatEvent, Tone, Offline, PassAudio, Oscillator, AudioNode) {
describe("TransportRepeatEvent", function(){
it ("can be created and disposed", function(){
return Offline(function(Transport){
var event = new TransportRepeatEvent(Transport, {
"time" : 0
});
event.dispose();
Test.wasDisposed(event);
})
});
it ("generates a unique event ID", function(){
return Offline(function(Transport){
var event = new TransportRepeatEvent(Transport, {
"time" : 0
});
expect(event.id).to.be.a('number');
event.dispose();
})
});
it ("is removed from the Transport when disposed", function(){
return Offline(function(Transport){
var event = new TransportRepeatEvent(Transport, {
"time" : 0
});
event.dispose();
expect(Transport._timeline.length).to.equal(0);
})
});
});
});

View file

@ -1,5 +1,5 @@
define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
"Tone/core/Transport", "Tone/event/Event", "helper/Offline", "Test"],
define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
"Tone/core/Transport", "Tone/event/Event", "helper/Offline", "Test"],
function (Basic, Part, Tone, Transport, Event, Offline, Test) {
describe("Part", function(){
@ -210,7 +210,7 @@ define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
//loop duration is the same
expect(firstEvent.loopEnd).to.equal("1m");
expect(firstEvent.loopStart).to.equal("4n");
var secondEvent = part.at(0.3);
expect(secondEvent.humanize).to.equal(0.1);
expect(secondEvent.probability).to.equal(0.2);
@ -412,7 +412,7 @@ define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
});
});
context("Looping", function(){
it ("can be set to loop", function(){
@ -462,20 +462,20 @@ define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
"loopEnd" : 0.5,
"loop" : true,
"callback" : function(time, value){
if (value === 1){
if (value === 1 && !switched){
switched = true;
part.loopEnd = 0.2;
} else if (switched){
expect(value).to.equal(0);
invoked = true;
}
}
},
events : [[0, 0], [0.25, 1]]
}).start(0);
Transport.start();
}, 0.7).then(function(){
expect(invoked).to.be.true;
});
});
});
it ("a started part will be stopped if it is before the loopStart", function(){
@ -492,7 +492,7 @@ define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
} else if (switched){
expect(value).to.equal(1);
invoked = true;
}
}
},
events : [[0, 0], [0.25, 1]]
}).start(0);
@ -502,7 +502,7 @@ define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
});
});
it ("can loop a specific number of times", function(){
var callCount = 0;
return Offline(function(Transport){
@ -582,7 +582,7 @@ define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
part.loopEnd = 0.3;
Transport.start(0.2).stop(0.61).start(0.8);
}, 2).then(function(){
expect(eventTimeIndex).to.equal(7);
expect(eventTimeIndex).to.equal(8);
});
});
@ -710,4 +710,4 @@ define(["helper/Basic", "Tone/event/Part", "Tone/core/Tone",
});
});
});
});
});
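For orientation, a minimal sketch of the Part looping behavior covered by these tests; it assumes Tone.js is loaded and uses the global Transport instead of the Offline helper.

// Each event is a [time, value] pair; the callback receives the scheduled time and the value.
var part = new Tone.Part({
	"loop" : true,
	"loopEnd" : 0.5,
	"callback" : function(time, value){
		// e.g. trigger an instrument here using `time`
	},
	"events" : [[0, 0], [0.25, 1]]
}).start(0);
Tone.Transport.start();
// Loop points can be moved while running; events beyond the new loopEnd no longer fire.
part.loopEnd = 0.2;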

View file

@ -1,4 +1,4 @@
define(["Test", "Tone/type/Type", "Tone/core/Transport", "Tone/type/Time", "Tone/type/Frequency"],
define(["Test", "Tone/type/Type", "Tone/core/Transport", "Tone/type/Time", "Tone/type/Frequency"],
function (Test, Type, Transport, Time, Frequency) {
//modified from http://stackoverflow.com/questions/15298912/javascript-generating-combinations-from-n-arrays-with-m-elements
@ -67,26 +67,37 @@ function (Test, Type, Transport, Time, Frequency) {
}
}
function silenceWarning(cb){
var warning = console.warn;
console.warn = function(){};
cb();
console.warn = warning;
}
return {
method : function(constructor, fn, args, consArgs){
it (fn+" ("+args.join(", ") + ")", function(){
var permutations = generateArgs(args);
for (var i = 0; i < permutations.length; i++){
var instance = new constructor(consArgs);
instance[fn].apply(instance, permutations[i]);
instance.dispose();
}
silenceWarning(function(){
var permutations = generateArgs(args);
for (var i = 0; i < permutations.length; i++){
var instance = new constructor(consArgs);
instance[fn].apply(instance, permutations[i]);
instance.dispose();
}
})
});
},
member : function(constructor, member, param, consArgs){
it (member+" = "+param, function(){
var permutations = generateArgs([param]);
for (var i = 0; i < permutations.length; i++){
var instance = new constructor(consArgs);
instance[member] = permutations[i];
instance.dispose();
}
silenceWarning(function(){
var permutations = generateArgs([param]);
for (var i = 0; i < permutations.length; i++){
var instance = new constructor(consArgs);
instance[member] = permutations[i];
instance.dispose();
}
});
});
},
constructor : function(constructor, args){
@ -99,15 +110,17 @@ function (Test, Type, Transport, Time, Frequency) {
}
it ("constructor ( "+ argString + " )", function(){
var permutations = generateArgs(args);
for (var i = 0; i < permutations.length; i++){
var Temp = function(){}; // temporary constructor
Temp.prototype = constructor.prototype;
var tmpInst = new Temp();
constructor.apply(tmpInst, permutations[i]);
tmpInst.dispose();
}
silenceWarning(function(){
var permutations = generateArgs(args);
for (var i = 0; i < permutations.length; i++){
var Temp = function(){}; // temporary constructor
Temp.prototype = constructor.prototype;
var tmpInst = new Temp();
constructor.apply(tmpInst, permutations[i]);
tmpInst.dispose();
}
});
});
},
};
});
});

View file

@ -1,5 +1,5 @@
define(["helper/OutputAudio", "Tone/effect/Effect", "helper/PassAudio",
"helper/PassAudioStereo", "Test", "helper/Offline", "Tone/signal/Signal", "Tone/component/Merge"],
define(["helper/OutputAudio", "Tone/effect/Effect", "helper/PassAudio",
"helper/PassAudioStereo", "Test", "helper/Offline", "Tone/signal/Signal", "Tone/component/Merge"],
function (OutputAudio, Effect, PassAudio, PassAudioStereo, Test, Offline, Signal, Merge) {
return function(Constr, args, before){
@ -55,7 +55,7 @@ define(["helper/OutputAudio", "Tone/effect/Effect", "helper/PassAudio",
input.connect(instance);
instance.toMaster();
});
});
});
it("passes audio in both channels", function(){
return PassAudioStereo(function(input){
@ -66,7 +66,7 @@ define(["helper/OutputAudio", "Tone/effect/Effect", "helper/PassAudio",
input.connect(instance);
instance.toMaster();
});
});
});
it("can pass 100% dry signal", function(){
return Offline(function(){
@ -78,8 +78,8 @@ define(["helper/OutputAudio", "Tone/effect/Effect", "helper/PassAudio",
var signalL = new Signal(-1).connect(merge.left);
var signalR = new Signal(1).connect(merge.right);
//make the signals ramp
signalL.linearRampToValue(1, 1);
signalR.linearRampToValue(-1, 1);
signalL.linearRampTo(1, 1);
signalR.linearRampTo(-1, 1);
instance.wet.value = 0;
}, 0.5, 2).then(function(buffer){
buffer.forEach(function(L, R, time){
@ -89,7 +89,7 @@ define(["helper/OutputAudio", "Tone/effect/Effect", "helper/PassAudio",
expect(R).to.be.closeTo(rightValue, 0.001);
});
});
});
});
it("effects the incoming signal", function(){
return Offline(function(){
@ -101,8 +101,8 @@ define(["helper/OutputAudio", "Tone/effect/Effect", "helper/PassAudio",
var signalL = new Signal(-1).connect(merge.left);
var signalR = new Signal(1).connect(merge.right);
//make the signals ramp
signalL.linearRampToValue(1, 1);
signalR.linearRampToValue(-1, 1);
signalL.linearRampTo(1, 1);
signalR.linearRampTo(-1, 1);
if (instance.start){
instance.start();
}
@ -114,14 +114,14 @@ define(["helper/OutputAudio", "Tone/effect/Effect", "helper/PassAudio",
leftEffected = true;
}
if (Math.abs(R - rightValue) > 0.01){
rightEffected = true;
}
});
expect(leftEffected).to.be.true;
expect(rightEffected).to.be.true;
});
});
});
});
};
});
});

View file

@ -1,5 +1,5 @@
define(["Tone/instrument/PolySynth", "helper/Basic", "helper/InstrumentTests", "helper/OutputAudioStereo",
"Tone/instrument/Instrument", "Test", "helper/OutputAudio", "Tone/instrument/MonoSynth", "helper/Offline"],
define(["Tone/instrument/PolySynth", "helper/Basic", "helper/InstrumentTests", "helper/OutputAudioStereo",
"Tone/instrument/Instrument", "Test", "helper/OutputAudio", "Tone/instrument/MonoSynth", "helper/Offline"],
function (PolySynth, Basic, InstrumentTests, OutputAudioStereo, Instrument, Test, OutputAudio, MonoSynth, Offline) {
describe("PolySynth", function(){
@ -27,7 +27,7 @@ function (PolySynth, Basic, InstrumentTests, OutputAudioStereo, Instrument, Test
polySynth.toMaster();
polySynth.triggerAttackRelease(["C4", "D4"], [0.1, 0.2]);
});
});
});
it("triggerAttack and triggerRelease can be invoked without arrays", function(){
return Offline(function(){
@ -38,9 +38,9 @@ function (PolySynth, Basic, InstrumentTests, OutputAudioStereo, Instrument, Test
polySynth.triggerRelease("C4", 0.1);
}, 0.3).then(function(buffer){
expect(buffer.getFirstSoundTime()).to.be.closeTo(0, 0.01);
expect(buffer.getLastSoundTime()).to.be.closeTo(0.2, 0.01);
expect(buffer.getValueAtTime(0.2)).to.be.closeTo(0, 0.01);
});
});
});
it("can stop all of the currently playing sounds", function(){
return Offline(function(){
@ -51,9 +51,9 @@ function (PolySynth, Basic, InstrumentTests, OutputAudioStereo, Instrument, Test
polySynth.releaseAll(0.1);
}, 0.3).then(function(buffer){
expect(buffer.getFirstSoundTime()).to.be.closeTo(0, 0.01);
expect(buffer.getLastSoundTime()).to.be.closeTo(0.2, 0.01);
expect(buffer.getValueAtTime(0.2)).to.be.closeTo(0, 0.01);
});
});
});
it("is silent before being triggered", function(){
return Offline(function(){
@ -62,7 +62,7 @@ function (PolySynth, Basic, InstrumentTests, OutputAudioStereo, Instrument, Test
}).then(function(buffer){
expect(buffer.isSilent()).to.be.true;
});
});
});
it("can be scheduled to start in the future", function(){
return Offline(function(){
@ -116,4 +116,4 @@ function (PolySynth, Basic, InstrumentTests, OutputAudioStereo, Instrument, Test
});
});
});
});
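A short usage sketch of the PolySynth calls exercised above; it assumes Tone.js is loaded and the default voice is used.

var polySynth = new Tone.PolySynth().toMaster();
// Arrays trigger a chord; scalar arguments also work.
polySynth.triggerAttackRelease(["C4", "D4"], [0.1, 0.2]);
polySynth.triggerAttack("E4");
polySynth.triggerRelease("E4", "+0.1");
// releaseAll releases every currently sounding voice at the given time.
polySynth.releaseAll("+0.2");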

View file

@ -1,5 +1,5 @@
define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
"Tone/type/Type", "Tone/core/Transport", "Tone/component/LFO", "helper/ConstantOutput"],
define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
"Tone/type/Type", "Tone/core/Transport", "Tone/component/LFO", "helper/ConstantOutput"],
function (Offline, Basic, Test, Signal, Tone, Transport, LFO, ConstantOutput) {
describe("Signal", function(){
@ -77,13 +77,13 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
it ("can schedule an exponential ramp", function(){
var sig = new Signal(1);
sig.exponentialRampToValueAtTime(3, 1);
sig.exponentialRampToValueAtTime(3, "+1");
sig.dispose();
});
it ("can approach a target value", function(){
var sig = new Signal(1);
sig.setTargetAtTime(0.2, 1, 2);
sig.setTargetAtTime(0.2, "+1", 2);
sig.dispose();
});
@ -119,10 +119,23 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
}, 1);
});
it ("can cancel and hold an automation curve", function(){
return Offline(function(){
var sig = new Signal(0).toMaster();
sig.linearRampTo(2, 1);
sig.cancelAndHoldAtTime(0.5)
}, 1).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.1);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.5, 0.1);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(1, 0.1);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(1, 0.1);
});
});
it ("can set a linear ramp from the current time", function(){
return Offline(function(){
var sig = new Signal(0).toMaster();
sig.linearRampToValue(2, 0.3);
sig.linearRampTo(2, 0.3);
}, 0.5).then(function(buffer){
buffer.forEach(function(sample, time){
if (time > 0.3){
@ -135,7 +148,7 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
it ("can set an linear ramp in the future", function(){
return Offline(function(){
var sig = new Signal(1).toMaster();
sig.linearRampToValue(50, 0.3, 0.2);
sig.linearRampTo(50, 0.3, 0.2);
}, 0.6).then(function(buffer){
buffer.forEach(function(sample, time){
if (time >= 0.6){
@ -147,11 +160,31 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
});
});
it ("can set a exponential approach ramp from the current time", function(){
return Offline(function(){
var sig = new Signal(0).toMaster();
sig.targetRampTo(1, 0.3);
}, 0.5).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.be.below(0.0001);
expect(buffer.getValueAtTime(0.3)).to.be.closeTo(1, 0.02);
});
});
it ("can set an exponential approach ramp in the future", function(){
return Offline(function(){
var sig = new Signal(1).toMaster();
sig.targetRampTo(50, 0.3, 0.2);
}, 0.7).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.be.closeTo(1, 0.0001);
expect(buffer.getValueAtTime(0.2)).to.be.closeTo(1, 0.0001);
expect(buffer.getValueAtTime(0.6)).to.be.closeTo(50, 0.5);
});
});
it ("can set an exponential ramp from the current time", function(){
return Offline(function(){
var sig = new Signal(1).toMaster();
sig.exponentialRampToValue(50, 0.4);
sig.exponentialRampTo(50, 0.4);
}, 0.6).then(function(buffer){
buffer.forEach(function(sample, time){
if (time >= 0.4){
@ -166,7 +199,7 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
it ("can set an exponential ramp in the future", function(){
return Offline(function(){
var sig = new Signal(1).toMaster();
sig.exponentialRampToValue(50, 0.3, 0.2);
sig.exponentialRampTo(50, 0.3, 0.2);
}, 0.8).then(function(buffer){
buffer.forEach(function(sample, time){
if (time >= 0.6){
@ -207,7 +240,7 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
});
});
});
});
context("Units", function(){
@ -273,7 +306,7 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
expect(signal.value).to.be.closeTo(0, 0.01);
signal.dispose();
});
});
context("Transport Syncing", function(){
@ -387,7 +420,7 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
"frequency" : 10,
}
},
}
}
}
}
}).toMaster();
@ -399,4 +432,4 @@ define(["helper/Offline", "helper/Basic", "Test", "Tone/signal/Signal",
});
});
});
});
});
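A compact sketch of the ramp helpers these tests rely on (linearRampTo, exponentialRampTo, targetRampTo, cancelAndHoldAtTime); it assumes Tone.js is loaded.

var sig = new Tone.Signal(1).toMaster();
// Linear ramp to 2 over 0.3s starting now.
sig.linearRampTo(2, 0.3);
// Exponential ramp to 50 over 0.3s, starting at 0.5s.
sig.exponentialRampTo(50, 0.3, 0.5);
// Exponential-approach ("setTarget"-style) ramp toward 0.001, starting at 1s.
sig.targetRampTo(0.001, 0.3, 1);
// Cancel scheduled automation after 1.1s and hold whatever value the curve has reached there.
sig.cancelAndHoldAtTime(1.1);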

View file

@ -1,4 +1,4 @@
define(["Test", "Tone/signal/TimelineSignal", "helper/Offline", "Tone/type/Type", "helper/Supports"],
define(["Test", "Tone/signal/TimelineSignal", "helper/Offline", "Tone/type/Type", "helper/Supports"],
function (Test, TimelineSignal, Offline, Tone, Supports) {
describe("TimelineSignal", function(){
@ -78,7 +78,7 @@ define(["Test", "Tone/signal/TimelineSignal", "helper/Offline", "Tone/type/Type"
});
});
});
it("can get set a curve in the future", function(){
var sched;
return Offline(function(){
@ -103,8 +103,21 @@ define(["Test", "Tone/signal/TimelineSignal", "helper/Offline", "Tone/type/Type"
});
});
it ("can cancel and hold an automation curve", function(){
return Offline(function(){
var sig = new TimelineSignal(0).toMaster();
sig.linearRampTo(2, 1);
sig.cancelAndHoldAtTime(0.5)
}, 1).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.1);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.5, 0.1);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(1, 0.1);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(1, 0.1);
});
});
if (Supports.ACCURATE_SIGNAL_SCHEDULING){
it("can match a complex scheduled curve", function(){
var sched;
return Offline(function(){
@ -169,4 +182,4 @@ define(["Test", "Tone/signal/TimelineSignal", "helper/Offline", "Tone/type/Type"
});
});
});
});
});

View file

@ -145,6 +145,22 @@ define(["Test", "Tone/signal/TransportTimelineSignal", "helper/Offline", "Tone/t
});
});
it("can cancel a scheduled value", function(){
var sched;
return Offline(function(Transport){
sched = new TransportTimelineSignal(0).toMaster();
sched.setValueAtTime(0, 0);
sched.linearRampToValueAtTime(1, 1);
sched.cancelAndHoldAtTime(0.5);
Transport.start(0);
}, 1).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.1);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.25, 0.1);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0.5, 0.1);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(0.5, 0.1);
});
});
it("can automate values with different units", function(){
var sched;
return Offline(function(Transport){

View file

@ -1,5 +1,5 @@
define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
"Tone/core/Buffer", "helper/Meter", "Tone/core/Tone"],
define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
"Tone/core/Buffer", "helper/Meter", "Tone/core/Tone"],
function (BasicTests, BufferSource, Offline, Buffer, Meter, Tone) {
if (window.__karma__){
@ -423,14 +423,14 @@ define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
});
});
it("fades from the end", function(){
it("fades from the end when passed into the stop call", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.start(0).stop(0.2, 0.1)
}, 0.3).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < 0.1){
expect(sample).to.equal(1);
if (time < 0.101){
expect(sample).to.be.closeTo(1, 0.01);
} else if (time < 0.2){
expect(sample).to.be.lessThan(1);
} else {
@ -440,14 +440,32 @@ define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
});
});
it("fades at the end of the file at the files duration", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.fadeOut = 0.1;
player.start(0);
}, 0.6).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < 0.401){
expect(sample).to.be.closeTo(1, 0.01);
} else if (time < 0.5){
expect(sample).to.be.lessThan(1);
} else {
expect(sample).to.equal(0);
}
});
});
});
it("cant fade for shorter than the fade in time", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.fadeIn = 0.15
player.start(0).stop(0.2, 0.1)
player.fadeIn = 0.15;
player.start(0).stop(0.2, 0.1);
}, 0.3).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < 0.149){
if (time < 0.14){
expect(sample).to.be.lessThan(1);
} else if (Math.abs(time - 0.15) < 1e-4){
expect(sample).to.be.closeTo(1, 0.05);
@ -458,21 +476,53 @@ define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
});
});
it("fades at the end of the file", function(){
it("the fade out can shorten to fit the duration of the sample", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.fadeOut = 0.1;
player.start(0);
}, 0.6).then(function(buffer){
buffer.forEach(function(sample, time){
if (time < 0.4){
expect(sample).to.equal(1);
} else if (time < 0.5){
expect(sample).to.be.lessThan(1);
} else {
expect(sample).to.equal(0);
}
});
player.fadeOut = 1;
player.start(0).stop(0.5);
}, 0.51).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.equal(1);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.5, 0.01);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0, 0.01);
});
});
it("the fade out will only start after the fade in", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.fadeIn = 0.1;
player.fadeOut = 1;
player.start(0).stop(0.5);
}, 0.51).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.equal(0);
expect(buffer.getValueAtTime(0.05)).to.be.closeTo(0.5, 0.01);
expect(buffer.getValueAtTime(0.1)).to.be.closeTo(1, 0.01);
expect(buffer.getValueAtTime(0.3)).to.be.closeTo(0.5, 0.01);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0, 0.01);
});
});
it("can fade with an exponential curve", function(){
var player = new BufferSource(onesBuffer).toMaster();
player.curve = "exponential";
expect(player.curve).to.equal("exponential");
player.dispose();
});
it("fades in and out exponentially", function(){
return Offline(function(){
var player = new BufferSource(onesBuffer).toMaster();
player.curve = "exponential";
player.fadeIn = 0.1;
player.fadeOut = 1;
player.start(0).stop(0.5);
}, 0.51).then(function(buffer){
expect(buffer.getValueAtTime(0)).to.equal(0);
expect(buffer.getValueAtTime(0.05)).to.be.closeTo(0.93, 0.01);
expect(buffer.getValueAtTime(0.1)).to.be.closeTo(1, 0.01);
expect(buffer.getValueAtTime(0.3)).to.be.closeTo(0.05, 0.01);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0, 0.01);
});
});
@ -519,6 +569,26 @@ define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
});
});
it("does not play if the stop time is at the start time", function(){
return Offline(function(){
var player = new BufferSource(buffer);
player.toMaster();
player.start(0).stop(0);
}, 0.3).then(function(buffer){
expect(buffer.isSilent()).to.be.true;
});
});
it("does not play if the stop time is at before start time", function(){
return Offline(function(){
var player = new BufferSource(buffer);
player.toMaster();
player.start(0.1).stop(0);
}, 0.3).then(function(buffer){
expect(buffer.isSilent()).to.be.true;
});
});
it("stops playing at the earlier time if invoked with 'stop' at a later time", function(){
return Offline(function(){
var player = new BufferSource(buffer);
@ -531,4 +601,4 @@ define(["helper/Basic", "Tone/source/BufferSource", "helper/Offline",
});
});
});
});
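Finally, a minimal sketch of the BufferSource fade options exercised above; `buffer` stands in for an already loaded Tone.Buffer, and Tone.js is assumed to be loaded.

var source = new Tone.BufferSource(buffer).toMaster();
source.fadeIn = 0.1;           // ramp in over the first 0.1s
source.fadeOut = 0.3;          // ramp out ahead of the stop time (shortened to fit short samples)
source.curve = "exponential";  // fade curve; "linear" is the other option
source.start(0).stop(1);       // the fade-out completes at the stop time
// stop() also accepts a one-off fade duration: source.start(0).stop(1, 0.2);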