classes which generate or process sound extend Tone.AudioNode

Yotam Mann 2017-08-27 17:50:31 -04:00
parent 112b8081a7
commit e6faa39dba
43 changed files with 572 additions and 571 deletions
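The change applied across these files follows a single pattern: each class that generates or processes sound now lists "Tone/core/AudioNode" as a dependency, calls Tone.AudioNode in its constructor, passes Tone.AudioNode as the parent class to Tone.extend, and chains dispose through Tone.AudioNode.prototype.dispose instead of Tone.prototype.dispose. A minimal sketch of the new pattern (Tone.MyNode is a hypothetical class used only for illustration and is not part of this commit):

define(["Tone/core/Tone", "Tone/core/AudioNode", "Tone/core/Gain"], function(Tone){

	Tone.MyNode = function(){
		//call the Tone.AudioNode constructor instead of Tone
		Tone.AudioNode.call(this);
		//a single gain node acts as both input and output
		this.input = this.output = new Tone.Gain();
	};

	//inherit from Tone.AudioNode instead of Tone
	Tone.extend(Tone.MyNode, Tone.AudioNode);

	Tone.MyNode.prototype.dispose = function(){
		//chain dispose through Tone.AudioNode
		Tone.AudioNode.prototype.dispose.call(this);
		this.input.dispose();
		this.input = this.output = null;
		return this;
	};

	return Tone.MyNode;
});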


@ -3,20 +3,20 @@ define(["Tone/core/Tone", "Tone/component/Envelope", "Tone/core/Gain"], function
"use strict";
/**
* @class Tone.AmplitudeEnvelope is a Tone.Envelope connected to a gain node.
* Unlike Tone.Envelope, which outputs the envelope's value, Tone.AmplitudeEnvelope accepts
* an audio signal as the input and will apply the envelope to the amplitude
* of the signal. Read more about ADSR Envelopes on [Wikipedia](https://en.wikipedia.org/wiki/Synthesizer#ADSR_envelope).
*
* @constructor
* @extends {Tone.Envelope}
* @param {Time|Object} [attack] The amount of time it takes for the envelope to go from
* 0 to its maximum value.
* @param {Time} [decay] The period of time after the attack that it takes for the envelope
* to fall to the sustain value.
* @param {NormalRange} [sustain] The percent of the maximum value that the envelope rests at until
* the release is triggered.
* @param {Time} [release] The amount of time after the release is triggered it takes to reach 0.
* @example
* var ampEnv = new Tone.AmplitudeEnvelope({
* "attack": 0.1,
@ -50,11 +50,9 @@ define(["Tone/core/Tone", "Tone/component/Envelope", "Tone/core/Gain"], function
* @return {Tone.AmplitudeEnvelope} this
*/
Tone.AmplitudeEnvelope.prototype.dispose = function(){
this.input.dispose();
this.input = null;
Tone.Envelope.prototype.dispose.call(this);
return this;
};
return Tone.AmplitudeEnvelope;
});


@ -1,4 +1,4 @@
define(["Tone/core/Tone"], function (Tone) {
define(["Tone/core/Tone", "Tone/core/AudioNode"], function (Tone) {
"use strict";
@ -7,7 +7,7 @@ define(["Tone/core/Tone"], function (Tone) {
* @private
*/
if (window.AnalyserNode && !AnalyserNode.prototype.getFloatTimeDomainData){
//referenced https://github.com/mohayonao/get-float-time-domain-data
AnalyserNode.prototype.getFloatTimeDomainData = function(array){
var uint8 = new Uint8Array(array.length);
this.getByteTimeDomainData(uint8);
@ -19,18 +19,18 @@ define(["Tone/core/Tone"], function (Tone) {
/**
* @class Wrapper around the native Web Audio's
* [AnalyserNode](http://webaudio.github.io/web-audio-api/#idl-def-AnalyserNode).
* Extracts FFT or Waveform data from the incoming signal.
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {String=} type The return type of the analysis, either "fft", or "waveform".
* @param {Number=} size The size of the FFT. Value must be a power of
* two in the range 32 to 32768.
*/
Tone.Analyser = function(){
var options = Tone.defaults(arguments, ["type", "size"], Tone.Analyser);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The analyser node.
@ -58,7 +58,7 @@ define(["Tone/core/Tone"], function (Tone) {
this.type = options.type;
};
Tone.extend(Tone.Analyser);
Tone.extend(Tone.Analyser, Tone.AudioNode);
/**
* The default values.
@ -81,8 +81,8 @@ define(["Tone/core/Tone"], function (Tone) {
};
/**
* Run the analysis given the current settings and return the
* result as a TypedArray.
* @returns {TypedArray}
*/
Tone.Analyser.prototype.analyse = function(){
@ -111,7 +111,7 @@ define(["Tone/core/Tone"], function (Tone) {
});
/**
* The analysis function returned by Tone.Analyser.analyse(), either "fft" or "waveform".
* @memberOf Tone.Analyser#
* @type {String}
* @name type
@ -148,11 +148,11 @@ define(["Tone/core/Tone"], function (Tone) {
* @return {Tone.Analyser} this
*/
Tone.Analyser.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._analyser.disconnect();
this._analyser = null;
this._buffer = null;
};
return Tone.Analyser;
});
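For context, a minimal usage sketch of the class migrated above, based on the constructor arguments and analyse() method shown in the diff (the oscillator source is only illustrative):

//an FFT analyser with 1024 bins (size must be a power of two between 32 and 32768)
var analyser = new Tone.Analyser("fft", 1024);
//feed it any sound-producing node, here an oscillator
var osc = new Tone.Oscillator(440, "sine").connect(analyser).start();
//returns a TypedArray holding the current analysis frame
var values = analyser.analyse();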


@ -1,15 +1,15 @@
define(["Tone/core/Tone", "Tone/core/Param"], function(Tone){
define(["Tone/core/Tone", "Tone/core/Param", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Compressor is a thin wrapper around the Web Audio
* [DynamicsCompressorNode](http://webaudio.github.io/web-audio-api/#the-dynamicscompressornode-interface).
* Compression reduces the volume of loud sounds or amplifies quiet sounds
* by narrowing or "compressing" an audio signal's dynamic range.
* Read more on [Wikipedia](https://en.wikipedia.org/wiki/Dynamic_range_compression).
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @constructor
* @param {Decibels|Object} [threshold] The value above which the compression starts to be applied.
* @param {Positive} [ratio] The gain reduction ratio.
@ -19,7 +19,7 @@ define(["Tone/core/Tone", "Tone/core/Param"], function(Tone){
Tone.Compressor = function(){
var options = Tone.defaults(arguments, ["threshold", "ratio"], Tone.Compressor);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* the compressor node
@ -34,7 +34,7 @@ define(["Tone/core/Tone", "Tone/core/Param"], function(Tone){
* @signal
*/
this.threshold = new Tone.Param({
"param" : this._compressor.threshold,
"param" : this._compressor.threshold,
"units" : Tone.Type.Decibels,
"convert" : false
});
@ -59,7 +59,7 @@ define(["Tone/core/Tone", "Tone/core/Param"], function(Tone){
* @signal
*/
this.knee = new Tone.Param({
"param" : this._compressor.knee,
"param" : this._compressor.knee,
"units" : Tone.Type.Decibels,
"convert" : false
});
@ -70,7 +70,7 @@ define(["Tone/core/Tone", "Tone/core/Param"], function(Tone){
* @signal
*/
this.ratio = new Tone.Param({
"param" : this._compressor.ratio,
"param" : this._compressor.ratio,
"convert" : false
});
@ -79,7 +79,7 @@ define(["Tone/core/Tone", "Tone/core/Param"], function(Tone){
this.set(options);
};
Tone.extend(Tone.Compressor);
Tone.extend(Tone.Compressor, Tone.AudioNode);
/**
* @static
@ -99,7 +99,7 @@ define(["Tone/core/Tone", "Tone/core/Param"], function(Tone){
* @returns {Tone.Compressor} this
*/
Tone.Compressor.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable(["knee", "release", "attack", "ratio", "threshold"]);
this._compressor.disconnect();
this._compressor = null;
@ -117,4 +117,4 @@ define(["Tone/core/Tone", "Tone/core/Param"], function(Tone){
};
return Tone.Compressor;
});
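As a point of reference, a minimal usage sketch for the class above, following the ["threshold", "ratio"] constructor arguments shown in the diff (the Tone.Synth source and toMaster() routing are only illustrative):

//compress everything above -30 decibels with a 3:1 ratio
var compressor = new Tone.Compressor(-30, 3).toMaster();
//route some source through it
var synth = new Tone.Synth().connect(compressor);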


@ -1,14 +1,14 @@
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/signal/Expr",
"Tone/signal/EqualPowerGain", "Tone/core/Gain"], function(Tone){
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/signal/Expr",
"Tone/signal/EqualPowerGain", "Tone/core/Gain", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.CrossFade provides equal power fading between two inputs.
* More on crossfading technique [here](https://en.wikipedia.org/wiki/Fade_(audio_engineering)#Crossfading).
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {NormalRange} [initialFade=0.5]
* @example
* var crossFade = new Tone.CrossFade(0.5);
@ -23,29 +23,29 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/signal/Expr",
* crossFade.fade.value = 1;
* // ^ only effectB is output
* crossFade.fade.value = 0.5;
* // ^ the two signals are mixed equally.
*/
Tone.CrossFade = function(initialFade){
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(2, 1);
/**
* Alias for <code>input[0]</code>.
* @type {Tone.Gain}
*/
this.a = this.input[0] = new Tone.Gain();
/**
* Alias for <code>input[1]</code>.
* @type {Tone.Gain}
*/
this.b = this.input[1] = new Tone.Gain();
/**
* The mix between the two inputs. A fade value of 0
* will output 100% <code>input[0]</code> and
* a value of 1 will output 100% <code>input[1]</code>.
* @type {NormalRange}
* @signal
*/
@ -64,7 +64,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/signal/Expr",
* @type {Tone.EqualPowerGain}
*/
this._equalPowerB = new Tone.EqualPowerGain();
/**
* invert the incoming signal
* @private
@ -80,14 +80,14 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/signal/Expr",
this._readOnly("fade");
};
Tone.extend(Tone.CrossFade);
Tone.extend(Tone.CrossFade, Tone.AudioNode);
/**
* clean up
* @returns {Tone.CrossFade} this
*/
Tone.CrossFade.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable("fade");
this._equalPowerA.dispose();
this._equalPowerA = null;


@ -1,4 +1,4 @@
define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/core/Gain"], function(Tone){
define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/core/Gain", "Tone/core/AudioNode"], function(Tone){
"use strict";
@ -7,8 +7,8 @@ define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/core/Gain"], fu
* well as the low and high crossover frequencies.
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
*
* @param {Decibels|Object} [lowLevel] The gain applied to the lows.
* @param {Decibels} [midLevel] The gain applied to the mid.
* @param {Decibels} [highLevel] The gain applied to the high.
@ -18,7 +18,7 @@ define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/core/Gain"], fu
Tone.EQ3 = function(){
var options = Tone.defaults(arguments, ["low", "mid", "high"], Tone.EQ3);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* the output node
@ -80,21 +80,21 @@ define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/core/Gain"], fu
this.high = this._highGain.gain;
/**
* The Q value for all of the filters.
* @type {Positive}
* @signal
*/
this.Q = this._multibandSplit.Q;
/**
* The low/mid crossover frequency.
* @type {Frequency}
* @signal
*/
this.lowFrequency = this._multibandSplit.lowFrequency;
/**
* The mid/high crossover frequency.
* @type {Frequency}
* @signal
*/
@ -107,7 +107,7 @@ define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/core/Gain"], fu
this._readOnly(["low", "mid", "high", "lowFrequency", "highFrequency"]);
};
Tone.extend(Tone.EQ3);
Tone.extend(Tone.EQ3, Tone.AudioNode);
/**
* the default values
@ -125,7 +125,7 @@ define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/core/Gain"], fu
* @returns {Tone.EQ3} this
*/
Tone.EQ3.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable(["low", "mid", "high", "lowFrequency", "highFrequency"]);
this._multibandSplit.dispose();
this._multibandSplit = null;
@ -145,4 +145,4 @@ define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/core/Gain"], fu
};
return Tone.EQ3;
});


@ -1,23 +1,23 @@
define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
"Tone/signal/Pow", "Tone/type/Type"], function(Tone){
define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
"Tone/signal/Pow", "Tone/type/Type", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Envelope is an [ADSR](https://en.wikipedia.org/wiki/Synthesizer#ADSR_envelope)
* envelope generator. Tone.Envelope outputs a signal which
* can be connected to an AudioParam or Tone.Signal.
* <img src="https://upload.wikimedia.org/wikipedia/commons/e/ea/ADSR_parameter.svg">
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {Time} [attack] The amount of time it takes for the envelope to go from
* 0 to its maximum value.
* @param {Time} [decay] The period of time after the attack that it takes for the envelope
* to fall to the sustain value.
* @param {NormalRange} [sustain] The percent of the maximum value that the envelope rests at until
* the release is triggered.
* @param {Time} [release] The amount of time after the release is triggered it takes to reach 0.
* @example
* //an amplitude envelope
* var gainNode = Tone.context.createGain();
@ -33,26 +33,26 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
//get all of the defaults
var options = Tone.defaults(arguments, ["attack", "decay", "sustain", "release"], Tone.Envelope);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* When triggerAttack is called, the attack time is the amount of
* time it takes for the envelope to reach its maximum value.
* @type {Time}
*/
this.attack = options.attack;
/**
* After the attack portion of the envelope, the value will fall
* over the duration of the decay time to its sustain value.
* @type {Time}
*/
this.decay = options.decay;
/**
* The sustain value is the value
* which the envelope rests at after triggerAttack is
* called, but before triggerRelease is invoked.
* @type {NormalRange}
*/
this.sustain = options.sustain;
@ -60,7 +60,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
/**
* After triggerRelease is called, the envelope's
* value will fall to its minimum value over the
* duration of the release time.
* @type {Time}
*/
this.release = options.release;
@ -92,7 +92,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
this.releaseCurve = options.releaseCurve;
};
Tone.extend(Tone.Envelope);
Tone.extend(Tone.Envelope, Tone.AudioNode);
/**
* the default parameters
@ -109,8 +109,8 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
};
/**
* Read the current value of the envelope. Useful for
* synchronizing visual output to the envelope.
* @memberOf Tone.Envelope#
* @type {Number}
* @name value
@ -123,7 +123,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
});
/**
* The shape of the attack.
* Can be any of these strings:
* <ul>
* <li>linear</li>
@ -136,7 +136,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
* </ul>
* Can also be an array which describes the curve. Values
* in the array are evenly subdivided and linearly
* interpolated over the duration of the attack.
* @memberOf Tone.Envelope#
* @type {String|Array}
* @name attackCurve
@ -160,7 +160,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
//otherwise just return the array
return this._attackCurve;
}
},
set : function(curve){
//check if it's a valid type
if (Tone.Envelope.Type.hasOwnProperty(curve)){
@ -179,7 +179,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
});
/**
* The shape of the release. See the attack curve types.
* @memberOf Tone.Envelope#
* @type {String|Array}
* @name releaseCurve
@ -200,7 +200,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
//otherwise just return the array
return this._releaseCurve;
}
},
set : function(curve){
//check if it's a valid type
if (Tone.Envelope.Type.hasOwnProperty(curve)){
@ -219,7 +219,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
});
/**
* Trigger the attack/decay portion of the ADSR envelope.
* @param {Time} [time=now] When the attack should start.
* @param {NormalRange} [velocity=1] The velocity of the envelope scales the values.
* number between 0-1
@ -265,10 +265,10 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
this._sig.exponentialRampToValue(velocity * this.sustain, decay, attack + time);
return this;
};
/**
* Triggers the release of the envelope.
* @param {Time} [time=now] When the release portion of the envelope should start.
* @returns {Tone.Envelope} this
* @example
* //trigger release immediately
@ -306,10 +306,10 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
/**
* triggerAttackRelease is shorthand for triggerAttack, then waiting
* some duration, then triggerRelease.
* @param {Time} duration The duration of the sustain.
* @param {Time} [time=now] When the attack should be triggered.
* @param {number} [velocity=1] The velocity of the envelope.
* @returns {Tone.Envelope} this
* @example
* //trigger the attack and then the release after 0.6 seconds.
@ -333,14 +333,14 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
};
/**
* Borrows the connect method from Tone.Signal.
* @function
* @private
*/
Tone.Envelope.prototype.connect = Tone.Signal.prototype.connect;
/**
* Generate some complex envelope curves.
*/
(function _createCurves(){
@ -369,7 +369,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
var steps = 5;
for (i = 0; i < curveLen; i++){
stairsCurve[i] = Math.ceil((i / (curveLen - 1)) * steps) / steps;
}
//in-out easing curve
var sineCurve = [];
@ -444,7 +444,7 @@ define(["Tone/core/Tone", "Tone/signal/TimelineSignal",
* @returns {Tone.Envelope} this
*/
Tone.Envelope.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._sig.dispose();
this._sig = null;
this._attackCurve = null;


@ -1,5 +1,5 @@
define(["Tone/core/Tone", "Tone/signal/ScaleExp", "Tone/signal/Signal",
"Tone/core/Param", "Tone/core/Delay", "Tone/core/Gain"], function(Tone){
define(["Tone/core/Tone", "Tone/signal/ScaleExp", "Tone/signal/Signal",
"Tone/core/Param", "Tone/core/Delay", "Tone/core/Gain", "Tone/core/AudioNode"], function(Tone){
"use strict";
@ -7,15 +7,15 @@ define(["Tone/core/Tone", "Tone/signal/ScaleExp", "Tone/signal/Signal",
* @class Comb filters are basic building blocks for physical modeling. Read more
* about comb filters on [CCRMA's website](https://ccrma.stanford.edu/~jos/pasp/Feedback_Comb_Filters.html).
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @constructor
* @param {Time|Object} [delayTime] The delay time of the filter.
* @param {NormalRange=} resonance The amount of feedback the filter has.
*/
Tone.FeedbackCombFilter = function(){
var options = Tone.defaults(arguments, ["delayTime", "resonance"], Tone.FeedbackCombFilter);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* the delay node
@ -25,7 +25,7 @@ define(["Tone/core/Tone", "Tone/signal/ScaleExp", "Tone/signal/Signal",
this._delay = this.input = this.output = new Tone.Delay(options.delayTime);
/**
* The amount of delay of the comb filter.
* @type {Time}
* @signal
*/
@ -39,7 +39,7 @@ define(["Tone/core/Tone", "Tone/signal/ScaleExp", "Tone/signal/Signal",
this._feedback = new Tone.Gain(options.resonance, Tone.Type.NormalRange);
/**
* The amount of feedback of the delayed signal.
* @type {NormalRange}
* @signal
*/
@ -49,7 +49,7 @@ define(["Tone/core/Tone", "Tone/signal/ScaleExp", "Tone/signal/Signal",
this._readOnly(["resonance", "delayTime"]);
};
Tone.extend(Tone.FeedbackCombFilter);
Tone.extend(Tone.FeedbackCombFilter, Tone.AudioNode);
/**
* the default parameters
@ -67,7 +67,7 @@ define(["Tone/core/Tone", "Tone/signal/ScaleExp", "Tone/signal/Signal",
* @returns {Tone.FeedbackCombFilter} this
*/
Tone.FeedbackCombFilter.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable(["resonance", "delayTime"]);
this._delay.dispose();
this._delay = null;
@ -79,4 +79,4 @@ define(["Tone/core/Tone", "Tone/signal/ScaleExp", "Tone/signal/Signal",
};
return Tone.FeedbackCombFilter;
});
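For reference, a minimal usage sketch of the filter above, following the ["delayTime", "resonance"] argument order shown in the diff (the noise source and toMaster() routing are only illustrative):

//a comb filter with a 50ms delay and moderate feedback
var comb = new Tone.FeedbackCombFilter(0.05, 0.5).toMaster();
var noise = new Tone.Noise("white").connect(comb).start();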


@ -1,15 +1,15 @@
define(["Tone/core/Tone", "Tone/signal/Signal"], function(Tone){
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Filter is a filter which allows for all of the same native methods
* as the [BiquadFilterNode](http://webaudio.github.io/web-audio-api/#the-biquadfilternode-interface).
* Tone.Filter has the added ability to set the filter rolloff at -12
* (default), -24 and -48.
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {Frequency|Object} [frequency] The cutoff frequency of the filter.
* @param {string=} type The type of filter.
* @param {number=} rolloff The drop in decibels per octave after the cutoff frequency.
@ -20,7 +20,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal"], function(Tone){
Tone.Filter = function(){
var options = Tone.defaults(arguments, ["frequency", "type", "rolloff"], Tone.Filter);
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(1, 1);
/**
@ -31,7 +31,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal"], function(Tone){
this._filters = [];
/**
* The cutoff frequency of the filter.
* @type {Frequency}
* @signal
*/
@ -50,7 +50,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal"], function(Tone){
* @signal
*/
this.gain = new Tone.Signal({
"value" : options.gain,
"value" : options.gain,
"convert" : false
});
@ -80,7 +80,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal"], function(Tone){
this._readOnly(["detune", "frequency", "gain", "Q"]);
};
Tone.extend(Tone.Filter);
Tone.extend(Tone.Filter, Tone.AudioNode);
/**
* the default parameters
@ -97,8 +97,8 @@ define(["Tone/core/Tone", "Tone/signal/Signal"], function(Tone){
};
/**
* The type of the filter. Types: "lowpass", "highpass",
* "bandpass", "lowshelf", "highshelf", "notch", "allpass", or "peaking".
* @memberOf Tone.Filter#
* @type {string}
* @name type
@ -138,7 +138,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal"], function(Tone){
//check the rolloff is valid
if (cascadingCount === -1){
throw new RangeError("Tone.Filter: rolloff can only be -12, -24, -48 or -96");
}
cascadingCount += 1;
this._rolloff = rolloff;
//first disconnect the filters and throw them away
@ -164,11 +164,11 @@ define(["Tone/core/Tone", "Tone/signal/Signal"], function(Tone){
});
/**
* Clean up.
* @return {Tone.Filter} this
*/
Tone.Filter.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
for (var i = 0; i < this._filters.length; i++) {
this._filters[i].disconnect();
this._filters[i] = null;
@ -187,4 +187,4 @@ define(["Tone/core/Tone", "Tone/signal/Signal"], function(Tone){
};
return Tone.Filter;
});
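A minimal usage sketch of Tone.Filter as documented above; the type and rolloff values come from the doc comments in the diff, while the oscillator source and toMaster() routing are only illustrative:

//a highpass filter at 200Hz with a steeper -24dB/octave rolloff
var filter = new Tone.Filter(200, "highpass", -24).toMaster();
var osc = new Tone.Oscillator(110, "sawtooth").connect(filter).start();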


@ -1,28 +1,28 @@
define(["Tone/core/Tone", "Tone/signal/Abs", "Tone/signal/Subtract", "Tone/signal/Multiply",
"Tone/signal/Signal", "Tone/signal/WaveShaper", "Tone/type/Type", "Tone/core/Delay"],
define(["Tone/core/Tone", "Tone/signal/Abs", "Tone/signal/Subtract", "Tone/signal/Multiply",
"Tone/signal/Signal", "Tone/signal/WaveShaper", "Tone/type/Type", "Tone/core/Delay", "Tone/core/AudioNode"],
function(Tone){
"use strict";
/**
* @class Tone.Follower is a crude envelope follower which will follow
* the amplitude of an incoming signal.
* Take care with small (< 0.02) attack or decay values
* as follower has some ripple which is exaggerated
* at these values. Read more about envelope followers (also known
* as envelope detectors) on [Wikipedia](https://en.wikipedia.org/wiki/Envelope_detector).
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {Time|Object} [attack] The rate at which the follower rises.
* @param {Time=} release The rate at which the follower falls.
* @example
* var follower = new Tone.Follower(0.2, 0.4);
*/
Tone.Follower = function(){
var options = Tone.defaults(arguments, ["attack", "release"], Tone.Follower);
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(1, 1);
/**
@ -46,7 +46,7 @@ function(Tone){
* @private
*/
this._frequencyValues = new Tone.WaveShaper();
/**
* @type {Tone.Subtract}
* @private
@ -89,21 +89,21 @@ function(Tone){
this._setAttackRelease(this._attack, this._release);
};
Tone.extend(Tone.Follower);
Tone.extend(Tone.Follower, Tone.AudioNode);
/**
* @static
* @type {Object}
*/
Tone.Follower.defaults = {
"attack" : 0.05,
"attack" : 0.05,
"release" : 0.5
};
/**
* sets the attack and release times in the wave shaper
* @param {Time} attack
* @param {Time} release
* @private
*/
Tone.Follower.prototype._setAttackRelease = function(attack, release){
@ -117,7 +117,7 @@ function(Tone){
return attack;
} else {
return release;
}
});
};
@ -133,7 +133,7 @@ function(Tone){
},
set : function(attack){
this._attack = attack;
this._setAttackRelease(this._attack, this._release);
}
});
@ -149,7 +149,7 @@ function(Tone){
},
set : function(release){
this._release = release;
this._setAttackRelease(this._attack, this._release);
}
});
@ -165,7 +165,7 @@ function(Tone){
* @returns {Tone.Follower} this
*/
Tone.Follower.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._filter.disconnect();
this._filter = null;
this._frequencyValues.disconnect();
@ -183,4 +183,4 @@ function(Tone){
};
return Tone.Follower;
});


@ -1,28 +1,28 @@
define(["Tone/core/Tone", "Tone/component/Follower", "Tone/signal/GreaterThan"], function(Tone){
define(["Tone/core/Tone", "Tone/component/Follower", "Tone/signal/GreaterThan", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Gate only passes a signal through when the incoming
* signal exceeds a specified threshold. To do this, Gate uses
* a Tone.Follower to follow the amplitude of the incoming signal.
* A common implementation of this class is a [Noise Gate](https://en.wikipedia.org/wiki/Noise_gate).
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {Decibels|Object} [threshold] The threshold above which the gate will open.
* @param {Time=} attack The follower's attack time
* @param {Time=} release The follower's release time
* @example
* var gate = new Tone.Gate(-30, 0.2, 0.3).toMaster();
* var mic = new Tone.UserMedia().connect(gate);
* //the gate will only pass through the incoming
* //signal when it's louder than -30db
*/
Tone.Gate = function(){
var options = Tone.defaults(arguments, ["threshold", "attack", "release"], Tone.Gate);
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(1, 1);
/**
@ -43,7 +43,7 @@ define(["Tone/core/Tone", "Tone/component/Follower", "Tone/signal/GreaterThan"],
this.input.chain(this._gt, this._follower, this.output.gain);
};
Tone.extend(Tone.Gate);
Tone.extend(Tone.Gate, Tone.AudioNode);
/**
* @const
@ -51,7 +51,7 @@ define(["Tone/core/Tone", "Tone/component/Follower", "Tone/signal/GreaterThan"],
* @type {Object}
*/
Tone.Gate.defaults = {
"attack" : 0.1,
"attack" : 0.1,
"release" : 0.1,
"threshold" : -40
};
@ -65,7 +65,7 @@ define(["Tone/core/Tone", "Tone/component/Follower", "Tone/signal/GreaterThan"],
Object.defineProperty(Tone.Gate.prototype, "threshold", {
get : function(){
return Tone.gainToDb(this._gt.value);
},
set : function(thresh){
this._gt.value = Tone.dbToGain(thresh);
}
@ -80,7 +80,7 @@ define(["Tone/core/Tone", "Tone/component/Follower", "Tone/signal/GreaterThan"],
Object.defineProperty(Tone.Gate.prototype, "attack", {
get : function(){
return this._follower.attack;
},
set : function(attackTime){
this._follower.attack = attackTime;
}
@ -95,18 +95,18 @@ define(["Tone/core/Tone", "Tone/component/Follower", "Tone/signal/GreaterThan"],
Object.defineProperty(Tone.Gate.prototype, "release", {
get : function(){
return this._follower.release;
},
set : function(releaseTime){
this._follower.release = releaseTime;
}
});
/**
* Clean up.
* @returns {Tone.Gate} this
*/
Tone.Gate.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._follower.dispose();
this._gt.dispose();
this._follower = null;
@ -115,4 +115,4 @@ define(["Tone/core/Tone", "Tone/component/Follower", "Tone/signal/GreaterThan"],
};
return Tone.Gate;
});


@ -1,21 +1,21 @@
define(["Tone/core/Tone", "Tone/source/Oscillator", "Tone/signal/Scale",
"Tone/signal/Signal", "Tone/signal/AudioToGain", "Tone/type/Type", "Tone/signal/Zero"],
define(["Tone/core/Tone", "Tone/source/Oscillator", "Tone/signal/Scale", "Tone/core/AudioNode",
"Tone/signal/Signal", "Tone/signal/AudioToGain", "Tone/type/Type", "Tone/signal/Zero"],
function(Tone){
"use strict";
/**
* @class LFO stands for low frequency oscillator. Tone.LFO produces an output signal
* which can be attached to an AudioParam or Tone.Signal
* in order to modulate that parameter with an oscillator. The LFO can
* also be synced to the transport to start/stop and change when the tempo changes.
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {Frequency|Object} [frequency] The frequency of the oscillation. Typically, LFOs will be
* in the frequency range of 0.1 to 10 hertz.
* @param {number=} min The minimum output value of the LFO.
* @param {number=} max The maximum value of the LFO.
* @example
* var lfo = new Tone.LFO("4n", 400, 4000);
* lfo.connect(filter.frequency);
@ -23,16 +23,16 @@ function(Tone){
Tone.LFO = function(){
var options = Tone.defaults(arguments, ["frequency", "min", "max"], Tone.LFO);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The oscillator.
* @type {Tone.Oscillator}
* @private
*/
this._oscillator = new Tone.Oscillator({
"frequency" : options.frequency,
"type" : options.type,
"frequency" : options.frequency,
"type" : options.type,
});
/**
@ -44,9 +44,9 @@ function(Tone){
/**
* The amplitude of the LFO, which controls the output range between
* the min and max output. For example if the min is -10 and the max
* is 10, setting the amplitude to 0.5 would make the LFO modulate
* between -5 and 5.
* @type {Number}
* @signal
*/
@ -76,20 +76,20 @@ function(Tone){
this._stoppedValue = 0;
/**
* @type {Tone.AudioToGain}
* @private
*/
this._a2g = new Tone.AudioToGain();
/**
* @type {Tone.Scale}
* @private
*/
this._scaler = this.output = new Tone.Scale(options.min, options.max);
/**
* the units of the LFO (used for converting)
* @type {Tone.Type}
* @private
*/
this._units = Tone.Type.Default;
@ -103,7 +103,7 @@ function(Tone){
this.phase = options.phase;
};
Tone.extend(Tone.LFO);
Tone.extend(Tone.LFO, Tone.AudioNode);
/**
* the default parameters
@ -123,7 +123,7 @@ function(Tone){
};
/**
* Start the LFO.
* @param {Time} [time=now] the time the LFO will start
* @returns {Tone.LFO} this
*/
@ -135,7 +135,7 @@ function(Tone){
};
/**
* Stop the LFO.
* @param {Time} [time=now] the time the LFO will stop
* @returns {Tone.LFO} this
*/
@ -147,13 +147,13 @@ function(Tone){
};
/**
* Sync the start/stop/pause to the transport
* and the frequency to the bpm of the transport
* @returns {Tone.LFO} this
* @example
* lfo.frequency.value = "8n";
* lfo.sync().start(0)
* //the rate of the LFO will always be an eighth note,
* //even as the tempo changes
*/
Tone.LFO.prototype.sync = function(){
@ -205,7 +205,7 @@ function(Tone){
});
/**
* The type of the oscillator: sine, square, sawtooth, triangle.
* @memberOf Tone.LFO#
* @type {string}
* @name type
@ -259,7 +259,7 @@ function(Tone){
});
/**
* Mute the output.
* @memberOf Tone.LFO#
* @type {Boolean}
* @name mute
@ -267,7 +267,7 @@ function(Tone){
Object.defineProperty(Tone.LFO.prototype, "mute", {
get : function(){
return this._oscillator.mute;
},
set : function(mute){
this._oscillator.mute = mute;
}
@ -287,10 +287,10 @@ function(Tone){
});
/**
* Connect the output of the LFO to an AudioParam, AudioNode, or Tone Node.
* Tone.LFO will automatically convert its output to the
* destination units of the connected node.
* @param {Tone | AudioParam | AudioNode} node
* @param {number} [outputNum=0] optionally which output to connect from
* @param {number} [inputNum=0] optionally which input to connect to
* @returns {Tone.LFO} this
@ -306,7 +306,7 @@ function(Tone){
};
/**
* private method borrowed from Param converts
* units from their destination value
* @function
* @private
@ -314,7 +314,7 @@ function(Tone){
Tone.LFO.prototype._fromUnits = Tone.Param.prototype._fromUnits;
/**
* private method borrowed from Param converts
* units to their destination value
* @function
* @private
@ -326,7 +326,7 @@ function(Tone){
* @returns {Tone.LFO} this
*/
Tone.LFO.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable(["amplitude", "frequency"]);
this._oscillator.dispose();
this._oscillator = null;
@ -344,4 +344,4 @@ function(Tone){
};
return Tone.LFO;
});


@ -1,25 +1,25 @@
define(["Tone/core/Tone", "Tone/component/Compressor"], function(Tone){
define(["Tone/core/Tone", "Tone/component/Compressor", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Limiter will limit the loudness of an incoming signal.
* It is composed of a Tone.Compressor with a fast attack
* and release. Limiters are commonly used to safeguard against
* signal clipping. Unlike a compressor, limiters do not provide
* smooth gain reduction and almost completely prevent
* additional gain above the threshold.
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @constructor
* @param {number} threshold The threshold above which the limiting is applied.
* @example
* var limiter = new Tone.Limiter(-6);
*/
Tone.Limiter = function(){
var options = Tone.defaults(arguments, ["threshold"], Tone.Limiter);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* the compressor
@ -42,7 +42,7 @@ define(["Tone/core/Tone", "Tone/component/Compressor"], function(Tone){
this._readOnly("threshold");
};
Tone.extend(Tone.Limiter);
Tone.extend(Tone.Limiter, Tone.AudioNode);
/**
* The default value
@ -59,7 +59,7 @@ define(["Tone/core/Tone", "Tone/component/Compressor"], function(Tone){
* @returns {Tone.Limiter} this
*/
Tone.Limiter.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._compressor.dispose();
this._compressor = null;
this._writable("threshold");
@ -68,4 +68,4 @@ define(["Tone/core/Tone", "Tone/component/Compressor"], function(Tone){
};
return Tone.Limiter;
});


@ -1,23 +1,23 @@
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/component/Filter",
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/component/Filter", "Tone/core/AudioNode",
"Tone/core/Param", "Tone/core/Gain", "Tone/core/Delay"], function(Tone){
"use strict";
/**
* @class Tone.LowpassCombFilter is a lowpass feedback comb filter. It is similar to
* Tone.FeedbackCombFilter, but includes a lowpass filter.
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @constructor
* @param {Time|Object} [delayTime] The delay time of the comb filter
* @param {NormalRange=} resonance The resonance (feedback) of the comb filter
* @param {Frequency=} dampening The cutoff of the lowpass filter dampens the
* signal as it is fed back.
*/
Tone.LowpassCombFilter = function(){
var options = Tone.defaults(arguments, ["delayTime", "resonance", "dampening"], Tone.LowpassCombFilter);
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(1, 1);
/**
@ -28,7 +28,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/component/Filter",
this._delay = this.input = new Tone.Delay(options.delayTime);
/**
* The delayTime of the comb filter.
* @type {Time}
* @signal
*/
@ -49,7 +49,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/component/Filter",
* @signal
*/
this.dampening = new Tone.Param({
"param" : this._lowpass.frequency,
"param" : this._lowpass.frequency,
"units" : Tone.Type.Frequency,
"value" : options.dampening
});
@ -62,7 +62,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/component/Filter",
this._feedback = new Tone.Gain(options.resonance, Tone.Type.NormalRange);
/**
* The amount of feedback of the delayed signal.
* @type {NormalRange}
* @signal
*/
@ -73,7 +73,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/component/Filter",
this._readOnly(["dampening", "resonance", "delayTime"]);
};
Tone.extend(Tone.LowpassCombFilter);
Tone.extend(Tone.LowpassCombFilter, Tone.AudioNode);
/**
* the default parameters
@ -88,11 +88,11 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/component/Filter",
};
/**
* Clean up.
* @returns {Tone.LowpassCombFilter} this
*/
Tone.LowpassCombFilter.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable(["dampening", "resonance", "delayTime"]);
this.dampening.dispose();
this.dampening = null;
@ -109,4 +109,4 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/component/Filter",
};
return Tone.LowpassCombFilter;
});


@ -1,13 +1,13 @@
define(["Tone/core/Tone"], function(Tone){
define(["Tone/core/Tone", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Merge brings two signals into the left and right
* channels of a single stereo channel.
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @example
* var merge = new Tone.Merge().toMaster();
* //routing a sine tone in the left channel
@ -20,7 +20,7 @@ define(["Tone/core/Tone"], function(Tone){
*/
Tone.Merge = function(){
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(2, 0);
/**
@ -54,14 +54,14 @@ define(["Tone/core/Tone"], function(Tone){
this.right.channelCountMode = "explicit";
};
Tone.extend(Tone.Merge);
Tone.extend(Tone.Merge, Tone.AudioNode);
/**
* Clean up.
* @returns {Tone.Merge} this
*/
Tone.Merge.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this.left.dispose();
this.left = null;
this.right.dispose();
@ -69,7 +69,7 @@ define(["Tone/core/Tone"], function(Tone){
this._merger.disconnect();
this._merger = null;
return this;
};
return Tone.Merge;
});


@ -1,15 +1,15 @@
define(["Tone/core/Tone", "Tone/component/Analyser"], function(Tone){
define(["Tone/core/Tone", "Tone/component/Analyser", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Meter gets the [RMS](https://en.wikipedia.org/wiki/Root_mean_square)
* of an input signal with some averaging applied. It can also get the raw
* value of the input signal.
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {String} type Either "level" or "signal".
* @param {Number} smoothing The amount of smoothing applied between frames.
* @example
* var meter = new Tone.Meter();
@ -22,13 +22,13 @@ define(["Tone/core/Tone", "Tone/component/Analyser"], function(Tone){
Tone.Meter = function(){
var options = Tone.defaults(arguments, ["type", "smoothing"], Tone.Meter);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The type of the meter, either "level" or "signal".
* A "level" meter will return the volume level (rms) of the
* input signal and a "signal" meter will return
* the signal value of the input.
* @type {String}
*/
this.type = options.type;
@ -41,7 +41,7 @@ define(["Tone/core/Tone", "Tone/component/Analyser"], function(Tone){
this.input = this.output = this._analyser = new Tone.Analyser("waveform", 512);
/**
* The amount of carryover between the current and last frame.
* Only applied when the meter type is "level".
* @type {Number}
*/
@ -55,7 +55,7 @@ define(["Tone/core/Tone", "Tone/component/Analyser"], function(Tone){
this._lastValue = 0;
};
Tone.extend(Tone.Meter);
Tone.extend(Tone.Meter, Tone.AudioNode);
/**
* @private
@ -114,11 +114,11 @@ define(["Tone/core/Tone", "Tone/component/Analyser"], function(Tone){
* @returns {Tone.Meter} this
*/
Tone.Meter.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._analyser.dispose();
this._analyser = null;
return this;
};
return Tone.Meter;
});


@ -1,20 +1,20 @@
define(["Tone/core/Tone", "Tone/component/MidSideSplit", "Tone/component/MidSideMerge",
"Tone/component/Compressor"], function(Tone){
define(["Tone/core/Tone", "Tone/component/MidSideSplit", "Tone/component/MidSideMerge",
"Tone/component/Compressor", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.MidSideCompressor applies two different compressors to the mid
* and side signal components. See Tone.MidSideSplit.
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {Object} options The options that are passed to the mid and side
* compressors.
* @constructor
*/
Tone.MidSideCompressor = function(options){
Tone.call(this);
Tone.AudioNode.call(this);
options = Tone.defaultArg(options, Tone.MidSideCompressor.defaults);
/**
@ -48,7 +48,7 @@ define(["Tone/core/Tone", "Tone/component/MidSideSplit", "Tone/component/MidSide
this._readOnly(["mid", "side"]);
};
Tone.extend(Tone.MidSideCompressor);
Tone.extend(Tone.MidSideCompressor, Tone.AudioNode);
/**
* @const
@ -77,7 +77,7 @@ define(["Tone/core/Tone", "Tone/component/MidSideSplit", "Tone/component/MidSide
* @returns {Tone.MidSideCompressor} this
*/
Tone.MidSideCompressor.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable(["mid", "side"]);
this.mid.dispose();
this.mid = null;
@ -91,4 +91,4 @@ define(["Tone/core/Tone", "Tone/component/MidSideSplit", "Tone/component/MidSide
};
return Tone.MidSideCompressor;
});


@ -1,12 +1,13 @@
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/signal/Expr", "Tone/component/Merge", "Tone/core/Gain"],
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/signal/Expr",
"Tone/component/Merge", "Tone/core/Gain", "Tone/core/AudioNode"],
function(Tone){
"use strict";
/**
* @class Mid/Side processing separates the 'mid' signal
* (which comes out of both the left and the right channel)
* and the 'side' (which only comes out of the side channels).
* MidSideMerge merges the mid and side signal after they've been seperated
* by Tone.MidSideSplit.<br><br>
* <code>
@ -14,12 +15,12 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/signal/Expr", "Tone/compon
* Right = (Mid-Side)/sqrt(2); // obtain right signal from mid and side<br>
* </code>
*
* @extends {Tone.StereoEffect}
* @extends {Tone.AudioNode}
* @constructor
*/
Tone.MidSideMerge = function(){
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(2, 0);
/**
@ -67,14 +68,14 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/signal/Expr", "Tone/compon
this.context.getConstant(Math.SQRT1_2).connect(this._right, 0, 2);
};
Tone.extend(Tone.MidSideMerge);
Tone.extend(Tone.MidSideMerge, Tone.AudioNode);
/**
* clean up
* @returns {Tone.MidSideMerge} this
*/
Tone.MidSideMerge.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this.mid.dispose();
this.mid = null;
this.side.dispose();
@ -89,4 +90,4 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/signal/Expr", "Tone/compon
};
return Tone.MidSideMerge;
});


@ -1,23 +1,23 @@
define(["Tone/core/Tone", "Tone/signal/Expr", "Tone/signal/Signal", "Tone/component/Split"],
define(["Tone/core/Tone", "Tone/signal/Expr", "Tone/signal/Signal", "Tone/component/Split", "Tone/core/AudioNode"],
function(Tone){
"use strict";
/**
* @class Mid/Side processing separates the 'mid' signal
* (which comes out of both the left and the right channel)
* and the 'side' (which only comes out of the side channels). <br><br>
* <code>
* Mid = (Left+Right)/sqrt(2); // obtain mid-signal from left and right<br>
* Side = (Left-Right)/sqrt(2); // obtain side-signal from left and right<br>
* </code>
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @constructor
*/
Tone.MidSideSplit = function(){
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(0, 2);
/**
@ -49,14 +49,14 @@ define(["Tone/core/Tone", "Tone/signal/Expr", "Tone/signal/Signal", "Tone/compon
this.context.getConstant(Math.SQRT1_2).connect(this.side, 0, 2);
};
Tone.extend(Tone.MidSideSplit);
Tone.extend(Tone.MidSideSplit, Tone.AudioNode);
/**
* clean up
* @returns {Tone.MidSideSplit} this
*/
Tone.MidSideSplit.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this.mid.dispose();
this.mid = null;
this.side.dispose();
@ -67,4 +67,4 @@ define(["Tone/core/Tone", "Tone/signal/Expr", "Tone/signal/Signal", "Tone/compon
};
return Tone.MidSideSplit;
});


@ -1,18 +1,18 @@
define(["Tone/core/Tone", "Tone/component/Merge"], function(Tone){
define(["Tone/core/Tone", "Tone/component/Merge", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Mono coerces the incoming mono or stereo signal into a mono signal
* where both left and right channels have the same value. This can be useful
* for [stereo imaging](https://en.wikipedia.org/wiki/Stereo_imaging).
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @constructor
*/
Tone.Mono = function(){
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(1, 0);
/**
@ -34,11 +34,11 @@ define(["Tone/core/Tone", "Tone/component/Merge"], function(Tone){
* @returns {Tone.Mono} this
*/
Tone.Mono.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._merge.dispose();
this._merge = null;
return this;
};
return Tone.Mono;
});


@ -1,11 +1,11 @@
define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/component/Compressor"], function(Tone){
define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/component/Compressor", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class A compressor with separate controls over low/mid/high dynamics
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @constructor
* @param {Object} options The low/mid/high compressor settings.
* @example
@ -19,7 +19,7 @@ define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/component/Compr
*/
Tone.MultibandCompressor = function(options){
Tone.call(this);
Tone.AudioNode.call(this);
options = Tone.defaultArg(arguments, Tone.MultibandCompressor.defaults);
/**
@ -79,7 +79,7 @@ define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/component/Compr
this._readOnly(["high", "mid", "low", "highFrequency", "lowFrequency"]);
};
Tone.extend(Tone.MultibandCompressor);
Tone.extend(Tone.MultibandCompressor, Tone.AudioNode);
/**
* @const
@ -99,7 +99,7 @@ define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/component/Compr
* @returns {Tone.MultibandCompressor} this
*/
Tone.MultibandCompressor.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._splitter.dispose();
this._writable(["high", "mid", "low", "highFrequency", "lowFrequency"]);
this.low.dispose();
@ -115,4 +115,4 @@ define(["Tone/core/Tone", "Tone/component/MultibandSplit", "Tone/component/Compr
};
return Tone.MultibandCompressor;
});


@ -1,12 +1,12 @@
define(["Tone/core/Tone", "Tone/component/Filter", "Tone/signal/Signal", "Tone/core/Gain"], function(Tone){
define(["Tone/core/Tone", "Tone/component/Filter", "Tone/signal/Signal", "Tone/core/Gain", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Split the incoming signal into three bands (low, mid, high)
* with two crossover frequency controls.
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @constructor
* @param {Frequency|Object} [lowFrequency] the low/mid crossover frequency
* @param {Frequency} [highFrequency] the mid/high crossover frequency
@ -14,7 +14,7 @@ define(["Tone/core/Tone", "Tone/component/Filter", "Tone/signal/Signal", "Tone/c
Tone.MultibandSplit = function(){
var options = Tone.defaults(arguments, ["lowFrequency", "highFrequency"], Tone.MultibandSplit);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* the input
@ -92,7 +92,7 @@ define(["Tone/core/Tone", "Tone/component/Filter", "Tone/signal/Signal", "Tone/c
this._readOnly(["high", "mid", "low", "highFrequency", "lowFrequency"]);
};
Tone.extend(Tone.MultibandSplit);
Tone.extend(Tone.MultibandSplit, Tone.AudioNode);
/**
* @private
@ -110,7 +110,7 @@ define(["Tone/core/Tone", "Tone/component/Filter", "Tone/signal/Signal", "Tone/c
* @returns {Tone.MultibandSplit} this
*/
Tone.MultibandSplit.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable(["high", "mid", "low", "highFrequency", "lowFrequency"]);
this.low.dispose();
this.low = null;
@ -130,4 +130,4 @@ define(["Tone/core/Tone", "Tone/component/Filter", "Tone/signal/Signal", "Tone/c
};
return Tone.MultibandSplit;
});


@ -1,11 +1,11 @@
define(["Tone/core/Tone", "Tone/component/Panner", "Tone/component/Volume"], function(Tone){
define(["Tone/core/Tone", "Tone/component/Panner", "Tone/component/Volume", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.PanVol is a Tone.Panner and Tone.Volume in one.
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @constructor
* @param {AudioRange} pan the initial pan
* @param {number} volume The output volume.
@ -16,7 +16,7 @@ define(["Tone/core/Tone", "Tone/component/Panner", "Tone/component/Volume"], fun
Tone.PanVol = function(){
var options = Tone.defaults(arguments, ["pan", "volume"], Tone.PanVol);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The panning node
@ -53,7 +53,7 @@ define(["Tone/core/Tone", "Tone/component/Panner", "Tone/component/Volume"], fun
this._readOnly(["pan", "volume"]);
};
Tone.extend(Tone.PanVol);
Tone.extend(Tone.PanVol, Tone.AudioNode);
/**
* The defaults
@ -87,7 +87,7 @@ define(["Tone/core/Tone", "Tone/component/Panner", "Tone/component/Volume"], fun
* @returns {Tone.PanVol} this
*/
Tone.PanVol.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable(["pan", "volume"]);
this._panner.dispose();
this._panner = null;


@ -1,23 +1,23 @@
define(["Tone/core/Tone", "Tone/component/CrossFade", "Tone/component/Merge", "Tone/component/Split",
"Tone/signal/Signal", "Tone/signal/AudioToGain", "Tone/signal/Zero"],
define(["Tone/core/Tone", "Tone/component/CrossFade", "Tone/component/Merge", "Tone/component/Split",
"Tone/signal/Signal", "Tone/signal/AudioToGain", "Tone/signal/Zero", "Tone/core/AudioNode"],
function(Tone){
"use strict";
/**
* @class Tone.Panner is an equal power Left/Right Panner and does not
* support 3D. Panner uses the StereoPannerNode when available.
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {NormalRange} [initialPan=0] The initial panner value (center).
* @example
* //pan the input signal hard right.
* var panner = new Tone.Panner(1);
*/
Tone.Panner = function(initialPan){
Tone.call(this);
Tone.AudioNode.call(this);
if (Tone.Panner.hasStereoPanner){
/**
@ -28,12 +28,12 @@ function(Tone){
this._panner = this.input = this.output = this.context.createStereoPanner();
/**
* The pan control. -1 = hard left, 1 = hard right.
* @type {NormalRange}
* @signal
*/
this.pan = this._panner.pan;
} else {
/**
@ -42,24 +42,24 @@ function(Tone){
* @private
*/
this._crossFade = new Tone.CrossFade();
/**
* @type {Tone.Merge}
* @private
*/
this._merger = this.output = new Tone.Merge();
/**
* @type {Tone.Split}
* @private
*/
this._splitter = this.input = new Tone.Split();
/**
* The pan control. -1 = hard left, 1 = hard right.
* @type {AudioRange}
* @signal
*/
this.pan = new Tone.Signal(0, Tone.Type.AudioRange);
/**
@ -91,7 +91,7 @@ function(Tone){
this._readOnly("pan");
};
Tone.extend(Tone.Panner);
Tone.extend(Tone.Panner, Tone.AudioNode);
/**
* Indicates if the panner is using the new StereoPannerNode internally
@ -107,7 +107,7 @@ function(Tone){
* @returns {Tone.Panner} this
*/
Tone.Panner.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable("pan");
if (Tone.Panner.hasStereoPanner){
this._panner.disconnect();
@ -131,4 +131,4 @@ function(Tone){
};
return Tone.Panner;
});
});
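A short sketch of the panner above; it behaves the same whether the native StereoPannerNode or the CrossFade fallback is used (the oscillator source is illustrative):

var panner = new Tone.Panner(0); //start centered
new Tone.Oscillator().start().connect(panner);
panner.toMaster();
panner.pan.value = -1; //-1 = hard left, 1 = hard right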
View file
@ -1,5 +1,5 @@
define(["Tone/core/Tone", "Tone/component/CrossFade", "Tone/component/Merge", "Tone/component/Split",
"Tone/signal/Signal", "Tone/signal/AudioToGain", "Tone/signal/Zero"],
define(["Tone/core/Tone", "Tone/component/CrossFade", "Tone/component/Merge", "Tone/component/Split",
"Tone/signal/Signal", "Tone/signal/AudioToGain", "Tone/signal/Zero", "Tone/core/AudioNode"],
function(Tone){
"use strict";
@ -7,9 +7,9 @@ function(Tone){
/**
* @class A spatialized panner node which supports equalpower or HRTF panning.
* Tries to normalize the API across various browsers. See Tone.Listener
*
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {Number} positionX The initial x position.
* @param {Number} positionY The initial y position.
* @param {Number} positionZ The initial z position.
@ -17,7 +17,7 @@ function(Tone){
Tone.Panner3D = function(){
var options = Tone.defaults(arguments, ["positionX", "positionY", "positionZ"], Tone.Panner3D);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The panner node
@ -58,7 +58,7 @@ function(Tone){
this.positionZ = options.positionZ;
};
Tone.extend(Tone.Panner3D);
Tone.extend(Tone.Panner3D, Tone.AudioNode);
/**
* Defaults according to the specification
@ -91,7 +91,7 @@ function(Tone){
Tone.Panner3D.prototype._rampTimeConstant = 0.01;
/**
* Sets the position of the source in 3d space.
* Sets the position of the source in 3d space.
* @param {Number} x
* @param {Number} y
* @param {Number} z
@ -111,7 +111,7 @@ function(Tone){
};
/**
* Sets the orientation of the source in 3d space.
* Sets the orientation of the source in 3d space.
* @param {Number} x
* @param {Number} y
* @param {Number} z
@ -283,7 +283,7 @@ function(Tone){
Tone.Panner3D._aliasProperty("coneInnerAngle");
/**
* The angle, in degrees, outside of which the volume will be reduced
* The angle, in degrees, outside of which the volume will be reduced
* to a constant value of coneOuterGain
* @type {Degrees}
* @memberOf Tone.Panner3D#
@ -300,7 +300,7 @@ function(Tone){
Tone.Panner3D._aliasProperty("coneOuterGain");
/**
* The maximum distance between source and listener,
* The maximum distance between source and listener,
* after which the volume will not be reduced any further.
* @type {Positive}
* @memberOf Tone.Panner3D#
@ -313,6 +313,7 @@ function(Tone){
* @returns {Tone.Panner3D} this
*/
Tone.Panner3D.prototype.dispose = function(){
Tone.AudioNode.prototype.dispose.call(this);
this._panner.disconnect();
this._panner = null;
this._orientation = null;
@ -321,4 +322,4 @@ function(Tone){
};
return Tone.Panner3D;
});
});
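A minimal sketch of the spatial panner above using the setPosition helper documented in this file (the coordinates and oscillator source are illustrative):

var spatial = new Tone.Panner3D(0, 0, -1); //one unit in front of the listener
new Tone.Oscillator().start().connect(spatial);
spatial.toMaster();
spatial.setPosition(3, 0, -2); //move the source to the right and further away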
View file
@ -1,9 +1,9 @@
define(["Tone/core/Tone", "Tone/core/Gain"], function (Tone) {
define(["Tone/core/Tone", "Tone/core/Gain", "Tone/core/AudioNode"], function (Tone) {
/**
* @class Tone.Solo lets you isolate a specific audio stream. When
* an instance is set to `solo=true`, it will mute all other instances.
* @extends {Tone}
* @extends {Tone.AudioNode}
* @example
* var soloA = new Tone.Solo()
* var soloB = new Tone.Solo()
@ -13,7 +13,7 @@ define(["Tone/core/Tone", "Tone/core/Gain"], function (Tone) {
Tone.Solo = function(){
var options = Tone.defaults(arguments, ["solo"], Tone.Solo);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The input and output node
@ -34,7 +34,7 @@ define(["Tone/core/Tone", "Tone/core/Gain"], function (Tone) {
this.solo = options.solo;
};
Tone.extend(Tone.Solo);
Tone.extend(Tone.Solo, Tone.AudioNode);
/**
* The defaults
@ -144,10 +144,10 @@ define(["Tone/core/Tone", "Tone/core/Gain"], function (Tone) {
* @return {Tone.Solo} this
*/
Tone.Solo.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
this.context.off("solo", this._soloBind);
this._removeSolo();
this._soloBind = null;
Tone.AudioNode.prototype.dispose.call(this);
return this;
};
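Extending the soloA/soloB example in the docs above, soloing one instance mutes the audio routed through every other instance (the oscillator sources are illustrative):

var soloA = new Tone.Solo().toMaster();
var soloB = new Tone.Solo().toMaster();
new Tone.Oscillator(440).start().connect(soloA);
new Tone.Oscillator(220).start().connect(soloB);
soloA.solo = true; //only the 440Hz oscillator is heard
soloA.solo = false; //both are audible again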
View file
@ -1,22 +1,22 @@
define(["Tone/core/Tone", "Tone/core/Gain"], function(Tone){
define(["Tone/core/Tone", "Tone/core/Gain", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Split splits an incoming signal into left and right channels.
*
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @example
* var split = new Tone.Split();
* stereoSignal.connect(split);
*/
Tone.Split = function(){
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(0, 2);
/**
/**
* @type {ChannelSplitterNode}
* @private
*/
@ -24,8 +24,8 @@ define(["Tone/core/Tone", "Tone/core/Gain"], function(Tone){
this._splitter.channelCount = 2;
this._splitter.channelCountMode = "explicit";
/**
* Left channel output.
/**
* Left channel output.
* Alias for <code>output[0]</code>
* @type {Tone.Gain}
*/
@ -37,20 +37,20 @@ define(["Tone/core/Tone", "Tone/core/Gain"], function(Tone){
* @type {Tone.Gain}
*/
this.right = this.output[1] = new Tone.Gain();
//connections
this._splitter.connect(this.left, 0, 0);
this._splitter.connect(this.right, 1, 0);
};
Tone.extend(Tone.Split);
Tone.extend(Tone.Split, Tone.AudioNode);
/**
* Clean up.
* Clean up.
* @returns {Tone.Split} this
*/
Tone.Split.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._splitter.disconnect();
this.left.dispose();
this.left = null;
@ -58,7 +58,7 @@ define(["Tone/core/Tone", "Tone/core/Gain"], function(Tone){
this.right = null;
this._splitter = null;
return this;
};
};
return Tone.Split;
});
});
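Since left and right are plain Tone.Gain nodes aliasing output[0] and output[1], each channel can be processed separately after the split; a brief sketch (stereoSignal is a stand-in, as in the example above):

var split = new Tone.Split();
stereoSignal.connect(split);
split.left.connect(new Tone.Volume(-6).toMaster()); //attenuate only the left channel
split.right.toMaster();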
View file
@ -1,11 +1,11 @@
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Gain"], function(Tone){
define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Gain", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Volume is a simple volume node, useful for creating a volume fader.
* @class Tone.Volume is a simple volume node, useful for creating a volume fader.
*
* @extends {Tone}
* @extends {Tone.AudioNode}
* @constructor
* @param {Decibels} [volume=0] the initial volume
* @example
@ -15,7 +15,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Gain"], function(Tone
Tone.Volume = function(){
var options = Tone.defaults(arguments, ["volume"], Tone.Volume);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* the output node
@ -32,7 +32,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Gain"], function(Tone
this._unmutedVolume = options.volume;
/**
* The volume control in decibels.
* The volume control in decibels.
* @type {Decibels}
* @signal
*/
@ -44,7 +44,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Gain"], function(Tone
this.mute = options.mute;
};
Tone.extend(Tone.Volume);
Tone.extend(Tone.Volume, Tone.AudioNode);
/**
* Defaults
@ -58,7 +58,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Gain"], function(Tone
};
/**
* Mute the output.
* Mute the output.
* @memberOf Tone.Volume#
* @type {boolean}
* @name mute
@ -69,7 +69,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Gain"], function(Tone
Object.defineProperty(Tone.Volume.prototype, "mute", {
get : function(){
return this.volume.value === -Infinity;
},
},
set : function(mute){
if (!this.mute && mute){
this._unmutedVolume = this.volume.value;
@ -87,7 +87,7 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Gain"], function(Tone
*/
Tone.Volume.prototype.dispose = function(){
this.input.dispose();
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable("volume");
this.volume.dispose();
this.volume = null;
@ -95,4 +95,4 @@ define(["Tone/core/Tone", "Tone/signal/Signal", "Tone/core/Gain"], function(Tone
};
return Tone.Volume;
});
});
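A small sketch of the volume fader above, using the volume signal and the mute property from this file (the values and oscillator source are illustrative):

var vol = new Tone.Volume(-12);
new Tone.Oscillator().start().connect(vol);
vol.toMaster();
vol.volume.rampTo(-24, 2); //fade down over two seconds
vol.mute = true; //snaps to -Infinity; unmuting restores the previous level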
View file
@ -1,4 +1,4 @@
define(["Tone/core/Tone", "Tone/core/Param"], function (Tone) {
define(["Tone/core/Tone", "Tone/core/Param", "Tone/core/AudioNode"], function (Tone) {
"use strict";
@ -11,15 +11,15 @@ define(["Tone/core/Tone", "Tone/core/Param"], function (Tone) {
}
/**
* @class Wrapper around Web Audio's native [DelayNode](http://webaudio.github.io/web-audio-api/#the-delaynode-interface).
* @class Wrapper around Web Audio's native [DelayNode](http://webaudio.github.io/web-audio-api/#the-delaynode-interface).
* @extends {Tone}
* @param {Time=} delayTime The delay applied to the incoming signal.
* @param {Time=} maxDelay The maximum delay time.
* @param {Time=} maxDelay The maximum delay time.
*/
Tone.Delay = function(){
var options = Tone.defaults(arguments, ["delayTime", "maxDelay"], Tone.Delay);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The native delay node
@ -30,7 +30,7 @@ define(["Tone/core/Tone", "Tone/core/Param"], function (Tone) {
/**
* The amount of time the incoming signal is
* delayed.
* delayed.
* @type {Time}
* @signal
*/
@ -43,7 +43,7 @@ define(["Tone/core/Tone", "Tone/core/Param"], function (Tone) {
this._readOnly("delayTime");
};
Tone.extend(Tone.Delay);
Tone.extend(Tone.Delay, Tone.AudioNode);
/**
* The defaults
@ -54,19 +54,19 @@ define(["Tone/core/Tone", "Tone/core/Param"], function (Tone) {
"maxDelay" : 1,
"delayTime" : 0
};
/**
* Clean up.
* @return {Tone.Delay} this
*/
Tone.Delay.prototype.dispose = function(){
Tone.Param.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._delayNode.disconnect();
this._delayNode = null;
this._writable("delayTime");
this.delayTime = null;
return this;
};
return Tone.Delay;
});
});
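Because delayTime is exposed as a Tone.Param, it accepts Time values and can be ramped; a minimal sketch (times are illustrative, and delayTime cannot exceed maxDelay):

var delay = new Tone.Delay(0.25, 1); //initial delayTime, one second maxDelay
new Tone.Oscillator().start().connect(delay);
delay.toMaster();
delay.delayTime.rampTo(0.5, 2); //glide to half a second over two seconds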
View file
@ -1,4 +1,4 @@
define(["Tone/core/Tone", "Tone/core/Param", "Tone/type/Type"], function (Tone) {
define(["Tone/core/Tone", "Tone/core/Param", "Tone/type/Type", "Tone/core/AudioNode"], function (Tone) {
"use strict";
@ -21,7 +21,7 @@ define(["Tone/core/Tone", "Tone/core/Param", "Tone/type/Type"], function (Tone)
Tone.Gain = function(){
var options = Tone.defaults(arguments, ["gain", "units"], Tone.Gain);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The GainNode
@ -44,7 +44,7 @@ define(["Tone/core/Tone", "Tone/core/Param", "Tone/type/Type"], function (Tone)
this._readOnly("gain");
};
Tone.extend(Tone.Gain);
Tone.extend(Tone.Gain, Tone.AudioNode);
/**
* The defaults
@ -61,7 +61,7 @@ define(["Tone/core/Tone", "Tone/core/Param", "Tone/type/Type"], function (Tone)
* @return {Tone.Gain} this
*/
Tone.Gain.prototype.dispose = function(){
Tone.Param.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._gainNode.disconnect();
this._gainNode = null;
this._writable("gain");
View file
@ -1,13 +1,13 @@
define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/Context"], function(Tone){
define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/Context", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class A single master output which is connected to the
* AudioDestinationNode (aka your speakers).
* It provides useful conveniences such as the ability
* to set the volume and mute the entire application.
* It also gives you the ability to apply master effects to your application.
* AudioDestinationNode (aka your speakers).
* It provides useful conveniences such as the ability
* to set the volume and mute the entire application.
* It also gives you the ability to apply master effects to your application.
* <br><br>
* Like Tone.Transport, a single Tone.Master is created
* on initialization and you do not need to explicitly construct one.
@ -23,8 +23,8 @@ define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/Context"], functio
* //the above two examples are equivalent.
*/
Tone.Master = function(){
Tone.call(this);
Tone.AudioNode.call(this);
Tone.getContext(function(){
this.createInsOuts(1, 0);
@ -41,14 +41,14 @@ define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/Context"], functio
* @signal
*/
this.volume = this._volume.volume;
this._readOnly("volume");
//connections
this.input.chain(this.output, this.context.destination);
}.bind(this));
};
Tone.extend(Tone.Master);
Tone.extend(Tone.Master, Tone.AudioNode);
/**
* @type {Object}
@ -60,7 +60,7 @@ define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/Context"], functio
};
/**
* Mute the output.
* Mute the output.
* @memberOf Tone.Master#
* @type {boolean}
* @name mute
@ -71,15 +71,15 @@ define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/Context"], functio
Object.defineProperty(Tone.Master.prototype, "mute", {
get : function(){
return this._volume.mute;
},
},
set : function(mute){
this._volume.mute = mute;
}
});
/**
* Add a master effects chain. NOTE: this will disconnect any nodes which were previously
* chained in the master effects chain.
* Add a master effects chain. NOTE: this will disconnect any nodes which were previously
* chained in the master effects chain.
* @param {AudioNode|Tone} args... All arguments will be connected in a row
* and the Master will be routed through it.
* @return {Tone.Master} this
@ -93,7 +93,7 @@ define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/Context"], functio
* });
* //give a little boost to the lows
* var lowBump = new Tone.Filter(200, "lowshelf");
* //route everything through the filter
* //route everything through the filter
* //and compressor before going to the speakers
* Tone.Master.chain(lowBump, masterCompressor);
*/
@ -108,7 +108,7 @@ define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/Context"], functio
* @return {Tone.Master} this
*/
Tone.Master.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._writable("volume");
this._volume.dispose();
this._volume = null;
@ -121,12 +121,12 @@ define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/Context"], functio
/**
* Connect 'this' to the master output. Shorthand for this.connect(Tone.Master)
* @returns {Tone} this
* @returns {Tone.AudioNode} this
* @example
* //connect an oscillator to the master output
* var osc = new Tone.Oscillator().toMaster();
*/
Tone.prototype.toMaster = function(){
Tone.AudioNode.prototype.toMaster = function(){
this.connect(Tone.Master);
return this;
};
@ -162,4 +162,4 @@ define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/Context"], functio
});
return Tone.Master;
});
});
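Beyond the chain example in the docs above, the volume signal and the mute property give quick global control; a short sketch (values are illustrative):

Tone.Master.volume.value = -6; //trim the whole application by 6dB
Tone.Master.volume.rampTo(-20, 2); //fade the entire mix down over two seconds
Tone.Master.mute = true; //hard mute; unmuting restores the previous level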
View file
@ -6,7 +6,7 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* @class Tone.Param wraps the native Web Audio's AudioParam to provide
* additional unit conversion functionality. It also
* serves as a base-class for classes which have a single,
* automatable parameter.
* automatable parameter.
* @extends {Tone}
* @param {AudioParam} param The parameter to wrap.
* @param {Tone.Type} units The units of the audio param.
@ -15,7 +15,7 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
Tone.Param = function(){
var options = Tone.defaults(arguments, ["param", "units", "convert"], Tone.Param);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The native parameter to control
@ -37,7 +37,7 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
this.convert = options.convert;
/**
* True if the signal value is being overridden by
* True if the signal value is being overridden by
* a connected signal.
* @readOnly
* @type {boolean}
@ -59,8 +59,8 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
}
};
Tone.extend(Tone.Param);
Tone.extend(Tone.Param, Tone.AudioNode);
/**
* Defaults
* @type {Object}
@ -73,7 +73,7 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
};
/**
* The current value of the parameter.
* The current value of the parameter.
* @memberOf Tone.Param#
* @type {Number}
* @name value
@ -112,17 +112,17 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
Tone.Param.prototype._fromUnits = function(val){
if (this.convert || Tone.isUndef(this.convert)){
switch(this.units){
case Tone.Type.Time:
case Tone.Type.Time:
return this.toSeconds(val);
case Tone.Type.Frequency:
case Tone.Type.Frequency:
return this.toFrequency(val);
case Tone.Type.Decibels:
case Tone.Type.Decibels:
return Tone.dbToGain(val);
case Tone.Type.NormalRange:
case Tone.Type.NormalRange:
return Math.min(Math.max(val, 0), 1);
case Tone.Type.AudioRange:
case Tone.Type.AudioRange:
return Math.min(Math.max(val, -1), 1);
case Tone.Type.Positive:
case Tone.Type.Positive:
return Math.max(val, 0);
default:
return val;
@ -141,7 +141,7 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
Tone.Param.prototype._toUnits = function(val){
if (this.convert || Tone.isUndef(this.convert)){
switch(this.units){
case Tone.Type.Decibels:
case Tone.Type.Decibels:
return Tone.gainToDb(val);
default:
return val;
@ -164,7 +164,7 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* @param {Time} time The time when the change should occur.
* @returns {Tone.Param} this
* @example
* //set the frequency to "G4" in exactly 1 second from now.
* //set the frequency to "G4" in exactly 1 second from now.
* freq.setValueAtTime("G4", "+1");
*/
Tone.Param.prototype.setValueAtTime = function(value, time){
@ -174,10 +174,10 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
/**
* Creates a schedule point with the current value at the current time.
* This is useful for creating an automation anchor point in order to
* schedule changes from the current value.
* This is useful for creating an automation anchor point in order to
* schedule changes from the current value.
*
* @param {number=} now (Optionally) pass the now value in.
* @param {number=} now (Optionally) pass the now value in.
* @returns {Tone.Param} this
*/
Tone.Param.prototype.setRampPoint = function(now){
@ -193,11 +193,11 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
};
/**
* Schedules a linear continuous change in parameter value from the
* Schedules a linear continuous change in parameter value from the
* previous scheduled parameter value to the given value.
*
* @param {number} value
* @param {Time} endTime
*
* @param {number} value
* @param {Time} endTime
* @returns {Tone.Param} this
*/
Tone.Param.prototype.linearRampToValueAtTime = function(value, endTime){
@ -207,11 +207,11 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
};
/**
* Schedules an exponential continuous change in parameter value from
* Schedules an exponential continuous change in parameter value from
* the previous scheduled parameter value to the given value.
*
* @param {number} value
* @param {Time} endTime
*
* @param {number} value
* @param {Time} endTime
* @returns {Tone.Param} this
*/
Tone.Param.prototype.exponentialRampToValueAtTime = function(value, endTime){
@ -222,17 +222,17 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
};
/**
* Schedules an exponential continuous change in parameter value from
* the current time and current value to the given value over the
* Schedules an exponential continuous change in parameter value from
* the current time and current value to the given value over the
* duration of the rampTime.
*
*
* @param {number} value The value to ramp to.
* @param {Time} rampTime the time that it takes the
* @param {Time} rampTime the time that it takes the
* value to ramp from its current value
* @param {Time} [startTime=now] When the ramp should start.
* @param {Time} [startTime=now] When the ramp should start.
* @returns {Tone.Param} this
* @example
* //exponentially ramp to the value 2 over 4 seconds.
* //exponentially ramp to the value 2 over 4 seconds.
* signal.exponentialRampToValue(2, 4);
*/
Tone.Param.prototype.exponentialRampToValue = function(value, rampTime, startTime){
@ -243,17 +243,17 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
};
/**
* Schedules a linear continuous change in parameter value from
* the current time and current value to the given value over the
* Schedules a linear continuous change in parameter value from
* the current time and current value to the given value over the
* duration of the rampTime.
*
*
* @param {number} value The value to ramp to.
* @param {Time} rampTime the time that it takes the
* @param {Time} rampTime the time that it takes the
* value to ramp from its current value
* @param {Time} [startTime=now] When the ramp should start.
* @param {Time} [startTime=now] When the ramp should start.
* @returns {Tone.Param} this
* @example
* //linearly ramp to the value 4 over 3 seconds.
* //linearly ramp to the value 4 over 3 seconds.
* signal.linearRampToValue(4, 3);
*/
Tone.Param.prototype.linearRampToValue = function(value, rampTime, startTime){
@ -266,10 +266,10 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
/**
* Start exponentially approaching the target value at the given time with
* a rate having the given time constant.
* @param {number} value
* @param {Time} startTime
* @param {number} timeConstant
* @returns {Tone.Param} this
* @param {number} value
* @param {Time} startTime
* @param {number} timeConstant
* @returns {Tone.Param} this
*/
Tone.Param.prototype.setTargetAtTime = function(value, startTime, timeConstant){
value = this._fromUnits(value);
@ -285,10 +285,10 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
/**
* Sets an array of arbitrary parameter values starting at the given time
* for the given duration.
*
* @param {Array} values
* @param {Time} startTime
* @param {Time} duration
*
* @param {Array} values
* @param {Time} startTime
* @param {Time} duration
* @returns {Tone.Param} this
*/
Tone.Param.prototype.setValueCurveAtTime = function(values, startTime, duration){
@ -303,9 +303,9 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
};
/**
* Cancels all scheduled parameter changes with times greater than or
* Cancels all scheduled parameter changes with times greater than or
* equal to startTime.
*
*
* @param {Time} startTime
* @returns {Tone.Param} this
*/
@ -315,17 +315,17 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
};
/**
* Ramps to the given value over the duration of the rampTime.
* Ramps to the given value over the duration of the rampTime.
* Automatically selects the best ramp type (exponential or linear)
* depending on the `units` of the signal
*
* @param {number} value
* @param {Time} rampTime The time that it takes the
*
* @param {number} value
* @param {Time} rampTime The time that it takes the
* value to ramp from its current value
* @param {Time} [startTime=now] When the ramp should start.
* @param {Time} [startTime=now] When the ramp should start.
* @returns {Tone.Param} this
* @example
* //ramp to the value either linearly or exponentially
* //ramp to the value either linearly or exponentially
* //depending on the "units" value of the signal
* signal.rampTo(0, 10);
* @example
@ -361,7 +361,7 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
* @returns {Tone.Param} this
*/
Tone.Param.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._param = null;
if (this._lfo){
this._lfo.dispose();
@ -371,4 +371,4 @@ define(["Tone/core/Tone", "Tone/type/Type"], function(Tone){
};
return Tone.Param;
});
});
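Pulling the scheduling methods above together, a brief sketch on a Tone.Signal (a Tone.Param subclass); the values are illustrative and the times use the relative "+" syntax from the setValueAtTime example:

var freq = new Tone.Signal(440, Tone.Type.Frequency);
freq.setValueAtTime("G4", "+1"); //units are converted, so note names work here
freq.linearRampToValueAtTime(880, "+3"); //then glide up over the following two seconds
//or, relative to the current value and time:
//freq.rampTo(220, 4); //picks a linear or exponential ramp from the units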
View file
@ -1,20 +1,20 @@
define(["Tone/core/Tone", "Tone/component/CrossFade"], function(Tone){
define(["Tone/core/Tone", "Tone/component/CrossFade", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Effect is the base class for effects. Connect the effect between
* the effectSend and effectReturn GainNodes, then control the amount of
* effect which goes to the output using the wet control.
*
* @constructor
* @extends {Tone}
* @param {NormalRange|Object} [wet] The starting wet value.
* @extends {Tone.AudioNode}
* @param {NormalRange|Object} [wet] The starting wet value.
*/
Tone.Effect = function(){
var options = Tone.defaults(arguments, ["wet"], Tone.Effect);
Tone.call(this);
Tone.AudioNode.call(this);
this.createInsOuts(1, 1);
/**
@ -27,7 +27,7 @@ define(["Tone/core/Tone", "Tone/component/CrossFade"], function(Tone){
/**
* The wet control is how much of the effected signal
* will pass through to the output. 1 = 100% effected
* signal, 0 = 100% dry signal.
* signal, 0 = 100% dry signal.
* @type {NormalRange}
* @signal
*/
@ -55,7 +55,7 @@ define(["Tone/core/Tone", "Tone/component/CrossFade"], function(Tone){
this._readOnly(["wet"]);
};
Tone.extend(Tone.Effect);
Tone.extend(Tone.Effect, Tone.AudioNode);
/**
* @static
@ -77,11 +77,11 @@ define(["Tone/core/Tone", "Tone/component/CrossFade"], function(Tone){
};
/**
* Clean up.
* Clean up.
* @returns {Tone.Effect} this
*/
Tone.Effect.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._dryWet.dispose();
this._dryWet = null;
this.effectSend.dispose();
@ -94,4 +94,4 @@ define(["Tone/core/Tone", "Tone/component/CrossFade"], function(Tone){
};
return Tone.Effect;
});
});
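Tone.Effect itself is abstract; to illustrate the wet control described above, the sketch below assumes a concrete subclass such as Tone.Freeverb (not part of this diff):

var reverb = new Tone.Freeverb().toMaster();
new Tone.Oscillator().start().connect(reverb);
reverb.wet.value = 0.5; //half effected signal, half dry
reverb.wet.rampTo(0, 2); //fade the effect out entirely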
View file
@ -13,6 +13,7 @@ function(Tone){
Tone.StereoEffect = function(){
//get the defaults
Tone.AudioNode.call(this);
var options = Tone.defaults(arguments, ["wet"], Tone.Effect);
this.createInsOuts(1, 1);
@ -89,7 +90,7 @@ function(Tone){
* @returns {Tone.StereoEffect} this
*/
Tone.StereoEffect.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._dryWet.dispose();
this._dryWet = null;
this._split.dispose();
View file
@ -4,15 +4,15 @@ define(["Tone/core/Tone", "Tone/type/Type", "Tone/core/Master"], function(Tone){
/**
* @class Base-class for all instruments
*
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
*/
Tone.Instrument = function(options){
//get the defaults
options = Tone.defaultArg(options, Tone.Instrument.defaults);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The output and volume trimming node
@ -32,7 +32,7 @@ define(["Tone/core/Tone", "Tone/type/Type", "Tone/core/Master"], function(Tone){
this._readOnly("volume");
};
Tone.extend(Tone.Instrument);
Tone.extend(Tone.Instrument, Tone.AudioNode);
/**
* the default attributes
@ -58,10 +58,10 @@ define(["Tone/core/Tone", "Tone/type/Type", "Tone/core/Master"], function(Tone){
Tone.Instrument.prototype.triggerRelease = Tone.noOp;
/**
* Trigger the attack and then the release after the duration.
* Trigger the attack and then the release after the duration.
* @param {Frequency} note The note to trigger.
* @param {Time} duration How long the note should be held for before
* triggering the release. This value must be greater than 0.
* triggering the release. This value must be greater than 0.
* @param {Time} [time=now] When the note should be triggered.
* @param {NormalRange} [velocity=1] The velocity the note should be triggered at.
* @returns {Tone.Instrument} this
@ -82,7 +82,7 @@ define(["Tone/core/Tone", "Tone/type/Type", "Tone/core/Master"], function(Tone){
* @returns {Tone.Instrument} this
*/
Tone.Instrument.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._volume.dispose();
this._volume = null;
this._writable(["volume"]);
@ -91,4 +91,4 @@ define(["Tone/core/Tone", "Tone/type/Type", "Tone/core/Master"], function(Tone){
};
return Tone.Instrument;
});
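Tone.Instrument is a base class, so the triggerAttackRelease contract above is easiest to see on a concrete subclass; the sketch assumes Tone.Synth, which is not part of this diff:

var synth = new Tone.Synth().toMaster();
synth.volume.value = -8; //the volume signal defined in the base class above
synth.triggerAttackRelease("C4", "8n"); //note, duration; time defaults to now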
View file

@ -1,34 +1,34 @@
define(["Tone/core/Tone"], function(Tone){
define(["Tone/core/Tone", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Base class for all Signals. Used Internally.
* @class Base class for all Signals. Used Internally.
*
* @constructor
* @extends {Tone}
*/
Tone.SignalBase = function(){
Tone.call(this);
Tone.AudioNode.call(this);
};
Tone.extend(Tone.SignalBase);
Tone.extend(Tone.SignalBase, Tone.AudioNode);
/**
* When signals connect to other signals or AudioParams,
* they take over the output value of that signal or AudioParam.
* For all other nodes, the behavior is the same as a default <code>connect</code>.
* When signals connect to other signals or AudioParams,
* they take over the output value of that signal or AudioParam.
* For all other nodes, the behavior is the same as a default <code>connect</code>.
*
* @override
* @param {AudioParam|AudioNode|Tone.Signal|Tone} node
* @param {AudioParam|AudioNode|Tone.Signal|Tone} node
* @param {number} [outputNumber=0] The output number to connect from.
* @param {number} [inputNumber=0] The input number to connect to.
* @returns {Tone.SignalBase} this
*/
Tone.SignalBase.prototype.connect = function(node, outputNumber, inputNumber){
//zero it out so that the signal can have full control
if ((Tone.Signal && Tone.Signal === node.constructor) ||
(Tone.Param && Tone.Param === node.constructor) ||
if ((Tone.Signal && Tone.Signal === node.constructor) ||
(Tone.Param && Tone.Param === node.constructor) ||
(Tone.TimelineSignal && Tone.TimelineSignal === node.constructor)){
//cancel changes
node._param.cancelScheduledValues(0);
@ -39,10 +39,10 @@ define(["Tone/core/Tone"], function(Tone){
} else if (node instanceof AudioParam){
node.cancelScheduledValues(0);
node.value = 0;
}
Tone.prototype.connect.call(this, node, outputNumber, inputNumber);
}
Tone.AudioNode.prototype.connect.call(this, node, outputNumber, inputNumber);
return this;
};
return Tone.SignalBase;
});
});
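The connect override above means a signal completely takes over whatever signal or AudioParam it is connected to; a tiny sketch of that behavior:

var a = new Tone.Signal(2);
var b = new Tone.Signal(10);
a.connect(b); //b's scheduled values are cancelled, its value is zeroed,
              //and b now follows a's output (2)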
View file
@ -1,9 +1,9 @@
define(["Tone/core/Tone", "Tone/core/Gain"], function (Tone) {
define(["Tone/core/Tone", "Tone/core/Gain", "Tone/signal/SignalBase"], function (Tone) {
/**
* @class Tone.Zero outputs 0's at audio-rate. The reason this has to be
* its own class is that many browsers optimize out Tone.Signal
* with a value of 0 and will not process nodes further down the graph.
* with a value of 0 and will not process nodes further down the graph.
* @extends {Tone.SignalBase}
*/
Tone.Zero = function(){
@ -34,4 +34,4 @@ define(["Tone/core/Tone", "Tone/core/Gain"], function (Tone) {
};
return Tone.Zero;
});
View file

@ -1,4 +1,4 @@
define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/Gain"], function (Tone) {
define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/Gain", "Tone/core/AudioNode"], function (Tone) {
/**
* BufferSource polyfill
@ -10,19 +10,19 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
/**
* @class Wrapper around the native BufferSourceNode.
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {AudioBuffer|Tone.Buffer} buffer The buffer to play
* @param {Function} onload The callback to invoke when the
* @param {Function} onload The callback to invoke when the
* buffer is loaded.
*/
Tone.BufferSource = function(){
var options = Tone.defaults(arguments, ["buffer", "onload"], Tone.BufferSource);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The callback to invoke after the
* buffer source is done playing.
* The callback to invoke after the
* buffer source is done playing.
* @type {Function}
*/
this.onended = options.onended;
@ -62,7 +62,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
* @private
*/
this._buffer = new Tone.Buffer(options.buffer, options.onload);
/**
* The playbackRate of the buffer
* @type {Positive}
@ -102,7 +102,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
this.playbackRate.value = options.playbackRate;
};
Tone.extend(Tone.BufferSource);
Tone.extend(Tone.BufferSource, Tone.AudioNode);
/**
* The defaults
@ -142,9 +142,9 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
* Start the buffer
* @param {Time} [startTime=now] When the player should start.
* @param {Time} [offset=0] The offset from the beginning of the sample
* to start at.
* to start at.
* @param {Time=} duration How long the sample should play. If no duration
* is given, it will default to the full length
* is given, it will default to the full length
* of the sample (minus any offset)
* @param {Gain} [gain=1] The gain to play the buffer back at.
* @param {Time=} fadeInTime The optional fadeIn ramp time.
@ -221,8 +221,8 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
};
/**
* Stop the buffer. Optionally add a ramp time to fade the
* buffer out.
* Stop the buffer. Optionally add a ramp time to fade the
* buffer out.
* @param {Time=} time The time the buffer should stop.
* @param {Time=} fadeOutTime How long the gain should fade out for
* @return {Tone.BufferSource} this
@ -231,17 +231,17 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
if (this.buffer.loaded){
time = this.toSeconds(time);
//the fadeOut time
if (Tone.isUndef(fadeOutTime)){
fadeOutTime = this.toSeconds(this.fadeOut);
} else {
fadeOutTime = this.toSeconds(fadeOutTime);
}
}
//only stop if the last stop was scheduled later
if (this._stopTime === -1 || this._stopTime > time){
this._stopTime = time;
this._stopTime = time;
//cancel the end curve
this._gainNode.gain.cancelScheduledValues(this._startTime + this.sampleTime);
@ -267,7 +267,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
};
/**
* Internal callback when the buffer is ended.
* Internal callback when the buffer is ended.
* Invokes `onended` and disposes the node.
* @private
*/
@ -276,7 +276,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
};
/**
* If loop is true, the loop will start at this position.
* If loop is true, the loop will start at this position.
* @memberOf Tone.BufferSource#
* @type {Time}
* @name loopStart
@ -284,7 +284,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
Object.defineProperty(Tone.BufferSource.prototype, "loopStart", {
get : function(){
return this._source.loopStart;
},
},
set : function(loopStart){
this._source.loopStart = this.toSeconds(loopStart);
}
@ -299,14 +299,14 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
Object.defineProperty(Tone.BufferSource.prototype, "loopEnd", {
get : function(){
return this._source.loopEnd;
},
},
set : function(loopEnd){
this._source.loopEnd = this.toSeconds(loopEnd);
}
});
/**
* The audio buffer belonging to the player.
* The audio buffer belonging to the player.
* @memberOf Tone.BufferSource#
* @type {Tone.Buffer}
* @name buffer
@ -314,14 +314,14 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
Object.defineProperty(Tone.BufferSource.prototype, "buffer", {
get : function(){
return this._buffer;
},
},
set : function(buffer){
this._buffer.set(buffer);
}
});
/**
* If the buffer should loop once it's over.
* If the buffer should loop once it's over.
* @memberOf Tone.BufferSource#
* @type {Boolean}
* @name loop
@ -329,7 +329,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
Object.defineProperty(Tone.BufferSource.prototype, "loop", {
get : function(){
return this._source.loop;
},
},
set : function(loop){
this._source.loop = loop;
}
@ -340,7 +340,7 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
* @return {Tone.BufferSource} this
*/
Tone.BufferSource.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this.onended = null;
this._source.disconnect();
this._source = null;
@ -355,4 +355,4 @@ define(["Tone/core/Tone", "Tone/core/Buffer", "Tone/source/Source", "Tone/core/G
};
return Tone.BufferSource;
});
});
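A minimal one-shot playback sketch for the wrapper above; the sample URL is a placeholder and start's arguments follow the signature shown in this file:

var buffer = new Tone.Buffer("./sample.mp3", function(){
	var source = new Tone.BufferSource(buffer).toMaster();
	source.onended = function(){ /* the node disposes itself afterwards */ };
	source.start("+0.05", 0, 0.5); //start shortly from now, offset 0, play 0.5s
});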
View file
@ -1,14 +1,14 @@
define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], function(Tone){
define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.Players combines multiple [Tone.Player](Player) objects.
*
* @class Tone.Players combines multiple [Tone.Player](Player) objects.
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {Object} urls An object mapping a name to a url.
* @param {function=} onload The function to invoke when the buffer is loaded.
* @param {function=} onload The function to invoke when the buffer is loaded.
*/
Tone.Players = function(urls){
@ -75,7 +75,7 @@ define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], functi
}
};
Tone.extend(Tone.Players);
Tone.extend(Tone.Players, Tone.AudioNode);
/**
* The default values
@ -91,7 +91,7 @@ define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], functi
/**
* A buffer was loaded. Decrement the counter.
* @param {Function} callback
* @param {Function} callback
* @private
*/
Tone.Players.prototype._bufferLoaded = function(callback){
@ -102,7 +102,7 @@ define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], functi
};
/**
* Mute the output.
* Mute the output.
* @memberOf Tone.Source#
* @type {boolean}
* @name mute
@ -113,7 +113,7 @@ define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], functi
Object.defineProperty(Tone.Players.prototype, "mute", {
get : function(){
return this._volume.mute;
},
},
set : function(mute){
this._volume.mute = mute;
}
@ -128,7 +128,7 @@ define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], functi
Object.defineProperty(Tone.Players.prototype, "fadeIn", {
get : function(){
return this._fadeIn;
},
},
set : function(fadeIn){
this._fadeIn = fadeIn;
this._forEach(function(player){
@ -146,7 +146,7 @@ define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], functi
Object.defineProperty(Tone.Players.prototype, "fadeOut", {
get : function(){
return this._fadeOut;
},
},
set : function(fadeOut){
this._fadeOut = fadeOut;
this._forEach(function(player){
@ -174,7 +174,7 @@ define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], functi
/**
* True if the buffers object has a buffer by that name.
* @param {String|Number} name The key or index of the
* @param {String|Number} name The key or index of the
* buffer.
* @return {Boolean}
*/
@ -183,9 +183,9 @@ define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], functi
};
/**
* Get a player by name.
* @param {String} name The player's name as defined in
* the constructor object or `add` method.
* Get a player by name.
* @param {String} name The player's name as defined in
* the constructor object or `add` method.
* @return {Tone.Player}
*/
Tone.Players.prototype.get = function(name){
@ -229,10 +229,10 @@ define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], functi
/**
* Add a player by name and url to the Players
* @param {String} name A unique name to give the player
* @param {String|Tone.Buffer|AudioBuffer} url Either the url of the buffer,
* @param {String|Tone.Buffer|AudioBuffer} url Either the url of the buffer,
* or a buffer which will be added
* with the given name.
* @param {Function=} callback The callback to invoke
* @param {Function=} callback The callback to invoke
* when the url is loaded.
*/
Tone.Players.prototype.add = function(name, url, callback){
@ -258,7 +258,7 @@ define(["Tone/core/Tone", "Tone/source/Player", "Tone/component/Volume"], functi
* @return {Tone.Players} this
*/
Tone.Players.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this._volume.dispose();
this._volume = null;
this._writable("volume");
View file
@ -1,20 +1,20 @@
define(["Tone/core/Tone", "Tone/core/Transport", "Tone/component/Volume", "Tone/core/Master",
"Tone/type/Type", "Tone/core/TimelineState", "Tone/signal/Signal"],
"Tone/type/Type", "Tone/core/TimelineState", "Tone/signal/Signal", "Tone/core/AudioNode"],
function(Tone){
"use strict";
/**
* @class Base class for sources. Sources have start/stop methods
* and the ability to be synced to the
* start/stop of Tone.Transport.
* and the ability to be synced to the
* start/stop of Tone.Transport.
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @example
* //Multiple state change events can be chained together,
* //but must be set in the correct order and with ascending times
*
*
* // OK
* state.start().stop("+0.2");
* // AND
@ -24,12 +24,12 @@ function(Tone){
* state.stop("+0.2").start();
* // OR
* state.start("+0.3").stop("+0.2");
*
*/
*
*/
Tone.Source = function(options){
Tone.call(this);
options = Tone.defaultArg(options, Tone.Source.defaults);
Tone.AudioNode.call(this);
/**
* The output volume node
@ -77,7 +77,7 @@ function(Tone){
this.mute = options.mute;
};
Tone.extend(Tone.Source);
Tone.extend(Tone.Source, Tone.AudioNode);
/**
* The default parameters
@ -112,7 +112,7 @@ function(Tone){
});
/**
* Mute the output.
* Mute the output.
* @memberOf Tone.Source#
* @type {boolean}
* @name mute
@ -123,7 +123,7 @@ function(Tone){
Object.defineProperty(Tone.Source.prototype, "mute", {
get : function(){
return this._volume.mute;
},
},
set : function(mute){
this._volume.mute = mute;
}
@ -134,7 +134,7 @@ function(Tone){
Tone.Source.prototype._stop = Tone.noOp;
/**
* Start the source at the specified time. If no time is given,
* Start the source at the specified time. If no time is given,
* start the source now.
* @param {Time} [time=now] When the source should be started.
* @returns {Tone.Source} this
@ -146,7 +146,7 @@ function(Tone){
time = Tone.Transport.seconds;
} else {
time = this.toSeconds(time);
}
}
//if it's started, stop it and restart it
if (!this.retrigger && this._state.getValueAtTime(time) === Tone.State.Started){
this.stop(time);
@ -168,9 +168,9 @@ function(Tone){
};
/**
* Stop the source at the specified time. If no time is given,
* Stop the source at the specified time. If no time is given,
* stop the source now.
* @param {Time} [time=now] When the source should be stopped.
* @param {Time} [time=now] When the source should be stopped.
* @returns {Tone.Source} this
* @example
* source.stop(); // stops the source immediately
@ -188,14 +188,14 @@ function(Tone){
} else {
var sched = Tone.Transport.schedule(this._stop.bind(this), time);
this._scheduled.push(sched);
}
}
return this;
};
/**
* Sync the source to the Transport so that all subsequent
* calls to `start` and `stop` are synced to the TransportTime
* instead of the AudioContext time.
* instead of the AudioContext time.
*
* @returns {Tone.Source} this
* @example
@ -223,7 +223,7 @@ function(Tone){
var startOffset = offset - this.toSeconds(stateEvent.time);
var duration;
if (stateEvent.duration){
duration = this.toSeconds(stateEvent.duration) - startOffset;
duration = this.toSeconds(stateEvent.duration) - startOffset;
}
this._start(time, this.toSeconds(stateEvent.offset) + startOffset, duration);
}
@ -264,7 +264,7 @@ function(Tone){
* @return {Tone.Source} this
*/
Tone.Source.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this.unsync();
this._scheduled = null;
this._writable("volume");
@ -276,4 +276,4 @@ function(Tone){
};
return Tone.Source;
});
});
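Tone.Source is also abstract; the start/stop/sync behavior above is easiest to see through a subclass like Tone.Oscillator (already used in the Master example earlier in this commit):

var osc = new Tone.Oscillator(440, "sine").toMaster();
osc.start().stop("+1"); //start now, stop one second later
//or, synced to the Transport timeline instead of the AudioContext clock:
//osc.sync().start(0).stop("4m");
//Tone.Transport.start();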
View file
@ -1,21 +1,21 @@
define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
define(["Tone/core/Tone", "Tone/component/Volume", "Tone/core/AudioNode"], function(Tone){
"use strict";
/**
* @class Tone.UserMedia uses MediaDevices.getUserMedia to open up
* and external microphone or audio input. Check
* an external microphone or audio input. Check
* [MediaDevices API Support](https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia)
* to see which browsers are supported. Access to an external input
* is limited to secure (HTTPS) connections.
*
*
* @constructor
* @extends {Tone}
* @extends {Tone.AudioNode}
* @param {Decibels=} volume The level of the input
* @example
* //list the inputs and open the third one
* var motu = new Tone.UserMedia();
*
*
* //opening the input asks the user to activate their mic
* motu.open().then(function(){
* //opening it activates the microphone
@ -27,15 +27,15 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
Tone.UserMedia = function(){
var options = Tone.defaults(arguments, ["volume"], Tone.UserMedia);
Tone.call(this);
Tone.AudioNode.call(this);
/**
* The MediaStreamNode
* The MediaStreamNode
* @type {MediaStreamAudioSourceNode}
* @private
*/
this._mediaStream = null;
/**
* The media stream created by getUserMedia.
* @type {LocalMediaStream}
@ -70,7 +70,7 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
this.mute = options.mute;
};
Tone.extend(Tone.UserMedia);
Tone.extend(Tone.UserMedia, Tone.AudioNode);
/**
* the default parameters
@ -85,7 +85,7 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
* Open the media stream. If a string is passed in, it is assumed
* to be the label or id of the stream, if a number is passed in,
* it is the input number of the stream.
* @param {String|Number} [labelOrId="default"] The label or id of the audio input media device.
* @param {String|Number} [labelOrId="default"] The label or id of the audio input media device.
* With no argument, the default stream is opened.
* @return {Promise} The promise is resolved when the stream is open.
*/
@ -121,7 +121,7 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
this._mediaStream = this.context.createMediaStreamSource(stream);
//Connect the MediaStreamSourceNode to a gate gain node
this._mediaStream.connect(this.output);
}
}
return this;
}.bind(this));
}.bind(this));
@ -177,11 +177,11 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
});
/**
* Returns an identifier for the represented device that is
* persisted across sessions. It is un-guessable by other applications and
* unique to the origin of the calling application. It is reset when the
* user clears cookies (for Private Browsing, a different identifier is
* used that is not persisted across sessions). Returns undefined when the
* Returns an identifier for the represented device that is
* persisted across sessions. It is un-guessable by other applications and
* unique to the origin of the calling application. It is reset when the
* user clears cookies (for Private Browsing, a different identifier is
* used that is not persisted across sessions). Returns undefined when the
* device is not open.
* @type {String}
* @readOnly
@ -197,7 +197,7 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
});
/**
* Returns a group identifier. Two devices have the
* Returns a group identifier. Two devices have the
* same group identifier if they belong to the same physical device.
* Returns undefined when the device is not open.
* @type {String}
@ -214,7 +214,7 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
});
/**
* Returns a label describing this device (for example "Built-in Microphone").
* Returns a label describing this device (for example "Built-in Microphone").
* Returns undefined when the device is not open or label is not available
* because of permissions.
* @type {String}
@ -231,7 +231,7 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
});
/**
* Mute the output.
* Mute the output.
* @memberOf Tone.UserMedia#
* @type {boolean}
* @name mute
@ -242,7 +242,7 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
Object.defineProperty(Tone.UserMedia.prototype, "mute", {
get : function(){
return this._volume.mute;
},
},
set : function(mute){
this._volume.mute = mute;
}
@ -253,7 +253,7 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
* @return {Tone.UserMedia} this
*/
Tone.UserMedia.prototype.dispose = function(){
Tone.prototype.dispose.call(this);
Tone.AudioNode.prototype.dispose.call(this);
this.close();
this._writable("volume");
this._volume.dispose();
@ -277,4 +277,4 @@ define(["Tone/core/Tone", "Tone/component/Volume"], function(Tone){
});
return Tone.UserMedia;
});
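A compact sketch of the flow described above; open() resolves once the user grants access (secure contexts only), and routing straight to the speakers is only for illustration since it can feed back:

var mic = new Tone.UserMedia(-10); //open the default input at -10dB
mic.open().then(function(){
	mic.toMaster(); //in practice, connect to an analyser or effect instead
}).catch(function(){
	//access was denied or no input device was found
});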
View file

@ -1,6 +1,6 @@
define(["Tone/component/Panner", "helper/Basic", "helper/Offline", "Test", "Tone/signal/Signal",
"helper/PassAudio", "helper/PassAudioStereo", "Tone/component/Merge", "Tone/core/Tone", "helper/Supports"],
function (Panner, Basic, Offline, Test, Signal, PassAudio, PassAudioStereo, Merge, Tone, Supports) {
define(["Tone/component/Panner", "helper/Basic", "helper/Offline", "Test", "Tone/signal/Signal",
"helper/PassAudio", "helper/PassAudioStereo", "Tone/component/Merge", "Tone/core/Tone", "helper/Supports", "Tone/core/AudioNode"],
function (Panner, Basic, Offline, Test, Signal, PassAudio, PassAudioStereo, Merge, Tone, Supports, AudioNode) {
//a stereo signal for testing
var StereoSignal = function(val){
@ -15,7 +15,7 @@ function (Panner, Basic, Offline, Test, Signal, PassAudio, PassAudioStereo, Merg
}
};
Tone.extend(StereoSignal);
Tone.extend(StereoSignal, AudioNode);
StereoSignal.prototype.dispose = function(){
if (Panner.hasStereoPanner){
@ -190,4 +190,4 @@ function (Panner, Basic, Offline, Test, Signal, PassAudio, PassAudioStereo, Merg
}
});
});
});
});
View file
@ -1,5 +1,5 @@
define(["Test", "Tone/core/Bus", "Tone/core/Tone", "helper/Offline",
"helper/PassAudio", "Tone/signal/Signal", "Tone/core/Gain", "Tone/component/Merge"],
define(["Test", "Tone/core/Bus", "Tone/core/Tone", "helper/Offline",
"helper/PassAudio", "Tone/signal/Signal", "Tone/core/Gain", "Tone/component/Merge"],
function (Test, Bus, Tone, Offline, PassAudio, Signal, Gain, Merge) {
describe("Bus", function(){
@ -17,7 +17,7 @@ define(["Test", "Tone/core/Bus", "Tone/core/Tone", "helper/Offline",
send.send("test");
recv.receive("test");
});
});
});
it ("can create the recieve before the send", function(){
return PassAudio(function(input){
@ -28,7 +28,7 @@ define(["Test", "Tone/core/Bus", "Tone/core/Tone", "helper/Offline",
recv.receive("test");
send.send("test");
});
});
});
it ("passes audio from a send to a receive at the given level", function(){
return Offline(function(){
@ -41,7 +41,7 @@ define(["Test", "Tone/core/Bus", "Tone/core/Tone", "helper/Offline",
expect(sample).to.be.closeTo(0.25, 0.1);
});
});
});
});
it ("can receive from a specific channel", function(){
return Offline(function(){
@ -55,6 +55,6 @@ define(["Test", "Tone/core/Bus", "Tone/core/Tone", "helper/Offline",
expect(r).to.be.closeTo(2, 0.01);
});
});
});
});
});
});
});
View file
@ -1,5 +1,5 @@
define(["Test", "Tone/core/Master", "Tone/core/Tone", "helper/Offline", "helper/PassAudio", "Tone/source/Oscillator"],
function (Test, Master, Tone, Offline, PassAudio, Oscillator) {
define(["Test", "Tone/core/Master", "Tone/core/Tone", "helper/Offline", "helper/PassAudio", "Tone/source/Oscillator", "Tone/core/AudioNode"],
function (Test, Master, Tone, Offline, PassAudio, Oscillator, AudioNode) {
describe("Master", function(){
it ("exists", function(){
@ -7,7 +7,7 @@ define(["Test", "Tone/core/Master", "Tone/core/Tone", "helper/Offline", "helper/
});
it ("provides a toMaster method", function(){
expect(Tone.prototype.toMaster).is.a("function");
expect(AudioNode.prototype.toMaster).is.a("function");
var gain = Tone.context.createGain();
expect(gain.toMaster).is.a("function");
gain.toMaster();
@ -50,4 +50,4 @@ define(["Test", "Tone/core/Master", "Tone/core/Tone", "helper/Offline", "helper/
});
});
});
});
});