Started retooling with TypeScript

This commit is contained in:
tambien 2019-04-12 10:37:47 -04:00
parent 184102ab4c
commit df08425a70
44 changed files with 6581 additions and 382 deletions

113
Tone/core/Connect.ts Normal file

@@ -0,0 +1,113 @@
import { InputNode, OutputNode, ToneAudioNode } from "../node/AudioNode";
import { isArray, isDefined, isNumber } from "./Util";
/**
* Connect all of the arguments together in series
* @param nodes
*/
export function connectSeries(...nodes: InputNode[]): void {
// seed with the first node and skip it in the iteration so it is not connected to itself
nodes.slice(1).reduce((prev, current) => {
if (prev instanceof ToneAudioNode || prev instanceof AudioNode) {
connect(prev, current);
}
return current;
}, nodes[0]);
}
/**
* Connect two nodes together so that signal flows from the
* first node to the second. Optionally specify the input and output channels.
* @param srcNode The source node
* @param dstNode The destination node
* @param outputNumber The output channel of the srcNode
* @param inputNumber The input channel of the dstNode
*/
export function connect(srcNode: OutputNode, dstNode: InputNode, outputNumber = 0, inputNumber = 0): void {
// resolve the input of the dstNode
while (!(dstNode instanceof AudioNode || dstNode instanceof AudioParam)) {
if (isArray(dstNode.input)) {
if (inputNumber >= dstNode.input.length) {
throw new Error("the input number is greater than the number of inputs");
}
dstNode = dstNode.input[inputNumber];
} else if (isDefined(dstNode.input)) {
dstNode = dstNode.input;
}
inputNumber = 0;
}
if (srcNode instanceof ToneAudioNode) {
if (isArray(srcNode.output)) {
if (outputNumber >= srcNode.output.length) {
throw new Error("the output number is greater than the number of outputs");
}
srcNode = srcNode.output[outputNumber];
} else if (isDefined(srcNode.output)) {
srcNode = srcNode.output;
}
outputNumber = 0;
}
// make the connection
if (dstNode instanceof AudioParam) {
srcNode.connect(dstNode, outputNumber);
} else {
srcNode.connect(dstNode, outputNumber, inputNumber);
}
}
/**
* Disconnect a node from all of its connections, or optionally from a specific destination node and input/output channel.
* @param srcNode The source node
* @param dstNode The destination node
* @param outputNumber The output channel of the srcNode
* @param inputNumber The input channel of the dstNode
*/
export function disconnect(
srcNode: OutputNode,
dstNode?: InputNode,
outputNumber = 0,
inputNumber = 0,
): void {
// resolve the destination node
if (isDefined(dstNode)) {
while (dstNode instanceof ToneAudioNode) {
if (isArray(dstNode.input)) {
if (isNumber(inputNumber)) {
if (inputNumber >= dstNode.input.length) {
throw new Error("the input number is greater than the number of inputs");
}
dstNode = dstNode.input[inputNumber];
} else {
// disconnect from all of the nodes
// since we don't know which one was connected
dstNode.input.forEach(dst => {
try {
// catch errors from disconnecting from nodes that are not connected
disconnect(srcNode, dst, outputNumber);
// tslint:disable-next-line: no-empty
} catch (e) { }
});
}
inputNumber = 0;
} else if (dstNode.input) {
dstNode = dstNode.input;
}
}
}
// resolve the src node
while (!(srcNode instanceof AudioNode)) {
if (isArray(srcNode.output)) {
if (outputNumber >= srcNode.output.length) {
throw new Error("the output number is greater than the number of outputs");
}
srcNode = srcNode.output[outputNumber];
} else if (isDefined(srcNode.output)) {
srcNode = srcNode.output;
}
outputNumber = 0;
}
if (dstNode instanceof AudioParam) {
srcNode.disconnect(dstNode, outputNumber);
} else if (dstNode instanceof AudioNode) {
srcNode.disconnect(dstNode, outputNumber, inputNumber);
} else {
srcNode.disconnect();
}
}
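For orientation, a short usage sketch (not part of this commit). It uses plain Web Audio nodes, which these helpers accept alongside ToneAudioNodes; the routing is illustrative:
import { connect, connectSeries, disconnect } from "./Connect";

const ctx = new AudioContext();
const osc = ctx.createOscillator();
const filter = ctx.createBiquadFilter();
const gain = ctx.createGain();

// wire osc -> filter -> gain in one call
connectSeries(osc, filter, gain);
// single connections, optionally on specific channels
connect(gain, ctx.destination);
// a source can also drive an AudioParam
connect(osc, gain.gain);
// undo a single connection again
disconnect(gain, ctx.destination);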

379
Tone/core/Context.ts Normal file

@@ -0,0 +1,379 @@
import { Ticker, TickerClockSource } from "./ContextTicker";
import { Emitter } from "./Emitter";
import { Timeline } from "./Timeline";
import { isString, Omit, optionsFromArguments } from "./Util";
type ContextLatencyHint = AudioContextLatencyCategory | "fastest";
// these are either not used in Tone.js or deprecated and not implemented.
type ExcludedFromBaseAudioContext = "createScriptProcessor" | "onstatechange" | "addEventListener"
| "removeEventListener" | "listener" | "dispatchEvent" | "audioWorklet";
// the subset of the BaseAudioContext which Tone.Context implements.
type BaseAudioContextSubset = Omit<BaseAudioContext, ExcludedFromBaseAudioContext>;
interface ContextOptions {
clockSource: TickerClockSource;
latencyHint: ContextLatencyHint;
lookAhead: Seconds;
updateInterval: Seconds;
}
interface ContextTimeoutEvent {
callback: (...args: any[]) => void;
id: number;
time: Seconds;
}
/**
* Wrapper around the native AudioContext.
*/
export class Context extends Emitter implements BaseAudioContextSubset {
name = "Context";
static getDefaults(): ContextOptions {
return {
clockSource: "worker",
latencyHint: "interactive",
lookAhead: 0.1,
updateInterval: 0.03,
};
}
/**
* The amount of time into the future events are scheduled
*/
lookAhead: Seconds;
/**
* private reference to the BaseAudioContext
*/
private readonly _context: BaseAudioContext;
/**
* A reliable callback method
*/
private readonly _ticker: Ticker;
/**
* The default latency hint
*/
private _latencyHint: ContextLatencyHint | Seconds;
/**
* An object containing all of the constants AudioBufferSourceNodes
*/
private _constants = new Map<number, AudioBufferSourceNode>();
/**
* All of the setTimeout events.
*/
private _timeouts: Timeline<ContextTimeoutEvent>;
/**
* The timeout id counter
*/
private _timeoutIds = 0;
constructor(context: BaseAudioContext) {
super();
this._context = context;
const defaults = Context.getDefaults();
this._latencyHint = defaults.latencyHint;
this.lookAhead = defaults.lookAhead;
this._timeouts = new Timeline();
this._ticker = new Ticker(this.emit.bind(this, "tick"), defaults.clockSource, defaults.updateInterval);
this.on("tick", this._timeoutLoop.bind(this));
// fwd events from the context
this._context.addEventListener("statechange", () => {
this.emit("statechange", this.state);
});
}
///////////////////////////////////////////////////////////////////////
// BASE AUDIO CONTEXT METHODS
///////////////////////////////////////////////////////////////////////
createAnalyser(): AnalyserNode {
return this._context.createAnalyser();
}
createOscillator(): OscillatorNode {
return this._context.createOscillator();
}
createBufferSource(): AudioBufferSourceNode {
return this._context.createBufferSource();
}
createBiquadFilter(): BiquadFilterNode {
return this._context.createBiquadFilter();
}
createBuffer(numberOfChannels: number, length: number, sampleRate: number): AudioBuffer {
return this._context.createBuffer(numberOfChannels, length, sampleRate);
}
createChannelMerger(numberOfInputs?: number | undefined): ChannelMergerNode {
return this._context.createChannelMerger(numberOfInputs);
}
createChannelSplitter(numberOfOutputs?: number | undefined): ChannelSplitterNode {
return this._context.createChannelSplitter(numberOfOutputs);
}
createConstantSource(): ConstantSourceNode {
return this._context.createConstantSource();
}
createConvolver(): ConvolverNode {
return this._context.createConvolver();
}
createDelay(maxDelayTime?: number | undefined): DelayNode {
return this._context.createDelay(maxDelayTime);
}
createDynamicsCompressor(): DynamicsCompressorNode {
return this._context.createDynamicsCompressor();
}
createGain(): GainNode {
return this._context.createGain();
}
createIIRFilter(feedforward: number[], feedback: number[]): IIRFilterNode {
return this._context.createIIRFilter(feedforward, feedback);
}
createPanner(): PannerNode {
return this._context.createPanner();
}
createPeriodicWave(
real: number[] | Float32Array,
imag: number[] | Float32Array,
constraints?: PeriodicWaveConstraints | undefined,
): PeriodicWave {
return this._context.createPeriodicWave(real, imag, constraints);
}
createStereoPanner(): StereoPannerNode {
return this._context.createStereoPanner();
}
createWaveShaper(): WaveShaperNode {
return this._context.createWaveShaper();
}
decodeAudioData(audioData: ArrayBuffer): Promise<AudioBuffer> {
return this._context.decodeAudioData(audioData);
}
/**
* The audio output destination. Alias for Tone.Master
*/
get destination(): AudioDestinationNode {
return this._context.destination;
}
/**
* The current time in seconds of the AudioContext.
*/
get currentTime(): Seconds {
return this._context.currentTime;
}
/**
* The current state of the AudioContext.
*/
get state(): AudioContextState {
return this._context.state;
}
/**
* The sample rate of the AudioContext in samples per second.
*/
get sampleRate(): number {
return this._context.sampleRate;
}
/**
* The listener
*/
get listener(): AudioListener {
return this._context.listener;
}
///////////////////////////////////////////////////////////////////////
// TICKER
///////////////////////////////////////////////////////////////////////
/**
* How often the interval callback is invoked.
* This number corresponds to how responsive the scheduling
* can be. context.updateInterval + context.lookAhead gives you the
* total latency between scheduling an event and hearing it.
*/
get updateInterval(): Seconds {
return this._ticker.updateInterval;
}
set updateInterval(interval: Seconds) {
this._ticker.updateInterval = interval;
}
/**
* What the source of the clock is, either "worker" (Web Worker [default]),
* "timeout" (setTimeout), or "offline" (none).
*/
get clockSource(): TickerClockSource {
return this._ticker.type;
}
set clockSource(type: TickerClockSource) {
this._ticker.type = type;
}
/**
* The type of playback, which affects tradeoffs between audio
* output latency and responsiveness.
*
* In addition to setting the value in seconds, the latencyHint also
* accepts the strings "interactive" (prioritizes low latency),
* "playback" (prioritizes sustained playback), "balanced" (balances
* latency and performance), and "fastest" (lowest latency, might glitch more often).
* @example
* //set the lookAhead to 0.3 seconds
* Tone.context.latencyHint = 0.3;
*/
get latencyHint(): ContextLatencyHint | Seconds {
return this._latencyHint;
}
set latencyHint(hint: ContextLatencyHint | Seconds) {
let lookAheadValue = 0;
this._latencyHint = hint;
if (isString(hint)) {
switch (hint) {
case "interactive":
lookAheadValue = 0.1;
break;
case "playback":
lookAheadValue = 0.8;
break;
case "balanced":
lookAheadValue = 0.25;
break;
case "fastest":
lookAheadValue = 0.01;
break;
}
}
this.lookAhead = lookAheadValue;
this.updateInterval = lookAheadValue / 3;
}
/**
* The unwrapped AudioContext.
*/
get rawContext(): BaseAudioContext {
return this._context;
}
/**
* The current audio context time plus the lookAhead
*/
now(): Seconds {
return this._context.currentTime + this.lookAhead;
}
/**
* Starts the audio context from a suspended state. This is required
* to initially start the AudioContext.
*/
resume(): Promise<void> {
if (this._context.state === "suspended" && this._context instanceof AudioContext) {
return this._context.resume();
} else {
return Promise.resolve();
}
}
/**
* Close the underlying AudioContext. Once closed, the context can no longer be used.
* @return {Promise} Resolves with this Context once the native context has closed.
*/
async close(): Promise<Context> {
if (this._context instanceof AudioContext) {
await this._context.close();
}
return this;
}
/**
* Generate a looped buffer at some constant value.
*/
getConstant(val: number): AudioBufferSourceNode {
if (this._constants.has(val)) {
return this._constants.get(val) as AudioBufferSourceNode;
} else {
const buffer = this._context.createBuffer(1, 128, this._context.sampleRate);
const arr = buffer.getChannelData(0);
for (let i = 0; i < arr.length; i++) {
arr[i] = val;
}
const constant = this._context.createBufferSource();
constant.channelCount = 1;
constant.channelCountMode = "explicit";
constant.buffer = buffer;
constant.loop = true;
constant.start(0);
this._constants.set(val, constant);
return constant;
}
}
/**
* Clean up. Also closes the audio context.
*/
dispose(): Context {
this._ticker.dispose();
this._timeouts.dispose();
this._constants.forEach(constant => constant.disconnect());
this.close();
return this;
}
///////////////////////////////////////////////////////////////////////
// TIMEOUTS
///////////////////////////////////////////////////////////////////////
/**
* The private loop which keeps track of the context scheduled timeouts
* Is invoked from the clock source
*/
private _timeoutLoop(): void {
const now = this.now();
let firstEvent = this._timeouts.peek();
while (this._timeouts.length && firstEvent && firstEvent.time <= now) {
// invoke the callback
firstEvent.callback();
// shift the invoked event off before checking the next one
this._timeouts.shift();
firstEvent = this._timeouts.peek();
}
}
/**
* A setTimeout which is guaranteed to be invoked by the clock source.
* Also runs in the offline context.
* @param fn The callback to invoke
* @param timeout The timeout in seconds
* @returns ID to use when invoking Context.clearTimeout
*/
setTimeout(fn: (...args: any[]) => void, timeout: Seconds): number {
this._timeoutIds++;
const now = this.now();
this._timeouts.add({
callback : fn,
id : this._timeoutIds,
time : now + timeout,
});
return this._timeoutIds;
}
/**
* Clears a previously scheduled timeout with Tone.context.setTimeout
* @param id The ID returned from setTimeout
*/
clearTimeout(id: number): Context {
this._timeouts.forEach(event => {
if (event.id === id) {
this._timeouts.remove(event);
}
});
return this;
}
}
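A rough usage sketch (not part of this commit) of driving the wrapper; the latency hint and timeout values are arbitrary:
import { Context } from "./Context";

const context = new Context(new AudioContext());
// trade scheduling tightness for playback stability
context.latencyHint = "playback";
// schedule a callback half a second ahead on the Ticker-driven clock
const id = context.setTimeout(() => console.log("fired"), 0.5);
// and cancel it again if it is no longer needed
context.clearTimeout(id);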

146
Tone/core/ContextTicker.ts Normal file

@@ -0,0 +1,146 @@
export type TickerClockSource = "worker" | "timeout" | "offline";
/**
* A class which provides a reliable callback using either
* a Web Worker, or if that isn't supported, falls back to setTimeout.
*/
export class Ticker {
/**
* Either "worker" or "timeout" or "offline"
*/
private _type: TickerClockSource;
/**
* The update interval of the worker
*/
private _updateInterval: Seconds;
/**
* The callback to invoke at regular intervals
*/
private _callback: () => void;
/**
* track the callback interval
*/
private _timeout!: number;
/**
* private reference to the worker
*/
private _worker!: Worker;
constructor(callback: () => void, type: TickerClockSource, updateInterval: Seconds) {
this._callback = callback;
this._type = type;
this._updateInterval = updateInterval;
// create the clock source for the first time
this._createClock();
}
/**
* Generate a web worker
*/
private _createWorker(): void {
const blob = new Blob([
// the initial timeout time
`var timeoutTime = ${(this._updateInterval * 1000).toFixed(1)};
// onmessage callback
self.onmessage = function(msg){
timeoutTime = parseInt(msg.data);
};
// the tick function which posts a message
// and schedules a new tick
function tick(){
setTimeout(tick, timeoutTime);
self.postMessage('tick');
}
// call tick initially
tick();`,
]);
const blobUrl = window.URL.createObjectURL(blob);
const worker = new Worker(blobUrl);
worker.onmessage = this._callback.bind(this);
this._worker = worker;
}
/**
* Create a timeout loop
*/
private _createTimeout(): void {
this._timeout = window.setTimeout(() => {
this._createTimeout();
this._callback();
}, this._updateInterval * 1000);
}
/**
* Create the clock source.
*/
private _createClock(): void {
if (this._type === "worker") {
try {
this._createWorker();
} catch (e) {
// workers not supported, fallback to timeout
this._type = "timeout";
this._createClock();
}
} else if (this._type === "timeout") {
this._createTimeout();
}
}
/**
* Clean up the current clock source
*/
private _disposeClock(): void {
if (this._timeout) {
clearTimeout(this._timeout);
this._timeout = 0;
}
if (this._worker) {
this._worker.terminate();
this._worker.onmessage = null;
}
}
/**
* The rate in seconds the ticker will update
*/
get updateInterval(): Seconds {
return this._updateInterval;
}
set updateInterval(interval: Seconds) {
this._updateInterval = Math.max(interval, 128 / 44100);
if (this._type === "worker") {
this._worker.postMessage(Math.max(interval * 1000, 1));
}
}
/**
* The type of the ticker, either a worker or a timeout
*/
get type(): TickerClockSource {
return this._type;
}
set type(type: TickerClockSource) {
this._disposeClock();
this._type = type;
this._createClock();
}
/**
* Clean up
*/
dispose(): void {
this._disposeClock();
}
}
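A minimal sketch (not part of this commit) of using the Ticker directly; in practice it is owned by Context:
import { Ticker } from "./ContextTicker";

// fire roughly every 30 ms, falling back to setTimeout if Workers are unavailable
const ticker = new Ticker(() => console.log("tick"), "worker", 0.03);
ticker.updateInterval = 0.05; // slow the callback rate down
ticker.dispose(); // terminates the worker or clears the timeout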

20
Tone/core/Decorator.ts Normal file

@@ -0,0 +1,20 @@
function optionsFromArguments<Options>(defaults: Options, args: any[], keys: string[]): Options {
return defaults;
}
type ObjectConstructor = new (...args: any[]) => {};
export function useDefaultsAndArguments<DefaultOptions>(
defaults: DefaultOptions,
optionsOrder: string[],
) {
return <T extends ObjectConstructor>(classDef: T) => {
return class extends classDef {
// assign the instance defaults
defaults: DefaultOptions = defaults;
constructor(...args: any[]) {
super(optionsFromArguments(defaults, args, optionsOrder));
}
};
};
}

120
Tone/core/Emitter.ts Normal file

@@ -0,0 +1,120 @@
import { Tone } from "./Tone";
import { isUndef } from "./Util";
interface EventObject {
[event: string]: Array<(...args: any[]) => void>;
}
/**
* Emitter gives classes which extend it
* the ability to listen for and emit events.
* Inspiration and reference from Jerome Etienne's [MicroEvent](https://github.com/jeromeetienne/microevent.js).
* MIT (c) 2011 Jerome Etienne.
*/
export class Emitter extends Tone {
name = "Emitter";
/**
* Private container for the events
*/
private _events: EventObject = {};
/**
* Bind a callback to a specific event.
* @param event The name of the event to listen for.
* @param callback The callback to invoke when the event is emitted
*/
on(event: string, callback: (...args: any[]) => void): Emitter {
// split the event
const events = event.split(/\W+/);
events.forEach(eventName => {
if (!this._events.hasOwnProperty(eventName)) {
this._events[eventName] = [];
}
this._events[eventName].push(callback);
});
return this;
}
/**
* Bind a callback which is only invoked once
* @param event The name of the event to listen for.
* @param callback The callback to invoke when the event is emitted
*/
once(event: string, callback: (...args: any[]) => void): Emitter {
const boundCallback = (...args: any[]) => {
// invoke the callback
callback(...args);
// remove the event
this.off(event, boundCallback);
};
this.on(event, boundCallback);
return this;
}
/**
* Remove the event listener.
* @param event The event to stop listening to.
* @param callback The callback which was bound to the event with Emitter.on.
* If no callback is given, all callbacks bound to the event are removed.
*/
off(event: string, callback?: (...args: any[]) => void): Emitter {
const events = event.split(/\W+/);
events.forEach(eventName => {
if (this._events.hasOwnProperty(eventName)) {
if (isUndef(callback)) {
this._events[eventName] = [];
} else {
const eventList = this._events[eventName];
for (let i = 0; i < eventList.length; i++) {
if (eventList[i] === callback) {
eventList.splice(i, 1);
}
}
}
}
});
return this;
}
/**
* Invoke all of the callbacks bound to the event
* with any arguments passed in.
* @param event The name of the event.
* @param args The arguments to pass to the functions listening.
*/
emit(event: string, ...args: any[]): Emitter {
if (this._events) {
if (this._events.hasOwnProperty(event)) {
const eventList = this._events[event].slice(0);
for (let i = 0, len = eventList.length; i < len; i++) {
eventList[i].apply(this, args);
}
}
}
return this;
}
/**
* Add Emitter functions (on/off/emit) to the object
*/
// static mixin(object) {
// const functions = ["on", "once", "off", "emit"];
// object._events = {};
// functions.forEach(func => {
// const emitterFunc = Emitter.prototype[func];
// object[func] = emitterFunc;
// });
// return Emitter;
// }
/**
* Clean up
*/
dispose(): Emitter {
this._events = {};
return this;
}
}
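A short sketch (not part of this commit) of the on/once/emit/off flow; the event names are arbitrary:
import { Emitter } from "./Emitter";

const emitter = new Emitter();
// a space-separated string binds the callback to several events at once
emitter.on("start stop", state => console.log("got", state));
emitter.once("start", () => console.log("only the first start"));
emitter.emit("start", "started"); // invokes both callbacks
emitter.off("stop"); // removes every "stop" listener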

14
Tone/core/FromContext.ts Normal file

@@ -0,0 +1,14 @@
import { Gain } from "../node/Gain";
import { isUndef } from "./Util";
export function fromContext(context) {
const Tone: any = {};
if (isUndef(Gain.prototype.defaultContext)) {
Tone.Gain = class extends Gain {
get defaultContext() {
return context;
}
};
}
return Tone;
}

65
Tone/core/Global.ts Normal file

@@ -0,0 +1,65 @@
import { isUndef } from "./Util";
/**
* The global audio context, which can be read and assigned through
* getContext and setContext
*/
let globalContext: BaseAudioContext;
/**
* Returns the default system-wide AudioContext
*/
export function getContext(): BaseAudioContext {
if (!globalContext) {
globalContext = new AudioContext();
}
return globalContext;
}
/**
* Set the default audio context
*/
export function setContext(context: BaseAudioContext): void {
globalContext = context;
}
/**
* Most browsers will not play _any_ audio until a user
* clicks something (like a play button). Invoke this method
* on a click or keypress event handler to start the audio context.
* More about the Autoplay policy
* [here](https://developers.google.com/web/updates/2017/09/autoplay-policy-changes#webaudio)
* @example
* document.querySelector('#playbutton').addEventListener('click', () => Tone.start())
*/
export function start(): Promise<void> {
return globalContext.resume();
}
/**
* True if the current environment has the necessary APIs to run Tone.js
*/
// // export const supported: boolean = toneGlobal.hasOwnProperty("Promise") && toneGlobal.hasOwnProperty("AudioContext");
// set the audio context initially, and if one is not already created
// if (Tone.supported && !Tone.initialized){
// if (!Tone.global.TONE_AUDIO_CONTEXT){
// Tone.global.TONE_AUDIO_CONTEXT = new Context();
// }
// Tone.context = Tone.global.TONE_AUDIO_CONTEXT;
// // log on first initialization
// // allow optional silencing of this log
// if (!Tone.global.TONE_SILENCE_LOGGING){
// var prefix = "v";
// if (Tone.version === "dev"){
// prefix = "";
// }
// var printString = " * Tone.js " + prefix + Tone.version + " * ";
// // eslint-disable-next-line no-console
// console.log("%c" + printString, "background: #000; color: #fff");
// }
// } else if (!Tone.supported && !Tone.global.TONE_SILENCE_LOGGING){
// // eslint-disable-next-line no-console
// console.warn("This browser does not support Tone.js");
// }
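A usage sketch (not part of this commit), assuming a browser environment and a hypothetical #playbutton element:
import { getContext, start } from "./Global";

const context = getContext(); // lazily creates the global AudioContext
const button = document.querySelector("#playbutton");
if (button) {
    button.addEventListener("click", async () => {
        await start(); // resume the context from a user gesture
        console.log(context.state); // "running"
    });
}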

370
Tone/core/Timeline.ts Normal file

@@ -0,0 +1,370 @@
import { Tone } from "./Tone";
import { optionsFromArguments } from "./Util";
interface TimelineOptions {
memory: number;
}
/**
* An event must have a time number
*/
interface TimelineEvent {
time: number;
}
/**
* @class A Timeline class for scheduling and maintaining state
* along a timeline. All events must have a "time" property.
* Internally, events are stored in time order for fast
* retrieval.
* @extends {Tone}
* @param {Positive} [memory=Infinity] The number of previous events that are retained.
*/
export class Timeline<GenericEvent extends TimelineEvent> extends Tone {
name = "Timeline";
/**
* The memory of the timeline, i.e.
* how many events in the past it will retain
*/
memory: number;
/**
* The array of scheduled timeline events
*/
private _timeline: GenericEvent[];
constructor(options?: Partial<TimelineOptions>);
constructor(memory?: number);
constructor() {
super();
const options = optionsFromArguments(Timeline.getDefaults(), arguments, ["memory"]);
this._timeline = [];
this.memory = options.memory;
}
static getDefaults(): TimelineOptions {
return {
memory: Infinity,
};
}
/**
* The number of items in the timeline.
*/
get length(): number {
return this._timeline.length;
}
/**
* Insert an event object onto the timeline. Events must have a "time" attribute.
* @param event The event object to insert into the timeline.
*/
add(event: GenericEvent): Timeline<GenericEvent> {
// the event needs to have a time attribute
this.assert(Reflect.has(event, "time"), "Timeline: events must have a time attribute");
event.time = event.time.valueOf();
const index = this._search(event.time);
this._timeline.splice(index + 1, 0, event);
// if the length is more than the memory, remove the previous ones
if (this.length > this.memory) {
const diff = this.length - this.memory;
this._timeline.splice(0, diff);
}
return this;
}
/**
* Remove an event from the timeline.
* @param {Object} event The event object to remove from the list.
* @returns {Timeline} this
*/
remove(event: GenericEvent): Timeline<GenericEvent> {
const index = this._timeline.indexOf(event);
if (index !== -1) {
this._timeline.splice(index, 1);
}
return this;
}
/**
* Get the nearest event whose time is less than or equal to the given time.
* @param time The time to query.
*/
get(time: number): GenericEvent | null {
const index = this._search(time);
if (index !== -1) {
return this._timeline[index];
} else {
return null;
}
}
/**
* Return the first event in the timeline without removing it
* @returns {Object} The first event object
*/
peek(): GenericEvent | undefined {
return this._timeline[0];
}
/**
* Return the first event in the timeline and remove it
*/
shift(): GenericEvent | undefined {
return this._timeline.shift();
}
/**
* Get the event which is scheduled after the given time.
* @param time The time to query.
*/
getAfter(time: number): GenericEvent | null {
const index = this._search(time);
if (index + 1 < this._timeline.length) {
return this._timeline[index + 1];
} else {
return null;
}
}
/**
* Get the event before the event at the given time.
* @param time The time to query.
*/
getBefore(time: number): GenericEvent | null {
const len = this._timeline.length;
// if it's after the last item, return the last item
if (len > 0 && this._timeline[len - 1].time < time) {
return this._timeline[len - 1];
}
const index = this._search(time);
if (index - 1 >= 0) {
return this._timeline[index - 1];
} else {
return null;
}
}
/**
* Cancel events at and after the given time
* @param after The time at and after which events are cancelled.
*/
cancel(after: number): Timeline<GenericEvent> {
if (this._timeline.length > 1) {
let index = this._search(after);
if (index >= 0) {
if (this._timeline[index].time === after) {
// get the first item with that time
for (let i = index; i >= 0; i--) {
if (this._timeline[i].time === after) {
index = i;
} else {
break;
}
}
this._timeline = this._timeline.slice(0, index);
} else {
this._timeline = this._timeline.slice(0, index + 1);
}
} else {
this._timeline = [];
}
} else if (this._timeline.length === 1) {
// the first item's time
if (this._timeline[0].time >= after) {
this._timeline = [];
}
}
return this;
}
/**
* Cancel events before or equal to the given time.
* @param {Number} time The time to cancel before.
* @returns {Timeline} this
*/
cancelBefore(time: number): Timeline<GenericEvent> {
const index = this._search(time);
if (index >= 0) {
this._timeline = this._timeline.slice(index + 1);
}
return this;
}
/**
* Returns the previous event if there is one. null otherwise
* @param {Object} event The event to find the previous one of
* @return {Object} The event right before the given event
*/
previousEvent(event: GenericEvent): GenericEvent | null {
const index = this._timeline.indexOf(event);
if (index > 0) {
return this._timeline[index - 1];
} else {
return null;
}
}
/**
* Does a binary search on the timeline array and returns the
* nearest event index whose time is less than or equal to the given time.
* If a time is searched before the first index in the timeline, -1 is returned.
* If the time is after the end, the index of the last item is returned.
* @param time
* @private
*/
private _search(time: number): number {
if (this._timeline.length === 0) {
return -1;
}
let beginning = 0;
const len = this._timeline.length;
let end = len;
if (len > 0 && this._timeline[len - 1].time <= time) {
return len - 1;
}
while (beginning < end) {
// calculate the midpoint for roughly equal partition
let midPoint = Math.floor(beginning + (end - beginning) / 2);
const event = this._timeline[midPoint];
const nextEvent = this._timeline[midPoint + 1];
if (event.time === time) {
// choose the last one that has the same time
for (let i = midPoint; i < this._timeline.length; i++) {
const testEvent = this._timeline[i];
if (testEvent.time === time) {
midPoint = i;
}
}
return midPoint;
} else if (event.time < time && nextEvent.time > time) {
return midPoint;
} else if (event.time > time) {
// search lower
end = midPoint;
} else {
// search upper
beginning = midPoint + 1;
}
}
return -1;
}
/**
* Internal iterator. Applies extra safety checks for
* removing items from the array.
*/
private _iterate(
callback: (event: GenericEvent) => void,
lowerBound = 0, upperBound = this._timeline.length - 1,
): void {
this._timeline.slice(lowerBound, upperBound + 1).forEach(callback);
}
/**
* Iterate over everything in the array
* @param callback The callback to invoke with every item
*/
forEach(callback: (event: GenericEvent) => void): Timeline<GenericEvent> {
this._iterate(callback);
return this;
}
/**
* Iterate over everything in the array at or before the given time.
* @param time The time to check if items are before
* @param callback The callback to invoke with every item
*/
forEachBefore(time: number, callback: (event: GenericEvent) => void): Timeline<GenericEvent> {
// _iterate works on a copy of the array, so callbacks may safely remove items
const upperBound = this._search(time);
if (upperBound !== -1) {
this._iterate(callback, 0, upperBound);
}
return this;
}
/**
* Iterate over everything in the array after the given time.
* @param time The time to check if items are before
* @param callback The callback to invoke with every item
*/
forEachAfter(time: number, callback: (event: GenericEvent) => void): Timeline<GenericEvent> {
// _iterate works on a copy of the array, so callbacks may safely remove items
const lowerBound = this._search(time);
this._iterate(callback, lowerBound + 1);
return this;
}
/**
* Iterate over everything in the array between the startTime and endTime.
* The timerange is inclusive of the startTime, but exclusive of the endTime.
* range = [startTime, endTime).
* @param startTime The time to check if items are before
* @param endTime The end of the test interval.
* @param callback The callback to invoke with every item
*/
forEachBetween(startTime: number, endTime: number, callback: (event: GenericEvent) => void): Timeline<GenericEvent> {
let lowerBound = this._search(startTime);
let upperBound = this._search(endTime);
if (lowerBound !== -1 && upperBound !== -1) {
if (this._timeline[lowerBound].time !== startTime) {
lowerBound += 1;
}
// exclusive of the end time
if (this._timeline[upperBound].time === endTime) {
upperBound -= 1;
}
this._iterate(callback, lowerBound, upperBound);
} else if (lowerBound === -1) {
this._iterate(callback, 0, upperBound);
}
return this;
}
/**
* Iterate over everything in the array at or after the given time. Similar to
* forEachAfter, but includes the item(s) at the given time.
* @param time The time to check if items are before
* @param callback The callback to invoke with every item
*/
forEachFrom(time: number, callback: (event: GenericEvent) => void): Timeline<GenericEvent> {
// _iterate works on a copy of the array, so callbacks may safely remove items
let lowerBound = this._search(time);
// work backwards until the event time is less than time
while (lowerBound >= 0 && this._timeline[lowerBound].time >= time) {
lowerBound--;
}
this._iterate(callback, lowerBound + 1);
return this;
}
/**
* Iterate over everything in the array at the given time
* @param time The time to check if items are before
* @param callback The callback to invoke with every item
*/
forEachAtTime(time: number, callback: (event: GenericEvent) => void): Timeline<GenericEvent> {
// _iterate works on a copy of the array, so callbacks may safely remove items
const upperBound = this._search(time);
if (upperBound !== -1) {
this._iterate(event => {
if (event.time === time) {
callback(event);
}
}, 0, upperBound);
}
return this;
}
/**
* Clean up.
*/
dispose(): Timeline<GenericEvent> {
this._timeline = [];
return this;
}
}
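A usage sketch (not part of this commit); the note field is an arbitrary payload added for illustration:
import { Timeline } from "./Timeline";

const timeline = new Timeline<{ time: number; note: string }>({ memory: 100 });
timeline.add({ time: 0, note: "C4" });
timeline.add({ time: 0.5, note: "E4" });
timeline.add({ time: 1, note: "G4" });

timeline.get(0.6); // the event at 0.5 (nearest at or before the given time)
timeline.getAfter(0.5); // the event at 1
timeline.forEachBetween(0, 1, e => console.log(e.note)); // "C4", "E4" (end exclusive)
timeline.cancel(0.5); // removes the events at 0.5 and 1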

147
Tone/core/Tone.ts Normal file

@@ -0,0 +1,147 @@
/**
* Tone.js
* @author Yotam Mann
* @license http://opensource.org/licenses/MIT MIT License
* @copyright 2014-2019 Yotam Mann
*/
import "../type/Units";
import { version } from "../version";
///////////////////////////////////////////////////////////////////////////
// TONE
///////////////////////////////////////////////////////////////////////////
// tslint:disable-next-line: no-empty-interface
export interface BaseToneOptions {}
/**
* @class Tone is the base class of all other classes.
* @constructor
*/
export abstract class Tone {
/**
* The version number semver
*/
static version: string = version;
/**
* The name of the class
*/
protected abstract name: string;
/**
* disconnect and dispose.
*/
abstract dispose(): Tone;
/**
* Returns all of the default options belonging to the class.
*/
static getDefaults(): BaseToneOptions {
return {};
}
///////////////////////////////////////////////////////////////////////////
// DEBUGGING
///////////////////////////////////////////////////////////////////////////
/**
* Set this debug flag to log all events that happen in this class.
*/
protected debug: boolean = false;
/**
* Prints the outputs to the console log for debugging purposes.
* Prints the contents only if either the object has a property
* called `debug` set to true, or a variable called TONE_DEBUG_CLASS
* is set to the name of the class.
* @param args
* @example
* //prints all logs originating from Tone.OscillatorNode
* Tone.global.TONE_DEBUG_CLASS = "OscillatorNode"
*/
protected log(...args: any[]): void {
// if the object is either set to debug = true
// or if there is a string set on Tone.global with the class name
// if (this.debug || this.toString() === global.TONE_DEBUG_CLASS) {
// args.unshift(this.toString() + ":");
// // eslint-disable-next-line no-console
// console.log(...args);
// }
}
/**
* Assert that the statement is true, otherwise invoke the error.
* @param {Boolean} statement
* @param {String} error The message which is passed into an Error
* @private
*/
protected assert(statement: boolean, error: string): void {
if (!statement) {
throw new Error(error);
}
}
///////////////////////////////////////////////////////////////////////////
// DEFAULTS
///////////////////////////////////////////////////////////////////////////
/**
* If the `given` parameter is undefined, use the `fallback`.
* If both `given` and `fallback` are object literals, it will
* return a deep copy which includes all of the parameters from both
* objects. If a parameter is undefined in given, it will return
* the fallback property.
* <br><br>
* WARNING: if the object is self-referential, it will go into an
* infinite recursive loop.
* @memberOf Tone
* @param {*} given
* @param {*} fallback
* @return {*}
*/
// static defaultArg(given, fallback) {
// if (isObject(given) && isObject(fallback)) {
// const ret = {};
// // make a deep copy of the given object
// for (const givenProp in given) {
// ret[givenProp] = Tone.defaultArg(fallback[givenProp], given[givenProp]);
// }
// for (const fallbackProp in fallback) {
// ret[fallbackProp] = Tone.defaultArg(given[fallbackProp], fallback[fallbackProp]);
// }
// return ret;
// } else {
// return isUndef(given) ? fallback : given;
// }
// }
// protected options(argsArray: IArguments, keys: string[]): object {
// let options: any = {};
// const args = Array.from(argsArray);
// if (args[0] instanceof BaseAudioContext) {
// options.context = args.shift();
// }
// if (args.length === 1 && isObject(args[0])) {
// options = Object.assign(options, args[0]);
// } else {
// for (let i = 0; i < keys.length; i++) {
// if (isDefined(args[i])) {
// options[keys[i]] = args[i];
// }
// }
// }
// return deepMerge(this.getDefaults(), options);
// }
/**
* Convert the class to a string
* @example
* const osc = new Oscillator()
* osc.toString() // "Oscillator"
*/
toString(): string {
return this.name;
}
}
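A minimal concrete subclass (not part of this commit), shown only to illustrate the abstract contract:
import { Tone } from "./Tone";

class Metronome extends Tone {
    name = "Metronome";
    dispose(): this {
        // nothing to release in this sketch
        return this;
    }
}

const metro = new Metronome();
metro.toString(); // "Metronome"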

169
Tone/core/Util.ts Normal file

@@ -0,0 +1,169 @@
import { BaseToneOptions } from "./Tone";
// return an interface which excludes certain keys
export type Omit<T, K extends keyof T> = Pick<T, Exclude<keyof T, K>>;
/**
* Recursively merge an object
* @param target the object to merge into
* @param sources the source objects to merge
*/
export function deepMerge<T>(target: T, ...sources: T[]): T {
if (!sources.length) { return target; }
const source = sources.shift();
if (isObject(target) && isObject(source)) {
for (const key in source) {
if (isObject(source[key])) {
if (!target[key]) { Object.assign(target, { [key]: {} }); }
deepMerge(target[key], source[key] as any);
} else {
Object.assign(target, { [key]: source[key] as any });
}
}
}
return deepMerge(target, ...sources);
}
/**
* Convert an args array into an object.
*/
export function optionsFromArguments<T>(defaults: T, argsArray: IArguments, keys: string[]): T {
const opts: any = {};
const args = Array.from(argsArray);
if (args.length === 1 && isObject(args[0])) {
deepMerge(opts, args[0]);
} else {
for (let i = 0; i < keys.length; i++) {
if (isDefined(args[i])) {
opts[keys[i]] = args[i];
}
}
}
return deepMerge(defaults, opts);
}
/**
* Return this instance's default values by calling Constructor.getDefaults()
*/
export function getDefaultsFromInstance<T>(instance: T): BaseToneOptions {
type ToneClass = {
constructor: ToneClass;
getDefaults: () => BaseToneOptions;
} & T;
return (instance as ToneClass).constructor.getDefaults();
}
/**
* Take an array of arguments and return a formatted options object.
* @param args the arguments passed into the function
* @param keys an array of keys
* @param defaults the class's defaults
*/
// export function defaultArg<T>(given: T, fallback): T {
// }
/**
* Test if the arg is undefined
*/
export function isUndef(arg: any): arg is undefined {
return typeof arg === "undefined";
}
/**
* Test if the arg is not undefined
*/
export function isDefined<T>(arg: T | undefined): arg is T {
return !isUndef(arg);
}
/**
* Test if the arg is a function
*/
export function isFunction(arg: any): arg is (a: any) => any {
return typeof arg === "function";
}
/**
* Test if the argument is a number.
*/
export function isNumber(arg: any): arg is number {
return (typeof arg === "number");
}
/**
* Test if the given argument is an object literal (i.e. `{}`);
*/
export function isObject(arg: any): arg is object {
return (Object.prototype.toString.call(arg) === "[object Object]" && arg.constructor === Object);
}
/**
* Test if the argument is a boolean.
*/
export function isBoolean(arg: any): arg is boolean {
return (typeof arg === "boolean");
}
/**
* Test if the argument is an Array
*/
export function isArray(arg: any): arg is any[] {
return (Array.isArray(arg));
}
/**
* Test if the argument is a string.
*/
export function isString(arg: any): arg is string {
return (typeof arg === "string");
}
/**
* Test if the argument is in the form of a note in scientific pitch notation.
* e.g. "C4"
*/
export function isNote(arg: any): arg is Note {
return isString(arg) && /^([a-g]{1}(?:b|#|x|bb)?)(-?[0-9]+)/i.test(arg);
}
/**
* Make the property not writable using `defineProperty`. Internal use only.
*/
export function readOnly(target: object, property: string | string[]): void {
if (isArray(property)) {
property.forEach(str => readOnly(target, str));
} else {
Object.defineProperty(target, property, {
enumerable : true,
writable : false,
});
}
}
/**
* Make an attribute writeable. Internal use only.
*/
export function writable(target: object, property: string | string[]): void {
if (isArray(property)) {
property.forEach(str => writable(target, str));
} else {
Object.defineProperty(target, property, {
writable : true,
});
}
}
/**
* Apply a mixin to extend the derived constructor with the prototype of the baseConstructors
*/
export function applyMixins(derivedCtor: any, baseCtors: any[]): void {
baseCtors.forEach(baseCtor => {
Object.getOwnPropertyNames(baseCtor.prototype).forEach(name => {
derivedCtor.prototype[name] = baseCtor.prototype[name];
});
});
}
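A sketch (not part of this commit) of how optionsFromArguments resolves positional arguments and options objects; makeOptions and its keys are hypothetical:
import { optionsFromArguments } from "./Util";

const defaults = { frequency: 440, detune: 0 };
// in the classes above, a fresh defaults object comes from getDefaults() on every call
function makeOptions(...args: any[]): { frequency: number; detune: number } {
    return optionsFromArguments(defaults, arguments, ["frequency", "detune"]);
}
makeOptions(220, 10); // positional arguments
makeOptions({ frequency: 220, detune: 10 }); // a single options object gives the same result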

185
Tone/node/AbstractParam.ts Normal file

@@ -0,0 +1,185 @@
import "../type/Units";
import { AudioProcessor, AudioProcessorOptions } from "./AudioProcessor";
export abstract class AbstractParam<Type extends Unit> {
/**
* Schedules a parameter value change at the given time.
* @param value The value to set the signal.
* @param time The time when the change should occur.
* @example
* //set the frequency to "G4" in exactly 1 second from now.
* freq.setValueAtTime("G4", "+1");
*/
abstract setValueAtTime(value: UnitMap[Type], time: Time): this;
/**
* Get the signals value at the given time. Subsequent scheduling
* may invalidate the returned value.
* @param time When to get the value
*/
abstract getValueAtTime(time: Time): UnitMap[Type];
/**
* Creates a schedule point with the current value at the current time.
* This is useful for creating an automation anchor point in order to
* schedule changes from the current value.
* @param time When to add a ramp point.
* @example
* param.setRampPoint(Tone.now())
*/
abstract setRampPoint(time: Time): this;
/**
* Schedules a linear continuous change in parameter value from the
* previous scheduled parameter value to the given value.
*/
abstract linearRampToValueAtTime(value: UnitMap[Type], time: Time): this;
/**
* Schedules an exponential continuous change in parameter value from
* the previous scheduled parameter value to the given value.
*/
abstract exponentialRampToValueAtTime(value: UnitMap[Type], time: Time): this;
/**
* Schedules an exponential continuous change in parameter value from
* the current time and current value to the given value over the
* duration of the rampTime.
* @param value The value to ramp to.
* @param rampTime the time that it takes the
* value to ramp from its current value
* @param startTime When the ramp should start.
* @example
* //exponentially ramp to the value 2 over 4 seconds.
* signal.exponentialRampTo(2, 4);
*/
abstract exponentialRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this;
/**
* Schedules a linear continuous change in parameter value from
* the current time and current value to the given value over the
* duration of the rampTime.
*
* @param value The value to ramp to.
* @param rampTime the time that it takes the
* value to ramp from its current value
* @param startTime When the ramp should start.
* @returns {Param} this
* @example
* //linearly ramp to the value 4 over 3 seconds.
* signal.linearRampTo(4, 3);
*/
abstract linearRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this;
/**
* Start exponentially approaching the target value at the given time. Since it
* is an exponential approach it will continue approaching after the ramp duration. The
* rampTime is the time that it takes to reach over 99% of the way towards the value.
* @param value The value to ramp to.
* @param rampTime the time that it takes the
* value to ramp from its current value
* @param startTime When the ramp should start.
* @example
* //exponentially approach the value 2 over 4 seconds.
* signal.targetRampTo(2, 4);
*/
abstract targetRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this;
/**
* Start exponentially approaching the target value at the given time. Since it
* is an exponential approach it will continue approaching after the ramp duration. The
* rampTime is the time that it takes to reach over 99% of the way towards the value. This method
* is similar to setTargetAtTime except the third argument is a time instead of a 'timeConstant'
* @param value The value to ramp to.
* @param time When the ramp should start.
* @param rampTime the time that it takes the value to ramp from its current value
* @example
* //exponentially approach the value 2 over 4 seconds, starting now
* signal.exponentialApproachValueAtTime(2, Tone.now(), 4);
*/
abstract exponentialApproachValueAtTime(value: UnitMap[Type], time: Time, rampTime: Time): this;
/**
* Start exponentially approaching the target value at the given time with
* a rate having the given time constant.
* @param value
* @param startTime
* @param timeConstant
*/
abstract setTargetAtTime(value: UnitMap[Type], startTime: Time, timeConstant: number): this;
/**
* Sets an array of arbitrary parameter values starting at the given time
* for the given duration.
*
* @param values
* @param startTime
* @param duration
* @param scaling If the values in the curve should be scaled by some value
*/
abstract setValueCurveAtTime(values: Type[], startTime: Time, duration: Time, scaling?: number): this;
/**
* Cancels all scheduled parameter changes with times greater than or
* equal to startTime.
*/
abstract cancelScheduledValues(time: Time): this;
/**
* This is similar to [cancelScheduledValues](#cancelScheduledValues) except
* it holds the automated value at time until the next automated event.
*/
abstract cancelAndHoldAtTime(time: Time): this;
/**
* Ramps to the given value over the duration of the rampTime.
* Automatically selects the best ramp type (exponential or linear)
* depending on the `units` of the signal
*
* @param value
* @param rampTime The time that it takes the value to ramp from its current value
* @param startTime When the ramp should start.
* @example
* //ramp to the value either linearly or exponentially
* //depending on the "units" value of the signal
* signal.rampTo(0, 10);
* @example
* //schedule it to ramp starting at a specific time
* signal.rampTo(0, 10, 5)
*/
abstract rampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this;
/**
* The current value of the parameter. Setting this value
* is equivalent to setValueAtTime(value, context.currentTime)
*/
abstract value: UnitMap[Type];
/**
* If the value should be converted or not
*/
abstract convert: boolean;
/**
* The unit type
*/
abstract readonly units: Unit;
/**
* True if the signal value is being overridden by
* a connected signal. Internal use only.
*/
abstract readonly overridden: boolean;
/**
* The minimum value of the output given the units
*/
abstract readonly minValue: UnitMap[Type];
/**
* The maximum value of the output given the units
*/
abstract readonly maxValue: UnitMap[Type];
}
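A sketch (not part of this commit) of the intended automation workflow. The level constant is hypothetical and stands in for any concrete implementation of this interface, such as the Param wrapper used by Gain and Delay:
import { AbstractParam } from "./AbstractParam";

declare const level: AbstractParam<"gain">;

level.setValueAtTime(0, 0); // start silent
level.linearRampToValueAtTime(1, 0.5); // fade in over half a second
level.setTargetAtTime(0.5, 2, 0.1); // then settle towards 0.5
level.rampTo(0, 1); // fade out; the ramp shape is chosen from the units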

275
Tone/node/AudioNode.ts Normal file

@@ -0,0 +1,275 @@
import { connect, connectSeries, disconnect } from "../core/Connect";
import { isArray, isDefined, optionsFromArguments } from "../core/Util";
import { AudioProcessor, AudioProcessorOptions } from "./AudioProcessor";
import { Param } from "./Param";
export type InputNode = ToneAudioNode | AudioNode | AudioParam | Param<Unit>;
export type OutputNode = ToneAudioNode | AudioNode;
export interface ChannelProperties {
channelCount: number;
channelCountMode: ChannelCountMode;
channelInterpretation: ChannelInterpretation;
}
/**
* The possible options for this node
*/
export interface ToneAudioNodeOptions extends AudioProcessorOptions {
numberOfInputs: number;
numberOfOutputs: number;
channelCount: number;
channelCountMode: ChannelCountMode;
channelInterpretation: ChannelInterpretation;
}
/**
* ToneAudioNode is the base class for classes which process audio.
*/
export abstract class ToneAudioNode<Options extends ToneAudioNodeOptions = ToneAudioNodeOptions>
extends AudioProcessor<Options> {
abstract name = "ToneAudioNode";
/**
* The input node or nodes. If the object is a source,
* it does not have any input and this.input is undefined.
*/
abstract input: InputNode | InputNode[] | undefined;
/**
* The output nodes. If the object is a sink,
* it does not have any output and this.output is undefined.
*/
abstract output: OutputNode | OutputNode[] | undefined;
/**
* The number of inputs feeding into the AudioNode.
* For source nodes, this will be 0.
*/
readonly numberOfInputs: number;
/**
* The number of outputs of the AudioNode.
*/
readonly numberOfOutputs: number;
/**
* List all of the node that must be set to match the ChannelProperties
*/
protected abstract _internalChannels: OutputNode[];
static getDefaults(): ToneAudioNodeOptions {
return Object.assign(AudioProcessor.getDefaults(), {
channelCount: 2,
channelCountMode: "max" as ChannelCountMode,
channelInterpretation: "speakers" as ChannelInterpretation,
numberOfInputs: 0,
numberOfOutputs: 0,
});
}
constructor(options: Partial<ToneAudioNodeOptions>);
constructor() {
super(optionsFromArguments(ToneAudioNode.getDefaults(), arguments, ["context"]));
const options = optionsFromArguments(ToneAudioNode.getDefaults(), arguments, ["context"]);
this.numberOfInputs = options.numberOfInputs;
this.numberOfOutputs = options.numberOfOutputs;
}
protected createInsOuts(numberOfInputs: number = 0, numberOfOutputs: number = 0): void {
if (numberOfInputs === 1) {
this.input = this.context.createGain();
} else if (numberOfInputs > 1) {
this.input = [];
for (let i = 0; i < numberOfInputs; i++) {
this.input[i] = this.context.createGain();
}
}
if (numberOfOutputs === 1) {
this.output = this.context.createGain();
} else if (numberOfOutputs > 1) {
this.output = [];
for (let o = 0; o < numberOfOutputs; o++) {
this.output[o] = this.context.createGain();
}
}
}
///////////////////////////////////////////////////////////////////////////
// AUDIO PROPERTIES
///////////////////////////////////////////////////////////////////////////
/**
* Set the audio options for this node such as channelInterpretation
* channelCount, etc.
* @param options
*/
private _setChannelProperties(options: ChannelProperties): void {
if (this._internalChannels.length) {
this._internalChannels.forEach(node => {
node.channelCount = options.channelCount;
node.channelCountMode = options.channelCountMode;
node.channelInterpretation = options.channelInterpretation;
});
}
}
/**
* Get the current audio options for this node such as channelInterpretation
* channelCount, etc.
*/
private _getChannelProperties(): ChannelProperties {
if (this._internalChannels.length) {
const node = this._internalChannels[0];
return {
channelCount: node.channelCount,
channelCountMode: node.channelCountMode,
channelInterpretation: node.channelInterpretation,
};
} else {
// return the defaults
return {
channelCount: 2,
channelCountMode: "max",
channelInterpretation: "speakers",
};
}
}
/**
* channelCount is the number of channels used when up-mixing and down-mixing
* connections to any inputs to the node. The default value is 2 except for
* specific nodes where its value is specially determined.
*/
get channelCount(): number {
return this._getChannelProperties().channelCount;
}
set channelCount(channelCount: number) {
const props = this._getChannelProperties();
// merge it with the other properties
this._setChannelProperties(Object.assign(props, { channelCount }));
}
/**
* channelCountMode determines how channels will be counted when up-mixing and
* down-mixing connections to any inputs to the node.
* The default value is "max". This attribute has no effect for nodes with no inputs.
*/
get channelCountMode(): ChannelCountMode {
return this._getChannelProperties().channelCountMode;
}
set channelCountMode(channelCountMode: ChannelCountMode) {
const props = this._getChannelProperties();
// merge it with the other properties
this._setChannelProperties(Object.assign(props, { channelCountMode }));
}
/**
* channelInterpretation determines how individual channels will be treated
* when up-mixing and down-mixing connections to any inputs to the node.
* The default value is "speakers".
*/
get channelInterpretation(): ChannelInterpretation {
return this._getChannelProperties().channelInterpretation;
}
set channelInterpretation(channelInterpretation: ChannelInterpretation) {
const props = this._getChannelProperties();
// merge it with the other properties
this._setChannelProperties(Object.assign(props, { channelInterpretation }));
}
///////////////////////////////////////////////////////////////////////////
// CONNECTIONS
///////////////////////////////////////////////////////////////////////////
/**
* connect the output of a ToneAudioNode to an AudioParam, AudioNode, or ToneAudioNode
* @param destination The node or param to connect to
* @param outputNum The output to connect from
* @param inputNum The input to connect to
*/
connect(destination: InputNode, outputNum = 0, inputNum = 0): this {
connect(this, destination, outputNum, inputNum);
return this;
}
/**
* Connect the output to the context's destination node.
* alias for {@link toDestination}
*/
toMaster(): this {
this.connect(this.context.destination);
return this;
}
/**
* disconnect the output
* @param destination Optionally, the node to disconnect from; if omitted, all outputs are disconnected.
*/
disconnect(destination?: InputNode, outputNum = 0, inputNum = 0): this {
disconnect(this, destination, outputNum, inputNum);
return this;
}
/**
* Connect the output of this node to the rest of the nodes in series.
* @param nodes
* @example
* //connect a node to an effect, panVol and then to the master output
* node.chain(effect, panVol, Tone.Destination);
*/
chain(...nodes: InputNode[]): this {
connectSeries(this, ...nodes);
return this;
}
/**
* connect the output of this node to the rest of the nodes in parallel.
* @param nodes
* @returns this
*/
fan(...nodes: InputNode[]): this {
nodes.forEach(node => this.connect(node));
return this;
}
/**
* Dispose and disconnect
*/
dispose(): this {
if (isDefined(this.input)) {
if (isArray(this.input)) {
this.input.forEach(input => {
if (input instanceof ToneAudioNode) {
input.dispose();
} else if (input instanceof AudioNode) {
input.disconnect();
}
});
} else {
if (this.input instanceof AudioNode) {
this.input.disconnect();
}
}
}
if (isDefined(this.output)) {
if (isArray(this.output)) {
this.output.forEach(output => {
if (output instanceof ToneAudioNode) {
output.dispose();
} else {
output.disconnect();
}
});
} else {
this.output.disconnect();
}
}
this._internalChannels = [];
return this;
}
}
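A routing sketch (not part of this commit); the Delay instances stand in for any ToneAudioNode and the delay times are arbitrary:
import { Delay } from "./Delay";

const source = new Delay(0.01);
const effectA = new Delay(0.25);
const effectB = new Delay(0.5);

source.chain(effectA, effectB); // series: source -> effectA -> effectB
effectB.toMaster(); // and on to the context destination
source.fan(effectA, effectB); // parallel: source feeds both at once
source.disconnect(effectA); // tear down a single connection again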

156
Tone/node/AudioProcessor.ts Normal file

@@ -0,0 +1,156 @@
import { getContext } from "../core/Global";
import { Tone } from "../core/Tone";
import { getDefaultsFromInstance, isDefined, isUndef, optionsFromArguments } from "../core/Util";
import "../type/Units";
/**
* Options for a unit which processes audio
*/
export interface AudioProcessorOptions {
context: BaseAudioContext;
}
/**
* Base class for units which process audio; holds the BaseAudioContext the node belongs to
*/
export abstract class AudioProcessor<Options extends AudioProcessorOptions> extends Tone {
/**
* The context belonging to the node.
*/
readonly context: BaseAudioContext;
readonly defaultContext?: BaseAudioContext;
constructor(context?: BaseAudioContext | Partial<AudioProcessorOptions>) {
const options = optionsFromArguments(AudioProcessor.getDefaults(), arguments, ["context"]);
super();
if (this.defaultContext) {
this.context = this.defaultContext;
} else {
this.context = options.context;
}
}
static getDefaults(): AudioProcessorOptions {
return {
context: getContext(),
};
}
/**
* Return the current time of the BaseAudioContext clock plus the lookAhead.
*/
now(): Seconds {
return this.context.currentTime;
}
/**
* Return the current time of the BaseAudioContext clock without any lookAhead.
*/
immediate(): Seconds {
return this.context.currentTime;
}
/**
* The duration in seconds of one sample.
*/
get sampleTime(): Seconds {
return 1 / this.context.sampleRate;
}
/**
* The number of seconds of 1 processing block (128 samples)
*/
get blockTime(): Seconds {
return 128 / this.context.sampleRate;
}
/**
* Convert the incoming time to seconds
*/
toSeconds(time: Time): Seconds {
if (isUndef(time)) {
return this.now();
} else {
return time as Seconds;
}
}
/**
* Convert the input to a frequency number
*/
toFrequency(frequency: Frequency): Hertz {
return frequency as Hertz;
}
///////////////////////////////////////////////////////////////////////////
// GET/SET
///////////////////////////////////////////////////////////////////////////
/**
* Get the object's attributes. Given no arguments get
* will return all available object properties and their corresponding
* values. Pass in a single attribute to retrieve or an array
* of attributes. The attribute strings can also include a "."
* to access deeper properties.
* @param params the parameters to get, otherwise will return all available.
* @example
* osc.get();
* //returns {"type" : "sine", "frequency" : 440, ...etc}
* @example
* osc.get("type");
* //returns { "type" : "sine"}
* @example
* //use dot notation to access deep properties
* synth.get(["envelope.attack", "envelope.release"]);
* //returns {"envelope" : {"attack" : 0.2, "release" : 0.4}}
*/
get(): Options {
const defaults = getDefaultsFromInstance(this) as Options;
Object.keys(defaults).forEach(attribute => {
if (Reflect.has(this, attribute)) {
const member = this[attribute];
if (isDefined(member) && isDefined(member.value)) {
defaults[attribute] = member.value;
} else if (member instanceof AudioProcessor) {
defaults[attribute] = member.get();
} else {
defaults[attribute] = member;
}
}
});
return defaults;
}
/**
* Set the parameters at once. Either pass in an
* object mapping parameters to values, or to set a
* single parameter, by passing in a string and value.
* The last argument is an optional ramp time which
* will ramp any signal values to their destination value
* over the duration of the rampTime.
* @param params
* @example
* //set values using an object
* filter.set({
* "frequency" : 300,
* "type" : "highpass"
* });
*/
set(props: Partial<Options>): AudioProcessor<Options> {
Object.keys(props).forEach(attribute => {
if (Reflect.has(this, attribute)) {
if (isDefined(this[attribute]) && isDefined(this[attribute].value)) {
this[attribute].value = props[attribute];
} else if (this[attribute] instanceof AudioProcessor) {
this[attribute].set(props[attribute]);
} else {
this[attribute] = props[attribute];
}
}
});
return this;
}
}
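A minimal sketch (not part of this commit) of how a subclass is expected to use this base class: getDefaults() supplies fallback options, optionsFromArguments() lets the constructor accept either positional arguments or a single options object, and get()/set() then read and write those same option names. The Meter class and its smoothing option are hypothetical, and other members of the Tone base class are omitted for brevity.

import { optionsFromArguments } from "../core/Util";
import { AudioProcessor, AudioProcessorOptions } from "./AudioProcessor";

interface MeterOptions extends AudioProcessorOptions {
	smoothing: number; // hypothetical option, for illustration only
}

class Meter extends AudioProcessor<MeterOptions> {
	readonly name = "Meter";
	smoothing: number;

	constructor(smoothing?: number);
	constructor(options?: Partial<MeterOptions>);
	constructor() {
		super(optionsFromArguments(Meter.getDefaults(), arguments, ["smoothing"]));
		const options = optionsFromArguments(Meter.getDefaults(), arguments, ["smoothing"]);
		this.smoothing = options.smoothing;
	}

	static getDefaults(): MeterOptions {
		return Object.assign(AudioProcessor.getDefaults(), { smoothing: 0.8 });
	}
}

// new Meter(0.9) and new Meter({ smoothing: 0.9 }) resolve to the same options,
// and meter.get() / meter.set({ smoothing: 0.5 }) operate on the "smoothing" attribute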

82
Tone/node/Delay.ts Normal file
View file

@ -0,0 +1,82 @@
import { optionsFromArguments, readOnly } from "../core/Util";
import { ToneAudioNode, ToneAudioNodeOptions } from "./AudioNode";
import { Param } from "./Param";
export interface DelayOptions extends ToneAudioNodeOptions {
delayTime: number;
maxDelay: number;
}
/**
* Wrapper around Web Audio's native [DelayNode](http://webaudio.github.io/web-audio-api/#the-delaynode-interface).
* @param delayTime The delay applied to the incoming signal.
* @param maxDelay The maximum delay time.
*/
export class Delay extends ToneAudioNode<DelayOptions> {
readonly name = "Delay";
/**
* The maximum delay time. This cannot be changed after
* the value is passed into the constructor.
*/
readonly maxDelay: Time;
/**
* The amount of time the incoming signal is delayed.
*/
readonly delayTime: Param<"time">;
/**
* Private reference to the internal DelayNode
*/
private _delayNode: DelayNode;
readonly input: DelayNode;
readonly output: DelayNode;
/**
* The internal channels for channel routing changes
*/
protected _internalChannels: AudioNode[];
constructor(options?: Partial<DelayOptions>)
constructor(delayTime?: Time, maxDelay?: Time)
constructor() {
super(optionsFromArguments(Delay.getDefaults(), arguments, ["delayTime", "maxDelay"]));
const options = optionsFromArguments(Delay.getDefaults(), arguments, ["delayTime", "maxDelay"]);
this.maxDelay = Math.max(this.toSeconds(options.maxDelay), this.toSeconds(options.delayTime));
this._delayNode = this.input = this.output = this.context.createDelay(this.toSeconds(this.maxDelay));
this._internalChannels = [this._delayNode];
this.delayTime = new Param({
context: this.context,
param : this._delayNode.delayTime,
units : "time",
value : options.delayTime,
});
readOnly(this, "delayTime");
}
static getDefaults(): DelayOptions {
return Object.assign(ToneAudioNode.getDefaults(), {
delayTime : 0,
maxDelay: 1,
numberOfInputs: 1,
numberOfOutputs: 1,
});
}
/**
* Clean up.
*/
dispose(): this {
super.dispose();
this._delayNode.disconnect();
this.delayTime.dispose();
return this;
}
}
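A quick usage sketch (not part of this commit; it assumes the import path and the default global context): the constructor takes positional arguments or an options object, and delayTime is exposed as a schedulable Param.

import { Delay } from "./Delay";

const slap = new Delay(0.12, 1);                          // delayTime, maxDelay (seconds)
const echo = new Delay({ delayTime: 0.25, maxDelay: 2 }); // same thing, object form

// delayTime is a Param<"time">, so it can be ramped like any other automation
echo.delayTime.rampTo(0.5, 0.2);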

77
Tone/node/Gain.ts Normal file
View file

@ -0,0 +1,77 @@
import { optionsFromArguments, readOnly } from "../core/Util";
import "../type/Units";
import { ToneAudioNode, ToneAudioNodeOptions } from "./AudioNode";
import { Param } from "./Param";
interface GainOptions extends ToneAudioNodeOptions {
gain: number;
units: Unit;
convert: boolean;
}
/**
* A thin wrapper around the Native Web Audio GainNode.
* The GainNode is a basic building block of the Web Audio
* API and is useful for routing audio and adjusting gains.
* @param gain The initial gain of the GainNode
* @param units The units of the gain parameter.
*/
export class Gain<Type extends Unit = "gain"> extends ToneAudioNode<GainOptions> {
readonly name = "Gain";
/**
* The gain parameter of the gain node.
*/
readonly gain: Param<Type>;
/**
* The wrapped GainNode.
*/
private _gainNode: GainNode = this.context.createGain();
// input = output
readonly input: GainNode = this._gainNode;
readonly output: GainNode = this._gainNode;
/**
* Add all of the gain nodes
*/
protected _internalChannels: AudioNode[] = [this._gainNode];
constructor(gain?: GainFactor, units?: Unit);
constructor(options?: Partial<GainOptions>);
constructor() {
super(optionsFromArguments(Gain.getDefaults(), arguments, ["gain", "units"]));
const options = optionsFromArguments(Gain.getDefaults(), arguments, ["gain", "units"]);
this.gain = new Param({
context : this.context,
convert : options.convert,
param : this._gainNode.gain,
units : options.units,
value : options.gain,
});
readOnly(this, "gain");
}
static getDefaults(): GainOptions {
return Object.assign(ToneAudioNode.getDefaults(), {
convert : true,
gain : 1,
numberOfInputs: 1,
numberOfOutputs: 1,
units : "gain" as Unit,
});
}
/**
* Clean up.
*/
dispose(): this {
super.dispose();
this._gainNode.disconnect();
this.gain.dispose();
return this;
}
}
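A usage sketch (not in the commit, import path assumed): the units option decides how the wrapped GainNode's gain parameter interprets values, e.g. plain gain factors or decibels.

import { Gain } from "./Gain";

const bus = new Gain(0.5);                            // plain gain factor
const fader = new Gain<"decibels">(-12, "decibels");  // values converted through dbToGain
fader.gain.rampTo(-6, 0.1);                           // decibel params ramp exponentially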

414
Tone/node/Param.ts Normal file
View file

@ -0,0 +1,414 @@
import { Timeline } from "../core/Timeline";
import { deepMerge, isDefined, optionsFromArguments } from "../core/Util";
import { dbToGain, gainToDb } from "../type/Conversions";
import "../type/Units";
import { AbstractParam } from "./AbstractParam";
import { AudioProcessor, AudioProcessorOptions } from "./AudioProcessor";
export interface ParamOptions extends AudioProcessorOptions {
units: Unit;
value?: number;
param: AudioParam;
convert: boolean;
}
/**
* the possible automation types
*/
type AutomationType = "linear" | "exponential" | "setValue" | "setTarget" | "cancel";
/**
* The events on the automation
*/
export interface AutomationEvent {
type: AutomationType;
time: number;
value: number;
constant?: number;
}
/**
* Param wraps the native Web Audio's AudioParam to provide
* additional unit conversion functionality. It also
* serves as a base-class for classes which have a single,
* automatable parameter.
*/
export class Param<Type extends Unit = "number">
extends AudioProcessor<ParamOptions>
implements AbstractParam<Type> {
name = "Param";
static getDefaults(): ParamOptions {
return Object.assign(AudioProcessor.getDefaults(), {
convert: true,
units: "number" as Unit,
} as ParamOptions);
}
/**
* The input connection
*/
readonly input: AudioParam;
readonly units: Unit;
convert: boolean;
overridden: boolean = false;
/**
* The timeline which tracks all of the automations.
*/
private _events: Timeline<AutomationEvent>;
/**
* The native parameter to control
*/
protected _param: AudioParam;
/**
* The default value before anything is assigned
*/
private _initialValue: number;
/**
* The minimum output value
*/
private _minOutput = 1e-5;
constructor(param: AudioParam, units?: Unit, convert?: boolean);
constructor(options: Partial<ParamOptions>);
constructor() {
super(optionsFromArguments(Param.getDefaults(), arguments, ["param", "units", "convert"]));
const options = optionsFromArguments(Param.getDefaults(), arguments, ["param", "units", "convert"]);
this.assert(isDefined(options.param) && options.param instanceof AudioParam, "param must be an AudioParam");
// initialize
this._param = this.input = options.param;
this._events = new Timeline<AutomationEvent>(1000);
this._initialValue = this._param.value;
this.units = options.units;
this.convert = options.convert;
// if the value is defined, set it immediately
if (isDefined(options.value)) {
this.setValueAtTime(options.value, 0);
}
}
get value(): UnitMap[Type] {
const now = this.now();
return this.getValueAtTime(now);
}
set value(value: UnitMap[Type]) {
this._initialValue = this._fromType(value);
this.cancelScheduledValues(this.now());
this.setValueAtTime(value, this.now());
}
get minValue(): number {
if (this.units === "time" || this.units === "frequency" ||
this.units === "normalRange" || this.units === "positive" ||
this.units === "transportTime" || this.units === "ticks" ||
this.units === "bpm" || this.units === "hertz" || this.units === "samples") {
return 0;
} else if (this.units === "audioRange") {
return -1;
} else if (this.units === "decibels") {
return -Infinity;
} else {
return this._param.minValue;
}
}
get maxValue(): number {
if (this.units === "normalRange" ||
this.units === "audioRange") {
return 1;
} else {
return this._param.maxValue;
}
}
/**
* Type guard based on the unit name
*/
private _is<T>(arg, type: Unit): arg is T {
return this.units === type;
}
/**
* Convert the given value from the type specified by Param.units
* into the destination value (such as Gain or Frequency).
*/
private _fromType(val: UnitMap[Type]): number {
if (this.convert && !this.overridden) {
if (this._is<Time>(val, "time")) {
return this.toSeconds(val);
} else if (this._is<Decibels>(val, "decibels")) {
return dbToGain(val);
} else if (this._is<Frequency>(val, "frequency")) {
return this.toFrequency(val);
} else if (this._is<NormalRange>(val, "normalRange")) {
return Math.min(Math.max(val, 0), 1);
} else if (this._is<AudioRange>(val, "audioRange")) {
return Math.min(Math.max(val, -1), 1);
} else if (this._is<Positive>(val, "positive")) {
return Math.max(val, 0);
} else if (this._is<number>(val, "number")) {
return val;
} else {
return val as number;
}
} else {
return val as number;
}
}
/**
* Convert the parameters value into the units specified by Param.units.
*/
private _toType(val: number): UnitMap[Type] {
if (this.convert && this.units === "decibels") {
return gainToDb(val) as UnitMap[Type];
} else {
return val as UnitMap[Type];
}
}
///////////////////////////////////////////////////////////////////////////
// ABSTRACT PARAM INTERFACE
// all docs are generated from ParamInterface.ts
///////////////////////////////////////////////////////////////////////////
setValueAtTime(value: UnitMap[Type], time: Time): this {
time = this.toSeconds(time);
const numericValue = this._fromType(value);
this._events.add({
time,
type: "setValue",
value: numericValue,
});
this.log("setValue", value, time);
this._param.setValueAtTime(numericValue, time);
return this;
}
getValueAtTime(time: Time): UnitMap[Type] {
time = this.toSeconds(time);
const after = this._events.getAfter(time);
const before = this._events.get(time);
let value = this._initialValue;
// if there is no event before the given time, fall back to the initial value
if (before === null) {
value = this._initialValue;
} else if (before.type === "setTarget" && (after === null || after.type === "setValue")) {
const previous = this._events.getBefore(before.time);
let previousVal;
if (previous === null) {
previousVal = this._initialValue;
} else {
previousVal = previous.value;
}
if (isDefined(before.constant)) {
value = this._exponentialApproach(before.time, previousVal, before.value, before.constant, time);
}
} else if (after === null) {
value = before.value;
} else if (after.type === "linear" || after.type === "exponential") {
let beforeValue = before.value;
if (before.type === "setTarget") {
const previous = this._events.getBefore(before.time);
if (previous === null) {
beforeValue = this._initialValue;
} else {
beforeValue = previous.value;
}
}
if (after.type === "linear") {
value = this._linearInterpolate(before.time, beforeValue, after.time, after.value, time);
} else {
value = this._exponentialInterpolate(before.time, beforeValue, after.time, after.value, time);
}
} else {
value = before.value;
}
return this._toType(value);
}
setRampPoint(time: Time): this {
time = this.toSeconds(time);
let currentVal = this.getValueAtTime(time);
this.cancelAndHoldAtTime(time);
if (this._fromType(currentVal) === 0) {
currentVal = this._toType(this._minOutput);
}
this.setValueAtTime(currentVal, time);
return this;
}
linearRampToValueAtTime(value: UnitMap[Type], endTime: Time): this {
const numericValue = this._fromType(value);
endTime = this.toSeconds(endTime);
this._events.add({
time: endTime,
type: "linear",
value : numericValue,
});
this.log("linear", value, endTime);
this._param.linearRampToValueAtTime(numericValue, endTime);
return this;
}
exponentialRampToValueAtTime(value: UnitMap[Type], endTime: Time): this {
let numericValue = this._fromType(value);
numericValue = Math.max(this._minOutput, numericValue);
endTime = this.toSeconds(endTime);
// store the event
this._events.add({
time: endTime,
type: "exponential",
value : numericValue,
});
this.log("exponential", value, endTime);
this._param.exponentialRampToValueAtTime(numericValue, endTime);
return this;
}
exponentialRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this {
startTime = this.toSeconds(startTime);
this.setRampPoint(startTime);
this.exponentialRampToValueAtTime(value, startTime + this.toSeconds(rampTime));
return this;
}
linearRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this {
startTime = this.toSeconds(startTime);
this.setRampPoint(startTime);
this.linearRampToValueAtTime(value, startTime + this.toSeconds(rampTime));
return this;
}
targetRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this {
startTime = this.toSeconds(startTime);
this.setRampPoint(startTime);
this.exponentialApproachValueAtTime(value, startTime, rampTime);
return this;
}
exponentialApproachValueAtTime(value: UnitMap[Type], time: Time, rampTime: Time): this {
const timeConstant = Math.log(this.toSeconds(rampTime) + 1) / Math.log(200);
time = this.toSeconds(time);
return this.setTargetAtTime(value, time, timeConstant);
}
setTargetAtTime(value: UnitMap[Type], startTime: Time, timeConstant: Positive): this {
const numericValue = this._fromType(value);
// The value will never be able to approach its target without a timeConstant greater than 0.
this.assert(timeConstant > 0, "timeConstant must be greater than 0");
startTime = this.toSeconds(startTime);
this._events.add({
constant: timeConstant,
time: startTime,
type: "setTarget",
value: numericValue,
});
this.log("setTarget", value, startTime, timeConstant);
this._param.setTargetAtTime(numericValue, startTime, timeConstant);
return this;
}
setValueCurveAtTime(values: Array<UnitMap[Type]>, startTime: Time, duration: Time, scaling: number = 1): this {
duration = this.toSeconds(duration);
startTime = this.toSeconds(startTime);
const startingValue = this._fromType(values[0]) * scaling;
this.setValueAtTime(this._toType(startingValue), startTime);
const segTime = duration / (values.length - 1);
for (let i = 1; i < values.length; i++) {
const numericValue = this._fromType(values[i]) * scaling;
this.linearRampToValueAtTime(this._toType(numericValue), startTime + i * segTime);
}
return this;
}
cancelScheduledValues(time: Time): this {
time = this.toSeconds(time);
this._events.cancel(time);
this._param.cancelScheduledValues(time);
this.log("cancel", time);
return this;
}
cancelAndHoldAtTime(time: Time): this {
time = this.toSeconds(time);
const valueAtTime = this._fromType(this.getValueAtTime(time));
this.log("cancelAndHoldAtTime", time, "value=" + valueAtTime);
// remove the schedule events
this._param.cancelScheduledValues(time);
// if there is an event at the given time
// and that event is not a "set"
const before = this._events.get(time);
const after = this._events.getAfter(time);
if (before && before.time === time) {
// remove everything after
if (after) {
this._events.cancel(after.time);
} else {
this._events.cancel(time + this.sampleTime);
}
} else if (after) {
// cancel the next event(s)
this._events.cancel(after.time);
if (after.type === "linear") {
this.linearRampToValueAtTime(this._toType(valueAtTime), time);
} else if (after.type === "exponential") {
this.exponentialRampToValueAtTime(this._toType(valueAtTime), time);
}
}
// set the value at the given time
this._events.add({
time,
type: "setValue",
value: valueAtTime,
});
this._param.setValueAtTime(valueAtTime, time);
return this;
}
rampTo(value: UnitMap[Type], rampTime: Time = 0.1, startTime?: Time): this {
if (this.units === "frequency" || this.units === "bpm" || this.units === "decibels") {
this.exponentialRampTo(value, rampTime, startTime);
} else {
this.linearRampTo(value, rampTime, startTime);
}
return this;
}
dispose(): this {
this._events.dispose();
return this;
}
///////////////////////////////////////////////////////////////////////////
// AUTOMATION CURVE CALCULATIONS
// MIT License, copyright (c) 2014 Jordan Santell
///////////////////////////////////////////////////////////////////////////
// Calculates the value along the curve produced by setTargetAtTime
private _exponentialApproach(t0: number, v0: number, v1: number, timeConstant: number, t: number): number {
return v1 + (v0 - v1) * Math.exp(-(t - t0) / timeConstant);
}
// Calculates the value along the curve produced by linearRampToValueAtTime
private _linearInterpolate(t0: number, v0: number, t1: number, v1: number, t: number): number {
return v0 + (v1 - v0) * ((t - t0) / (t1 - t0));
}
// Calculates the value along the curve produced by exponentialRampToValueAtTime
private _exponentialInterpolate(t0: number, v0: number, t1: number, v1: number, t: number): number {
return v0 * Math.pow(v1 / v0, (t - t0) / (t1 - t0));
}
}
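A sketch of wrapping a native AudioParam (not part of the commit; getContext and the oscillator are just a convenient stand-in): scheduled values are converted from the declared units and mirrored into the internal Timeline, so getValueAtTime can read them back.

import { getContext } from "../core/Global";
import { Param } from "./Param";

const context = getContext();
const osc = context.createOscillator();

const freq = new Param<"frequency">({
	context,
	param: osc.frequency,
	units: "frequency",
	value: 440,
});

freq.setValueAtTime(220, 0);
freq.exponentialRampTo(880, 0.5, 0.25); // ramp to 880 between 0.25s and 0.75s
freq.getValueAtTime(1);                 // 880, read back from the automation timeline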

0
Tone/node/Split.ts Normal file
View file

178
Tone/signal/Signal.ts Normal file
View file

@ -0,0 +1,178 @@
import { optionsFromArguments } from "../core/Util";
import { AbstractParam } from "../node/AbstractParam";
import { InputNode, ToneAudioNode, ToneAudioNodeOptions } from "../node/AudioNode";
import { Param } from "../node/Param";
interface SignalOptions extends ToneAudioNodeOptions {
value: number;
units: Unit;
convert: boolean;
}
/**
* A signal is an audio-rate value. Tone.Signal is a core component of the library.
* Unlike a number, Signals can be scheduled with sample-level accuracy. Tone.Signal
* has all of the methods available to native Web Audio
* [AudioParam](http://webaudio.github.io/web-audio-api/#the-audioparam-interface)
* as well as additional conveniences. Read more about working with signals
* [here](https://github.com/Tonejs/Tone.js/wiki/Signals).
* @param value Initial value of the signal
* @param units unit The units the signal is in.
* @example
* const signal = new Tone.Signal(10);
*/
export class Signal<Type extends Unit = "number">
extends ToneAudioNode<SignalOptions>
implements AbstractParam<Type> {
name = "Signal";
static getDefaults(): SignalOptions {
return Object.assign(ToneAudioNode.getDefaults(), {
channelCount: 1,
channelCountMode: "explicit",
channelInterpretation: "discrete",
convert: true,
numberOfInputs: 1,
numberOfOutputs: 1,
units: "number" as Unit,
value: 0,
});
}
/**
* The constant source node which generates the signal
*/
private _constantSource: ConstantSourceNode = this.context.createConstantSource();
readonly output: AudioNode = this._constantSource;
private _param: Param<Type>;
readonly input: Param<Type>;
protected _internalChannels = [this._constantSource];
constructor(value?: UnitMap[Type], units?: Unit);
constructor(options?: Partial<SignalOptions>);
constructor() {
super(optionsFromArguments(Signal.getDefaults(), arguments, ["value", "units"]));
const options = optionsFromArguments(Signal.getDefaults(), arguments, ["value", "units"]);
this._constantSource.start(0);
this.input = this._param = new Param({
context: this.context,
convert: options.convert,
param: this._constantSource.offset,
units: options.units,
value: options.value,
});
}
connect(destination: InputNode, outputNum = 0, inputNum = 0): this {
if (destination instanceof Param || destination instanceof AudioParam || destination instanceof Signal) {
// cancel changes
destination.cancelScheduledValues(0);
// reset the value
destination.setValueAtTime(0, 0);
// mark the value as overridden
if (destination instanceof Signal) {
destination.overridden = true;
}
}
super.connect(destination, outputNum, inputNum);
return this;
}
///////////////////////////////////////////////////////////////////////////
// ABSTRACT PARAM INTERFACE
// just a proxy for the ConstantSourceNode's offset AudioParam
// all docs are generated from ParamInterface.ts
///////////////////////////////////////////////////////////////////////////
setValueAtTime(value: UnitMap[Type], time: Time): this {
this._param.setValueAtTime(value, time);
return this;
}
getValueAtTime(time: Time): UnitMap[Type] {
return this._param.getValueAtTime(time);
}
setRampPoint(time: Time): this {
this._param.setRampPoint(time);
return this;
}
linearRampToValueAtTime(value: UnitMap[Type], time: Time): this {
this._param.linearRampToValueAtTime(value, time);
return this;
}
exponentialRampToValueAtTime(value: UnitMap[Type], time: Time): this {
this._param.exponentialRampToValueAtTime(value, time);
return this;
}
exponentialRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this {
this._param.exponentialRampTo(value, rampTime, startTime);
return this;
}
linearRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this {
this._param.linearRampTo(value, rampTime, startTime);
return this;
}
targetRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this {
this._param.targetRampTo(value, rampTime, startTime);
return this;
}
exponentialApproachValueAtTime(value: UnitMap[Type], time: Time, rampTime: Time): this {
this._param.exponentialApproachValueAtTime(value, time, rampTime);
return this;
}
setTargetAtTime(value: UnitMap[Type], startTime: Time, timeConstant: number): this {
this._param.setTargetAtTime(value, startTime, timeConstant);
return this;
}
setValueCurveAtTime(values: Array<UnitMap[Type]>, startTime: Time, duration: Time, scaling?: number): this {
this._param.setValueCurveAtTime(values, startTime, duration, scaling);
return this;
}
cancelScheduledValues(time: Time): this {
this._param.cancelScheduledValues(time);
return this;
}
cancelAndHoldAtTime(time: Time): this {
this._param.cancelAndHoldAtTime(time);
return this;
}
rampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this {
this._param.rampTo(value, rampTime, startTime);
return this;
}
get value(): UnitMap[Type] {
return this._param.value;
}
set value(value: UnitMap[Type]) {
this._param.value = value;
}
get convert(): boolean {
return this._param.convert;
}
set convert(convert: boolean) {
this._param.convert = convert;
}
get units(): Unit {
return this._param.units;
}
get overridden(): boolean {
return this._param.overridden;
}
set overridden(overridden: boolean) {
this._param.overridden = overridden;
}
get maxValue(): UnitMap[Type] {
return this._param.maxValue;
}
get minValue(): UnitMap[Type] {
return this._param.minValue;
}
}
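A short sketch (not in the commit) of the connect() override above: driving one Signal from another zeroes the destination and flags it as overridden, so the source has full control of the underlying AudioParam.

import { Signal } from "./Signal";

const freq = new Signal(440, "frequency");
const modulator = new Signal(2);

modulator.connect(freq);
// freq.overridden === true; its own scheduled values were cancelled and reset to 0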

36
Tone/type/Conversions.ts Normal file
View file

@ -0,0 +1,36 @@
import "../type/Units";
/**
* Equal power gain scale. Good for cross-fading.
* @param percent (0-1)
*/
export function equalPowerScale(percent: NormalRange): number {
const piFactor = 0.5 * Math.PI;
return Math.sin(percent * piFactor);
}
/**
* Convert decibels into gain.
*/
export function dbToGain(db: Decibels): GainFactor {
return Math.pow(10, db / 20);
}
/**
* Convert gain to decibels.
*/
export function gainToDb(gain: GainFactor): Decibels {
return 20 * (Math.log(gain) / Math.LN10);
}
/**
* Convert an interval (in semitones) to a frequency ratio.
* @param interval the number of semitones above the base note
* @example
* tone.intervalToFrequencyRatio(0); // 1
* tone.intervalToFrequencyRatio(12); // 2
* tone.intervalToFrequencyRatio(-12); // 0.5
*/
export function intervalToFrequencyRatio(interval: Interval): number {
return Math.pow(2, (interval / 12));
}
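A few worked values (not in the source) for the helpers above:

import { dbToGain, equalPowerScale, gainToDb, intervalToFrequencyRatio } from "./Conversions";

dbToGain(-6);                 // ≈ 0.501
gainToDb(0.5);                // ≈ -6.02
equalPowerScale(0.5);         // ≈ 0.707, the midpoint of an equal-power crossfade
intervalToFrequencyRatio(7);  // ≈ 1.498, a perfect fifth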

127
Tone/type/Time.ts Normal file
View file

@ -0,0 +1,127 @@
import { TypeBase, typeBaseExpressions } from "./TypeBase";
/**
* Tone.Time is a primitive type for encoding Time values.
* Tone.Time can be passed into the parameter of any method which takes time as an argument.
* @param val The time value.
* @param units The units of the value.
* @example
* var t = Time("4n");//a quarter note
*/
export class TimeConstructor extends TypeBase<"time"> {
protected _expressions = Object.assign({}, typeBaseExpressions, {
now : {
regexp: /^\+(.+)/,
method(capture): Seconds {
return this._now() + (new this.constructor(capture));
},
},
quantize: {
regexp: /^@(.+)/,
method(capture) {
return 0;
// if (Tone.Transport) {
// const quantTo = new this.constructor(capture);
// return this._secondsToUnits(Tone.Transport.nextSubdivision(quantTo));
// } else {
// }
},
},
});
/**
* Quantize the time by the given subdivision. Optionally add a
* percentage which will move the time value towards the ideal
* quantized value by that percentage.
* @param val The subdivision to quantize to
* @param percent Move the time value towards the quantized value by a percentage.
* @example
* Time(21).quantize(2) //returns 22
* Time(0.6).quantize("4n", 0.5) //returns 0.55
*/
quantize(subdiv: number | string | TimeObject, percent = 1): Seconds {
const subdivision = new TimeConstructor(subdiv).valueOf();
const value = this.valueOf();
const multiple = Math.round(value / subdivision);
const ideal = multiple * subdivision;
const diff = ideal - value;
return value + diff * percent;
}
///////////////////////////////////////////////////////////////////////////
// CONVERSIONS
///////////////////////////////////////////////////////////////////////////
/**
* Convert a Time to Notation. The notation value will be the
* closest representation between a whole measure (1m) and a 128th note.
* @return {Notation}
* @example
* //if the Transport is at 120bpm:
* Time(2).toNotation();//returns "1m"
*/
toNotation(): Subdivision {
const time = this.toSeconds();
const testNotations: Subdivision[] = ["1m"];
for (let power = 1; power < 8; power++) {
const subdiv = Math.pow(2, power);
testNotations.push(subdiv + "n." as Subdivision);
testNotations.push(subdiv + "n" as Subdivision);
testNotations.push(subdiv + "t" as Subdivision);
}
// find the closest notation representation
let closest = testNotations[0];
let closestSeconds = new TimeConstructor(testNotations[0]).toSeconds();
testNotations.forEach(notation => {
const notationSeconds = new TimeConstructor(notation).toSeconds();
if (Math.abs(notationSeconds - time) < Math.abs(closestSeconds - time)) {
closest = notation;
closestSeconds = notationSeconds;
}
});
return closest;
}
/**
* Return the time encoded as Bars:Beats:Sixteenths.
*/
toBarsBeatsSixteenths(): BarsBeatsSixteenths {
const quarterTime = this._beatsToUnits(1);
let quarters = this.valueOf() / quarterTime;
quarters = parseFloat(quarters.toFixed(4));
const measures = Math.floor(quarters / this._getTimeSignature());
let sixteenths = (quarters % 1) * 4;
quarters = Math.floor(quarters) % this._getTimeSignature();
const sixteenthString = sixteenths.toString();
if (sixteenthString.length > 3) {
// the additional parseFloat removes insignificant trailing zeroes
sixteenths = parseFloat(parseFloat(sixteenthString).toFixed(3));
}
const progress = [measures, quarters, sixteenths];
return progress.join(":");
}
/**
* Return the time in ticks.
*/
toTicks(): Ticks {
const quarterTime = this._beatsToUnits(1);
const quarters = this.valueOf() / quarterTime;
return Math.round(quarters * this._getPPQ());
}
/**
* Return the time in seconds.
*/
toSeconds(): Seconds {
return this.valueOf();
}
/**
* Return the value as a midi note.
*/
toMidi(): MidiNote {
return 0;
// return Tone.Frequency.ftom(this.toFrequency());
}
}
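A usage sketch (not in the commit) using the stubbed Transport defaults from TypeBase (120bpm, 4/4, 192 PPQ):

import { TimeConstructor } from "./Time";

new TimeConstructor("4n").toSeconds();        // 0.5
new TimeConstructor(2).toNotation();          // "1m"
new TimeConstructor("1:0:2").toSeconds();     // 2.25
new TimeConstructor(0.6).quantize("4n", 0.5); // 0.55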

299
Tone/type/TypeBase.ts Normal file
View file

@ -0,0 +1,299 @@
import { getContext } from "../core/Global";
import { isDefined, isObject , isString, isUndef } from "../core/Util";
/**
* TypeBase is a flexible encoding of time which can be evaluated to and from a string.
* @param val The time value as a number, string or object
* @param units Unit values
* @example
* TypeBase(4, "n")
* TypeBase(2, "t")
* TypeBase("2t")
* TypeBase({"2t" : 2})
* TypeBase("2t") + TypeBase("4n");
*/
export abstract class TypeBase<Type extends Unit> {
/**
* The value of the units
*/
private _val: string | number | TimeObject;
/**
* The units of time
*/
private _units?: TypeBaseUnits;
/**
* All of the conversion expressions
*/
protected _expressions: TypeBaseExpression = typeBaseExpressions;
/**
* The default AudioContext
*/
get defaultContext(): BaseAudioContext {
return getContext();
}
/**
* The default units, used if none are given, are seconds ("s")
*/
private _defaultUnits = "s" as TypeBaseUnits;
constructor(val: string | number | TimeObject, units?: TypeBaseUnits) {
this._val = val;
this._units = units;
// test if the value is a string representation of a number
if (isUndef(this._units) && isString(this._val) &&
parseFloat(this._val).toString() === this._val.trim() && this._val.charAt(0) !== "+") {
this._val = parseFloat(this._val);
this._units = this._defaultUnits;
}
}
///////////////////////////////////////////////////////////////////////////
// VALUE OF
///////////////////////////////////////////////////////////////////////////
/**
* Evaluate the time value. Returns the time in seconds.
*/
valueOf(): number {
if (isUndef(this._val)) {
return this._noArg();
} else if (isString(this._val) && isUndef(this._units)) {
for (const units in this._expressions) {
if (this._expressions[units].regexp.test(this._val.trim())) {
this._units = units as TypeBaseUnits;
break;
}
}
} else if (isObject(this._val)) {
let total = 0;
for (const typeName in this._val) {
if (isDefined(this._val[typeName])) {
const quantity = this._val[typeName];
// @ts-ignore
const time = (new this.constructor(typeName)).valueOf() * quantity;
total += time;
}
}
return total;
}
if (isDefined(this._units)) {
const expr = this._expressions[this._units];
const matching = this._val.toString().trim().match(expr.regexp);
if (matching) {
return expr.method.apply(this, matching.slice(1));
} else {
return expr.method.call(this, this._val);
}
} else if (isString(this._val)) {
return parseFloat(this._val);
} else {
return this._val;
}
}
///////////////////////////////////////////////////////////////////////////
// UNIT CONVERSIONS
///////////////////////////////////////////////////////////////////////////
/**
* Returns the value of a frequency in the current units
*/
protected _frequencyToUnits(freq: Hertz): UnitMap[Type] {
return 1 / freq;
}
/**
* Return the value of the beats in the current units
*/
protected _beatsToUnits(beats): number {
return (60 / this._getBpm()) * beats;
}
/**
* Returns the value of a second in the current units
*/
protected _secondsToUnits(seconds: Seconds): UnitMap[Type] {
return seconds;
}
/**
* Returns the value of a tick in the current time units
* @private
*/
protected _ticksToUnits(ticks: Ticks): UnitMap[Type] {
return (ticks * (this._beatsToUnits(1)) / this._getPPQ());
}
/**
* With no arguments, return 'now'
*/
protected _noArg(): Seconds {
return this._now();
}
///////////////////////////////////////////////////////////////////////////
// TEMPO CONVERSIONS
///////////////////////////////////////////////////////////////////////////
/**
* Return the bpm, or 120 if Transport is not available
*/
protected _getBpm(): BPM {
return 120;
}
/**
* Return the timeSignature or 4 if Transport is not available
*/
protected _getTimeSignature(): number {
return 4;
}
/**
* Return the PPQ or 192 if Transport is not available
*/
protected _getPPQ(): number {
return 192;
}
/**
* Return the current time in whichever context is relevant
*/
protected _now(): Seconds {
return this.defaultContext.currentTime;
}
///////////////////////////////////////////////////////////////////////////
// CONVERSION INTERFACE
///////////////////////////////////////////////////////////////////////////
/**
* Return the value in seconds
*/
toSeconds(): Seconds {
return this.valueOf();
}
/**
* Return the value in hertz
*/
toFrequency(): Hertz {
return 1 / this.toSeconds();
}
/**
* Return the time in samples
*/
toSamples(): Samples {
return this.toSeconds() * this.defaultContext.sampleRate;
}
/**
* Return the time in milliseconds.
*/
toMilliseconds(): Milliseconds {
return this.toSeconds() * 1000;
}
}
///////////////////////////////////////////////////////////////////////////
// EXPRESSIONS
///////////////////////////////////////////////////////////////////////////
/**
* The units that the TypeBase can accept. extended by other classes
*/
type TypeBaseUnits = "s" | "n" | "t" | "m" | "i" | "hz" | "tr" | "samples" | "number";
/**
* The format of the type conversion expressions
*/
type TypeBaseExpression = {
[key in TypeBaseUnits]: {
regexp: RegExp;
method: (value: string, ...args: any[]) => number;
};
};
/**
* All of the conversion expressions
*/
export const typeBaseExpressions: TypeBaseExpression = {
n: {
regexp: /^(\d+)n(\.?)$/i,
method(value, dot) {
const numericValue = parseInt(value);
const scalar = dot === "." ? 1.5 : 1;
if (numericValue === 1) {
return this._beatsToUnits(this._getTimeSignature()) * scalar;
} else {
return this._beatsToUnits(4 / numericValue) * scalar;
}
},
},
t: {
regexp: /^(\d+)t$/i,
method(value) {
const numericValue = parseInt(value);
return this._beatsToUnits(8 / (Math.floor(numericValue) * 3));
},
},
m: {
regexp: /^(\d+)m$/i,
method(value) {
return this._beatsToUnits(parseInt(value) * this._getTimeSignature());
},
},
i: {
regexp: /^(\d+)i$/i,
method(value) {
return this._ticksToUnits(parseInt(value));
},
},
hz: {
regexp: /^(\d+(?:\.\d+)?)hz$/i,
method(value) {
return this._frequencyToUnits(parseFloat(value));
},
},
tr: {
regexp: /^(\d+(?:\.\d+)?):(\d+(?:\.\d+)?):?(\d+(?:\.\d+)?)?$/,
method(m, q, s) {
let total = 0;
if (m && m !== "0") {
total += this._beatsToUnits(this._getTimeSignature() * parseFloat(m));
}
if (q && q !== "0") {
total += this._beatsToUnits(parseFloat(q));
}
if (s && s !== "0") {
total += this._beatsToUnits(parseFloat(s) / 4);
}
return total;
},
},
s: {
regexp: /^(\d+(?:\.\d+)?)s$/,
method(value) {
return this._secondsToUnits(parseFloat(value));
},
},
samples: {
regexp: /^(\d+)samples$/,
method(value) {
return parseInt(value) / this.context.sampleRate;
},
},
number: {
regexp: /^(\d+(?:\.\d+)?)$/,
method(value) {
return this._expressions[this._defaultUnits].method.call(this, value);
},
},
};

158
Tone/type/Units.ts Normal file
View file

@ -0,0 +1,158 @@
/**
* A note in Scientific pitch notation.
* The pitch class + octave number
* e.g. "C4", "D#3", "G-1"
*/
type Note = string;
/**
* A number representing a time in seconds
*/
type Seconds = number;
/**
* A number used to measure the intensity of a sound on a logarithmic scale.
*/
type Decibels = number;
/**
* A number that is between [0, 1]
*/
type NormalRange = number;
/**
* A number that is between [-1, 1]
*/
type AudioRange = number;
/**
* Half-step note increments, i.e. 12 is an octave above the root and 1 is a half-step up.
*/
type Interval = number;
/**
* A number representing the multiplication factor applied to a signal
*/
type GainFactor = number;
/**
* A number greater than or equal to 0.
*/
type Positive = number;
/**
* Represents a subdivision of a measure.
* The number represents the subdivision. "t" represents a triplet.
* e.g. "4n" is a quarter note, and "4t" is a quarter note triplet.
*/
type Subdivision = "1m" | "1n" | "2n" | "2t" | "4n" | "4t" | "8n" | "8t" | "16n" | "16t" |
"32n" | "32t" | "64n" | "64t" | "128n" | "128t" | "256n" | "256t";
/**
* A time object has subdivisions as the keys and numbers as the values.
* The resulting time is the sum of each key multiplied by its value,
* e.g. {"4n" : 3, "8t" : -1}.
*/
type TimeObject = {
[sub in Subdivision]?: number;
};
/**
* Time can be described in a number of ways. Read more [Time](https://github.com/Tonejs/Tone.js/wiki/Time).
*
* * Numbers, which will be taken literally as the time (in seconds).
* * Notation, ("4n", "8t") describes time in BPM and time signature relative values.
* * TransportTime, ("4:3:2") will also provide tempo and time signature relative times
* in the form BARS:QUARTERS:SIXTEENTHS.
* * Frequency, ("8hz") is converted to the length of the cycle in seconds.
* * Now-Relative, ("+1") prefix any of the above with "+" and it will be interpreted as
* "the current time plus whatever expression follows".
* * Object, ({"4n" : 3, "8t" : -1}). The resulting time is equal to
* the sum of all of the keys multiplied by the values in the object.
* * No Argument, for methods which accept time, no argument will be interpreted as
* "now" (i.e. the currentTime).
*/
type Time = Seconds | TimeObject | Subdivision | undefined;
/**
* Frequency can be described similar to time, except ultimately the
* values are converted to frequency instead of seconds. A number
* is taken literally as the value in hertz. Additionally any of the
* Time encodings can be used. Note names in the form
* of NOTE OCTAVE (i.e. C4) are also accepted and converted to their
* frequency value.
*/
type Frequency = Time | Note;
/**
* TransportTime describes a position along the Transport's timeline. It is
* similar to Time in that it uses all the same encodings, but TransportTime specifically
* pertains to the Transport's timeline, which is startable, stoppable, loopable, and seekable.
* [Read more](https://github.com/Tonejs/Tone.js/wiki/TransportTime)
*/
type TransportTime = Time;
/**
* Ticks are the basic subunit of the Transport. They are
* the smallest unit of time that the Transport supports.
*/
type Ticks = number;
/**
* Beats per minute
*/
type BPM = number;
/**
* Angle between 0 and 360.
*/
type Degrees = number;
/**
* A number representing a midi note.
*/
type MidiNote = number;
/**
* A colon-separated representation of time in the form of
* Bars:Beats:Sixteenths.
*/
type BarsBeatsSixteenths = string;
/**
* Sampling is the reduction of a continuous signal to a discrete signal.
* Audio is typically sampled 44100 times per second.
*/
type Samples = number;
/**
* Hertz are a frequency representation defined as one cycle per second.
*/
type Hertz = number;
/**
* One millisecond is a thousandth of a second.
*/
type Milliseconds = number;
/**
* Map the unit name to a unit value
*/
interface UnitMap {
number: number;
decibels: Decibels;
normalRange: NormalRange;
audioRange: AudioRange;
gain: GainFactor;
positive: Positive;
time: Time;
frequency: Frequency;
transportTime: TransportTime;
ticks: Ticks;
bpm: BPM;
degrees: Degrees;
samples: Samples;
hertz: Hertz;
}
/**
* All of the units names
*/
type Unit = keyof UnitMap;
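A small sketch (not in the source) of how UnitMap drives the generic typing used by Param<Type>, Gain<Type> and Signal<Type>: the unit name selects the TypeScript type of the values.

type DecibelValue = UnitMap["decibels"];  // number
type TimeValue = UnitMap["time"];         // Seconds | TimeObject | Subdivision | undefined

function labelValue<U extends Unit>(units: U, value: UnitMap[U]): string {
	return `${String(value)} ${units}`;
}
labelValue("decibels", -12); // "-12 decibels"
labelValue("time", "4n");    // "4n time"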

1
Tone/version.ts Normal file
View file

@ -0,0 +1 @@
export const version = "dev";

935
package-lock.json generated

File diff suppressed because it is too large

package.json

View file

@ -1,6 +1,6 @@
 {
   "name": "tone",
-  "version": "13.8.0",
+  "version": "14.0.0",
   "description": "A Web Audio framework for making interactive music in the browser.",
   "main": "build/Tone.js",
   "files": [
@ -19,9 +19,8 @@
     "collect:tests": "node scripts/collect_tests.js --file $npm_config_file --dir $npm_config_dir",
     "increment": "node scripts/increment_version.js",
     "karma": "karma start scripts/karma.conf.js --single-run",
-    "karma:watch": "karma start scripts/karma.conf.js --auto-watch",
-    "lint": "eslint Tone/*/*.js",
-    "lint:fix": "eslint Tone/*/*.js --fix",
+    "karma:watch": "karma start scripts/karma.conf.js --auto-watch --browsers Chrome",
+    "lint:fix": "eslint Tone/*/*.ts --fix",
     "scratch": "webpack -w --env.scratch --mode=development",
     "test": "npm run collect:tests && npm run karma",
     "test:watch": "npm run collect:tests && npm run karma:watch",
@ -30,6 +29,8 @@
     "test:html": "mocha ./test/html/testHTML.js --timeout 30000",
     "test:node": "node ./test/html/node_test.js",
     "tsd": "node ./scripts/generate_docs.js",
+    "lint": "tslint --project tslint.json",
+    "tsc:watch": "tsc --watch",
     "watch": "npm run increment && npm run collect:deps && webpack -w --env.production --mode=development"
   },
   "repository": {
@ -54,8 +55,14 @@
   },
   "devDependencies": {
     "@babel/polyfill": "^7.0.0-beta.47",
+    "@tonejs/plot": "0.0.20",
+    "@types/chai": "^4.1.7",
+    "@types/mocha": "^5.2.6",
+    "@types/ua-parser-js": "^0.7.32",
+    "@typescript-eslint/eslint-plugin": "^1.5.0",
+    "@typescript-eslint/parser": "^1.5.0",
     "audiobuffer-to-wav": "^1.0.0",
-    "chai": "^1.9.1",
+    "chai": "^1.10.0",
     "codecov": "^3.1.0",
     "concurrently": "^3.5.1",
     "eslint": "^5.13.0",
@ -66,19 +73,21 @@
     "http-server": "^0.11.1",
     "istanbul-instrumenter-loader": "^3.0.1",
     "jsdoc": "^3.5.5",
-    "karma": "^4.0.0",
+    "karma": "^4.0.1",
     "karma-chrome-launcher": "^2.2.0",
     "karma-coverage": "^1.1.1",
     "karma-firefox-launcher": "^1.1.0",
     "karma-mocha": "^1.3.0",
     "karma-safari-launcher": "^1.0.0",
     "karma-sourcemap-loader": "^0.3.7",
-    "karma-webpack": "^3.0.5",
+    "karma-typescript": "^4.0.0",
     "mocha": "^5.1.1",
     "proxy-polyfill": "^0.2.0",
     "semver": "^5.5.0",
     "teoria": "^2.5.0",
     "tsd-jsdoc": "^2.1.2",
+    "tslint": "^5.14.0",
+    "typescript": "^3.3.4000",
     "ua-parser-js": "^0.7.19",
     "webpack": "^4.6.0",
     "webpack-cli": "^3.1.2",

scripts/karma.conf.js

View file

@ -1,96 +1,101 @@
// Karma configuration // Karma configuration
var path = require("path"); const path = require("path");
var BROWSERS = ["HeadlessChrome", "HeadlessFirefox", "Safari"]; let BROWSERS = ["HeadlessChrome", "HeadlessFirefox", "Safari"];
if (process.env.BROWSER === "chrome"){ if (process.env.BROWSER === "chrome") {
BROWSERS = ["HeadlessChrome"]; BROWSERS = ["HeadlessChrome"];
} else if (process.env.BROWSER === "firefox"){ } else if (process.env.BROWSER === "firefox") {
BROWSERS = ["HeadlessFirefox"]; BROWSERS = ["HeadlessFirefox"];
} else if (process.env.BROWSER === "safari"){ } else if (process.env.BROWSER === "safari") {
BROWSERS = ["Safari"]; BROWSERS = ["Safari"];
} else { } else {
BROWSERS = ["HeadlessChrome", "HeadlessFirefox"]; BROWSERS = ["HeadlessChrome", "HeadlessFirefox"];
} }
module.exports = function(config){ module.exports = function(config) {
var configuration = { const configuration = {
// base path that will be used to resolve all patterns (eg. files, exclude) // base path that will be used to resolve all patterns (eg. files, exclude)
basePath : "../", basePath : "../",
// frameworks to use // frameworks to use
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter // available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks : ["mocha"], frameworks : ["mocha", "karma-typescript"],
// list of files / patterns to load in the browser // list of files / patterns to load in the browser
files : [ files : [
"test/test.js", "test/**/*.ts",
{ pattern : "test/audio/*", included : false }, "Tone/**/*.ts",
{ pattern : "test/audio/*/*", included : false }, { pattern : "test/audio/**", included : false },
{ pattern : "test/html/*", included : false }, { pattern : "test/html/**", included : false },
{ pattern : "build/*", included : false },
], ],
// Karma Typescript compiler options
karmaTypescriptConfig: {
compilerOptions : {
// baseUrl: ".",
// module: "commonjs",
// paths : {
// "../Tone" : ["./Tone"],
// // "@tonejs/plot": ["./node_modules/@tonejs/plot/dist/index.d.ts"],
// },
},
bundlerOptions: {
resolve: {
directories: ["Tone", "node_modules", "test"],
},
},
coverageOptions : {
exclude: /test\/.*\.ts$/i,
},
reports: {
html: path.resolve(__dirname, "../coverage"),
lcovonly: path.resolve(__dirname, "../coverage"),
},
tsconfig: "./tsconfig.json",
},
// list of files to exclude // list of files to exclude
exclude : [], exclude : [
"node_modules/*",
],
// preprocess matching files before serving them to the browser // preprocess matching files before serving them to the browser
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
preprocessors : { preprocessors: {
"test/test.js" : ["webpack", "sourcemap"], "**/*.ts": "karma-typescript",
// "Tone/**/*.ts": "coverage",
}, },
// test results reporter to use // test results reporter to use
// possible values: 'dots', 'progress' // possible values: 'dots', 'progress'
// available reporters: https://npmjs.org/browse/keyword/karma-reporter // available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters : ["dots", "coverage"], reporters : ["dots", "karma-typescript"],
coverageReporter : { // coverageReporter : {
type : "lcov", // type : "lcov",
dir : "./coverage" // dir: path.resolve(__dirname, "../coverage"),
}, // },
//plugins //plugins
plugins : [ plugins : [
"karma-coverage", "karma-typescript",
// "karma-coverage",
"karma-mocha", "karma-mocha",
"karma-webpack",
"karma-chrome-launcher", "karma-chrome-launcher",
"karma-firefox-launcher", "karma-firefox-launcher",
"karma-safari-launcher", "karma-safari-launcher",
"karma-sourcemap-loader", // "karma-sourcemap-loader",
], ],
client : { client : {
mocha : { mocha : {
grep: "Gain",
timeout : 30000,
reporter : "html", // change Karma's debug.html to the mocha web reporter reporter : "html", // change Karma's debug.html to the mocha web reporter
ui : "bdd" ui : "bdd",
}
}, },
//webpack
webpack : {
mode : "development",
resolve : {
modules : [
path.resolve(__dirname, "../node_modules"),
path.resolve(__dirname, "../"),
path.resolve(__dirname, "../test")
],
},
module : {
rules : [
//enables correct coverage mapping
{
test : /\.js$/,
use : { loader : "istanbul-instrumenter-loader", query : { esModules : true } },
include : path.resolve(__dirname, "../Tone"),
exclude : path.resolve(__dirname, "../Tone/shim")
}
]
},
devtool : "inline-source-map"
}, },
// web server port // web server port
@ -104,7 +109,7 @@ module.exports = function(config){
// level of logging // level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel : config.LOG_ERROR, logLevel: config.LOG_ERROR,
// enable / disable watching file and executing tests whenever any file changes // enable / disable watching file and executing tests whenever any file changes
autoWatch : false, autoWatch : false,
@ -127,17 +132,18 @@ module.exports = function(config){
customLaunchers : { customLaunchers : {
HeadlessChrome : { HeadlessChrome : {
base : "ChromeHeadless", base : "ChromeHeadless",
flags : ["--no-sandbox", "--use-fake-ui-for-media-stream", "--use-fake-device-for-media-stream", "--autoplay-policy=no-user-gesture-required"] flags : ["--no-sandbox", "--use-fake-ui-for-media-stream", "--use-fake-device-for-media-stream",
"--autoplay-policy=no-user-gesture-required"],
}, },
HeadlessFirefox : { HeadlessFirefox : {
base : "Firefox", base : "Firefox",
flags : ["-headless"], flags : ["-headless"],
prefs : { prefs : {
"focusmanager.testmode" : true,
"media.navigator.permission.disabled" : true, "media.navigator.permission.disabled" : true,
"focusmanager.testmode" : true },
} },
} },
}
}; };
config.set(configuration); config.set(configuration);

109
test/core/Connect.ts Normal file
View file

@ -0,0 +1,109 @@
import { PassesAudio } from "@tonejs/plot";
import { expect } from "chai";
import { connect, disconnect } from "../../Tone/core/Connect";
import { Gain } from "../../Tone/node/Gain";
describe("Connect", () => {
context("native node", () => {
it("can create a connection", async () => {
expect(await PassesAudio((context, input, output) => {
connect(input, output);
})).to.equal(true);
});
it("can disconnect two nodes", async () => {
expect(await PassesAudio((context, input, output) => {
connect(input, output);
disconnect(input, output);
})).to.equal(false);
});
it("can disconnect a node", async () => {
expect(await PassesAudio((context, input, output) => {
connect(input, output);
disconnect(input);
})).to.equal(false);
});
it("can connect one channel to another", async () => {
expect(await PassesAudio((context, input, output) => {
const merge = context.createChannelMerger(2);
const split = context.createChannelSplitter(2);
connect(input, merge, 0, 1);
connect(merge, split, 0, 0);
connect(split, output, 1, 0);
})).to.equal(true);
});
it("can disconnect from an explicit channel", async () => {
expect(await PassesAudio((context, input, output) => {
const merge = context.createChannelMerger(2);
const split = context.createChannelSplitter(2);
connect(input, merge, 0, 1);
connect(merge, split, 0, 0);
connect(split, output, 1, 0);
disconnect(split, output, 1, 0);
})).to.equal(false);
});
it("throws an error if things aren't connected", async () => {
let threwError = false;
await PassesAudio((context, input, output) => {
disconnect(input, output);
}).catch(() => threwError = true);
expect(threwError).to.equal(true);
});
it("throws an error if things aren't connected to a specific channel", async () => {
let threwError = false;
await PassesAudio((context, input, output) => {
const merge = context.createChannelMerger(2);
const split = context.createChannelSplitter(2);
connect(input, merge, 0, 1);
connect(merge, split, 0, 0);
connect(split, output, 1, 0);
disconnect(split, output, 0, 0);
}).catch(() => threwError = true);
expect(threwError).to.equal(true);
});
});
context("ToneAudioNode", () => {
it("can create a connection", async () => {
expect(await PassesAudio((context, input, output) => {
const gain = new Gain({
context,
});
connect(input, gain);
connect(gain, output);
})).to.equal(true);
});
it("can disconnect a node", async () => {
expect(await PassesAudio((context, input, output) => {
const gain = new Gain({
context,
});
connect(input, gain);
connect(gain, output);
connect(gain, output);
disconnect(gain);
})).to.equal(false);
});
it("can disconnect a node explicitly", async () => {
expect(await PassesAudio((context, input, output) => {
const gain = new Gain({
context,
});
connect(input, gain);
connect(gain, output);
connect(gain, output);
disconnect(gain, output);
})).to.equal(false);
});
});
});

View file

@ -0,0 +1,86 @@
import { expect } from "chai";
import { Ticker } from "../../Tone/core/ContextTicker";
import { ONLINE_TESTING } from "../helper/Supports";
describe("Ticker", () => {
function empty(): void {
// do nothing
}
it("can be created and disposed", () => {
const ticker = new Ticker(empty, "offline", 1);
ticker.dispose();
});
it("can adjust the type", () => {
const ticker = new Ticker(empty, "worker", 0.1);
expect(ticker.type).to.equal("worker");
ticker.type = "timeout";
expect(ticker.type).to.equal("timeout");
ticker.type = "offline";
expect(ticker.type).to.equal("offline");
ticker.dispose();
});
it("can get/set the updateInterval", () => {
const ticker = new Ticker(empty, "worker", 0.1);
expect(ticker.updateInterval).to.equal(0.1);
ticker.updateInterval = 0.5;
expect(ticker.updateInterval).to.equal(0.5);
ticker.dispose();
});
if (ONLINE_TESTING) {
context("timeout", () => {
it("provides a callback when set to timeout", done => {
const ticker = new Ticker(() => {
ticker.dispose();
done();
}, "timeout", 0.01);
});
it("can adjust the interval when set to timeout", (done) => {
const ticker = new Ticker(() => {
ticker.dispose();
done();
}, "timeout", 0.01);
ticker.updateInterval = 0.1;
});
});
}
context("worker", () => {
it("provides a callback when set to worker", done => {
const ticker = new Ticker(() => {
ticker.dispose();
done();
}, "worker", 0.01);
});
it("falls back to timeout if the constructor throws an error", done => {
const URL = window.URL;
// @ts-ignore
window.URL = null;
const ticker = new Ticker(() => {
expect(ticker.type).to.equal("timeout");
ticker.dispose();
window.URL = URL;
done();
}, "worker", 0.01);
});
it("can adjust the interval when set to worker", (done) => {
const ticker = new Ticker(() => {
ticker.dispose();
done();
}, "worker", 0.01);
ticker.updateInterval = 0.1;
});
});
});

87
test/core/Emitter.ts Normal file
View file

@ -0,0 +1,87 @@
import { expect } from "chai";
import { Emitter } from "../../Tone/core/Emitter";
describe("Emitter", () => {
it("can be created and disposed", () => {
const emitter = new Emitter();
emitter.dispose();
});
it("can bind events", done => {
const emitter = new Emitter();
emitter.on("something", () => {
done();
emitter.dispose();
});
emitter.emit("something");
emitter.dispose();
});
it("can unbind events", () => {
const emitter = new Emitter();
const callback = () => {
throw new Error("should not call this");
};
emitter.on("something", callback);
emitter.off("something", callback);
emitter.emit("something");
emitter.dispose();
});
it("removes all events when no callback is given", () => {
const emitter = new Emitter();
emitter.on("something", () => {
throw new Error("should call this");
});
emitter.on("something", () => {
throw new Error("should call this");
});
emitter.off("something");
emitter.emit("something");
emitter.off("something-else");
emitter.dispose();
});
it("can remove an event while emitting", done => {
const emitter = new Emitter();
emitter.on("something", () => {
emitter.off("something");
});
emitter.on("something-else", () => {
emitter.dispose();
done();
});
emitter.emit("something");
emitter.emit("something-else");
});
it("can invoke an event once", () => {
const emitter = new Emitter();
emitter.once("something", val => {
expect(val).to.equal(1);
});
emitter.emit("something", 1);
emitter.emit("something", 2);
emitter.dispose();
});
it("can pass arguments to the callback", done => {
const emitter = new Emitter();
emitter.on("something", (arg0, arg1) => {
expect(arg0).to.equal("A");
expect(arg1).to.equal("B");
emitter.dispose();
done();
});
emitter.emit("something", "A", "B");
});
// it("can mixin its methods to another object", done => {
// const emitter = {};
// Emitter.mixin(emitter);
// emitter.on("test", done);
// emitter.emit("test");
// });
});

593
test/core/Timeline.ts Normal file
View file

@ -0,0 +1,593 @@
// import Test from "helper/Test";
import { expect } from "chai";
import { Timeline } from "../../Tone/core/Timeline";
interface TimelineStateEvent {
state: string;
time: number;
}
interface TimelineNameEvent {
name: string;
time: number;
}
interface TimelineAddedEvent {
time: number;
added?: boolean;
}
interface TimelineValueEvent {
time: number;
value: any;
}
describe("Timeline", () => {
it("can be created and disposed", () => {
const sched = new Timeline();
sched.dispose();
});
it("accepts events into the timeline", () => {
const sched = new Timeline<TimelineStateEvent>();
sched.add({
state : "A",
time : 0,
});
sched.add({
state : "B",
time : 1,
});
sched.add({
state : "C",
time : 2,
});
sched.dispose();
});
it("can insert events in the timeline in the right order", () => {
const sched = new Timeline();
sched.add({
time : 0,
});
sched.add({
time : 2,
});
sched.add({
time : 1,
});
let index = 0;
const eventTimes = [0, 1, 2];
sched.forEach((event) => {
expect(event.time).to.equal(eventTimes[index++]);
});
sched.dispose();
});
it("can get the length of the timeline", () => {
const sched = new Timeline();
expect(sched.length).to.equal(0);
sched.add({
time : 0,
});
expect(sched.length).to.equal(1);
sched.dispose();
});
it("can remove items from the timeline", () => {
const sched = new Timeline();
const obj = { time : 0 };
sched.add(obj);
sched.add({
time : 2,
});
expect(sched.length).to.equal(2);
sched.remove(obj);
expect(sched.length).to.equal(1);
sched.dispose();
});
it("has no effect to remove an object which is not there", () => {
const sched = new Timeline();
sched.add({
time : 2,
});
sched.remove({time : 1});
expect(sched.length).to.equal(1);
sched.forEach((event) => {
sched.remove({time : 4});
});
expect(sched.length).to.equal(1);
sched.dispose();
});
it("can search for events in the timeline by time", () => {
const sched = new Timeline();
sched.add({
time : 0,
});
sched.add({
time : 2,
});
sched.add({
time : 1,
});
// expect(sched._search(0)).to.equal(0);
// expect(sched._search(0.01)).to.equal(0);
// expect(sched._search(1)).to.equal(1);
// expect(sched._search(1.01)).to.equal(1);
// expect(sched._search(2)).to.equal(2);
// expect(sched._search(20000)).to.equal(2);
// expect(sched._search(-1)).to.equal(-1);
sched.dispose();
});
it("can get a previous event", () => {
const sched = new Timeline();
const event0 = {
time : 0,
};
const event1 = {
time : 1,
};
sched.add(event0);
sched.add(event1);
expect(sched.previousEvent(event1)).to.equal(event0);
expect(sched.previousEvent(event0)).to.equal(null);
sched.dispose();
});
it("can get the scheduled event at the given time", () => {
const sched = new Timeline<TimelineStateEvent>();
sched.add({
state : "A",
time : 2,
});
sched.add({
state : "C",
time : 9.4,
});
sched.add({
state : "B",
time : 6,
});
expect(sched.get(0)).is.null;
const e1 = sched.get(2);
const e2 = sched.get(5.9);
const e3 = sched.get(6.1);
const e4 = sched.get(12);
if (e1 && e2 && e3 && e4) {
expect(e1.state).is.equal("A");
expect(e2.state).is.equal("A");
expect(e3.state).is.equal("B");
expect(e4.state).is.equal("C");
} else {
throw new Error("expected 4 events");
}
sched.dispose();
});
it("puts the second scheduled event after if two events are scheduled at the same time", () => {
const sched = new Timeline<TimelineNameEvent>();
sched.add({
name : "A",
time : 0,
});
sched.add({
name : "B",
time : 0,
});
const firstEvent0 = sched.get(0);
if (firstEvent0) {
expect(firstEvent0.name).is.equal("B");
}
sched.add({
name : "C",
time : 0,
});
const firstEvent1 = sched.get(0);
if (firstEvent1) {
expect(firstEvent1.name).is.equal("C");
}
sched.dispose();
});
it("can the next event after the given time", () => {
const sched = new Timeline<TimelineStateEvent>();
expect(sched.getAfter(0)).is.null;
sched.add({
state : "A",
time : 0.1,
});
sched.add({
state : "B",
time : 1.1,
});
sched.add({
state : "C",
time : 2.1,
});
const firstEvent = sched.getAfter(0);
const secondEvent = sched.getAfter(1);
if (firstEvent && secondEvent) {
expect(firstEvent.state).is.equal("A");
expect(secondEvent.state).is.equal("B");
} else {
throw new Error("should have 2 events");
}
expect(sched.getAfter(3)).is.null;
sched.dispose();
});
it("can the event before the event before the given time", () => {
const sched = new Timeline<TimelineStateEvent>();
expect(sched.getBefore(0)).is.null;
sched.add({
state : "A",
time : 0.1,
});
sched.add({
state : "B",
time : 1.1,
});
sched.add({
state : "C",
time : 2.1,
});
expect(sched.getBefore(0)).is.null;
const firstEvent = sched.getBefore(1.1);
const secondEvent = sched.getBefore(2.1);
const thirdEvent = sched.getBefore(3);
if (firstEvent && secondEvent && thirdEvent) {
expect(firstEvent.state).is.equal("A");
expect(secondEvent.state).is.equal("B");
expect(thirdEvent.state).is.equal("C");
} else {
throw new Error("should have 3 events");
}
sched.dispose();
});
it("can cancel an item", () => {
const sched = new Timeline();
sched.add({ time : 3 });
sched.add({ time : 5 });
sched.add({ time : 4 });
sched.add({ time : 8 });
sched.add({ time : 5 });
expect(sched.length).to.equal(5);
sched.cancel(10);
expect(sched.length).to.equal(5);
sched.cancel(5);
expect(sched.length).to.equal(2);
sched.cancel(3);
expect(sched.length).to.equal(0);
sched.dispose();
});
it("can cancel items after the given time", () => {
const sched = new Timeline();
for (let i = 0; i < 100; i++) {
sched.add({ time : 100 - i });
}
sched.cancel(10);
expect(sched.length).to.equal(9);
sched.cancel(5);
expect(sched.length).to.equal(4);
sched.cancel(0);
expect(sched.length).to.equal(0);
sched.dispose();
});
it("can cancel items before the given time", () => {
const sched = new Timeline();
for (let i = 0; i < 100; i++) {
sched.add({ time : i });
}
sched.cancelBefore(9);
expect(sched.length).to.equal(90);
sched.cancelBefore(10.1);
expect(sched.length).to.equal(89);
sched.cancelBefore(100);
expect(sched.length).to.equal(0);
sched.dispose();
});
it("has no problem with many items", () => {
const sched = new Timeline();
for (let i = 0; i < 10000; i++) {
sched.add({
time : i,
});
}
for (let j = 0; j < 1000; j++) {
const val = sched.get(j);
if (val) {
expect(val.time).to.equal(j);
}
}
sched.dispose();
});
it("can constrain the length of the timeline", () => {
const sched = new Timeline(4);
for (let i = 0; i < 10000; i++) {
sched.add({
time : i,
});
}
expect(sched.length).to.equal(4);
sched.dispose();
});
it("can peek and shift off the first element", () => {
const timeline = new Timeline<TimelineValueEvent>();
timeline.add({
time : 0,
value : "a",
});
timeline.add({
time : 1,
value : "b",
});
timeline.add({
time : 2,
value : "c",
});
expect(timeline.length).to.equal(3);
const peekValue = timeline.peek();
if (peekValue) {
expect(peekValue.value).to.equal("a");
} else {
throw new Error("should have value");
}
expect(timeline.length).to.equal(3);
const shiftValue = timeline.shift();
if (shiftValue) {
expect(shiftValue.value).to.equal("a");
} else {
throw new Error("should have value");
}
expect(timeline.length).to.equal(2);
const peekValue2 = timeline.peek();
if (peekValue2) {
expect(peekValue2.value).to.equal("b");
} else {
throw new Error("should have value");
}
const shiftValue2 = timeline.shift();
if (shiftValue2) {
expect(shiftValue2.value).to.equal("b");
} else {
throw new Error("should have value");
}
expect(timeline.length).to.equal(1);
timeline.dispose();
});
context("Iterators", () => {
it("iterates over all items and returns and item", () => {
const sched = new Timeline();
sched.add({ time : 0 });
sched.add({ time : 0.1 });
sched.add({ time : 0.2 });
sched.add({ time : 0.3 });
sched.add({ time : 0.4 });
let count = 0;
sched.forEach((event) => {
expect(event).to.be.an("object");
count++;
});
expect(count).to.equal(5);
sched.dispose();
});
it("iterates over all items before the given time", () => {
const sched = new Timeline();
sched.add({ time : 0 });
sched.add({ time : 0.1 });
sched.add({ time : 0.2 });
sched.add({ time : 0.3 });
sched.add({ time : 0.4 });
let count = 0;
sched.forEachBefore(0.3, (event) => {
expect(event).to.be.an("object");
expect(event.time).to.be.at.most(0.3);
count++;
});
expect(count).to.equal(4);
sched.dispose();
});
it("handles time ranges before the available objects", () => {
const sched = new Timeline();
sched.add({ time : 0.1 });
sched.add({ time : 0.2 });
sched.add({ time : 0.3 });
sched.add({ time : 0.4 });
let count = 0;
sched.forEachBefore(0, () => {
count++;
});
expect(count).to.equal(0);
sched.dispose();
});
it("iterates over all items after the given time", () => {
const sched = new Timeline();
sched.add({ time : 0 });
sched.add({ time : 0.1 });
sched.add({ time : 0.2 });
sched.add({ time : 0.3 });
sched.add({ time : 0.4 });
let count = 0;
sched.forEachAfter(0.1, (event) => {
expect(event).to.be.an("object");
expect(event.time).to.be.above(0.1);
count++;
});
expect(count).to.equal(3);
sched.dispose();
});
it("handles time ranges after the available objects", () => {
const sched = new Timeline();
sched.add({ time : 0.1 });
sched.add({ time : 0.2 });
sched.add({ time : 0.3 });
sched.add({ time : 0.4 });
let count = 0;
sched.forEachAfter(0.5, () => {
count++;
});
expect(count).to.equal(0);
sched.dispose();
});
it("handles time ranges before the first object", () => {
const sched = new Timeline();
sched.add({ time : 0.1 });
sched.add({ time : 0.2 });
sched.add({ time : 0.3 });
sched.add({ time : 0.4 });
let count = 0;
sched.forEachAfter(-Infinity, () => {
count++;
});
expect(count).to.equal(4);
sched.dispose();
});
it("can iterate after inclusive of the item at the given time", () => {
const sched = new Timeline();
sched.add({ time : 0.1 });
sched.add({ time : 0.2 });
sched.add({ time : 0.2 });
sched.add({ time : 0.3 });
sched.add({ time : 0.4 });
let count = 0;
sched.forEachFrom(0.2, () => {
count++;
});
expect(count).to.equal(4);
count = 0;
sched.forEachFrom(0.21, () => {
count++;
});
expect(count).to.equal(2);
count = 0;
sched.forEachFrom(0, () => {
count++;
});
expect(count).to.equal(5);
sched.dispose();
});
it("iterates over all items at the given time", () => {
const sched = new Timeline();
sched.add({ time : 0 });
sched.add({ time : 0 });
sched.add({ time : 0.2 });
sched.add({ time : 0.2 });
sched.add({ time : 0.4 });
let count = 0;
sched.forEachAtTime(0.1, (event) => {
count++;
});
expect(count).to.equal(0);
// and with an actual time
sched.forEachAtTime(0.2, (event) => {
expect(event.time).to.equal(0.2);
count++;
});
expect(count).to.equal(2);
sched.dispose();
});
it("can remove items during iterations", () => {
const sched = new Timeline();
for (let i = 0; i < 1000; i++) {
sched.add({ time : i });
}
sched.forEach((event) => {
sched.remove(event);
});
expect(sched.length).to.equal(0);
sched.dispose();
});
it("can add items during iteration", () => {
const sched = new Timeline<TimelineAddedEvent>();
for (let i = 0; i < 1000; i++) {
sched.add({ time : i });
}
let added = false;
sched.forEach((event) => {
if (!added) {
added = true;
sched.add({
added : true,
time : 10,
});
}
});
expect(sched.length).to.equal(1001);
sched.dispose();
});
it("can iterate between a time range", () => {
const sched = new Timeline();
sched.add({ time : 0.1 });
sched.add({ time : 0.2 });
sched.add({ time : 0.3 });
sched.add({ time : 0.4 });
let count = 0;
sched.forEachBetween(0.2, 0.4, (event) => {
count++;
expect(event.time).to.be.within(0.2, 0.3);
});
expect(count).to.equal(2);
count = 0;
sched.forEachBetween(0.21, 0.4, (event) => {
count++;
expect(event.time).to.be.within(0.21, 0.3);
});
expect(count).to.equal(1);
count = 0;
sched.forEachBetween(0.21, 0.39, (event) => {
count++;
expect(event.time).to.be.within(0.21, 0.39);
});
expect(count).to.equal(1);
count = 0;
sched.forEachBetween(0, 0.11, (event) => {
count++;
expect(event.time).to.be.within(0, 0.11);
});
expect(count).to.equal(1);
count = 0;
sched.forEachBetween(0, 0.09, (event) => {
count++;
expect(event.time).to.be.within(0, 0.09);
});
expect(count).to.equal(0);
count = 0;
sched.forEachBetween(0.41, 0.5, (event) => {
count++;
expect(event.time).to.be.within(0.41, 0.5);
});
expect(count).to.equal(0);
sched.dispose();
});
});
});

40
test/helper/Basic.ts Normal file
View file

@ -0,0 +1,40 @@
import { expect } from "chai";
import { Tone } from "../../Tone/core/Tone";
import { AudioProcessor } from "../../Tone/node/AudioProcessor";
export const testAudioContext = new OfflineAudioContext(1, 1, 11025);
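/**
 * Run the shared construction, disposal, and context checks
 * against the given constructor and constructor arguments.
 */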
export function BasicTests(Constr, ...args: any[]) {
context("Basic", () => {
it("can be created and disposed", () => {
const instance = new Constr(...args);
instance.dispose();
});
it("extends Tone", () => {
const instance = new Constr(...args);
expect(instance).to.be.an.instanceof(Tone);
instance.dispose();
});
it("can specify the AudioContext", () => {
const instance = new Constr(Object.assign({
context: testAudioContext,
}, ...args));
if (instance instanceof AudioProcessor) {
expect(instance.context).to.equal(testAudioContext);
// also check all of its members to see if they have the right context
for (const member in instance) {
if (instance[member] instanceof AudioProcessor) {
expect(instance[member].context).to.equal(testAudioContext);
}
}
}
instance.dispose();
});
});
}

10
test/helper/Connect.ts Normal file
View file

@ -0,0 +1,10 @@
import { Gain } from "../../Tone/node/Gain";
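// throwaway Gain nodes used to check that a node accepts outgoing and incoming connections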
export function connectFrom(): Gain {
return new Gain();
}
export function connectTo(): Gain {
return new Gain();
}

13
test/helper/ConstantOutput.ts Normal file
View file

@ -0,0 +1,13 @@
import { expect } from "chai";
import { Offline } from "./Offline";
/**
* Test that the output of the callback is a constant value
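* (to within the given threshold) over a short offline render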
*/
export async function ConstantOutput(
callback: (context: BaseAudioContext) => Promise<void> | void,
value: number, threshold = 0.01,
): Promise<void> {
const buffer = await Offline(callback, 0.01, 1);
expect(buffer.value()).to.be.closeTo(value, threshold);
}

17
test/helper/Dispose.ts Normal file
View file

@ -0,0 +1,17 @@
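/**
 * Assert that an instance has been fully disposed: every own, object-valued
 * property (other than the context and prototype members) should be null.
 */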
export function isDisposed(instance): void {
for (const prop in instance) {
const member = instance[prop];
if (typeof member !== "function" &&
typeof member !== "string" &&
typeof member !== "number" &&
typeof member !== "boolean" &&
typeof member !== "undefined" &&
prop !== "preset" &&
!(member instanceof AudioContext) &&
!instance.constructor.prototype[prop]) {
if (member !== null) {
throw Error("property was not completely disposed: " + prop);
}
}
}
}

15
test/helper/Offline.ts Normal file
View file

@ -0,0 +1,15 @@
import { Offline as PlotOffline } from "@tonejs/plot";
import { getContext, setContext } from "../../Tone/core/Global";
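/**
 * Render the callback with the @tonejs/plot offline renderer, temporarily
 * swapping in the offline context as the global Tone context while the callback runs.
 */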
export async function Offline(
callback: (context: BaseAudioContext) => Promise<void> | void,
duration = 0.1, channels = 1,
) {
const buffer = await PlotOffline(async context => {
const originalContext = getContext();
setContext(context);
await callback(context);
setContext(originalContext);
}, duration, channels);
return buffer;
}

24
test/helper/PassAudio.ts Normal file
View file

@ -0,0 +1,24 @@
import { expect } from "chai";
import { ToneAudioNode } from "../../Tone/node/AudioNode";
import { Signal } from "../../Tone/signal/Signal";
import { Offline } from "../helper/Offline";
/**
* Make sure that the audio passes from input node
* to the destination node
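* The input signal steps from 0 to 1 halfway through the render,
* so the output must be silent in the first half and non-zero in the second half.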
*/
export function PassAudio(
callback: (input: ToneAudioNode) => void,
): Promise<void> {
const duration = 0.2;
return Offline(() => {
const sig = new Signal(0);
callback(sig);
sig.setValueAtTime(1, duration / 2);
}, 0.2, 1).then(buffer => {
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.001);
expect(buffer.getValueAtTime(duration / 2 - 0.01)).to.be.closeTo(0, 0.001);
expect(buffer.getValueAtTime(duration / 2 + 0.01)).to.not.equal(0);
expect(buffer.getValueAtTime(duration - 0.01)).to.not.equal(0);
});
}

41
test/helper/Supports.ts Normal file
View file

@ -0,0 +1,41 @@
import * as UserAgentParser from "ua-parser-js";
const parsed = new UserAgentParser().getBrowser();
const name = parsed.name as string;
const version = parseInt(parsed.major as string, 10);
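// compare the parsed browser name/version against a browser name and minimum version;
// tests guard browser-specific suites on the flags exported below
// (e.g. SCHEDULE_RAMP_AFTER_SET_TARGET in test/node/Param.ts)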
function is(browser: string, above = 0): boolean {
return name.includes(browser) && version >= above;
}
function isnt(browser: string, below = Infinity): boolean {
return !(name.includes(browser) && version <= below);
}
function isntVersion(browser: string, browserVersion?: number): boolean {
return name.includes(browser) && version !== browserVersion;
}
// can disconnect from a specific node
export const NODE_DISCONNECT = is("Chrome", 50);
// offline rendering matches Chrome closely;
// Chrome is the platform the reference files were rendered on,
// so it is the default for continuity testing
export const CHROME_AUDIO_RENDERING = is("Chrome");
// firefox does not correctly handle the situation where
// a linear/exponential ramp is scheduled after setTargetValueAtTime
export const SCHEDULE_RAMP_AFTER_SET_TARGET = is("Chrome");
// if the tests run in focus
export const ONLINE_TESTING = isntVersion("Chrome", 71);
// the close method resolves a promise
export const AUDIO_CONTEXT_CLOSE_RESOLVES = isnt("Firefox") && isnt("Safari", 10);
// if it supports gUM testing
export const GET_USER_MEDIA = isnt("Safari");

177
test/node/AudioNode.ts Normal file
View file

@ -0,0 +1,177 @@
import { expect } from "chai";
import { ToneAudioNode } from "../../Tone/node/AudioNode";
import { isDisposed } from "../helper/Dispose";
describe("ToneAudioNode", () => {
// context("constructor", () => {
// it("can be created and disposed", () => {
// const node = new ToneAudioNode();
// node.dispose();
// isDisposed(node);
// });
// });
// context("properties", () => {
// it("handles input and output connections", () => {
// const node = new ToneAudioNode({
// numberOfInputs : 1,
// numberOfOutputs: 2,
// });
// expect(node.numberOfInputs).to.equal(1);
// expect(node.numberOfOutputs).to.equal(2);
// node.dispose();
// });
// it("reports its inputs and outputs", () => {
// const node0 = new ToneAudioNode({
// numberOfInputs : 3,
// numberOfOutputs : 2,
// });
// expect(node0.numberOfInputs).to.equal(3);
// expect(node0.numberOfOutputs).to.equal(2);
// node0.dispose();
// const node1 = new ToneAudioNode({
// numberOfInputs : 0,
// numberOfOutputs : 1,
// });
// expect(node1.numberOfInputs).to.equal(0);
// expect(node1.numberOfOutputs).to.equal(1);
// node1.dispose();
// const node2 = new ToneAudioNode({
// numberOfInputs : 1,
// numberOfOutputs : 0,
// });
// expect(node2.numberOfInputs).to.equal(1);
// expect(node2.numberOfOutputs).to.equal(0);
// node2.dispose();
// });
// it("is able to get and set the channelCount, channelCountMode and channelInterpretation", () => {
// const node0 = new ToneAudioNode({
// channelCount : 4,
// numberOfInputs: 1,
// numberOfOutputs: 1,
// });
// expect(node0.channelCount).to.equal(4);
// node0.channelCount = 1;
// expect(node0.channelCount).to.equal(1);
// node0.dispose();
// const node1 = new ToneAudioNode({
// numberOfInputs : 1,
// numberOfOutputs : 2,
// });
// expect(node1.channelCountMode).to.equal("max");
// node1.channelCountMode = "explicit";
// expect(node1.channelCountMode).to.equal("explicit");
// node1.dispose();
// const node2 = new ToneAudioNode({
// channelInterpretation : "speakers",
// numberOfInputs : 2,
// numberOfOutputs : 0,
// });
// expect(node2.channelInterpretation).to.equal("speakers");
// node2.channelInterpretation = "discrete";
// expect(node2.channelInterpretation).to.equal("discrete");
// node2.dispose();
// });
// });
// context("connections", () => {
// it("can connect with args", () => {
// const nodeA = new ToneAudioNode({
// numberOfOutputs : 1,
// });
// const nodeB = new ToneAudioNode({
// numberOfInputs: 1,
// });
// nodeA.connect(nodeB, 0, 0);
// nodeA.dispose();
// nodeB.dispose();
// });
// it("can connect with no args", () => {
// const nodeA = new ToneAudioNode({
// numberOfOutputs: 1,
// });
// const nodeB = new ToneAudioNode({
// numberOfInputs: 1,
// });
// nodeA.connect(nodeB);
// nodeA.dispose();
// nodeB.dispose();
// });
// it("can connect with one arg", () => {
// const nodeA = new ToneAudioNode({
// numberOfOutputs: 2,
// });
// const nodeB = new ToneAudioNode({
// numberOfInputs: 1,
// });
// nodeA.connect(nodeB, 1);
// nodeA.dispose();
// nodeB.dispose();
// });
// it("Tone nodes can disconnect from everything with no args", () => {
// const nodeA = new ToneAudioNode({
// numberOfOutputs: 1,
// });
// const nodeB = new ToneAudioNode({
// numberOfInputs: 1,
// });
// nodeA.connect(nodeB);
// nodeA.disconnect();
// nodeA.dispose();
// nodeB.dispose();
// });
// it("Tone nodes can disconnect from a specific node", () => {
// const nodeA = new ToneAudioNode({
// numberOfOutputs: 1,
// });
// const nodeB = new ToneAudioNode({
// numberOfInputs: 1,
// });
// nodeA.connect(nodeB);
// nodeA.disconnect(nodeB);
// nodeA.dispose();
// nodeB.dispose();
// });
// it("Tone nodes can disconnect from a specific node and input/output", () => {
// const nodeA = new ToneAudioNode({
// numberOfOutputs: 2,
// });
// const nodeB = new ToneAudioNode({
// numberOfInputs: 2,
// });
// nodeA.connect(nodeB, 1, 1);
// nodeA.disconnect(nodeB, 1, 1);
// nodeA.dispose();
// nodeB.dispose();
// });
// it("throws an error if they are not connected", () => {
// // const nodeA = new ToneAudioNode({
// // numberOfOutputs: 2,
// // });
// // const nodeB = new ToneAudioNode({
// // numberOfInputs: 2,
// // });
// // nodeA.connect(nodeB, 1, 1);
// // expect(() => {
// // nodeA.disconnect(nodeB, 10, 0);
// // }).throws(Error);
// // nodeA.dispose();
// // nodeB.dispose();
// });
// });
});

94
test/node/Delay.ts Normal file
View file

@ -0,0 +1,94 @@
// import Test from "helper/Test";
import { PassesAudio } from "@tonejs/plot";
import { expect } from "chai";
import { connect } from "Tone/core/Connect";
import { Delay } from "../../Tone/node/Delay";
import { BasicTests } from "../helper/Basic";
import { connectFrom, connectTo } from "../helper/Connect";
describe("Delay", () => {
BasicTests(Delay);
it("can be created and disposed", () => {
const delay = new Delay();
delay.dispose();
});
it("handles input and output connections", () => {
const delay = new Delay();
delay.connect(connectTo());
connectFrom().connect(delay);
connectFrom().connect(delay.delayTime);
delay.dispose();
});
it("can be constructed with an options object", () => {
const delay = new Delay({
delayTime: 0.3,
maxDelay: 2,
});
expect(delay.delayTime.value).to.be.closeTo(0.3, 0.001);
expect(delay.maxDelay).to.equal(2);
delay.dispose();
});
it("if the constructor delay time is greater than maxDelay, use that as the maxDelay time", () => {
const delay = new Delay(3);
expect(delay.delayTime.value).to.be.closeTo(3, 0.001);
delay.dispose();
});
it("can set the delayTime value", () => {
const delay = new Delay();
expect(delay.delayTime.value).to.be.closeTo(0, 0.001);
delay.delayTime.value = 0.2;
expect(delay.delayTime.value).to.be.closeTo(0.2, 0.001);
delay.dispose();
});
it("can be constructed with options object", () => {
const delay = new Delay({
delayTime: 0.4,
});
expect(delay.delayTime.value).to.be.closeTo(0.4, 0.001);
delay.dispose();
});
it("can be constructed with an initial value", () => {
const delay = new Delay(0.3);
expect(delay.delayTime.value).to.be.closeTo(0.3, 0.001);
delay.dispose();
});
it("can set the units", () => {
const delay = new Delay(0);
expect(delay.delayTime.value).to.be.closeTo(0, 0.001);
delay.dispose();
});
it("can get the value using 'get'", () => {
const delay = new Delay(2);
const value = delay.get();
expect(value.delayTime).to.be.closeTo(2, 0.001);
delay.dispose();
});
it("can set the value using 'set'", () => {
const delay = new Delay(5);
delay.set({
delayTime: 4,
});
expect(delay.delayTime.value).to.be.closeTo(4, 0.001);
delay.dispose();
});
it("passes audio through", async () => {
expect(await PassesAudio((context, input, output) => {
const delay = new Delay({
context,
}).connect(output);
connect(input, delay);
})).to.equal(true);
});
});

75
test/node/Gain.ts Normal file
View file

@ -0,0 +1,75 @@
import { expect } from "chai";
import { Gain } from "../../Tone/node/Gain";
import { BasicTests } from "../helper/Basic";
import { connectFrom, connectTo } from "../helper/Connect";
import { PassAudio } from "../helper/PassAudio";
describe("Gain", () => {
BasicTests(Gain);
it("can be created and disposed", () => {
const gainNode = new Gain();
gainNode.dispose();
});
it("handles input and output connections", () => {
const gainNode = new Gain();
gainNode.connect(connectTo());
connectFrom().connect(gainNode);
connectFrom().connect(gainNode.gain);
gainNode.dispose();
});
it("can set the gain value", () => {
const gainNode = new Gain();
expect(gainNode.gain.value).to.be.closeTo(1, 0.001);
gainNode.gain.value = 0.2;
expect(gainNode.gain.value).to.be.closeTo(0.2, 0.001);
gainNode.dispose();
});
it("can be constructed with options object", () => {
const gainNode = new Gain({
gain : 0.4,
});
expect(gainNode.gain.value).to.be.closeTo(0.4, 0.001);
gainNode.dispose();
});
it("can be constructed with an initial value", () => {
const gainNode = new Gain(3);
expect(gainNode.gain.value).to.be.closeTo(3, 0.001);
gainNode.dispose();
});
it("can set the units", () => {
const gainNode = new Gain(0, "decibels");
expect(gainNode.gain.value).to.be.closeTo(0, 0.001);
expect(gainNode.gain.units).to.equal("decibels");
gainNode.dispose();
});
it("can get the value using 'get'", () => {
const gainNode = new Gain(5);
const value = gainNode.get();
expect(value.gain).to.be.closeTo(5, 0.001);
gainNode.dispose();
});
it("can set the value using 'set'", () => {
const gainNode = new Gain(5);
gainNode.set({
gain: 4,
});
expect(gainNode.gain.value).to.be.closeTo(4, 0.001);
gainNode.dispose();
});
it("passes audio through", () => {
return PassAudio((input) => {
const gainNode = new Gain().toMaster();
input.connect(gainNode);
});
});
});

435
test/node/Param.ts Normal file
View file

@ -0,0 +1,435 @@
import { Compare, Offline, Plot } from "@tonejs/plot";
import { expect } from "chai";
import { getContext } from "../../Tone/core/Global";
import { Param } from "../../Tone/node/Param";
import { BasicTests, testAudioContext } from "../helper/Basic";
import { SCHEDULE_RAMP_AFTER_SET_TARGET } from "../helper/Supports";
const audioContext = getContext();
describe("Param", () => {
BasicTests(Param, {
param: testAudioContext.createOscillator().frequency,
});
context("constructor", () => {
it("can be created and disposed", async () => {
await Offline(context => {
const param = new Param<"time">({
context,
param: context.createConstantSource().offset,
units: "time",
});
expect(param.getValueAtTime(0)).to.equal(1);
param.dispose();
});
});
it("can pass in a value", async () => {
await Offline(context => {
const param = new Param({
context,
param: context.createConstantSource().offset,
value : 1.1,
});
expect(param.getValueAtTime(0)).to.equal(1.1);
param.dispose();
});
});
it ("requires a param in the constructor", () => {
expect(() => {
const param = new Param({
value: 1.1,
});
}).throws(Error);
});
});
context("Scheduling Curves", () => {
const sampleRate = 11025;
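// check that Param.getValueAtTime matches the rendered output buffer at every sample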
function matchesOutputCurve(param, outBuffer): void {
outBuffer.toArray()[0].forEach((sample, index) => {
try {
expect(param.getValueAtTime(index / sampleRate)).to.be.closeTo(sample, 0.1);
} catch (e) {
console.log(index / sampleRate);
throw e;
}
});
}
if (SCHEDULE_RAMP_AFTER_SET_TARGET) {
it("correctly handles setTargetAtTime followed by a ramp", async () => {
let param;
// this fails on FF
const testBuffer = await Offline(context => {
const source = context.createConstantSource();
source.connect(context.destination);
source.start(0);
param = new Param({
context,
param: source.offset,
});
param.setTargetAtTime(2, 0.5, 0.1);
expect(param.getValueAtTime(0.6)).to.be.closeTo(1.6, 0.1);
param.linearRampToValueAtTime(0.5, 0.7);
expect(param.getValueAtTime(0.6)).to.be.closeTo(0.75, 0.1);
}, 1.5, 1, sampleRate);
document.body.appendChild(await Plot.signal(testBuffer));
matchesOutputCurve(param, testBuffer);
});
it("schedules a value curve", async () => {
let param;
const testBuffer = await Offline(context => {
const source = context.createConstantSource();
source.connect(context.destination);
source.start(0);
param = new Param({
context,
param: source.offset,
units: "number",
value : 0,
});
param.setValueCurveAtTime([0, 0.5, 0, 1, 1.5], 0.1, 0.8, 0.5);
expect(param.getValueAtTime(0.91)).to.be.closeTo(0.75, 0.01);
}, 1, 1, sampleRate);
document.body.appendChild(await Plot.signal(testBuffer));
matchesOutputCurve(param, testBuffer);
});
it ("a mixture of scheduling curves", async () => {
let param;
const testBuffer = await Offline(context => {
const source = context.createConstantSource();
source.connect(context.destination);
source.start(0);
param = new Param({
context,
param: source.offset,
value : 0.1,
});
param.setValueAtTime(0, 0);
param.setValueAtTime(1, 0.1);
param.linearRampToValueAtTime(3, 0.2);
param.exponentialRampToValueAtTime(0.01, 0.3);
param.setTargetAtTime(-1, 0.35, 0.2);
param.cancelAndHoldAtTime(0.6);
param.rampTo(1.1, 0.2, 0.7);
param.exponentialRampTo(0, 0.1, 0.85);
param.setValueAtTime(0, 1);
param.linearRampTo(1, 0.2, 1);
param.targetRampTo(0, 0.1, 1.1);
param.setValueAtTime(4, 1.2);
param.cancelScheduledValues(1.2);
param.linearRampToValueAtTime(1, 1.3);
}, 1.5, 1, sampleRate);
document.body.appendChild(await Plot.signal(testBuffer));
matchesOutputCurve(param, testBuffer);
});
it("can cancel and hold", async () => {
let param;
const testBuffer = await Offline(context => {
const source = context.createConstantSource();
source.connect(context.destination);
source.start(0);
param = new Param({
context,
param: source.offset,
value: 0.1,
});
param.setValueAtTime(0, 0);
param.setValueAtTime(1, 0.2);
param.cancelAndHoldAtTime(0.1);
param.linearRampToValueAtTime(1, 0.3);
param.cancelAndHoldAtTime(0.2);
expect(param.getValueAtTime(0.2)).to.be.closeTo(0.5, 0.001);
param.exponentialRampToValueAtTime(0, 0.4);
param.cancelAndHoldAtTime(0.25);
expect(param.getValueAtTime(0.25)).to.be.closeTo(0.033, 0.001);
param.setTargetAtTime(1, 0.3, 0.1);
param.cancelAndHoldAtTime(0.4);
expect(param.getValueAtTime(0.4)).to.be.closeTo(0.644, 0.001);
param.setValueAtTime(0, 0.45);
param.setValueAtTime(1, 0.48);
param.cancelAndHoldAtTime(0.45);
expect(param.getValueAtTime(0.45)).to.be.closeTo(0, 0.001);
}, 0.5, 1, sampleRate);
matchesOutputCurve(param, testBuffer);
// document.body.appendChild(await Plot.signal(testBuffer));
});
it ("matches known values", async () => {
await Compare.toFile(context => {
const source = context.createConstantSource();
source.connect(context.destination);
source.start(0);
const param = new Param({
context,
param: source.offset,
value: 0.1,
});
param.setValueAtTime(0, 0);
param.setValueAtTime(1, 0.2);
param.cancelAndHoldAtTime(0.1);
param.linearRampToValueAtTime(1, 0.3);
param.cancelAndHoldAtTime(0.2);
param.exponentialRampToValueAtTime(0, 0.4);
param.cancelAndHoldAtTime(0.25);
param.setTargetAtTime(1, 0.3, 0.1);
param.cancelAndHoldAtTime(0.4);
}, "/base/test/audio/param/curve_0.wav", 0.01, 0.5, 1, 11025);
});
}
});
context("Units", () => {
it("can be created with specific units", () => {
const gain = audioContext.createGain();
const param = new Param<"bpm">({
context: audioContext,
param : gain.gain,
units : "bpm",
});
expect(param.units).to.equal("bpm");
param.dispose();
});
it("can evaluate the given units", () => {
const gain = audioContext.createGain();
const param = new Param<"decibels">({
context: audioContext,
param: gain.gain,
units: "decibels",
});
param.value = 0.5;
expect(param.value).to.be.closeTo(0.5, 0.001);
param.dispose();
});
it("can be forced to not convert", async () => {
const testBuffer = await Offline(context => {
const source = context.createConstantSource();
source.connect(context.destination);
source.start(0);
const param = new Param({
context,
convert : false,
param: source.offset,
units : "decibels",
});
param.value = -10;
expect(param.value).to.be.closeTo(-10, 0.01);
}, 0.001, 1);
expect(testBuffer.getValueAtTime(0)).to.be.closeTo(-10, 0.01);
});
});
context("Unit Conversions", () => {
function testUnitConversion(units: Unit, inputValue: any, inputVerification: number, outputValue: number): void {
it(`converts to ${units}`, async () => {
const testBuffer = await Offline(context => {
const source = context.createConstantSource();
source.connect(context.destination);
source.start(0);
const param = new Param({
context,
param: source.offset,
units,
});
param.value = inputValue;
expect(param.value).to.be.closeTo(inputVerification, 0.01);
}, 0.001, 1);
expect(testBuffer.getValueAtTime(0)).to.be.closeTo(outputValue, 0.01);
});
}
testUnitConversion("number", 3, 3, 3);
testUnitConversion("decibels", -10, -10, 0.31);
testUnitConversion("decibels", -20, -20, 0.1);
testUnitConversion("decibels", -100, -100, 0);
testUnitConversion("gain", 1.2, 1.2, 1.2);
testUnitConversion("positive", 1.5, 1.5, 1.5);
testUnitConversion("positive", -1.5, 0, 0);
testUnitConversion("time", 2, 2, 2);
testUnitConversion("time", 0, 0, 0);
testUnitConversion("frequency", 20, 20, 20);
testUnitConversion("frequency", 0.1, 0.1, 0.1);
testUnitConversion("normalRange", -1, 0, 0);
testUnitConversion("normalRange", 0.5, 0.5, 0.5);
testUnitConversion("normalRange", 1.5, 1, 1);
testUnitConversion("audioRange", -1.1, -1, -1);
testUnitConversion("audioRange", 0.5, 0.5, 0.5);
testUnitConversion("audioRange", 1.5, 1, 1);
});
context("min/maxValue", () => {
function testMinMaxValue(units: Unit, min, max): void {
it(`has proper min/max for ${units}`, () => {
const source = audioContext.createConstantSource();
source.connect(audioContext.destination);
const param = new Param({
context : audioContext,
param: source.offset,
units,
});
expect(param.minValue).to.be.equal(min);
expect(param.maxValue).to.be.equal(max);
});
}
// number, decibels, normalRange, audioRange, gain
// positive, time, frequency, transportTime, ticks, bpm, degrees, samples, hertz
const rangeMax = 3.4028234663852886e+38;
testMinMaxValue("number", -rangeMax, rangeMax);
testMinMaxValue("decibels", -Infinity, rangeMax);
testMinMaxValue("normalRange", 0, 1);
testMinMaxValue("audioRange", -1, 1);
testMinMaxValue("gain", -rangeMax, rangeMax);
testMinMaxValue("positive", 0, rangeMax);
testMinMaxValue("time", 0, rangeMax);
testMinMaxValue("frequency", 0, rangeMax);
testMinMaxValue("transportTime", 0, rangeMax);
testMinMaxValue("ticks", 0, rangeMax);
testMinMaxValue("bpm", 0, rangeMax);
testMinMaxValue("degrees", -rangeMax, rangeMax);
testMinMaxValue("samples", 0, rangeMax);
testMinMaxValue("hertz", 0, rangeMax);
});
// const allSchedulingMethods = ['setValueAtTime', 'linearRampToValueAtTime', 'exponentialRampToValueAtTime']
context("setValueAtTime", () => {
function testSetValueAtTime(units: Unit, value0, value1, value2): void {
it(`can schedule value with units ${units}`, async () => {
const testBuffer = await Offline(context => {
const source = context.createConstantSource();
source.connect(context.destination);
source.start(0);
const param = new Param({
context,
param: source.offset,
units,
});
param.setValueAtTime(value0, 0);
param.setValueAtTime(value1, 0.01);
param.setValueAtTime(value2, 0.02);
expect(param.getValueAtTime(0)).to.be.closeTo(value0, 0.01);
expect(param.getValueAtTime(0.01)).to.be.closeTo(value1, 0.01);
expect(param.getValueAtTime(0.02)).to.be.closeTo(value2, 0.01);
}, 0.022, 1);
expect(testBuffer.getValueAtTime(0)).to.be.closeTo(0, 0.01);
expect(testBuffer.getValueAtTime(0.011)).to.be.closeTo(1, 0.01);
expect(testBuffer.getValueAtTime(0.021)).to.be.closeTo(0.5, 0.01);
});
}
const allUnits: Unit[] = ["number", "decibels", "normalRange", "audioRange", "gain",
"positive", "time", "frequency", "transportTime", "ticks", "bpm", "degrees", "samples", "hertz"];
allUnits.forEach(unit => {
if (unit === "decibels") {
testSetValueAtTime(unit, -100, 0, -6);
} else {
testSetValueAtTime(unit, 0, 1, 0.5);
}
});
});
["linearRampToValueAtTime", "exponentialRampToValueAtTime"].forEach(method => {
context(method, () => {
function testRampToValueAtTime(units: Unit, value0, value1, value2): void {
it(`can schedule value with units ${units}`, async () => {
const testBuffer = await Offline(context => {
const source = context.createConstantSource();
source.connect(context.destination);
source.start(0);
const param = new Param({
context,
param: source.offset,
units,
});
param.setValueAtTime(value0, 0);
param[method](value1, 0.01);
param[method](value2, 0.02);
expect(param.getValueAtTime(0)).to.be.closeTo(value0, 0.01);
expect(param.getValueAtTime(0.01)).to.be.closeTo(value1, 0.01);
expect(param.getValueAtTime(0.02)).to.be.closeTo(value2, 0.01);
}, 0.022, 1);
expect(testBuffer.getValueAtTime(0)).to.be.closeTo(1, 0.01);
expect(testBuffer.getValueAtTime(0.01)).to.be.closeTo(0.7, 0.01);
expect(testBuffer.getValueAtTime(0.02)).to.be.closeTo(0, 0.01);
});
}
const allUnits: Unit[] = ["number", "decibels", "normalRange", "audioRange", "gain",
"positive", "time", "frequency", "transportTime", "ticks", "bpm", "degrees", "samples", "hertz"];
allUnits.forEach(unit => {
if (unit === "decibels") {
testRampToValueAtTime(unit, 0, -3, -100);
} else {
testRampToValueAtTime(unit, 1, 0.7, 0);
}
});
});
});
["linearRampTo", "exponentialRampTo", "rampTo", "targetRampTo"].forEach(method => {
context(method, () => {
function testRampToValueAtTime(units: Unit, value0, value1, value2): void {
it(`can schedule value with units ${units}`, async () => {
const testBuffer = await Offline(context => {
const source = context.createConstantSource();
source.connect(context.destination);
source.start(0);
const param = new Param({
context,
param: source.offset,
units,
value: value0,
});
param[method](value1, 0.009, 0);
param[method](value2, 0.01, 0.01);
expect(param.getValueAtTime(0)).to.be.closeTo(value0, 0.02);
expect(param.getValueAtTime(0.01)).to.be.closeTo(value1, 0.02);
if (units !== "decibels") {
expect(param.getValueAtTime(0.025)).to.be.closeTo(value2, 0.01);
}
}, 0.021, 1);
// document.body.appendChild(await Plot.signal(testBuffer));
expect(testBuffer.getValueAtTime(0)).to.be.closeTo(1, 0.01);
expect(testBuffer.getValueAtTime(0.01)).to.be.closeTo(0.7, 0.01);
expect(testBuffer.getValueAtTime(0.02)).to.be.closeTo(0, 0.01);
});
}
const allUnits: Unit[] = ["number", "decibels", "normalRange", "audioRange", "gain",
"positive", "time", "frequency", "transportTime", "ticks", "bpm", "degrees", "samples", "hertz"];
allUnits.forEach(unit => {
if (unit === "decibels") {
testRampToValueAtTime(unit, 0, -3, -100);
} else {
testRampToValueAtTime(unit, 1, 0.7, 0);
}
});
});
});
});

454
test/signal/Signal.ts Normal file
View file

@ -0,0 +1,454 @@
import { expect } from "chai";
import { Gain } from "../../Tone/node/Gain";
import { Signal } from "../../Tone/signal/Signal";
import { BasicTests } from "../helper/Basic";
import { connectFrom, connectTo } from "../helper/Connect";
import { ConstantOutput } from "../helper/ConstantOutput";
import { Offline } from "../helper/Offline";
describe("Signal", () => {
BasicTests(Signal);
context("Signal Rate Value", () => {
it("handles input and output connections", () => {
const signal = new Signal();
connectFrom().connect(signal);
signal.connect(connectTo());
signal.dispose();
});
it("can be created with an options object", () => {
const signal = new Signal({
units: "positive",
value: 0.2,
});
expect(signal.value).to.be.closeTo(0.2, 0.001);
expect(signal.units).to.equal("positive");
signal.dispose();
});
it("can start with a value initially", () => {
const signal = new Signal(2);
expect(signal.value).to.equal(2);
signal.dispose();
});
it("can set a value", () => {
const signal = new Signal(0);
signal.value = 10;
expect(signal.value).to.equal(10);
signal.dispose();
});
it("outputs a constant signal", () => {
return ConstantOutput((context) => {
const sig = new Signal(2.5).toMaster();
}, 2.5);
});
it("takes on another signal's value when connected", () => {
return ConstantOutput((context) => {
const sigA = new Signal(1).toMaster();
const sigB = new Signal(3);
sigB.connect(sigA);
}, 3);
});
it("takes the first signals value when many values are chained", () => {
return ConstantOutput((context) => {
const sigA = new Signal(3).toMaster();
const sigB = new Signal(1).connect(sigA);
const sigC = new Signal(2).connect(sigB);
}, 2);
});
});
context("Scheduling", () => {
it("can be scheduled to set a value in the future", async () => {
const buffer = await Offline((context) => {
const sig = new Signal(0).toMaster();
sig.setValueAtTime(2, 0.2);
}, 0.25);
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.001);
expect(buffer.getValueAtTime(0.19)).to.be.closeTo(0, 0.001);
expect(buffer.getValueAtTime(0.2)).to.be.closeTo(2, 0.001);
expect(buffer.getValueAtTime(0.24)).to.be.closeTo(2, 0.001);
});
it("can linear ramp from the current value to another value in the future", async () => {
const buffer = await Offline(context => {
const sig = new Signal(0).toMaster();
sig.setValueAtTime(0, 0);
sig.linearRampToValueAtTime(1, 0.1);
}, 0.1);
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.001);
expect(buffer.getValueAtTime(0.05)).to.be.closeTo(0.5, 0.001);
expect(buffer.getValueAtTime(0.1)).to.be.closeTo(1, 0.001);
});
it("can schedule multiple automations", async () => {
const buffer = await Offline(context => {
const sig = new Signal(0).toMaster();
sig.setValueAtTime(0, 0);
sig.linearRampToValueAtTime(0.5, 0.5);
sig.linearRampToValueAtTime(0, 1);
}, 1);
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.001);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.25, 0.001);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0.5, 0.001);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(0.25, 0.001);
expect(buffer.getValueAtTime(1)).to.be.closeTo(0, 0.001);
});
it("can schedule multiple automations from a connected signal", async () => {
const buffer = await Offline((context) => {
const output = new Signal(1).toMaster();
const sig = new Signal(0).connect(output);
sig.setValueAtTime(0, 0);
sig.linearRampToValueAtTime(0.5, 0.5);
sig.linearRampToValueAtTime(0, 1);
}, 1);
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.001);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.25, 0.001);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0.5, 0.001);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(0.25, 0.001);
expect(buffer.getValueAtTime(1)).to.be.closeTo(0, 0.001);
});
it("can disconnect from all the connected notes", () => {
return ConstantOutput((context) => {
const output0 = new Signal(1).toMaster();
const output1 = new Signal(1).toMaster();
const sig = new Signal(0).connect(output0);
sig.connect(output1);
sig.disconnect();
sig.setValueAtTime(0, 0);
sig.linearRampToValueAtTime(0.5, 0.5);
sig.linearRampToValueAtTime(0, 1);
}, 0);
});
it("can disconnect from a specific node", () => {
return ConstantOutput((context) => {
const output = new Signal(1).toMaster();
const sig = new Signal(0).connect(output);
sig.disconnect(output);
sig.setValueAtTime(0, 0);
sig.linearRampToValueAtTime(0.5, 0.5);
sig.linearRampToValueAtTime(0, 1);
}, 0);
});
it("can schedule multiple automations from a connected signal through a multiple nodes", async () => {
const buffer = await Offline(() => {
const output = new Signal(0).toMaster();
const proxy = new Signal(0).connect(output);
const gain = new Gain(1).connect(proxy);
const sig = new Signal(0).connect(gain);
sig.setValueAtTime(0, 0);
sig.linearRampToValueAtTime(0.5, 0.5);
sig.linearRampToValueAtTime(0, 1);
}, 1);
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.01);
expect(buffer.getValueAtTime(0.1)).to.be.closeTo(0.1, 0.01);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.25, 0.01);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(0.5, 0.01);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(0.25, 0.01);
expect(buffer.getValueAtTime(1)).to.be.closeTo(0, 0.01);
});
it("can cancel an automation", () => {
return ConstantOutput(() => {
const sig = new Signal(1).toMaster();
sig.setValueAtTime(4, 0.1);
sig.exponentialRampToValueAtTime(3, 0.2);
sig.cancelScheduledValues(0);
}, 1);
});
it("can cancel and hold a linear automation curve", async () => {
const buffer = await Offline(() => {
const sig = new Signal(0).toMaster();
sig.linearRampTo(2, 1);
sig.cancelAndHoldAtTime(0.5);
}, 1);
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.1);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(0.5, 0.1);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(1, 0.1);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(1, 0.1);
});
it("can cancel and hold an exponential automation curve", () => {
return Offline(() => {
const sig = new Signal(1).toMaster();
sig.exponentialRampTo(2, 1);
sig.cancelAndHoldAtTime(0.5);
}, 1).then(buffer => {
expect(buffer.getValueAtTime(0)).to.be.closeTo(1, 0.1);
expect(buffer.getValueAtTime(0.25)).to.be.closeTo(1.2, 0.1);
expect(buffer.getValueAtTime(0.5)).to.be.closeTo(1.4, 0.1);
expect(buffer.getValueAtTime(0.75)).to.be.closeTo(1.4, 0.1);
});
});
it("can set a linear ramp from the current time", () => {
return Offline(() => {
const sig = new Signal(0).toMaster();
sig.linearRampTo(2, 0.3);
}, 0.5).then((buffer) => {
buffer.forEach((sample, time) => {
if (time > 0.3) {
expect(sample).to.be.closeTo(2, 0.02);
}
});
});
});
it("can set an linear ramp in the future", () => {
return Offline(() => {
const sig = new Signal(1).toMaster();
sig.linearRampTo(50, 0.3, 0.2);
}, 0.7).then((buffer) => {
buffer.forEach((sample, time) => {
if (time >= 0.6) {
expect(sample).to.be.closeTo(50, 0.5);
} else if (time < 0.2) {
expect(sample).to.be.closeTo(1, 0.01);
}
});
});
});
it("can set a exponential approach ramp from the current time", () => {
return Offline(() => {
const sig = new Signal(0).toMaster();
sig.targetRampTo(1, 0.3);
}, 0.5).then((buffer) => {
expect(buffer.getValueAtTime(0)).to.be.below(0.0001);
expect(buffer.getValueAtTime(0.3)).to.be.closeTo(1, 0.02);
});
});
it("can set an exponential approach ramp in the future", () => {
return Offline(() => {
const sig = new Signal(1).toMaster();
sig.targetRampTo(50, 0.3, 0.2);
}, 0.7).then((buffer) => {
expect(buffer.getValueAtTime(0)).to.be.closeTo(1, 0.0001);
expect(buffer.getValueAtTime(0.2)).to.be.closeTo(1, 0.0001);
expect(buffer.getValueAtTime(0.6)).to.be.closeTo(50, 0.5);
});
});
it("can set an exponential ramp from the current time", () => {
return Offline(() => {
const sig = new Signal(1).toMaster();
sig.exponentialRampTo(50, 0.4);
}, 0.6).then((buffer) => {
buffer.forEach((sample, time) => {
if (time >= 0.4) {
expect(sample).to.be.closeTo(50, 0.5);
} else if (time < 0.39) {
expect(sample).to.be.lessThan(50);
}
});
});
});
it("can set an exponential ramp in the future", () => {
return Offline(() => {
const sig = new Signal(1).toMaster();
sig.exponentialRampTo(50, 0.3, 0.2);
}, 0.8).then((buffer) => {
buffer.forEach((sample, time) => {
if (time >= 0.6) {
expect(sample).to.be.closeTo(50, 0.5);
} else if (time < 0.2) {
expect(sample).to.equal(1);
}
});
});
});
it("rampTo ramps from the current value", () => {
return Offline(() => {
const sig = new Signal(3).toMaster();
sig.rampTo(0.2, 0.1);
}, 0.4).then((buffer) => {
buffer.forEach((sample, time) => {
if (time >= 0.1) {
expect(sample).to.be.closeTo(0.2, 0.1);
} else {
expect(sample).to.be.greaterThan(0.2);
}
});
});
});
it("rampTo ramps from the current value at a specific time", () => {
return Offline(() => {
const sig = new Signal(0).toMaster();
sig.rampTo(2, 0.1, 0.4);
}, 0.6).then((buffer) => {
buffer.forEach((sample, time) => {
if (time < 0.4) {
expect(sample).to.be.closeTo(0, 0.1);
} else if (time > 0.5) {
expect(sample).to.be.closeTo(2, 0.1);
}
});
});
});
it("can set a value curve", () => {
return Offline(() => {
const sig = new Signal(0).toMaster();
sig.setValueCurveAtTime([0, 1, 0.5, 0.2], 0, 1);
}, 1).then((buffer) => {
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.01);
expect(buffer.getValueAtTime(0.33 / 2)).to.be.closeTo(0.5, 0.01);
expect(buffer.getValueAtTime(0.33)).to.be.closeTo(1, 0.02);
expect(buffer.getValueAtTime(0.66)).to.be.closeTo(0.5, 0.02);
expect(buffer.getValueAtTime(0.99)).to.be.closeTo(0.2, 0.02);
});
});
it("can set a value curve in the future", () => {
return Offline(() => {
const sig = new Signal(0).toMaster();
sig.setValueCurveAtTime([0, 1, 0.5, 0.2], 0.5, 1);
}, 1.5).then((buffer) => {
expect(buffer.getValueAtTime(0 + 0.5)).to.be.closeTo(0, 0.01);
expect(buffer.getValueAtTime(0.33 / 2 + 0.5)).to.be.closeTo(0.5, 0.01);
expect(buffer.getValueAtTime(0.33 + 0.5)).to.be.closeTo(1, 0.02);
expect(buffer.getValueAtTime(0.66 + 0.5)).to.be.closeTo(0.5, 0.02);
expect(buffer.getValueAtTime(0.99 + 0.5)).to.be.closeTo(0.2, 0.02);
});
});
it("can set an exponential approach", () => {
return Offline(() => {
const sig = new Signal(0).toMaster();
sig.exponentialApproachValueAtTime(2, 0.1, 0.5);
}, 1).then((buffer) => {
expect(buffer.getValueAtTime(0)).to.be.closeTo(0, 0.01);
expect(buffer.getValueAtTime(0.1)).to.be.closeTo(0, 0.01);
expect(buffer.getValueAtTime(0.4)).to.be.closeTo(1.9, 0.1);
expect(buffer.getValueAtTime(0.6)).to.be.closeTo(2, 0.01);
});
});
});
context("Units", () => {
it("can be created with specific units", () => {
const signal = new Signal(0, "bpm");
expect(signal.units).to.equal("bpm");
signal.dispose();
});
it("can evaluate the given units", () => {
const signal = new Signal(2, "time");
signal.value = 0.5;
expect(signal.value).to.be.closeTo(0.5, 0.001);
signal.dispose();
});
it("converts the given units when passed in the constructor", () => {
return ConstantOutput(() => {
const signal = new Signal({
units: "decibels",
value: -10,
}).toMaster();
}, 0.315);
});
it("can be set to not convert the given units", () => {
return ConstantOutput(() => {
const signal = new Signal({
convert: false,
units: "decibels",
value: -10,
}).toMaster();
}, -10);
});
it("converts Frequency units", () => {
const signal = new Signal<"frequency">("50hz", "frequency");
expect(signal.value).to.be.closeTo(50, 0.01);
signal.dispose();
});
it("converts Time units", () => {
const signal = new Signal<"time">("4n", "time");
expect(signal.value).to.be.closeTo(0.5, 0.01);
signal.dispose();
});
it("converts NormalRange units", () => {
const signal = new Signal(2, "normalRange");
expect(signal.value).to.be.closeTo(1, 0.01);
signal.dispose();
});
it("converts AudioRange units", () => {
const signal = new Signal(-2, "audioRange");
expect(signal.value).to.be.closeTo(-1, 0.01);
signal.dispose();
});
it("converts Positive units", () => {
const signal = new Signal(-2, "positive");
expect(signal.value).to.be.closeTo(0, 0.01);
signal.dispose();
});
});
// context("Transport Syncing", () => {
// it("maintains its original value after being synced to the transport", () => {
// return ConstantOutput(function(Transport) {
// const sig = new Signal(3).toMaster();
// Transport.syncSignal(sig);
// }, 3);
// });
// it("keeps the ratio when the bpm changes", () => {
// return ConstantOutput(function(Transport) {
// Transport.bpm.value = 120;
// const sig = new Signal(5).toMaster();
// Transport.syncSignal(sig);
// Transport.bpm.value = 240;
// }, 10);
// });
// it("can ramp along with the bpm", () => {
// return Offline(function(Transport) {
// Transport.bpm.value = 120;
// const sig = new Signal(2).toMaster();
// Transport.syncSignal(sig);
// Transport.bpm.rampTo(240, 0.5);
// }).then((buffer) => {
// buffer.forEach((sample, time) => {
// if (time >= 0.5) {
// expect(sample).to.be.closeTo(4, 0.04);
// } else if (time < 0.4) {
// expect(sample).to.be.within(1.95, 3);
// }
// });
// });
// });
// it("returns to the original value when unsynced", () => {
// return ConstantOutput(function(Transport) {
// Transport.bpm.value = 120;
// const sig = new Signal(5).toMaster();
// Transport.syncSignal(sig);
// Transport.bpm.value = 240;
// Transport.unsyncSignal(sig);
// }, 5);
// });
// });
});

25
tsconfig.json Normal file
View file

@ -0,0 +1,25 @@
{
"compileOnSave": true,
"compilerOptions": {
"strictNullChecks": true,
"target": "es5",
"module": "commonjs",
"noImplicitAny": false,
"noUnusedLocals": false,
"removeComments": false,
"experimentalDecorators": true,
// "outFile": "./build/Tone.js",
"outDir": "./build",
"sourceMap": true,
"moduleResolution": "classic",
"strictPropertyInitialization" : true,
"lib": ["es6", "dom"],
"baseUrl": "./",
"paths" : {
"@tonejs/plot" : ["./node_modules/@tonejs/plot/dist/index.d.ts"]
}
},
"include": [
"Tone/**/*.ts", "test/**/*.ts"
]
}

73
tslint.json Normal file
View file

@ -0,0 +1,73 @@
{
"extends": "tslint:recommended",
"rulesDirectory": [
],
"rules": {
"new-parens": true,
"no-arg": true,
"no-bitwise": true,
"no-conditional-assignment": true,
"no-consecutive-blank-lines": true,
"object-literal-sort-keys" : true,
"member-access" : false,
"member-ordering" : false,
"interface-name": false,
"arrow-parens" : false,
"typedef" : [
true,
"call-signature"
],
"one-line": [
true,
"check-catch",
"check-finally",
"check-else",
"check-whitespace"
],
"variable-name": [
true,
"allow-leading-underscore"
],
"quotemark": [
true,
"double",
"avoid-escape"
],
"indent": [
true,
"tabs"
],
"whitespace": [
true,
"check-decl",
"check-operator",
"check-module",
"check-separator",
"check-rest-spreak",
"check-type",
"check-typecast",
"check-type-operator",
"check-branch",
"check-operator",
"check-typecast"
],
"no-console": {
"severity": "warning",
"options": [
"debug",
"info",
"log",
"time",
"timeEnd",
"trace"
]
}
},
"jsRules": {
"indent": [
true,
"tabs"
]
}
}