From df08425a70a18c085a49933ff0f09f5c335dc9fe Mon Sep 17 00:00:00 2001 From: tambien Date: Fri, 12 Apr 2019 10:37:47 -0400 Subject: [PATCH] started retooling with typescript --- Tone/core/Connect.ts | 113 +++++ Tone/core/Context.ts | 379 ++++++++++++++ Tone/core/ContextTicker.ts | 146 ++++++ Tone/core/Decorator.ts | 20 + Tone/core/Emitter.ts | 120 +++++ Tone/core/FromContext.ts | 14 + Tone/core/Global.ts | 65 +++ Tone/core/Timeline.ts | 370 ++++++++++++++ Tone/core/Tone.ts | 147 ++++++ Tone/core/Util.ts | 169 +++++++ Tone/node/AbstractParam.ts | 185 +++++++ Tone/node/AudioNode.ts | 275 ++++++++++ Tone/node/AudioProcessor.ts | 156 ++++++ Tone/node/Delay.ts | 82 +++ Tone/node/Gain.ts | 77 +++ Tone/node/Param.ts | 414 +++++++++++++++ Tone/node/Split.ts | 0 Tone/signal/Signal.ts | 178 +++++++ Tone/type/Conversions.ts | 36 ++ Tone/type/Time.ts | 127 +++++ Tone/type/TypeBase.ts | 299 +++++++++++ Tone/type/Units.ts | 158 ++++++ Tone/version.ts | 1 + package-lock.json | 925 ++++++++++++++++++++++------------ package.json | 23 +- scripts/karma.conf.js | 116 +++-- test/core/Connect.ts | 109 ++++ test/core/ContextTicker.ts | 86 ++++ test/core/Emitter.ts | 87 ++++ test/core/Timeline.ts | 593 ++++++++++++++++++++++ test/helper/Basic.ts | 40 ++ test/helper/Connect.ts | 10 + test/helper/ConstantOutput.ts | 13 + test/helper/Dispose.ts | 17 + test/helper/Offline.ts | 15 + test/helper/PassAudio.ts | 24 + test/helper/Supports.ts | 41 ++ test/node/AudioNode.ts | 177 +++++++ test/node/Delay.ts | 94 ++++ test/node/Gain.ts | 75 +++ test/node/Param.ts | 435 ++++++++++++++++ test/signal/Signal.ts | 454 +++++++++++++++++ tsconfig.json | 25 + tslint.json | 73 +++ 44 files changed, 6581 insertions(+), 382 deletions(-) create mode 100644 Tone/core/Connect.ts create mode 100644 Tone/core/Context.ts create mode 100644 Tone/core/ContextTicker.ts create mode 100644 Tone/core/Decorator.ts create mode 100644 Tone/core/Emitter.ts create mode 100644 Tone/core/FromContext.ts create mode 100644 Tone/core/Global.ts create mode 100644 Tone/core/Timeline.ts create mode 100644 Tone/core/Tone.ts create mode 100644 Tone/core/Util.ts create mode 100644 Tone/node/AbstractParam.ts create mode 100644 Tone/node/AudioNode.ts create mode 100644 Tone/node/AudioProcessor.ts create mode 100644 Tone/node/Delay.ts create mode 100644 Tone/node/Gain.ts create mode 100644 Tone/node/Param.ts create mode 100644 Tone/node/Split.ts create mode 100644 Tone/signal/Signal.ts create mode 100644 Tone/type/Conversions.ts create mode 100644 Tone/type/Time.ts create mode 100644 Tone/type/TypeBase.ts create mode 100644 Tone/type/Units.ts create mode 100644 Tone/version.ts create mode 100644 test/core/Connect.ts create mode 100644 test/core/ContextTicker.ts create mode 100644 test/core/Emitter.ts create mode 100644 test/core/Timeline.ts create mode 100644 test/helper/Basic.ts create mode 100644 test/helper/Connect.ts create mode 100644 test/helper/ConstantOutput.ts create mode 100644 test/helper/Dispose.ts create mode 100644 test/helper/Offline.ts create mode 100644 test/helper/PassAudio.ts create mode 100644 test/helper/Supports.ts create mode 100644 test/node/AudioNode.ts create mode 100644 test/node/Delay.ts create mode 100644 test/node/Gain.ts create mode 100644 test/node/Param.ts create mode 100644 test/signal/Signal.ts create mode 100644 tsconfig.json create mode 100644 tslint.json diff --git a/Tone/core/Connect.ts b/Tone/core/Connect.ts new file mode 100644 index 00000000..37fa55d7 --- /dev/null +++ b/Tone/core/Connect.ts @@ -0,0 +1,113 @@ +import { 
InputNode, OutputNode, ToneAudioNode } from "../node/AudioNode"; +import { isArray, isDefined, isNumber } from "./Util"; + +/** + * connect together all of the arguments in series + * @param nodes + */ +export function connectSeries(...nodes: InputNode[]): void { + nodes.reduce((prev, current) => { + if (prev instanceof ToneAudioNode || prev instanceof AudioNode) { + connect(prev, current); + } + return current; + }, nodes[0]); +} + +/** + * Connect two nodes together so that signal flows from the + * first node to the second. Optionally specify the input and output channels. + * @param srcNode The source node + * @param dstNode The destination node + * @param outputNumber The output channel of the srcNode + * @param inputNumber The input channel of the dstNode + */ +export function connect(srcNode: OutputNode, dstNode: InputNode, outputNumber = 0, inputNumber = 0): void { + + // resolve the input of the dstNode + while (!(dstNode instanceof AudioNode || dstNode instanceof AudioParam)) { + if (isArray(dstNode.input)) { + this.assert(dstNode.input.length < inputNumber, "the output number is greater than the number of outputs"); + dstNode = dstNode.input[inputNumber]; + } else if (isDefined(dstNode.input)) { + dstNode = dstNode.input; + } + inputNumber = 0; + } + + if (srcNode instanceof ToneAudioNode) { + if (isArray(srcNode.output)) { + this.assert(srcNode.output.length < outputNumber, "the output number is greater than the number of outputs"); + srcNode = srcNode.output[outputNumber]; + } else if (isDefined(srcNode.output)) { + srcNode = srcNode.output; + } + outputNumber = 0; + } + + // make the connection + if (dstNode instanceof AudioParam) { + srcNode.connect(dstNode, outputNumber); + } else { + srcNode.connect(dstNode, outputNumber, inputNumber); + } +} + +/** + * Disconnect a node from all nodes or optionally include a destination node and input/output channels. 
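A quick usage sketch for the two helpers above, assuming they are exported from this new Connect module and used with plain Web Audio nodes (the import path here is illustrative):

import { connect, connectSeries } from "./Tone/core/Connect";

const ctx = new AudioContext();
const osc = ctx.createOscillator();
const filter = ctx.createBiquadFilter();
const gain = ctx.createGain();

// wire osc -> filter -> gain in one call
connectSeries(osc, filter, gain);
// connect a single pair, optionally selecting output/input channels
connect(gain, ctx.destination, 0, 0);
osc.start();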
+ * @param srcNode The source node + * @param dstNode The destination node + * @param outputNumber The output channel of the srcNode + * @param inputNumber The input channel of the dstNode + */ +export function disconnect( + srcNode: OutputNode, + dstNode?: InputNode, + outputNumber = 0, + inputNumber = 0, +): void { + + // resolve the destination node + if (isDefined(dstNode)) { + while (dstNode instanceof ToneAudioNode) { + if (isArray(dstNode.input)) { + if (isNumber(inputNumber)) { + this.assert(dstNode.input.length < inputNumber, "the input number is greater than the number of inputs"); + dstNode = dstNode.input[inputNumber]; + } else { + // disconnect from all of the nodes + // since we don't know which one was connected + dstNode.input.forEach(dst => { + try { + // catch errors from disconnecting from nodes that are not connected + disconnect(srcNode, dst, outputNumber); + // tslint:disable-next-line: no-empty + } catch (e) { } + }); + } + inputNumber = 0; + } else if (dstNode.input) { + dstNode = dstNode.input; + } + } + } + + // resolve the src node + while (!(srcNode instanceof AudioNode)) { + if (isArray(srcNode.output)) { + this.assert(srcNode.output.length < outputNumber, "the output number is greater than the number of outputs"); + srcNode = srcNode.output[outputNumber]; + } else if (isDefined(srcNode.output)) { + srcNode = srcNode.output; + } + outputNumber = 0; + } + + if (dstNode instanceof AudioParam) { + srcNode.disconnect(dstNode, outputNumber); + } else if (dstNode instanceof AudioNode) { + srcNode.disconnect(dstNode, outputNumber, inputNumber); + } else { + srcNode.disconnect(); + } +} diff --git a/Tone/core/Context.ts b/Tone/core/Context.ts new file mode 100644 index 00000000..275c7d98 --- /dev/null +++ b/Tone/core/Context.ts @@ -0,0 +1,379 @@ +import { Ticker, TickerClockSource } from "./ContextTicker"; +import { Emitter } from "./Emitter"; +import { Timeline } from "./Timeline"; +import { isString, Omit, optionsFromArguments } from "./Util"; + +type ContextLatencyHint = AudioContextLatencyCategory | "fastest"; + +// these are either not used in Tone.js or deprecated and not implemented. +type ExcludedFromBaseAudioContext = "createScriptProcessor" | "onstatechange" | "addEventListener" + | "removeEventListener" | "listener" | "dispatchEvent" | "audioWorklet"; + +// the subset of the BaseAudioContext which Tone.Context implements. +type BaseAudioContextSubset = Omit; + +interface ContextOptions { + clockSource: TickerClockSource; + latencyHint: ContextLatencyHint; + lookAhead: Seconds; + updateInterval: Seconds; +} + +interface ContextTimeoutEvent { + callback: (...args: any[]) => void; + id: number; + time: Seconds; +} + +/** + * Wrapper around the native AudioContext. + */ +export class Context extends Emitter implements BaseAudioContextSubset { + + name = "Context"; + + static getDefaults(): ContextOptions { + return { + clockSource: "worker", + latencyHint: "interactive", + lookAhead: 0.1, + updateInterval: 0.03, + }; + } + + /** + * The amount of time into the future events are scheduled + */ + lookAhead: Seconds; + + /** + * private reference to the BaseAudioContext + */ + private readonly _context: BaseAudioContext; + + /** + * A reliable callback method + */ + private readonly _ticker: Ticker; + + /** + * The default latency hint + */ + private _latencyHint: ContextLatencyHint | Seconds; + + /** + * An object containing all of the constants AudioBufferSourceNodes + */ + private _constants = new Map(); + + /** + * All of the setTimeout events. 
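And the matching teardown with disconnect, continuing the same hypothetical nodes; with native Web Audio nodes it either severs one specific connection or everything leaving the source:

// remove only the gain -> destination connection
disconnect(gain, ctx.destination);
// remove every connection leaving the oscillator
disconnect(osc);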
+ */ + private _timeouts: Timeline; + + /** + * The timeout id counter + */ + private _timeoutIds = 0; + + constructor(context: BaseAudioContext) { + super(); + + this._context = context; + + const defaults = Context.getDefaults(); + this._latencyHint = defaults.latencyHint; + this.lookAhead = defaults.lookAhead; + this._timeouts = new Timeline(); + + this._ticker = new Ticker(this.emit.bind(this, "tick"), defaults.clockSource, defaults.updateInterval); + this.on("tick", this._timeoutLoop.bind(this)); + + // fwd events from the context + this._context.addEventListener("statechange", () => { + this.emit("statechange", this.state); + }); + } + + /////////////////////////////////////////////////////////////////////// + // BASE AUDIO CONTEXT METHODS + /////////////////////////////////////////////////////////////////////// + + createAnalyser(): AnalyserNode { + return this._context.createAnalyser(); + } + createOscillator(): OscillatorNode { + return this._context.createOscillator(); + } + createBufferSource(): AudioBufferSourceNode { + return this._context.createBufferSource(); + } + createBiquadFilter(): BiquadFilterNode { + return this._context.createBiquadFilter(); + } + createBuffer(numberOfChannels: number, length: number, sampleRate: number): AudioBuffer { + return this._context.createBuffer(numberOfChannels, length, sampleRate); + } + createChannelMerger(numberOfInputs?: number | undefined): ChannelMergerNode { + return this._context.createChannelMerger(numberOfInputs); + } + createChannelSplitter(numberOfOutputs?: number | undefined): ChannelSplitterNode { + return this._context.createChannelSplitter(numberOfOutputs); + } + createConstantSource(): ConstantSourceNode { + return this._context.createConstantSource(); + } + createConvolver(): ConvolverNode { + return this._context.createConvolver(); + } + createDelay(maxDelayTime?: number | undefined): DelayNode { + return this._context.createDelay(maxDelayTime); + } + createDynamicsCompressor(): DynamicsCompressorNode { + return this._context.createDynamicsCompressor(); + } + createGain(): GainNode { + return this._context.createGain(); + } + createIIRFilter(feedforward: number[], feedback: number[]): IIRFilterNode { + return this._context.createIIRFilter(feedforward, feedback); + } + createPanner(): PannerNode { + return this._context.createPanner(); + } + createPeriodicWave( + real: number[] | Float32Array, + imag: number[] | Float32Array, + constraints?: PeriodicWaveConstraints | undefined, + ): PeriodicWave { + return this._context.createPeriodicWave(real, imag, constraints); + } + createStereoPanner(): StereoPannerNode { + return this._context.createStereoPanner(); + } + createWaveShaper(): WaveShaperNode { + return this._context.createWaveShaper(); + } + decodeAudioData(audioData: ArrayBuffer): Promise { + return this._context.decodeAudioData(audioData); + } + /** + * The audio output destination. Alias for Tone.Master + */ + get destination(): AudioDestinationNode { + return this._context.destination; + } + /** + * The current time in seconds of the AudioContext. + */ + get currentTime(): Seconds { + return this._context.currentTime; + } + /** + * The current time in seconds of the AudioContext. + */ + get state(): AudioContextState { + return this._context.state; + } + /** + * The current time in seconds of the AudioContext. 
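A sketch of how the wrapper is intended to be used: hand it an existing AudioContext and call the forwarded factory methods (the import path is an assumption):

import { Context } from "./Tone/core/Context";

const context = new Context(new AudioContext());
const gainNode = context.createGain();            // forwarded to the wrapped context
gainNode.connect(context.destination);
context.on("statechange", state => console.log("context is now", state));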
+ */ + get sampleRate(): number { + return this._context.sampleRate; + } + /** + * The listener + */ + get listener(): AudioListener { + return this._context.listener; + } + + /////////////////////////////////////////////////////////////////////// + // TICKER + /////////////////////////////////////////////////////////////////////// + + /** + * How often the interval callback is invoked. + * This number corresponds to how responsive the scheduling + * can be. context.updateInterval + context.lookAhead gives you the + * total latency between scheduling an event and hearing it. + */ + get updateInterval(): Seconds { + return this._ticker.updateInterval; + } + set updateInterval(interval: Seconds) { + this._ticker.updateInterval = interval; + } + + /** + * What the source of the clock is, either "worker" (Web Worker [default]), + * "timeout" (setTimeout), or "offline" (none). + */ + get clockSource(): TickerClockSource { + return this._ticker.type; + } + set clockSource(type: TickerClockSource) { + this._ticker.type = type; + } + + /** + * The type of playback, which affects tradeoffs between audio + * output latency and responsiveness. + * + * In addition to setting the value in seconds, the latencyHint also + * accepts the strings "interactive" (prioritizes low latency), + * "playback" (prioritizes sustained playback), "balanced" (balances + * latency and performance), and "fastest" (lowest latency, might glitch more often). + * @example + * //set the lookAhead to 0.3 seconds + * Tone.context.latencyHint = 0.3; + */ + get latencyHint(): ContextLatencyHint | Seconds { + return this._latencyHint; + } + set latencyHint(hint: ContextLatencyHint | Seconds) { + let lookAheadValue = 0; + this._latencyHint = hint; + if (isString(hint)) { + switch (hint) { + case "interactive": + lookAheadValue = 0.1; + break; + case "playback": + lookAheadValue = 0.8; + break; + case "balanced": + lookAheadValue = 0.25; + break; + case "fastest": + lookAheadValue = 0.01; + break; + } + } + this.lookAhead = lookAheadValue; + this.updateInterval = lookAheadValue / 3; + } + + /** + * The unwrapped AudioContext. + */ + get rawContext(): BaseAudioContext { + return this._context; + } + + /** + * The current audio context time + */ + now(): Seconds { + return this._context.currentTime + this.lookAhead; + } + + /** + * Starts the audio context from a suspended state. This is required + * to initially start the AudioContext. + */ + resume(): Promise { + if (this._context.state === "suspended" && this._context instanceof AudioContext) { + return this._context.resume(); + } else { + return Promise.resolve(); + } + } + + /** + * Promise which is invoked when the context is running. + * Tries to resume the context if it's not started. + * @return {Promise} + */ + async close(): Promise { + if (this._context instanceof AudioContext) { + await this._context.close(); + } + return this; + } + + /** + * Generate a looped buffer at some constant value. 
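The latency hint maps onto lookAhead and updateInterval as in the switch above; a short sketch with the wrapped context from the previous example:

context.latencyHint = "playback";       // lookAhead = 0.8s, updateInterval ≈ 0.27s
const when = context.now();             // currentTime + lookAhead
const oscNode = context.createOscillator();
oscNode.connect(context.destination);
oscNode.start(when);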
+ */ + getConstant(val: number): AudioBufferSourceNode { + if (this._constants.has(val)) { + return this._constants.get(val) as AudioBufferSourceNode; + } else { + const buffer = this._context.createBuffer(1, 128, this._context.sampleRate); + const arr = buffer.getChannelData(0); + for (let i = 0; i < arr.length; i++) { + arr[i] = val; + } + const constant = this._context.createBufferSource(); + constant.channelCount = 1; + constant.channelCountMode = "explicit"; + constant.buffer = buffer; + constant.loop = true; + constant.start(0); + this._constants.set(val, constant); + return constant; + } + } + + /** + * Clean up. Also closes the audio context. + */ + dispose(): Context { + this._ticker.dispose(); + this._timeouts.dispose(); + Object.keys(this._constants).map(val => this._constants[val].disconnect()); + this.close(); + return this; + } + + /////////////////////////////////////////////////////////////////////// + // TIMEOUTS + /////////////////////////////////////////////////////////////////////// + + /** + * The private loop which keeps track of the context scheduled timeouts + * Is invoked from the clock source + */ + private _timeoutLoop(): void { + const now = this.now(); + let firstEvent = this._timeouts.peek(); + while (this._timeouts.length && firstEvent && firstEvent.time <= now) { + // invoke the callback + firstEvent.callback(); + firstEvent = this._timeouts.peek(); + // shift the first event off + this._timeouts.shift(); + } + } + + /** + * A setTimeout which is guarented by the clock source. + * Also runs in the offline context. + * @param fn The callback to invoke + * @param timeout The timeout in seconds + * @returns ID to use when invoking Context.clearTimeout + */ + setTimeout(fn: (...args: any[]) => void, timeout: Seconds): number { + this._timeoutIds++; + const now = this.now(); + this._timeouts.add({ + callback : fn, + id : this._timeoutIds, + time : now + timeout, + }); + return this._timeoutIds; + } + + /** + * Clears a previously scheduled timeout with Tone.context.setTimeout + * @param id The ID returned from setTimeout + */ + clearTimeout(id: number): Context { + this._timeouts.forEach(event => { + if (event.id === id) { + this._timeouts.remove(event); + } + }); + return this; + } +} diff --git a/Tone/core/ContextTicker.ts b/Tone/core/ContextTicker.ts new file mode 100644 index 00000000..1eb3edcc --- /dev/null +++ b/Tone/core/ContextTicker.ts @@ -0,0 +1,146 @@ + +export type TickerClockSource = "worker" | "timeout" | "offline"; + +/** + * A class which provides a reliable callback using either + * a Web Worker, or if that isn't supported, falls back to setTimeout. 
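The context-driven timeout replaces window.setTimeout for audio scheduling; times are in seconds and the callback rides on the tick loop shown above:

const id = context.setTimeout(() => console.log("fires roughly 0.5s later"), 0.5);
context.clearTimeout(id);               // cancel it before it runs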
+ */ +export class Ticker { + + /** + * Either "worker" or "timeout" or "offline" + */ + private _type: TickerClockSource; + + /** + * The update interval of the worker + */ + private _updateInterval: Seconds; + + /** + * The callback to invoke at regular intervals + */ + private _callback: () => void; + + /** + * track the callback interval + */ + private _timeout!: number; + + /** + * private reference to the worker + */ + private _worker!: Worker; + + constructor(callback: () => void, type: TickerClockSource, updateInterval: Seconds) { + + this._callback = callback; + this._type = type; + this._updateInterval = updateInterval; + + // create the clock source for the first time + this._createClock(); + } + + /** + * Generate a web worker + */ + private _createWorker(): void { + + const blob = new Blob([ + // the initial timeout time + `var timeoutTime = ${(this._updateInterval * 1000).toFixed(1)}; + // onmessage callback + self.onmessage = function(msg){ + timeoutTime = parseInt(msg.data); + }; + // the tick function which posts a message + // and schedules a new tick + function tick(){ + setTimeout(tick, timeoutTime); + self.postMessage('tick'); + } + // call tick initially + tick();`, + ]); + const blobUrl = window.URL.createObjectURL(blob); + const worker = new Worker(blobUrl); + + worker.onmessage = this._callback.bind(this); + + this._worker = worker; + } + + /** + * Create a timeout loop + */ + private _createTimeout(): void { + this._timeout = window.setTimeout(() => { + this._createTimeout(); + this._callback(); + }, this._updateInterval * 1000); + } + + /** + * Create the clock source. + */ + private _createClock(): void { + if (this._type === "worker") { + try { + this._createWorker(); + } catch (e) { + // workers not supported, fallback to timeout + this._type = "timeout"; + this._createClock(); + } + } else if (this._type === "timeout") { + this._createTimeout(); + } + } + + /** + * Clean up the current clock source + */ + private _disposeClock(): void { + if (this._timeout) { + clearTimeout(this._timeout); + this._timeout = 0; + } + if (this._worker) { + this._worker.terminate(); + this._worker.onmessage = null; + } + } + + /** + * The rate in seconds the ticker will update + */ + get updateInterval(): Seconds { + return this._updateInterval; + } + set updateInterval(interval: Seconds) { + this._updateInterval = Math.max(interval, 128 / 44100); + if (this._type === "worker") { + this._worker.postMessage(Math.max(interval * 1000, 1)); + } + } + + /** + * The type of the ticker, either a worker or a timeout + */ + get type(): TickerClockSource { + return this._type; + } + set type(type: TickerClockSource) { + this._disposeClock(); + this._type = type; + this._createClock(); + } + + /** + * Clean up + */ + dispose(): void { + this._disposeClock(); + } +} diff --git a/Tone/core/Decorator.ts b/Tone/core/Decorator.ts new file mode 100644 index 00000000..a7bef8c6 --- /dev/null +++ b/Tone/core/Decorator.ts @@ -0,0 +1,20 @@ +function optionsFromArguments(defaults: Options, args: any[], keys: string[]): Options { + return defaults; +} + +type ObjectConstructor = new (...args: any[]) => {}; + +export function useDefaultsAndArguments( + defaults: DefaultOptions, + optionsOrder: string[], +) { + return (classDef: T) => { + return class extends classDef { + // assign the instance defaults + defaults: DefaultOptions = defaults; + constructor(...args: any[]) { + super(optionsFromArguments(defaults, args, optionsOrder)); + } + }; + }; +} diff --git a/Tone/core/Emitter.ts 
b/Tone/core/Emitter.ts new file mode 100644 index 00000000..501cea72 --- /dev/null +++ b/Tone/core/Emitter.ts @@ -0,0 +1,120 @@ +import { Tone } from "./Tone"; +import { isUndef } from "./Util"; + +interface EventObject { + [event: string]: Array<(...args: any[]) => void>; +} + +/** + * Emitter gives classes which extend it + * the ability to listen for and emit events. + * Inspiration and reference from Jerome Etienne's [MicroEvent](https://github.com/jeromeetienne/microevent.js). + * MIT (c) 2011 Jerome Etienne. + */ +export class Emitter extends Tone { + + name = "Emitter"; + + /** + * Private container for the events + */ + private _events: EventObject = {}; + + /** + * Bind a callback to a specific event. + * @param event The name of the event to listen for. + * @param callback The callback to invoke when the event is emitted + */ + on(event: string, callback: (...args: any[]) => void): Emitter { + // split the event + const events = event.split(/\W+/); + events.forEach(eventName => { + if (!this._events.hasOwnProperty(eventName)) { + this._events[eventName] = []; + } + this._events[eventName].push(callback); + }); + return this; + } + + /** + * Bind a callback which is only invoked once + * @param event The name of the event to listen for. + * @param callback The callback to invoke when the event is emitted + */ + once(event: string, callback: (...args: any[]) => void): Emitter { + const boundCallback = (...args: any[]) => { + // invoke the callback + callback(...args); + // remove the event + this.off(event, boundCallback); + }; + this.on(event, boundCallback); + return this; + } + + /** + * Remove the event listener. + * @param event The event to stop listening to. + * @param callback The callback which was bound to the event with Emitter.on. + * If no callback is given, all callbacks events are removed. + */ + off(event: string, callback?: (...args: any[]) => void): Emitter { + const events = event.split(/\W+/); + events.forEach(eventName => { + if (this._events.hasOwnProperty(event)) { + if (isUndef(callback)) { + this._events[event] = []; + } else { + const eventList = this._events[event]; + for (let i = 0; i < eventList.length; i++) { + if (eventList[i] === callback) { + eventList.splice(i, 1); + } + } + } + } + }); + return this; + } + + /** + * Invoke all of the callbacks bound to the event + * with any arguments passed in. + * @param event The name of the event. + * @param args The arguments to pass to the functions listening. 
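A small sketch of the Emitter surface; the space-separated event list works because on() splits the string on non-word characters:

import { Emitter } from "./Tone/core/Emitter";

const bus = new Emitter();
const onStop = () => console.log("stopped");
bus.on("start stop", () => console.log("start or stop fired"));   // binds to both events
bus.once("start", () => console.log("only the first start"));
bus.on("stop", onStop);
bus.emit("start");
bus.off("stop", onStop);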
+ */ + emit(event, ...args: any[]): Emitter { + if (this._events) { + if (this._events.hasOwnProperty(event)) { + const eventList = this._events[event].slice(0); + for (let i = 0, len = eventList.length; i < len; i++) { + eventList[i].apply(this, args); + } + } + } + return this; + } + + /** + * Add Emitter functions (on/off/emit) to the object + */ + // static mixin(object) { + // const functions = ["on", "once", "off", "emit"]; + // object._events = {}; + // functions.forEach(func => { + // const emitterFunc = Emitter.prototype[func]; + // object[func] = emitterFunc; + // }); + // return Emitter; + // } + + /** + * Clean up + */ + dispose(): Emitter { + this._events = {}; + return this; + } +} + diff --git a/Tone/core/FromContext.ts b/Tone/core/FromContext.ts new file mode 100644 index 00000000..2a0292aa --- /dev/null +++ b/Tone/core/FromContext.ts @@ -0,0 +1,14 @@ +import { Gain } from "../node/Gain"; +import { isUndef } from "./Util"; + +export function fromContext(context) { + const Tone: any = {}; + if (isUndef(Gain.prototype.defaultContext)) { + Tone.Gain = class extends Gain { + get defaultContext() { + return context; + } + }; + } + return Tone; +} diff --git a/Tone/core/Global.ts b/Tone/core/Global.ts new file mode 100644 index 00000000..90300250 --- /dev/null +++ b/Tone/core/Global.ts @@ -0,0 +1,65 @@ +import { isUndef } from "./Util"; + +/** + * The global audio context which is getable and assignable through + * getContext and setContext + */ +let globalContext: BaseAudioContext; + +/** + * Returns the default system-wide AudioContext + */ +export function getContext(): BaseAudioContext { + if (!globalContext) { + globalContext = new AudioContext(); + } + return globalContext; +} + +/** + * Set the default audio context + */ +export function setContext(context: BaseAudioContext): void { + globalContext = context; +} + +/** + * Most browsers will not play _any_ audio until a user + * clicks something (like a play button). Invoke this method + * on a click or keypress event handler to start the audio context. 
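The global helpers might be wired up as follows; note that start() presumes the global context is a realtime AudioContext with a resume() method:

import { getContext, setContext, start } from "./Tone/core/Global";

const audioCtx = getContext();          // lazily creates a single AudioContext
setContext(audioCtx);                   // or swap in another BaseAudioContext
const button = document.querySelector("#play");
if (button) {
  button.addEventListener("click", () => start());   // resume on a user gesture
}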
+ * More about the Autoplay policy + * [here](https://developers.google.com/web/updates/2017/09/autoplay-policy-changes#webaudio) + * @example + * document.querySelector('#playbutton').addEventListener('click', () => Tone.start()) + */ +export function start(): Promise < void> { + return globalContext.resume(); +} + +/** + * True if the current environment has the necessary APIs to run Tone.js + */ +// // export const supported: boolean = toneGlobal.hasOwnProperty("Promise") && toneGlobal.hasOwnProperty("AudioContext"); + +// set the audio context initially, and if one is not already created +// if (Tone.supported && !Tone.initialized){ +// if (!Tone.global.TONE_AUDIO_CONTEXT){ +// Tone.global.TONE_AUDIO_CONTEXT = new Context(); +// } +// Tone.context = Tone.global.TONE_AUDIO_CONTEXT; + +// // log on first initialization +// // allow optional silencing of this log +// if (!Tone.global.TONE_SILENCE_LOGGING){ +// var prefix = "v"; +// if (Tone.version === "dev"){ +// prefix = ""; +// } +// var printString = " * Tone.js " + prefix + Tone.version + " * "; +// // eslint-disable-next-line no-console +// console.log("%c" + printString, "background: #000; color: #fff"); +// } +// } else if (!Tone.supported && !Tone.global.TONE_SILENCE_LOGGING){ +// // eslint-disable-next-line no-console +// console.warn("This browser does not support Tone.js"); +// } diff --git a/Tone/core/Timeline.ts b/Tone/core/Timeline.ts new file mode 100644 index 00000000..298abb67 --- /dev/null +++ b/Tone/core/Timeline.ts @@ -0,0 +1,370 @@ +import { Tone } from "./Tone"; +import { optionsFromArguments } from "./Util"; + +interface TimelineOptions { + memory: number; +} + +/** + * An event must have a time number + */ +interface TimelineEvent { + time: number; +} + +/** + * @class A Timeline class for scheduling and maintaining state + * along a timeline. All events must have a "time" property. + * Internally, events are stored in time order for fast + * retrieval. + * @extends {Tone} + * @param {Positive} [memory=Infinity] The number of previous events that are retained. + */ +export class Timeline extends Tone { + + name = "Timeline"; + + /** + * The memory of the timeline, i.e. + * how many events in the past it will retain + */ + memory: number; + + /** + * The array of scheduled timeline events + */ + private _timeline: GenericEvent[]; + + constructor(options?: Partial); + constructor(memory?: number); + constructor() { + super(); + const options = optionsFromArguments(Timeline.getDefaults(), arguments, ["memory"]); + + this._timeline = []; + this.memory = options.memory; + } + + static getDefaults(): TimelineOptions { + return { + memory: Infinity, + }; + } + + /** + * The number of items in the timeline. + */ + get length(): number { + return this._timeline.length; + } + + /** + * Insert an event object onto the timeline. Events must have a "time" attribute. + * @param event The event object to insert into the timeline. + */ + add(event: GenericEvent): Timeline { + // the event needs to have a time attribute + this.assert(Reflect.has(event, "time"), "Timeline: events must have a time attribute"); + event.time = event.time.valueOf(); + const index = this._search(event.time); + this._timeline.splice(index + 1, 0, event); + // if the length is more than the memory, remove the previous ones + if (this.length > this.memory) { + const diff = this.length - this.memory; + this._timeline.splice(0, diff); + } + return this; + } + + /** + * Remove an event from the timeline. 
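A sketch of the Timeline in use: events only need a numeric time field, and the memory option trims the oldest entries once the limit is exceeded:

import { Timeline } from "./Tone/core/Timeline";

const timeline = new Timeline(4);       // retain at most 4 past events
timeline.add({ time: 0 });
timeline.add({ time: 0.5 });
timeline.add({ time: 1 });
console.log(timeline.length);           // 3
console.log(timeline.peek());           // { time: 0 }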
+ * @param {Object} event The event object to remove from the list. + * @returns {Timeline} this + */ + remove(event: GenericEvent): Timeline { + const index = this._timeline.indexOf(event); + if (index !== -1) { + this._timeline.splice(index, 1); + } + return this; + } + + /** + * Get the nearest event whose time is less than or equal to the given time. + * @param time The time to query. + */ + get(time: number): GenericEvent | null { + const index = this._search(time); + if (index !== -1) { + return this._timeline[index]; + } else { + return null; + } + } + + /** + * Return the first event in the timeline without removing it + * @returns {Object} The first event object + */ + peek(): GenericEvent | undefined { + return this._timeline[0]; + } + + /** + * Return the first event in the timeline and remove it + */ + shift(): GenericEvent | undefined { + return this._timeline.shift(); + } + + /** + * Get the event which is scheduled after the given time. + * @param time The time to query. + */ + getAfter(time: number): GenericEvent | null { + const index = this._search(time); + if (index + 1 < this._timeline.length) { + return this._timeline[index + 1]; + } else { + return null; + } + } + + /** + * Get the event before the event at the given time. + * @param time The time to query. + */ + getBefore(time: number): GenericEvent | null { + const len = this._timeline.length; + // if it's after the last item, return the last item + if (len > 0 && this._timeline[len - 1].time < time) { + return this._timeline[len - 1]; + } + const index = this._search(time); + if (index - 1 >= 0) { + return this._timeline[index - 1]; + } else { + return null; + } + } + + /** + * Cancel events at and after the given time + * @param time The time to query. + */ + cancel(after: number): Timeline { + if (this._timeline.length > 1) { + let index = this._search(after); + if (index >= 0) { + if (this._timeline[index].time === after) { + // get the first item with that time + for (let i = index; i >= 0; i--) { + if (this._timeline[i].time === after) { + index = i; + } else { + break; + } + } + this._timeline = this._timeline.slice(0, index); + } else { + this._timeline = this._timeline.slice(0, index + 1); + } + } else { + this._timeline = []; + } + } else if (this._timeline.length === 1) { + // the first item's time + if (this._timeline[0].time >= after) { + this._timeline = []; + } + } + return this; + } + + /** + * Cancel events before or equal to the given time. + * @param {Number} time The time to cancel before. + * @returns {Timeline} this + */ + cancelBefore(time): Timeline { + const index = this._search(time); + if (index >= 0) { + this._timeline = this._timeline.slice(index + 1); + } + return this; + } + + /** + * Returns the previous event if there is one. null otherwise + * @param {Object} event The event to find the previous one of + * @return {Object} The event right before the given event + */ + previousEvent(event): GenericEvent | null { + const index = this._timeline.indexOf(event); + if (index > 0) { + return this._timeline[index - 1]; + } else { + return null; + } + } + + /** + * Does a binary search on the timeline array and returns the + * nearest event index whose time is after or equal to the given time. + * If a time is searched before the first index in the timeline, -1 is returned. + * If the time is after the end, the index of the last item is returned. 
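Continuing that sketch, the query methods resolve events relative to a point in time:

timeline.get(0.75);          // { time: 0.5 } – nearest event at or before 0.75
timeline.getAfter(0.5);      // { time: 1 }
timeline.getBefore(0.5);     // { time: 0 }
timeline.cancel(0.5);        // drops the events at and after 0.5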
+ * @param time + * @private + */ + private _search(time: number): number { + if (this._timeline.length === 0) { + return -1; + } + let beginning = 0; + const len = this._timeline.length; + let end = len; + if (len > 0 && this._timeline[len - 1].time <= time) { + return len - 1; + } + while (beginning < end) { + // calculate the midpoint for roughly equal partition + let midPoint = Math.floor(beginning + (end - beginning) / 2); + const event = this._timeline[midPoint]; + const nextEvent = this._timeline[midPoint + 1]; + if (event.time === time) { + // choose the last one that has the same time + for (let i = midPoint; i < this._timeline.length; i++) { + const testEvent = this._timeline[i]; + if (testEvent.time === time) { + midPoint = i; + } + } + return midPoint; + } else if (event.time < time && nextEvent.time > time) { + return midPoint; + } else if (event.time > time) { + // search lower + end = midPoint; + } else { + // search upper + beginning = midPoint + 1; + } + } + return -1; + } + + /** + * Internal iterator. Applies extra safety checks for + * removing items from the array. + */ + private _iterate( + callback: (event: GenericEvent) => void, + lowerBound = 0, upperBound = this._timeline.length - 1, + ): void { + this._timeline.slice(lowerBound, upperBound + 1).forEach(callback); + } + + /** + * Iterate over everything in the array + * @param callback The callback to invoke with every item + */ + forEach(callback: (event: GenericEvent) => void): Timeline { + this._iterate(callback); + return this; + } + + /** + * Iterate over everything in the array at or before the given time. + * @param time The time to check if items are before + * @param callback The callback to invoke with every item + */ + forEachBefore(time, callback: (event: GenericEvent) => void): Timeline { + // iterate over the items in reverse so that removing an item doesn't break things + const upperBound = this._search(time); + if (upperBound !== -1) { + this._iterate(callback, 0, upperBound); + } + return this; + } + + /** + * Iterate over everything in the array after the given time. + * @param time The time to check if items are before + * @param callback The callback to invoke with every item + */ + forEachAfter(time, callback: (event: GenericEvent) => void): Timeline { + // iterate over the items in reverse so that removing an item doesn't break things + const lowerBound = this._search(time); + this._iterate(callback, lowerBound + 1); + return this; + } + + /** + * Iterate over everything in the array between the startTime and endTime. + * The timerange is inclusive of the startTime, but exclusive of the endTime. + * range = [startTime, endTime). + * @param startTime The time to check if items are before + * @param endTime The end of the test interval. + * @param callback The callback to invoke with every item + */ + forEachBetween(startTime: number, endTime: number, callback: (event: GenericEvent) => void): Timeline { + let lowerBound = this._search(startTime); + let upperBound = this._search(endTime); + if (lowerBound !== -1 && upperBound !== -1) { + if (this._timeline[lowerBound].time !== startTime) { + lowerBound += 1; + } + // exclusive of the end time + if (this._timeline[upperBound].time === endTime) { + upperBound -= 1; + } + this._iterate(callback, lowerBound, upperBound); + } else if (lowerBound === -1) { + this._iterate(callback, 0, upperBound); + } + return this; + } + + /** + * Iterate over everything in the array at or after the given time. 
Similar to + * forEachAfter, but includes the item(s) at the given time. + * @param time The time to check if items are before + * @param callback The callback to invoke with every item + */ + forEachFrom(time: number, callback: (event: GenericEvent) => void): Timeline { + // iterate over the items in reverse so that removing an item doesn't break things + let lowerBound = this._search(time); + // work backwards until the event time is less than time + while (lowerBound >= 0 && this._timeline[lowerBound].time >= time) { + lowerBound--; + } + this._iterate(callback, lowerBound + 1); + return this; + } + + /** + * Iterate over everything in the array at the given time + * @param time The time to check if items are before + * @param callback The callback to invoke with every item + */ + forEachAtTime(time: number, callback: (event: GenericEvent) => void): Timeline { + // iterate over the items in reverse so that removing an item doesn't break things + const upperBound = this._search(time); + if (upperBound !== -1) { + this._iterate(event => { + if (event.time === time) { + callback(event); + } + }, 0, upperBound); + } + return this; + } + + /** + * Clean up. + */ + dispose(): Timeline { + this._timeline = []; + return this; + } +} diff --git a/Tone/core/Tone.ts b/Tone/core/Tone.ts new file mode 100644 index 00000000..440cda2c --- /dev/null +++ b/Tone/core/Tone.ts @@ -0,0 +1,147 @@ +/** + * Tone.js + * @author Yotam Mann + * @license http://opensource.org/licenses/MIT MIT License + * @copyright 2014-2019 Yotam Mann + */ +import "../type/Units"; +import { version } from "../version"; + +/////////////////////////////////////////////////////////////////////////// +// TONE +/////////////////////////////////////////////////////////////////////////// + +// tslint:disable-next-line: no-empty-interface +export interface BaseToneOptions {} + +/** + * @class Tone is the base class of all other classes. + * @constructor + */ +export abstract class Tone { + + /** + * The version number semver + */ + static version: string = version; + + /** + * The name of the class + */ + protected abstract name: string; + + /** + * disconnect and dispose. + */ + abstract dispose(): Tone; + + /** + * Takes a partial options an returns the completed options by filling in the defaults + */ + static getDefaults(): BaseToneOptions { + return {}; + } + + /////////////////////////////////////////////////////////////////////////// + // DEBUGGING + /////////////////////////////////////////////////////////////////////////// + + /** + * Set this debug flag to log all events that happen in this class. + */ + protected debug: boolean = false; + + /** + * Prints the outputs to the console log for debugging purposes. + * Prints the contents only if either the object has a property + * called `debug` set to true, or a variable called TONE_DEBUG_CLASS + * is set to the name of the class. + * @param args + * @example + * //prints all logs originating from Tone.OscillatorNode + * Tone.global.TONE_DEBUG_CLASS = "OscillatorNode" + */ + protected log(...args): void { + // if the object is either set to debug = true + // or if there is a string on the Tone.global.with the class name + // if (this.debug || this.toString() === global.TONE_DEBUG_CLASS) { + // args.unshift(this.toString() + ":"); + // // eslint-disable-next-line no-console + // console.log(...args); + // } + } + + /** + * Assert that the statement is true, otherwise invoke the error. 
+ * @param {Boolean} statement + * @param {String} error The message which is passed into an Error + * @private + */ + protected assert(statement: boolean, error: string): void { + if (!statement) { + throw new Error(error); + } + } + + /////////////////////////////////////////////////////////////////////////// + // DEFAULTS + /////////////////////////////////////////////////////////////////////////// + + /** + * If the `given` parameter is undefined, use the `fallback`. + * If both `given` and `fallback` are object literals, it will + * return a deep copy which includes all of the parameters from both + * objects. If a parameter is undefined in given, it will return + * the fallback property. + *

+ * WARNING: if object is self referential, it will go into an an + * infinite recursive loop. + * @memberOf Tone + * @param {*} given + * @param {*} fallback + * @return {*} + */ + // static defaultArg(given, fallback) { + // if (isObject(given) && isObject(fallback)) { + // const ret = {}; + // // make a deep copy of the given object + // for (const givenProp in given) { + // ret[givenProp] = Tone.defaultArg(fallback[givenProp], given[givenProp]); + // } + // for (const fallbackProp in fallback) { + // ret[fallbackProp] = Tone.defaultArg(given[fallbackProp], fallback[fallbackProp]); + // } + // return ret; + // } else { + // return isUndef(given) ? fallback : given; + // } + // } + + // protected options(argsArray: IArguments, keys: string[]): object { + // let options: any = {}; + // const args = Array.from(argsArray); + // if (args[0] instanceof BaseAudioContext) { + // options.context = args.shift(); + // } + // if (args.length === 1 && isObject(args[0])) { + // options = Object.assign(options, args[0]); + // } else { + // for (let i = 0; i < keys.length; i++) { + // if (isDefined(args[i])) { + // options[keys[i]] = args[i]; + // } + // } + // } + // return deepMerge(this.getDefaults(), options); + // } + + /** + * Convert the class to a string + * @example + * const osc = new Oscillator() + * osc.toString() // "Oscillator" + */ + toString(): string { + return this.name; + } +} diff --git a/Tone/core/Util.ts b/Tone/core/Util.ts new file mode 100644 index 00000000..253f18d3 --- /dev/null +++ b/Tone/core/Util.ts @@ -0,0 +1,169 @@ +import { BaseToneOptions } from "./Tone"; + +// return an interface which excludes certain keys +export type Omit = Pick>; + +/** + * Recursively merge an object + * @param target the object to merge into + * @param sources the source objects to merge + */ +export function deepMerge(target: T, ...sources: T[]): T { + if (!sources.length) { return target; } + const source = sources.shift(); + + if (isObject(target) && isObject(source)) { + for (const key in source) { + if (isObject(source[key])) { + if (!target[key]) { Object.assign(target, { [key]: {} }); } + deepMerge(target[key], source[key] as any); + } else { + Object.assign(target, { [key]: source[key] as any }); + } + } + } + + return deepMerge(target, ...sources); +} + +/** + * Convert an args array into an object. + */ +export function optionsFromArguments(defaults: T, argsArray: IArguments, keys: string[]): T { + const opts: any = {}; + const args = Array.from(argsArray); + if (args.length === 1 && isObject(args[0])) { + deepMerge(opts, args[0]); + } else { + for (let i = 0; i < keys.length; i++) { + if (isDefined(args[i])) { + opts[keys[i]] = args[i]; + } + } + } + return deepMerge(defaults, opts); +} + +/** + * Return this instances default values by calling Constructor.getDefaults() + */ +export function getDefaultsFromInstance(instance: T): BaseToneOptions { + type ToneClass = { + constructor: ToneClass; + getDefaults: () => BaseToneOptions; + } & T; + + return (instance as ToneClass).constructor.getDefaults(); +} + +/** + * Take an array of arguments and return a formatted options object. 
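A sketch of how the two option helpers behave; optionsFromArguments expects an arguments object, so it is shown inside a plain function:

import { deepMerge, optionsFromArguments } from "./Tone/core/Util";

const merged = deepMerge({ a: 1, nested: { b: 2 } }, { nested: { c: 3 } });
// -> { a: 1, nested: { b: 2, c: 3 } }

function demo(gain?: number, units?: string) {
  const defaults = { gain: 1, units: "number" };
  // positional args or a single options object both normalize onto the defaults
  return optionsFromArguments(defaults, arguments, ["gain", "units"]);
}
console.log(demo(0.5));      // { gain: 0.5, units: "number" }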
+ * @param args the arguments passed into the function + * @param keys an array of keys + * @param defaults the class's defaults + */ +// export function defaultArg(given: T, fallback): T { + +// } + +/** + * Test if the arg is undefined + */ +export function isUndef(arg: any): arg is undefined { + return typeof arg === "undefined"; +} + +/** + * Test if the arg is not undefined + */ +export function isDefined(arg: T | undefined): arg is T { + return !isUndef(arg); +} + +/** + * Test if the arg is a function + */ +export function isFunction(arg: any): arg is (a: any) => any { + return typeof arg === "function"; +} + +/** + * Test if the argument is a number. + */ +export function isNumber(arg: any): arg is number { + return (typeof arg === "number"); +} + +/** + * Test if the given argument is an object literal (i.e. `{}`); + */ +export function isObject(arg: any): arg is object { + return (Object.prototype.toString.call(arg) === "[object Object]" && arg.constructor === Object); +} + +/** + * Test if the argument is a boolean. + */ +export function isBoolean(arg: any): arg is boolean { + return (typeof arg === "boolean"); +} + +/** + * Test if the argument is an Array + */ +export function isArray(arg: any): arg is any[] { + return (Array.isArray(arg)); +} + +/** + * Test if the argument is a string. + */ +export function isString(arg: any): arg is string { + return (typeof arg === "string"); +} + +/** + * Test if the argument is in the form of a note in scientific pitch notation. + * e.g. "C4" + */ +export function isNote(arg: any): arg is Note { + return isString(arg) && /^([a-g]{1}(?:b|#|x|bb)?)(-?[0-9]+)/i.test(arg); +} + +/** + * Make the property not writable using `defineProperty`. Internal use only. + */ +export function readOnly(target: object, property: string | string[]): void { + if (isArray(property)) { + property.forEach(str => readOnly(target, str)); + } else { + Object.defineProperty(target, property, { + enumerable : true, + writable : false, + }); + } +} + +/** + * Make an attribute writeable. Internal use only. + */ +export function writable(target: object, property: string | string[]): void { + if (isArray(property)) { + property.forEach(str => this._writable(str)); + } else { + Object.defineProperty(target, property, { + writable : true, + }); + } +} + +/** + * Apply a mixin to extend the derived constructor with the prototype of the baseConstructors + */ +export function applyMixins(derivedCtor: any, baseCtors: any[]): void { + baseCtors.forEach(baseCtor => { + Object.getOwnPropertyNames(baseCtor.prototype).forEach(name => { + derivedCtor.prototype[name] = baseCtor.prototype[name]; + }); + }); +} diff --git a/Tone/node/AbstractParam.ts b/Tone/node/AbstractParam.ts new file mode 100644 index 00000000..175a3f9d --- /dev/null +++ b/Tone/node/AbstractParam.ts @@ -0,0 +1,185 @@ +import "../type/Units"; +import { AudioProcessor, AudioProcessorOptions } from "./AudioProcessor"; + +export abstract class AbstractParam { + + /** + * Schedules a parameter value change at the given time. + * Does this work? is this working. are you working? + * @param value The value to set the signal. + * @param time The time when the change should occur. + * @example + * //set the frequency to "G4" in exactly 1 second from now. + * freq.setValueAtTime("G4", "+1"); + */ + abstract setValueAtTime(value: UnitMap[Type], time: Time): this; + + /** + * Get the signals value at the given time. Subsequent scheduling + * may invalidate the returned value. 
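The type guards narrow plain values at runtime; isNote, for example, accepts scientific pitch notation:

import { isDefined, isNote, isString } from "./Tone/core/Util";

isNote("C#4");            // true
isNote("H2");             // false – "h" is not a valid pitch class
isDefined(undefined);     // false
isString("3n");           // true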
+ * @param time When to get the value + */ + abstract getValueAtTime(time: Time): UnitMap[Type]; + + /** + * Creates a schedule point with the current value at the current time. + * This is useful for creating an automation anchor point in order to + * schedule changes from the current value. + * @param time When to add a ramp point. + * @example + * param.getValueAtTime(Tone.now()) + */ + abstract setRampPoint(time: Time): this; + + /** + * Schedules a linear continuous change in parameter value from the + * previous scheduled parameter value to the given value. + */ + abstract linearRampToValueAtTime(value: UnitMap[Type], time: Time): this; + + /** + * Schedules an exponential continuous change in parameter value from + * the previous scheduled parameter value to the given value. + */ + abstract exponentialRampToValueAtTime(value: UnitMap[Type], time: Time): this; + + /** + * Schedules an exponential continuous change in parameter value from + * the current time and current value to the given value over the + * duration of the rampTime. + * @param value The value to ramp to. + * @param rampTime the time that it takes the + * value to ramp from it's current value + * @param startTime When the ramp should start. + * @example + * //exponentially ramp to the value 2 over 4 seconds. + * signal.exponentialRampTo(2, 4); + */ + abstract exponentialRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this; + + /** + * Schedules an linear continuous change in parameter value from + * the current time and current value to the given value over the + * duration of the rampTime. + * + * @param value The value to ramp to. + * @param rampTime the time that it takes the + * value to ramp from it's current value + * @param startTime When the ramp should start. + * @returns {Param} this + * @example + * //linearly ramp to the value 4 over 3 seconds. + * signal.linearRampTo(4, 3); + */ + abstract linearRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this; + + /** + * Start exponentially approaching the target value at the given time. Since it + * is an exponential approach it will continue approaching after the ramp duration. The + * rampTime is the time that it takes to reach over 99% of the way towards the value. + * @param value The value to ramp to. + * @param rampTime the time that it takes the + * value to ramp from it's current value + * @param startTime When the ramp should start. + * @example + * //exponentially ramp to the value 2 over 4 seconds. + * signal.exponentialRampTo(2, 4); + */ + abstract targetRampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this; + + /** + * Start exponentially approaching the target value at the given time. Since it + * is an exponential approach it will continue approaching after the ramp duration. The + * rampTime is the time that it takes to reach over 99% of the way towards the value. This methods + * is similar to setTargetAtTime except the third argument is a time instead of a 'timeConstant' + * @param value The value to ramp to. + * @param time When the ramp should start. + * @param rampTime the time that it takes the value to ramp from it's current value + * @example + * //exponentially ramp to the value 2 over 4 seconds. + * signal.exponentialRampTo(2, 4); + */ + abstract exponentialApproachValueAtTime(value: UnitMap[Type], time: Time, rampTime: Time): this; + + /** + * Start exponentially approaching the target value at the given time with + * a rate having the given time constant. 
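A sketch of the scheduling API on a concrete implementation, assuming the Signal class added in this patch implements AbstractParam the way Tone.js signals do (the constructor argument is an assumption):

import { Signal } from "./Tone/signal/Signal";

const signal = new Signal(0);
signal.setValueAtTime(0, 0);
signal.linearRampToValueAtTime(1, 0.5);       // reach 1 at 0.5 seconds
signal.exponentialRampTo(0.001, 1, 0.5);      // then fade down over 1 second starting at 0.5
signal.cancelScheduledValues(2);              // drop anything scheduled at or after 2 seconds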
+ * @param value + * @param startTime + * @param timeConstant + */ + abstract setTargetAtTime(value: UnitMap[Type], startTime: Time, timeConstant: number): this; + + /** + * Sets an array of arbitrary parameter values starting at the given time + * for the given duration. + * + * @param values + * @param startTime + * @param duration + * @param scaling If the values in the curve should be scaled by some value + */ + abstract setValueCurveAtTime(values: Type[], startTime: Time, duration: Time, scaling?: number): this; + + /** + * Cancels all scheduled parameter changes with times greater than or + * equal to startTime. + */ + abstract cancelScheduledValues(time: Time): this; + + /** + * This is similar to [cancelScheduledValues](#cancelScheduledValues) except + * it holds the automated value at time until the next automated event. + */ + abstract cancelAndHoldAtTime(time: Time): this; + + /** + * Ramps to the given value over the duration of the rampTime. + * Automatically selects the best ramp type (exponential or linear) + * depending on the `units` of the signal + * + * @param value + * @param rampTime The time that it takes the value to ramp from it's current value + * @param startTime When the ramp should start. + * @example + * //ramp to the value either linearly or exponentially + * //depending on the "units" value of the signal + * signal.rampTo(0, 10); + * @example + * //schedule it to ramp starting at a specific time + * signal.rampTo(0, 10, 5) + */ + abstract rampTo(value: UnitMap[Type], rampTime: Time, startTime?: Time): this; + + /** + * The current value of the parameter. Setting this value + * is equivalent to setValueAtTime(value, context.currentTime) + */ + abstract value: UnitMap[Type]; + + /** + * If the value should be converted or not + */ + abstract convert: boolean; + + /** + * The unit type + */ + abstract readonly units: Unit; + + /** + * True if the signal value is being overridden by + * a connected signal. Internal use only. + */ + abstract readonly overridden: boolean = false; + + /** + * The minimum value of the output given the units + */ + abstract readonly minValue: UnitMap[Type]; + + /** + * The maximum value of the output given the units + */ + abstract readonly maxValue: UnitMap[Type]; +} diff --git a/Tone/node/AudioNode.ts b/Tone/node/AudioNode.ts new file mode 100644 index 00000000..3ce203db --- /dev/null +++ b/Tone/node/AudioNode.ts @@ -0,0 +1,275 @@ +import { connect, connectSeries, disconnect } from "../core/Connect"; +import { isArray, isDefined, optionsFromArguments } from "../core/Util"; +import { AudioProcessor, AudioProcessorOptions } from "./AudioProcessor"; +import { Param } from "./Param"; + +export type InputNode = ToneAudioNode | AudioNode | AudioParam | Param; +export type OutputNode = ToneAudioNode | AudioNode; + +export interface ChannelProperties { + channelCount: number; + channelCountMode: ChannelCountMode; + channelInterpretation: ChannelInterpretation; +} + +/** + * The possible options for this node + */ +export interface ToneAudioNodeOptions extends AudioProcessorOptions { + numberOfInputs: number; + numberOfOutputs: number; + channelCount: number; + channelCountMode: ChannelCountMode; + channelInterpretation: ChannelInterpretation; +} + +/** + * ToneAudioNode is the base class for classes which process audio. + */ +export abstract class ToneAudioNode +extends AudioProcessor { + + abstract name = "ToneAudioNode"; + + /** + * The input node or nodes. 
If the object is a source, + * it does not have any input and this.input is undefined. + */ + abstract input: InputNode | InputNode[] | undefined; + + /** + * The output nodes. If the object is a sink, + * it does not have any output and this.output is undefined. + */ + abstract output: OutputNode | OutputNode[] | undefined; + + /** + * The number of inputs feeding into the AudioNode. + * For source nodes, this will be 0. + */ + readonly numberOfInputs: number; + + /** + * The number of outputs of the AudioNode. + */ + readonly numberOfOutputs: number; + + /** + * List all of the node that must be set to match the ChannelProperties + */ + protected abstract _internalChannels: OutputNode[]; + + static getDefaults(): ToneAudioNodeOptions { + return Object.assign(AudioProcessor.getDefaults(), { + channelCount: 2, + channelCountMode: "max" as ChannelCountMode, + channelInterpretation: "speakers" as ChannelInterpretation, + numberOfInputs: 0, + numberOfOutputs: 0, + }); + } + + constructor(options: Partial); + constructor() { + super(optionsFromArguments(ToneAudioNode.getDefaults(), arguments, ["context"])); + + const options = optionsFromArguments(ToneAudioNode.getDefaults(), arguments, ["context"]); + + this.numberOfInputs = options.numberOfInputs; + this.numberOfOutputs = options.numberOfInputs; + } + + protected createInsOuts(numberOfInputs: number = 0, numberOfOutputs: number = 0): void { + if (numberOfInputs === 1) { + this.input = this.context.createGain(); + } else if (numberOfInputs > 1) { + this.input = []; + for (let i = 0; i < numberOfInputs; i++) { + this.input[i] = this.context.createGain(); + } + } + + if (numberOfOutputs === 1) { + this.output = this.context.createGain(); + } else if (numberOfOutputs > 1) { + this.output = []; + for (let o = 0; o < numberOfOutputs; o++) { + this.output[o] = this.context.createGain(); + } + } + } + + /////////////////////////////////////////////////////////////////////////// + // AUDIO PROPERTIES + /////////////////////////////////////////////////////////////////////////// + + /** + * Set the audio options for this node such as channelInterpretation + * channelCount, etc. + * @param options + */ + private _setChannelProperties(options: ChannelProperties): void { + if (this._internalChannels.length) { + this._internalChannels.forEach(node => { + node.channelCount = options.channelCount; + node.channelCountMode = options.channelCountMode; + node.channelInterpretation = options.channelInterpretation; + }); + } + } + + /** + * Get the current audio options for this node such as channelInterpretation + * channelCount, etc. + */ + private _getChannelProperties(): ChannelProperties { + if (this._internalChannels.length) { + const node = this._internalChannels[0]; + return { + channelCount: node.channelCount, + channelCountMode: node.channelCountMode, + channelInterpretation: node.channelInterpretation, + }; + } else { + // return the defaults + return { + channelCount: 2, + channelCountMode: "max", + channelInterpretation: "speakers", + }; + } + } + + /** + * channelCount is the number of channels used when up-mixing and down-mixing + * connections to any inputs to the node. The default value is 2 except for + * specific nodes where its value is specially determined. 
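How the channel attributes are meant to propagate, sketched with the Gain node added elsewhere in this patch (the zero-argument constructor is an assumption):

import { Gain } from "./Tone/node/Gain";

const gainNode = new Gain();
gainNode.channelCount = 1;
gainNode.channelCountMode = "explicit";
gainNode.channelInterpretation = "discrete";
// every node listed in _internalChannels now carries the same three settings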
+ */ + get channelCount(): number { + return this._getChannelProperties().channelCount; + } + set channelCount(channelCount: number) { + const props = this._getChannelProperties(); + // merge it with the other properties + this._setChannelProperties(Object.assign(props, { channelCount })); + } + + /** + * channelCountMode determines how channels will be counted when up-mixing and + * down-mixing connections to any inputs to the node. + * The default value is "max". This attribute has no effect for nodes with no inputs. + */ + get channelCountMode(): ChannelCountMode { + return this._getChannelProperties().channelCountMode; + } + set channelCountMode(channelCountMode: ChannelCountMode) { + const props = this._getChannelProperties(); + // merge it with the other properties + this._setChannelProperties(Object.assign(props, { channelCountMode })); + } + + /** + * channelInterpretation determines how individual channels will be treated + * when up-mixing and down-mixing connections to any inputs to the node. + * The default value is "speakers". + */ + get channelInterpretation(): ChannelInterpretation { + return this._getChannelProperties().channelInterpretation; + } + set channelInterpretation(channelInterpretation: ChannelInterpretation) { + const props = this._getChannelProperties(); + // merge it with the other properties + this._setChannelProperties(Object.assign(props, { channelInterpretation })); + } + + /////////////////////////////////////////////////////////////////////////// + // CONNECTIONS + /////////////////////////////////////////////////////////////////////////// + + /** + * connect the output of a ToneAudioNode to an AudioParam, AudioNode, or ToneAudioNode + * @param unit The output to connect to + * @param outputNum The output to connect from + * @param inputNum The input to connect to + */ + connect(destination: InputNode, outputNum = 0, inputNum = 0): this { + connect(this, destination, outputNum, inputNum); + return this; + } + + /** + * Connect the output to the context's destination node. + * alias for {@link toDestination} + */ + toMaster(): this { + this.connect(this.context.destination); + return this; + } + + /** + * disconnect the output + * @param output Either the output index to disconnect if the output is an array, or the node to disconnect from. + */ + disconnect(destination?: InputNode, outputNum = 0, inputNum = 0): this { + disconnect(this, destination, outputNum, inputNum); + return this; + } + + /** + * Connect the output of this node to the rest of the nodes in series. + * @param nodes + * @example + * //connect a node to an effect, panVol and then to the master output + * node.chain(effect, panVol, Tone.Destination); + */ + chain(...nodes: InputNode[]): this { + connectSeries(...nodes); + return this; + } + + /** + * connect the output of this node to the rest of the nodes in parallel. 
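+ * @example
+ * // a hedged sketch (not in the original patch); "reverb" and "chorus"
+ * // stand in for any two nodes that should receive the same signal
+ * node.fan(reverb, chorus);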
+ * @param nodes + * @returns this + */ + fan(...nodes: InputNode[]): this { + nodes.forEach(node => this.connect(node)); + return this; + } + + /** + * Dispose and disconnect + */ + dispose(): this { + if (isDefined(this.input)) { + if (isArray(this.input)) { + this.input.forEach(input => { + if (input instanceof ToneAudioNode) { + input.dispose(); + } else if (input instanceof AudioNode) { + input.disconnect(); + } + }); + } else { + if (this.input instanceof AudioNode) { + this.input.disconnect(); + } + } + } + if (isDefined(this.output)) { + if (isArray(this.output)) { + this.output.forEach(output => { + if (output instanceof ToneAudioNode) { + output.dispose(); + } else { + output.disconnect(); + } + }); + } else { + this.output.disconnect(); + } + } + this._internalChannels = []; + return this; + } +} diff --git a/Tone/node/AudioProcessor.ts b/Tone/node/AudioProcessor.ts new file mode 100644 index 00000000..9a4e5ce3 --- /dev/null +++ b/Tone/node/AudioProcessor.ts @@ -0,0 +1,156 @@ +import { getContext } from "../core/Global"; +import { Tone } from "../core/Tone"; +import { getDefaultsFromInstance, isDefined, isUndef, optionsFromArguments } from "../core/Util"; +import "../type/Units"; + +/** + * A unit which process audio + */ +export interface AudioProcessorOptions { + context: BaseAudioContext; +} + +/** + * The BaseAudioContext belonging to this node + */ +export abstract class AudioProcessor extends Tone { + + /** + * The context belonging to the node. + */ + readonly context: BaseAudioContext; + + readonly defaultContext?: BaseAudioContext; + + constructor(context?: BaseAudioContext | Partial) { + const options = optionsFromArguments(AudioProcessor.getDefaults(), arguments, ["context"]); + super(); + if (this.defaultContext) { + this.context = this.defaultContext; + } else { + this.context = options.context; + } + } + + static getDefaults(): AudioProcessorOptions { + return { + context: getContext(), + }; + } + + /** + * Return the current time of the BaseAudioContext clock plus the lookAhead. + */ + now(): Seconds { + return this.context.currentTime; + } + + /** + * Return the current time of the BaseAudioContext clock without any lookAhead. + */ + immediate(): Seconds { + return this.context.currentTime; + } + + /** + * The duration in seconds of one sample. + */ + get sampleTime(): Seconds { + return 1 / this.context.sampleRate; + } + + /** + * The number of seconds of 1 processing block (128 samples) + */ + get blockTime(): Seconds { + return 128 / this.context.sampleRate; + } + + /** + * Convert the incoming time to seconds + */ + toSeconds(time: Time): Seconds { + if (isUndef(time)) { + return this.now(); + } else { + return time as Seconds; + } + } + + /** + * Convert the input to a frequency number + */ + toFrequency(frequency: Frequency): Hertz { + return frequency as Hertz; + } + + /////////////////////////////////////////////////////////////////////////// + // GET/SET + /////////////////////////////////////////////////////////////////////////// + + /** + * Get the object's attributes. Given no arguments get + * will return all available object properties and their corresponding + * values. Pass in a single attribute to retrieve or an array + * of attributes. The attribute strings can also include a "." + * to access deeper properties. + * @param params the parameters to get, otherwise will return all available. 
+ * @example + * osc.get(); + * //returns {"type" : "sine", "frequency" : 440, ...etc} + * @example + * osc.get("type"); + * //returns { "type" : "sine"} + * @example + * //use dot notation to access deep properties + * synth.get(["envelope.attack", "envelope.release"]); + * //returns {"envelope" : {"attack" : 0.2, "release" : 0.4}} + */ + get(): Options { + const defaults = getDefaultsFromInstance(this) as Options; + Object.keys(defaults).forEach(attribute => { + if (Reflect.has(this, attribute)) { + const member = this[attribute]; + if (isDefined(member) && isDefined(member.value)) { + defaults[attribute] = member.value; + } else if (member instanceof AudioProcessor) { + defaults[attribute] = member.get(); + } else { + defaults[attribute] = member; + } + } + + }); + return defaults; + } + + /** + * Set the parameters at once. Either pass in an + * object mapping parameters to values, or to set a + * single parameter, by passing in a string and value. + * The last argument is an optional ramp time which + * will ramp any signal values to their destination value + * over the duration of the rampTime. + * @param params + * @example + * //set values using an object + * filter.set({ + * "frequency" : 300, + * "type" : "highpass" + * }); + */ + set(props: Partial): AudioProcessor { + Object.keys(props).forEach(attribute => { + if (Reflect.has(this, attribute)) { + if (isDefined(this[attribute]) && isDefined(this[attribute].value)) { + this[attribute].value = props[attribute]; + } else if (this[attribute] instanceof AudioProcessor) { + this[attribute].set(props[attribute]); + } else { + this[attribute] = props[attribute]; + } + } + }); + return this; + } +} diff --git a/Tone/node/Delay.ts b/Tone/node/Delay.ts new file mode 100644 index 00000000..2b914e6b --- /dev/null +++ b/Tone/node/Delay.ts @@ -0,0 +1,82 @@ +import { optionsFromArguments, readOnly } from "../core/Util"; +import { ToneAudioNode, ToneAudioNodeOptions } from "./AudioNode"; +import { Param } from "./Param"; + +export interface DelayOptions extends ToneAudioNodeOptions { + delayTime: number; + maxDelay: number; +} + +/** + * Wrapper around Web Audio's native [DelayNode](http://webaudio.github.io/web-audio-api/#the-delaynode-interface). + * @param delayTime The delay applied to the incoming signal. + * @param maxDelay The maximum delay time. + */ +export class Delay extends ToneAudioNode { + + readonly name = "Delay"; + + /** + * The maximum delay time. This cannot be changed after + * the value is passed into the constructor. + */ + readonly maxDelay: Time; + + /** + * The amount of time the incoming signal is delayed. 
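+ * @example
+ * // a minimal sketch (not part of the original patch):
+ * // start with a quarter-second delay, then change it to half a second
+ * const delay = new Delay(0.25);
+ * delay.delayTime.value = 0.5;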
+ */ + readonly delayTime: Param<"time">; + + /** + * Private reference to the internal DelayNode + */ + private _delayNode: DelayNode; + readonly input: DelayNode; + readonly output: DelayNode; + + /** + * The internal channels for channel routing changes + */ + protected _internalChannels: AudioNode[]; + + constructor(options?: Partial) + constructor(delayTime?: Time, maxDelay?: Time) + constructor() { + super(optionsFromArguments(Delay.getDefaults(), arguments, ["delayTime", "maxDelay"])); + + const options = optionsFromArguments(Delay.getDefaults(), arguments, ["delayTime", "maxDelay"]); + + this.maxDelay = Math.max(this.toSeconds(options.maxDelay), this.toSeconds(options.delayTime)); + + this._delayNode = this.input = this.output = this.context.createDelay(options.maxDelay); + this._internalChannels = [this._delayNode]; + + this.delayTime = new Param({ + context: this.context, + param : this._delayNode.delayTime, + units : "time", + value : options.delayTime, + }); + + readOnly(this, "delayTime"); + } + + static getDefaults(): DelayOptions { + return Object.assign(ToneAudioNode.getDefaults(), { + delayTime : 0, + maxDelay: 1, + numberOfInputs: 1, + numberOfOutputs: 1, + }); + } + + /** + * Clean up. + */ + dispose(): this { + super.dispose(); + this._delayNode.disconnect(); + this.delayTime.dispose(); + return this; + } +} diff --git a/Tone/node/Gain.ts b/Tone/node/Gain.ts new file mode 100644 index 00000000..3eefe2ef --- /dev/null +++ b/Tone/node/Gain.ts @@ -0,0 +1,77 @@ +import { optionsFromArguments, readOnly } from "../core/Util"; +import "../type/Units"; +import { ToneAudioNode, ToneAudioNodeOptions } from "./AudioNode"; +import { Param } from "./Param"; + +interface GainOptions extends ToneAudioNodeOptions { + gain: number; + units: Unit; + convert: boolean; +} + +/** + * A thin wrapper around the Native Web Audio GainNode. + * The GainNode is a basic building block of the Web Audio + * API and is useful for routing audio and adjusting gains. + * @param gain The initial gain of the GainNode + * @param units The units of the gain parameter. + */ +export class Gain extends ToneAudioNode { + + readonly name = "Gain"; + + /** + * The gain parameter of the gain node. + */ + readonly gain: Param; + + /** + * The wrapped GainNode. + */ + private _gainNode: GainNode = this.context.createGain(); + + // input = output + readonly input: GainNode = this._gainNode; + readonly output: GainNode = this._gainNode; + + /** + * Add all of the gain nodes + */ + protected _internalChannels: AudioNode[] = [this._gainNode]; + + constructor(gain?: GainFactor, units?: Unit); + constructor(options?: Partial); + constructor() { + super(optionsFromArguments(Gain.getDefaults(), arguments, ["gain", "units"])); + const options = optionsFromArguments(Gain.getDefaults(), arguments, ["gain", "units"]); + + this.gain = new Param({ + context : this.context, + convert : options.convert, + param : this._gainNode.gain, + units : options.units, + value : options.gain, + }); + readOnly(this, "gain"); + } + + static getDefaults(): GainOptions { + return Object.assign(ToneAudioNode.getDefaults(), { + convert : true, + gain : 1, + numberOfInputs: 1, + numberOfOutputs: 1, + units : "gain" as Unit, + }); + } + + /** + * Clean up. 
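+ * @example
+ * // a hedged sketch (not part of the original patch): create a half-gain
+ * // node, then free its resources when it is no longer needed
+ * const gain = new Gain(0.5);
+ * gain.dispose();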
+ */ + dispose(): this { + super.dispose(); + this._gainNode.disconnect(); + this.gain.dispose(); + return this; + } +} diff --git a/Tone/node/Param.ts b/Tone/node/Param.ts new file mode 100644 index 00000000..24e64ebd --- /dev/null +++ b/Tone/node/Param.ts @@ -0,0 +1,414 @@ +import { Timeline } from "../core/Timeline"; +import { deepMerge, isDefined, optionsFromArguments } from "../core/Util"; +import { dbToGain, gainToDb } from "../type/Conversions"; +import "../type/Units"; +import { AbstractParam } from "./AbstractParam"; +import { AudioProcessor, AudioProcessorOptions } from "./AudioProcessor"; + +export interface ParamOptions extends AudioProcessorOptions { + units: Unit; + value?: number; + param: AudioParam; + convert: boolean; +} + +/** + * the possible automation types + */ +type AutomationType = "linear" | "exponential" | "setValue" | "setTarget" | "cancel"; + +/** + * The events on the automation + */ +export interface AutomationEvent { + type: AutomationType; + time: number; + value: number; + constant?: number; +} + +/** + * Param wraps the native Web Audio's AudioParam to provide + * additional unit conversion functionality. It also + * serves as a base-class for classes which have a single, + * automatable parameter. + */ +export class Param +extends AudioProcessor +implements AbstractParam { + + name = "Param"; + + static getDefaults(): ParamOptions { + return Object.assign(AudioProcessor.getDefaults(), { + convert: true, + units: "number" as Unit, + } as ParamOptions); + } + + /** + * The input connection + */ + readonly input: AudioParam; + readonly units: Unit; + convert: boolean; + overridden: boolean = false; + + /** + * The timeline which tracks all of the automations. + */ + private _events: Timeline; + + /** + * The native parameter to control + */ + protected _param: AudioParam; + + /** + * The default value before anything is assigned + */ + private _initialValue: number; + + /** + * The minimum output value + */ + private _minOutput = 1e-5; + + constructor(param: AudioParam, units?: Unit, convert?: boolean); + constructor(options: Partial); + constructor() { + super(optionsFromArguments(Param.getDefaults(), arguments, ["param", "units", "convert"])); + + const options = optionsFromArguments(Param.getDefaults(), arguments, ["param", "units", "convert"]); + + this.assert(isDefined(options.param) && options.param instanceof AudioParam, "param must be an AudioParam"); + + // initialize + this._param = this.input = options.param; + this._events = new Timeline(1000); + this._initialValue = this._param.value; + this.units = options.units; + this.convert = options.convert; + + // if the value is defined, set it immediately + if (isDefined(options.value)) { + this.setValueAtTime(options.value, 0); + } + } + + get value(): UnitMap[Type] { + const now = this.now(); + return this.getValueAtTime(now); + } + set value(value: UnitMap[Type]) { + this._initialValue = this._fromType(value); + this.cancelScheduledValues(this.now()); + this.setValueAtTime(value, this.now()); + } + + get minValue(): number { + if (this.units === "time" || this.units === "frequency" || + this.units === "normalRange" || this.units === "positive" || + this.units === "transportTime" || this.units === "ticks" || + this.units === "bpm" || this.units === "hertz" || this.units === "samples") { + return 0; + } else if (this.units === "audioRange") { + return -1; + } else if (this.units === "decibels") { + return -Infinity; + } else { + return this._param.minValue; + } + } + + get maxValue(): number { + if 
(this.units === "normalRange" ||
+ this.units === "audioRange") {
+ return 1;
+ } else {
+ return this._param.maxValue;
+ }
+ }
+
+ /**
+ * Type guard based on the unit name
+ */
+ private _is<T>(arg: any, type: Unit): arg is T {
+ return this.units === type;
+ }
+
+ /**
+ * Convert the given value from the type specified by Param.units
+ * into the destination value (such as Gain or Frequency).
+ */
+ private _fromType(val: UnitMap[Type]): number {
+ if (this.convert && !this.overridden) {
+ if (this._is