Tone.js/Tone/core/context/Context.ts

import { Ticker, TickerClockSource } from "../clock/Ticker";
import { Seconds } from "../type/Units";
import { isAudioContext } from "../util/AdvancedTypeCheck";
import { optionsFromArguments } from "../util/Defaults";
import { Omit } from "../util/Interface";
import { Timeline } from "../util/Timeline";
import { isDefined, isString } from "../util/TypeCheck";
import { AnyAudioContext, createAudioContext, createAudioWorkletNode } from "./AudioContext";
import { closeContext, initializeContext } from "./ContextInitialization";
import { BaseContext, ContextLatencyHint } from "./BaseContext";
import { assert } from "../util/Debug";
type Transport = import("../clock/Transport").Transport;
type Destination = import("./Destination").Destination;
type Listener = import("./Listener").Listener;
type Draw = import("../util/Draw").Draw;
// these are either not used in Tone.js or deprecated and not implemented.
export type ExcludedFromBaseAudioContext = "onstatechange" | "addEventListener" | "removeEventListener" | "listener" | "dispatchEvent" | "audioWorklet" | "destination" | "createScriptProcessor";
// "createMediaStreamSource" | "createMediaElementSource" | "createMediaStreamTrackSource" |
// "baseLatency" | "suspend" |
// the subset of the BaseAudioContext which Tone.Context implements.
export type BaseAudioContextSubset = Omit<BaseAudioContext, ExcludedFromBaseAudioContext>;
export interface ContextOptions {
clockSource: TickerClockSource;
latencyHint: ContextLatencyHint;
lookAhead: Seconds;
updateInterval: Seconds;
context: AnyAudioContext;
}
export interface ContextTimeoutEvent {
callback: (...args: any[]) => void;
id: number;
time: Seconds;
}
/**
* Wrapper around the native AudioContext.
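 * A minimal usage sketch (assuming `Tone` is the standard namespace import from the "tone" package):
 * @example
 * import * as Tone from "tone";
 * // wrap an existing native AudioContext
 * const audioContext = new AudioContext();
 * const context = new Tone.Context(audioContext);
 * // or let Tone.Context create its own native context from options
 * const interactiveContext = new Tone.Context({ latencyHint: "interactive" });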
* @category Core
*/
export class Context extends BaseContext {
readonly name: string = "Context";
/**
* The amount of time into the future events are scheduled. Giving Web Audio
* a short amount of time into the future to schedule events can reduce clicks and
* improve performance. This value can be set to 0 to get the lowest latency.
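 * A small sketch of adjusting the lookAhead (assumes the global context returned by Tone.getContext()):
 * @example
 * const context = Tone.getContext();
 * // schedule less far ahead for lower latency
 * context.lookAhead = 0.01;
 * // or remove the lookAhead entirely for the lowest possible latency
 * context.lookAhead = 0;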
*/
lookAhead: Seconds;
/**
* private reference to the BaseAudioContext
*/
protected readonly _context: AnyAudioContext;
/**
* The Ticker which provides a reliable callback at a regular interval
*/
private readonly _ticker: Ticker;
/**
* The most recently set latency hint
*/
private _latencyHint!: ContextLatencyHint | Seconds;
/**
* A map of constant values to their looping AudioBufferSourceNodes
*/
private _constants = new Map<number, AudioBufferSourceNode>();
/**
* All of the setTimeout events.
*/
private _timeouts: Timeline<ContextTimeoutEvent> = new Timeline();
/**
* The timeout id counter
*/
private _timeoutIds = 0;
/**
* A reference to the Transport singleton belonging to this context
*/
private _transport!: Transport;
/**
* A reference to the Listener singleton belonging to this context
*/
private _listener!: Listener;
/**
* A reference to the Destination singleton belonging to this context
*/
private _destination!: Destination;
/**
* A reference to the Draw singleton belonging to this context
*/
private _draw!: Draw;
/**
* Private indicator if the context has been initialized
*/
private _initialized = false;
/**
* Indicates if the context is an OfflineAudioContext or an AudioContext
*/
readonly isOffline: boolean = false;
constructor(context?: AnyAudioContext);
constructor(options?: Partial<ContextOptions>);
constructor() {
super();
const options = optionsFromArguments(Context.getDefaults(), arguments, ["context"]);
if (options.context) {
this._context = options.context;
} else {
this._context = createAudioContext({
latencyHint: options.latencyHint,
});
}
this._ticker = new Ticker(this.emit.bind(this, "tick"), options.clockSource, options.updateInterval);
this.on("tick", this._timeoutLoop.bind(this));
// fwd events from the context
this._context.onstatechange = () => {
this.emit("statechange", this.state);
};
this._setLatencyHint(options.latencyHint);
this.lookAhead = options.lookAhead;
}
static getDefaults(): ContextOptions {
return {
clockSource: "worker",
latencyHint: "interactive",
lookAhead: 0.1,
updateInterval: 0.05,
} as ContextOptions;
}
/**
* Finish setting up the context. **You usually do not need to do this manually.**
*/
private initialize(): this {
if (!this._initialized) {
// add any additional modules
initializeContext(this);
this._initialized = true;
}
return this;
}
//---------------------------
// BASE AUDIO CONTEXT METHODS
//---------------------------
createAnalyser(): AnalyserNode {
return this._context.createAnalyser();
}
createOscillator(): OscillatorNode {
return this._context.createOscillator();
}
createBufferSource(): AudioBufferSourceNode {
return this._context.createBufferSource();
}
createBiquadFilter(): BiquadFilterNode {
return this._context.createBiquadFilter();
}
createBuffer(numberOfChannels: number, length: number, sampleRate: number): AudioBuffer {
return this._context.createBuffer(numberOfChannels, length, sampleRate);
}
createChannelMerger(numberOfInputs?: number | undefined): ChannelMergerNode {
return this._context.createChannelMerger(numberOfInputs);
}
createChannelSplitter(numberOfOutputs?: number | undefined): ChannelSplitterNode {
return this._context.createChannelSplitter(numberOfOutputs);
}
createConstantSource(): ConstantSourceNode {
return this._context.createConstantSource();
}
createConvolver(): ConvolverNode {
return this._context.createConvolver();
}
createDelay(maxDelayTime?: number | undefined): DelayNode {
return this._context.createDelay(maxDelayTime);
}
createDynamicsCompressor(): DynamicsCompressorNode {
return this._context.createDynamicsCompressor();
}
createGain(): GainNode {
return this._context.createGain();
}
createIIRFilter(feedForward: number[] | Float32Array, feedback: number[] | Float32Array): IIRFilterNode {
// @ts-ignore
return this._context.createIIRFilter(feedForward, feedback);
}
createPanner(): PannerNode {
return this._context.createPanner();
}
createPeriodicWave(
real: number[] | Float32Array,
imag: number[] | Float32Array,
constraints?: PeriodicWaveConstraints | undefined,
): PeriodicWave {
return this._context.createPeriodicWave(real, imag, constraints);
}
createStereoPanner(): StereoPannerNode {
return this._context.createStereoPanner();
}
createWaveShaper(): WaveShaperNode {
return this._context.createWaveShaper();
}
createMediaStreamSource(stream: MediaStream): MediaStreamAudioSourceNode {
assert(isAudioContext(this._context), "Not available if OfflineAudioContext");
const context = this._context as AudioContext;
return context.createMediaStreamSource(stream);
}
createMediaStreamDestination(): MediaStreamAudioDestinationNode {
assert(isAudioContext(this._context), "Not available if OfflineAudioContext");
const context = this._context as AudioContext;
return context.createMediaStreamDestination();
}
decodeAudioData(audioData: ArrayBuffer): Promise<AudioBuffer> {
return this._context.decodeAudioData(audioData);
}
/**
* The current time in seconds of the AudioContext.
*/
get currentTime(): Seconds {
return this._context.currentTime;
}
/**
* The current state of the AudioContext.
*/
get state(): AudioContextState {
return this._context.state;
}
/**
* The sample rate of the AudioContext.
*/
get sampleRate(): number {
return this._context.sampleRate;
}
/**
* The Listener belonging to this context.
*/
get listener(): Listener {
this.initialize();
return this._listener;
}
set listener(l) {
assert(!this._initialized, "The listener cannot be set after initialization.");
this._listener = l;
}
/**
* There is only one Transport per Context. It is created on initialization.
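 * A short sketch (assumes the standard `Tone` namespace import):
 * @example
 * // the Transport belonging to the global context
 * const transport = Tone.getContext().transport;
 * transport.start();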
*/
get transport(): Transport {
this.initialize();
return this._transport;
}
set transport(t: Transport) {
assert(!this._initialized, "The transport cannot be set after initialization.");
this._transport = t;
}
/**
* This is the Draw object for the context, which is useful for synchronizing the draw frame with the Tone.js clock.
*/
get draw(): Draw {
this.initialize();
return this._draw;
}
set draw(d) {
assert(!this._initialized, "Draw cannot be set after initialization.");
this._draw = d;
}
/**
* A reference to the Context's destination node.
*/
get destination(): Destination {
this.initialize();
return this._destination;
}
set destination(d: Destination) {
assert(!this._initialized, "The destination cannot be set after initialization.");
this._destination = d;
}
//--------------------------------------------
// AUDIO WORKLET
//--------------------------------------------
/**
* Maps a module name to the promise returned by addModule
*/
private _workletModules: Map<string, Promise<void>> = new Map();
/**
* Create an audio worklet node from a name and options. The module
* must first be loaded using [[addAudioWorkletModule]].
*/
createAudioWorkletNode(
name: string,
options?: Partial<AudioWorkletNodeOptions>
): AudioWorkletNode {
return createAudioWorkletNode(this.rawContext, name, options);
}
/**
* Add an AudioWorkletProcessor module
* @param url The url of the module
* @param name The name of the module
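 * A sketch of loading and then instantiating a worklet; the url and processor name below are placeholders:
 * @example
 * const context = Tone.getContext();
 * // "./my-processor.js" must call registerProcessor("my-processor", ...) internally
 * await context.addAudioWorkletModule("./my-processor.js", "my-processor");
 * const node = context.createAudioWorkletNode("my-processor");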
*/
async addAudioWorkletModule(url: string, name: string): Promise<void> {
assert(isDefined(this.rawContext.audioWorklet), "AudioWorkletNode is only available in a secure context (https or localhost)");
if (!this._workletModules.has(name)) {
this._workletModules.set(name, this.rawContext.audioWorklet.addModule(url));
}
await this._workletModules.get(name);
}
/**
* Returns a promise which resolves when all of the worklets have been loaded on this context
*/
protected async workletsAreReady(): Promise<void> {
const promises: Promise<void>[] = [];
this._workletModules.forEach(promise => promises.push(promise));
await Promise.all(promises);
}
//---------------------------
// TICKER
//---------------------------
/**
* How often the interval callback is invoked.
* This number corresponds to how responsive the scheduling
* can be. context.updateInterval + context.lookAhead gives you the
* total latency between scheduling an event and hearing it.
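 * A sketch of estimating that total scheduling latency (assumes the global context):
 * @example
 * const context = Tone.getContext();
 * // total latency between scheduling an event and hearing it
 * const totalLatency = context.updateInterval + context.lookAhead;
 * console.log(totalLatency);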
*/
get updateInterval(): Seconds {
return this._ticker.updateInterval;
}
set updateInterval(interval: Seconds) {
this._ticker.updateInterval = interval;
}
/**
* What the source of the clock is, either "worker" (default),
* "timeout", or "offline" (none).
*/
get clockSource(): TickerClockSource {
return this._ticker.type;
}
set clockSource(type: TickerClockSource) {
this._ticker.type = type;
}
/**
* The type of playback, which affects tradeoffs between audio
* output latency and responsiveness.
* In addition to setting the value in seconds, the latencyHint also
* accepts the strings "interactive" (prioritizes low latency),
* "playback" (prioritizes sustained playback), "balanced" (balances
* latency and performance).
* @example
* // prioritize sustained playback
* const context = new Tone.Context({ latencyHint: "playback" });
* // set this context as the global Context
* Tone.setContext(context);
* // the global context is gettable with Tone.getContext()
* console.log(Tone.getContext().latencyHint);
*/
get latencyHint(): ContextLatencyHint | Seconds {
return this._latencyHint;
}
/**
* Update the lookAhead and updateInterval based on the latencyHint
*/
private _setLatencyHint(hint: ContextLatencyHint | Seconds): void {
let lookAheadValue = 0;
this._latencyHint = hint;
if (isString(hint)) {
switch (hint) {
case "interactive":
lookAheadValue = 0.1;
break;
case "playback":
lookAheadValue = 0.5;
break;
case "balanced":
lookAheadValue = 0.25;
break;
}
}
this.lookAhead = lookAheadValue;
this.updateInterval = lookAheadValue / 2;
}
/**
* The unwrapped AudioContext or OfflineAudioContext
*/
get rawContext(): AnyAudioContext {
return this._context;
}
/**
* The current audio context time plus a short [[lookAhead]].
*/
now(): Seconds {
return this._context.currentTime + this.lookAhead;
}
/**
* The current audio context time without the [[lookAhead]].
* In most cases it is better to use [[now]] instead of [[immediate]] since
* with [[now]] the [[lookAhead]] is applied equally to _all_ components including internal components,
* making sure that everything is scheduled in sync. Mixing [[now]] and [[immediate]]
* can cause some timing issues. If no lookAhead is desired, you can set the [[lookAhead]] to `0`.
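 * A small sketch contrasting the two (assumes the global context):
 * @example
 * const context = Tone.getContext();
 * // now() is immediate() plus the context's lookAhead
 * console.log(context.now() - context.immediate()); // approximately context.lookAhead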
*/
immediate(): Seconds {
return this._context.currentTime;
}
/**
* Starts the audio context from a suspended state. This is required
* to initially start the AudioContext. See [[Tone.start]]
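 * A sketch of resuming from a user gesture via the global [[Tone.start]] helper:
 * @example
 * // browsers only allow audio to start in response to a user gesture
 * document.addEventListener("click", async () => {
 * 	await Tone.start();
 * 	console.log(Tone.getContext().state); // "running"
 * });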
*/
resume(): Promise<void> {
if (this._context.state === "suspended" && isAudioContext(this._context)) {
return this._context.resume();
} else {
return Promise.resolve();
}
}
/**
* Close the context. Once closed, the context can no longer be used and
* any AudioNodes created from the context will be silent.
*/
async close(): Promise<void> {
if (isAudioContext(this._context)) {
await this._context.close();
}
if (this._initialized) {
closeContext(this);
}
}
/**
* **Internal** Generate a looped buffer at some constant value.
*/
getConstant(val: number): AudioBufferSourceNode {
if (this._constants.has(val)) {
return this._constants.get(val) as AudioBufferSourceNode;
} else {
const buffer = this._context.createBuffer(1, 128, this._context.sampleRate);
const arr = buffer.getChannelData(0);
for (let i = 0; i < arr.length; i++) {
arr[i] = val;
}
const constant = this._context.createBufferSource();
constant.channelCount = 1;
constant.channelCountMode = "explicit";
constant.buffer = buffer;
constant.loop = true;
constant.start(0);
this._constants.set(val, constant);
return constant;
}
}
/**
* Clean up. Also closes the audio context.
*/
dispose(): this {
super.dispose();
this._ticker.dispose();
this._timeouts.dispose();
this._constants.forEach(source => source.disconnect());
return this;
}
//---------------------------
// TIMEOUTS
//---------------------------
/**
* The private loop which keeps track of the context's scheduled timeouts.
* Invoked from the clock source.
*/
private _timeoutLoop(): void {
const now = this.now();
let firstEvent = this._timeouts.peek();
while (this._timeouts.length && firstEvent && firstEvent.time <= now) {
// invoke the callback
firstEvent.callback();
// shift the first event off
this._timeouts.shift();
// get the next one
firstEvent = this._timeouts.peek();
}
}
/**
* A setTimeout which is guaranteed by the clock source.
* Also runs in the offline context.
* @param fn The callback to invoke
* @param timeout The timeout in seconds
* @returns ID to use when invoking Context.clearTimeout
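 * A short sketch (assumes the global context from Tone.getContext()):
 * @example
 * const context = Tone.getContext();
 * // invoke the callback roughly one second from now
 * const id = context.setTimeout(() => console.log("one second later"), 1);
 * // the returned id can cancel the callback before it fires
 * context.clearTimeout(id);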
*/
setTimeout(fn: (...args: any[]) => void, timeout: Seconds): number {
this._timeoutIds++;
const now = this.now();
this._timeouts.add({
callback: fn,
id: this._timeoutIds,
time: now + timeout,
});
return this._timeoutIds;
}
/**
* Clears a timeout which was previously scheduled with Tone.context.setTimeout
* @param id The ID returned from setTimeout
*/
clearTimeout(id: number): this {
this._timeouts.forEach(event => {
if (event.id === id) {
this._timeouts.remove(event);
}
});
return this;
}
/**
* Clear the function scheduled by [[setInterval]]
*/
clearInterval(id: number): this {
return this.clearTimeout(id);
}
/**
* Adds a repeating event to the context's callback clock
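 * A short sketch (assumes the global context):
 * @example
 * const context = Tone.getContext();
 * // log every half second on the context's clock
 * const id = context.setInterval(() => console.log("tick"), 0.5);
 * // stop the repetition later with clearInterval
 * context.clearInterval(id);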
*/
setInterval(fn: (...args: any[]) => void, interval: Seconds): number {
const id = ++this._timeoutIds;
const intervalFn = () => {
const now = this.now();
this._timeouts.add({
callback: () => {
// invoke the callback
fn();
// invoke the event to repeat it
intervalFn();
},
id,
time: now + interval,
});
};
// kick it off
intervalFn();
return id;
}
}