Merge branch 'dev'

Yotam Mann 2020-09-23 21:25:45 -04:00
commit 641ada9f09
50 changed files with 1013 additions and 1139 deletions


@@ -10,7 +10,7 @@ To contribute examples, please follow the current style of the examples. Add you
There is always more work that can be done on documentation, especially adding good examples to methods and members to make the docs more informative and useful for people coming from diverse musical and technical backgrounds.
All of the docs are written in [jsdoc](http://usejsdoc.org/)-style comments in the source code. If you catch a mistake, please send a pull request.
All of the docs are written in [TypeDoc](https://typedoc.org/)-style comments in the source code. If you catch a mistake, please send a pull request.
Along with this, it'd be great to integrate more visuals and references in the docs to help illustrate concepts.
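For contributors unfamiliar with the format, here is a minimal sketch of what a TypeDoc-style comment looks like on a simple helper (the function is modeled on a Tone.js conversion utility; the exact tags used vary across the codebase):

```ts
/**
 * Convert a linear gain (0-1) into decibels.
 * @param gain The linear gain value.
 * @example
 * gainToDb(0.5); // roughly -6.02
 * @category Core
 */
export function gainToDb(gain: number): number {
    return 20 * Math.log10(gain);
}
```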


@@ -1,5 +1,5 @@
sudo: false
dist: trusty
dist: bionic
language: node_js
node_js:
- '11'


@@ -46,6 +46,7 @@
* Instead of Buffer.on("loaded"), use: `Tone.loaded(): Promise<void>`
* Removing bower ([71c8b3b](https://github.com/Tonejs/Tone.js/commit/71c8b3bbb96e45cfc4aa2cce8a2d8c61a092c91e)), closes [#197](https://github.com/Tonejs/Tone.js/issues/197)
* Removing Ctrl classes ([51d06bd](https://github.com/Tonejs/Tone.js/commit/51d06bd9873b2f1936a3169930f9696f1ccfb845))
* `Players.get(name: string)` is renamed to `Players.player(name: string)`
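For reference, a migration sketch for that rename (the sample name and path are placeholders):

```ts
const players = new Tone.Players({
    kick: "kick.mp3", // placeholder sample path
});
// before: players.get("kick")
// now (in real code, wait for the buffers to load first):
players.player("kick").toDestination().start();
```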
# 13.8.25


@@ -24,6 +24,12 @@ Or to install the 'next' version
npm install tone@next
```
To import Tone.js:
```js
import * as Tone from 'tone'
```
# Hello Tone


@@ -41,7 +41,9 @@ export class Channel extends ToneAudioNode<ChannelOptions> {
private _panVol: PanVol;
/**
* The L/R panning control.
* The L/R panning control. -1 = hard left, 1 = hard right.
* @min -1
* @max 1
*/
readonly pan: Param<"audioRange">;


@@ -9,11 +9,12 @@ import { optionsFromArguments } from "../../core/util/Defaults";
export type MidSideMergeOptions = ToneAudioNodeOptions;
/**
* MidSideMerge merges the mid and side signal after they've been separated by [[MidSideMerge]]
* MidSideMerge merges the mid and side signal after they've been separated by [[MidSideSplit]]
* ```
* Mid = (Left+Right)/sqrt(2); // obtain mid-signal from left and right
* Side = (Left-Right)/sqrt(2); // obtain side-signal from left and righ
* Side = (Left-Right)/sqrt(2); // obtain side-signal from left and right
* ```
* @category Component
*/
export class MidSideMerge extends ToneAudioNode<MidSideMergeOptions> {
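As a plain-number illustration of the mid/side math quoted in the comment above (a standalone sketch, not library code):

```ts
const SQRT2 = Math.sqrt(2);

// split: derive mid and side from a left/right sample pair
function split(left: number, right: number) {
    return { mid: (left + right) / SQRT2, side: (left - right) / SQRT2 };
}

// merge: recover left and right from mid and side
function merge(mid: number, side: number) {
    return { left: (mid + side) / SQRT2, right: (mid - side) / SQRT2 };
}

// the two operations round-trip: merge(split(L, R)) returns the original pair,
// e.g. split(1, 0) gives { mid: 0.707..., side: 0.707... }
```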


@@ -14,6 +14,7 @@ export type MidSideSplitOptions = ToneAudioNodeOptions;
* Mid = (Left+Right)/sqrt(2); // obtain mid-signal from left and right
* Side = (Left-Right)/sqrt(2); // obtain side-signal from left and right
* ```
* @category Component
*/
export class MidSideSplit extends ToneAudioNode<MidSideSplitOptions> {


@@ -34,7 +34,9 @@ export class PanVol extends ToneAudioNode<PanVolOptions> {
private _panner: Panner;
/**
* The L/R panning control.
* The L/R panning control. -1 = hard left, 1 = hard right.
* @min -1
* @max 1
*/
readonly pan: Param<"audioRange">;


@@ -13,10 +13,10 @@ interface TonePannerOptions extends ToneAudioNodeOptions {
* Panner is an equal power Left/Right Panner. It is a wrapper around the StereoPannerNode.
* @example
* return Tone.Offline(() => {
* // move the input signal from right to left
* const panner = new Tone.Panner(1).toDestination();
* panner.pan.rampTo(-1, 0.5);
* const osc = new Tone.Oscillator(100).connect(panner).start();
* // move the input signal from right to left
* const panner = new Tone.Panner(1).toDestination();
* panner.pan.rampTo(-1, 0.5);
* const osc = new Tone.Oscillator(100).connect(panner).start();
* }, 0.5, 2);
* @category Component
*/


@@ -34,6 +34,7 @@ export interface RecorderOptions extends ToneAudioNodeOptions {
* anchor.href = url;
* anchor.click();
* }, 4000);
* @category Component
*/
export class Recorder extends ToneAudioNode<RecorderOptions> {


@@ -21,6 +21,7 @@ export interface GateOptions extends ToneAudioNodeOptions {
* const mic = new Tone.UserMedia().connect(gate);
* // the gate will only pass through the incoming
* // signal when it's louder than -30db
* @category Component
*/
export class Gate extends ToneAudioNode<GateOptions> {


@@ -21,6 +21,7 @@ export interface LimiterOptions extends ToneAudioNodeOptions {
* const limiter = new Tone.Limiter(-20).toDestination();
* const oscillator = new Tone.Oscillator().connect(limiter);
* oscillator.start();
* @category Component
*/
export class Limiter extends ToneAudioNode<LimiterOptions> {


@@ -13,6 +13,7 @@ export interface MidSideCompressorOptions extends ToneAudioNodeOptions {
/**
* MidSideCompressor applies two different compressors to the [[mid]]
* and [[side]] signal components of the input. See [[MidSideSplit]] and [[MidSideMerge]].
* @category Component
*/
export class MidSideCompressor extends ToneAudioNode<MidSideCompressorOptions> {


@@ -26,6 +26,7 @@ export interface MultibandCompressorOptions extends ToneAudioNodeOptions {
* threshold: -12
* }
* });
* @category Component
*/
export class MultibandCompressor extends ToneAudioNode<MultibandCompressorOptions> {


@@ -433,7 +433,7 @@ export class Envelope extends ToneAudioNode<EnvelopeOptions> {
* @example
* const env = new Tone.Envelope(0.5, 1, 0.4, 2);
* env.triggerAttackRelease(2);
* setInterval(() => console.log(env.getValueAtTime), 100);
* setInterval(() => console.log(env.getValueAtTime(Tone.now())), 100);
*/
getValueAtTime(time: Time): NormalRange {
return this._sig.getValueAtTime(time);


@@ -23,6 +23,7 @@ export interface FrequencyEnvelopeOptions extends EnvelopeOptions {
* });
* freqEnv.connect(oscillator.frequency);
* freqEnv.triggerAttack();
* @category Component
*/
export class FrequencyEnvelope extends Envelope {


@@ -17,6 +17,7 @@ export interface BaseToneOptions { }
/**
* @class Tone is the base class of all other classes.
* @category Core
* @constructor
*/
export abstract class Tone {


@@ -26,7 +26,7 @@ export class Ticker {
/**
* track the callback interval
*/
private _timeout!: number;
private _timeout!: ReturnType<typeof setTimeout>;
/**
* private reference to the worker
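A note on that typing change: a plain `number` only matches the browser's `setTimeout`, while `ReturnType<typeof setTimeout>` compiles against both the DOM and Node typings. A minimal sketch:

```ts
// In the DOM lib, setTimeout returns a number; in @types/node it
// returns a Timeout object. This type resolves to whichever one
// the current compilation target provides.
let handle: ReturnType<typeof setTimeout> = setTimeout(() => {
    console.log("tick");
}, 250);
clearTimeout(handle);
```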


@@ -56,7 +56,7 @@ export abstract class AbstractParam<TypeName extends UnitName> {
* @example
* return Tone.Offline(() => {
* const signal = new Tone.Signal(0).toDestination();
* // the ramp is starts from the previously scheduled value
* // the ramp starts from the previously scheduled value
* signal.setValueAtTime(0, 0.1);
* signal.linearRampToValueAtTime(1, 0.4);
* }, 0.5, 1);
@@ -68,10 +68,10 @@ export abstract class AbstractParam<TypeName extends UnitName> {
* the previous scheduled parameter value to the given value.
* @example
* return Tone.Offline(() => {
* const signal = new Tone.Signal(0).toDestination();
* // the ramp is starts from the previously scheduled value
* signal.setValueAtTime(0, 0.1);
* signal.exponentialRampToValueAtTime(1, 0.4);
* const signal = new Tone.Signal(1).toDestination();
* // the ramp starts from the previously scheduled value, which must be positive
* signal.setValueAtTime(1, 0.1);
* signal.exponentialRampToValueAtTime(0, 0.4);
* }, 0.5, 1);
*/
abstract exponentialRampToValueAtTime(value: UnitMap[TypeName], time: Time): this;
@@ -90,6 +90,11 @@ export abstract class AbstractParam<TypeName extends UnitName> {
* const noise = new Tone.Noise().connect(delay).start().stop("+0.1");
* // making the delay time shorter over time will also make the pitch rise
* delay.delayTime.exponentialRampTo(0.01, 20);
* @example
* return Tone.Offline(() => {
* const signal = new Tone.Signal(.1).toDestination();
* signal.exponentialRampTo(5, 0.3, 0.1);
* }, 0.5, 1);
*/
abstract exponentialRampTo(value: UnitMap[TypeName], rampTime: Time, startTime?: Time): this;
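For context on the "must be positive" note in the example above: the underlying Web Audio exponential ramp cannot start at or target zero, so values need a small positive floor. A hypothetical guard, not Tone's internal code:

```ts
// Exponential curves never reach zero, so clamp any requested target
// to a tiny positive floor before scheduling the ramp (illustrative only).
const MIN_OUTPUT = 1e-7; // assumed floor value
function safeExpTarget(value: number): number {
    return Math.max(value, MIN_OUTPUT);
}
```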
@@ -104,6 +109,12 @@ export abstract class AbstractParam<TypeName extends UnitName> {
* @param startTime When the ramp should start.
* @returns {Param} this
* @example
* const delay = new Tone.FeedbackDelay(0.5, 0.98).toDestination();
* // a short burst of noise through the feedback delay
* const noise = new Tone.Noise().connect(delay).start().stop("+0.1");
* // making the delay time shorter over time will also make the pitch rise
* delay.delayTime.linearRampTo(0.01, 20);
* @example
* return Tone.Offline(() => {
* const signal = new Tone.Signal(1).toDestination();
* signal.linearRampTo(0, 0.3, 0.1);


@@ -382,6 +382,8 @@ export class Context extends BaseContext {
* const context = new Tone.Context({ latencyHint: "playback" });
* // set this context as the global Context
* Tone.setContext(context);
* // the global context is gettable with Tone.getContext()
* console.log(Tone.getContext().latencyHint);
*/
get latencyHint(): ContextLatencyHint | Seconds {
return this._latencyHint;


@@ -46,6 +46,7 @@ export type AutomationEvent = NormalAutomationEvent | TargetAutomationEvent;
* additional unit conversion functionality. It also
* serves as a base-class for classes which have a single,
* automatable parameter.
* @category Core
*/
export class Param<TypeName extends UnitName = "number">
extends ToneWithContext<ParamOptions<TypeName>>


@@ -27,7 +27,7 @@ interface ToneAudioBuffersOptions {
* }, () => {
* const player = new Tone.Player().toDestination();
* // play one of the samples when they all load
* player.buffer = pianoSamples.get("C2");
* player.buffer = pianoSamples.get("A2");
* player.start();
* });
* @example


@@ -95,10 +95,13 @@ export abstract class ToneWithContext<Options extends ToneWithContextOptions> ex
}
/**
* Convert the incoming time to seconds
* Convert the incoming time to seconds.
* This is calculated against the current [[Tone.Transport]] bpm
* @example
* const gain = new Tone.Gain();
* console.log(gain.toSeconds("4n"));
* setInterval(() => console.log(gain.toSeconds("4n")), 100);
* // ramp the tempo to 60 bpm over 30 seconds
* Tone.getTransport().bpm.rampTo(60, 30);
*/
toSeconds(time?: Time): Seconds {
return new TimeClass(this.context, time).toSeconds();
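A worked example of that bpm dependence (values assume the default 120 bpm transport):

```ts
const gain = new Tone.Gain();
// a quarter note is one beat: 60 / 120 bpm = 0.5 seconds
console.log(gain.toSeconds("4n")); // 0.5
// after Tone.getTransport().bpm.value = 60, the same call returns 1
```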
@@ -173,12 +176,14 @@ export abstract class ToneWithContext<Options extends ToneWithContextOptions> ex
/**
* Set multiple properties at once with an object.
* @example
* const filter = new Tone.Filter();
* const filter = new Tone.Filter().toDestination();
* // set values using an object
* filter.set({
* frequency: 300,
* frequency: "C6",
* type: "highpass"
* });
* const player = new Tone.Player("https://tonejs.github.io/audio/berklee/Analogsynth_octaves_highmid.mp3").connect(filter);
* player.autostart = true;
*/
set(props: RecursivePartial<Options>): this {
Object.keys(props).forEach(attribute => {


@@ -1,10 +1,10 @@
export * from "./clock/Clock";
export * from "./clock/Transport";
// export * from "./clock/Transport";
export * from "./context/Context";
export * from "./context/BaseContext";
export * from "./context/Delay";
export * from "./context/Destination";
// export * from "./context/Destination";
export * from "./context/Gain";
export * from "./context/Offline";
export * from "./context/OfflineContext";
@@ -19,7 +19,7 @@ export * from "./type/Time";
export * from "./type/Ticks";
export * from "./type/TransportTime";
export * from "./util/Draw";
import "./util/Draw";
export * from "./util/Emitter";
export * from "./util/IntervalTimeline";
export * from "./util/StateTimeline";


@@ -46,7 +46,7 @@ export function deepMerge(target: any, ...sources: any[]): any {
/**
* Returns true if the two arrays have the same value for each of the elements
*/
export function deepEquals(arrayA: number[] | string[], arrayB: number[] | string[]): boolean {
export function deepEquals<T>(arrayA: T[], arrayB: T[]): boolean {
return arrayA.length === arrayB.length && arrayA.every((element, index) => arrayB[index] === element);
}
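The generic signature keeps the same strict-equality comparison but now type-checks against any element type; a usage sketch:

```ts
deepEquals([1, 2, 3], [1, 2, 3]); // true
deepEquals(["a", "b"], ["a", "c"]); // false
// boolean arrays were rejected by the old number[] | string[] signature
deepEquals([true, false], [true, false]); // true
```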


@@ -29,6 +29,19 @@ describe("Emitter", () => {
emitter.dispose();
});
it("can unbind duplicate events", () => {
const emitter = new Emitter();
const callback = () => {
throw new Error("should not call this");
};
emitter.on("something", callback);
emitter.on("something", callback);
emitter.on("something", callback);
emitter.off("something", callback);
emitter.emit("something");
emitter.dispose();
});
it("'off' does nothing if there is no event scheduled", () => {
const emitter = new Emitter();
const callback = () => {


@@ -10,6 +10,7 @@ export interface EmitterEventObject {
* the ability to listen for and emit events.
* Inspiration and reference from Jerome Etienne's [MicroEvent](https://github.com/jeromeetienne/microevent.js).
* MIT (c) 2011 Jerome Etienne.
* @category Core
*/
export class Emitter<EventType extends string = string> extends Tone {
@@ -73,7 +74,7 @@ export class Emitter<EventType extends string = string> extends Tone {
this._events[event] = [];
} else {
const eventList = this._events[event];
for (let i = 0; i < eventList.length; i++) {
for (let i = eventList.length - 1; i >= 0; i--) {
if (eventList[i] === callback) {
eventList.splice(i, 1);
}
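Why the loop now runs backwards: splicing during a forward pass shifts the next matching element into the slot just removed, so adjacent duplicate callbacks survived `off`. A standalone illustration:

```ts
// forward pass: each splice slides the next duplicate into the
// current index, which the i++ then skips
const forward = ["cb", "cb", "cb"];
for (let i = 0; i < forward.length; i++) {
    if (forward[i] === "cb") forward.splice(i, 1);
}
console.log(forward); // ["cb"] (one duplicate survives)

// backward pass: removals never disturb the indices still to visit
const backward = ["cb", "cb", "cb"];
for (let i = backward.length - 1; i >= 0; i--) {
    if (backward[i] === "cb") backward.splice(i, 1);
}
console.log(backward); // []
```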


@@ -13,6 +13,7 @@ export type MidSideEffectOptions = EffectOptions;
* Applies a Mid/Side separation and recombination.
* Algorithm found in [kvraudio forums](http://www.kvraudio.com/forum/viewtopic.php?t=212587).
* This is a base-class for Mid/Side Effects.
* @category Effect
*/
export abstract class MidSideEffect<Options extends MidSideEffectOptions> extends Effect<Options> {


@@ -11,17 +11,7 @@ export interface StereoFeedbackEffectOptions extends StereoEffectOptions {
}
/**
* Just like a stereo feedback effect, but the feedback is routed from left to right
* and right to left instead of on the same channel.
* ```
* +--------------------------------+ feedbackL <-----------------------------------+
* | |
* +--> +-----> +----> +---+
* feedbackMerge +--> split (EFFECT) merge +--> feedbackSplit
* +--> +-----> +----> +---+
* | |
* +--------------------------------+ feedbackR <-----------------------------------+
* ```
* Base class for stereo feedback effects where the effectReturn is fed back into the same channel.
*/
export class StereoFeedbackEffect<Options extends StereoFeedbackEffectOptions> extends StereoEffect<Options> {


@@ -5,7 +5,7 @@ import { expect } from "chai";
import { CompareToFile } from "test/helper/CompareToFile";
import { Oscillator } from "Tone/source/oscillator/Oscillator";
describe("Effect", () => {
describe("Vibrato", () => {
BasicTests(Vibrato);
EffectTests(Vibrato);
@@ -13,7 +13,7 @@ describe("Effect", () => {
return CompareToFile(() => {
const vibrato = new Vibrato(4, 1).toDestination();
const osc = new Oscillator().connect(vibrato).start();
}, "vibrato.wav", 0.01);
}, "vibrato.wav", 0.02);
});
context("API", () => {


@@ -28,6 +28,7 @@ interface PartOptions<T> extends Omit<ToneEventOptions<CallbackType<T>>, "value"
* // will be passed in as the second argument
* synth.triggerAttackRelease(note, "8n", time);
* }), [[0, "C2"], ["0:2", "C3"], ["0:3:2", "G2"]]);
* Tone.Transport.start();
* @example
* const synth = new Tone.Synth().toDestination();
* // use an array of objects as long as the object has a "time" attribute
@@ -37,6 +38,7 @@ interface PartOptions<T> extends Omit<ToneEventOptions<CallbackType<T>>, "value"
* }), [{ time: 0, note: "C3", velocity: 0.9 },
* { time: "0:2", note: "C4", velocity: 0.5 }
* ]).start(0);
* Tone.Transport.start();
* @category Event
*/
export class Part<ValueType = any> extends ToneEvent<ValueType> {


@@ -10,7 +10,7 @@ import { TimeClass } from "./core/type/Time";
import { TransportTimeClass } from "./core/type/TransportTime";
import { isDefined, isFunction } from "./core/util/TypeCheck";
import { omitFromObject } from "./core/util/Defaults";
import { Draw } from "./classes";
import { Draw } from "./core/util/Draw";
type ClassesWithoutSingletons = Omit<typeof Classes, "Transport" | "Destination" | "Draw">;


@@ -80,14 +80,25 @@ export abstract class Instrument<Options extends InstrumentOptions> extends Tone
* Tone.Transport.start();
*/
sync(): this {
if (!this._synced) {
this._synced = true;
if (this._syncState()) {
this._syncMethod("triggerAttack", 1);
this._syncMethod("triggerRelease", 0);
}
return this;
}
/**
* Set the _synced flag, returning true if the state changed.
*/
protected _syncState(): boolean {
let changed = false;
if (!this._synced) {
this._synced = true;
changed = true;
}
return changed;
}
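The effect of the new `_syncState` guard is that repeated `sync()` calls are idempotent; a usage sketch:

```ts
const synth = new Tone.Synth().toDestination();
// the second call returns early, so triggerAttack/triggerRelease
// are only wrapped once
synth.sync().sync();
synth.triggerAttackRelease("C4", "8n", 0);
Tone.Transport.start();
```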
/**
* Wrap the given method so that it can be synchronized
* @param method Which method to wrap and sync


@@ -140,7 +140,7 @@ export abstract class ModulationSynth<Options extends ModulationSynthOptions> ex
{
type: "sine"
}
),
) as OmniOscillatorSynthOptions,
envelope: Object.assign(
omitFromObject(
Envelope.getDefaults(),
@@ -162,7 +162,7 @@ export abstract class ModulationSynth<Options extends ModulationSynthOptions> ex
{
type: "square"
}
),
) as OmniOscillatorSynthOptions,
modulationEnvelope: Object.assign(
omitFromObject(
Envelope.getDefaults(),


@@ -131,7 +131,7 @@ export class MonoSynth extends Monophonic<MonoSynthOptions> {
{
type: "sawtooth",
},
),
) as OmniOscillatorSynthOptions,
});
}


@@ -103,8 +103,10 @@ export class NoiseSynth extends Instrument<NoiseSynthOptions> {
}
sync(): this {
this._syncMethod("triggerAttack", 0);
this._syncMethod("triggerRelease", 0);
if (this._syncState()) {
this._syncMethod("triggerAttack", 0);
this._syncMethod("triggerRelease", 0);
}
return this;
}


@@ -334,8 +334,10 @@ export class PolySynth<Voice extends Monophonic<any> = Synth> extends Instrument
}
sync(): this {
this._syncMethod("triggerAttack", 1);
this._syncMethod("triggerRelease", 1);
if (this._syncState()) {
this._syncMethod("triggerAttack", 1);
this._syncMethod("triggerRelease", 1);
}
return this;
}


@@ -254,8 +254,10 @@ export class Sampler extends Instrument<SamplerOptions> {
}
sync(): this {
this._syncMethod("triggerAttack", 1);
this._syncMethod("triggerRelease", 1);
if (this._syncState()) {
this._syncMethod("triggerAttack", 1);
this._syncMethod("triggerRelease", 1);
}
return this;
}


@@ -7,7 +7,7 @@ import { readOnly } from "../core/util/Interface";
import { RecursivePartial } from "../core/util/Interface";
import { Signal } from "../signal/Signal";
import { OmniOscillator } from "../source/oscillator/OmniOscillator";
import { OmniOscillatorSynthOptions } from "../source/oscillator/OscillatorInterface";
import { OmniOscillatorOptions, OmniOscillatorSynthOptions } from "../source/oscillator/OscillatorInterface";
import { Source } from "../source/Source";
import { Monophonic, MonophonicOptions } from "./Monophonic";
@@ -94,7 +94,7 @@ export class Synth<Options extends SynthOptions = SynthOptions> extends Monophon
{
type: "triangle",
},
),
) as OmniOscillatorOptions,
});
}


@@ -17,6 +17,7 @@ export type GreaterThanOptions = SignalOptions<"number">;
* const gt = new Tone.GreaterThan(2).toDestination();
* const sig = new Tone.Signal(4).connect(gt);
* }, 0.1, 1);
* @category Signal
*/
export class GreaterThan extends Signal<"number"> {


@@ -14,6 +14,7 @@ export type GreaterThanZeroOptions = SignalOperatorOptions
* const sig = new Tone.Signal(0.5).connect(gt0);
* sig.setValueAtTime(-1, 0.05);
* }, 0.1, 1);
* @category Signal
*/
export class GreaterThanZero extends SignalOperator<GreaterThanZeroOptions> {


@@ -14,6 +14,7 @@ export interface ScaleExpOptions extends ScaleOptions {
* @example
* const scaleExp = new Tone.ScaleExp(0, 100, 2);
* const signal = new Tone.Signal(0.5).connect(scaleExp);
* @category Signal
*/
export class ScaleExp extends Scale<ScaleExpOptions> {


@@ -159,6 +159,8 @@ export class Player extends Source<PlayerOptions> {
this._activeSources.delete(source);
if (this._activeSources.size === 0 && !this._synced &&
this._state.getValueAtTime(this.now()) === "started") {
// remove the 'implicitEnd' event and replace with an explicit end
this._state.cancel(this.now());
this._state.setStateAtTime("stopped", this.now());
}
}


@@ -206,7 +206,7 @@ export class FatOscillator extends Source<FatOscillatorOptions> implements ToneO
}
set phase(phase: Degrees) {
this._phase = phase;
this._forEach(osc => osc.phase = phase);
this._forEach((osc, i) => osc.phase = this._phase + (i / this.count) * 360);
}
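The new setter distributes the phase across the detuned voices rather than giving every voice the same value; a worked example (the spread formula comes from the line above):

```ts
const fat = new Tone.FatOscillator("A3", "sawtooth", 40);
fat.count = 3;
// with phase = 0 and count = 3, the voices start at
// 0 + (0/3) * 360 = 0, (1/3) * 360 = 120, and (2/3) * 360 = 240 degrees,
// which decorrelates the voices instead of stacking them in phase
fat.phase = 0;
```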
get baseType(): OscillatorType {


@@ -10,7 +10,7 @@ import { FMOscillator } from "./FMOscillator";
import { Oscillator } from "./Oscillator";
import {
generateWaveform,
OmniOscillatorConstructorOptions, OmniOscillatorOptions,
OmniOscillatorOptions,
OmniOscillatorType, ToneOscillatorInterface, ToneOscillatorType
} from "./OscillatorInterface";
import { PulseOscillator } from "./PulseOscillator";
@@ -69,7 +69,7 @@ const OmniOscillatorSourceMap: {
* @category Source
*/
export class OmniOscillator<OscType extends AnyOscillator>
extends Source<OmniOscillatorConstructorOptions>
extends Source<OmniOscillatorOptions>
implements Omit<ToneOscillatorInterface, "type"> {
readonly name: string = "OmniOscillator";
@@ -92,7 +92,7 @@ export class OmniOscillator<OscType extends AnyOscillator>
* @param type The type of the oscillator.
*/
constructor(frequency?: Frequency, type?: OmniOscillatorType);
constructor(options?: Partial<OmniOscillatorConstructorOptions>);
constructor(options?: Partial<OmniOscillatorOptions>);
constructor() {
super(optionsFromArguments(OmniOscillator.getDefaults(), arguments, ["frequency", "type"]));
@@ -212,7 +212,7 @@ export class OmniOscillator<OscType extends AnyOscillator>
}
}
set(props: Partial<OmniOscillatorConstructorOptions>): this {
set(props: Partial<OmniOscillatorOptions>): this {
// make sure the type is set first
if (Reflect.has(props, "type") && props.type) {
this.type = props.type;


@@ -442,22 +442,13 @@ export type OmniOscillatorType =
"amsine" | "amsquare" | "amsawtooth" | "amtriangle" | "amcustom" | AMTypeWithPartials |
TypeWithPartials | OscillatorType | "pulse" | "pwm";
export type OmniOscillatorConstructorOptions =
export type OmniOscillatorOptions =
PulseOscillatorOptions | PWMOscillatorOptions |
OmniFatCustomOscillatorOptions | OmniFatTypeOscillatorOptions | OmniFatPartialsOscillatorOptions |
OmniFMCustomOscillatorOptions | OmniFMTypeOscillatorOptions | OmniFMPartialsOscillatorOptions |
OmniAMCustomOscillatorOptions | OmniAMTypeOscillatorOptions | OmniAMPartialsOscillatorOptions |
ToneOscillatorConstructorOptions;
// export type OmniOscillatorSourceOptions = OmniOscillatorConstructorOptions & SourceOptions;
export type OmniOscillatorOptions =
PulseOscillatorOptions & PWMOscillatorOptions &
OmniFatCustomOscillatorOptions & OmniFatTypeOscillatorOptions & OmniFatPartialsOscillatorOptions &
OmniFMCustomOscillatorOptions & OmniFMTypeOscillatorOptions & OmniFMPartialsOscillatorOptions &
OmniAMCustomOscillatorOptions & OmniAMTypeOscillatorOptions & OmniAMPartialsOscillatorOptions &
ToneOscillatorConstructorOptions;
type OmitSourceOptions<T extends BaseOscillatorOptions> = Omit<T, "frequency" | "detune" | "context">;
/**

package-lock.json (generated): 1888 changes; file diff suppressed because it is too large.


@@ -78,11 +78,11 @@
"http-server": "^0.12.3",
"istanbul-instrumenter-loader": "^3.0.1",
"jsdom": "^16.3.0",
"karma": "^4.4.1",
"karma": "^5.1.1",
"karma-chrome-launcher": "^2.2.0",
"karma-coverage": "^2.0.2",
"karma-firefox-launcher": "^1.3.0",
"karma-mocha": "^1.3.0",
"karma-mocha": "^2.0.1",
"karma-safari-launcher": "^1.0.0",
"karma-sourcemap-loader": "^0.3.7",
"karma-spec-reporter": "0.0.32",
@@ -95,15 +95,15 @@
"ts-loader": "^7.0.1",
"ts-node": "^8.4.1",
"typedoc": "^0.17.6",
"typescript": "^3.8.3",
"typescript": "^4.0.3",
"ua-parser-js": "^0.7.20",
"webpack": "^4.43.0",
"webpack-cli": "^3.3.11",
"webpack": "^4.44.1",
"webpack-cli": "^3.3.12",
"yargs": "^13.2.0"
},
"dependencies": {
"standardized-audio-context": "^24.1.5",
"tslib": "^1.10.0"
"standardized-audio-context": "^25.0.4",
"tslib": "^2.0.1"
},
"prettier": {
"trailingComma": "es5",


@@ -5,6 +5,10 @@ import { Offline } from "./Offline";
import { OutputAudio } from "./OutputAudio";
import { Monophonic } from "Tone/instrument/Monophonic";
function wait(time) {
return new Promise(done => setTimeout(done, time));
}
export function InstrumentTest(Constr, note, constrArg?, optionsIndex?): void {
context("Instrument Tests", () => {
@@ -167,6 +171,35 @@ export function InstrumentTest(Constr, note, constrArg?, optionsIndex?): void {
});
});
it("can unsync and re-sync triggerAttack to the Transport", () => {
return Offline(async ({ transport }) => {
const instance = new Constr(constrArg);
instance.toDestination();
instance.sync();
if (note) {
instance.triggerAttack(note, 0.1);
} else {
instance.triggerAttack(0.1);
}
transport.start(0.1);
await wait(100);
instance.unsync();
transport.stop();
instance.sync();
if (note) {
instance.triggerAttack(note, 0.1);
} else {
instance.triggerAttack(0.1);
}
transport.start(0.1);
}, 1).then((buffer) => {
expect(buffer.getTimeOfFirstSound()).to.be.within(0.19, 0.25);
});
});
it("calling sync and unsync multiple times has no effect", () => {
return Offline(({ transport }) => {
const instance = new Constr(constrArg);
@@ -218,5 +251,17 @@ export function InstrumentTest(Constr, note, constrArg?, optionsIndex?): void {
}
}, 3);
});
it("can do portamento glide between notes", () => {
return Offline(() => {
const instance = new Constr(constrArg);
if (instance instanceof Monophonic) {
instance.portamento = 0.5;
instance.triggerAttackRelease("C4", 0.2, 0);
expect(instance.getLevelAtTime(0.4)).to.be.greaterThan(0);
instance.triggerAttackRelease("C2", 0.2, 0.4);
}
}, 0.5);
});
});
}


@@ -2,9 +2,9 @@
// Karma configuration
const path = require("path");
const argv = require("yargs")
.alias("i", "file")
.alias("d", "dir")
.argv;
.alias("i", "file")
.alias("d", "dir")
.argv;
let BROWSERS = ["HeadlessChrome", "HeadlessFirefox", "Safari"];
@@ -28,7 +28,7 @@ if (process.env.BROWSER === "chrome") {
BROWSERS = ["HeadlessChrome", "HeadlessFirefox"];
}
module.exports = function (config) {
module.exports = function(config) {
const configuration = {
// base path that will be used to resolve all patterns (eg. files, exclude)