
///////////////////////////////////////////////////////////////////////////////
//
// WEB RTC MICROPHONE
//
///////////////////////////////////////////////////////////////////////////////
define(["Tone/core/Tone", "Tone/source/Source"], function(Tone){

    /**
     * WebRTC Microphone
     *
     * CHROME ONLY (for now) because of the
     * use of the MediaStreamAudioSourceNode
     *
     * @constructor
     * @extends {Tone.Source}
     * @param {number=} inputNum
     */
    Tone.Microphone = function(inputNum){
        Tone.call(this);

        /**
         * @type {MediaStreamAudioSourceNode}
         * @private
         */
        this._mediaStream = null;

        /**
         * @type {LocalMediaStream}
         * @private
         */
        this._stream = null;

        /**
         * @type {Object}
         * @private
         */
        this.constraints = {"audio" : true};

        // use the desired input source, if one was specified and the
        // (Chrome-only) source enumeration API is available
        if (inputNum !== undefined && typeof MediaStreamTrack !== "undefined" && MediaStreamTrack.getSources){
            var self = this;
            MediaStreamTrack.getSources(function(media_sources){
                if (inputNum < media_sources.length){
                    self.constraints.audio = {
                        optional : [{ sourceId : media_sources[inputNum].id }]
                    };
                }
            });
        }
    };

    Tone.extend(Tone.Microphone, Tone.Source);
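
    // Usage sketch (illustrative, not part of the original file). Assumes
    // Tone.Master from Tone/core/Master is loaded; the "#start" button is
    // hypothetical. getUserMedia generally requires a user gesture:
    //
    //     var mic = new Tone.Microphone();
    //     mic.connect(Tone.Master);
    //     document.querySelector("#start").onclick = function(){
    //         mic.start();
    //     };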

    /**
     * start the stream
     *
     * accepts a time to stay consistent with other sources, even though
     * it can't be started in a sample accurate way.
     * uses setTimeout to approximate the behavior
     *
     * @param {Tone.Time} time
     */
    Tone.Microphone.prototype.start = function(time){
        if (this.state === Tone.Source.State.STOPPED){
            this.state = Tone.Source.State.STARTED;
            if (time){
                var self = this;
                setTimeout(function(){
                    navigator.getUserMedia(self.constraints,
                        self._onStream.bind(self), self._onStreamError.bind(self));
                }, this.toSeconds(time) * 1000);
            } else {
                navigator.getUserMedia(this.constraints,
                    this._onStream.bind(this), this._onStreamError.bind(this));
            }
        }
    };

    /**
     * stop the stream
     *
     * accepts a time to stay consistent with other sources, even though
     * it can't be stopped in a sample accurate way.
     * uses setTimeout to approximate the behavior
     *
     * @param {Tone.Time} time
     */
    Tone.Microphone.prototype.stop = function(time){
        if (this._stream && this.state === Tone.Source.State.STARTED){
            if (time){
                var self = this;
                setTimeout(function(){
                    self.state = Tone.Source.State.STOPPED;
                    self._stream.stop();
                }, this.toSeconds(time) * 1000);
            } else {
                this.state = Tone.Source.State.STOPPED;
                this._stream.stop();
            }
        }
    };
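
    // Timing sketch (illustrative, not from the original file): both start
    // and stop fall back to setTimeout, so scheduling is approximate rather
    // than sample accurate.
    //
    //     mic.start();    // prompt for mic access immediately
    //     mic.stop(2);    // release the stream roughly 2 seconds later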

    /**
     * called when the stream is successfully set up
     * @param {LocalMediaStream} stream
     * @private
     */
    Tone.Microphone.prototype._onStream = function(stream){
        this._stream = stream;
        // wrap a MediaStreamAudioSourceNode around the live input stream
        this._mediaStream = this.context.createMediaStreamSource(stream);
        this._mediaStream.connect(this.output);
    };

    /**
     * called on error
     * @param {Error} e
     * @private
     */
    Tone.Microphone.prototype._onStreamError = function(e){
        console.error(e);
    };

    /**
     * clean up
     */
    Tone.Microphone.prototype.dispose = function(){
        this.input.disconnect();
        this.output.disconnect();
        // the stream is a MediaStream, not an audio node: stop it rather
        // than disconnecting, and guard in case the mic was never started
        if (this._stream){
            this._stream.stop();
        }
        if (this._mediaStream){
            this._mediaStream.disconnect();
        }
        this.input = null;
        this.output = null;
        this._stream = null;
        this._mediaStream = null;
    };

    // polyfill the vendor-prefixed versions of getUserMedia
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia || navigator.msGetUserMedia;
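
    // Feature-detection sketch (an assumption, not in the original file):
    // even after the polyfill, getUserMedia may be undefined in browsers
    // without any prefixed implementation, so check before constructing.
    //
    //     if (navigator.getUserMedia){
    //         var mic = new Tone.Microphone();
    //     } else {
    //         console.warn("getUserMedia is not supported in this browser");
    //     }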

    return Tone.Microphone;
});