diff --git a/.gitignore b/.gitignore index 2d2438ef..4d87b1b4 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,4 @@ dist dist-test npm-debug.log +samples diff --git a/debug/index.html b/debug/index.html index b3ae21d3..b66f04c8 100644 --- a/debug/index.html +++ b/debug/index.html @@ -356,7 +356,7 @@

footer

prepareSourceBuffer(combined, outputType, function () { console.log('appending...'); window.vjsBuffer.appendBuffer(bytes); - video.play(); + // video.play(); }); } }); @@ -379,7 +379,7 @@

footer

if ($('#working-active').checked) { prepareSourceBuffer(function() { window.vjsBuffer.appendBuffer(bytes); - video.play(); + // video.play(); }); } diff --git a/lib/mp4/transmuxer.js b/lib/mp4/transmuxer.js index 1408c0a9..b666cf20 100644 --- a/lib/mp4/transmuxer.js +++ b/lib/mp4/transmuxer.js @@ -712,10 +712,6 @@ VideoSegmentStream.prototype = new Stream(); * in the source; false to adjust the first segment to start at media timeline start. */ CoalesceStream = function(options, metadataStream) { - // Number of Tracks per output segment - // If greater than 1, we combine multiple - // tracks into a single segment - this.numberOfTracks = 0; this.metadataStream = metadataStream; options = options || {}; @@ -730,8 +726,12 @@ CoalesceStream = function(options, metadataStream) { this.keepOriginalTimestamps = options.keepOriginalTimestamps; } + this.tracks = { + video: null, + audio: null + }; + this.pendingTracks = []; - this.videoTrack = null; this.pendingBoxes = []; this.pendingCaptions = []; this.pendingMetadata = []; @@ -760,12 +760,34 @@ CoalesceStream = function(options, metadataStream) { this.pendingBytes += output.boxes.byteLength; if (output.track.type === 'video') { - this.videoTrack = output.track; + this.tracks.video.track = output.track; } if (output.track.type === 'audio') { - this.audioTrack = output.track; + this.tracks.audio.track = output.track; } }; + + this.reset = function(flush) { + if (this.tracks.video) { + this.tracks.video.track = null; + if (flush) { + this.tracks.video.flushed = false; + } + } + + if (this.tracks.audio) { + this.tracks.audio.track = null; + if (flush) { + this.tracks.audio.flushed = false; + } + } + + this.pendingTracks.length = 0; + this.pendingBoxes.length = 0; + this.pendingCaptions.length = 0; + this.pendingBytes = 0; + this.pendingMetadata.length = 0; + }; }; CoalesceStream.prototype = new Stream(); @@ -784,54 +806,64 @@ CoalesceStream.prototype.flush = function(flushSource) { timelineStartPts = 0, i; - if 
(this.pendingTracks.length < this.numberOfTracks) { - if (flushSource !== 'VideoSegmentStream' && - flushSource !== 'AudioSegmentStream') { - // Return because we haven't received a flush from a data-generating - // portion of the segment (meaning that we have only recieved meta-data - // or captions.) - return; - } else if (this.remuxTracks) { - // Return until we have enough tracks from the pipeline to remux (if we - // are remuxing audio and video into a single MP4) - return; - } else if (this.pendingTracks.length === 0) { - // In the case where we receive a flush without any data having been - // received we consider it an emitted track for the purposes of coalescing - // `done` events. - // We do this for the case where there is an audio and video track in the - // segment but no audio data. (seen in several playlists with alternate - // audio tracks and no audio present in the main TS segments.) - this.emittedTracks++; - - if (this.emittedTracks >= this.numberOfTracks) { - this.trigger('done'); - this.emittedTracks = 0; - } - return; - } + if (flushSource !== 'VideoSegmentStream' && flushSource !== 'AudioSegmentStream') { + // Return because we haven't received a flush from a data-generating + // portion of the segment (meaning that we have only received metadata + // or captions.) 
+ return; + } + + if (flushSource === 'VideoSegmentStream') { + this.tracks.video.flushed = true; } - if (this.videoTrack) { - timelineStartPts = this.videoTrack.timelineStartInfo.pts; + if (flushSource === 'AudioSegmentStream') { + this.tracks.audio.flushed = true; + } + + var waitingOn = { + video: this.tracks.video && !this.tracks.video.flushed, + audio: this.tracks.audio && !this.tracks.audio.flushed + }; + + var done = !(waitingOn.video || waitingOn.audio); + + if (this.remuxTracks && !done) { + // Return until we have enough tracks from the pipeline to remux (if we + // are remuxing audio and video into a single MP4) + return; + } + + if (this.pendingTracks.length === 0 && done) { + // In the case where we receive a flush without any data having been + // received we consider it an emitted track for the purposes of coalescing + // `done` events. + // We do this for the case where there is an audio and video track in the + // segment but no audio data. (seen in several playlists with alternate + // audio tracks and no audio present in the main TS segments.) 
+ this.reset(true); + this.trigger('done'); + return; + } + + if (this.tracks.video && this.tracks.video.track) { + timelineStartPts = this.tracks.video.track.timelineStartInfo.pts; VIDEO_PROPERTIES.forEach(function(prop) { - event.info[prop] = this.videoTrack[prop]; + event.info[prop] = this.tracks.video.track[prop]; }, this); - } else if (this.audioTrack) { - timelineStartPts = this.audioTrack.timelineStartInfo.pts; + } else if (this.tracks.audio && this.tracks.audio.track) { + timelineStartPts = this.tracks.audio.track.timelineStartInfo.pts; AUDIO_PROPERTIES.forEach(function(prop) { - event.info[prop] = this.audioTrack[prop]; + event.info[prop] = this.tracks.audio.track[prop]; }, this); } - if (this.pendingTracks.length === 1) { - event.type = this.pendingTracks[0].type; - } else { + if (this.remuxTracks && this.tracks.video && this.tracks.audio) { event.type = 'combined'; + } else { + event.type = this.pendingTracks[0].type; } - this.emittedTracks += this.pendingTracks.length; - initSegment = mp4.initSegment(this.pendingTracks); // Create a new typed array to hold the init segment @@ -889,21 +921,15 @@ CoalesceStream.prototype.flush = function(flushSource) { // it for the first event.metadata.dispatchType = this.metadataStream.dispatchType; - // Reset stream state - this.pendingTracks.length = 0; - this.videoTrack = null; - this.pendingBoxes.length = 0; - this.pendingCaptions.length = 0; - this.pendingBytes = 0; - this.pendingMetadata.length = 0; + this.reset(); // Emit the built segment this.trigger('data', event); // Only emit `done` if all tracks have been flushed and emitted - if (this.emittedTracks >= this.numberOfTracks) { + if (done) { + this.reset(true); this.trigger('done'); - this.emittedTracks = 0; } }; /** @@ -962,7 +988,10 @@ Transmuxer = function(options) { type: 'audio' }; // hook up the audio segment stream to the first track with aac data - pipeline.coalesceStream.numberOfTracks++; + pipeline.coalesceStream.tracks.audio = { + track: null, + 
flushed: false + }; pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack, options); // Set up the final part of the audio pipeline pipeline.adtsStream @@ -1039,7 +1068,10 @@ Transmuxer = function(options) { // hook up the video segment stream to the first track with h264 data if (videoTrack && !pipeline.videoSegmentStream) { - pipeline.coalesceStream.numberOfTracks++; + pipeline.coalesceStream.tracks.video = { + track: null, + flushed: false + }; pipeline.videoSegmentStream = new VideoSegmentStream(videoTrack, options); pipeline.videoSegmentStream.on('timelineStartInfo', function(timelineStartInfo) { @@ -1073,7 +1105,10 @@ Transmuxer = function(options) { if (audioTrack && !pipeline.audioSegmentStream) { // hook up the audio segment stream to the first track with aac data - pipeline.coalesceStream.numberOfTracks++; + pipeline.coalesceStream.tracks.audio = { + track: null, + flushed: false + }; pipeline.audioSegmentStream = new AudioSegmentStream(audioTrack, options); // Set up the final part of the audio pipeline