| New file |
| | |
| | | <template> |
| | | <div class="video-player"> |
| | | <video ref="videoPlayer" :poster="poster" preload="auto" muted></video> |
| | | <div class="controls"> |
| | | <!-- 全屏 --> |
| | | <i class="el-icon-full-screen" @click="fullScreen"></i> |
| | | </div> |
| | | </div> |
| | | </template> |
| | | <script> |
| | | import Wfs from "./wfs"; |
export default {
  name: "CameraPlayer",
  props: {
    wsAddr: {
      type: String,
      // default: "ws://192.168.1.182:10101/ws"
      // NOTE: location.protocol includes the trailing colon ("https:"),
      // so the comparison must be against "https:" — comparing against
      // "https" never matched and the wss branch was unreachable.
      default: `${location.protocol === "https:" ? "wss" : "ws"}://${location.host}/ws`
    },
    cameraName: {
      type: String,
      default: ""
    },
    cameraID: {
      type: String,
      default: ""
    },
    rtspUrl: {
      type: String,
      default: ""
    },
    isRunning: {
      type: Boolean,
      default: false
    },
    isGb: {
      type: Boolean,
      default: false
    }
  },

  computed: {
    // Cache-busting query string so the browser never shows a stale poster.
    poster() {
      return "/images/player/player_poster.gif?t=" + Math.random();
    }
  },
  data() {
    return {
      wfs: {},   // current Wfs instance; stays {} until clickStart() runs
      wfsId: 0,  // token identifying the active checkConnect() polling loop
      // Local copy of the cameraID prop. Vue props must not be mutated,
      // so the generated fallback id is stored here instead of being
      // written back onto this.cameraID.
      localCameraID: this.cameraID
    };
  },
  watch: {
    // Restart playback whenever the stream URL changes.
    rtspUrl(newVal, oldVal) {
      if (newVal !== oldVal) {
        if (this.wfs.config) {
          this.wfs.destroy();
        }
        this.$nextTick(() => {
          this.clickStart();
        });
      }
    }
  },
  mounted() {
    this.clickStart();
  },
  beforeDestroy() {
    // wfs is still the initial {} when playback never started; guard so
    // teardown does not throw "destroy is not a function".
    if (typeof this.wfs.destroy === "function") {
      this.wfs.destroy();
    }
    // Invalidate any pending checkConnect() timers (id comparison fails).
    this.wfsId = "";
  },
  methods: {
    /**
     * Poll the websocket every 10s; restart playback after a disconnect.
     * @param {string|number} id - session token; stale loops bail out.
     */
    checkConnect(id) {
      if (id !== this.wfsId) {
        return; // a newer session replaced this polling loop
      }

      const loader = this.wfs.websocketLoader;
      if (loader && loader.client && loader.client.disconnected) {
        this.clickStart();
        console.log("实时视频已断开,正在重连");
        return;
      }

      setTimeout(() => {
        this.checkConnect(id);
      }, 10000);
    },
    // Create a fresh Wfs session and attach it to the <video> element.
    clickStart() {
      if (this.rtspUrl === "") {
        return;
      }

      if (this.localCameraID === "") {
        this.localCameraID = this.getUuid();
      }

      if (Wfs.isSupported()) {
        const cameraInfo = {
          cameraID: this.localCameraID,
          rtspUrl: this.rtspUrl,
          isRunning: this.isRunning,
          isGb28181: this.isGb
        };

        const camera = this.$refs.videoPlayer;
        this.wfs = new Wfs();
        const randomId = this.getUuid();
        this.wfsId = randomId;
        this.wfs.attachMedia(camera, "chX", "H264Raw", this.wsAddr, cameraInfo);

        this.checkConnect(randomId);
      }
    },
    /**
     * Random RFC-4122-style v4 UUID (hex digits with dashes at 8/13/18/23).
     * The previous version mixed non-hex letters into the digit table and
     * applied `& 0x3` to a character (NaN for a-f), biasing the variant
     * nibble; parseInt fixes the variant bits to 8..b as the spec requires.
     */
    getUuid() {
      const hexDigits = "0123456789abcdef";
      const s = [];
      for (let i = 0; i < 36; i++) {
        s[i] = hexDigits.charAt(Math.floor(Math.random() * 16));
      }
      s[14] = "4"; // bits 12-15 of the time_hi_and_version field to 0100
      s[19] = hexDigits.charAt((parseInt(s[19], 16) & 0x3) | 0x8); // variant 10xx
      s[8] = s[13] = s[18] = s[23] = "-";
      return s.join("");
    },
    // Enter fullscreen; prefer the standard API, fall back to the
    // prefixed WebKit variant for older browsers.
    fullScreen() {
      const video = this.$refs.videoPlayer;
      if (video.requestFullscreen) {
        video.requestFullscreen();
      } else if (video.webkitRequestFullScreen) {
        video.webkitRequestFullScreen();
      }
    }
  }
};
| | | </script> |
| | | |
| | | <style lang="scss"> |
| | | #area-canvas { |
| | | background: transparent; |
| | | position: absolute; |
| | | top: 0; |
| | | left: 0; |
| | | padding: 0; |
| | | width: 100%; |
| | | height: 100%; |
| | | } |
| | | |
| | | video { |
| | | object-fit: fill; |
| | | width: 100%; |
| | | height: 100%; |
| | | } |
| | | .video-player { |
| | | position: relative; |
| | | width: 100%; |
| | | height: 100%; |
| | | .controls { |
| | | display: none; |
| | | position: absolute; |
| | | bottom: 0%; |
| | | color: #fff; |
| | | width: 100%; |
| | | height: 10%; |
| | | background: -webkit-linear-gradient(rgba(0, 0, 0, 0), rgba(0, 0, 0, 0.9)); |
| | | |
| | | i { |
| | | float: right; |
| | | font-size: 15px; |
| | | margin-right: 7%; |
| | | cursor: pointer; |
| | | } |
| | | i:hover { |
| | | color: rgb(236, 235, 235); |
| | | -moz-box-shadow: 2px 2px 7px #000; |
| | | -webkit-box-shadow: 2px 2px 7px #000; |
| | | box-shadow: 2px 2px 7px #000; |
| | | } |
| | | } |
| | | } |
| | | |
| | | .video-player:hover { |
| | | .controls { |
| | | display: block; |
| | | } |
| | | } |
| | | </style> |
| New file |
| | |
| | | /* |
| | | * Buffer Controller |
| | | */ |
| | | /* eslint-disable */ |
import Event from '../events';
import EventHandler from '../event-handler';
import { ErrorTypes, ErrorDetails } from '../errors';
| | | |
class BufferController extends EventHandler {
  /**
   * Owns the MediaSource attached to the <video> element and the single
   * 'video' SourceBuffer, and drains queued MP4 segments into it.
   * @param {Object} wfs - owning Wfs instance (acts as the event bus).
   */
  constructor(wfs) {
    super(
      wfs,
      Event.MEDIA_ATTACHING,
      Event.BUFFER_APPENDING,
      Event.BUFFER_RESET
    );

    this.mediaSource = null;
    this.media = null;
    this.pendingTracks = {};
    this.sourceBuffer = {};
    // FIFO queue of segments waiting to be appended to the SourceBuffer.
    this.segments = [];

    this.appended = 0;
    this._msDuration = null;

    // Source Buffer listeners
    this.onsbue = this.onSBUpdateEnd.bind(this);

    // 0 = default; 1 = Firefox, which needs an endOfStream()/play() nudge
    // after every append (see onSBUpdateEnd).
    this.browserType = 0;
    if (navigator.userAgent.toLowerCase().indexOf('firefox') !== -1) {
      this.browserType = 1;
    }
    this.mediaType = 'H264Raw';

    this.websocketName = undefined;
    this.channelName = undefined;
    this.cameraInfo = {};
  }

  destroy() {
    EventHandler.prototype.destroy.call(this);
  }

  /**
   * MEDIA_ATTACHING: create a MediaSource, wire its lifecycle listeners
   * and point the <video> element at it via an object URL.
   */
  onMediaAttaching(data) {
    let media = (this.media = data.media);
    this.mediaType = data.mediaType;
    this.websocketName = data.websocketName;
    this.channelName = data.channelName;
    this.cameraInfo = data.cameraInfo;
    if (media) {
      // setup the media source
      var ms = (this.mediaSource = new MediaSource());
      // Media Source listeners
      this.onmso = this.onMediaSourceOpen.bind(this);
      this.onmse = this.onMediaSourceEnded.bind(this);
      this.onmsc = this.onMediaSourceClose.bind(this);
      ms.addEventListener('sourceopen', this.onmso);
      ms.addEventListener('sourceended', this.onmse);
      ms.addEventListener('sourceclose', this.onmsc);
      // link video and media Source
      media.src = URL.createObjectURL(ms);
    }
  }

  onMediaDetaching() {}

  // BUFFER_APPENDING: enqueue the segment and try to drain the queue.
  onBufferAppending(data) {
    if (!this.segments) {
      this.segments = [data];
    } else {
      this.segments.push(data);
    }
    this.doAppending();
  }

  onMediaSourceClose() {
    console.log('media source closed');
  }

  onMediaSourceEnded() {
    console.log('media source ended');
  }

  // SourceBuffer 'updateend': the in-flight append finished, drain more.
  onSBUpdateEnd(event) {
    // Firefox
    if (this.browserType === 1) {
      this.mediaSource.endOfStream();
      this.media.play();
    }

    this.appending = false;
    this.doAppending();
    this.updateMediaElementDuration();
  }

  updateMediaElementDuration() {}

  // MediaSource 'sourceopen': the pipe is ready; announce MEDIA_ATTACHED.
  onMediaSourceOpen() {
    let mediaSource = this.mediaSource;
    if (mediaSource) {
      // once received, don't listen anymore to sourceopen event
      mediaSource.removeEventListener('sourceopen', this.onmso);
    }

    if (this.mediaType === 'FMp4') {
      this.checkPendingTracks();
    }

    this.wfs.trigger(Event.MEDIA_ATTACHED, {
      media: this.media,
      channelName: this.channelName,
      mediaType: this.mediaType,
      websocketName: this.websocketName,
      cameraInfo: this.cameraInfo
    });
  }

  checkPendingTracks() {
    this.createSourceBuffers({ tracks: 'video', mimeType: '' });
    this.pendingTracks = {};
  }

  // BUFFER_RESET: (re)create the SourceBuffer with the codec reported
  // by the demuxer (raw H.264 path).
  onBufferReset(data) {
    if (this.mediaType === 'H264Raw') {
      this.createSourceBuffers({ tracks: 'video', mimeType: data.mimeType });
    }
  }

  /**
   * Create the 'video' SourceBuffer on the MediaSource.
   * @param {{tracks: string, mimeType: string}} tracks - codec info; an
   *   empty mimeType falls back to a generic avc1 codec string.
   */
  createSourceBuffers(tracks) {
    var sourceBuffer = this.sourceBuffer,
      mediaSource = this.mediaSource;
    let mimeType;
    if (tracks.mimeType === '') {
      mimeType = 'video/mp4;codecs=avc1.420028'; // avc1.42c01f avc1.42801e avc1.640028 avc1.420028
    } else {
      mimeType = 'video/mp4;codecs=' + tracks.mimeType;
    }

    try {
      let sb = (sourceBuffer['video'] = mediaSource.addSourceBuffer(mimeType));
      sb.addEventListener('updateend', this.onsbue);
      // (removed `track.buffer = sb;` — `track` was never defined here and
      // the line always threw into the silent catch below)
    } catch (err) {
      // Surface addSourceBuffer failures (unsupported codec string,
      // MediaSource not open) instead of swallowing them silently.
      console.log('addSourceBuffer failed:', err);
    }
    this.wfs.trigger(Event.BUFFER_CREATED, { tracks: tracks });
    this.media.play();
  }

  /**
   * Append the next queued segment, honouring the one-append-at-a-time
   * contract of SourceBuffer (`this.appending` is cleared in onSBUpdateEnd).
   */
  doAppending() {
    var wfs = this.wfs,
      sourceBuffer = this.sourceBuffer,
      segments = this.segments;
    if (Object.keys(sourceBuffer).length) {
      if (this.media.error) {
        this.segments = [];
        console.log(
          'trying to append although a media error occured, flush segment and abort'
        );
        return;
      }
      if (this.appending) {
        // an append is already in flight; onSBUpdateEnd will re-enter
        return;
      }

      if (segments && segments.length) {
        var segment = segments.shift();
        try {
          if (sourceBuffer[segment.type]) {
            this.parent = segment.parent;
            sourceBuffer[segment.type].appendBuffer(segment.data);
            this.appendError = 0;
            this.appended++;
            this.appending = true;
          } else {
            // no SourceBuffer for this segment type: drop the segment
          }
        } catch (err) {
          // in case any error occured while appending, put back segment in segments table
          segments.unshift(segment);
          var event = { type: ErrorTypes.MEDIA_ERROR };
          if (err.code !== 22) {
            // not QuotaExceededError: count the failure and retry until the
            // configured maximum is exceeded
            if (this.appendError) {
              this.appendError++;
            } else {
              this.appendError = 1;
            }
            event.details = ErrorDetails.BUFFER_APPEND_ERROR;
            event.frag = this.fragCurrent;
            if (this.appendError > wfs.config.appendErrorMaxRetry) {
              // give up: flush the shared queue (was `segments = []`, which
              // only rebound the local alias and leaked the queued segments)
              this.segments = [];
              event.fatal = true;
              return;
            } else {
              event.fatal = false;
            }
          } else {
            // DOMException code 22 = QuotaExceededError: buffer full, flush
            this.segments = [];
            event.details = ErrorDetails.BUFFER_FULL_ERROR;
            return;
          }
        }
      }
    }
  }
}
| | | |
| | | export default BufferController; |
| New file |
| | |
| | | /* |
| | | * Flow Controller |
| | | */ |
| | | /* eslint-disable */ |
| | | import Event from '../events'; |
| | | import EventHandler from '../event-handler'; |
| | | |
class FlowController extends EventHandler {
  /**
   * Glue between the transport (websocket), the demuxer and the buffer:
   * opens the websocket once media is attached and forwards every parsed
   * or loaded payload to the BUFFER_APPENDING queue.
   * @param {Object} wfs - owning Wfs instance (event bus).
   */
  constructor(wfs) {
    super(
      wfs,
      Event.MEDIA_ATTACHED,
      Event.BUFFER_CREATED,
      Event.FILE_PARSING_DATA,
      Event.FILE_HEAD_LOADED,
      Event.FILE_DATA_LOADED,
      Event.WEBSOCKET_ATTACHED,
      Event.FRAG_PARSING_DATA,
      Event.FRAG_PARSING_INIT_SEGMENT
    );

    this.fileStart = 0;
    this.fileEnd = 0;
    this.pendingAppending = 0;
    this.mediaType = undefined;
    // was `channelName: this.channelName;` — a labeled no-op statement
    // that never actually initialised the field
    this.channelName = undefined;
  }

  destroy() {
    EventHandler.prototype.destroy.call(this);
  }

  /**
   * MEDIA_ATTACHED: open the websocket to the media gateway and hand it
   * to wfs.attachWebsocket() together with the channel/camera metadata.
   */
  onMediaAttached(data) {
    if (data.websocketName != undefined) {
      var client = new WebSocket(data.websocketName);
      this.wfs.attachWebsocket(client, data.channelName, data.cameraInfo);
    } else {
      console.log('websocketName ERROR!!!');
    }
  }

  onBufferCreated(data) {
    this.mediaType = data.mediaType;
  }

  onFileHeadLoaded(data) {}

  onFileDataLoaded(data) {}

  onFileParsingData(data) {}

  // Raw websocket payload: queue it for the video SourceBuffer.
  onWebsocketAttached(data) {
    this.wfs.trigger(Event.BUFFER_APPENDING, {
      type: 'video',
      data: data.payload,
      parent: 'main'
    });
  }

  // Init segment (ftyp/moov) produced by the remuxer: queue one append
  // per track that carries an initSegment.
  onFragParsingInitSegment(data) {
    var tracks = data.tracks,
      trackName,
      track;

    track = tracks.video;
    if (track) {
      track.id = data.id;
    }

    for (trackName in tracks) {
      track = tracks[trackName];
      var initSegment = track.initSegment;
      if (initSegment) {
        this.pendingAppending++;
        this.wfs.trigger(Event.BUFFER_APPENDING, {
          type: trackName,
          data: initSegment,
          parent: 'main'
        });
      }
    }
  }

  // Media fragment (moof + mdat buffers) produced by the remuxer.
  onFragParsingData(data) {
    [data.data1, data.data2].forEach(buffer => {
      if (buffer) {
        this.pendingAppending++;
        this.wfs.trigger(Event.BUFFER_APPENDING, {
          type: data.type,
          data: buffer,
          parent: 'main'
        });
      }
    });
  }
}
| | | export default FlowController; |
| New file |
| | |
| | | /* eslint-disable */ |
| | | /** |
| | | * Parser for exponential Golomb codes, a variable-bitwidth number encoding scheme used by h264. |
| | | */ |
| | | |
| | | import {logger} from '../utils/logger'; |
| | | |
class ExpGolomb {

  /**
   * @param {Uint8Array} data - RBSP bytes to decode.
   */
  constructor(data) {
    this.data = data;
    // the number of bytes left to examine in this.data
    this.bytesAvailable = this.data.byteLength;
    // the current word being examined
    this.word = 0; // :uint
    // the number of bits left to examine in the current word
    this.bitsAvailable = 0; // :uint
  }

  // ():void
  // Refill the 32-bit working word from the next 1-4 available bytes.
  loadWord() {
    var
      position = this.data.byteLength - this.bytesAvailable,
      workingBytes = new Uint8Array(4),
      availableBytes = Math.min(4, this.bytesAvailable);
    if (availableBytes === 0) {
      throw new Error('no bytes available');
    }
    workingBytes.set(this.data.subarray(position, position + availableBytes));
    this.word = new DataView(workingBytes.buffer).getUint32(0);
    // track the amount of this.data that has been processed
    this.bitsAvailable = availableBytes * 8;
    this.bytesAvailable -= availableBytes;
  }

  // (count:int):void
  skipBits(count) {
    var skipBytes; // :int
    if (this.bitsAvailable > count) {
      this.word <<= count;
      this.bitsAvailable -= count;
    } else {
      count -= this.bitsAvailable;
      skipBytes = count >> 3;
      // Fixed: was `count -= (skipBytes >> 3)`, which left `count` almost
      // unchanged so `word <<= count` relied on JS's shift-mod-32 and
      // `bitsAvailable` went negative, corrupting every later read.
      // The whole bytes skipped must be removed as bits: skipBytes << 3.
      count -= skipBytes << 3;
      this.bytesAvailable -= skipBytes;
      this.loadWord();
      this.word <<= count;
      this.bitsAvailable -= count;
    }
  }

  // (size:int):uint
  readBits(size) {
    var
      bits = Math.min(this.bitsAvailable, size), // :uint
      valu = this.word >>> (32 - bits); // :uint
    if (size > 32) {
      logger.error('Cannot read more than 32 bits at a time');
    }
    this.bitsAvailable -= bits;
    if (this.bitsAvailable > 0) {
      this.word <<= bits;
    } else if (this.bytesAvailable > 0) {
      this.loadWord();
    }
    // if the request spanned the word boundary, read the remainder
    bits = size - bits;
    if (bits > 0) {
      return valu << bits | this.readBits(bits);
    } else {
      return valu;
    }
  }

  // ():uint
  // Count (and consume) leading zero bits — the prefix of an Exp-Golomb code.
  skipLZ() {
    var leadingZeroCount; // :uint
    for (leadingZeroCount = 0; leadingZeroCount < this.bitsAvailable; ++leadingZeroCount) {
      if (0 !== (this.word & (0x80000000 >>> leadingZeroCount))) {
        // the first bit of working word is 1
        this.word <<= leadingZeroCount;
        this.bitsAvailable -= leadingZeroCount;
        return leadingZeroCount;
      }
    }
    // we exhausted word and still have not found a 1
    this.loadWord();
    return leadingZeroCount + this.skipLZ();
  }

  // ():void
  skipUEG() {
    this.skipBits(1 + this.skipLZ());
  }

  // ():void
  skipEG() {
    this.skipBits(1 + this.skipLZ());
  }

  // ():uint  unsigned Exp-Golomb: ue(v)
  readUEG() {
    var clz = this.skipLZ(); // :uint
    return this.readBits(clz + 1) - 1;
  }

  // ():int  signed Exp-Golomb: se(v)
  readEG() {
    var valu = this.readUEG(); // :int
    if (0x01 & valu) {
      // the number is odd if the low order bit is set
      return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
    } else {
      return -1 * (valu >>> 1); // divide by two then make it negative
    }
  }

  // Some convenience functions
  // :Boolean
  readBoolean() {
    return 1 === this.readBits(1);
  }

  // ():int
  readUByte() {
    return this.readBits(8);
  }

  // ():int
  readUShort() {
    return this.readBits(16);
  }
  // ():int
  readUInt() {
    return this.readBits(32);
  }

  /**
   * Advance the ExpGolomb decoder past a scaling list. The scaling
   * list is optionally transmitted as part of a sequence parameter
   * set and is not relevant to transmuxing.
   * @param count {number} the number of entries in this scaling list
   * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
   */
  skipScalingList(count) {
    var
      lastScale = 8,
      nextScale = 8,
      j,
      deltaScale;
    for (j = 0; j < count; j++) {
      if (nextScale !== 0) {
        deltaScale = this.readEG();
        nextScale = (lastScale + deltaScale + 256) % 256;
      }
      lastScale = (nextScale === 0) ? lastScale : nextScale;
    }
  }

  /**
   * Read a sequence parameter set and return some interesting video
   * properties. A sequence parameter set is the H264 metadata that
   * describes the properties of upcoming video frames.
   * @param data {Uint8Array} the bytes of a sequence parameter set
   * @return {object} an object with configuration parsed from the
   * sequence parameter set, including the dimensions of the
   * associated video frames.
   */
  readSPS() {
    var
      frameCropLeftOffset = 0,
      frameCropRightOffset = 0,
      frameCropTopOffset = 0,
      frameCropBottomOffset = 0,
      sarScale = 1,
      profileIdc, profileCompat, levelIdc,
      numRefFramesInPicOrderCntCycle, picWidthInMbsMinus1,
      picHeightInMapUnitsMinus1,
      frameMbsOnlyFlag,
      scalingListCount,
      i;
    this.readUByte();
    profileIdc = this.readUByte(); // profile_idc
    profileCompat = this.readBits(5); // constraint_set[0-4]_flag, u(5)
    this.skipBits(3); // reserved_zero_3bits u(3),
    levelIdc = this.readUByte(); // level_idc u(8)
    this.skipUEG(); // seq_parameter_set_id
    // some profiles have more optional data we don't need
    if (profileIdc === 100 ||
      profileIdc === 110 ||
      profileIdc === 122 ||
      profileIdc === 244 ||
      profileIdc === 44 ||
      profileIdc === 83 ||
      profileIdc === 86 ||
      profileIdc === 118 ||
      profileIdc === 128) {
      var chromaFormatIdc = this.readUEG();
      if (chromaFormatIdc === 3) {
        this.skipBits(1); // separate_colour_plane_flag
      }
      this.skipUEG(); // bit_depth_luma_minus8
      this.skipUEG(); // bit_depth_chroma_minus8
      this.skipBits(1); // qpprime_y_zero_transform_bypass_flag
      if (this.readBoolean()) { // seq_scaling_matrix_present_flag
        scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
        for (i = 0; i < scalingListCount; i++) {
          if (this.readBoolean()) { // seq_scaling_list_present_flag[ i ]
            if (i < 6) {
              this.skipScalingList(16);
            } else {
              this.skipScalingList(64);
            }
          }
        }
      }
    }
    this.skipUEG(); // log2_max_frame_num_minus4
    var picOrderCntType = this.readUEG();
    if (picOrderCntType === 0) {
      this.readUEG(); // log2_max_pic_order_cnt_lsb_minus4
    } else if (picOrderCntType === 1) {
      this.skipBits(1); // delta_pic_order_always_zero_flag
      this.skipEG(); // offset_for_non_ref_pic
      this.skipEG(); // offset_for_top_to_bottom_field
      numRefFramesInPicOrderCntCycle = this.readUEG();
      for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
        this.skipEG(); // offset_for_ref_frame[ i ]
      }
    }
    this.skipUEG(); // max_num_ref_frames
    this.skipBits(1); // gaps_in_frame_num_value_allowed_flag
    picWidthInMbsMinus1 = this.readUEG();
    picHeightInMapUnitsMinus1 = this.readUEG();
    frameMbsOnlyFlag = this.readBits(1);
    if (frameMbsOnlyFlag === 0) {
      this.skipBits(1); // mb_adaptive_frame_field_flag
    }
    this.skipBits(1); // direct_8x8_inference_flag
    if (this.readBoolean()) { // frame_cropping_flag
      frameCropLeftOffset = this.readUEG();
      frameCropRightOffset = this.readUEG();
      frameCropTopOffset = this.readUEG();
      frameCropBottomOffset = this.readUEG();
    }
    if (this.readBoolean()) {
      // vui_parameters_present_flag
      if (this.readBoolean()) {
        // aspect_ratio_info_present_flag
        let sarRatio;
        const aspectRatioIdc = this.readUByte();
        switch (aspectRatioIdc) {
          case 1: sarRatio = [1, 1]; break;
          case 2: sarRatio = [12, 11]; break;
          case 3: sarRatio = [10, 11]; break;
          case 4: sarRatio = [16, 11]; break;
          case 5: sarRatio = [40, 33]; break;
          case 6: sarRatio = [24, 11]; break;
          case 7: sarRatio = [20, 11]; break;
          case 8: sarRatio = [32, 11]; break;
          case 9: sarRatio = [80, 33]; break;
          case 10: sarRatio = [18, 11]; break;
          case 11: sarRatio = [15, 11]; break;
          case 12: sarRatio = [64, 33]; break;
          case 13: sarRatio = [160, 99]; break;
          case 14: sarRatio = [4, 3]; break;
          case 15: sarRatio = [3, 2]; break;
          case 16: sarRatio = [2, 1]; break;
          case 255: {
            // Extended_SAR: explicit 16-bit width/height ratio
            sarRatio = [this.readUByte() << 8 | this.readUByte(), this.readUByte() << 8 | this.readUByte()];
            break;
          }
        }
        if (sarRatio) {
          sarScale = sarRatio[0] / sarRatio[1];
        }
      }
    }
    return {
      width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
      height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) - ((frameMbsOnlyFlag ? 2 : 4) * (frameCropTopOffset + frameCropBottomOffset))
    };
  }

  readSliceType() {
    // skip NALu type
    this.readUByte();
    // discard first_mb_in_slice
    this.readUEG();
    // return slice_type
    return this.readUEG();
  }
}
| | | |
| | | export default ExpGolomb; |
| New file |
| | |
| | | /* eslint-disable */ |
| | | |
| | | import Event from '../events'; |
| | | import ExpGolomb from './exp-golomb'; |
| | | import EventHandler from '../event-handler'; |
| | | import MP4Remuxer from '../remux/mp4-remuxer'; |
| | | import { logger } from '../utils/logger'; |
| | | |
class h264Demuxer extends EventHandler {

  /**
   * Demuxes a raw Annex-B H.264 byte stream into access units and feeds
   * them to MP4Remuxer, which emits fMP4 fragments on the wfs event bus.
   * @param {Object} wfs - owning Wfs instance (event bus).
   * @param {Object|null} config - fallback config when wfs.config is unset.
   */
  constructor(wfs, config = null) {
    super(wfs,
      Event.H264_DATA_PARSING);

    // wfs.config takes precedence over the explicit config argument.
    this.config = this.wfs.config || config;
    this.wfs = wfs;
    this.id = 'main';

    this.remuxer = new MP4Remuxer(this.wfs, this.id, this.config);
    this.contiguous = true;
    this.timeOffset = 1;
    this.sn = 0;                 // running fragment sequence number
    this.TIMESCALE = 90000;      // MPEG 90 kHz clock
    this.timestamp = 0;          // monotonically increasing PTS/DTS counter
    this.scaleFactor = this.TIMESCALE / 1000;
    this.H264_TIMEBASE = 3000;   // fixed per-frame tick increment (90000/3000 = 30 fps)
    // Accumulates parsed samples between remux calls.
    this._avcTrack = {
      container: 'video/mp2t', type: 'video', id: 1, sequenceNumber: 0,
      samples: [], len: 0, nbNalu: 0, dropped: 0, count: 0
    };
    // 0 = default; 1 = Firefox (kept for parity with BufferController).
    this.browserType = 0;
    if (navigator.userAgent.toLowerCase().indexOf('firefox') !== -1) {
      this.browserType = 1;
    }
  }

  destroy() {
    EventHandler.prototype.destroy.call(this);
  }

  // Advance the synthetic clock by one frame tick and return it.
  getTimestampM() {
    this.timestamp += this.H264_TIMEBASE;
    return this.timestamp;
  }

  // H264_DATA_PARSING: parse the chunk into samples, then remux them.
  onH264DataParsing(event) {
    this._parseAVCTrack(event.data);
    // console.log(this.browserType,'onH264DataParsing')
    this.remuxer.pushVideo(0, this.sn, this._avcTrack, this.timeOffset, this.contiguous);
    this.sn += 1;
  }

  /**
   * Split a chunk of Annex-B data into NAL units, group them into access
   * units (AVC samples) and push them onto this._avcTrack.samples.
   * @param {Uint8Array} array - raw H.264 bytes.
   */
  _parseAVCTrack(array) {
    var track = this._avcTrack,
      samples = track.samples,
      units = this._parseAVCNALu(array),
      units2 = [],          // NAL units collected for the current access unit
      debug = false,
      key = false,          // true once an IDR slice was seen in this AU
      length = 0,           // total byte length of units2
      expGolombDecoder,
      avcSample,
      push,                 // whether the current unit is kept in the AU
      i;
    var debugString = '';
    // Flush units2 as one sample with a fresh synthetic timestamp.
    var pushAccesUnit = function () {
      if (units2.length) {
        if (!this.config.forceKeyFrameOnDiscontinuity ||
          key === true ||
          (track.sps && (samples.length || this.contiguous))) {
          var tss = this.getTimestampM();
          avcSample = { units: { units: units2, length: length }, pts: tss, dts: tss, key: key };
          samples.push(avcSample);
          track.len += length;
          track.nbNalu += units2.length;
        } else {
          // no keyframe yet and one is required: drop the access unit
          track.dropped++;
        }
        units2 = [];
        length = 0;
      }
    }.bind(this);

    units.forEach(unit => {
      switch (unit.type) {
        //NDR
        case 1:
          push = true;
          if (debug) {
            debugString += 'NDR ';
          }
          break;
        //IDR
        case 5:
          push = true;
          if (debug) {
            debugString += 'IDR ';
          }
          key = true;
          break;
        //SEI
        // NOTE(review): `push` is not assigned in this case, so whether an
        // SEI unit is kept depends on the previous unit's decision —
        // looks unintentional (upstream demuxers set it explicitly); confirm.
        case 6:
          unit.data = this.discardEPB(unit.data);
          expGolombDecoder = new ExpGolomb(unit.data);
          // skip frameType
          expGolombDecoder.readUByte();
          break;
        //SPS
        case 7:
          push = false;
          if (debug) {
            debugString += 'SPS ';
          }
          // Only the first SPS is parsed; it fixes dimensions and the codec
          // string, then triggers BUFFER_RESET so the SourceBuffer is built.
          if (!track.sps) {
            expGolombDecoder = new ExpGolomb(unit.data);
            var config = expGolombDecoder.readSPS();
            track.width = config.width;
            track.height = config.height;
            track.sps = [unit.data];
            track.duration = 0;
            // codec string: "avc1." + profile/compat/level bytes in hex
            var codecarray = unit.data.subarray(1, 4);
            var codecstring = 'avc1.';
            for (i = 0; i < 3; i++) {
              var h = codecarray[i].toString(16);
              if (h.length < 2) {
                h = '0' + h;
              }
              codecstring += h;
            }
            track.codec = codecstring;
            this.wfs.trigger(Event.BUFFER_RESET, { mimeType: track.codec });
            push = true;
          }
          break;
        //PPS
        case 8:
          push = false;
          if (debug) {
            debugString += 'PPS ';
          }
          if (!track.pps) {
            track.pps = [unit.data];
            push = true;
          }
          break;
        // AUD: access unit delimiter — flush the accumulated access unit
        case 9:
          push = false;
          if (debug) {
            debugString += 'AUD ';
          }
          pushAccesUnit();
          break;
        default:
          push = false;
          debugString += 'unknown NAL ' + unit.type + ' ';
          break;
      }

      if (push) {
        units2.push(unit);
        length += unit.data.byteLength;
      }

    });

    if (debug || debugString.length) {
      logger.log(debugString);
    }

    // flush whatever remains after the last unit
    pushAccesUnit();

  }

  /**
   * Scan for Annex-B start codes (00 00 01 / 00 00 00 01) and return the
   * list of NAL units between them.
   * @param {Uint8Array} array - raw bytes.
   * @returns {Array<{data: Uint8Array, type: number}>}
   */
  _parseAVCNALu(array) {
    var i = 0, len = array.byteLength, value, overflow, state = 0; //state = this.avcNaluState;
    var units = [], unit, unitType, lastUnitStart, lastUnitType;
    while (i < len) {
      value = array[i++];
      // finding 3 or 4-byte start codes (00 00 01 OR 00 00 00 01)
      switch (state) {
        case 0:
          if (value === 0) {
            state = 1;
          }
          break;
        case 1:
          if (value === 0) {
            state = 2;
          } else {
            state = 0;
          }
          break;
        case 2:
        case 3:
          if (value === 0) {
            state = 3;
          } else if (value === 1 && i < len) {
            // start code found; the low 5 bits of the next byte are the NAL type
            unitType = array[i] & 0x1f;
            if (lastUnitStart) {
              // close the previous unit (exclude the start code bytes)
              unit = { data: array.subarray(lastUnitStart, i - state - 1), type: lastUnitType };
              units.push(unit);
            } else {
            }
            lastUnitStart = i;
            lastUnitType = unitType;
            state = 0;
          } else {
            state = 0;
          }
          break;
        default:
          break;
      }
    }

    // close the trailing unit that runs to the end of the chunk
    if (lastUnitStart) {
      unit = { data: array.subarray(lastUnitStart, len), type: lastUnitType, state: state };
      units.push(unit);
    }

    return units;
  }

  /**
   * remove Emulation Prevention bytes from a RBSP
   */
  discardEPB(data) {
    var length = data.byteLength,
      EPBPositions = [],
      i = 1,
      newLength, newData;
    // Find all `Emulation Prevention Bytes`
    while (i < length - 2) {
      if (data[i] === 0 &&
        data[i + 1] === 0 &&
        data[i + 2] === 0x03) {
        EPBPositions.push(i + 2);
        i += 2;
      } else {
        i++;
      }
    }
    // If no Emulation Prevention Bytes were found just return the original
    // array
    if (EPBPositions.length === 0) {
      return data;
    }
    // Create a new array to hold the NAL unit data
    newLength = length - EPBPositions.length;
    newData = new Uint8Array(newLength);
    var sourceIndex = 0;

    for (i = 0; i < newLength; sourceIndex++ , i++) {
      if (sourceIndex === EPBPositions[0]) {
        // Skip this byte
        sourceIndex++;
        // Remove this position index
        EPBPositions.shift();
      }
      newData[i] = data[sourceIndex];
    }
    return newData;
  }


}
| | | export default h264Demuxer; |
| New file |
| | |
| | | /* eslint-disable */ |
/**
 * Broad error categories attached to ERROR events.
 */
export const ErrorTypes = {
  /** Network-level failures: loading errors, timeouts, … */
  NETWORK_ERROR: 'networkError',
  /** Media-level failures: video element, parsing, MediaSource. */
  MEDIA_ERROR: 'mediaError',
  /** Anything that fits neither category above. */
  OTHER_ERROR: 'otherError'
};
| | | |
// Fine-grained error identifiers carried in the `details` field of wfs ERROR
// events, grouped by subsystem (manifest / level / track / fragment / key /
// buffer / internal). Each entry's comment describes the event's `data` shape.
export const ErrorDetails = {
  // Identifier for a manifest load error - data: { url : faulty URL, response : { code: error code, text: error text }}
  MANIFEST_LOAD_ERROR: 'manifestLoadError',
  // Identifier for a manifest load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
  MANIFEST_LOAD_TIMEOUT: 'manifestLoadTimeOut',
  // Identifier for a manifest parsing error - data: { url : faulty URL, reason : error reason}
  MANIFEST_PARSING_ERROR: 'manifestParsingError',
  // Identifier for a manifest with only incompatible codecs error - data: { url : faulty URL, reason : error reason}
  MANIFEST_INCOMPATIBLE_CODECS_ERROR: 'manifestIncompatibleCodecsError',
  // Identifier for a level load error - data: { url : faulty URL, response : { code: error code, text: error text }}
  LEVEL_LOAD_ERROR: 'levelLoadError',
  // Identifier for a level load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
  LEVEL_LOAD_TIMEOUT: 'levelLoadTimeOut',
  // Identifier for a level switch error - data: { level : faulty level Id, event : error description}
  LEVEL_SWITCH_ERROR: 'levelSwitchError',
  // Identifier for an audio track load error - data: { url : faulty URL, response : { code: error code, text: error text }}
  AUDIO_TRACK_LOAD_ERROR: 'audioTrackLoadError',
  // Identifier for an audio track load timeout - data: { url : faulty URL, response : { code: error code, text: error text }}
  AUDIO_TRACK_LOAD_TIMEOUT: 'audioTrackLoadTimeOut',
  // Identifier for fragment load error - data: { frag : fragment object, response : { code: error code, text: error text }}
  FRAG_LOAD_ERROR: 'fragLoadError',
  // Identifier for fragment loop loading error - data: { frag : fragment object}
  FRAG_LOOP_LOADING_ERROR: 'fragLoopLoadingError',
  // Identifier for fragment load timeout error - data: { frag : fragment object}
  FRAG_LOAD_TIMEOUT: 'fragLoadTimeOut',
  // Identifier for a fragment decryption error event - data: parsing error description
  FRAG_DECRYPT_ERROR: 'fragDecryptError',
  // Identifier for a fragment parsing error event - data: parsing error description
  FRAG_PARSING_ERROR: 'fragParsingError',
  // Identifier for decrypt key load error - data: { frag : fragment object, response : { code: error code, text: error text }}
  KEY_LOAD_ERROR: 'keyLoadError',
  // Identifier for decrypt key load timeout error - data: { frag : fragment object}
  KEY_LOAD_TIMEOUT: 'keyLoadTimeOut',
  // Triggered when an exception occurs while adding a sourceBuffer to MediaSource - data : { err : exception , mimeType : mimeType }
  BUFFER_ADD_CODEC_ERROR: 'bufferAddCodecError',
  // Identifier for a buffer append error - data: append error description
  BUFFER_APPEND_ERROR: 'bufferAppendError',
  // Identifier for a buffer appending error event - data: appending error description
  BUFFER_APPENDING_ERROR: 'bufferAppendingError',
  // Identifier for a buffer stalled error event
  BUFFER_STALLED_ERROR: 'bufferStalledError',
  // Identifier for a buffer full event
  BUFFER_FULL_ERROR: 'bufferFullError',
  // Identifier for a buffer seek over hole event
  BUFFER_SEEK_OVER_HOLE: 'bufferSeekOverHole',
  // Identifier for an internal exception happening inside hls.js while handling an event
  INTERNAL_EXCEPTION: 'internalException'
};
| New file |
| | |
| | | /* eslint-disable */ |
| | | /* |
| | | * |
| | | * All objects in the event handling chain should inherit from this class |
| | | * |
| | | */ |
| | | import Event from './events'; |
| | | |
| | | class EventHandler { |
| | | |
| | | constructor(wfs, ...events) { |
| | | this.wfs = wfs; |
| | | this.onEvent = this.onEvent.bind(this); |
| | | this.handledEvents = events; |
| | | this.useGenericHandler = true; |
| | | |
| | | this.registerListeners(); |
| | | } |
| | | |
| | | destroy() { |
| | | this.unregisterListeners(); |
| | | } |
| | | |
| | | isEventHandler() { |
| | | return typeof this.handledEvents === 'object' && this.handledEvents.length && typeof this.onEvent === 'function'; |
| | | } |
| | | |
| | | registerListeners() { |
| | | if (this.isEventHandler()) { |
| | | this.handledEvents.forEach(function(event) { |
| | | if (event === 'wfsEventGeneric') { |
| | | //throw new Error('Forbidden event name: ' + event); |
| | | } |
| | | this.wfs.on(event, this.onEvent); |
| | | }.bind(this)); |
| | | } |
| | | } |
| | | |
| | | unregisterListeners() { |
| | | if (this.isEventHandler()) { |
| | | this.handledEvents.forEach(function(event) { |
| | | this.wfs.off(event, this.onEvent); |
| | | }.bind(this)); |
| | | } |
| | | } |
| | | |
| | | /** |
| | | * arguments: event (string), data (any) |
| | | */ |
| | | onEvent(event, data) { |
| | | this.onEventGeneric(event, data); |
| | | } |
| | | |
| | | onEventGeneric(event, data) { |
| | | var eventToFunction = function(event, data) { |
| | | var funcName = 'on' + event.replace('wfs', ''); |
| | | if (typeof this[funcName] !== 'function') { |
| | | //throw new Error(`Event ${event} has no generic handler in this ${this.constructor.name} class (tried ${funcName})`); |
| | | } |
| | | return this[funcName].bind(this, data); |
| | | }; |
| | | try { |
| | | eventToFunction.call(this, event, data).call(); |
| | | } catch (err) { |
| | | console.log(`internal error happened while processing ${event}:${err.message}`); |
| | | // this.hls.trigger(Event.ERROR, {type: ErrorTypes.OTHER_ERROR, details: ErrorDetails.INTERNAL_EXCEPTION, fatal: false, event : event, err : err}); |
| | | } |
| | | } |
| | | } |
| | | |
| | | export default EventHandler; |
| New file |
| | |
| | | /* eslint-disable */ |
// wfs event-name constants. Every value is prefixed with 'wfs' so that
// EventHandler can map e.g. 'wfsMediaAttached' to an onMediaAttached method.
module.exports = {
  // Media element / MediaSource lifecycle
  MEDIA_ATTACHING: 'wfsMediaAttaching',

  MEDIA_ATTACHED: 'wfsMediaAttached',

  FRAG_LOADING: 'wfsFragLoading',

  // SourceBuffer lifecycle
  BUFFER_CREATED: 'wfsBufferCreated',

  BUFFER_APPENDING: 'wfsBufferAppending',

  BUFFER_RESET: 'wfsBufferReset',

  FRAG_PARSING_DATA: 'wfsFragParsingData',

  FRAG_PARSING_INIT_SEGMENT: 'wfsFragParsingInitSegment',
  //------------------------------------------
  // Raw H.264 pipeline
  H264_DATA_PARSING: 'wfsH264DataParsing',

  H264_DATA_PARSED: 'wfsH264DataParsed',
  //------------------------------------------
  // WebSocket transport
  WEBSOCKET_ATTACHED: 'wfsWebsocketAttached',

  WEBSOCKET_ATTACHING: 'wfsWebsocketAttaching',

  WEBSOCKET_DATA_UPLOADING: 'wfsWebsocketDataUploading',

  WEBSOCKET_MESSAGE_SENDING: 'wfsWebsocketMessageSending',
  //------------------------------------------
  // fMP4 file loading
  FILE_HEAD_LOADING: 'wfsFileHeadLoading',

  FILE_HEAD_LOADED: 'wfsFileHeadLoaded',

  FILE_DATA_LOADING: 'wfsFileDataLoading',

  FILE_DATA_LOADED: 'wfsFileDataLoaded',

  FILE_PARSING_DATA: 'wfsFileParsingData'
  //------------------------------------------

};
| New file |
| | |
| | | /** |
| | | * AAC helper |
| | | */ |
| | | /* eslint-disable */ |
class AAC {
  /**
   * Return a pre-encoded silent AAC frame for the given channel layout.
   * @param {number} channelCount - number of audio channels (1-6)
   * @returns {Uint8Array|null} silent-frame payload, or null when no canned
   *   frame exists for that channel count (strict number match required)
   */
  static getSilentFrame(channelCount) {
    switch (channelCount) {
      case 1:
        return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x23, 0x80]);
      case 2:
        return new Uint8Array([0x21, 0x00, 0x49, 0x90, 0x02, 0x19, 0x00, 0x23, 0x80]);
      case 3:
        return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x8e]);
      case 4:
        return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x80, 0x2c, 0x80, 0x08, 0x02, 0x38]);
      case 5:
        return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x38]);
      case 6:
        return new Uint8Array([0x00, 0xc8, 0x00, 0x80, 0x20, 0x84, 0x01, 0x26, 0x40, 0x08, 0x64, 0x00, 0x82, 0x30, 0x04, 0x99, 0x00, 0x21, 0x90, 0x02, 0x00, 0xb2, 0x00, 0x20, 0x08, 0xe0]);
      default:
        return null;
    }
  }
}
| | | |
| | | export default AAC; |
| New file |
| | |
// This is mostly for support of the es6 module export
// syntax with the babel compiler; it looks like it doesn't support
// function exports like we are used to in node/commonjs, so re-export
// the transpiled default export as the CommonJS entry point.
/* eslint-disable */
module.exports = require('./wfs.js').default;
| New file |
| | |
| | | /* |
| | | * File Loader |
| | | */ |
| | | import Event from '../events'; |
| | | import EventHandler from '../event-handler'; |
| | | |
class FileLoader extends EventHandler {

  /**
   * Bridges wfs events to the configured loader class (wfs.config.loader)
   * for fetching a remote fMP4 file: first a head probe, then ranged loads.
   * @param {Object} wfs - owning wfs instance; supplies config and event bus
   */
  constructor(wfs) {
    super(wfs,
      Event.FRAG_LOADING,
      Event.FILE_HEAD_LOADING,
      Event.FILE_DATA_LOADING);
    // Active loader instances keyed by context type ('head' / 'data') so
    // destroy() can tear down in-flight requests.
    this.loaders = {};
  }

  destroy() {
    for (let loaderName in this.loaders) {
      let loader = this.loaders[loaderName];
      if (loader) {
        loader.destroy();
      }
    }
    this.loaders = {};
    EventHandler.prototype.destroy.call(this);
  }

  // Probe the remote file's head (size) before issuing ranged data loads.
  onFileHeadLoading(data) {
    let config = this.wfs.config;
    let loader = new config.loader(config);
    // Track the loader so destroy() can abort it (previously it leaked).
    this.loaders.head = loader;
    let loaderContext, loaderConfig, loaderCallbacks;
    // `type` lets loaderror/loadtimeout clear the right this.loaders slot.
    loaderContext = { url : config.fmp4FileUrl, type : 'head' };
    loaderConfig = { maxRetry : 0 , retryDelay : 0 };
    loaderCallbacks = {
      onSuccess : this.fileloadheadsuccess.bind(this),
      // Previously load failures were silently dropped: loaderror/loadtimeout
      // existed but were never registered. Route them to the loader callbacks.
      onError : this.loaderror.bind(this),
      onTimeout : this.loadtimeout.bind(this)
    };
    loader.loadHead(loaderContext,loaderConfig,loaderCallbacks);
  }

  // Re-publish the probed size on the wfs bus.
  fileloadheadsuccess(response ) {
    this.wfs.trigger(Event.FILE_HEAD_LOADED, { size: response});
  }

  // Load one byte range ({fileStart, fileEnd}) of the remote fMP4 file.
  onFileDataLoading(data) {
    let config = this.wfs.config;
    let loader = new config.loader(config);
    this.loaders.data = loader;
    let loaderContext, loaderConfig, loaderCallbacks;
    loaderContext = { url : config.fmp4FileUrl, type : 'data', responseType : 'arraybuffer', progressData : false};
    let start = data.fileStart, end = data.fileEnd;
    // Only request a byte range when both bounds are numeric.
    if (!isNaN(start) && !isNaN(end)) {
      loaderContext.rangeStart = start;
      loaderContext.rangeEnd = end;
    }
    loaderConfig = { timeout : config.fragLoadingTimeOut, maxRetry : 0 , retryDelay : 0, maxRetryDelay : config.fragLoadingMaxRetryTimeout};
    loaderCallbacks = {
      onSuccess : this.fileloaddatasuccess.bind(this),
      onError : this.loaderror.bind(this),
      onTimeout : this.loadtimeout.bind(this)
    };
    loader.load(loaderContext,loaderConfig,loaderCallbacks);
  }

  fileloaddatasuccess(response, stats, context) {
    this.wfs.trigger(Event.FILE_DATA_LOADED, {payload: response.data, stats: stats});
  }

  // Abort and forget the failing loader; context.type selects the slot.
  loaderror(response, context) {
    let loader = context.loader;
    if (loader) {
      loader.abort();
    }
    this.loaders[context.type] = undefined;
  }

  loadtimeout(stats, context) {
    let loader = context.loader;
    if (loader) {
      loader.abort();
    }
    this.loaders[context.type] = undefined;
  }

  // NOTE(review): assumes context.frag exists on progress callbacks — the
  // contexts built above do not set one; confirm against the loader impl.
  loadprogress(stats, context, data) {
    let frag = context.frag;
    frag.loaded = stats.loaded;
  }

}
| | | |
| | | export default FileLoader; |
| New file |
| | |
| | | /* |
| | | * Websocket Loader |
| | | */ |
| | | /* eslint-disable */ |
| | | import Event from '../events' |
| | | import EventHandler from '../event-handler' |
| | | import SlicesReader from '../utils/h264-nal-slicesreader.js' |
| | | |
// Receives media over a WebSocket and republishes it on the wfs event bus:
// fMP4 payloads as WEBSOCKET_ATTACHED, raw H.264 as H264_DATA_PARSING.
class WebsocketLoader extends EventHandler {
  constructor(wfs) {
    super(
      wfs,
      Event.WEBSOCKET_ATTACHING,
      Event.WEBSOCKET_DATA_UPLOADING,
      Event.WEBSOCKET_MESSAGE_SENDING
    )
    this.buf = null // most recent binary frame as Uint8Array
    this.slicesReader = new SlicesReader(wfs)
    this.mediaType = undefined // 'FMp4' or 'H264Raw', set on attach
    this.channelName = undefined
    this.cameraInfo = {}
  }

  destroy() {
    // Close the socket only if one was ever attached.
    !!this.client && this.client.close()
    this.slicesReader.destroy()
    EventHandler.prototype.destroy.call(this)
  }

  // Adopt the WebSocket supplied via the attach event and wire its callbacks.
  onWebsocketAttaching(data) {
    this.mediaType = data.mediaType
    this.channelName = data.channelName
    this.cameraInfo = data.cameraInfo
    if (data.websocket instanceof WebSocket) {
      this.client = data.websocket
      this.client.onopen = this.initSocketClient.bind(this)
      // NOTE: deliberately a plain function — inside the handler `this` is the
      // WebSocket itself, so `disconnected` is set on the socket object; the
      // player component polls client.disconnected to drive reconnection.
      this.client.onclose = function (e) {
        console.log('Websocket Disconnected!')
        this.disconnected = true;
        // console.log(this)
        // this.close = true;
        // this.wfs.attachMedia(this.media, "chX", "H264Raw", this.client.url, this.cameraInfo);
      }
    }
  }

  // onopen: switch to binary frames, then announce the channel 'open' command.
  initSocketClient(client) {
    this.client.binaryType = 'arraybuffer'
    this.client.onmessage = this.receiveSocketMessage.bind(this)
    this.wfs.trigger(Event.WEBSOCKET_MESSAGE_SENDING, {
      commandType: 'open',
      channelName: this.channelName,
      commandValue: 'NA',
      cameraInfo: this.cameraInfo
    })
    this.client.disconnected = false
    // console.log(this)
    console.log('Websocket Open!')
  }

  // Copy each incoming binary frame and route it by the attached media type.
  receiveSocketMessage(event) {
    this.buf = new Uint8Array(event.data)
    var copy = new Uint8Array(this.buf)

    if (this.mediaType === 'FMp4') {
      this.wfs.trigger(Event.WEBSOCKET_ATTACHED, { payload: copy })
    }
    if (this.mediaType === 'H264Raw') {
      this.wfs.trigger(Event.H264_DATA_PARSING, { data: copy })
    }
  }

  // Forward raw upload payloads straight to the server.
  onWebsocketDataUploading(event) {
    this.client.send(event.data)
  }

  // Serialize the camera command. Note only cameraInfo fields are sent;
  // commandType/channelName/commandValue from the event are not serialized.
  onWebsocketMessageSending(event) {
    this.client.send(
      JSON.stringify({
        cameraID: event.cameraInfo.cameraID,
        rtspUrl: event.cameraInfo.rtspUrl,
        isRunning: event.cameraInfo.isRunning,
        isGb28181: event.cameraInfo.isGb28181
      })
    )
  }
}
| | | |
| | | export default WebsocketLoader |
| New file |
| | |
| | | /** |
| | | * dummy remuxer |
| | | */ |
| | | |
class DummyRemuxer {
  /**
   * No-op remuxer: drains demuxed sample queues without producing output.
   * @param observer - event observer (stored, otherwise unused here)
   * @param id - demuxer/remuxer pairing id
   */
  constructor(observer, id) {
    this.observer = observer;
    this.id = id;
  }

  // A dummy remuxer never passes data through untouched.
  get passthrough() {
    return false;
  }

  destroy() {
  }

  insertDiscontinuity() {
  }

  /**
   * Drain every track's sample queue in place. timeOffset is accepted for
   * interface compatibility but has no effect.
   */
  remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset) {
    this._remuxAACSamples(audioTrack, timeOffset);
    this._remuxAVCSamples(videoTrack, timeOffset);
    this._remuxID3Samples(id3Track, timeOffset);
    this._remuxTextSamples(textTrack, timeOffset);
  }

  // Drain each AVC sample and, within it, every NAL unit (timeOffset unused).
  _remuxAVCSamples(track, timeOffset) {
    while (track.samples.length) {
      const avcSample = track.samples.shift();
      while (avcSample.units.units.length) {
        avcSample.units.units.shift();
      }
    }
  }

  // Drain queued AAC samples (timeOffset unused).
  _remuxAACSamples(track, timeOffset) {
    while (track.samples.length) {
      track.samples.shift();
    }
  }

  // Drain queued ID3 samples (timeOffset unused).
  _remuxID3Samples(track, timeOffset) {
    while (track.samples.length) {
      track.samples.shift();
    }
  }

  // Drain queued text samples (timeOffset unused).
  _remuxTextSamples(track, timeOffset) {
    while (track.samples.length) {
      track.samples.shift();
    }
  }
}
| | | |
| | | export default DummyRemuxer; |
| | | |
| New file |
| | |
| | | /* eslint-disable */ |
| | | /** |
| | | * Generate MP4 Box |
| | | */ |
| | | |
| | | //import Hex from '../utils/hex'; |
| | | class MP4 { |
  /**
   * Populate the static lookup tables shared by every generator: MP4.types
   * (fourcc -> char-code array), canned handler payloads (MP4.HDLR_TYPES),
   * empty sample tables (STTS/STSC/STCO/STSZ), media-header payloads
   * (VMHD/SMHD), the STSD prefix, and the pre-built FTYP / DINF boxes.
   * Must be called once before any box generation.
   */
  static init() {
    MP4.types = {
      avc1: [], // codingname
      avcC: [],
      btrt: [],
      dinf: [],
      dref: [],
      esds: [],
      ftyp: [],
      hdlr: [],
      mdat: [],
      mdhd: [],
      mdia: [],
      mfhd: [],
      minf: [],
      moof: [],
      moov: [],
      mp4a: [],
      mvex: [],
      mvhd: [],
      sdtp: [],
      stbl: [],
      stco: [],
      stsc: [],
      stsd: [],
      stsz: [],
      stts: [],
      tfdt: [],
      tfhd: [],
      traf: [],
      trak: [],
      trun: [],
      trex: [],
      tkhd: [],
      vmhd: [],
      smhd: []
    };

    var i;
    // Replace each placeholder with the four char codes of its fourcc name.
    for (i in MP4.types) {
      if (MP4.types.hasOwnProperty(i)) {
        MP4.types[i] = [
          i.charCodeAt(0),
          i.charCodeAt(1),
          i.charCodeAt(2),
          i.charCodeAt(3)
        ];
      }
    }

    var videoHdlr = new Uint8Array([
      0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x00, // pre_defined
      0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x56, 0x69, 0x64, 0x65,
      0x6f, 0x48, 0x61, 0x6e,
      0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'VideoHandler'
    ]);

    var audioHdlr = new Uint8Array([
      0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x00, // pre_defined
      0x73, 0x6f, 0x75, 0x6e, // handler_type: 'soun'
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00, // reserved
      0x53, 0x6f, 0x75, 0x6e,
      0x64, 0x48, 0x61, 0x6e,
      0x64, 0x6c, 0x65, 0x72, 0x00 // name: 'SoundHandler'
    ]);

    MP4.HDLR_TYPES = {
      'video': videoHdlr,
      'audio': audioHdlr
    };

    var dref = new Uint8Array([
      0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x01, // entry_count
      0x00, 0x00, 0x00, 0x0c, // entry_size
      0x75, 0x72, 0x6c, 0x20, // 'url' type
      0x00, // version 0
      0x00, 0x00, 0x01 // entry_flags
    ]);

    var stco = new Uint8Array([
      0x00, // version
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x00 // entry_count
    ]);

    // stts/stsc/stco share the same all-zero "empty table" payload.
    MP4.STTS = MP4.STSC = MP4.STCO = stco;

    MP4.STSZ = new Uint8Array([
      0x00, // version
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x00, // sample_size
      0x00, 0x00, 0x00, 0x00, // sample_count
    ]);
    MP4.VMHD = new Uint8Array([
      0x00, // version
      0x00, 0x00, 0x01, // flags
      0x00, 0x00, // graphicsmode
      0x00, 0x00,
      0x00, 0x00,
      0x00, 0x00 // opcolor
    ]);
    MP4.SMHD = new Uint8Array([
      0x00, // version
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, // balance
      0x00, 0x00 // reserved
    ]);

    MP4.STSD = new Uint8Array([
      0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x01]);// entry_count

    var majorBrand = new Uint8Array([105, 115, 111, 109]); // isom
    var avc1Brand = new Uint8Array([97, 118, 99, 49]); // avc1
    var minorVersion = new Uint8Array([0, 0, 0, 1]);

    MP4.FTYP = MP4.box(MP4.types.ftyp, majorBrand, minorVersion, majorBrand, avc1Brand);
    MP4.DINF = MP4.box(MP4.types.dinf, MP4.box(MP4.types.dref, dref));
  }
| | | |
| | | static box(type) { |
| | | var |
| | | payload = Array.prototype.slice.call(arguments, 1), |
| | | size = 8, |
| | | i = payload.length, |
| | | len = i, |
| | | result; |
| | | // calculate the total size we need to allocate |
| | | while (i--) { |
| | | size += payload[i].byteLength; |
| | | } |
| | | result = new Uint8Array(size); |
| | | result[0] = (size >> 24) & 0xff; |
| | | result[1] = (size >> 16) & 0xff; |
| | | result[2] = (size >> 8) & 0xff; |
| | | result[3] = size & 0xff; |
| | | result.set(type, 4); |
| | | // copy the payload into the result |
| | | for (i = 0, size = 8; i < len; i++) { |
| | | // copy payload[i] array @ offset size |
| | | result.set(payload[i], size); |
| | | size += payload[i].byteLength; |
| | | } |
| | | return result; |
| | | } |
| | | |
  /** Build an 'hdlr' box from the canned 'video'/'audio' handler payload. */
  static hdlr(type) {
    return MP4.box(MP4.types.hdlr, MP4.HDLR_TYPES[type]);
  }
| | | |
  /** Wrap raw sample data in an 'mdat' (media data) box. */
  static mdat(data) {
    // console.log( "mdat==> ",data.length );
    return MP4.box(MP4.types.mdat, data);
  }
| | | |
  /**
   * Build an 'mdhd' (media header) box. `duration` is scaled into media
   * timescale units; only the low 32 bits of each field survive the bitwise
   * serialization below (JS shifts operate on 32-bit values).
   */
  static mdhd(timescale, duration) {
    duration *= timescale;
    return MP4.box(MP4.types.mdhd, new Uint8Array([
      0x00, // version 0
      0x00, 0x00, 0x00, // flags
      0x00, 0x00, 0x00, 0x02, // creation_time
      0x00, 0x00, 0x00, 0x03, // modification_time
      (timescale >> 24) & 0xFF,
      (timescale >> 16) & 0xFF,
      (timescale >> 8) & 0xFF,
      timescale & 0xFF, // timescale
      (duration >> 24),
      (duration >> 16) & 0xFF,
      (duration >> 8) & 0xFF,
      duration & 0xFF, // duration
      0x55, 0xc4, // 'und' language (undetermined)
      0x00, 0x00
    ]));
  }
| | | |
  /** Build an 'mdia' (media) box: media header + handler + media information. */
  static mdia(track) {
    return MP4.box(MP4.types.mdia, MP4.mdhd(track.timescale, track.duration), MP4.hdlr(track.type), MP4.minf(track));
  }
| | | |
  /** Build an 'mfhd' (movie fragment header) box carrying the fragment sequence number. */
  static mfhd(sequenceNumber) {
    return MP4.box(MP4.types.mfhd, new Uint8Array([
      0x00,
      0x00, 0x00, 0x00, // flags
      (sequenceNumber >> 24),
      (sequenceNumber >> 16) & 0xFF,
      (sequenceNumber >> 8) & 0xFF,
      sequenceNumber & 0xFF, // sequence_number
    ]));
  }
| | | |
| | | static minf(track) { |
| | | if (track.type === 'audio') { |
| | | return MP4.box(MP4.types.minf, MP4.box(MP4.types.smhd, MP4.SMHD), MP4.DINF, MP4.stbl(track)); |
| | | } else { |
| | | return MP4.box(MP4.types.minf, MP4.box(MP4.types.vmhd, MP4.VMHD), MP4.DINF, MP4.stbl(track)); |
| | | } |
| | | } |
| | | |
  /** Build a 'moof' (movie fragment) box: mfhd + one traf for the track. */
  static moof(sn, baseMediaDecodeTime, track) {
    return MP4.box(MP4.types.moof, MP4.mfhd(sn), MP4.traf(track, baseMediaDecodeTime));
  }
| | | /** |
| | | * @param tracks... (optional) {array} the tracks associated with this movie |
| | | */ |
| | | static moov(tracks) { |
| | | var |
| | | i = tracks.length, |
| | | boxes = []; |
| | | |
| | | while (i--) { |
| | | boxes[i] = MP4.trak(tracks[i]); |
| | | } |
| | | |
| | | return MP4.box.apply(null, [MP4.types.moov, MP4.mvhd(tracks[0].timescale, tracks[0].duration)].concat(boxes).concat(MP4.mvex(tracks))); |
| | | } |
| | | |
| | | static mvex(tracks) { |
| | | var |
| | | i = tracks.length, |
| | | boxes = []; |
| | | |
| | | while (i--) { |
| | | boxes[i] = MP4.trex(tracks[i]); |
| | | } |
| | | return MP4.box.apply(null, [MP4.types.mvex].concat(boxes)); |
| | | } |
| | | |
  /**
   * Build an 'mvhd' (movie header) box. The duration is deliberately forced
   * to 0 (see below) — presumably to signal an open-ended/live movie; the
   * scaled variant is kept commented out. TODO(review): confirm intent.
   */
  static mvhd(timescale, duration) {
    // duration *= timescale;
    duration *= 0;
    var
      bytes = new Uint8Array([
        0x00, // version 0
        0x00, 0x00, 0x00, // flags
        0x00, 0x00, 0x00, 0x01, // creation_time
        0x00, 0x00, 0x00, 0x02, // modification_time
        (timescale >> 24) & 0xFF,
        (timescale >> 16) & 0xFF,
        (timescale >> 8) & 0xFF,
        timescale & 0xFF, // timescale
        (duration >> 24) & 0xFF,
        (duration >> 16) & 0xFF,
        (duration >> 8) & 0xFF,
        duration & 0xFF, // duration
        0x00, 0x01, 0x00, 0x00, // 1.0 rate
        0x01, 0x00, // 1.0 volume
        0x00, 0x00, // reserved
        0x00, 0x00, 0x00, 0x00, // reserved
        0x00, 0x00, 0x00, 0x00, // reserved
        0x00, 0x01, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
        0x00, 0x01, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
        0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
        0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, // pre_defined
        0xff, 0xff, 0xff, 0xff // next_track_ID
      ]);
    return MP4.box(MP4.types.mvhd, bytes);
  }
| | | |
| | | static sdtp(track) { |
| | | var |
| | | samples = track.samples || [], |
| | | bytes = new Uint8Array(4 + samples.length), |
| | | flags, |
| | | i; |
| | | // leave the full box header (4 bytes) all zero |
| | | // write the sample table |
| | | for (i = 0; i < samples.length; i++) { |
| | | flags = samples[i].flags; |
| | | bytes[i + 4] = (flags.dependsOn << 4) | |
| | | (flags.isDependedOn << 2) | |
| | | (flags.hasRedundancy); |
| | | } |
| | | |
| | | return MP4.box(MP4.types.sdtp, bytes); |
| | | } |
| | | |
| | | static stbl(track) { |
| | | return MP4.box(MP4.types.stbl, MP4.stsd(track), MP4.box(MP4.types.stts, MP4.STTS), MP4.box(MP4.types.stsc, MP4.STSC), MP4.box(MP4.types.stsz, MP4.STSZ), MP4.box(MP4.types.stco, MP4.STCO)); |
| | | } |
| | | |
  /**
   * Build an 'avc1' sample entry: avcC decoder config assembled from the
   * track's SPS/PPS NALUs, plus a 'btrt' bitrate box. Profile/compat/level
   * bytes are read from the first SPS (indices 3-5: two length-prefix bytes
   * then the NAL header precede them).
   * NOTE(review): the trailing pre_defined bytes are 0x11,0x11 although the
   * comment says -1 (0xff,0xff); this matches upstream hls.js — confirm.
   */
  static avc1(track) {
    var sps = [], pps = [], i, data, len;
    // assemble the SPSs

    for (i = 0; i < track.sps.length; i++) {
      data = track.sps[i];
      len = data.byteLength;
      // 2-byte big-endian length prefix before each SPS payload.
      sps.push((len >>> 8) & 0xFF);
      sps.push((len & 0xFF));
      sps = sps.concat(Array.prototype.slice.call(data)); // SPS
    }

    // assemble the PPSs
    for (i = 0; i < track.pps.length; i++) {
      data = track.pps[i];
      len = data.byteLength;
      pps.push((len >>> 8) & 0xFF);
      pps.push((len & 0xFF));
      pps = pps.concat(Array.prototype.slice.call(data));
    }

    var avcc = MP4.box(MP4.types.avcC, new Uint8Array([
      0x01, // version
      sps[3], // profile
      sps[4], // profile compat
      sps[5], // level
      0xfc | 3, // lengthSizeMinusOne, hard-coded to 4 bytes
      0xE0 | track.sps.length // 3bit reserved (111) + numOfSequenceParameterSets
    ].concat(sps).concat([
      track.pps.length // numOfPictureParameterSets
    ]).concat(pps))), // "PPS"
      width = track.width,
      height = track.height;
    //console.log('avcc:' + Hex.hexDump(avcc));
    return MP4.box(MP4.types.avc1, new Uint8Array([
      0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // data_reference_index
      0x00, 0x00, // pre_defined
      0x00, 0x00, // reserved
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00, // pre_defined
      (width >> 8) & 0xFF,
      width & 0xff, // width
      (height >> 8) & 0xFF,
      height & 0xff, // height
      0x00, 0x48, 0x00, 0x00, // horizresolution
      0x00, 0x48, 0x00, 0x00, // vertresolution
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // frame_count
      0x12,
      0x6a, 0x65, 0x66, 0x66, // wfs.js
      0x2d, 0x79, 0x61, 0x6e,
      0x2f, 0x2f, 0x2f, 0x67,
      0x77, 0x66, 0x73, 0x2E,
      0x6A, 0x73, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, // compressorname
      0x00, 0x18, // depth = 24
      0x11, 0x11]), // pre_defined = -1
      avcc,
      MP4.box(MP4.types.btrt, new Uint8Array([
        0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB
        0x00, 0x2d, 0xc6, 0xc0, // maxBitrate
        0x00, 0x2d, 0xc6, 0xc0])) // avgBitrate
    );
  }
| | | |
  /**
   * Build the raw 'esds' (elementary stream descriptor) payload — NOT boxed;
   * the caller (MP4.mp4a) wraps it in an esds box. Embeds the track's
   * AudioSpecificConfig bytes (track.config).
   */
  static esds(track) {
    var configlen = track.config.length;
    return new Uint8Array([
      0x00, // version 0
      0x00, 0x00, 0x00, // flags

      0x03, // descriptor_type
      0x17 + configlen, // length
      0x00, 0x01, //es_id
      0x00, // stream_priority

      0x04, // descriptor_type
      0x0f + configlen, // length
      0x40, //codec : mpeg4_audio
      0x15, // stream_type
      0x00, 0x00, 0x00, // buffer_size
      0x00, 0x00, 0x00, 0x00, // maxBitrate
      0x00, 0x00, 0x00, 0x00, // avgBitrate

      0x05 // descriptor_type
    ].concat([configlen]).concat(track.config).concat([0x06, 0x01, 0x02])); // GASpecificConfig)); // length + audio config descriptor
  }
| | | |
  /**
   * Build an 'mp4a' audio sample entry (channel count, 16-bit samples,
   * sample rate) with a nested 'esds' box.
   */
  static mp4a(track) {
    var audiosamplerate = track.audiosamplerate;
    return MP4.box(MP4.types.mp4a, new Uint8Array([
      0x00, 0x00, 0x00, // reserved
      0x00, 0x00, 0x00, // reserved
      0x00, 0x01, // data_reference_index
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, track.channelCount, // channelcount
      0x00, 0x10, // sampleSize:16bits
      0x00, 0x00, 0x00, 0x00, // reserved2
      (audiosamplerate >> 8) & 0xFF,
      audiosamplerate & 0xff, //
      0x00, 0x00]),
      MP4.box(MP4.types.esds, MP4.esds(track)));
  }
| | | |
| | | static stsd(track) { |
| | | if (track.type === 'audio') { |
| | | return MP4.box(MP4.types.stsd, MP4.STSD, MP4.mp4a(track)); |
| | | } else { |
| | | return MP4.box(MP4.types.stsd, MP4.STSD, MP4.avc1(track)); |
| | | } |
| | | } |
| | | |
  /**
   * Build a 'tkhd' (track header) box: track id, scaled duration, unity
   * transform, and 16.16 fixed-point width/height. Only the low 32 bits of
   * the scaled duration are serialized.
   */
  static tkhd(track) {
    var id = track.id,
      duration = track.duration * track.timescale,
      width = track.width,
      height = track.height;

    // console.log( "tkhd==> ",track.id, track.duration, track.timescale, width,height );

    return MP4.box(MP4.types.tkhd, new Uint8Array([
      0x00, // version 0
      0x00, 0x00, 0x07, // flags
      0x00, 0x00, 0x00, 0x00, // creation_time
      0x00, 0x00, 0x00, 0x00, // modification_time
      (id >> 24) & 0xFF,
      (id >> 16) & 0xFF,
      (id >> 8) & 0xFF,
      id & 0xFF, // track_ID
      0x00, 0x00, 0x00, 0x00, // reserved
      (duration >> 24),
      (duration >> 16) & 0xFF,
      (duration >> 8) & 0xFF,
      duration & 0xFF, // duration
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00, // reserved
      0x00, 0x00, // layer
      0x00, 0x00, // alternate_group
      0x00, 0x00, // non-audio track volume
      0x00, 0x00, // reserved
      0x00, 0x01, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x01, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x40, 0x00, 0x00, 0x00, // transformation: unity matrix
      (width >> 8) & 0xFF,
      width & 0xFF,
      0x00, 0x00, // width
      (height >> 8) & 0xFF,
      height & 0xFF,
      0x00, 0x00 // height
    ]));
  }
| | | |
  /**
   * Build a 'traf' (track fragment) box: tfhd (track id), tfdt (base decode
   * time), trun (sample table), and a trailing sdtp. The literal sum passed
   * to trun is the byte distance from moof start to the mdat payload, so the
   * trun data_offset points at the first sample.
   */
  static traf(track, baseMediaDecodeTime) {
    var sampleDependencyTable = MP4.sdtp(track),
      id = track.id;



    // console.log( "traf==> ",id ,baseMediaDecodeTime);

    return MP4.box(MP4.types.traf,
      MP4.box(MP4.types.tfhd, new Uint8Array([
        0x00, // version 0
        0x00, 0x00, 0x00, // flags
        (id >> 24),
        (id >> 16) & 0XFF,
        (id >> 8) & 0XFF,
        (id & 0xFF) // track_ID
      ])),
      MP4.box(MP4.types.tfdt, new Uint8Array([
        0x00, // version 0
        0x00, 0x00, 0x00, // flags
        (baseMediaDecodeTime >> 24),
        (baseMediaDecodeTime >> 16) & 0XFF,
        (baseMediaDecodeTime >> 8) & 0XFF,
        (baseMediaDecodeTime & 0xFF) // baseMediaDecodeTime
      ])),
      MP4.trun(track,
        sampleDependencyTable.length +
        16 + // tfhd
        16 + // tfdt
        8 + // traf header
        16 + // mfhd
        8 + // moof header
        8), // mdat header
      sampleDependencyTable);
  }
| | | |
| | | /** |
| | | * Generate a track box. |
| | | * @param track {object} a track definition |
| | | * @return {Uint8Array} the track box |
| | | */ |
| | | static trak(track) { |
| | | track.duration = track.duration || 0xffffffff; |
| | | return MP4.box(MP4.types.trak, MP4.tkhd(track), MP4.mdia(track)); |
| | | } |
| | | |
  /**
   * Build a 'trex' (track extends) box with default sample settings for the
   * given track id (used inside 'mvex' for fragmented playback).
   */
  static trex(track) {
    var id = track.id;
    return MP4.box(MP4.types.trex, new Uint8Array([
      0x00, // version 0
      0x00, 0x00, 0x00, // flags
      (id >> 24),
      (id >> 16) & 0XFF,
      (id >> 8) & 0XFF,
      (id & 0xFF), // track_ID
      0x00, 0x00, 0x00, 0x01, // default_sample_description_index
      0x00, 0x00, 0x00, 0x00, // default_sample_duration
      0x00, 0x00, 0x00, 0x00, // default_sample_size
      0x00, 0x01, 0x00, 0x01 // default_sample_flags
    ]));
  }
| | | |
| | | static trun(track, offset) { |
| | | var samples = track.samples || [], |
| | | len = samples.length, |
| | | arraylen = 12 + (16 * len), |
| | | array = new Uint8Array(arraylen), |
| | | i, sample, duration, size, flags, cts; |
| | | |
| | | //sample = samples[0]; |
| | | // console.log( "trun==> ",sample.duration, sample.cts ,sample.size,len ); |
| | | |
| | | offset += 8 + arraylen; |
| | | array.set([ |
| | | 0x00, // version 0 |
| | | 0x00, 0x0f, 0x01, // flags |
| | | (len >>> 24) & 0xFF, |
| | | (len >>> 16) & 0xFF, |
| | | (len >>> 8) & 0xFF, |
| | | len & 0xFF, // sample_count |
| | | (offset >>> 24) & 0xFF, |
| | | (offset >>> 16) & 0xFF, |
| | | (offset >>> 8) & 0xFF, |
| | | offset & 0xFF // data_offset |
| | | ], 0); |
| | | for (i = 0; i < len; i++) { |
| | | sample = samples[i]; |
| | | duration = sample.duration; |
| | | size = sample.size; |
| | | flags = sample.flags; |
| | | cts = sample.cts; |
| | | array.set([ |
| | | (duration >>> 24) & 0xFF, |
| | | (duration >>> 16) & 0xFF, |
| | | (duration >>> 8) & 0xFF, |
| | | duration & 0xFF, // sample_duration |
| | | (size >>> 24) & 0xFF, |
| | | (size >>> 16) & 0xFF, |
| | | (size >>> 8) & 0xFF, |
| | | size & 0xFF, // sample_size |
| | | (flags.isLeading << 2) | flags.dependsOn, |
| | | (flags.isDependedOn << 6) | |
| | | (flags.hasRedundancy << 4) | |
| | | (flags.paddingValue << 1) | |
| | | flags.isNonSync, |
| | | flags.degradPrio & 0xF0 << 8, |
| | | flags.degradPrio & 0x0F, // sample_flags |
| | | (cts >>> 24) & 0xFF, |
| | | (cts >>> 16) & 0xFF, |
| | | (cts >>> 8) & 0xFF, |
| | | cts & 0xFF // sample_composition_time_offset |
| | | ], 12 + 16 * i); |
| | | } |
| | | return MP4.box(MP4.types.trun, array); |
| | | } |
| | | |
| | | static initSegment(tracks) { |
| | | if (!MP4.types) { |
| | | MP4.init(); |
| | | } |
| | | var movie = MP4.moov(tracks), result; |
| | | result = new Uint8Array(MP4.FTYP.byteLength + movie.byteLength); |
| | | result.set(MP4.FTYP); |
| | | result.set(movie, MP4.FTYP.byteLength); |
| | | |
| | | |
| | | return result; |
| | | } |
| | | } |
| | | |
// Default export: the static fMP4 box-builder consumed by the remuxer.
export default MP4;
| New file |
| | |
| | | /* eslint-disable */ |
| | | /** |
| | | * fMP4 remuxer |
| | | */ |
| | | import AAC from '../helper/aac'; |
| | | import Event from '../events'; |
| | | import { logger } from '../utils/logger'; |
| | | import MP4 from '../remux/mp4-generator'; |
| | | import { ErrorTypes, ErrorDetails } from '../errors'; |
| | | import '../utils/polyfill'; |
| | | |
| | | class MP4Remuxer { |
constructor(observer, id, config) {
  // Event bus on which FRAG_PARSING_* / FRAG_PARSED events are triggered.
  this.observer = observer;
  // Identifier echoed back in every emitted event payload.
  this.id = id;
  this.config = config;
  // True once the MP4 init segment has been emitted for the current level.
  this.ISGenerated = false;
  // MPEG-TS PES timestamps tick at 90 kHz; the MP4 timescale used by
  // remuxVideo/remuxAudio is PES / 4 (i.e. 22500).
  this.PES2MP4SCALEFACTOR = 4;
  this.PES_TIMESCALE = 90000;
  this.MP4_TIMESCALE = this.PES_TIMESCALE / this.PES2MP4SCALEFACTOR;
  // Expected DTS (90 kHz ticks) of the next AVC sample.
  // NOTE(review): the 90300 seed looks like one H264_TIMEBASE frame past
  // 86700... its derivation isn't visible here — confirm before changing.
  this.nextAvcDts = 90300;
  // Fixed per-sample duration used by remuxVideo_2: 3600 ticks @ 90 kHz,
  // i.e. a constant 25 fps assumption.
  this.H264_TIMEBASE = 3600;
}
| | | |
// This remuxer always repackages elementary streams into fMP4 fragments;
// raw container passthrough is never supported.
get passthrough() {
  return false;
}
| | | |
destroy() {
  // Intentionally empty: this remuxer holds no timers, sockets or buffers
  // that need releasing; the method exists to satisfy the remuxer interface.
}
| | | |
| | | insertDiscontinuity() { |
| | | this._initPTS = this._initDTS = undefined; |
| | | } |
| | | |
switchLevel() {
  // A new quality level needs a fresh init segment (codec/dimensions may
  // differ), so force generateIS/generateVideoIS to run again.
  this.ISGenerated = false;
}
| | | |
| | | pushVideo(level, sn, videoTrack, timeOffset, contiguous) { |
| | | this.level = level; |
| | | this.sn = sn; |
| | | let videoData; |
| | | // generate Init Segment if needed |
| | | if (!this.ISGenerated) { |
| | | this.generateVideoIS(videoTrack, timeOffset); |
| | | } |
| | | if (this.ISGenerated) { |
| | | // if (videoTrack.samples.length) { |
| | | this.remuxVideo_2(videoTrack, timeOffset, contiguous); |
| | | // } |
| | | } |
| | | } |
| | | |
/**
 * Remux a live AVC track into one moof+mdat fragment using a FIXED
 * per-sample duration (this.H264_TIMEBASE) rather than measured DTS deltas,
 * then fire Event.FRAG_PARSING_DATA with the result.
 * NOTE(review): several locals (pesTimeScale, pes2mp4ScaleFactor,
 * mp4SampleDuration, firstPTS, firstDTS, nextDTS, mp4Sample, and the
 * computed sampleDuration) are never used in the emitted output — they look
 * like leftovers from remuxVideo; confirm before removing.
 * @param track {object} demuxed AVC track (samples, len, nbNalu, dropped)
 * @param timeOffset {number} unused here
 * @param contiguous {boolean} unused here (the contiguity check below is commented out)
 * @param audioTrackLength {number} unused here
 * @return {object} the payload passed to FRAG_PARSING_DATA
 */
remuxVideo_2(track, timeOffset, contiguous, audioTrackLength) {
  var offset = 8, // write cursor into mdat; first 8 bytes are size + 'mdat' tag
    pesTimeScale = this.PES_TIMESCALE,
    pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
    mp4SampleDuration,
    mdat, moof,
    firstPTS, firstDTS,
    nextDTS,
    inputSamples = track.samples,
    outputSamples = [];

  /* concatenate the video data and construct the mdat in place
    (need 8 more bytes to fill length and mpdat type) */
  // Each NALU gains a 4-byte length prefix, hence the 4 * nbNalu extra bytes.
  mdat = new Uint8Array(track.len + (4 * track.nbNalu) + 8);
  let view = new DataView(mdat.buffer);
  view.setUint32(0, mdat.byteLength);
  mdat.set(MP4.types.mdat, 4);
  var sampleDuration = 0;
  let ptsnorm, dtsnorm, mp4Sample, lastDTS;

  for (let i = 0; i < inputSamples.length; i++) {
    let avcSample = inputSamples[i],
      mp4SampleLength = 0,
      compositionTimeOffset;
    // convert NALU bitstream to MP4 format (prepend NALU with size field)
    while (avcSample.units.units.length) {
      let unit = avcSample.units.units.shift();
      view.setUint32(offset, unit.data.byteLength);
      offset += 4;
      mdat.set(unit.data, offset);
      offset += unit.data.byteLength;
      mp4SampleLength += 4 + unit.data.byteLength;
    }

    // Rebase timestamps onto the stream origin captured by generateVideoIS.
    let pts = avcSample.pts - this._initPTS;
    let dts = avcSample.dts - this._initDTS;
    // Decode time must never exceed presentation time.
    dts = Math.min(pts, dts);

    if (lastDTS !== undefined) {
      // Not the first sample: normalize against the previous DTS so the
      // sequence stays monotonic across 33-bit PTS wraparound.
      ptsnorm = this._PTSNormalize(pts, lastDTS);
      dtsnorm = this._PTSNormalize(dts, lastDTS);
      sampleDuration = (dtsnorm - lastDTS)
      if (sampleDuration <= 0) {
        logger.log(`invalid sample duration at PTS/DTS: ${avcSample.pts}/${avcSample.dts}|dts norm: ${dtsnorm}|lastDTS: ${lastDTS}:${sampleDuration}`);
        sampleDuration = 1;
      }
    } else {
      // First sample of the fragment: normalize against the expected next
      // DTS and snap small (<600 tick) holes/overlaps onto it.
      var nextAvcDts = this.nextAvcDts, delta;
      ptsnorm = this._PTSNormalize(pts, nextAvcDts);
      dtsnorm = this._PTSNormalize(dts, nextAvcDts);
      if (nextAvcDts) {
        delta = Math.round((dtsnorm - nextAvcDts));
        if (/*contiguous ||*/ Math.abs(delta) < 600) {
          if (delta) {
            if (delta > 1) {
              logger.log(`AVC:${delta} ms hole between fragments detected,filling it`);
            } else if (delta < -1) {
              logger.log(`AVC:${(-delta)} ms overlapping between fragments detected`);
            }
            // Snap DTS to the expected value; keep PTS >= DTS.
            dtsnorm = nextAvcDts;
            ptsnorm = Math.max(ptsnorm - delta, dtsnorm);
            logger.log(`Video/PTS/DTS adjusted: ${ptsnorm}/${dtsnorm},delta:${delta}`);
          }
        }
      }
      this.firstPTS = Math.max(0, ptsnorm);
      this.firstDTS = Math.max(0, dtsnorm);
      sampleDuration = 0.03;
    }

    // Emit the sample with the constant H264_TIMEBASE duration; keyframes
    // are flagged as sync samples (dependsOn=2, isNonSync=0).
    outputSamples.push({
      size: mp4SampleLength,
      duration: this.H264_TIMEBASE,
      cts: 0,
      flags: {
        isLeading: 0,
        isDependedOn: 0,
        hasRedundancy: 0,
        degradPrio: 0,
        dependsOn: avcSample.key ? 2 : 1,
        isNonSync: avcSample.key ? 0 : 1
      }
    });
    lastDTS = dtsnorm;

  }

  // Copy the second-to-last sample's duration onto the first sample.
  // NOTE(review): with every duration hard-coded to H264_TIMEBASE this is a
  // no-op; it mirrors the last-sample-duration trick used in remuxAudio.
  var lastSampleDuration = 0;
  if (outputSamples.length >= 2) {
    lastSampleDuration = outputSamples[outputSamples.length - 2].duration;
    outputSamples[0].duration = lastSampleDuration;
  }
  // dtsnorm still holds the LAST sample's normalized DTS here.
  this.nextAvcDts = dtsnorm + lastSampleDuration;
  let dropped = track.dropped;
  track.len = 0;
  track.nbNalu = 0;
  track.dropped = 0;
  if (outputSamples.length && navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
    let flags = outputSamples[0].flags;
    // Chrome workaround: mark the first sample as a random access point so
    // SourceBuffer appends don't fail (same trick as in remuxVideo).
    flags.dependsOn = 2;
    flags.isNonSync = 0;
  }
  track.samples = outputSamples;
  moof = MP4.moof(track.sequenceNumber++, dtsnorm, track);
  track.samples = [];

  // start/end PTS/DTS all report the LAST sample's normalized timestamps.
  let data = {
    id: this.id,
    level: this.level,
    sn: this.sn,
    data1: moof,
    data2: mdat,
    startPTS: ptsnorm,
    endPTS: ptsnorm,
    startDTS: dtsnorm,
    endDTS: dtsnorm,
    type: 'video',
    nb: outputSamples.length,
    dropped: dropped
  };

  this.observer.trigger(Event.FRAG_PARSING_DATA, data);
  return data;
}
| | | |
| | | generateVideoIS(videoTrack, timeOffset) { |
| | | var observer = this.observer, |
| | | videoSamples = videoTrack.samples, |
| | | pesTimeScale = this.PES_TIMESCALE, |
| | | tracks = {}, |
| | | data = { id: this.id, level: this.level, sn: this.sn, tracks: tracks, unique: false }, |
| | | computePTSDTS = (this._initPTS === undefined), |
| | | initPTS, initDTS; |
| | | |
| | | if (computePTSDTS) { |
| | | initPTS = initDTS = Infinity; |
| | | } |
| | | |
| | | if (videoTrack.sps && videoTrack.pps && videoSamples.length) { |
| | | videoTrack.timescale = 90000;//this.MP4_TIMESCALE; |
| | | tracks.video = { |
| | | container: 'video/mp4', |
| | | codec: videoTrack.codec, |
| | | initSegment: MP4.initSegment([videoTrack]), |
| | | metadata: { |
| | | width: videoTrack.width, |
| | | height: videoTrack.height |
| | | } |
| | | }; |
| | | if (computePTSDTS) { |
| | | initPTS = Math.min(initPTS, videoSamples[0].pts - this.H264_TIMEBASE); |
| | | initDTS = Math.min(initDTS, videoSamples[0].dts - this.H264_TIMEBASE); |
| | | } |
| | | } |
| | | |
| | | if (Object.keys(tracks).length) { |
| | | observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, data); |
| | | this.ISGenerated = true; |
| | | if (computePTSDTS) { |
| | | this._initPTS = initPTS; |
| | | this._initDTS = initDTS; |
| | | } |
| | | } else { |
| | | console.log("generateVideoIS ERROR==> ", ErrorTypes.MEDIA_ERROR); |
| | | } |
| | | } |
| | | |
| | | remux(level, sn, audioTrack, videoTrack, id3Track, textTrack, timeOffset, contiguous) { |
| | | this.level = level; |
| | | this.sn = sn; |
| | | // generate Init Segment if needed |
| | | if (!this.ISGenerated) { |
| | | this.generateIS(audioTrack, videoTrack, timeOffset); |
| | | } |
| | | |
| | | if (this.ISGenerated) { |
| | | // Purposefully remuxing audio before video, so that remuxVideo can use nextAacPts, which is |
| | | // calculated in remuxAudio. |
| | | //logger.log('nb AAC samples:' + audioTrack.samples.length); |
| | | if (audioTrack.samples.length) { |
| | | let audioData = this.remuxAudio(audioTrack, timeOffset, contiguous); |
| | | //logger.log('nb AVC samples:' + videoTrack.samples.length); |
| | | if (videoTrack.samples.length) { |
| | | let audioTrackLength; |
| | | if (audioData) { |
| | | audioTrackLength = audioData.endPTS - audioData.startPTS; |
| | | } |
| | | this.remuxVideo(videoTrack, timeOffset, contiguous, audioTrackLength); |
| | | } |
| | | } else { |
| | | let videoData; |
| | | //logger.log('nb AVC samples:' + videoTrack.samples.length); |
| | | if (videoTrack.samples.length) { |
| | | videoData = this.remuxVideo(videoTrack, timeOffset, contiguous); |
| | | } |
| | | if (videoData && audioTrack.codec) { |
| | | this.remuxEmptyAudio(audioTrack, timeOffset, contiguous, videoData); |
| | | } |
| | | } |
| | | } |
| | | //logger.log('nb ID3 samples:' + audioTrack.samples.length); |
| | | if (id3Track.samples.length) { |
| | | this.remuxID3(id3Track, timeOffset); |
| | | } |
| | | //logger.log('nb ID3 samples:' + audioTrack.samples.length); |
| | | if (textTrack.samples.length) { |
| | | this.remuxText(textTrack, timeOffset); |
| | | } |
| | | //notify end of parsing |
| | | this.observer.trigger(Event.FRAG_PARSED, { id: this.id, level: this.level, sn: this.sn }); |
| | | } |
| | | |
| | | generateIS(audioTrack, videoTrack, timeOffset) { |
| | | var observer = this.observer, |
| | | audioSamples = audioTrack.samples, |
| | | videoSamples = videoTrack.samples, |
| | | pesTimeScale = this.PES_TIMESCALE, |
| | | tracks = {}, |
| | | data = { id: this.id, level: this.level, sn: this.sn, tracks: tracks, unique: false }, |
| | | computePTSDTS = (this._initPTS === undefined), |
| | | initPTS, initDTS; |
| | | |
| | | if (computePTSDTS) { |
| | | initPTS = initDTS = Infinity; |
| | | } |
| | | if (audioTrack.config && audioSamples.length) { |
| | | audioTrack.timescale = audioTrack.audiosamplerate; |
| | | // MP4 duration (track duration in seconds multiplied by timescale) is coded on 32 bits |
| | | // we know that each AAC sample contains 1024 frames.... |
| | | // in order to avoid overflowing the 32 bit counter for large duration, we use smaller timescale (timescale/gcd) |
| | | // we just need to ensure that AAC sample duration will still be an integer (will be 1024/gcd) |
| | | if (audioTrack.timescale * audioTrack.duration > Math.pow(2, 32)) { |
| | | let greatestCommonDivisor = function (a, b) { |
| | | if (!b) { |
| | | return a; |
| | | } |
| | | return greatestCommonDivisor(b, a % b); |
| | | }; |
| | | audioTrack.timescale = audioTrack.audiosamplerate / greatestCommonDivisor(audioTrack.audiosamplerate, 1024); |
| | | } |
| | | logger.log('audio mp4 timescale :' + audioTrack.timescale); |
| | | tracks.audio = { |
| | | container: 'audio/mp4', |
| | | codec: audioTrack.codec, |
| | | initSegment: MP4.initSegment([audioTrack]), |
| | | metadata: { |
| | | channelCount: audioTrack.channelCount |
| | | } |
| | | }; |
| | | if (computePTSDTS) { |
| | | // remember first PTS of this demuxing context. for audio, PTS + DTS ... |
| | | initPTS = initDTS = audioSamples[0].pts - pesTimeScale * timeOffset; |
| | | } |
| | | } |
| | | |
| | | if (videoTrack.sps && videoTrack.pps && videoSamples.length) { |
| | | videoTrack.timescale = this.MP4_TIMESCALE; |
| | | tracks.video = { |
| | | container: 'video/mp4', |
| | | codec: videoTrack.codec, |
| | | initSegment: MP4.initSegment([videoTrack]), |
| | | metadata: { |
| | | width: videoTrack.width, |
| | | height: videoTrack.height |
| | | } |
| | | }; |
| | | if (computePTSDTS) { |
| | | initPTS = Math.min(initPTS, videoSamples[0].pts - pesTimeScale * timeOffset); |
| | | initDTS = Math.min(initDTS, videoSamples[0].dts - pesTimeScale * timeOffset); |
| | | } |
| | | } |
| | | |
| | | if (Object.keys(tracks).length) { |
| | | observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, data); |
| | | this.ISGenerated = true; |
| | | if (computePTSDTS) { |
| | | this._initPTS = initPTS; |
| | | this._initDTS = initDTS; |
| | | } |
| | | } else { |
| | | observer.trigger(Event.ERROR, { type: ErrorTypes.MEDIA_ERROR, id: this.id, details: ErrorDetails.FRAG_PARSING_ERROR, fatal: false, reason: 'no audio/video samples found' }); |
| | | } |
| | | } |
| | | |
/**
 * Remux a demuxed AVC track (HLS path) into one moof+mdat fragment and fire
 * Event.FRAG_PARSING_DATA. Normalizes 33-bit PTS/DTS against the previous
 * fragment, heals inter-fragment holes/overlaps when contiguous, and on
 * Safari forces a constant sample duration.
 * @param track {object} demuxed AVC track (samples, len, nbNalu, dropped, sequenceNumber)
 * @param timeOffset {number} fragment start time in seconds (used when not contiguous)
 * @param contiguous {boolean} whether this fragment follows the previous one
 * @param audioTrackLength {number|undefined} remuxed audio duration in seconds,
 *        used to stretch the last video frame when stretchShortVideoTrack is set
 * @return {object} the payload passed to FRAG_PARSING_DATA
 */
remuxVideo(track, timeOffset, contiguous, audioTrackLength) {
  var offset = 8, // write cursor into mdat; first 8 bytes are size + 'mdat' tag
    pesTimeScale = this.PES_TIMESCALE,
    pes2mp4ScaleFactor = this.PES2MP4SCALEFACTOR,
    mp4SampleDuration,
    mdat, moof,
    firstPTS, firstDTS,
    nextDTS,
    lastPTS, lastDTS,
    inputSamples = track.samples,
    outputSamples = [];

  // PTS is coded on 33bits, and can loop from -2^32 to 2^32
  // PTSNormalize will make PTS/DTS value monotonic, we use last known DTS value as reference value
  let nextAvcDts;
  if (contiguous) {
    // if parsed fragment is contiguous with last one, let's use last DTS value as reference
    nextAvcDts = this.nextAvcDts;
  } else {
    // if not contiguous, let's use target timeOffset
    nextAvcDts = timeOffset * pesTimeScale;
  }

  // compute first DTS and last DTS, normalize them against reference value
  let sample = inputSamples[0];
  firstDTS = Math.max(this._PTSNormalize(sample.dts, nextAvcDts) - this._initDTS, 0);
  firstPTS = Math.max(this._PTSNormalize(sample.pts, nextAvcDts) - this._initDTS, 0);

  // check timestamp continuity accross consecutive fragments (this is to remove inter-fragment gap/hole)
  let delta = Math.round((firstDTS - nextAvcDts) / 90);
  // if fragment are contiguous, detect hole/overlapping between fragments
  if (contiguous) {
    if (delta) {
      if (delta > 1) {
        logger.log(`AVC:${delta} ms hole between fragments detected,filling it`);
      } else if (delta < -1) {
        logger.log(`AVC:${(-delta)} ms overlapping between fragments detected`);
      }
      // remove hole/gap : set DTS to next expected DTS
      firstDTS = nextAvcDts;
      inputSamples[0].dts = firstDTS + this._initDTS;
      // offset PTS as well, ensure that PTS is smaller or equal than new DTS
      // NOTE(review): delta is in milliseconds while firstPTS is in 90 kHz
      // ticks — mixed units inherited from upstream; confirm intent.
      firstPTS = Math.max(firstPTS - delta, nextAvcDts);
      inputSamples[0].pts = firstPTS + this._initDTS;
      logger.log(`Video/PTS/DTS adjusted: ${firstPTS}/${firstDTS},delta:${delta}`);
    }
  }
  nextDTS = firstDTS;

  // compute lastPTS/lastDTS
  sample = inputSamples[inputSamples.length - 1];
  lastDTS = Math.max(this._PTSNormalize(sample.dts, nextAvcDts) - this._initDTS, 0);
  lastPTS = Math.max(this._PTSNormalize(sample.pts, nextAvcDts) - this._initDTS, 0);
  lastPTS = Math.max(lastPTS, lastDTS);

  // Safari detection: Apple vendor string without Chrome-on-iOS marker.
  let vendor = navigator.vendor, userAgent = navigator.userAgent,
    isSafari = vendor && vendor.indexOf('Apple') > -1 && userAgent && !userAgent.match('CriOS');

  // on Safari let's signal the same sample duration for all samples
  // sample duration (as expected by trun MP4 boxes), should be the delta between sample DTS
  // set this constant duration as being the avg delta between consecutive DTS.
  if (isSafari) {
    mp4SampleDuration = Math.round((lastDTS - firstDTS) / (pes2mp4ScaleFactor * (inputSamples.length - 1)));
  }

  // normalize all PTS/DTS now ...
  for (let i = 0; i < inputSamples.length; i++) {
    let sample = inputSamples[i];
    if (isSafari) {
      // sample DTS is computed using a constant decoding offset (mp4SampleDuration) between samples
      sample.dts = firstDTS + i * pes2mp4ScaleFactor * mp4SampleDuration;
    } else {
      // ensure sample monotonic DTS
      sample.dts = Math.max(this._PTSNormalize(sample.dts, nextAvcDts) - this._initDTS, firstDTS);
      // ensure dts is a multiple of scale factor to avoid rounding issues
      sample.dts = Math.round(sample.dts / pes2mp4ScaleFactor) * pes2mp4ScaleFactor;
    }
    // we normalize PTS against nextAvcDts, we also substract initDTS (some streams don't start @ PTS O)
    // and we ensure that computed value is greater or equal than sample DTS
    sample.pts = Math.max(this._PTSNormalize(sample.pts, nextAvcDts) - this._initDTS, sample.dts);
    // ensure pts is a multiple of scale factor to avoid rounding issues
    sample.pts = Math.round(sample.pts / pes2mp4ScaleFactor) * pes2mp4ScaleFactor;

  }

  /* concatenate the video data and construct the mdat in place
    (need 8 more bytes to fill length and mpdat type) */
  // Each NALU gains a 4-byte length prefix, hence the 4 * nbNalu extra bytes.
  mdat = new Uint8Array(track.len + (4 * track.nbNalu) + 8);
  let view = new DataView(mdat.buffer);
  view.setUint32(0, mdat.byteLength);
  mdat.set(MP4.types.mdat, 4);

  for (let i = 0; i < inputSamples.length; i++) {
    let avcSample = inputSamples[i],
      mp4SampleLength = 0,
      compositionTimeOffset;
    // convert NALU bitstream to MP4 format (prepend NALU with size field)
    while (avcSample.units.units.length) {
      let unit = avcSample.units.units.shift();
      view.setUint32(offset, unit.data.byteLength);
      offset += 4;
      mdat.set(unit.data, offset);
      offset += unit.data.byteLength;
      mp4SampleLength += 4 + unit.data.byteLength;
    }

    if (!isSafari) {
      // expected sample duration is the Decoding Timestamp diff of consecutive samples
      if (i < inputSamples.length - 1) {
        mp4SampleDuration = inputSamples[i + 1].dts - avcSample.dts;
      } else {
        // Last sample: derive duration from the previous sample's DTS delta.
        let config = this.config,
          lastFrameDuration = avcSample.dts - inputSamples[i > 0 ? i - 1 : i].dts;
        if (config.stretchShortVideoTrack) {
          // In some cases, a segment's audio track duration may exceed the video track duration.
          // Since we've already remuxed audio, and we know how long the audio track is, we look to
          // see if the delta to the next segment is longer than the minimum of maxBufferHole and
          // maxSeekHole. If so, playback would potentially get stuck, so we artificially inflate
          // the duration of the last frame to minimize any potential gap between segments.
          let maxBufferHole = config.maxBufferHole,
            maxSeekHole = config.maxSeekHole,
            gapTolerance = Math.floor(Math.min(maxBufferHole, maxSeekHole) * pesTimeScale),
            deltaToFrameEnd = (audioTrackLength ? firstPTS + audioTrackLength * pesTimeScale : this.nextAacPts) - avcSample.pts;
          if (deltaToFrameEnd > gapTolerance) {
            // We subtract lastFrameDuration from deltaToFrameEnd to try to prevent any video
            // frame overlap. maxBufferHole/maxSeekHole should be >> lastFrameDuration anyway.
            mp4SampleDuration = deltaToFrameEnd - lastFrameDuration;
            if (mp4SampleDuration < 0) {
              mp4SampleDuration = lastFrameDuration;
            }
            logger.log(`It is approximately ${deltaToFrameEnd / 90} ms to the next segment; using duration ${mp4SampleDuration / 90} ms for the last video frame.`);
          } else {
            mp4SampleDuration = lastFrameDuration;
          }
        } else {
          mp4SampleDuration = lastFrameDuration;
        }
      }
      // Convert PES ticks to MP4 timescale units.
      mp4SampleDuration /= pes2mp4ScaleFactor;
      compositionTimeOffset = Math.round((avcSample.pts - avcSample.dts) / pes2mp4ScaleFactor);
    } else {
      // Safari: quantize the composition offset to the constant duration grid.
      compositionTimeOffset = Math.max(0, mp4SampleDuration * Math.round((avcSample.pts - avcSample.dts) / (pes2mp4ScaleFactor * mp4SampleDuration)));
    }
    outputSamples.push({
      size: mp4SampleLength,
      // constant duration
      duration: mp4SampleDuration,
      cts: compositionTimeOffset,
      flags: {
        isLeading: 0,
        isDependedOn: 0,
        hasRedundancy: 0,
        degradPrio: 0,
        dependsOn: avcSample.key ? 2 : 1,
        isNonSync: avcSample.key ? 0 : 1
      }
    });
  }
  // next AVC sample DTS should be equal to last sample DTS + last sample duration (in PES timescale)
  this.nextAvcDts = lastDTS + mp4SampleDuration * pes2mp4ScaleFactor;
  let dropped = track.dropped;
  track.len = 0;
  track.nbNalu = 0;
  track.dropped = 0;
  if (outputSamples.length && navigator.userAgent.toLowerCase().indexOf('chrome') > -1) {
    let flags = outputSamples[0].flags;
    // chrome workaround, mark first sample as being a Random Access Point to avoid sourcebuffer append issue
    // https://code.google.com/p/chromium/issues/detail?id=229412
    flags.dependsOn = 2;
    flags.isNonSync = 0;
  }
  track.samples = outputSamples;
  moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
  track.samples = [];

  let data = {
    id: this.id,
    level: this.level,
    sn: this.sn,
    data1: moof,
    data2: mdat,
    startPTS: firstPTS / pesTimeScale,
    endPTS: (lastPTS + pes2mp4ScaleFactor * mp4SampleDuration) / pesTimeScale,
    // NOTE(review): start/endDTS intentionally mirror the PTS values here;
    // the DTS-based variants are kept below, commented out.
    startDTS: firstPTS / pesTimeScale,
    endDTS: (lastPTS + pes2mp4ScaleFactor * mp4SampleDuration) / pesTimeScale,
    // startDTS: firstDTS / pesTimeScale,
    // endDTS: this.nextAvcDts / pesTimeScale,
    type: 'video',
    nb: outputSamples.length,
    dropped: dropped
  };
  this.observer.trigger(Event.FRAG_PARSING_DATA, data);
  return data;
}
| | | |
/**
 * Remux a demuxed AAC track into one moof+mdat fragment and fire
 * Event.FRAG_PARSING_DATA. Sorts samples by PTS, drops overlapping frames,
 * injects silent frames into gaps, then builds the mdat and sample table.
 * @param track {object} demuxed AAC track (samples, len, timescale, audiosamplerate, channelCount)
 * @param timeOffset {number} fragment start time in seconds (used when not contiguous)
 * @param contiguous {boolean} whether this fragment follows the previous one
 * @return {object|null} the payload passed to FRAG_PARSING_DATA, null if no samples survived
 */
remuxAudio(track, timeOffset, contiguous) {
  let pesTimeScale = this.PES_TIMESCALE,
    mp4timeScale = track.timescale,
    pes2mp4ScaleFactor = pesTimeScale / mp4timeScale,
    // one AAC access unit always carries 1024 PCM frames
    expectedSampleDuration = track.timescale * 1024 / track.audiosamplerate;
  var view,
    offset = 8, // write cursor into mdat; first 8 bytes are size + 'mdat' tag
    aacSample, mp4Sample,
    unit,
    mdat, moof,
    firstPTS, firstDTS, lastDTS,
    pts, dts, ptsnorm, dtsnorm,
    samples = [],
    samples0 = [];

  track.samples.sort(function (a, b) {
    return (a.pts - b.pts);
  });
  samples0 = track.samples;

  let nextAacPts = (contiguous ? this.nextAacPts : timeOffset * pesTimeScale);

  // If the audio track is missing samples, the frames seem to get "left-shifted" within the
  // resulting mp4 segment, causing sync issues and leaving gaps at the end of the audio segment.
  // In an effort to prevent this from happening, we inject frames here where there are gaps.
  // When possible, we inject a silent frame; when that's not possible, we duplicate the last
  // frame.
  let firstPtsNorm = this._PTSNormalize(samples0[0].pts - this._initPTS, nextAacPts),
    pesFrameDuration = expectedSampleDuration * pes2mp4ScaleFactor;
  var nextPtsNorm = firstPtsNorm + pesFrameDuration;
  // NOTE: samples0 is spliced while being iterated; `i` is advanced manually
  // only on the branches that keep or insert frames.
  for (var i = 1; i < samples0.length;) {
    // First, let's see how far off this frame is from where we expect it to be
    var sample = samples0[i],
      ptsNorm = this._PTSNormalize(sample.pts - this._initPTS, nextAacPts),
      delta = ptsNorm - nextPtsNorm;

    // If we're overlapping by more than half a duration, drop this sample
    if (delta < (-0.5 * pesFrameDuration)) {
      logger.log(`Dropping frame due to ${Math.abs(delta / 90)} ms overlap.`);
      samples0.splice(i, 1);
      track.len -= sample.unit.length;
      // Don't touch nextPtsNorm or i
    }
    // Otherwise, if we're more than half a frame away from where we should be, insert missing frames
    else if (delta > (0.5 * pesFrameDuration)) {
      var missing = Math.round(delta / pesFrameDuration);
      logger.log(`Injecting ${missing} frame${missing > 1 ? 's' : ''} of missing audio due to ${Math.round(delta / 90)} ms gap.`);
      for (var j = 0; j < missing; j++) {
        var newStamp = samples0[i - 1].pts + pesFrameDuration,
          fillFrame = AAC.getSilentFrame(track.channelCount);
        if (!fillFrame) {
          logger.log('Unable to get silent frame for given audio codec; duplicating last frame instead.');
          fillFrame = sample.unit.slice(0);
        }
        samples0.splice(i, 0, { unit: fillFrame, pts: newStamp, dts: newStamp });
        track.len += fillFrame.length;
        i += 1;
      }

      // Adjust sample to next expected pts
      nextPtsNorm += (missing + 1) * pesFrameDuration;
      sample.pts = samples0[i - 1].pts + pesFrameDuration;
      i += 1;
    }
    // Otherwise, we're within half a frame duration, so just adjust pts
    else {
      if (Math.abs(delta) > (0.1 * pesFrameDuration)) {
        logger.log(`Invalid frame delta ${ptsNorm - nextPtsNorm + pesFrameDuration} at PTS ${Math.round(ptsNorm / 90)} (should be ${pesFrameDuration}).`);
      }
      nextPtsNorm += pesFrameDuration;
      sample.pts = samples0[i - 1].pts + pesFrameDuration;
      i += 1;
    }
  }

  // Second pass: copy each unit into the mdat and build the trun sample list.
  while (samples0.length) {
    aacSample = samples0.shift();
    unit = aacSample.unit;
    pts = aacSample.pts - this._initDTS;
    dts = aacSample.dts - this._initDTS;
    //logger.log(`Audio/PTS:${Math.round(pts/90)}`);
    // if not first sample
    if (lastDTS !== undefined) {
      ptsnorm = this._PTSNormalize(pts, lastDTS);
      dtsnorm = this._PTSNormalize(dts, lastDTS);
      // Duration of the PREVIOUS sample is the DTS delta to this one.
      mp4Sample.duration = (dtsnorm - lastDTS) / pes2mp4ScaleFactor;
    } else {
      ptsnorm = this._PTSNormalize(pts, nextAacPts);
      dtsnorm = this._PTSNormalize(dts, nextAacPts);
      let delta = Math.round(1000 * (ptsnorm - nextAacPts) / pesTimeScale);
      // if fragment are contiguous, detect hole/overlapping between fragments
      if (contiguous) {
        // log delta
        if (delta) {
          if (delta > 0) {
            logger.log(`${delta} ms hole between AAC samples detected,filling it`);
            // if we have frame overlap, overlapping for more than half a frame duraion
          } else if (delta < -12) {
            // drop overlapping audio frames... browser will deal with it
            logger.log(`${(-delta)} ms overlapping between AAC samples detected, drop frame`);
            track.len -= unit.byteLength;
            continue;
          }
          // set PTS/DTS to expected PTS/DTS
          ptsnorm = dtsnorm = nextAacPts;
        }
      }
      // remember first PTS of our aacSamples, ensure value is positive
      firstPTS = Math.max(0, ptsnorm);
      firstDTS = Math.max(0, dtsnorm);
      if (track.len > 0) {
        /* concatenate the audio data and construct the mdat in place
          (need 8 more bytes to fill length and mdat type) */
        mdat = new Uint8Array(track.len + 8);
        view = new DataView(mdat.buffer);
        view.setUint32(0, mdat.byteLength);
        mdat.set(MP4.types.mdat, 4);
      } else {
        // no audio samples
        return;
      }
    }
    mdat.set(unit, offset);
    offset += unit.byteLength;
    //console.log('PTS/DTS/initDTS/normPTS/normDTS/relative PTS : ${aacSample.pts}/${aacSample.dts}/${this._initDTS}/${ptsnorm}/${dtsnorm}/${(aacSample.pts/4294967296).toFixed(3)}');
    // NOTE(review): flags omit isNonSync/isDependedOn/etc.; MP4.trun reads
    // them as undefined, which coerces to 0 in its bitwise ops — presumably
    // intended, since every audio frame is a sync sample.
    mp4Sample = {
      size: unit.byteLength,
      cts: 0,
      duration: 0,
      flags: {
        isLeading: 0,
        isDependedOn: 0,
        hasRedundancy: 0,
        degradPrio: 0,
        dependsOn: 1,
      }
    };
    samples.push(mp4Sample);
    lastDTS = dtsnorm;
  }
  var lastSampleDuration = 0;
  var nbSamples = samples.length;
  //set last sample duration as being identical to previous sample
  if (nbSamples >= 2) {
    lastSampleDuration = samples[nbSamples - 2].duration;
    mp4Sample.duration = lastSampleDuration;
  }
  if (nbSamples) {
    // next aac sample PTS should be equal to last sample PTS + duration
    this.nextAacPts = ptsnorm + pes2mp4ScaleFactor * lastSampleDuration;
    //logger.log('Audio/PTS/PTSend:' + aacSample.pts.toFixed(0) + '/' + this.nextAacDts.toFixed(0));
    track.len = 0;
    track.samples = samples;
    moof = MP4.moof(track.sequenceNumber++, firstDTS / pes2mp4ScaleFactor, track);
    track.samples = [];
    let audioData = {
      id: this.id,
      level: this.level,
      sn: this.sn,
      data1: moof,
      data2: mdat,
      startPTS: firstPTS / pesTimeScale,
      endPTS: this.nextAacPts / pesTimeScale,
      startDTS: firstDTS / pesTimeScale,
      endDTS: (dtsnorm + pes2mp4ScaleFactor * lastSampleDuration) / pesTimeScale,
      type: 'audio',
      nb: nbSamples
    };
    this.observer.trigger(Event.FRAG_PARSING_DATA, audioData);
    return audioData;
  }
  return null;
}
| | | |
| | | remuxEmptyAudio(track, timeOffset, contiguous, videoData) { |
| | | let pesTimeScale = this.PES_TIMESCALE, |
| | | mp4timeScale = track.timescale ? track.timescale : track.audiosamplerate, |
| | | pes2mp4ScaleFactor = pesTimeScale / mp4timeScale, |
| | | |
| | | // sync with video's timestamp |
| | | startDTS = videoData.startDTS * pesTimeScale + this._initDTS, |
| | | endDTS = videoData.endDTS * pesTimeScale + this._initDTS, |
| | | |
| | | // one sample's duration value |
| | | sampleDuration = 1024, |
| | | frameDuration = pes2mp4ScaleFactor * sampleDuration, |
| | | |
| | | // samples count of this segment's duration |
| | | nbSamples = Math.ceil((endDTS - startDTS) / frameDuration), |
| | | |
| | | // silent frame |
| | | silentFrame = AAC.getSilentFrame(track.channelCount); |
| | | |
| | | // Can't remux if we can't generate a silent frame... |
| | | if (!silentFrame) { |
| | | logger.trace('Unable to remuxEmptyAudio since we were unable to get a silent frame for given audio codec!'); |
| | | return; |
| | | } |
| | | |
| | | let samples = []; |
| | | for (var i = 0; i < nbSamples; i++) { |
| | | var stamp = startDTS + i * frameDuration; |
| | | samples.push({ unit: silentFrame.slice(0), pts: stamp, dts: stamp }); |
| | | track.len += silentFrame.length; |
| | | } |
| | | track.samples = samples; |
| | | |
| | | this.remuxAudio(track, timeOffset, contiguous); |
| | | } |
| | | |
| | | remuxID3(track, timeOffset) { |
| | | var length = track.samples.length, sample; |
| | | // consume samples |
| | | if (length) { |
| | | for (var index = 0; index < length; index++) { |
| | | sample = track.samples[index]; |
| | | // setting id3 pts, dts to relative time |
| | | // using this._initPTS and this._initDTS to calculate relative time |
| | | sample.pts = ((sample.pts - this._initPTS) / this.PES_TIMESCALE); |
| | | sample.dts = ((sample.dts - this._initDTS) / this.PES_TIMESCALE); |
| | | } |
| | | this.observer.trigger(Event.FRAG_PARSING_METADATA, { |
| | | id: this.id, |
| | | level: this.level, |
| | | sn: this.sn, |
| | | samples: track.samples |
| | | }); |
| | | } |
| | | |
| | | track.samples = []; |
| | | timeOffset = timeOffset; |
| | | } |
| | | |
| | | remuxText(track, timeOffset) { |
| | | track.samples.sort(function (a, b) { |
| | | return (a.pts - b.pts); |
| | | }); |
| | | |
| | | var length = track.samples.length, sample; |
| | | // consume samples |
| | | if (length) { |
| | | for (var index = 0; index < length; index++) { |
| | | sample = track.samples[index]; |
| | | // setting text pts, dts to relative time |
| | | // using this._initPTS and this._initDTS to calculate relative time |
| | | sample.pts = ((sample.pts - this._initPTS) / this.PES_TIMESCALE); |
| | | } |
| | | this.observer.trigger(Event.FRAG_PARSING_USERDATA, { |
| | | id: this.id, |
| | | level: this.level, |
| | | sn: this.sn, |
| | | samples: track.samples |
| | | }); |
| | | } |
| | | |
| | | track.samples = []; |
| | | timeOffset = timeOffset; |
| | | } |
| | | |
| | | _PTSNormalize(value, reference) { |
| | | var offset; |
| | | if (reference === undefined) { |
| | | return value; |
| | | } |
| | | if (reference < value) { |
| | | // - 2^33 |
| | | offset = -8589934592; |
| | | } else { |
| | | // + 2^33 |
| | | offset = 8589934592; |
| | | } |
| | | /* PTS is 33bit (from 0 to 2^33 -1) |
| | | if diff between value and reference is bigger than half of the amplitude (2^32) then it means that |
| | | PTS looping occured. fill the gap */ |
| | | while (Math.abs(value - reference) > 4294967296) { |
| | | value += offset; |
| | | } |
| | | return value; |
| | | } |
| | | |
| | | } |
| | | |
| | | export default MP4Remuxer; |
| New file |
| | |
| | | /** |
| | | * passthrough remuxer |
| | | */ |
| | | import Event from '../events'; |
| | | |
/**
 * Remuxer that forwards already-muxed data to the consumer untouched.
 * It only synthesizes an init-segment description (codec/container/metadata
 * per track) the first time remux() runs, or again after switchLevel().
 */
class PassThroughRemuxer {
  constructor(observer, id) {
    this.observer = observer;
    this.id = id;
    // Whether the init segment event has already been emitted.
    this.ISGenerated = false;
  }

  get passthrough() {
    return true;
  }

  destroy() {
  }

  insertDiscontinuity() {
  }

  switchLevel() {
    // Force a fresh init segment after a quality-level switch.
    this.ISGenerated = false;
  }

  remux(audioTrack, videoTrack, id3Track, textTrack, timeOffset, rawData) {
    const observer = this.observer;

    // Emit the init segment description once.
    if (!this.ISGenerated) {
      const tracks = {};
      const initData = { id: this.id, tracks: tracks, unique: true };

      const videoCodec = videoTrack.codec;
      if (videoCodec) {
        tracks.video = {
          container: videoTrack.container,
          codec: videoCodec,
          metadata: {
            width: videoTrack.width,
            height: videoTrack.height
          }
        };
      }

      const audioCodec = audioTrack.codec;
      if (audioCodec) {
        tracks.audio = {
          container: audioTrack.container,
          codec: audioCodec,
          metadata: {
            channelCount: audioTrack.channelCount
          }
        };
      }

      this.ISGenerated = true;
      observer.trigger(Event.FRAG_PARSING_INIT_SEGMENT, initData);
    }

    // Pass the raw payload straight through.
    observer.trigger(Event.FRAG_PARSING_DATA, {
      id: this.id,
      data1: rawData,
      startPTS: timeOffset,
      startDTS: timeOffset,
      type: 'audiovideo',
      nb: 1,
      dropped: 0
    });
  }
}
| | | |
| | | export default PassThroughRemuxer; |
| New file |
| | |
| | | /* eslint-disable */ |
| | | /* |
| | | * H264 NAL Slicer |
| | | */ |
| | | import Event from '../events'; |
| | | import EventHandler from '../event-handler'; |
| | | import H264Demuxer from '../demux/h264-demuxer'; |
| | | |
/**
 * Splits a raw H.264 elementary stream into individual NAL units on
 * Annex-B start codes (00 00 01 / 00 00 00 01), carrying incomplete tail
 * bytes between chunks, and re-emits each complete unit as an
 * H264_DATA_PARSED event on the wfs bus.
 */
class SlicesReader extends EventHandler {

  constructor(wfs, config = null) {
    // Subscribe to raw H264 data events via the EventHandler base class.
    super(wfs, Event.H264_DATA_PARSING);

    // NOTE(review): this.wfs is read here before being assigned below —
    // presumably the EventHandler base constructor sets it; confirm.
    this.config = this.wfs.config || config;
    this.h264Demuxer = new H264Demuxer(wfs);
    this.wfs = wfs;
    // Unconsumed tail of the previous chunk (bytes after the last start code).
    this.lastBuf = null;
    // NAL units produced by the most recent _read() call.
    this.nals = [];
  }

  destroy() {
    this.lastBuf = null;
    this.nals = [];
    EventHandler.prototype.destroy.call(this);
  }

  // Scan `buffer` (with any carried-over tail prepended) for start codes,
  // fill this.nals with complete units, and stash the remainder in
  // this.lastBuf for the next chunk.
  _read(buffer) {
    var typedAr = null;
    this.nals = [];
    if (!buffer || buffer.byteLength < 1) return;
    if (this.lastBuf) {
      // Prepend the previous tail so NAL units spanning chunk boundaries
      // are recovered intact.
      typedAr = new Uint8Array(buffer.byteLength + this.lastBuf.length);
      typedAr.set(this.lastBuf);
      typedAr.set(new Uint8Array(buffer), this.lastBuf.length);
    } else {
      typedAr = new Uint8Array(buffer);
    }
    var lastNalEndPos = 0;
    var b1 = -1; // byte before one
    var b2 = -2; // byte before two
    var nalStartPos = new Array();
    // Scan two bytes per iteration; b2/b1 carry the previous pair so both
    // the 3-byte (00 00 01) and 4-byte (00 00 00 01) start codes are caught.
    for (var i = 0; i < typedAr.length; i += 2) {
      var b_0 = typedAr[i];
      var b_1 = typedAr[i + 1];
      if (b1 == 0 && b_0 == 0 && b_1 == 0) {
        nalStartPos.push(i - 1);
      } else if (b_1 == 1 && b_0 == 0 && b1 == 0 && b2 == 0) {
        nalStartPos.push(i - 2);
      }
      b2 = b_0;
      b1 = b_1;
    }
    if (nalStartPos.length > 1) {
      // Each complete unit runs from one start code up to (and including)
      // the first byte of the next start code.
      for (var i = 0; i < nalStartPos.length - 1; ++i) {
        this.nals.push(typedAr.subarray(nalStartPos[i], nalStartPos[i + 1] + 1));
        lastNalEndPos = nalStartPos[i + 1];
      }
    } else {
      // Zero or one start code found: nothing complete to emit yet.
      lastNalEndPos = nalStartPos[0];
    }
    if (lastNalEndPos != 0 && lastNalEndPos < typedAr.length) {
      // Keep everything from the last start code onward for the next call.
      this.lastBuf = typedAr.subarray(lastNalEndPos);
    } else {
      // No usable boundary: accumulate the whole chunk onto the tail.
      // (`!! !x` is just a convoluted logical-not of x.)
      if ( !! !this.lastBuf) {
        this.lastBuf = typedAr;
      }
      var _newBuf = new Uint8Array(this.lastBuf.length + buffer.byteLength);
      _newBuf.set(this.lastBuf);
      _newBuf.set(new Uint8Array(buffer), this.lastBuf.length);
      this.lastBuf = _newBuf;
    }
  }

  // Event-bus entry point: slice the incoming chunk, then re-emit each
  // extracted NAL unit individually.
  onH264DataParsing(event) {
    this._read(event.data);
    var $this = this;
    this.nals.forEach(function(nal) {
      $this.wfs.trigger(Event.H264_DATA_PARSED, {
        data: nal
      });
    });
  }
}
| | | |
| | | export default SlicesReader; |
| New file |
| | |
| | | /* eslint-disable */ |
| | | 'use strict'; |
| | | |
// Shared no-op used to silence disabled/unsupported log levels.
function noop() {}

// Silent logger: every level discards its arguments. Serves as both the
// initial logger and the fallback when enabling console logging fails.
// NOTE(review): exportLoggerFunctions() assigns level functions onto
// exportedLogger, which initially aliases this very object — enabling logs
// therefore overwrites fakeLogger's own methods. Confirm that re-disabling
// logs afterwards is not expected to restore full silence.
const fakeLogger = {
  trace: noop,
  debug: noop,
  log: noop,
  warn: noop,
  info: noop,
  error: noop
};

// Currently-active logger implementation; swapped by enableLogs().
let exportedLogger = fakeLogger;
| | | |
| | | //let lastCallTime; |
| | | // function formatMsgWithTimeInfo(type, msg) { |
| | | // const now = Date.now(); |
| | | // const diff = lastCallTime ? '+' + (now - lastCallTime) : '0'; |
| | | // lastCallTime = now; |
| | | // msg = (new Date(now)).toISOString() + ' | [' + type + '] > ' + msg + ' ( ' + diff + ' ms )'; |
| | | // return msg; |
| | | // } |
| | | |
// Prefix a log message with its level tag, e.g. "[warn] > message".
function formatMsg(type, msg) {
  return `[${type}] > ${msg}`;
}

// Build a print function for the given console level. Returns noop when the
// host console does not implement that level. The first argument (when
// truthy) is run through formatMsg; remaining arguments pass through as-is.
function consolePrintFn(type) {
  const func = window.console[type];
  if (!func) {
    return noop;
  }
  return (...args) => {
    if (args[0]) {
      args[0] = formatMsg(type, args[0]);
    }
    func.apply(window.console, args);
  };
}
| | | |
// For each requested level, install either the caller-supplied override
// from debugConfig (bound to it) or a console-backed printer onto the
// active exported logger.
function exportLoggerFunctions(debugConfig, ...functions) {
  for (const type of functions) {
    const override = debugConfig[type];
    exportedLogger[type] = override ? override.bind(debugConfig) : consolePrintFn(type);
  }
}
| | | |
// Enable logging: pass `true` for console output on all levels, or an
// object whose per-level functions override the console printers.
// Anything else disables logging entirely.
export var enableLogs = function(debugConfig) {
  const wantLogs = debugConfig === true || typeof debugConfig === 'object';
  if (!wantLogs) {
    exportedLogger = fakeLogger;
    return;
  }
  exportLoggerFunctions(
    debugConfig,
    // Remove entries from this list to hard-disable a log level
    // ('trace' is already left out).
    'debug',
    'log',
    'info',
    'warn',
    'error'
  );
  // Some browsers don't allow bind on the console object; fall back to
  // the silent logger if a bound method cannot actually be invoked.
  try {
    exportedLogger.log();
  } catch (e) {
    exportedLogger = fakeLogger;
  }
};
| | | |
// NOTE(review): `logger` is bound once, at module-evaluation time, to the
// object exportedLogger then references (fakeLogger). enableLogs() mutates
// that object's methods in place, so enabled levels remain visible here —
// but any later reassignment of exportedLogger itself would not be.
export var logger = exportedLogger;
| New file |
| | |
| | | /* eslint-disable */ |
// Polyfill ArrayBuffer.prototype.slice for legacy browsers that lack it.
// Copies bytes [start, end) into a freshly allocated buffer.
// NOTE(review): unlike the native spec, this shim does not clamp or
// normalize negative start/end indices — callers here only pass
// non-negative offsets. Confirm before reusing it elsewhere.
if (typeof ArrayBuffer !== 'undefined' && !ArrayBuffer.prototype.slice) {
  ArrayBuffer.prototype.slice = function (start, end) {
    var that = new Uint8Array(this);
    // Default `end` to the full byte length.
    if (end === undefined) {
      end = that.length;
    }
    var result = new ArrayBuffer(end - start);
    var resultArray = new Uint8Array(result);
    for (var i = 0; i < resultArray.length; i++) {
      resultArray[i] = that[i + start];
    }
    return result;
  };
}
| New file |
| | |
| | | /* eslint-disable */ |
/**
 * XMLHttpRequest-based loader with request timeout, progress reporting and
 * exponential-backoff retry on failure.
 *
 * Contract: load(context, config, callbacks)
 *   context   — { url, responseType, rangeStart?, rangeEnd? }
 *   config    — { timeout, retryDelay, maxRetry, maxRetryDelay }
 *   callbacks — { onSuccess, onError, onTimeout, onProgress? }
 */
class XhrLoader {

  constructor(config) {
    // Optional per-request hook (e.g. to add auth headers) supplied by host.
    if (config && config.xhrSetup) {
      this.xhrSetup = config.xhrSetup;
    }
  }

  destroy() {
    this.abort();
    this.loader = null;
  }

  abort() {
    var loader = this.loader;
    // readyState 4 (DONE) means there is nothing left to abort.
    if (loader && loader.readyState !== 4) {
      this.stats.aborted = true;
      loader.abort();
    }

    // Cancel any pending request timeout or scheduled retry.
    window.clearTimeout(this.requestTimeout);
    this.requestTimeout = null;
    window.clearTimeout(this.retryTimeout);
    this.retryTimeout = null;
  }

  // Issue a HEAD request for context.url and report only the
  // content-length response header via callbacks.onSuccess.
  // NOTE(review): there is no error/timeout path here — a failed HEAD
  // request never invokes any callback. Confirm callers tolerate that.
  loadHead(context, config, callbacks) {
    this.context = context;
    this.config = config;
    this.callbacks = callbacks;
    this.stats = {trequest: performance.now(), retry: 0};
    this.retryDelay = config.retryDelay;
    var xhr = new XMLHttpRequest;
    xhr.open('head', context.url);
    xhr.onload = function () {
      callbacks.onSuccess(xhr.getResponseHeader('content-length'));
    };
    xhr.send();
  }

  // Start a GET request for context.url, resetting retry state.
  load(context, config, callbacks) {
    this.context = context;
    this.config = config;
    this.callbacks = callbacks;
    this.stats = {trequest: performance.now(), retry: 0};
    this.retryDelay = config.retryDelay;
    this.loadInternal();
  }

  // Perform (or, on retry, re-perform) the actual XHR.
  loadInternal() {
    var xhr, context = this.context;
    // XDomainRequest: legacy IE cross-domain fallback.
    if (typeof XDomainRequest !== 'undefined') {
      xhr = this.loader = new XDomainRequest();
    } else {
      xhr = this.loader = new XMLHttpRequest();
    }
    xhr.onloadend = this.loadend.bind(this);
    xhr.onprogress = this.loadprogress.bind(this);
    xhr.open('GET', context.url, true);
    // Byte-range request when the caller asked for a slice of the resource.
    if (context.rangeEnd) {
      xhr.setRequestHeader('Range','bytes=' + context.rangeStart + '-' + (context.rangeEnd-1));
    }
    xhr.responseType = context.responseType;
    let stats = this.stats;
    stats.tfirst = 0;
    stats.loaded = 0;
    if (this.xhrSetup) {
      this.xhrSetup(xhr, context.url);
    }
    // setup timeout before we perform request
    this.requestTimeout = window.setTimeout(this.loadtimeout.bind(this), this.config.timeout);
    xhr.send();
  }

  // Handle completion: success, terminal failure, or scheduled retry.
  loadend(event) {
    var xhr = event.currentTarget,
      status = xhr.status,
      stats = this.stats,
      context = this.context,
      config = this.config;
    // don't proceed if xhr has been aborted
    if (stats.aborted) {
      return;
    }
    // in any case clear the current xhrs timeout
    window.clearTimeout(this.requestTimeout);

    // http status between 200 to 299 are all successful
    if (status >= 200 && status < 300) {
      stats.tload = Math.max(stats.tfirst,performance.now());
      let data,len;
      if (context.responseType === 'arraybuffer') {
        data = xhr.response;
        len = data.byteLength;
      } else {
        data = xhr.responseText;
        len = data.length;
      }
      stats.loaded = stats.total = len;
      let response = { url : xhr.responseURL, data : data };
      this.callbacks.onSuccess(response, stats, context);
    } else {
      // if max nb of retries reached or if http status between 400 and 499 (such error cannot be recovered, retrying is useless), return error
      // NOTE(review): `status < 499` makes 499 itself retryable, which
      // contradicts the comment above — confirm whether that is intended.
      if (stats.retry >= config.maxRetry || (status >= 400 && status < 499)) {
        this.callbacks.onError({ code : status, text : xhr.statusText}, context);
      } else {
        // retry: abort and reset internal state first
        this.destroy();
        // schedule retry
        this.retryTimeout = window.setTimeout(this.loadInternal.bind(this), this.retryDelay);
        // set exponential backoff, capped at config.maxRetryDelay
        this.retryDelay = Math.min(2 * this.retryDelay, config.maxRetryDelay);
        stats.retry++;
      }
    }
  }

  // Request exceeded config.timeout: report to the caller.
  loadtimeout() {
    this.callbacks.onTimeout(this.stats, this.context);
  }

  // Track first-byte time and bytes loaded; forward progress to the caller.
  loadprogress(event) {
    var stats = this.stats;
    if (stats.tfirst === 0) {
      stats.tfirst = Math.max(performance.now(), stats.trequest);
    }
    stats.loaded = event.loaded;
    if (event.lengthComputable) {
      stats.total = event.total;
    }
    let onProgress = this.callbacks.onProgress;
    if (onProgress) {
      // last args is to provide on progress data
      onProgress(stats, this.context, null);
    }
  }
}
| | | |
| | | export default XhrLoader; |
| New file |
| | |
| | | /** |
| | | * WFS interface, Jeff Yang 2016.10 |
| | | */ |
| | | /* eslint-disable */ |
| | | 'use strict'; |
| | | |
| | | import Event from './events'; |
| | | import FlowController from './controller/flow-controller'; |
| | | import BufferController from './controller/buffer-controller'; |
| | | import EventEmitter from 'events'; |
| | | // import XhrLoader from './utils/xhr-loader'; |
| | | import WebsocketLoader from './loader/websocket-loader'; |
| | | |
/**
 * Public WFS facade: wires the flow/buffer controllers and the websocket
 * loader onto a shared EventEmitter-based bus and exposes attach helpers.
 */
class Wfs {
  /** Version tag; '__VERSION__' is substituted by the build pipeline. */
  static get version() {
    // replaced with browserify-versionify transform
    return '__VERSION__' + 'v.0.0.0.1';
  }

  /** Whether MSE in this browser can play fMP4 with H.264 + AAC. */
  static isSupported() {
    const mediaSource = window.MediaSource;
    return (
      mediaSource &&
      typeof mediaSource.isTypeSupported === 'function' &&
      mediaSource.isTypeSupported(
        'video/mp4; codecs="avc1.42c01f,mp4a.40.2"'
      )
    );
  }

  static get Events() {
    return Event;
  }

  /** Lazily-built default configuration shared by all instances. */
  static get DefaultConfig() {
    if (!Wfs.defaultConfig) {
      Wfs.defaultConfig = {
        autoStartLoad: true,
        startPosition: -1,
        debug: false,
        fragLoadingTimeOut: 20000,
        fragLoadingMaxRetry: 6,
        fragLoadingRetryDelay: 1000,
        fragLoadingMaxRetryTimeout: 64000,
        fragLoadingLoopThreshold: 3,
        forceKeyFrameOnDiscontinuity: true,
        appendErrorMaxRetry: 3
      };
    }
    return Wfs.defaultConfig;
  }

  static set DefaultConfig(defaultConfig) {
    Wfs.defaultConfig = defaultConfig;
  }

  /**
   * @param {Object} [config] user options; any missing key is filled in
   *        from DefaultConfig (note: the passed object is mutated,
   *        matching historical behavior).
   */
  constructor(config = {}) {
    const defaults = Wfs.DefaultConfig;
    for (const key in defaults) {
      if (!(key in config)) {
        config[key] = defaults[key];
      }
    }
    this.config = config;

    // Event bus: trigger() re-emits with the event name prepended, so
    // handlers receive (eventName, ...payload).
    const observer = (this.observer = new EventEmitter());
    observer.trigger = (event, ...data) => {
      observer.emit(event, event, ...data);
    };
    observer.off = (event, ...data) => {
      observer.removeListener(event, ...data);
    };
    this.on = observer.on.bind(observer);
    this.off = observer.off.bind(observer);
    this.trigger = observer.trigger.bind(observer);

    this.flowController = new FlowController(this);
    this.bufferController = new BufferController(this);
    this.websocketLoader = new WebsocketLoader(this);
    this.mediaType = undefined;
    this.cameraInfo = {};
  }

  /** Tear down the controllers and the websocket loader. */
  destroy() {
    this.flowController.destroy();
    this.bufferController.destroy();
    this.websocketLoader.destroy();
  }

  /**
   * Bind a media element and announce MEDIA_ATTACHING on the bus.
   * @param {HTMLMediaElement} media
   * @param {string} [channelName='chX']
   * @param {string} [mediaType='H264Raw'] - 'H264Raw' or 'FMp4'
   * @param {string} [websocketName='ws']
   * @param {Object} [cameraInfo={}]
   */
  attachMedia(
    media,
    channelName = 'chX',
    mediaType = 'H264Raw',
    websocketName = 'ws',
    cameraInfo = {}
  ) {
    this.mediaType = mediaType;
    this.media = media;
    this.cameraInfo = cameraInfo;
    this.trigger(Event.MEDIA_ATTACHING, {
      media,
      channelName,
      mediaType,
      websocketName,
      cameraInfo
    });
  }

  /** Hand an already-open websocket to the loader pipeline. */
  attachWebsocket(websocket, channelName, cameraInfo) {
    this.trigger(Event.WEBSOCKET_ATTACHING, {
      websocket,
      mediaType: this.mediaType,
      channelName,
      cameraInfo
    });
  }
}
| | | |
| | | export default Wfs; |
| New file |
| | |
| | | <template> |
| | | <div style="width:100%; height: 100%;"> |
| | | <camera-player :cameraID="query.cameraId" :rtspUrl="query.rtspUrl" :isGb="query.gb28181 ==='1'"></camera-player> |
| | | </div> |
| | | </template> |
| | | |
| | | <script> |
// Usage of this standalone player page.
// Query parameters: cameraId — camera id; rtspUrl — the camera's RTSP address;
// gb28181 — whether the camera uses the GB28181 national standard ("1" or "0").
// Example: http://192.168.20.191:7003/view/cameraPlayer/index.html?cameraId=e7e6157a-5929-4e78-b390-e365141169c8&rtspUrl=rtsp://admin:a1234567@192.168.5.51:554/h264/ch1/main/av_stream
| | | |
| | | import CameraPlayer from "../components/player"; |
| | | |
export default {
  name: "BasicCameraPlayer",
  components: {
    CameraPlayer
  },
  data() {
    return {
      // Query parameters parsed from window.location.search.
      query: {
        cameraId: "",
        rtspUrl: "",
        gb28181: "0"
      }
    }
  },
  mounted() {
    this.urlParse();
  },
  methods: {
    // Parse window.location.search into this.query.
    // Fix: values containing '=' (possible in rtspUrl query parts) were
    // previously truncated, because split('=') broke the value at every
    // '=' and only the first piece was kept. Split on the FIRST '=' only.
    urlParse() {
      let url = window.location.search;
      let obj = {};
      let reg = /[?&][^?&]+=[^?&]+/g;
      let arr = url.match(reg);
      if (arr) {
        arr.forEach((item) => {
          let pair = item.substring(1);
          let eq = pair.indexOf('=');
          let key = decodeURIComponent(pair.slice(0, eq));
          let value = decodeURIComponent(pair.slice(eq + 1));
          obj[key] = value;
        });
      }
      this.query = Object.assign({}, this.query, obj);
      console.log("cameraPlayer:", this.query)
    }
  }
};
| | | </script> |
| New file |
| | |
| | | import Vue from 'vue'; |
| | | import App from './App.vue'; |
| | | import Mixin from "./mixins"; |
| | | |
// Install the shared mixin on every component, then mount the root app
// onto the #app element.
Vue.mixin(Mixin);
new Vue({
  el: '#app',
  render: h => h(App)
})
| New file |
| | |
| | | import TreeDataPool from "@/Pool/TreeData"; |
| | | |
| | | /* eslint-disable */ |
// One pool instance for the whole app: every component that receives this
// mixin shares the same TreeDataPool object via its `TreeDataPool` data key.
const onlyTreeDataPool = new TreeDataPool();

const mixin = {
  data() {
    return {
      TreeDataPool: onlyTreeDataPool
    };
  }
};
export default mixin;