jsmpeg组件包
This commit is contained in:
parent
8a31a31d93
commit
2c17b8f9d8
|
@ -0,0 +1,8 @@
|
|||
The MIT License (MIT)
|
||||
Copyright (c) 2017 Dominic Szablewski
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
@ -0,0 +1,253 @@
|
|||
# JSMpeg – MPEG1 Video & MP2 Audio Decoder in JavaScript
|
||||
|
||||
JSMpeg is a Video Player written in JavaScript. It consists of an MPEG-TS demuxer, MPEG1 video & MP2 audio decoders, WebGL & Canvas2D renderers and WebAudio sound output. JSMpeg can load static videos via Ajax and allows low latency streaming (~50ms) via WebSockets.
|
||||
|
||||
JSMpeg can decode 720p Video at 30fps on an iPhone 5S, works in any modern browser (Chrome, Firefox, Safari, Edge) and comes in at just 20kb gzipped.
|
||||
|
||||
Using it can be as simple as this:
|
||||
```html
|
||||
<script src="jsmpeg.min.js"></script>
|
||||
<div class="jsmpeg" data-url="video.ts"></div>
|
||||
```
|
||||
|
||||
Some more info and demos: [jsmpeg.com](http://jsmpeg.com/)
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
A JSMpeg video player can either be created in HTML using the CSS class `jsmpeg` for the container:
|
||||
|
||||
```html
|
||||
<div class="jsmpeg" data-url="<url>"></div>
|
||||
```
|
||||
|
||||
or by directly calling the `JSMpeg.Player()` constructor in JavaScript:
|
||||
|
||||
```javascript
|
||||
var player = new JSMpeg.Player(url [, options]);
|
||||
```
|
||||
|
||||
Note that using the HTML Element (internally `JSMpeg.VideoElement`) provides some features on top of `JSMpeg.Player`. Namely a SVG pause/play button and the ability to "unlock" audio on iOS devices.
|
||||
|
||||
The `url` argument accepts a URL to an MPEG .ts file or a WebSocket server (ws://...).
|
||||
|
||||
The `options` argument supports the following properties:
|
||||
|
||||
- `canvas` – the HTML Canvas element to use for video rendering. If none is given, the renderer will create its own Canvas element.
|
||||
- `loop` – whether to loop the video (static files only). Default `true`.
|
||||
- `autoplay` - whether to start playing immediately (static files only). Default `false`.
|
||||
- `audio` - whether to decode audio. Default `true`.
|
||||
- `video` - whether to decode video. Default `true`.
|
||||
- `poster` – URL to an image to use as the poster to show before the video plays.
|
||||
- `pauseWhenHidden` – whether to pause playback when the tab is inactive. Default `true`. Note that browsers usually throttle JS in inactive tabs anyway.
|
||||
- `disableGl` - whether to disable WebGL and always use the Canvas2D renderer. Default `false`.
|
||||
- `disableWebAssembly` - whether to disable WebAssembly and always use JavaScript decoders. Default `false`.
|
||||
- `preserveDrawingBuffer` – whether the WebGL context is created with `preserveDrawingBuffer` - necessary for "screenshots" via `canvas.toDataURL()`. Default `false`.
|
||||
- `progressive` - whether to load data in chunks (static files only). When enabled, playback can begin before the whole source has been completely loaded. Default `true`.
|
||||
- `throttled` - when using `progressive`, whether to defer loading chunks when they're not needed for playback yet. Default `true`.
|
||||
- `chunkSize` - when using `progressive`, the chunk size in bytes to load at a time. Default `1024*1024` (1mb).
|
||||
- `decodeFirstFrame` - whether to decode and display the first frame of the video. Useful to set up the Canvas size and use the frame as the "poster" image. This has no effect when using `autoplay` or streaming sources. Default `true`.
|
||||
- `maxAudioLag` – when streaming, the maximum enqueued audio length in seconds.
|
||||
- `videoBufferSize` – when streaming, size in bytes for the video decode buffer. Default 512*1024 (512kb). You may have to increase this for very high bitrates.
|
||||
- `audioBufferSize` – when streaming, size in bytes for the audio decode buffer. Default 128*1024 (128kb). You may have to increase this for very high bitrates.
|
||||
- `onVideoDecode(decoder, time)` – A callback that is called after each decoded and rendered video frame
|
||||
- `onAudioDecode(decoder, time)` – A callback that is called after each decoded audio frame
|
||||
- `onPlay(player)` – A callback that is called whenever playback starts
|
||||
- `onPause(player)` – A callback that is called whenever playback paused (e.g. when .pause() is called or the source has ended)
|
||||
- `onEnded(player)` – A callback that is called when playback has reached the end of the source (only called when `loop` is `false`).
|
||||
- `onStalled(player)` – A callback that is called whenever there's not enough data for playback
|
||||
- `onSourceEstablished(source)` – A callback that is called when source has first received data
|
||||
- `onSourceCompleted(source)` – A callback that is called when the source has received all data
|
||||
|
||||
|
||||
All options except for `canvas` can also be used with the HTML Element through `data-` attributes. E.g. to specify looping and autoplay in JavaScript:
|
||||
|
||||
```javascript
|
||||
var player = new JSMpeg.Player('video.ts', {loop: true, autoplay: true});
|
||||
```
|
||||
|
||||
or HTML
|
||||
```html
|
||||
<div class="jsmpeg" data-url="video.ts"
|
||||
data-loop="true" data-autoplay="true"></div>
|
||||
```
|
||||
|
||||
Note that `camelCased` options have to be hyphenated when used as data attributes. E.g. `decodeFirstFrame: true` becomes `data-decode-first-frame="true"` for the HTML element.
|
||||
|
||||
|
||||
## JSMpeg.Player API
|
||||
|
||||
A `JSMpeg.Player` instance supports the following methods and properties:
|
||||
|
||||
- `.play()` – start playback
|
||||
- `.pause()` – pause playback
|
||||
- `.stop()` – stop playback and seek to the beginning
|
||||
- `.nextFrame()` – advance playback by one video frame. This does not decode audio. Returns `true` on success, `false` when there's not enough data.
|
||||
- `.volume` – get or set the audio volume (0-1)
|
||||
- `.currentTime` – get or set the current playback position in seconds
|
||||
- `.paused` – read only, whether playback is paused
|
||||
- `.destroy()` – stops playback, disconnects the source and cleans up WebGL and WebAudio state. The player can not be used afterwards. If the player created the canvas element it is removed from the document.
|
||||
|
||||
|
||||
## Encoding Video/Audio for JSMpeg
|
||||
|
||||
JSMpeg only supports playback of MPEG-TS containers with the MPEG1 Video Codec and the MP2 Audio Codec. The Video Decoder does not handle B-Frames correctly (though no modern encoder seems to use these by default anyway) and the width of the video has to be a multiple of 2.
|
||||
|
||||
You can encode a suitable video using [ffmpeg](https://ffmpeg.org/) like this:
|
||||
|
||||
```sh
|
||||
ffmpeg -i in.mp4 -f mpegts -codec:v mpeg1video -codec:a mp2 -b 0 out.ts
|
||||
```
|
||||
|
||||
You can also control the video size (`-s`), framerate (`-r`), video bitrate (`-b:v`), audio bitrate (`-b:a`), number of audio channels (`-ac`), sampling rate (`-ar`) and much more. Please refer to the ffmpeg documentation for the details.
|
||||
|
||||
Comprehensive example:
|
||||
```sh
|
||||
ffmpeg -i in.mp4 -f mpegts \
|
||||
-codec:v mpeg1video -s 960x540 -b:v 1500k -r 30 -bf 0 \
|
||||
-codec:a mp2 -ar 44100 -ac 1 -b:a 128k \
|
||||
out.ts
|
||||
```
|
||||
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
While JSMpeg can handle 720p video at 30fps even on an iPhone 5S, keep in mind that MPEG1 is not as efficient as modern codecs. MPEG1 needs quite a bit of bandwidth for HD video. 720p begins to look okay-ish at 2 Mbits/s (that's 250kb/s). Also, the higher the bitrate, the more work JavaScript has to do to decode it.
|
||||
|
||||
This should not be a problem for static files, or if you're only streaming within your local WiFi. If you don't need to support mobile devices, 1080p at 10mbit/s works just fine (if your encoder can keep up). For everything else I would advise you to use 540p (960x540) at 2Mbit/s max.
|
||||
|
||||
Here is a performance comparison with multiple resolutions and features en-/disabled. Test this on your target devices to get a feel for what you can get away with.
|
||||
|
||||
https://jsmpeg.com/perf.html
|
||||
|
||||
|
||||
## Streaming via WebSockets
|
||||
|
||||
JSMpeg can connect to a WebSocket server that sends out binary MPEG-TS data. When streaming, JSMpeg tries to keep latency as low as possible - it immediately decodes everything it has, ignoring video and audio timestamps altogether. To keep everything in sync (and latency low), audio data should be interleaved between video frames very frequently (`-muxdelay` in ffmpeg).
|
||||
|
||||
A separate, buffered streaming mode, where JSMpeg pre-loads a few seconds of data and presents everything with exact timing and audio/video sync is conceivable, but currently not implemented.
|
||||
|
||||
The internal buffers for video and audio are fairly small (512kb and 128kb respectively) and JSMpeg will discard old (even unplayed) data to make room for newly arriving data without much fuss. This could introduce decoding artifacts when there's a network congestion, but ensures that latency is kept at a minimum. If necessary, you can increase the `videoBufferSize` and `audioBufferSize` through the options.
|
||||
|
||||
JSMpeg comes with a tiny WebSocket "relay", written in Node.js. This server accepts an MPEG-TS source over HTTP and serves it via WebSocket to all connecting Browsers. The incoming HTTP stream can be generated using [ffmpeg](https://ffmpeg.org/), [gstreamer](https://gstreamer.freedesktop.org/) or by other means.
|
||||
|
||||
The split between the source and the WebSocket relay is necessary, because ffmpeg doesn't speak the WebSocket protocol. However, this split also allows you to install the WebSocket relay on a public server and share your stream on the Internet (typically NAT in your router prevents the public Internet from connecting _into_ your local network).
|
||||
|
||||
In short, it works like this:
|
||||
|
||||
1. run the websocket-relay.js
|
||||
2. run ffmpeg, send output to the relay's HTTP port
|
||||
3. connect JSMpeg in the browser to the relay's Websocket port
|
||||
|
||||
|
||||
## Example Setup for Streaming: Raspberry Pi Live Webcam
|
||||
|
||||
For this example, ffmpeg and the WebSocket relay run on the same system. This allows you to view the stream in your local network, but not on the public internet.
|
||||
|
||||
This example assumes that your webcam is compatible with Video4Linux2 and appears as `/dev/video0` in the filesystem. Most USB webcams support the UVC standard and should work just fine. The onboard Raspberry Camera can be made available as V4L2 device by loading a kernel module: `sudo modprobe bcm2835-v4l2`.
|
||||
|
||||
|
||||
1) Install ffmpeg (See [How to install ffmpeg on Debian / Raspbian](http://superuser.com/questions/286675/how-to-install-ffmpeg-on-debian)). Using ffmpeg, we can capture the webcam video & audio and encode it into MPEG1/MP2.
|
||||
|
||||
2) Install Node.js and npm (See [Installing Node.js on Debian and Ubuntu based Linux distributions](https://nodejs.org/en/download/package-manager/#debian-and-ubuntu-based-linux-distributions) for newer versions). The Websocket relay is written in Node.js
|
||||
|
||||
3) Install http-server. We will use this to serve the static files (view-stream.html, jsmpeg.min.js), so that we can view the website with the video in our browser. Any other webserver would work as well (nginx, apache, etc.):
|
||||
`sudo npm -g install http-server`
|
||||
|
||||
4) Install git and clone this repository (or just download it as ZIP and unpack)
|
||||
```
|
||||
sudo apt-get install git
|
||||
git clone https://github.com/phoboslab/jsmpeg.git
|
||||
```
|
||||
|
||||
5) Change into the jsmpeg/ directory
|
||||
`cd jsmpeg/`
|
||||
|
||||
6) Install the Node.js Websocket Library:
|
||||
`npm install ws`
|
||||
|
||||
7) Start the Websocket relay. Provide a password and a port for the incoming HTTP video stream and a Websocket port that we can connect to in the browser:
|
||||
`node websocket-relay.js supersecret 8081 8082`
|
||||
|
||||
8) In a new terminal window (still in the `jsmpeg/` directory), start the `http-server` so we can serve the view-stream.html to the browser:
|
||||
`http-server`
|
||||
|
||||
9) Open the streaming website in your browser. The `http-server` will tell you the ip (usually `192.168.[...]`) and port (usually `8080`) where it's running on:
|
||||
`http://192.168.[...]:8080/view-stream.html`
|
||||
|
||||
10) In a third terminal window, start ffmpeg to capture the webcam video and send it to the Websocket relay. Provide the password and port (from step 7) in the destination URL:
|
||||
```
|
||||
ffmpeg \
|
||||
-f v4l2 \
|
||||
-framerate 25 -video_size 640x480 -i /dev/video0 \
|
||||
-f mpegts \
|
||||
-codec:v mpeg1video -s 640x480 -b:v 1000k -bf 0 \
|
||||
http://localhost:8081/supersecret
|
||||
```
|
||||
|
||||
You should now see a live webcam image in your browser.
|
||||
|
||||
If ffmpeg failed to open the input video, it's likely that your webcam does not support the given resolution, format or framerate. To get a list of compatible modes run:
|
||||
|
||||
`ffmpeg -f v4l2 -list_formats all -i /dev/video0`
|
||||
|
||||
|
||||
To add the webcam audio, just call ffmpeg with two separate inputs.
|
||||
|
||||
```
|
||||
ffmpeg \
|
||||
-f v4l2 \
|
||||
-framerate 25 -video_size 640x480 -i /dev/video0 \
|
||||
-f alsa \
|
||||
-ar 44100 -c 2 -i hw:0 \
|
||||
-f mpegts \
|
||||
-codec:v mpeg1video -s 640x480 -b:v 1000k -bf 0 \
|
||||
-codec:a mp2 -b:a 128k \
|
||||
-muxdelay 0.001 \
|
||||
http://localhost:8081/supersecret
|
||||
```
|
||||
|
||||
Note the `muxdelay` argument. This should reduce lag, but doesn't always work when streaming video and audio - see remarks below.
|
||||
|
||||
|
||||
## Some remarks about ffmpeg muxing and latency
|
||||
|
||||
Adding an audio stream to the MPEG-TS can sometimes introduce considerable latency. I especially found this to be a problem on linux using ALSA and V4L2 (using AVFoundation on macOS worked just fine). However, there is a simple workaround: just run two instances of ffmpeg in parallel. One for audio, one for video. Send both outputs to the same Websocket relay. Thanks to the simplicity of the MPEG-TS format, proper "muxing" of the two streams happens automatically in the relay.
|
||||
|
||||
```
|
||||
ffmpeg \
|
||||
-f v4l2 \
|
||||
-framerate 25 -video_size 640x480 -i /dev/video0 \
|
||||
-f mpegts \
|
||||
-codec:v mpeg1video -s 640x480 -b:v 1000k -bf 0 \
|
||||
-muxdelay 0.001 \
|
||||
http://localhost:8081/supersecret
|
||||
|
||||
# In a second terminal
|
||||
ffmpeg \
|
||||
-f alsa \
|
||||
-ar 44100 -c 2 -i hw:0 \
|
||||
-f mpegts \
|
||||
-codec:a mp2 -b:a 128k \
|
||||
-muxdelay 0.001 \
|
||||
http://localhost:8081/supersecret
|
||||
```
|
||||
In my tests, USB Webcams introduce about ~180ms of latency and there seems to be nothing we can do about it. The Raspberry Pi however has a [camera module](https://www.raspberrypi.org/products/camera-module-v2/) that provides lower latency video capture.
|
||||
|
||||
To capture webcam input on Windows or macOS using ffmpeg, see the [ffmpeg Capture/Webcam Wiki](https://trac.ffmpeg.org/wiki/Capture/Webcam).
|
||||
|
||||
|
||||
## JSMpeg Architecture and Internals
|
||||
|
||||
This library was built in a fairly modular fashion while keeping overhead at a minimum. Implementing new Demuxers, Decoders, Outputs (Renderers, Audio Devices) or Sources should be possible without changing any other parts. However, you would still need to subclass the `JSMpeg.Player` in order to use any new modules.
|
||||
|
||||
Have a look at the [jsmpeg.js source](https://github.com/phoboslab/jsmpeg/blob/master/src/jsmpeg.js) for an overview of how the modules interconnect and what APIs they should provide. I also wrote a blog post about some of JSMpeg's internals: [Decode It Like It's 1999](http://phoboslab.org/log/2017/02/decode-it-like-its-1999).
|
||||
|
||||
Using parts of the library without creating a full player should also be fairly straightforward. E.g. you can create a stand-alone instance of the `JSMpeg.Decoder.MPEG1Video` class, `.connect()` a renderer, `.write()` some data to it and `.decode()` a frame, without touching JSMpeg's other parts.
|
||||
|
||||
|
||||
## Previous Version
|
||||
|
||||
The JSMpeg version currently living in this repo is a complete rewrite of the original jsmpeg library that was just able to decode raw mpeg1video. If you're looking for the old version, see the [v0.2 tag](https://github.com/phoboslab/jsmpeg/releases/tag/v0.2).
|
||||
|
||||
|
|
@ -0,0 +1,115 @@
|
|||
#!/bin/sh

# JSMpeg build script:
# 1) compile the C decoders into a standalone .wasm side module,
# 2) concatenate all .js sources into one file,
# 3) inline the .wasm binary into the .js as a base64 string,
# 4) minify, and clean up intermediates.

# Build the .wasm Module first

# Since we're compiling a side module here, so that we can load it without the
# runtime cruft, we have to explicitly compile in support for malloc and
# friends.
# Note memcpy, memmove and memset are explicitly exported, otherwise they will
# be eliminated by the SIDE_MODULE=2 setting - not sure why that happens.

# This NEEDS to be compiled with emscripten 1.38.47. Newer versions mess with
# malloc and friends and need some more glue code for side modules that I
# haven't quite worked out yet. If you have any idea how to build a SIDE_MODULE
# (or STANDALONE_WASM - as seems to be the new deal) with support for malloc,
# please let me know or file a PR.

# To install the correct version, issue the following in your emsdk directory:
# ./emsdk install 1.38.47
# ./emsdk activate 1.38.47
# source ./emsdk_env.sh

# The $EMSCRIPTEN_LIB var needs to point to the correct directory within the sdk
# that has emmalloc.cpp. This is usually $EMSDK/fastcomp/emscripten/system/lib
# but it might differ per system. I don't know.
# There used to be an $EMSCRIPTEN var set by the emsdk_env script that pointed
# to the correct directory, but this seems to have gone now.

# In conclusion, emscripten encapsulates everything that I hate about native
# development :/

EMSCRIPTEN_LIB=$EMSDK/fastcomp/emscripten/system/lib

# Compile the MPEG1 video decoder, MP2 audio decoder and bit buffer, plus the
# musl memory routines they need, into a single optimized side module.
emcc \
	src/wasm/mpeg1.c \
	src/wasm/mp2.c \
	src/wasm/buffer.c \
	$EMSCRIPTEN_LIB/emmalloc.cpp \
	$EMSCRIPTEN_LIB/libc/musl/src/string/memcpy.c \
	$EMSCRIPTEN_LIB/libc/musl/src/string/memmove.c \
	$EMSCRIPTEN_LIB/libc/musl/src/string/memset.c \
	-s WASM=1 \
	-s SIDE_MODULE=2 \
	-s TOTAL_STACK=5242880\
	-s USE_PTHREADS=0 \
	-s LEGALIZE_JS_FFI=0\
	-s NO_FILESYSTEM=1 \
	-s DEFAULT_LIBRARY_FUNCS_TO_INCLUDE="[]" \
	-s "EXPORTED_FUNCTIONS=[
		'_memcpy',
		'_memmove',
		'_memset',
		'_mpeg1_decoder_create',
		'_mpeg1_decoder_destroy',
		'_mpeg1_decoder_get_write_ptr',
		'_mpeg1_decoder_get_index',
		'_mpeg1_decoder_set_index',
		'_mpeg1_decoder_did_write',
		'_mpeg1_decoder_has_sequence_header',
		'_mpeg1_decoder_get_frame_rate',
		'_mpeg1_decoder_get_coded_size',
		'_mpeg1_decoder_get_width',
		'_mpeg1_decoder_get_height',
		'_mpeg1_decoder_get_y_ptr',
		'_mpeg1_decoder_get_cr_ptr',
		'_mpeg1_decoder_get_cb_ptr',
		'_mpeg1_decoder_decode',
		'_mp2_decoder_create',
		'_mp2_decoder_destroy',
		'_mp2_decoder_get_write_ptr',
		'_mp2_decoder_get_index',
		'_mp2_decoder_set_index',
		'_mp2_decoder_did_write',
		'_mp2_decoder_get_left_channel_ptr',
		'_mp2_decoder_get_right_channel_ptr',
		'_mp2_decoder_get_sample_rate',
		'_mp2_decoder_decode']" \
	-O3 \
	-o jsmpeg.wasm


# Concat all .js sources
# Order matters: jsmpeg.js defines the JSMpeg namespace the other files
# attach to.
cat \
	src/jsmpeg.js \
	src/video-element.js \
	src/player.js \
	src/buffer.js \
	src/ajax.js \
	src/fetch.js \
	src/ajax-progressive.js \
	src/websocket.js \
	src/ts.js \
	src/decoder.js \
	src/mpeg1.js \
	src/mpeg1-wasm.js \
	src/mp2.js \
	src/mp2-wasm.js \
	src/webgl.js \
	src/canvas2d.js \
	src/webaudio.js \
	src/wasm-module.js \
	> jsmpeg.js

# Append the .wasm module to the .js source as base64 string
echo "JSMpeg.WASM_BINARY_INLINED='$(base64 -w 0 jsmpeg.wasm)';" \
	>> jsmpeg.js


# Minify
uglifyjs jsmpeg.js -o jsmpeg.min.js

# Cleanup
rm jsmpeg.js
rm jsmpeg.wasm
|
||||
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,137 @@
|
|||
JSMpeg.Source.AjaxProgressive = (function(){ "use strict";

// Progressive Ajax source: loads a static file in fixed-size chunks using
// HTTP Range requests. When `throttled`, chunks are only fetched shortly
// before the player needs them (driven by resume()).
var AjaxProgressiveSource = function(url, options) {
	this.url = url;
	this.destination = null;
	this.request = null;
	this.streaming = false;

	this.completed = false;
	this.established = false;
	this.progress = 0;

	this.fileSize = 0;
	this.loadedSize = 0;
	this.chunkSize = options.chunkSize || 1024*1024;

	this.isLoading = false;
	this.loadStartTime = 0;
	// FIX: loadTime and loadFails were previously never initialized here.
	// resume() read this.loadTime before the first chunk finished
	// (NaN headroom estimate), and loadNextChunk()'s retry check did
	// `this.loadFails++` on undefined (NaN < 3 is false), so a failed
	// first chunk was never retried.
	this.loadTime = 0;
	this.loadFails = 0;
	this.throttled = options.throttled !== false;
	this.aborted = false;

	this.onEstablishedCallback = options.onSourceEstablished;
	this.onCompletedCallback = options.onSourceCompleted;
};

// Attach the demuxer (or any object with a .write(data) method).
AjaxProgressiveSource.prototype.connect = function(destination) {
	this.destination = destination;
};

// Issue a HEAD request to learn the file size, then start loading chunks.
AjaxProgressiveSource.prototype.start = function() {
	this.request = new XMLHttpRequest();

	this.request.onreadystatechange = function() {
		if (this.request.readyState === this.request.DONE) {
			// Content-Length is a decimal byte count.
			this.fileSize = parseInt(
				this.request.getResponseHeader("Content-Length"), 10
			);
			this.loadNextChunk();
		}
	}.bind(this);

	this.request.onprogress = this.onProgress.bind(this);
	this.request.open('HEAD', this.url);
	this.request.send();
};

// Called by the player with the seconds of buffered playback remaining;
// kicks off the next chunk load when the buffer may run dry.
AjaxProgressiveSource.prototype.resume = function(secondsHeadroom) {
	if (this.isLoading || !this.throttled) {
		return;
	}

	// Guess the worst case loading time with lots of safety margin. This is
	// somewhat arbitrary...
	var worstCaseLoadingTime = this.loadTime * 8 + 2;
	if (worstCaseLoadingTime > secondsHeadroom) {
		this.loadNextChunk();
	}
};

AjaxProgressiveSource.prototype.destroy = function() {
	this.request.abort();
	this.aborted = true;
};

// Request the next byte range; signals completion when past end of file.
AjaxProgressiveSource.prototype.loadNextChunk = function() {
	var start = this.loadedSize,
		end = Math.min(this.loadedSize + this.chunkSize-1, this.fileSize-1);

	if (start >= this.fileSize || this.aborted) {
		this.completed = true;
		if (this.onCompletedCallback) {
			this.onCompletedCallback(this);
		}
		return;
	}

	this.isLoading = true;
	this.loadStartTime = JSMpeg.Now();
	this.request = new XMLHttpRequest();

	this.request.onreadystatechange = function() {
		if (
			this.request.readyState === this.request.DONE &&
			this.request.status >= 200 && this.request.status < 300
		) {
			this.onChunkLoad(this.request.response);
		}
		else if (this.request.readyState === this.request.DONE) {
			// FIX: clear isLoading on failure, so that an exhausted retry
			// doesn't leave the source permanently stuck "loading" and
			// block all future resume() calls.
			this.isLoading = false;

			// Retry up to 3 times.
			if (this.loadFails++ < 3) {
				this.loadNextChunk();
			}
		}
	}.bind(this);

	if (start === 0) {
		this.request.onprogress = this.onProgress.bind(this);
	}

	// The query string is a cache-buster per range; the actual range is
	// requested via the Range header.
	this.request.open('GET', this.url+'?'+start+"-"+end);
	this.request.setRequestHeader("Range", "bytes="+start+"-"+end);
	this.request.responseType = "arraybuffer";
	this.request.send();
};

AjaxProgressiveSource.prototype.onProgress = function(ev) {
	this.progress = (ev.loaded / ev.total);
};

// Handle one successfully loaded chunk: update bookkeeping, fire the
// established callback on the first chunk, forward data downstream.
AjaxProgressiveSource.prototype.onChunkLoad = function(data) {
	var isFirstChunk = !this.established;
	this.established = true;
	this.progress = 1;

	this.loadedSize += data.byteLength;
	this.loadFails = 0;
	this.isLoading = false;

	if (isFirstChunk && this.onEstablishedCallback) {
		this.onEstablishedCallback(this);
	}

	if (this.destination) {
		this.destination.write(data);
	}

	// Record how long this chunk took; used by resume() to estimate
	// worst-case load time.
	this.loadTime = JSMpeg.Now() - this.loadStartTime;
	if (!this.throttled) {
		this.loadNextChunk();
	}
};

return AjaxProgressiveSource;

})();
|
||||
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
JSMpeg.Source.Ajax = (function(){ "use strict";

// Simple Ajax source: fetches the whole file in a single GET request and
// hands the complete ArrayBuffer to the connected destination.
var AjaxSource = function(url, options) {
	this.url = url;
	this.destination = null;
	this.request = null;

	// A plain Ajax source never streams; everything arrives at once.
	this.streaming = false;

	this.completed = false;
	this.established = false;
	this.progress = 0;

	this.onEstablishedCallback = options.onSourceEstablished;
	this.onCompletedCallback = options.onSourceCompleted;
};

// Attach the demuxer (or any object with a .write(data) method).
AjaxSource.prototype.connect = function(destination) {
	this.destination = destination;
};

// Begin the download. onLoad() fires once the response arrives with a 200.
AjaxSource.prototype.start = function() {
	var request = new XMLHttpRequest();
	this.request = request;

	request.onreadystatechange = function() {
		var finished = (request.readyState === request.DONE);
		if (finished && request.status === 200) {
			this.onLoad(request.response);
		}
	}.bind(this);

	request.onprogress = this.onProgress.bind(this);
	request.open('GET', this.url);
	request.responseType = "arraybuffer";
	request.send();
};

AjaxSource.prototype.resume = function(secondsHeadroom) {
	// Nothing to do here
};

AjaxSource.prototype.destroy = function() {
	this.request.abort();
};

AjaxSource.prototype.onProgress = function(ev) {
	this.progress = (ev.loaded / ev.total);
};

// The whole file is available: mark the source established and completed,
// fire both callbacks, then push the data downstream.
AjaxSource.prototype.onLoad = function(data) {
	this.established = true;
	this.completed = true;
	this.progress = 1;

	if (this.onEstablishedCallback) {
		this.onEstablishedCallback(this);
	}
	if (this.onCompletedCallback) {
		this.onCompletedCallback(this);
	}

	if (this.destination) {
		this.destination.write(data);
	}
};

return AjaxSource;

})();
|
||||
|
||||
|
|
@ -0,0 +1,198 @@
|
|||
JSMpeg.BitBuffer = (function(){ "use strict";
|
||||
|
||||
var BitBuffer = function(bufferOrLength, mode) {
|
||||
if (typeof(bufferOrLength) === 'object') {
|
||||
this.bytes = (bufferOrLength instanceof Uint8Array)
|
||||
? bufferOrLength
|
||||
: new Uint8Array(bufferOrLength);
|
||||
|
||||
this.byteLength = this.bytes.length;
|
||||
}
|
||||
else {
|
||||
this.bytes = new Uint8Array(bufferOrLength || 1024*1024);
|
||||
this.byteLength = 0;
|
||||
}
|
||||
|
||||
this.mode = mode || BitBuffer.MODE.EXPAND;
|
||||
this.index = 0;
|
||||
};
|
||||
|
||||
BitBuffer.prototype.resize = function(size) {
|
||||
var newBytes = new Uint8Array(size);
|
||||
if (this.byteLength !== 0) {
|
||||
this.byteLength = Math.min(this.byteLength, size);
|
||||
newBytes.set(this.bytes, 0, this.byteLength);
|
||||
}
|
||||
this.bytes = newBytes;
|
||||
this.index = Math.min(this.index, this.byteLength << 3);
|
||||
};
|
||||
|
||||
BitBuffer.prototype.evict = function(sizeNeeded) {
|
||||
var bytePos = this.index >> 3,
|
||||
available = this.bytes.length - this.byteLength;
|
||||
|
||||
// If the current index is the write position, we can simply reset both
|
||||
// to 0. Also reset (and throw away yet unread data) if we won't be able
|
||||
// to fit the new data in even after a normal eviction.
|
||||
if (
|
||||
this.index === this.byteLength << 3 ||
|
||||
sizeNeeded > available + bytePos // emergency evac
|
||||
) {
|
||||
this.byteLength = 0;
|
||||
this.index = 0;
|
||||
return;
|
||||
}
|
||||
else if (bytePos === 0) {
|
||||
// Nothing read yet - we can't evict anything
|
||||
return;
|
||||
}
|
||||
|
||||
// Some browsers don't support copyWithin() yet - we may have to do
|
||||
// it manually using set and a subarray
|
||||
if (this.bytes.copyWithin) {
|
||||
this.bytes.copyWithin(0, bytePos, this.byteLength);
|
||||
}
|
||||
else {
|
||||
this.bytes.set(this.bytes.subarray(bytePos, this.byteLength));
|
||||
}
|
||||
|
||||
this.byteLength = this.byteLength - bytePos;
|
||||
this.index -= bytePos << 3;
|
||||
return;
|
||||
};
|
||||
|
||||
BitBuffer.prototype.write = function(buffers) {
|
||||
var isArrayOfBuffers = (typeof(buffers[0]) === 'object'),
|
||||
totalLength = 0,
|
||||
available = this.bytes.length - this.byteLength;
|
||||
|
||||
// Calculate total byte length
|
||||
if (isArrayOfBuffers) {
|
||||
var totalLength = 0;
|
||||
for (var i = 0; i < buffers.length; i++) {
|
||||
totalLength += buffers[i].byteLength;
|
||||
}
|
||||
}
|
||||
else {
|
||||
totalLength = buffers.byteLength;
|
||||
}
|
||||
|
||||
// Do we need to resize or evict?
|
||||
if (totalLength > available) {
|
||||
if (this.mode === BitBuffer.MODE.EXPAND) {
|
||||
var newSize = Math.max(
|
||||
this.bytes.length * 2,
|
||||
totalLength - available
|
||||
);
|
||||
this.resize(newSize)
|
||||
}
|
||||
else {
|
||||
this.evict(totalLength);
|
||||
}
|
||||
}
|
||||
|
||||
if (isArrayOfBuffers) {
|
||||
for (var i = 0; i < buffers.length; i++) {
|
||||
this.appendSingleBuffer(buffers[i]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
this.appendSingleBuffer(buffers);
|
||||
}
|
||||
|
||||
return totalLength;
|
||||
};
|
||||
|
||||
BitBuffer.prototype.appendSingleBuffer = function(buffer) {
|
||||
buffer = buffer instanceof Uint8Array
|
||||
? buffer
|
||||
: new Uint8Array(buffer);
|
||||
|
||||
this.bytes.set(buffer, this.byteLength);
|
||||
this.byteLength += buffer.length;
|
||||
};
|
||||
|
||||
BitBuffer.prototype.findNextStartCode = function() {
|
||||
for (var i = (this.index+7 >> 3); i < this.byteLength; i++) {
|
||||
if(
|
||||
this.bytes[i] == 0x00 &&
|
||||
this.bytes[i+1] == 0x00 &&
|
||||
this.bytes[i+2] == 0x01
|
||||
) {
|
||||
this.index = (i+4) << 3;
|
||||
return this.bytes[i+3];
|
||||
}
|
||||
}
|
||||
this.index = (this.byteLength << 3);
|
||||
return -1;
|
||||
};
|
||||
|
||||
BitBuffer.prototype.findStartCode = function(code) {
|
||||
var current = 0;
|
||||
while (true) {
|
||||
current = this.findNextStartCode();
|
||||
if (current === code || current === -1) {
|
||||
return current;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
};
|
||||
|
||||
// Without consuming anything, report whether the bytes at the next byte
// boundary form a start code prefix (0x00 0x00 0x01). Being at or past the
// end of the buffered data also counts as true.
BitBuffer.prototype.nextBytesAreStartCode = function() {
	var pos = (this.index + 7 >> 3);
	if (pos >= this.byteLength) {
		return true;
	}
	return (
		this.bytes[pos] === 0x00 &&
		this.bytes[pos + 1] === 0x00 &&
		this.bytes[pos + 2] === 0x01
	);
};
|
||||
|
||||
// Return the next `count` bits as an unsigned integer without advancing
// the read position. Bits are consumed MSB-first and may span byte
// boundaries. (Fix: the original function expression was not terminated
// with a semicolon and relied on ASI, unlike every sibling method.)
BitBuffer.prototype.peek = function(count) {
	var offset = this.index;
	var value = 0;
	while (count) {
		var currentByte = this.bytes[offset >> 3],
			remaining = 8 - (offset & 7), // remaining bits in current byte
			read = remaining < count ? remaining : count, // bits in this run
			shift = remaining - read,
			mask = (0xff >> (8-read));

		value = (value << read) | ((currentByte & (mask << shift)) >> shift);

		offset += read;
		count -= read;
	}

	return value;
};
|
||||
|
||||
// Consume and return the next `count` bits as an unsigned integer.
BitBuffer.prototype.read = function(count) {
	var result = this.peek(count);
	this.index += count;
	return result;
};
|
||||
|
||||
// Advance the read position by `count` bits; returns the new bit index.
BitBuffer.prototype.skip = function(count) {
	this.index += count;
	return this.index;
};
|
||||
|
||||
// Move the read position `count` bits backwards, clamping at the start
// of the buffer.
BitBuffer.prototype.rewind = function(count) {
	var newIndex = this.index - count;
	this.index = Math.max(newIndex, 0);
};
|
||||
|
||||
// True when at least `count` more bits are available to read.
BitBuffer.prototype.has = function(count) {
	var bitsAvailable = (this.byteLength << 3) - this.index;
	return bitsAvailable >= count;
};
|
||||
|
||||
// Write-mode constants for BitBuffer:
//   EVICT  - discard already-read data to make room (streaming sources)
//   EXPAND - grow the internal buffer instead (static, fully loaded sources)
BitBuffer.MODE = {
	EVICT: 1,
	EXPAND: 2
};
|
||||
|
||||
return BitBuffer;
|
||||
|
||||
})();
|
||||
|
||||
|
|
@ -0,0 +1,128 @@
|
|||
JSMpeg.Renderer.Canvas2D = (function(){ "use strict";

// Software renderer: converts decoded YCbCr planes to RGBA on the CPU and
// draws them into a 2D canvas (used when WebGL is not available).
var CanvasRenderer = function(options) {
	if (options.canvas) {
		// Render into a caller-supplied canvas; destroy() must not remove it.
		this.canvas = options.canvas;
		this.ownsCanvasElement = false;
	}
	else {
		this.canvas = document.createElement('canvas');
		this.ownsCanvasElement = true;
	}
	this.width = this.canvas.width;
	this.height = this.canvas.height;
	this.enabled = true;

	this.context = this.canvas.getContext('2d');
};

// Remove the canvas from the DOM, but only if this renderer created it.
CanvasRenderer.prototype.destroy = function() {
	if (this.ownsCanvasElement) {
		this.canvas.remove();
	}
};

// Resize the canvas and allocate a matching RGBA ImageData buffer.
CanvasRenderer.prototype.resize = function(width, height) {
	this.width = width|0;
	this.height = height|0;

	this.canvas.width = this.width;
	this.canvas.height = this.height;

	this.imageData = this.context.getImageData(0, 0, this.width, this.height);
	// Pre-fill with 255 so the alpha channel stays opaque - the color
	// conversion below only ever writes the R, G and B bytes.
	JSMpeg.Fill(this.imageData.data, 255);
};

// Draw a simple vertical loading bar: dark background, white fill growing
// from the bottom according to `progress` (0..1).
CanvasRenderer.prototype.renderProgress = function(progress) {
	var
		w = this.canvas.width,
		h = this.canvas.height,
		ctx = this.context;

	ctx.fillStyle = '#222';
	ctx.fillRect(0, 0, w, h);
	ctx.fillStyle = '#fff';
	ctx.fillRect(0, h - h * progress, w, h * progress);
};

// Convert one decoded frame (three separate planes) and blit it.
// NOTE(review): the JSMpeg.Renderer API is documented elsewhere in this
// file as render(y, cr, cb); the parameter names here suggest (y, cb, cr) -
// confirm the plane order against the video decoder's render() call.
CanvasRenderer.prototype.render = function(y, cb, cr) {
	this.YCbCrToRGBA(y, cb, cr, this.imageData.data);
	this.context.putImageData(this.imageData, 0, 0);
};

// Fixed-point integer YCbCr -> RGBA conversion into the `rgba` byte buffer.
CanvasRenderer.prototype.YCbCrToRGBA = function(y, cb, cr, rgba) {
	if (!this.enabled) {
		return;
	}

	// Chroma values are the same for each block of 4 pixels, so we process
	// 2 lines at a time, 2 neighboring pixels each.
	// I wish we could use 32bit writes to the RGBA buffer instead of writing
	// each byte separately, but we need the automatic clamping of the RGBA
	// buffer.

	// Plane stride: the coded width, rounded up to a full 16px macroblock.
	var w = ((this.width + 15) >> 4) << 4,
		w2 = w >> 1;

	// Two parallel read cursors for the two luma lines of the 2x2 block;
	// the *Next2Lines offsets skip the padding beyond the display width.
	var yIndex1 = 0,
		yIndex2 = w,
		yNext2Lines = w + (w - this.width);

	var cIndex = 0,
		cNextLine = w2 - (this.width >> 1);

	var rgbaIndex1 = 0,
		rgbaIndex2 = this.width * 4,
		rgbaNext2Lines = this.width * 4;

	var cols = this.width >> 1,
		rows = this.height >> 1;

	var ccb, ccr, r, g, b;

	for (var row = 0; row < rows; row++) {
		for (var col = 0; col < cols; col++) {
			ccb = cb[cIndex];
			ccr = cr[cIndex];
			cIndex++;

			// Integer approximation of the chroma contributions; shared by
			// all four pixels of this 2x2 block.
			r = (ccb + ((ccb * 103) >> 8)) - 179;
			g = ((ccr * 88) >> 8) - 44 + ((ccb * 183) >> 8) - 91;
			b = (ccr + ((ccr * 198) >> 8)) - 227;

			// Line 1
			var y1 = y[yIndex1++];
			var y2 = y[yIndex1++];
			rgba[rgbaIndex1] = y1 + r;
			rgba[rgbaIndex1+1] = y1 - g;
			rgba[rgbaIndex1+2] = y1 + b;
			rgba[rgbaIndex1+4] = y2 + r;
			rgba[rgbaIndex1+5] = y2 - g;
			rgba[rgbaIndex1+6] = y2 + b;
			rgbaIndex1 += 8;

			// Line 2
			var y3 = y[yIndex2++];
			var y4 = y[yIndex2++];
			rgba[rgbaIndex2] = y3 + r;
			rgba[rgbaIndex2+1] = y3 - g;
			rgba[rgbaIndex2+2] = y3 + b;
			rgba[rgbaIndex2+4] = y4 + r;
			rgba[rgbaIndex2+5] = y4 - g;
			rgba[rgbaIndex2+6] = y4 + b;
			rgbaIndex2 += 8;
		}

		yIndex1 += yNext2Lines;
		yIndex2 += yNext2Lines;
		rgbaIndex1 += rgbaNext2Lines;
		rgbaIndex2 += rgbaNext2Lines;
		cIndex += cNextLine;
	}
};

return CanvasRenderer;

})();
|
||||
|
||||
|
|
@ -0,0 +1,112 @@
|
|||
JSMpeg.Decoder.Base = (function(){ "use strict";

// Common base for the audio/video decoders. Provides the write-side
// buffering hooks and maps bit positions in the buffer to presentation
// timestamps (PTS), so that seek() and currentTime work for static files.
var BaseDecoder = function(options) {
	this.destination = null;
	this.canPlay = false;

	// Timestamps are only collected for non-streaming (static) sources;
	// a live stream has nothing to seek in.
	this.collectTimestamps = !options.streaming;
	this.bytesWritten = 0;
	this.timestamps = [];
	this.timestampIndex = 0;

	this.startTime = 0;
	this.decodedTime = 0;

	// Expose getCurrentTime() as a read-only `currentTime` property, so
	// subclasses can override the getter.
	Object.defineProperty(this, 'currentTime', {get: this.getCurrentTime});
};

BaseDecoder.prototype.destroy = function() {};

// Attach the node that receives decoded output (renderer or audio output).
BaseDecoder.prototype.connect = function(destination) {
	this.destination = destination;
};

// Buffer position accessors. Overridden by WASM-backed decoders whose bit
// buffer lives in the WASM heap instead of `this.bits`.
BaseDecoder.prototype.bufferGetIndex = function() {
	return this.bits.index;
};

BaseDecoder.prototype.bufferSetIndex = function(index) {
	this.bits.index = index;
};

BaseDecoder.prototype.bufferWrite = function(buffers) {
	return this.bits.write(buffers);
};

// Accept a chunk of demuxed data with its PTS. When collecting timestamps,
// records a (bit index -> pts) entry at the current write position.
BaseDecoder.prototype.write = function(pts, buffers) {
	if (this.collectTimestamps) {
		if (this.timestamps.length === 0) {
			// First chunk establishes the start time.
			this.startTime = pts;
			this.decodedTime = pts;
		}
		this.timestamps.push({index: this.bytesWritten << 3, time: pts});
	}

	this.bytesWritten += this.bufferWrite(buffers);
	this.canPlay = true;
};

// Reposition the read index at the last recorded timestamp <= `time`.
// No-op for streaming sources (no timestamps collected).
BaseDecoder.prototype.seek = function(time) {
	if (!this.collectTimestamps) {
		return;
	}

	this.timestampIndex = 0;
	for (var i = 0; i < this.timestamps.length; i++) {
		if (this.timestamps[i].time > time) {
			break;
		}
		this.timestampIndex = i;
	}

	var ts = this.timestamps[this.timestampIndex];
	if (ts) {
		this.bufferSetIndex(ts.index);
		this.decodedTime = ts.time;
	}
	else {
		// No timestamps recorded at all; rewind to the beginning.
		this.bufferSetIndex(0);
		this.decodedTime = this.startTime;
	}
};

BaseDecoder.prototype.decode = function() {
	this.advanceDecodedTime(0);
};

// Advance decodedTime by `seconds`, or - preferably - snap it exactly to
// the PTS recorded for the current buffer position when a newer entry has
// been passed since the last call.
BaseDecoder.prototype.advanceDecodedTime = function(seconds) {
	if (this.collectTimestamps) {
		var newTimestampIndex = -1;
		var currentIndex = this.bufferGetIndex();
		for (var i = this.timestampIndex; i < this.timestamps.length; i++) {
			if (this.timestamps[i].index > currentIndex) {
				break;
			}
			newTimestampIndex = i;
		}

		// Did we find a new PTS, different from the last? If so, we don't have
		// to advance the decoded time manually and can instead sync it exactly
		// to the PTS.
		if (
			newTimestampIndex !== -1 &&
			newTimestampIndex !== this.timestampIndex
		) {
			this.timestampIndex = newTimestampIndex;
			this.decodedTime = this.timestamps[this.timestampIndex].time;
			return;
		}
	}

	this.decodedTime += seconds;
};

BaseDecoder.prototype.getCurrentTime = function() {
	return this.decodedTime;
};

return BaseDecoder;

})();
|
||||
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
JSMpeg.Source.Fetch = (function(){ "use strict";

// Streams a URL via the Fetch API and forwards each received chunk to the
// connected destination (typically a demuxer).
// Fixes: the onSourceEstablished/onSourceCompleted callbacks were stored
// but never invoked (the documented Source API expects them to fire), and
// a non-OK HTTP response fell into an empty else branch that silently
// swallowed the error.
var FetchSource = function(url, options) {
	this.url = url;
	this.destination = null;
	this.request = null;
	this.streaming = true;

	this.completed = false;
	this.established = false;
	this.progress = 0;
	this.aborted = false;

	this.onEstablishedCallback = options.onSourceEstablished;
	this.onCompletedCallback = options.onSourceCompleted;
};

FetchSource.prototype.connect = function(destination) {
	this.destination = destination;
};

// Open the connection and start pumping chunks into the destination.
FetchSource.prototype.start = function() {
	var params = {
		method: 'GET',
		headers: new Headers(),
		cache: 'default'
	};

	self.fetch(this.url, params).then(function(res) {
		if (res.ok && (res.status >= 200 && res.status <= 299)) {
			this.progress = 1;
			this.established = true;
			if (this.onEstablishedCallback) {
				this.onEstablishedCallback(this);
			}
			return this.pump(res.body.getReader());
		}
		else {
			// Surface HTTP errors instead of swallowing them silently.
			console.warn(
				'JSMpeg: fetch for ' + this.url +
				' failed with HTTP status ' + res.status
			);
		}
	}.bind(this)).catch(function(err) {
		throw(err);
	});
};

// Recursively read chunks from the stream reader until done or aborted.
FetchSource.prototype.pump = function(reader) {
	return reader.read().then(function(result) {
		if (result.done) {
			this.completed = true;
			if (this.onCompletedCallback) {
				this.onCompletedCallback(this);
			}
		}
		else {
			if (this.aborted) {
				// Stop reading and release the stream.
				return reader.cancel();
			}

			if (this.destination) {
				this.destination.write(result.value.buffer);
			}

			return this.pump(reader);
		}
	}.bind(this)).catch(function(err) {
		throw(err);
	});
};

FetchSource.prototype.resume = function(secondsHeadroom) {
	// Nothing to do here
};

// Flag the pump loop to cancel the reader on its next iteration.
FetchSource.prototype.abort = function() {
	this.aborted = true;
};

return FetchSource;

})();
|
|
@ -0,0 +1,122 @@
|
|||
/*! jsmpeg v1.0 | (c) Dominic Szablewski | MIT license */


// This sets up the JSMpeg "Namespace". The object is empty apart from the Now()
// utility function and the automatic CreateVideoElements() after DOMReady.
var JSMpeg = {

	// The Player sets up the connections between source, demuxer, decoders,
	// renderer and audio output. It ties everything together, is responsible
	// for scheduling decoding and provides some convenience methods for
	// external users.
	Player: null,

	// A Video Element wraps the Player, shows HTML controls to start/pause
	// the video and handles Audio unlocking on iOS. VideoElements can be
	// created directly in HTML using the <div class="jsmpeg"/> tag.
	VideoElement: null,

	// The BitBuffer wraps a Uint8Array and allows reading an arbitrary number
	// of bits at a time. On writing, the BitBuffer either expands its
	// internal buffer (for static files) or deletes old data (for streaming).
	BitBuffer: null,

	// A Source provides raw data from HTTP, a WebSocket connection or any
	// other means. Sources must support the following API:
	//   .connect(destinationNode)
	//   .write(buffer)
	//   .start() - start reading
	//   .resume(headroom) - continue reading; headroom to play pos in seconds
	//   .established - boolean, true after connection is established
	//   .completed - boolean, true if the source is completely loaded
	//   .progress - float 0-1
	Source: {},

	// A Demuxer may sit between a Source and a Decoder. It separates the
	// incoming raw data into Video, Audio and other Streams. API:
	//   .connect(streamId, destinationNode)
	//   .write(buffer)
	//   .currentTime - float, in seconds
	//   .startTime - float, in seconds
	Demuxer: {},

	// A Decoder accepts an incoming Stream of raw Audio or Video data, buffers
	// it and upon `.decode()` decodes a single frame of data. Video decoders
	// call `destinationNode.render(Y, Cr, CB)` with the decoded pixel data;
	// Audio decoders call `destinationNode.play(left, right)` with the decoded
	// PCM data. API:
	//   .connect(destinationNode)
	//   .write(pts, buffer)
	//   .decode()
	//   .seek(time)
	//   .currentTime - float, in seconds
	//   .startTime - float, in seconds
	Decoder: {},

	// A Renderer accepts raw YCrCb data in 3 separate buffers via the render()
	// method. Renderers typically convert the data into the RGBA color space
	// and draw it on a Canvas, but other output - such as writing PNGs - would
	// be conceivable. API:
	//   .render(y, cr, cb) - pixel data as Uint8Arrays
	//   .enabled - whether the renderer does anything upon receiving data
	Renderer: {},

	// Audio Outputs accept raw Stereo PCM data in 2 separate buffers via the
	// play() method. Outputs typically play the audio on the user's device.
	// API:
	//   .play(sampleRate, left, right) - rate in hertz; PCM data as Uint8Arrays
	//   .stop()
	//   .enqueuedTime - float, in seconds
	//   .enabled - whether the output does anything upon receiving data
	AudioOutput: {},

	// Clock in seconds; prefers the high-resolution performance.now() and
	// falls back to Date.now().
	Now: function() {
		return window.performance
			? window.performance.now() / 1000
			: Date.now() / 1000;
	},

	// Instantiate a VideoElement for every <div class="jsmpeg"> on the page.
	CreateVideoElements: function() {
		var elements = document.querySelectorAll('.jsmpeg');
		for (var i = 0; i < elements.length; i++) {
			new JSMpeg.VideoElement(elements[i]);
		}
	},

	// Fill `array` with `value`; falls back to a manual loop where
	// TypedArray.prototype.fill is unavailable.
	Fill: function(array, value) {
		if (array.fill) {
			array.fill(value);
		}
		else {
			for (var i = 0; i < array.length; i++) {
				array[i] = value;
			}
		}
	},

	// Decode a base64 string into a newly allocated ArrayBuffer (used for
	// the inlined WASM binary, see WASM_BINARY_INLINED below).
	Base64ToArrayBuffer: function(base64) {
		var binary = window.atob(base64);
		var length = binary.length;
		var bytes = new Uint8Array(length);
		for (var i = 0; i < length; i++) {
			bytes[i] = binary.charCodeAt(i);
		}
		return bytes.buffer;
	},

	// The build process may append `JSMpeg.WASM_BINARY_INLINED = base64data;`
	// to the minified source.
	// If this property is present, jsmpeg will use the inlined binary data
	// instead of trying to load a jsmpeg.wasm file via Ajax.
	WASM_BINARY_INLINED: null
};
|
||||
|
||||
// Automatically create players for all found <div class="jsmpeg"/> elements -
// either once the DOM is ready, or right away if the document has already
// finished loading.
if (document.readyState !== 'complete') {
	document.addEventListener('DOMContentLoaded', JSMpeg.CreateVideoElements);
}
else {
	JSMpeg.CreateVideoElements();
}
|
||||
|
||||
|
|
@ -0,0 +1,124 @@
|
|||
JSMpeg.Decoder.MP2AudioWASM = (function(){ "use strict";

// Based on kjmp2 by Martin J. Fiedler
// http://keyj.emphy.de/kjmp2/

// Thin JS wrapper around the WASM MP2 audio decoder. The bit buffer lives
// inside the WASM heap, so the bufferGetIndex/bufferSetIndex/bufferWrite
// hooks of the base decoder are overridden to call into the module's
// exported functions instead.
var MP2WASM = function(options) {
	JSMpeg.Decoder.Base.call(this, options);

	this.onDecodeCallback = options.onAudioDecode;
	this.module = options.wasmModule;

	this.bufferSize = options.audioBufferSize || 128*1024;
	this.bufferMode = options.streaming
		? JSMpeg.BitBuffer.MODE.EVICT
		: JSMpeg.BitBuffer.MODE.EXPAND;

	// 0 until the first frame has been decoded and the rate read back.
	this.sampleRate = 0;
};

MP2WASM.prototype = Object.create(JSMpeg.Decoder.Base.prototype);
MP2WASM.prototype.constructor = MP2WASM;

// Lazily create the native decoder instance. Requires the WASM module to
// be compiled/instantiated already; warns and bails out otherwise.
MP2WASM.prototype.initializeWasmDecoder = function() {
	if (!this.module.instance) {
		console.warn('JSMpeg: WASM module not compiled yet');
		return;
	}
	this.instance = this.module.instance;
	this.functions = this.module.instance.exports;
	this.decoder = this.functions._mp2_decoder_create(this.bufferSize, this.bufferMode);
};

MP2WASM.prototype.destroy = function() {
	if (!this.decoder) {
		return;
	}
	this.functions._mp2_decoder_destroy(this.decoder);
};

// Bit-buffer position accessors, forwarded to the WASM-side buffer.
MP2WASM.prototype.bufferGetIndex = function() {
	if (!this.decoder) {
		return;
	}
	return this.functions._mp2_decoder_get_index(this.decoder);
};

MP2WASM.prototype.bufferSetIndex = function(index) {
	if (!this.decoder) {
		return;
	}
	this.functions._mp2_decoder_set_index(this.decoder, index);
};

// Copy the incoming chunks into the WASM heap at the decoder's write
// pointer and notify the decoder. Returns the number of bytes written.
MP2WASM.prototype.bufferWrite = function(buffers) {
	if (!this.decoder) {
		this.initializeWasmDecoder();
	}

	var totalLength = 0;
	for (var i = 0; i < buffers.length; i++) {
		totalLength += buffers[i].length;
	}

	var ptr = this.functions._mp2_decoder_get_write_ptr(this.decoder, totalLength);
	for (var i = 0; i < buffers.length; i++) {
		this.instance.heapU8.set(buffers[i], ptr);
		ptr += buffers[i].length;
	}

	this.functions._mp2_decoder_did_write(this.decoder, totalLength);
	return totalLength;
};

// Decode one frame and push the resulting PCM data to the connected audio
// output. Returns true if a frame was decoded, false otherwise.
MP2WASM.prototype.decode = function() {
	var startTime = JSMpeg.Now();

	if (!this.decoder) {
		return false;
	}

	var decodedBytes = this.functions._mp2_decoder_decode(this.decoder);
	if (decodedBytes === 0) {
		return false;
	}

	if (!this.sampleRate) {
		this.sampleRate = this.functions._mp2_decoder_get_sample_rate(this.decoder);
	}

	if (this.destination) {
		// Create a Float32 View into the modules output channel data
		var leftPtr = this.functions._mp2_decoder_get_left_channel_ptr(this.decoder),
			rightPtr = this.functions._mp2_decoder_get_right_channel_ptr(this.decoder);

		// Byte pointers -> Float32 element offsets into the heap view.
		var leftOffset = leftPtr / Float32Array.BYTES_PER_ELEMENT,
			rightOffset = rightPtr / Float32Array.BYTES_PER_ELEMENT;

		var left = this.instance.heapF32.subarray(leftOffset, leftOffset + MP2WASM.SAMPLES_PER_FRAME),
			right = this.instance.heapF32.subarray(rightOffset, rightOffset + MP2WASM.SAMPLES_PER_FRAME);

		this.destination.play(this.sampleRate, left, right);
	}

	this.advanceDecodedTime(MP2WASM.SAMPLES_PER_FRAME / this.sampleRate);

	var elapsedTime = JSMpeg.Now() - startTime;
	if (this.onDecodeCallback) {
		this.onDecodeCallback(this, elapsedTime);
	}
	return true;
};


// Current playback time: last decoded PTS minus the audio still queued in
// the output that hasn't been heard yet.
MP2WASM.prototype.getCurrentTime = function() {
	var enqueuedTime = this.destination ? this.destination.enqueuedTime : 0;
	return this.decodedTime - enqueuedTime;
};

// Samples per MP2 frame (fixed for MPEG-1 Audio Layer II).
MP2WASM.SAMPLES_PER_FRAME = 1152;

return MP2WASM;

})();
|
||||
|
|
@ -0,0 +1,690 @@
|
|||
JSMpeg.Decoder.MP2Audio = (function(){ "use strict";
|
||||
|
||||
// Based on kjmp2 by Martin J. Fiedler
|
||||
// http://keyj.emphy.de/kjmp2/
|
||||
|
||||
// Pure-JavaScript MP2 (MPEG-1 Audio Layer II) decoder.
// Based on kjmp2 by Martin J. Fiedler, http://keyj.emphy.de/kjmp2/
var MP2 = function(options) {
	JSMpeg.Decoder.Base.call(this, options);

	this.onDecodeCallback = options.onAudioDecode;

	// Streaming sources evict consumed data; static sources let the buffer
	// grow instead.
	var bufferSize = options.audioBufferSize || 128*1024;
	var bufferMode;
	if (options.streaming) {
		bufferMode = JSMpeg.BitBuffer.MODE.EVICT;
	}
	else {
		bufferMode = JSMpeg.BitBuffer.MODE.EXPAND;
	}

	this.bits = new JSMpeg.BitBuffer(bufferSize, bufferMode);

	// PCM output buffers, one MP2 frame (1152 samples) per channel.
	this.left = new Float32Array(1152);
	this.right = new Float32Array(1152);
	this.sampleRate = 44100;

	// Synthesis filterbank state: the window coefficients (duplicated so
	// reads never wrap), the two per-channel V vectors, the U work buffer
	// and the ring position into V.
	this.D = new Float32Array(1024);
	this.D.set(MP2.SYNTHESIS_WINDOW, 0);
	this.D.set(MP2.SYNTHESIS_WINDOW, 512);
	this.V = [new Float32Array(1024), new Float32Array(1024)];
	this.U = new Int32Array(32);
	this.VPos = 0;

	// Per-channel, per-subband decoding state.
	this.allocation = [new Array(32), new Array(32)];
	this.scaleFactorInfo = [new Uint8Array(32), new Uint8Array(32)];
	this.scaleFactor = [new Array(32), new Array(32)];
	this.sample = [new Array(32), new Array(32)];

	for (var ch = 0; ch < 2; ch++) {
		for (var sb = 0; sb < 32; sb++) {
			this.scaleFactor[ch][sb] = [0, 0, 0];
			this.sample[ch][sb] = [0, 0, 0];
		}
	}
};
|
||||
|
||||
// MP2 inherits the buffer hooks and timestamp bookkeeping from the base
// decoder.
MP2.prototype = Object.create(JSMpeg.Decoder.Base.prototype);
MP2.prototype.constructor = MP2;
|
||||
|
||||
// Decode a single MP2 frame from the bit buffer and hand the resulting PCM
// data to the connected audio output. Returns true if a frame was decoded,
// false when no (valid) frame is available.
MP2.prototype.decode = function() {
	var startTime = JSMpeg.Now();

	var pos = this.bits.index >> 3;
	if (pos >= this.bits.byteLength) {
		return false;
	}

	var decoded = this.decodeFrame(this.left, this.right);
	// Snap the read position to the frame boundary. Note this runs even
	// when decoding failed (decoded === 0): it then restores byte alignment
	// at the old position, so the order of these two statements matters.
	this.bits.index = (pos + decoded) << 3;
	if (!decoded) {
		return false;
	}

	if (this.destination) {
		this.destination.play(this.sampleRate, this.left, this.right);
	}

	this.advanceDecodedTime(this.left.length / this.sampleRate);

	var elapsedTime = JSMpeg.Now() - startTime;
	if (this.onDecodeCallback) {
		this.onDecodeCallback(this, elapsedTime);
	}
	return true;
};
|
||||
|
||||
// Current playback time: the last decoded PTS minus the audio that is
// still queued in the output and has not been heard yet.
MP2.prototype.getCurrentTime = function() {
	var queued = 0;
	if (this.destination) {
		queued = this.destination.enqueuedTime;
	}
	return this.decodedTime - queued;
};
|
||||
|
||||
// Decode one complete MP2 frame into the `left`/`right` Float32 output
// buffers (1152 samples each). Returns the size of the consumed frame in
// bytes, or 0 when the data at the current position is not a valid frame.
// Also updates this.sampleRate from the frame header.
MP2.prototype.decodeFrame = function(left, right) {
	// Check for valid header: syncword OK, MPEG-Audio Layer 2
	var sync = this.bits.read(11),
		version = this.bits.read(2),
		layer = this.bits.read(2),
		hasCRC = !this.bits.read(1);

	if (
		sync !== MP2.FRAME_SYNC ||
		version !== MP2.VERSION.MPEG_1 ||
		layer !== MP2.LAYER.II
	) {
		return 0; // Invalid header or unsupported version
	}

	var bitrateIndex = this.bits.read(4) - 1;
	if (bitrateIndex > 13) {
		return 0;  // Invalid bit rate or 'free format'
	}

	var sampleRateIndex = this.bits.read(2);
	var sampleRate = MP2.SAMPLE_RATE[sampleRateIndex];
	if (sampleRateIndex === 3) {
		return 0; // Invalid sample rate
	}
	// MPEG-2 (LSR) uses the second half of the rate/bitrate tables.
	if (version === MP2.VERSION.MPEG_2) {
		sampleRateIndex += 4;
		bitrateIndex += 14;
	}
	var padding = this.bits.read(1),
		privat = this.bits.read(1),
		mode = this.bits.read(2);

	// Parse the mode_extension, set up the stereo bound
	var bound = 0;
	if (mode === MP2.MODE.JOINT_STEREO) {
		bound = (this.bits.read(2) + 1) << 2;
	}
	else {
		this.bits.skip(2);
		bound = (mode === MP2.MODE.MONO) ? 0 : 32;
	}

	// Discard the last 4 bits of the header and the CRC value, if present
	this.bits.skip(4);
	if (hasCRC) {
		this.bits.skip(16);
	}

	// Compute the frame size
	// NOTE(review): `var sampleRate` re-declares the variable from above;
	// the re-read is deliberate, since sampleRateIndex may have been
	// adjusted (+4) for MPEG-2 in the meantime.
	var bitrate = MP2.BIT_RATE[bitrateIndex],
		sampleRate = MP2.SAMPLE_RATE[sampleRateIndex],
		frameSize = ((144000 * bitrate / sampleRate) + padding)|0;


	// Prepare the quantizer table lookups
	var tab3 = 0;
	var sblimit = 0;
	if (version === MP2.VERSION.MPEG_2) {
		// MPEG-2 (LSR)
		tab3 = 2;
		sblimit = 30;
	}
	else {
		// MPEG-1
		var tab1 = (mode === MP2.MODE.MONO) ? 0 : 1;
		var tab2 = MP2.QUANT_LUT_STEP_1[tab1][bitrateIndex];
		tab3 = MP2.QUANT_LUT_STEP_2[tab2][sampleRateIndex];
		// Lower 6 bits: subband limit; upper bits: quantizer table selector.
		sblimit = tab3 & 63;
		tab3 >>= 6;
	}

	if (bound > sblimit) {
		bound = sblimit;
	}

	// Read the allocation information
	for (var sb = 0; sb < bound; sb++) {
		this.allocation[0][sb] = this.readAllocation(sb, tab3);
		this.allocation[1][sb] = this.readAllocation(sb, tab3);
	}

	// Above the stereo bound both channels share one allocation entry.
	for (var sb = bound; sb < sblimit; sb++) {
		this.allocation[0][sb] =
			this.allocation[1][sb] =
			this.readAllocation(sb, tab3);
	}

	// Read scale factor selector information
	var channels = (mode === MP2.MODE.MONO) ? 1 : 2;
	for (var sb = 0; sb < sblimit; sb++) {
		// NOTE(review): missing `var` on `ch` here; it is only
		// function-scoped (not an implicit global) because of the
		// `var ch` declaration in the loop further below.
		for (ch = 0; ch < channels; ch++) {
			if (this.allocation[ch][sb]) {
				this.scaleFactorInfo[ch][sb] = this.bits.read(2);
			}
		}
		if (mode === MP2.MODE.MONO) {
			this.scaleFactorInfo[1][sb] = this.scaleFactorInfo[0][sb];
		}
	}

	// Read scale factors
	for (var sb = 0; sb < sblimit; sb++) {
		for (var ch = 0; ch < channels; ch++) {
			if (this.allocation[ch][sb]) {
				var sf = this.scaleFactor[ch][sb];
				// The selector decides how the three scale factors of the
				// frame's three parts are shared.
				switch (this.scaleFactorInfo[ch][sb]) {
					case 0:
						sf[0] = this.bits.read(6);
						sf[1] = this.bits.read(6);
						sf[2] = this.bits.read(6);
						break;
					case 1:
						sf[0] =
						sf[1] = this.bits.read(6);
						sf[2] = this.bits.read(6);
						break;
					case 2:
						sf[0] =
						sf[1] =
						sf[2] = this.bits.read(6);
						break;
					case 3:
						sf[0] = this.bits.read(6);
						sf[1] =
						sf[2] = this.bits.read(6);
						break;
				}
			}
		}
		if (mode === MP2.MODE.MONO) {
			this.scaleFactor[1][sb][0] = this.scaleFactor[0][sb][0];
			this.scaleFactor[1][sb][1] = this.scaleFactor[0][sb][1];
			this.scaleFactor[1][sb][2] = this.scaleFactor[0][sb][2];
		}
	}

	// Coefficient input and reconstruction
	var outPos = 0;
	for (var part = 0; part < 3; part++) {
		for (var granule = 0; granule < 4; granule++) {

			// Read the samples
			for (var sb = 0; sb < bound; sb++) {
				this.readSamples(0, sb, part);
				this.readSamples(1, sb, part);
			}
			// Above the bound the right channel duplicates the left.
			for (var sb = bound; sb < sblimit; sb++) {
				this.readSamples(0, sb, part);
				this.sample[1][sb][0] = this.sample[0][sb][0];
				this.sample[1][sb][1] = this.sample[0][sb][1];
				this.sample[1][sb][2] = this.sample[0][sb][2];
			}
			// Subbands above the limit carry no data at all.
			for (var sb = sblimit; sb < 32; sb++) {
				this.sample[0][sb][0] = 0;
				this.sample[0][sb][1] = 0;
				this.sample[0][sb][2] = 0;
				this.sample[1][sb][0] = 0;
				this.sample[1][sb][1] = 0;
				this.sample[1][sb][2] = 0;
			}

			// Synthesis loop
			for (var p = 0; p < 3; p++) {
				// Shifting step
				this.VPos = (this.VPos - 64) & 1023;

				for (var ch = 0; ch < 2; ch++) {
					MP2.MatrixTransform(this.sample[ch], p, this.V[ch], this.VPos);

					// Build U, windowing, calculate output
					JSMpeg.Fill(this.U, 0);

					var dIndex = 512 - (this.VPos >> 1);
					var vIndex = (this.VPos % 128) >> 1;
					while (vIndex < 1024) {
						for (var i = 0; i < 32; ++i) {
							this.U[i] += this.D[dIndex++] * this.V[ch][vIndex++];
						}

						vIndex += 128-32;
						dIndex += 64-32;
					}

					// Second pass over V with the window offsets rewound.
					vIndex = (128-32 + 1024) - vIndex;
					dIndex -= (512 - 32);
					while (vIndex < 1024) {
						for (var i = 0; i < 32; ++i) {
							this.U[i] += this.D[dIndex++] * this.V[ch][vIndex++];
						}

						vIndex += 128-32;
						dIndex += 64-32;
					}

					// Output samples
					var outChannel = ch === 0 ? left : right;
					for (var j = 0; j < 32; j++) {
						outChannel[outPos + j] = this.U[j] / 2147418112;
					}
				} // End of synthesis channel loop
				outPos += 32;
			} // End of synthesis sub-block loop

		} // Decoding of the granule finished
	}

	this.sampleRate = sampleRate;
	return frameSize;
};
|
||||
|
||||
// Read the bit-allocation entry for one subband via the quantizer lookup
// tables. Returns the selected quantizer descriptor, or 0 when no bits are
// allocated to this subband.
MP2.prototype.readAllocation = function(sb, tab3) {
	var tab4 = MP2.QUANT_LUT_STEP_3[tab3][sb];
	// Upper bits of tab4: how many bits to read; lower 4 bits: table row.
	var allocationBits = tab4 >> 4;
	var qtabIndex = MP2.QUANT_LUT_STEP4[tab4 & 15][this.bits.read(allocationBits)];
	if (qtabIndex) {
		return MP2.QUANT_TAB[qtabIndex - 1];
	}
	return 0;
};
|
||||
|
||||
// Read and dequantize the three samples of one granule for channel `ch`,
// subband `sb`, using the scale factor of frame part `part`. Results are
// written into this.sample[ch][sb].
MP2.prototype.readSamples = function(ch, sb, part) {
	var q = this.allocation[ch][sb],
		sf = this.scaleFactor[ch][sb][part],
		sample = this.sample[ch][sb],
		val = 0;

	if (!q) {
		// No bits allocated for this subband
		sample[0] = sample[1] = sample[2] = 0;
		return;
	}

	// Resolve scalefactor: split sf into an exponent (sf/3) and a mantissa
	// table index (sf%3), rounding the shifted base value.
	if (sf === 63) {
		sf = 0;
	}
	else {
		var shift = (sf / 3)|0;
		sf = (MP2.SCALEFACTOR_BASE[sf % 3] + ((1 << shift) >> 1)) >> shift;
	}

	// Decode samples
	var adj = q.levels;
	if (q.group) {
		// Decode grouped samples: three values packed into one base-`adj`
		// coded integer.
		val = this.bits.read(q.bits);
		sample[0] = val % adj;
		val = (val / adj)|0;
		sample[1] = val % adj;
		sample[2] = (val / adj)|0;
	}
	else {
		// Decode direct samples
		sample[0] = this.bits.read(q.bits);
		sample[1] = this.bits.read(q.bits);
		sample[2] = this.bits.read(q.bits);
	}

	// Postmultiply samples: requantize, then apply the scale factor. The
	// sf >> 12 / sf & 4095 split processes the scale factor in two halves,
	// apparently to avoid overflowing 32-bit intermediates - confirm
	// against kjmp2 before changing.
	var scale = (65536 / (adj + 1))|0;
	adj = ((adj + 1) >> 1) - 1;

	val = (adj - sample[0]) * scale;
	sample[0] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12;

	val = (adj - sample[1]) * scale;
	sample[1] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12;

	val = (adj - sample[2]) * scale;
	sample[2] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12;
};
|
||||
|
||||
// Fast 32-point transform for the Layer II synthesis filterbank.
// Reads column `ss` of the 32-row input `s`, writes 64 output values into
// `d` starting at `dp` (the second half mirrored and negated, d[dp+16] = 0).
// The constants are the fixed butterfly coefficients of this transform;
// statement order is significant throughout — do not reorder.
MP2.MatrixTransform = function(s, ss, d, dp) {
	var t01, t02, t03, t04, t05, t06, t07, t08, t09, t10, t11, t12,
		t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24,
		t25, t26, t27, t28, t29, t30, t31, t32, t33;

	// Stage 1: 16 sum/difference butterflies over mirrored input pairs.
	t01 = s[ 0][ss] + s[31][ss]; t02 = (s[ 0][ss] - s[31][ss]) * 0.500602998235;
	t03 = s[ 1][ss] + s[30][ss]; t04 = (s[ 1][ss] - s[30][ss]) * 0.505470959898;
	t05 = s[ 2][ss] + s[29][ss]; t06 = (s[ 2][ss] - s[29][ss]) * 0.515447309923;
	t07 = s[ 3][ss] + s[28][ss]; t08 = (s[ 3][ss] - s[28][ss]) * 0.53104259109;
	t09 = s[ 4][ss] + s[27][ss]; t10 = (s[ 4][ss] - s[27][ss]) * 0.553103896034;
	t11 = s[ 5][ss] + s[26][ss]; t12 = (s[ 5][ss] - s[26][ss]) * 0.582934968206;
	t13 = s[ 6][ss] + s[25][ss]; t14 = (s[ 6][ss] - s[25][ss]) * 0.622504123036;
	t15 = s[ 7][ss] + s[24][ss]; t16 = (s[ 7][ss] - s[24][ss]) * 0.674808341455;
	t17 = s[ 8][ss] + s[23][ss]; t18 = (s[ 8][ss] - s[23][ss]) * 0.744536271002;
	t19 = s[ 9][ss] + s[22][ss]; t20 = (s[ 9][ss] - s[22][ss]) * 0.839349645416;
	t21 = s[10][ss] + s[21][ss]; t22 = (s[10][ss] - s[21][ss]) * 0.972568237862;
	t23 = s[11][ss] + s[20][ss]; t24 = (s[11][ss] - s[20][ss]) * 1.16943993343;
	t25 = s[12][ss] + s[19][ss]; t26 = (s[12][ss] - s[19][ss]) * 1.48416461631;
	t27 = s[13][ss] + s[18][ss]; t28 = (s[13][ss] - s[18][ss]) * 2.05778100995;
	t29 = s[14][ss] + s[17][ss]; t30 = (s[14][ss] - s[17][ss]) * 3.40760841847;
	t31 = s[15][ss] + s[16][ss]; t32 = (s[15][ss] - s[16][ss]) * 10.1900081235;

	// Stages 2-5 on the "sum" half.
	t33 = t01 + t31; t31 = (t01 - t31) * 0.502419286188;
	t01 = t03 + t29; t29 = (t03 - t29) * 0.52249861494;
	t03 = t05 + t27; t27 = (t05 - t27) * 0.566944034816;
	t05 = t07 + t25; t25 = (t07 - t25) * 0.64682178336;
	t07 = t09 + t23; t23 = (t09 - t23) * 0.788154623451;
	t09 = t11 + t21; t21 = (t11 - t21) * 1.06067768599;
	t11 = t13 + t19; t19 = (t13 - t19) * 1.72244709824;
	t13 = t15 + t17; t17 = (t15 - t17) * 5.10114861869;
	t15 = t33 + t13; t13 = (t33 - t13) * 0.509795579104;
	t33 = t01 + t11; t01 = (t01 - t11) * 0.601344886935;
	t11 = t03 + t09; t09 = (t03 - t09) * 0.899976223136;
	t03 = t05 + t07; t07 = (t05 - t07) * 2.56291544774;
	t05 = t15 + t03; t15 = (t15 - t03) * 0.541196100146;
	t03 = t33 + t11; t11 = (t33 - t11) * 1.30656296488;
	t33 = t05 + t03; t05 = (t05 - t03) * 0.707106781187;
	t03 = t15 + t11; t15 = (t15 - t11) * 0.707106781187;
	t03 += t15;
	t11 = t13 + t07; t13 = (t13 - t07) * 0.541196100146;
	t07 = t01 + t09; t09 = (t01 - t09) * 1.30656296488;
	t01 = t11 + t07; t07 = (t11 - t07) * 0.707106781187;
	t11 = t13 + t09; t13 = (t13 - t09) * 0.707106781187;
	t11 += t13; t01 += t11;
	t11 += t07; t07 += t13;
	t09 = t31 + t17; t31 = (t31 - t17) * 0.509795579104;
	t17 = t29 + t19; t29 = (t29 - t19) * 0.601344886935;
	t19 = t27 + t21; t21 = (t27 - t21) * 0.899976223136;
	t27 = t25 + t23; t23 = (t25 - t23) * 2.56291544774;
	t25 = t09 + t27; t09 = (t09 - t27) * 0.541196100146;
	t27 = t17 + t19; t19 = (t17 - t19) * 1.30656296488;
	t17 = t25 + t27; t27 = (t25 - t27) * 0.707106781187;
	t25 = t09 + t19; t19 = (t09 - t19) * 0.707106781187;
	t25 += t19;
	t09 = t31 + t23; t31 = (t31 - t23) * 0.541196100146;
	t23 = t29 + t21; t21 = (t29 - t21) * 1.30656296488;
	t29 = t09 + t23; t23 = (t09 - t23) * 0.707106781187;
	t09 = t31 + t21; t31 = (t31 - t21) * 0.707106781187;
	t09 += t31; t29 += t09; t09 += t23; t23 += t31;
	t17 += t29; t29 += t25; t25 += t09; t09 += t27;
	t27 += t23; t23 += t19; t19 += t31;

	// Same cascade on the "difference" half.
	t21 = t02 + t32; t02 = (t02 - t32) * 0.502419286188;
	t32 = t04 + t30; t04 = (t04 - t30) * 0.52249861494;
	t30 = t06 + t28; t28 = (t06 - t28) * 0.566944034816;
	t06 = t08 + t26; t08 = (t08 - t26) * 0.64682178336;
	t26 = t10 + t24; t10 = (t10 - t24) * 0.788154623451;
	t24 = t12 + t22; t22 = (t12 - t22) * 1.06067768599;
	t12 = t14 + t20; t20 = (t14 - t20) * 1.72244709824;
	t14 = t16 + t18; t16 = (t16 - t18) * 5.10114861869;
	t18 = t21 + t14; t14 = (t21 - t14) * 0.509795579104;
	t21 = t32 + t12; t32 = (t32 - t12) * 0.601344886935;
	t12 = t30 + t24; t24 = (t30 - t24) * 0.899976223136;
	t30 = t06 + t26; t26 = (t06 - t26) * 2.56291544774;
	t06 = t18 + t30; t18 = (t18 - t30) * 0.541196100146;
	t30 = t21 + t12; t12 = (t21 - t12) * 1.30656296488;
	t21 = t06 + t30; t30 = (t06 - t30) * 0.707106781187;
	t06 = t18 + t12; t12 = (t18 - t12) * 0.707106781187;
	t06 += t12;
	t18 = t14 + t26; t26 = (t14 - t26) * 0.541196100146;
	t14 = t32 + t24; t24 = (t32 - t24) * 1.30656296488;
	t32 = t18 + t14; t14 = (t18 - t14) * 0.707106781187;
	t18 = t26 + t24; t24 = (t26 - t24) * 0.707106781187;
	t18 += t24; t32 += t18;
	t18 += t14; t26 = t14 + t24;
	t14 = t02 + t16; t02 = (t02 - t16) * 0.509795579104;
	t16 = t04 + t20; t04 = (t04 - t20) * 0.601344886935;
	t20 = t28 + t22; t22 = (t28 - t22) * 0.899976223136;
	t28 = t08 + t10; t10 = (t08 - t10) * 2.56291544774;
	t08 = t14 + t28; t14 = (t14 - t28) * 0.541196100146;
	t28 = t16 + t20; t20 = (t16 - t20) * 1.30656296488;
	t16 = t08 + t28; t28 = (t08 - t28) * 0.707106781187;
	t08 = t14 + t20; t20 = (t14 - t20) * 0.707106781187;
	t08 += t20;
	t14 = t02 + t10; t02 = (t02 - t10) * 0.541196100146;
	t10 = t04 + t22; t22 = (t04 - t22) * 1.30656296488;
	t04 = t14 + t10; t10 = (t14 - t10) * 0.707106781187;
	t14 = t02 + t22; t02 = (t02 - t22) * 0.707106781187;
	t14 += t02; t04 += t14; t14 += t10; t10 += t02;
	t16 += t04; t04 += t08; t08 += t14; t14 += t28;
	t28 += t10; t10 += t20; t20 += t02; t21 += t16;
	t16 += t32; t32 += t04; t04 += t06; t06 += t08;
	t08 += t18; t18 += t14; t14 += t30; t30 += t28;
	t28 += t26; t26 += t10; t10 += t12; t12 += t20;
	t20 += t24; t24 += t02;

	// Output stage: store the 64 values; indices 33..63 mirror 0..31 with
	// negated sign, and the center sample d[dp+16] is always zero.
	d[dp + 48] = -t33;
	d[dp + 49] = d[dp + 47] = -t21;
	d[dp + 50] = d[dp + 46] = -t17;
	d[dp + 51] = d[dp + 45] = -t16;
	d[dp + 52] = d[dp + 44] = -t01;
	d[dp + 53] = d[dp + 43] = -t32;
	d[dp + 54] = d[dp + 42] = -t29;
	d[dp + 55] = d[dp + 41] = -t04;
	d[dp + 56] = d[dp + 40] = -t03;
	d[dp + 57] = d[dp + 39] = -t06;
	d[dp + 58] = d[dp + 38] = -t25;
	d[dp + 59] = d[dp + 37] = -t08;
	d[dp + 60] = d[dp + 36] = -t11;
	d[dp + 61] = d[dp + 35] = -t18;
	d[dp + 62] = d[dp + 34] = -t09;
	d[dp + 63] = d[dp + 33] = -t14;
	d[dp + 32] = -t05;
	d[dp + 0] = t05; d[dp + 31] = -t30;
	d[dp + 1] = t30; d[dp + 30] = -t27;
	d[dp + 2] = t27; d[dp + 29] = -t28;
	d[dp + 3] = t28; d[dp + 28] = -t07;
	d[dp + 4] = t07; d[dp + 27] = -t26;
	d[dp + 5] = t26; d[dp + 26] = -t23;
	d[dp + 6] = t23; d[dp + 25] = -t10;
	d[dp + 7] = t10; d[dp + 24] = -t15;
	d[dp + 8] = t15; d[dp + 23] = -t12;
	d[dp + 9] = t12; d[dp + 22] = -t19;
	d[dp + 10] = t19; d[dp + 21] = -t20;
	d[dp + 11] = t20; d[dp + 20] = -t13;
	d[dp + 12] = t13; d[dp + 19] = -t24;
	d[dp + 13] = t24; d[dp + 18] = -t31;
	d[dp + 14] = t31; d[dp + 17] = -t02;
	d[dp + 15] = t02; d[dp + 16] = 0.0;
};
|
||||
|
||||
// Eleven set bits that mark the start of an MPEG audio frame header.
MP2.FRAME_SYNC = 0x7ff;

// MPEG audio version field of the frame header.
MP2.VERSION = {
	MPEG_2_5: 0x0,
	MPEG_2: 0x2,
	MPEG_1: 0x3
};

// Layer field of the frame header; this decoder handles Layer II (MP2).
MP2.LAYER = {
	III: 0x1,
	II: 0x2,
	I: 0x3
};

// Channel mode field of the frame header.
MP2.MODE = {
	STEREO: 0x0,
	JOINT_STEREO: 0x1,
	DUAL_CHANNEL: 0x2,
	MONO: 0x3
};

// Sample rates in Hz; first four entries are the MPEG-1 variants,
// last four the MPEG-2 variants (0 = reserved/invalid).
MP2.SAMPLE_RATE = new Uint16Array([
	44100, 48000, 32000, 0, // MPEG-1
	22050, 24000, 16000, 0 // MPEG-2
]);

// Bit rates in kbit/s for MPEG-1 (first row) and MPEG-2 (second row).
MP2.BIT_RATE = new Uint16Array([
	32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, // MPEG-1
	8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160 // MPEG-2
]);

// Fixed-point base values used by readSamples() to derive the scalefactor
// multiplier (one base per sfIndex % 3, shifted down by sfIndex / 3).
MP2.SCALEFACTOR_BASE = new Uint32Array([
	0x02000000, 0x01965FEA, 0x01428A30
]);
|
||||
|
||||
// Coefficients of the Layer II synthesis window, consumed by the synthesis
// sub-block loop of the frame decoder. The values are stored in the order
// the synthesis loop reads them; the occasional sign flips mid-run (e.g.
// -104.0 followed by 106.5) are part of the table, not typos — do not
// "fix" them.
MP2.SYNTHESIS_WINDOW = new Float32Array([
	0.0, -0.5, -0.5, -0.5, -0.5, -0.5,
	-0.5, -1.0, -1.0, -1.0, -1.0, -1.5,
	-1.5, -2.0, -2.0, -2.5, -2.5, -3.0,
	-3.5, -3.5, -4.0, -4.5, -5.0, -5.5,
	-6.5, -7.0, -8.0, -8.5, -9.5, -10.5,
	-12.0, -13.0, -14.5, -15.5, -17.5, -19.0,
	-20.5, -22.5, -24.5, -26.5, -29.0, -31.5,
	-34.0, -36.5, -39.5, -42.5, -45.5, -48.5,
	-52.0, -55.5, -58.5, -62.5, -66.0, -69.5,
	-73.5, -77.0, -80.5, -84.5, -88.0, -91.5,
	-95.0, -98.0, -101.0, -104.0, 106.5, 109.0,
	111.0, 112.5, 113.5, 114.0, 114.0, 113.5,
	112.0, 110.5, 107.5, 104.0, 100.0, 94.5,
	88.5, 81.5, 73.0, 63.5, 53.0, 41.5,
	28.5, 14.5, -1.0, -18.0, -36.0, -55.5,
	-76.5, -98.5, -122.0, -147.0, -173.5, -200.5,
	-229.5, -259.5, -290.5, -322.5, -355.5, -389.5,
	-424.0, -459.5, -495.5, -532.0, -568.5, -605.0,
	-641.5, -678.0, -714.0, -749.0, -783.5, -817.0,
	-849.0, -879.5, -908.5, -935.0, -959.5, -981.0,
	-1000.5, -1016.0, -1028.5, -1037.5, -1042.5, -1043.5,
	-1040.0, -1031.5, 1018.5, 1000.0, 976.0, 946.5,
	911.0, 869.5, 822.0, 767.5, 707.0, 640.0,
	565.5, 485.0, 397.0, 302.5, 201.0, 92.5,
	-22.5, -144.0, -272.5, -407.0, -547.5, -694.0,
	-846.0, -1003.0, -1165.0, -1331.5, -1502.0, -1675.5,
	-1852.5, -2031.5, -2212.5, -2394.0, -2576.5, -2758.5,
	-2939.5, -3118.5, -3294.5, -3467.5, -3635.5, -3798.5,
	-3955.0, -4104.5, -4245.5, -4377.5, -4499.0, -4609.5,
	-4708.0, -4792.5, -4863.5, -4919.0, -4958.0, -4979.5,
	-4983.0, -4967.5, -4931.5, -4875.0, -4796.0, -4694.5,
	-4569.5, -4420.0, -4246.0, -4046.0, -3820.0, -3567.0,
	3287.0, 2979.5, 2644.0, 2280.5, 1888.0, 1467.5,
	1018.5, 541.0, 35.0, -499.0, -1061.0, -1650.0,
	-2266.5, -2909.0, -3577.0, -4270.0, -4987.5, -5727.5,
	-6490.0, -7274.0, -8077.5, -8899.5, -9739.0, -10594.5,
	-11464.5, -12347.0, -13241.0, -14144.5, -15056.0, -15973.5,
	-16895.5, -17820.0, -18744.5, -19668.0, -20588.0, -21503.0,
	-22410.5, -23308.5, -24195.0, -25068.5, -25926.5, -26767.0,
	-27589.0, -28389.0, -29166.5, -29919.0, -30644.5, -31342.0,
	-32009.5, -32645.0, -33247.0, -33814.5, -34346.0, -34839.5,
	-35295.0, -35710.0, -36084.5, -36417.5, -36707.5, -36954.0,
	-37156.5, -37315.0, -37428.0, -37496.0, 37519.0, 37496.0,
	37428.0, 37315.0, 37156.5, 36954.0, 36707.5, 36417.5,
	36084.5, 35710.0, 35295.0, 34839.5, 34346.0, 33814.5,
	33247.0, 32645.0, 32009.5, 31342.0, 30644.5, 29919.0,
	29166.5, 28389.0, 27589.0, 26767.0, 25926.5, 25068.5,
	24195.0, 23308.5, 22410.5, 21503.0, 20588.0, 19668.0,
	18744.5, 17820.0, 16895.5, 15973.5, 15056.0, 14144.5,
	13241.0, 12347.0, 11464.5, 10594.5, 9739.0, 8899.5,
	8077.5, 7274.0, 6490.0, 5727.5, 4987.5, 4270.0,
	3577.0, 2909.0, 2266.5, 1650.0, 1061.0, 499.0,
	-35.0, -541.0, -1018.5, -1467.5, -1888.0, -2280.5,
	-2644.0, -2979.5, 3287.0, 3567.0, 3820.0, 4046.0,
	4246.0, 4420.0, 4569.5, 4694.5, 4796.0, 4875.0,
	4931.5, 4967.5, 4983.0, 4979.5, 4958.0, 4919.0,
	4863.5, 4792.5, 4708.0, 4609.5, 4499.0, 4377.5,
	4245.5, 4104.5, 3955.0, 3798.5, 3635.5, 3467.5,
	3294.5, 3118.5, 2939.5, 2758.5, 2576.5, 2394.0,
	2212.5, 2031.5, 1852.5, 1675.5, 1502.0, 1331.5,
	1165.0, 1003.0, 846.0, 694.0, 547.5, 407.0,
	272.5, 144.0, 22.5, -92.5, -201.0, -302.5,
	-397.0, -485.0, -565.5, -640.0, -707.0, -767.5,
	-822.0, -869.5, -911.0, -946.5, -976.0, -1000.0,
	1018.5, 1031.5, 1040.0, 1043.5, 1042.5, 1037.5,
	1028.5, 1016.0, 1000.5, 981.0, 959.5, 935.0,
	908.5, 879.5, 849.0, 817.0, 783.5, 749.0,
	714.0, 678.0, 641.5, 605.0, 568.5, 532.0,
	495.5, 459.5, 424.0, 389.5, 355.5, 322.5,
	290.5, 259.5, 229.5, 200.5, 173.5, 147.0,
	122.0, 98.5, 76.5, 55.5, 36.0, 18.0,
	1.0, -14.5, -28.5, -41.5, -53.0, -63.5,
	-73.0, -81.5, -88.5, -94.5, -100.0, -104.0,
	-107.5, -110.5, -112.0, -113.5, -114.0, -114.0,
	-113.5, -112.5, -111.0, -109.0, 106.5, 104.0,
	101.0, 98.0, 95.0, 91.5, 88.0, 84.5,
	80.5, 77.0, 73.5, 69.5, 66.0, 62.5,
	58.5, 55.5, 52.0, 48.5, 45.5, 42.5,
	39.5, 36.5, 34.0, 31.5, 29.0, 26.5,
	24.5, 22.5, 20.5, 19.0, 17.5, 15.5,
	14.5, 13.0, 12.0, 10.5, 9.5, 8.5,
	8.0, 7.0, 6.5, 5.5, 5.0, 4.5,
	4.0, 3.5, 3.5, 3.0, 2.5, 2.5,
	2.0, 2.0, 1.5, 1.5, 1.0, 1.0,
	1.0, 1.0, 0.5, 0.5, 0.5, 0.5,
	0.5, 0.5
]);
|
||||
|
||||
// Quantizer lookup, step 1: bitrate classes
MP2.QUANT_LUT_STEP_1 = [
	// 32, 48, 56, 64, 80, 96,112,128,160,192,224,256,320,384 <- bitrate
	[ 0, 0, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2], // mono
	// 16, 24, 28, 32, 40, 48, 56, 64, 80, 96,112,128,160,192 <- bitrate / chan
	[ 0, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 2] // stereo
];

// Quantizer lookup, step 2: bitrate class, sample rate -> B2 table idx, sblimit
// NOTE: this object form of QUANT_TAB is only consumed while building
// QUANT_LUT_STEP_2 immediately below; it is intentionally replaced further
// down by the QUANT_TAB *array* of quantizer descriptions that
// readAllocation() uses at decode time.
MP2.QUANT_TAB = {
	A: (27 | 64), // Table 3-B.2a: high-rate, sblimit = 27
	B: (30 | 64), // Table 3-B.2b: high-rate, sblimit = 30
	C: 8, // Table 3-B.2c: low-rate, sblimit = 8
	D: 12 // Table 3-B.2d: low-rate, sblimit = 12
};

MP2.QUANT_LUT_STEP_2 = [
	// 44.1 kHz, 48 kHz, 32 kHz
	[MP2.QUANT_TAB.C, MP2.QUANT_TAB.C, MP2.QUANT_TAB.D], // 32 - 48 kbit/sec/ch
	[MP2.QUANT_TAB.A, MP2.QUANT_TAB.A, MP2.QUANT_TAB.A], // 56 - 80 kbit/sec/ch
	[MP2.QUANT_TAB.B, MP2.QUANT_TAB.A, MP2.QUANT_TAB.B] // 96+ kbit/sec/ch
];

// Quantizer lookup, step 3: B2 table, subband -> nbal, row index
// (upper 4 bits: nbal, lower 4 bits: row index)
MP2.QUANT_LUT_STEP_3 = [
	// Low-rate table (3-B.2c and 3-B.2d)
	[
		0x44,0x44,
		0x34,0x34,0x34,0x34,0x34,0x34,0x34,0x34,0x34,0x34
	],
	// High-rate table (3-B.2a and 3-B.2b)
	[
		0x43,0x43,0x43,
		0x42,0x42,0x42,0x42,0x42,0x42,0x42,0x42,
		0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,
		0x20,0x20,0x20,0x20,0x20,0x20,0x20
	],
	// MPEG-2 LSR table (B.2 in ISO 13818-3)
	[
		0x45,0x45,0x45,0x45,
		0x34,0x34,0x34,0x34,0x34,0x34,0x34,
		0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,
		0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24
	]
];

// Quantizer lookup, step 4: table row, allocation[] value -> quant table index
// (a value of 0 means "no quantizer"; otherwise it is a 1-based index into
// the QUANT_TAB array below)
MP2.QUANT_LUT_STEP4 = [
	[0, 1, 2, 17],
	[0, 1, 2, 3, 4, 5, 6, 17],
	[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 17],
	[0, 1, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17],
	[0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17],
	[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
];

// Quantizer descriptions used at decode time (replaces the step-2 helper
// object of the same name above, which has already been consumed).
// levels: number of quantization steps; group: whether three samples are
// packed into one code word; bits: code word length.
MP2.QUANT_TAB = [
	{levels: 3, group: 1, bits: 5}, // 1
	{levels: 5, group: 1, bits: 7}, // 2
	{levels: 7, group: 0, bits: 3}, // 3
	{levels: 9, group: 1, bits: 10}, // 4
	{levels: 15, group: 0, bits: 4}, // 5
	{levels: 31, group: 0, bits: 5}, // 6
	{levels: 63, group: 0, bits: 6}, // 7
	{levels: 127, group: 0, bits: 7}, // 8
	{levels: 255, group: 0, bits: 8}, // 9
	{levels: 511, group: 0, bits: 9}, // 10
	{levels: 1023, group: 0, bits: 10}, // 11
	{levels: 2047, group: 0, bits: 11}, // 12
	{levels: 4095, group: 0, bits: 12}, // 13
	{levels: 8191, group: 0, bits: 13}, // 14
	{levels: 16383, group: 0, bits: 14}, // 15
	{levels: 32767, group: 0, bits: 15}, // 16
	{levels: 65535, group: 0, bits: 16} // 17
];
|
||||
|
||||
return MP2;
|
||||
|
||||
})();
|
||||
|
|
@ -0,0 +1,133 @@
|
|||
JSMpeg.Decoder.MPEG1VideoWASM = (function(){ "use strict";
|
||||
|
||||
// MPEG1 video decoder backed by a WebAssembly module. The actual decoder
// instance is created lazily, once the module has finished compiling.
var MPEG1WASM = function(options) {
	JSMpeg.Decoder.Base.call(this, options);

	this.onDecodeCallback = options.onVideoDecode;
	this.module = options.wasmModule;

	this.bufferSize = options.videoBufferSize || 512 * 1024;
	if (options.streaming) {
		this.bufferMode = JSMpeg.BitBuffer.MODE.EVICT;
	}
	else {
		this.bufferMode = JSMpeg.BitBuffer.MODE.EXPAND;
	}

	this.decodeFirstFrame = (options.decodeFirstFrame !== false);
	this.hasSequenceHeader = false;
};

MPEG1WASM.prototype = Object.create(JSMpeg.Decoder.Base.prototype);
MPEG1WASM.prototype.constructor = MPEG1WASM;
|
||||
|
||||
// Create the native decoder instance. No-op (with a warning) while the
// WASM module is still compiling; callers must re-check this.decoder.
MPEG1WASM.prototype.initializeWasmDecoder = function() {
	if (!this.module.instance) {
		console.warn('JSMpeg: WASM module not compiled yet');
		return;
	}

	var instance = this.module.instance;
	this.instance = instance;
	this.functions = instance.exports;
	this.decoder = this.functions._mpeg1_decoder_create(this.bufferSize, this.bufferMode);
};
|
||||
|
||||
// Release the native decoder instance, if one was created.
MPEG1WASM.prototype.destroy = function() {
	if (!this.decoder) {
		return;
	}
	this.functions._mpeg1_decoder_destroy(this.decoder);
	// Clear the handle so a repeated destroy() (or any later native call
	// guarded by `this.decoder`) cannot operate on freed WASM memory.
	this.decoder = null;
};
|
||||
|
||||
// Current read position of the decoder's bit buffer, or undefined when
// the decoder hasn't been created yet.
MPEG1WASM.prototype.bufferGetIndex = function() {
	if (this.decoder) {
		return this.functions._mpeg1_decoder_get_index(this.decoder);
	}
};
|
||||
|
||||
// Move the decoder's bit buffer read position; ignored when the decoder
// hasn't been created yet.
MPEG1WASM.prototype.bufferSetIndex = function(index) {
	if (this.decoder) {
		this.functions._mpeg1_decoder_set_index(this.decoder, index);
	}
};
|
||||
|
||||
// Copy the given chunks into the decoder's WASM-side buffer.
// Returns the number of bytes written (0 when the decoder isn't ready).
MPEG1WASM.prototype.bufferWrite = function(buffers) {
	if (!this.decoder) {
		this.initializeWasmDecoder();
		// Initialization silently fails while the WASM module is still
		// compiling; previously we fell through and called
		// _mpeg1_decoder_get_write_ptr() with an undefined decoder handle.
		if (!this.decoder) {
			return 0;
		}
	}

	var totalLength = 0;
	for (var i = 0; i < buffers.length; i++) {
		totalLength += buffers[i].length;
	}

	// Reserve space in WASM memory, then copy each chunk in sequence.
	var ptr = this.functions._mpeg1_decoder_get_write_ptr(this.decoder, totalLength);
	for (var j = 0; j < buffers.length; j++) {
		this.instance.heapU8.set(buffers[j], ptr);
		ptr += buffers[j].length;
	}

	this.functions._mpeg1_decoder_did_write(this.decoder, totalLength);
	return totalLength;
};
|
||||
|
||||
// Accept demuxed data (via the base class) and, once the stream's sequence
// header has been seen by the native decoder, load its parameters.
MPEG1WASM.prototype.write = function(pts, buffers) {
	JSMpeg.Decoder.Base.prototype.write.call(this, pts, buffers);

	// Guard on this.decoder: if the WASM module isn't compiled yet the
	// decoder (and this.functions) don't exist, and the unguarded call
	// would throw.
	if (
		!this.hasSequenceHeader &&
		this.decoder &&
		this.functions._mpeg1_decoder_has_sequence_header(this.decoder)
	) {
		this.loadSequenceHeader();
	}
};
|
||||
|
||||
// Pull frame rate and coded size from the native decoder, resize the
// render destination and optionally decode the first frame right away.
MPEG1WASM.prototype.loadSequenceHeader = function() {
	this.hasSequenceHeader = true;
	this.frameRate = this.functions._mpeg1_decoder_get_frame_rate(this.decoder);
	this.codedSize = this.functions._mpeg1_decoder_get_coded_size(this.decoder);

	if (this.destination) {
		var width = this.functions._mpeg1_decoder_get_width(this.decoder);
		var height = this.functions._mpeg1_decoder_get_height(this.decoder);
		this.destination.resize(width, height);
	}

	if (this.decodeFirstFrame) {
		this.decode();
	}
};
|
||||
|
||||
// Decode one frame. Returns true when a frame was produced, false when
// the decoder isn't ready or has no complete frame buffered.
MPEG1WASM.prototype.decode = function() {
	if (!this.decoder) {
		return false;
	}

	var startTime = JSMpeg.Now();
	if (!this.functions._mpeg1_decoder_decode(this.decoder)) {
		return false;
	}

	if (this.destination) {
		// The decoded Y/Cr/Cb planes live in WASM memory; hand the
		// renderer zero-copy subarray views onto the heap.
		var yPtr = this.functions._mpeg1_decoder_get_y_ptr(this.decoder);
		var crPtr = this.functions._mpeg1_decoder_get_cr_ptr(this.decoder);
		var cbPtr = this.functions._mpeg1_decoder_get_cb_ptr(this.decoder);

		var chromaSize = this.codedSize >> 2;
		var y = this.instance.heapU8.subarray(yPtr, yPtr + this.codedSize);
		var cr = this.instance.heapU8.subarray(crPtr, crPtr + chromaSize);
		var cb = this.instance.heapU8.subarray(cbPtr, cbPtr + chromaSize);

		this.destination.render(y, cr, cb, false);
	}

	this.advanceDecodedTime(1 / this.frameRate);

	if (this.onDecodeCallback) {
		this.onDecodeCallback(this, JSMpeg.Now() - startTime);
	}
	return true;
};
|
||||
|
||||
return MPEG1WASM;
|
||||
|
||||
})();
|
||||
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,324 @@
|
|||
JSMpeg.Player = (function(){ "use strict";
|
||||
|
||||
// Wires together source, demuxer, decoders, renderer and audio output for
// a given URL. `options` may be omitted entirely.
var Player = function(url, options) {
	// Tolerate a missing options argument; the code below reads
	// `options.source` etc. directly and previously threw a TypeError
	// when no options object was passed.
	options = options || {};
	this.options = options;

	// Pick the data source: custom class, WebSocket (live streaming),
	// progressive Ajax, or plain Ajax; record the streaming mode for the
	// decoders and the update loop.
	if (options.source) {
		this.source = new options.source(url, options);
		options.streaming = !!this.source.streaming;
	}
	else if (url.match(/^wss?:\/\//)) {
		this.source = new JSMpeg.Source.WebSocket(url, options);
		options.streaming = true;
	}
	else if (options.progressive !== false) {
		this.source = new JSMpeg.Source.AjaxProgressive(url, options);
		options.streaming = false;
	}
	else {
		this.source = new JSMpeg.Source.Ajax(url, options);
		options.streaming = false;
	}

	this.maxAudioLag = options.maxAudioLag || 0.25;
	this.loop = options.loop !== false;
	this.autoplay = !!options.autoplay || options.streaming;

	this.demuxer = new JSMpeg.Demuxer.TS(options);
	this.source.connect(this.demuxer);

	// Prefer the WASM decoders when supported and not explicitly disabled.
	if (!options.disableWebAssembly && JSMpeg.WASMModule.IsSupported()) {
		this.wasmModule = JSMpeg.WASMModule.GetModule();
		options.wasmModule = this.wasmModule;
	}

	if (options.video !== false) {
		this.video = options.wasmModule
			? new JSMpeg.Decoder.MPEG1VideoWASM(options)
			: new JSMpeg.Decoder.MPEG1Video(options);

		this.renderer = !options.disableGl && JSMpeg.Renderer.WebGL.IsSupported()
			? new JSMpeg.Renderer.WebGL(options)
			: new JSMpeg.Renderer.Canvas2D(options);

		this.demuxer.connect(JSMpeg.Demuxer.TS.STREAM.VIDEO_1, this.video);
		this.video.connect(this.renderer);
	}

	if (options.audio !== false && JSMpeg.AudioOutput.WebAudio.IsSupported()) {
		this.audio = options.wasmModule
			? new JSMpeg.Decoder.MP2AudioWASM(options)
			: new JSMpeg.Decoder.MP2Audio(options);
		this.audioOut = new JSMpeg.AudioOutput.WebAudio(options);
		this.demuxer.connect(JSMpeg.Demuxer.TS.STREAM.AUDIO_1, this.audio);
		this.audio.connect(this.audioOut);
	}

	Object.defineProperty(this, 'currentTime', {
		get: this.getCurrentTime,
		set: this.setCurrentTime
	});
	Object.defineProperty(this, 'volume', {
		get: this.getVolume,
		set: this.setVolume
	});

	this.paused = true;
	this.unpauseOnShow = false;
	if (options.pauseWhenHidden !== false) {
		document.addEventListener('visibilitychange', this.showHide.bind(this));
	}

	// If we have WebAssembly support, wait until the module is compiled
	// before loading the source. Otherwise the decoders won't know what
	// to do with the source data.
	if (this.wasmModule) {
		if (this.wasmModule.ready) {
			this.startLoading();
		}
		else if (JSMpeg.WASM_BINARY_INLINED) {
			var wasm = JSMpeg.Base64ToArrayBuffer(JSMpeg.WASM_BINARY_INLINED);
			this.wasmModule.loadFromBuffer(wasm, this.startLoading.bind(this));
		}
		else {
			this.wasmModule.loadFromFile('jsmpeg.wasm', this.startLoading.bind(this));
		}
	}
	else {
		this.startLoading();
	}
};
|
||||
|
||||
// Kick off the source and, when autoplay is on, start playback.
Player.prototype.startLoading = function() {
	this.source.start();
	if (!this.autoplay) {
		return;
	}
	this.play();
};
|
||||
|
||||
// Page Visibility handler: pause while the tab is hidden and resume on
// return if playback was active before hiding.
Player.prototype.showHide = function(ev) {
	if (document.visibilityState === 'hidden') {
		this.unpauseOnShow = this.wantsToPlay;
		this.pause();
		return;
	}
	if (this.unpauseOnShow) {
		this.play();
	}
};
|
||||
|
||||
// Start (or resume) playback by scheduling the update loop.
Player.prototype.play = function(ev) {
	// Already scheduled — nothing to do.
	if (this.animationId) {
		return;
	}

	this.wantsToPlay = true;
	this.paused = false;
	this.animationId = requestAnimationFrame(this.update.bind(this));
};
|
||||
|
||||
// Stop the update loop and freeze playback state.
Player.prototype.pause = function(ev) {
	if (this.paused) {
		return;
	}

	cancelAnimationFrame(this.animationId);
	this.animationId = null;
	this.wantsToPlay = false;
	this.isPlaying = false;
	this.paused = true;

	if (this.audio && this.audio.canPlay) {
		// The audio output may already have samples enqueued past the
		// playhead; stop it and rewind to the exact current time.
		this.audioOut.stop();
		this.seek(this.currentTime);
	}

	if (this.options.onPause) {
		this.options.onPause(this);
	}
};
|
||||
|
||||
// Getter backing the 'volume' property; 0 when there is no audio output.
Player.prototype.getVolume = function() {
	if (!this.audioOut) {
		return 0;
	}
	return this.audioOut.volume;
};
|
||||
|
||||
// Setter backing the 'volume' property; ignored without an audio output.
Player.prototype.setVolume = function(volume) {
	if (!this.audioOut) {
		return;
	}
	this.audioOut.volume = volume;
};
|
||||
|
||||
// Pause and rewind to the start; re-display the first frame unless frame
// decoding on load was explicitly disabled.
Player.prototype.stop = function(ev) {
	this.pause();
	this.seek(0);

	if (this.video && this.options.decodeFirstFrame !== false) {
		this.video.decode();
	}
};
|
||||
|
||||
// Tear down the whole pipeline: source, decoders, renderer, audio output.
Player.prototype.destroy = function() {
	this.pause();
	this.source.destroy();
	if (this.video) {
		this.video.destroy();
	}
	if (this.renderer) {
		this.renderer.destroy();
	}
	if (this.audio) {
		this.audio.destroy();
	}
	if (this.audioOut) {
		this.audioOut.destroy();
	}
};
|
||||
|
||||
// Seek to `time` (seconds from the start of the stream) on both tracks.
Player.prototype.seek = function(time) {
	// Use the audio track's start PTS as the reference when it is
	// playable, otherwise the video's. Both lookups are guarded: an
	// audio-only player has no this.video, and the original code threw
	// here before the audio track became playable.
	var startOffset = 0;
	if (this.audio && this.audio.canPlay) {
		startOffset = this.audio.startTime;
	}
	else if (this.video) {
		startOffset = this.video.startTime;
	}

	if (this.video) {
		this.video.seek(time + startOffset);
	}
	if (this.audio) {
		this.audio.seek(time + startOffset);
	}

	this.startTime = JSMpeg.Now() - time;
};
|
||||
|
||||
// Getter backing the 'currentTime' property: seconds since stream start,
// taken from the audio track when playable, otherwise from the video.
Player.prototype.getCurrentTime = function() {
	if (this.audio && this.audio.canPlay) {
		return this.audio.currentTime - this.audio.startTime;
	}
	// Audio-only players have no video track; report 0 until the audio
	// becomes playable instead of throwing (the unguarded original did).
	if (this.video) {
		return this.video.currentTime - this.video.startTime;
	}
	return 0;
};
|
||||
|
||||
// Setter backing the 'currentTime' property; delegates to seek().
Player.prototype.setCurrentTime = function(time) {
	this.seek(time);
};
|
||||
|
||||
// Per-frame driver, re-armed via requestAnimationFrame on every call.
Player.prototype.update = function() {
	this.animationId = requestAnimationFrame(this.update.bind(this));

	// Until the source has enough data, only render loading progress.
	if (!this.source.established) {
		if (this.renderer) {
			this.renderer.renderProgress(this.source.progress);
		}
		return;
	}

	// First update after data became available: anchor the wallclock and
	// fire the onPlay callback.
	if (!this.isPlaying) {
		this.isPlaying = true;
		this.startTime = JSMpeg.Now() - this.currentTime;

		if (this.options.onPlay) {
			this.options.onPlay(this);
		}
	}

	if (this.options.streaming) {
		this.updateForStreaming();
	}
	else {
		this.updateForStaticFile();
	}
};
|
||||
|
||||
// Low-latency path: decode everything buffered so far right away.
Player.prototype.updateForStreaming = function() {
	if (this.video) {
		this.video.decode();
	}

	if (!this.audio) {
		return;
	}

	var didDecode = false;
	do {
		// If playback has fallen behind (too much audio enqueued), mute
		// the output while we burn through the backlog.
		if (this.audioOut.enqueuedTime > this.maxAudioLag) {
			this.audioOut.resetEnqueuedTime();
			this.audioOut.enabled = false;
		}
		didDecode = this.audio.decode();
	} while (didDecode);
	this.audioOut.enabled = true;
};
|
||||
|
||||
// Decode and display a single video frame; false when not possible.
Player.prototype.nextFrame = function() {
	if (!this.source.established || !this.video) {
		return false;
	}
	return this.video.decode();
};
|
||||
|
||||
// Playback path for non-streaming sources: keep audio ahead of the
// playhead, sync video to the audio (or to the wallclock when there is no
// audio), and handle end-of-data (loop/pause) and stalls.
Player.prototype.updateForStaticFile = function() {
	var notEnoughData = false,
		headroom = 0;

	// If we have an audio track, we always try to sync the video to the
	// audio. Gaps and discontinuities are far more perceptible in audio
	// than in video.
	if (this.audio && this.audio.canPlay) {
		// Do we have to decode and enqueue some more audio data?
		while (
			!notEnoughData &&
			this.audio.decodedTime - this.audio.currentTime < 0.25
		) {
			notEnoughData = !this.audio.decode();
		}

		// Sync video to audio
		if (this.video && this.video.currentTime < this.audio.currentTime) {
			notEnoughData = !this.video.decode();
		}

		headroom = this.demuxer.currentTime - this.audio.currentTime;
	}
	else if (this.video) {
		// Video only - sync it to the player's wallclock
		var targetTime = (JSMpeg.Now() - this.startTime) + this.video.startTime,
			lateTime = targetTime - this.video.currentTime,
			frameTime = 1 / this.video.frameRate;

		// (the original re-tested this.video here, which this branch
		// already guarantees)
		if (lateTime > 0) {
			// If the video is too far behind (>2 frames), simply reset the
			// target time to the next frame instead of trying to catch up.
			if (lateTime > frameTime * 2) {
				this.startTime += lateTime;
			}

			notEnoughData = !this.video.decode();
		}

		headroom = this.demuxer.currentTime - targetTime;
	}

	// Notify the source of the playhead headroom, so it can decide whether
	// to continue loading further data.
	this.source.resume(headroom);

	// If we failed to decode and the source is complete, it means we have
	// reached the end of our data. We may want to loop.
	if (notEnoughData && this.source.completed) {
		if (this.loop) {
			this.seek(0);
		}
		else {
			this.pause();
			if (this.options.onEnded) {
				this.options.onEnded(this);
			}
		}
	}

	// If there's not enough data and the source is not completed, we have
	// just stalled.
	else if (notEnoughData && this.options.onStalled) {
		this.options.onStalled(this);
	}
};
|
||||
|
||||
return Player;
|
||||
|
||||
})();
|
||||
|
|
@ -0,0 +1,228 @@
|
|||
JSMpeg.Demuxer.TS = (function(){ "use strict";
|
||||
|
||||
// MPEG-TS demuxer: splits the transport stream into PES packets and feeds
// them to the connected decoders.
var TS = function(options) {
	// Bit reader over the current chunk plus any carried-over bytes.
	this.bits = null;
	this.leftoverBytes = null;

	// Per-stream PES reassembly state and PID -> stream-id mapping.
	this.pidsToStreamIds = {};
	this.pesPacketInfo = {};

	this.guessVideoFrameEnd = true;

	this.startTime = 0;
	this.currentTime = 0;
};
|
||||
|
||||
// Register a destination decoder for a PES stream id. The info record
// accumulates one partially-assembled PES packet at a time.
TS.prototype.connect = function(streamId, destination) {
	var info = {
		destination: destination,
		currentLength: 0,
		totalLength: 0,
		pts: 0,
		buffers: []
	};
	this.pesPacketInfo[streamId] = info;
};
|
||||
|
||||
// Consume one chunk of transport-stream data, parsing as many complete
// 188-byte TS packets as are available.
TS.prototype.write = function(buffer) {
	// Prepend bytes left over from the previous call so packets that
	// straddle chunk boundaries are parsed correctly.
	if (this.leftoverBytes) {
		var totalLength = buffer.byteLength + this.leftoverBytes.byteLength;
		this.bits = new JSMpeg.BitBuffer(totalLength);
		this.bits.write([this.leftoverBytes, buffer]);
	}
	else {
		this.bits = new JSMpeg.BitBuffer(buffer);
	}

	while (this.bits.has(188 << 3) && this.parsePacket()) {}

	// Stash the incomplete tail for the next write.
	var remaining = this.bits.byteLength - (this.bits.index >> 3);
	this.leftoverBytes = remaining > 0
		? this.bits.bytes.subarray(this.bits.index >> 3)
		: null;
};
|
||||
|
||||
TS.prototype.parsePacket = function() {
|
||||
// Check if we're in sync with packet boundaries; attempt to resync if not.
|
||||
if (this.bits.read(8) !== 0x47) {
|
||||
if (!this.resync()) {
|
||||
// Couldn't resync; maybe next time...
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
var end = (this.bits.index >> 3) + 187;
|
||||
var transportError = this.bits.read(1),
|
||||
payloadStart = this.bits.read(1),
|
||||
transportPriority = this.bits.read(1),
|
||||
pid = this.bits.read(13),
|
||||
transportScrambling = this.bits.read(2),
|
||||
adaptationField = this.bits.read(2),
|
||||
continuityCounter = this.bits.read(4);
|
||||
|
||||
|
||||
// If this is the start of a new payload; signal the end of the previous
|
||||
// frame, if we didn't do so already.
|
||||
var streamId = this.pidsToStreamIds[pid];
|
||||
if (payloadStart && streamId) {
|
||||
var pi = this.pesPacketInfo[streamId];
|
||||
if (pi && pi.currentLength) {
|
||||
this.packetComplete(pi);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract current payload
|
||||
if (adaptationField & 0x1) {
|
||||
if ((adaptationField & 0x2)) {
|
||||
var adaptationFieldLength = this.bits.read(8);
|
||||
this.bits.skip(adaptationFieldLength << 3);
|
||||
}
|
||||
|
||||
if (payloadStart && this.bits.nextBytesAreStartCode()) {
|
||||
this.bits.skip(24);
|
||||
streamId = this.bits.read(8);
|
||||
this.pidsToStreamIds[pid] = streamId;
|
||||
|
||||
var packetLength = this.bits.read(16)
|
||||
this.bits.skip(8);
|
||||
var ptsDtsFlag = this.bits.read(2);
|
||||
this.bits.skip(6);
|
||||
var headerLength = this.bits.read(8);
|
||||
var payloadBeginIndex = this.bits.index + (headerLength << 3);
|
||||
|
||||
var pi = this.pesPacketInfo[streamId];
|
||||
if (pi) {
|
||||
var pts = 0;
|
||||
if (ptsDtsFlag & 0x2) {
|
||||
// The Presentation Timestamp is encoded as 33(!) bit
|
||||
// integer, but has a "marker bit" inserted at weird places
|
||||
// in between, making the whole thing 5 bytes in size.
|
||||
// You can't make this shit up...
|
||||
this.bits.skip(4);
|
||||
var p32_30 = this.bits.read(3);
|
||||
this.bits.skip(1);
|
||||
var p29_15 = this.bits.read(15);
|
||||
this.bits.skip(1);
|
||||
var p14_0 = this.bits.read(15);
|
||||
this.bits.skip(1);
|
||||
|
||||
// Can't use bit shifts here; we need 33 bits of precision,
|
||||
// so we're using JavaScript's double number type. Also
|
||||
// divide by the 90khz clock to get the pts in seconds.
|
||||
pts = (p32_30 * 1073741824 + p29_15 * 32768 + p14_0)/90000;
|
||||
|
||||
this.currentTime = pts;
|
||||
if (this.startTime === -1) {
|
||||
this.startTime = pts;
|
||||
}
|
||||
}
|
||||
|
||||
var payloadLength = packetLength
|
||||
? packetLength - headerLength - 3
|
||||
: 0;
|
||||
this.packetStart(pi, pts, payloadLength);
|
||||
}
|
||||
|
||||
// Skip the rest of the header without parsing it
|
||||
this.bits.index = payloadBeginIndex;
|
||||
}
|
||||
|
||||
if (streamId) {
|
||||
// Attempt to detect if the PES packet is complete. For Audio (and
|
||||
// other) packets, we received a total packet length with the PES
|
||||
// header, so we can check the current length.
|
||||
|
||||
// For Video packets, we have to guess the end by detecting if this
|
||||
// TS packet was padded - there's no good reason to pad a TS packet
|
||||
// in between, but it might just fit exactly. If this fails, we can
|
||||
// only wait for the next PES header for that stream.
|
||||
|
||||
var pi = this.pesPacketInfo[streamId];
|
||||
if (pi) {
|
||||
var start = this.bits.index >> 3;
|
||||
var complete = this.packetAddData(pi, start, end);
|
||||
|
||||
var hasPadding = !payloadStart && (adaptationField & 0x2);
|
||||
if (complete || (this.guessVideoFrameEnd && hasPadding)) {
|
||||
this.packetComplete(pi);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.bits.index = end << 3;
|
||||
return true;
|
||||
};
|
||||
|
||||
TS.prototype.resync = function() {
|
||||
// Check if we have enough data to attempt a resync. We need 5 full packets.
|
||||
if (!this.bits.has((188 * 6) << 3)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
var byteIndex = this.bits.index >> 3;
|
||||
|
||||
// Look for the first sync token in the first 187 bytes
|
||||
for (var i = 0; i < 187; i++) {
|
||||
if (this.bits.bytes[byteIndex + i] === 0x47) {
|
||||
|
||||
// Look for 4 more sync tokens, each 188 bytes appart
|
||||
var foundSync = true;
|
||||
for (var j = 1; j < 5; j++) {
|
||||
if (this.bits.bytes[byteIndex + i + 188 * j] !== 0x47) {
|
||||
foundSync = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (foundSync) {
|
||||
this.bits.index = (byteIndex + i + 1) << 3;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// In theory, we shouldn't arrive here. If we do, we had enough data but
|
||||
// still didn't find sync - this can only happen if we were fed garbage
|
||||
// data. Check your source!
|
||||
console.warn('JSMpeg: Possible garbage data. Skipping.');
|
||||
this.bits.skip(187 << 3);
|
||||
return false;
|
||||
};
|
||||
|
||||
TS.prototype.packetStart = function(pi, pts, payloadLength) {
|
||||
pi.totalLength = payloadLength;
|
||||
pi.currentLength = 0;
|
||||
pi.pts = pts;
|
||||
};
|
||||
|
||||
TS.prototype.packetAddData = function(pi, start, end) {
|
||||
pi.buffers.push(this.bits.bytes.subarray(start, end));
|
||||
pi.currentLength += end - start;
|
||||
|
||||
var complete = (pi.totalLength !== 0 && pi.currentLength >= pi.totalLength);
|
||||
return complete;
|
||||
};
|
||||
|
||||
TS.prototype.packetComplete = function(pi) {
|
||||
pi.destination.write(pi.pts, pi.buffers);
|
||||
pi.totalLength = 0;
|
||||
pi.currentLength = 0;
|
||||
pi.buffers = [];
|
||||
};
|
||||
|
||||
TS.STREAM = {
|
||||
PACK_HEADER: 0xBA,
|
||||
SYSTEM_HEADER: 0xBB,
|
||||
PROGRAM_MAP: 0xBC,
|
||||
PRIVATE_1: 0xBD,
|
||||
PADDING: 0xBE,
|
||||
PRIVATE_2: 0xBF,
|
||||
AUDIO_1: 0xC0,
|
||||
VIDEO_1: 0xE0,
|
||||
DIRECTORY: 0xFF
|
||||
};
|
||||
|
||||
return TS;
|
||||
|
||||
})();
|
||||
|
||||
|
|
@ -0,0 +1,162 @@
|
|||
JSMpeg.VideoElement = (function(){ "use strict";

// DOM wrapper around a JSMpeg.Player. Builds a canvas, a play button
// and optional poster/unmute overlays inside the given container,
// reads the container's data-* attributes as player options, and wires
// up the click/touch handlers.
var VideoElement = function(element) {
	var url = element.dataset.url;

	if (!url) {
		throw ("VideoElement has no `data-url` attribute");
	}

	// Setup the div container, canvas and play button
	var addStyles = function(element, styles) {
		for (var name in styles) {
			element.style[name] = styles[name];
		}
	};

	this.container = element;
	addStyles(this.container, {
		display: 'inline-block',
		position: 'relative',
		minWidth: '80px', minHeight: '80px'
	});

	this.canvas = document.createElement('canvas');
	this.canvas.width = 960;
	this.canvas.height = 540;
	addStyles(this.canvas, {
		display: 'block',
		width: '100%'
	});
	this.container.appendChild(this.canvas);

	this.playButton = document.createElement('div');
	this.playButton.innerHTML = VideoElement.PLAY_BUTTON;
	addStyles(this.playButton, {
		zIndex: 2, position: 'absolute',
		top: '0', bottom: '0', left: '0', right: '0',
		maxWidth: '75px', maxHeight: '75px',
		margin: 'auto',
		opacity: '0.7',
		cursor: 'pointer'
	});
	this.container.appendChild(this.playButton);

	// Parse the data-options - we try to decode the values as json. This way
	// we can get proper boolean and number values. If JSON.parse() fails,
	// treat it as a string.
	var options = {canvas: this.canvas};
	for (var option in element.dataset) {
		try {
			options[option] = JSON.parse(element.dataset[option]);
		}
		catch(err) {
			options[option] = element.dataset[option];
		}
	}

	// Create the player instance
	this.player = new JSMpeg.Player(url, options);
	element.playerInstance = this.player;

	// Setup the poster element, if any
	if (options.poster && !options.autoplay && !this.player.options.streaming) {
		// NOTE(review): the player was already constructed above, so setting
		// decodeFirstFrame here cannot affect it - confirm intent upstream.
		options.decodeFirstFrame = false;
		this.poster = new Image();
		this.poster.src = options.poster;
		// NOTE(review): posterLoaded is not defined anywhere on this type,
		// so this listener is a no-op as written.
		// (fixed: the original was missing the statement terminator here)
		this.poster.addEventListener('load', this.posterLoaded);
		addStyles(this.poster, {
			display: 'block', zIndex: 1, position: 'absolute',
			top: 0, left: 0, bottom: 0, right: 0
		});
		this.container.appendChild(this.poster);
	}

	// Add the click handler if this video is pausable
	if (!this.player.options.streaming) {
		this.container.addEventListener('click', this.onClick.bind(this));
	}

	// Hide the play button if this video immediately begins playing
	if (options.autoplay || this.player.options.streaming) {
		this.playButton.style.display = 'none';
	}

	// Set up the unlock audio buton for iOS devices. iOS only allows us to
	// play audio after a user action has initiated playing. For autoplay or
	// streaming players we set up a muted speaker icon as the button. For all
	// others, we can simply use the play button.
	if (this.player.audioOut && !this.player.audioOut.unlocked) {
		var unlockAudioElement = this.container;

		if (options.autoplay || this.player.options.streaming) {
			this.unmuteButton = document.createElement('div');
			this.unmuteButton.innerHTML = VideoElement.UNMUTE_BUTTON;
			addStyles(this.unmuteButton, {
				zIndex: 2, position: 'absolute',
				bottom: '10px', right: '20px',
				width: '75px', height: '75px',
				margin: 'auto',
				opacity: '0.7',
				cursor: 'pointer'
			});
			this.container.appendChild(this.unmuteButton);
			unlockAudioElement = this.unmuteButton;
		}

		this.unlockAudioBound = this.onUnlockAudio.bind(this, unlockAudioElement);
		unlockAudioElement.addEventListener('touchstart', this.unlockAudioBound, false);
		unlockAudioElement.addEventListener('click', this.unlockAudioBound, true);
	}
};

// First user gesture: unlock WebAudio output, then hide the unmute
// overlay (if any) and remove the gesture listeners.
VideoElement.prototype.onUnlockAudio = function(element, ev) {
	if (this.unmuteButton) {
		ev.preventDefault();
		ev.stopPropagation();
	}
	this.player.audioOut.unlock(function(){
		if (this.unmuteButton) {
			this.unmuteButton.style.display = 'none';
		}
		element.removeEventListener('touchstart', this.unlockAudioBound);
		element.removeEventListener('click', this.unlockAudioBound);
	}.bind(this));
};

// Toggle playback; keep the play button and poster visibility in sync.
VideoElement.prototype.onClick = function(ev) {
	if (this.player.isPlaying) {
		this.player.pause();
		this.playButton.style.display = 'block';
	}
	else {
		this.player.play();
		this.playButton.style.display = 'none';
		if (this.poster) {
			this.poster.style.display = 'none';
		}
	}
};

VideoElement.PLAY_BUTTON =
	'<svg style="max-width: 75px; max-height: 75px;" ' +
		'viewBox="0 0 200 200" alt="Play video">' +
		'<circle cx="100" cy="100" r="90" fill="none" '+
			'stroke-width="15" stroke="#fff"/>' +
		'<polygon points="70, 55 70, 145 145, 100" fill="#fff"/>' +
	'</svg>';

VideoElement.UNMUTE_BUTTON =
	'<svg style="max-width: 75px; max-height: 75px;" viewBox="0 0 75 75">' +
		'<polygon class="audio-speaker" stroke="none" fill="#fff" '+
			'points="39,13 22,28 6,28 6,47 21,47 39,62 39,13"/>' +
		'<g stroke="#fff" stroke-width="5">' +
			'<path d="M 49,50 69,26"/>' +
			'<path d="M 69,50 49,26"/>' +
		'</g>' +
	'</svg>';

return VideoElement;

})();
|
||||
|
|
@ -0,0 +1,187 @@
|
|||
JSMpeg.WASMModule = (function(){ "use strict";

// Loader for the jsmpeg WebAssembly module. Fetches or receives the
// .wasm binary, reads its dylink section to size the function table,
// provides the emscripten-style env imports (memory, table, sbrk,
// abort) and exposes typed-array views over the wasm heap.
var WASM = function() {
	this.stackSize = 5 * 1024 * 1024; // emscripten default
	this.pageSize = 64 * 1024; // wasm page size
	this.onInitCallbacks = [];
	this.ready = false;
	this.loadingFromFileStarted = false;
	this.loadingFromBufferStarted = false;
};

// Source/destination interface: lets an Ajax source pipe the binary in.
WASM.prototype.write = function(buffer) {
	this.loadFromBuffer(buffer);
};

// Fetch the .wasm binary from `url` and instantiate it; `callback` is
// invoked with this module (or null on failure) when done.
WASM.prototype.loadFromFile = function(url, callback) {
	if (callback) {
		this.onInitCallbacks.push(callback);
	}

	// Make sure this WASM Module is only instantiated once. If loadFromFile()
	// was already called, bail out here. On instantiation all pending
	// onInitCallbacks will be called.
	if (this.loadingFromFileStarted) {
		return;
	}
	this.loadingFromFileStarted = true;

	// NOTE(review): apparently vestigial - callbacks are dispatched via the
	// onInitCallbacks array, not this property. Kept for compatibility.
	this.onInitCallback = callback;
	var ajax = new JSMpeg.Source.Ajax(url, {});
	ajax.connect(this);
	ajax.start();
};

// Instantiate the module from an ArrayBuffer holding the .wasm binary.
WASM.prototype.loadFromBuffer = function(buffer, callback) {
	if (callback) {
		this.onInitCallbacks.push(callback);
	}

	// Make sure this WASM Module is only instantiated once. If loadFromBuffer()
	// was already called, bail out here. On instantiation all pending
	// onInitCallbacks will be called.
	if (this.loadingFromBufferStarted) {
		return;
	}
	this.loadingFromBufferStarted = true;

	this.moduleInfo = this.readDylinkSection(buffer);
	if (!this.moduleInfo) {
		// Not a usable module; notify all pending callbacks with null.
		for (var i = 0; i < this.onInitCallbacks.length; i++) {
			this.onInitCallbacks[i](null);
		}
		return;
	}

	this.memory = new WebAssembly.Memory({initial: 256});
	var env = {
		memory: this.memory,
		memoryBase: 0,
		__memory_base: 0,
		table: new WebAssembly.Table({initial: this.moduleInfo.tableSize, element: 'anyfunc'}),
		tableBase: 0,
		__table_base: 0,
		abort: this.c_abort.bind(this),
		___assert_fail: this.c_assertFail.bind(this),
		_sbrk: this.c_sbrk.bind(this)
	};

	// Heap starts after the module's static data plus the stack.
	this.brk = this.align(this.moduleInfo.memorySize + this.stackSize);
	WebAssembly.instantiate(buffer, {env: env}).then(function(results){
		this.instance = results.instance;
		if (this.instance.exports.__post_instantiate) {
			this.instance.exports.__post_instantiate();
		}
		this.createHeapViews();
		this.ready = true;
		for (var i = 0; i < this.onInitCallbacks.length; i++) {
			this.onInitCallbacks[i](this);
		}
	}.bind(this));
};

// (Re)build the typed-array views; must be called after memory.grow(),
// which detaches previous views.
WASM.prototype.createHeapViews = function() {
	this.instance.heapU8 = new Uint8Array(this.memory.buffer);
	this.instance.heapU32 = new Uint32Array(this.memory.buffer);
	this.instance.heapF32 = new Float32Array(this.memory.buffer);
};

// Round `addr` up to the module's memory alignment.
WASM.prototype.align = function(addr) {
	var a = Math.pow(2, this.moduleInfo.memoryAlignment);
	return Math.ceil(addr / a) * a;
};

// Minimal sbrk() for the wasm module: bump the break pointer and grow
// the WebAssembly memory when needed. Returns the previous break.
WASM.prototype.c_sbrk = function(size) {
	var previousBrk = this.brk;
	this.brk += size;

	if (this.brk > this.memory.buffer.byteLength) {
		var bytesNeeded = this.brk - this.memory.buffer.byteLength;
		var pagesNeeded = Math.ceil(bytesNeeded / this.pageSize);
		this.memory.grow(pagesNeeded);
		this.createHeapViews();
	}
	return previousBrk;
};

// env.abort import: just log; we can't recover from a wasm abort.
WASM.prototype.c_abort = function(size) {
	console.warn('JSMPeg: WASM abort', arguments);
};

// env.___assert_fail import: log the failed assertion.
WASM.prototype.c_assertFail = function(size) {
	console.warn('JSMPeg: WASM ___assert_fail', arguments);
};


// Read the WASM header and dylink section of the .wasm binary data
// to get the needed table size and static data size. Returns null if
// the header or dylink section is missing.
// https://github.com/WebAssembly/tool-conventions/blob/master/DynamicLinking.md
// https://github.com/kripken/emscripten/blob/20602efb955a7c6c20865a495932427e205651d2/src/support.js
WASM.prototype.readDylinkSection = function(buffer) {
	var bytes = new Uint8Array(buffer);
	var next = 0;

	// Read a LEB128 unsigned varint at `next`, advancing `next`.
	var readVarUint = function () {
		var ret = 0;
		var mul = 1;
		while (1) {
			var byte = bytes[next++];
			ret += ((byte & 0x7f) * mul);
			mul *= 0x80;
			if (!(byte & 0x80)) {
				return ret;
			}
		}
	};

	var matchNextBytes = function(expected) {
		for (var i = 0; i < expected.length; i++) {
			var b = typeof(expected[i]) === 'string'
				? expected[i].charCodeAt(0)
				: expected[i];
			if (bytes[next++] !== b) {
				return false;
			}
		}
		return true;
	};


	// Make sure we have a wasm header
	if (!matchNextBytes([0, 'a', 's', 'm'])) {
		console.warn('JSMpeg: WASM header not found');
		return null;
	}

	// Make sure we have a dylink section
	// (fixed: `next` was re-declared with a second `var` in the original)
	next = 9;
	var sectionSize = readVarUint(); // advances past the section-size field
	if (!matchNextBytes([6, 'd', 'y', 'l', 'i', 'n', 'k'])) {
		console.warn('JSMpeg: No dylink section found in WASM');
		return null;
	}

	return {
		memorySize: readVarUint(),
		memoryAlignment: readVarUint(),
		tableSize: readVarUint(),
		tableAlignment: readVarUint()
	};
};

WASM.IsSupported = function() {
	return (!!window.WebAssembly);
};

// Process-wide singleton so the module is only instantiated once.
WASM.GetModule = function() {
	WASM.CACHED_MODULE = WASM.CACHED_MODULE || new WASM();
	return WASM.CACHED_MODULE;
};

return WASM;

})();
|
||||
|
|
@ -0,0 +1,190 @@
|
|||
#include <string.h>
|
||||
#include <stdbool.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#include "buffer.h"
|
||||
|
||||
// Internal state of the bit buffer. `index` is a BIT offset into
// `bytes`; the capacity and length fields are BYTE counts.
typedef struct bit_buffer_t {
	uint8_t *bytes;             // backing storage
	unsigned int index;         // current read position, in bits
	unsigned int byte_capacity; // allocated size of `bytes`, in bytes
	unsigned int byte_length;   // valid (written) bytes so far
	bit_buffer_mode_t mode;     // EXPAND (realloc) or EVICT (drop read data)
} bit_buffer_t;

// Internal helpers, called from bit_buffer_get_write_ptr().
void bit_buffer_resize(bit_buffer_t *self, unsigned int byte_capacity);
void bit_buffer_evict(bit_buffer_t *self, unsigned int bytes_needed);
|
||||
|
||||
|
||||
|
||||
// Allocate a bit buffer with the given initial capacity (in bytes) and
// overflow strategy. Returns NULL if allocation fails (the original
// dereferenced unchecked malloc() results).
bit_buffer_t *bit_buffer_create(unsigned int initial_byte_capacity, bit_buffer_mode_t mode) {
	bit_buffer_t *self = malloc(sizeof(bit_buffer_t));
	if (!self) {
		return NULL;
	}
	memset(self, 0, sizeof(bit_buffer_t));
	self->mode = mode;
	self->bytes = malloc(initial_byte_capacity);
	if (!self->bytes && initial_byte_capacity != 0) {
		free(self);
		return NULL;
	}
	self->byte_capacity = initial_byte_capacity;
	self->byte_length = 0;
	self->index = 0;
	return self;
}
|
||||
|
||||
|
||||
// Release the buffer and its backing storage. NULL-safe, so it can be
// called unconditionally on a pointer that may not have been created.
void bit_buffer_destroy(bit_buffer_t *self) {
	if (!self) {
		return;
	}
	free(self->bytes);
	free(self);
}
|
||||
|
||||
|
||||
// Current read position, as a bit offset.
int bit_buffer_get_index(bit_buffer_t *self) {
	int current_index = self->index;
	return current_index;
}


// Move the read position to the given bit offset.
void bit_buffer_set_index(bit_buffer_t *self, unsigned int index) {
	self->index = index; // TODO check validity!
}
|
||||
|
||||
|
||||
// Reserve room for `bytes_to_write` bytes and return a pointer to the
// write position. In EXPAND mode the backing storage grows; in EVICT
// mode already-read bytes are dropped to make room. Callers must call
// bit_buffer_did_write() afterwards with the number of bytes written.
uint8_t *bit_buffer_get_write_ptr(bit_buffer_t *self, unsigned int bytes_to_write) {
	unsigned int bytes_available = self->byte_capacity - self->byte_length;

	if (bytes_to_write > bytes_available) {
		if (self->mode == BIT_BUFFER_MODE_EXPAND) {
			// Grow by doubling; if doubling still can't hold the existing
			// data plus this write, grow to exactly the size needed.
			// (The original fallback computed a capacity smaller than
			// byte_length + bytes_to_write, leaving the buffer too small
			// for very large writes.)
			unsigned int needed = self->byte_length + bytes_to_write;
			unsigned int new_byte_capacity = self->byte_capacity * 2;
			if (new_byte_capacity < needed) {
				new_byte_capacity = needed;
			}
			bit_buffer_resize(self, new_byte_capacity);
		}
		else {
			bit_buffer_evict(self, bytes_to_write);
		}
	}

	return self->bytes + self->byte_length;
}
|
||||
|
||||
|
||||
// Commit `bytes_written` bytes that were copied to the write pointer.
void bit_buffer_did_write(bit_buffer_t *self, unsigned int bytes_written) {
	self->byte_length = self->byte_length + bytes_written;
}
|
||||
|
||||
|
||||
// Scan forward from the current (byte-aligned) position for an MPEG
// start code (00 00 01 xx). On success the read index is moved just
// past the 4-byte code and the code byte xx is returned; otherwise the
// index is moved to the end of the buffer and -1 is returned.
int bit_buffer_find_next_start_code(bit_buffer_t *self) {
	// Stop 3 bytes short of the end so bytes[i+1..i+3] stay in bounds
	// (the original read past byte_length near the end of the buffer).
	for (int i = ((self->index + 7) >> 3); i + 3 < (int)self->byte_length; i++) {
		if(
			self->bytes[i] == 0x00 &&
			self->bytes[i+1] == 0x00 &&
			self->bytes[i+2] == 0x01
		) {
			self->index = (i+4) << 3;
			return self->bytes[i+3];
		}
	}
	self->index = (self->byte_length << 3);
	return -1;
}
|
||||
|
||||
|
||||
// Scan repeatedly until the specific start code `code` is found.
// Returns `code` on success, or -1 once the buffer is exhausted.
int bit_buffer_find_start_code(bit_buffer_t *self, int code) {
	for (;;) {
		int found = bit_buffer_find_next_start_code(self);
		if (found == code || found == -1) {
			return found;
		}
	}
}
|
||||
|
||||
|
||||
// True if the next (byte-aligned) bytes are an MPEG start-code prefix
// (00 00 01), or if the read position is already past the end of the
// data. Bounds-checked: the original read bytes[i+1] and bytes[i+2]
// past byte_length when fewer than 3 bytes remained; the JS version
// yields undefined (false) there, which this now matches.
int bit_buffer_next_bytes_are_start_code(bit_buffer_t *self) {
	unsigned int i = ((self->index + 7) >> 3);
	if (i >= self->byte_length) {
		return 1;
	}
	if (i + 2 >= self->byte_length) {
		// Not enough bytes left to form a start code.
		return 0;
	}
	return (
		self->bytes[i] == 0x00 &&
		self->bytes[i+1] == 0x00 &&
		self->bytes[i+2] == 0x01
	);
}
|
||||
|
||||
|
||||
// Read `count` bits starting at the current bit index, MSB first,
// without advancing the index.
int bit_buffer_peek(bit_buffer_t *self, unsigned int count) {
	int pos = self->index;
	int result = 0;
	while (count) {
		int byte = self->bytes[pos >> 3];
		int bits_left = 8 - (pos & 7);                      // unread bits in this byte
		int take = bits_left < count ? bits_left : count;   // bits taken from this byte
		int shift = bits_left - take;
		int mask = (0xff >> (8-take));

		result = (result << take) | ((byte & (mask << shift)) >> shift);

		pos += take;
		count -= take;
	}

	return result;
}
|
||||
|
||||
|
||||
// Read `count` bits and advance the read index past them.
int bit_buffer_read(bit_buffer_t *self, unsigned int count) {
	int result = bit_buffer_peek(self, count);
	self->index += count;
	return result;
}
|
||||
|
||||
|
||||
// Advance the read index by `count` bits; returns the new index.
int bit_buffer_skip(bit_buffer_t *self, unsigned int count) {
	self->index += count;
	return self->index;
}
|
||||
|
||||
|
||||
// Move the read index back by `count` bits, clamping at 0. The
// original subtracted first and then tested `index < 0`, but `index`
// is unsigned, so the test could never fire and the subtraction
// wrapped around to a huge value instead of clamping.
void bit_buffer_rewind(bit_buffer_t *self, unsigned int count) {
	if (count > self->index) {
		self->index = 0;
	}
	else {
		self->index -= count;
	}
}
|
||||
|
||||
|
||||
// True if at least `count` unread bits remain in the buffer.
int bit_buffer_has(bit_buffer_t *self, unsigned int count) {
	unsigned int bits_remaining = (self->byte_length << 3) - self->index;
	return bits_remaining >= count;
}
|
||||
|
||||
|
||||
// Change the backing storage to `byte_capacity` bytes. On allocation
// failure the old buffer and capacity are kept (the original assigned
// realloc's result directly, losing the buffer when realloc returned
// NULL). The read index is clamped to the valid data range.
void bit_buffer_resize(bit_buffer_t *self, unsigned int byte_capacity) {
	uint8_t *bytes = realloc(self->bytes, byte_capacity);
	if (!bytes && byte_capacity != 0) {
		return;
	}
	self->bytes = bytes;
	self->byte_capacity = byte_capacity;
	if (self->index > self->byte_length << 3) {
		self->index = self->byte_length << 3;
	}
}
|
||||
|
||||
|
||||
// Drop already-read bytes from the front of the buffer to make room
// for `bytes_needed` new bytes (EVICT mode). Unread data is shifted to
// the start of the storage and the read index adjusted accordingly.
void bit_buffer_evict(bit_buffer_t *self, unsigned int bytes_needed) {
	int byte_pos = self->index >> 3;   // read position, in whole bytes
	int bytes_available = self->byte_capacity - self->byte_length;

	// If the current index is the write position, we can simply reset both
	// to 0. Also reset (and throw away yet unread data) if we won't be able
	// to fit the new data in even after a normal eviction.
	if (
		byte_pos == self->byte_length ||
		bytes_needed > bytes_available + byte_pos // emergency evac
	) {
		self->byte_length = 0;
		self->index = 0;
		return;
	}
	else if (byte_pos == 0) {
		// Nothing read yet - we can't evict anything
		return;
	}

	// Shift the unread tail to the front; keep the sub-byte part of the
	// read index by subtracting whole bytes only.
	memmove(self->bytes, self->bytes + byte_pos, self->byte_length - byte_pos);
	self->byte_length -= byte_pos;
	self->index -= byte_pos << 3;
}
|
|
@ -0,0 +1,31 @@
|
|||
#ifndef BUFFER_H
#define BUFFER_H

#include <stdint.h>

// Bit-level read buffer with byte-level writes. Index arguments and
// return values are BIT offsets; capacities and write sizes are bytes.
typedef struct bit_buffer_t bit_buffer_t;

// Overflow strategy for writes that don't fit the current capacity.
typedef enum {
	BIT_BUFFER_MODE_EVICT = 1,  // drop already-read bytes to make room
	BIT_BUFFER_MODE_EXPAND = 2  // grow the backing allocation
} bit_buffer_mode_t;


bit_buffer_t *bit_buffer_create(unsigned int initial_byte_capacity, bit_buffer_mode_t mode);
void bit_buffer_destroy(bit_buffer_t *self);

int bit_buffer_get_index(bit_buffer_t *self);
void bit_buffer_set_index(bit_buffer_t *self, unsigned int index);

// Writing: reserve space, memcpy into the returned pointer, then commit.
uint8_t *bit_buffer_get_write_ptr(bit_buffer_t *self, unsigned int bytes_to_write);
void bit_buffer_did_write(bit_buffer_t *self, unsigned int bytes_written);
// Start-code helpers return the code byte found, or -1.
int bit_buffer_find_next_start_code(bit_buffer_t *self);
int bit_buffer_find_start_code(bit_buffer_t *self, int code);
int bit_buffer_next_bytes_are_start_code(bit_buffer_t *self);
// Bit-level reads (MSB first); `count` is a number of bits.
int bit_buffer_peek(bit_buffer_t *self, unsigned int count);
int bit_buffer_read(bit_buffer_t *self, unsigned int count);
int bit_buffer_skip(bit_buffer_t *self, unsigned int count);
int bit_buffer_has(bit_buffer_t *self, unsigned int count);
void bit_buffer_rewind(bit_buffer_t *self, unsigned int count);

#endif
|
|
@ -0,0 +1,704 @@
|
|||
#include <string.h>
|
||||
#include <stdlib.h>
|
||||
#include "mp2.h"
|
||||
|
||||
// 11-bit sync pattern at the start of every MPEG audio frame header.
const static int FRAME_SYNC = 0x7ff;

// MPEG version field of the frame header.
const static int VERSION_MPEG_2_5 = 0x0;
const static int VERSION_MPEG_2 = 0x2;
const static int VERSION_MPEG_1 = 0x3;

// Layer field of the frame header (this decoder targets Layer II).
const static int LAYER_III = 0x1;
const static int LAYER_II = 0x2;
const static int LAYER_I = 0x3;

// Channel mode field of the frame header.
const static int MODE_STEREO = 0x0;
const static int MODE_JOINT_STEREO = 0x1;
const static int MODE_DUAL_CHANNEL = 0x2;
const static int MODE_MONO = 0x3;

// Sample rates in Hz; first row MPEG-1, second row MPEG-2 rates.
const static unsigned short SAMPLE_RATE[] = {
	44100, 48000, 32000, 0, // MPEG-1
	22050, 24000, 16000, 0 // MPEG-2
};

// Bit rates in kbit/s; first row MPEG-1, second row MPEG-2 rates.
const static short BIT_RATE[] = {
	32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, // MPEG-1
	8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160 // MPEG-2
};

// Scale factor base values.
// NOTE(review): these look like fixed-point magnitudes (0x02000000 = 2.0
// in 2.24 format) - confirm the format at the use site.
const static int SCALEFACTOR_BASE[] = {
	0x02000000, 0x01965FEA, 0x01428A30
};
|
||||
|
||||
// Synthesis window coefficients for the polyphase filterbank
// (512 entries). NOTE(review): the sign pattern - including the
// occasional unsigned-looking entries such as 106.5 and 1018.5 - is
// copied verbatim from the source table; do not "fix" signs without
// checking against the reference table.
const static float SYNTHESIS_WINDOW[] = {
	     0.0,     -0.5,     -0.5,     -0.5,     -0.5,     -0.5,
	    -0.5,     -1.0,     -1.0,     -1.0,     -1.0,     -1.5,
	    -1.5,     -2.0,     -2.0,     -2.5,     -2.5,     -3.0,
	    -3.5,     -3.5,     -4.0,     -4.5,     -5.0,     -5.5,
	    -6.5,     -7.0,     -8.0,     -8.5,     -9.5,    -10.5,
	   -12.0,    -13.0,    -14.5,    -15.5,    -17.5,    -19.0,
	   -20.5,    -22.5,    -24.5,    -26.5,    -29.0,    -31.5,
	   -34.0,    -36.5,    -39.5,    -42.5,    -45.5,    -48.5,
	   -52.0,    -55.5,    -58.5,    -62.5,    -66.0,    -69.5,
	   -73.5,    -77.0,    -80.5,    -84.5,    -88.0,    -91.5,
	   -95.0,    -98.0,   -101.0,   -104.0,    106.5,    109.0,
	   111.0,    112.5,    113.5,    114.0,    114.0,    113.5,
	   112.0,    110.5,    107.5,    104.0,    100.0,     94.5,
	    88.5,     81.5,     73.0,     63.5,     53.0,     41.5,
	    28.5,     14.5,     -1.0,    -18.0,    -36.0,    -55.5,
	   -76.5,    -98.5,   -122.0,   -147.0,   -173.5,   -200.5,
	  -229.5,   -259.5,   -290.5,   -322.5,   -355.5,   -389.5,
	  -424.0,   -459.5,   -495.5,   -532.0,   -568.5,   -605.0,
	  -641.5,   -678.0,   -714.0,   -749.0,   -783.5,   -817.0,
	  -849.0,   -879.5,   -908.5,   -935.0,   -959.5,   -981.0,
	 -1000.5,  -1016.0,  -1028.5,  -1037.5,  -1042.5,  -1043.5,
	 -1040.0,  -1031.5,   1018.5,   1000.0,    976.0,    946.5,
	   911.0,    869.5,    822.0,    767.5,    707.0,    640.0,
	   565.5,    485.0,    397.0,    302.5,    201.0,     92.5,
	   -22.5,   -144.0,   -272.5,   -407.0,   -547.5,   -694.0,
	  -846.0,  -1003.0,  -1165.0,  -1331.5,  -1502.0,  -1675.5,
	 -1852.5,  -2031.5,  -2212.5,  -2394.0,  -2576.5,  -2758.5,
	 -2939.5,  -3118.5,  -3294.5,  -3467.5,  -3635.5,  -3798.5,
	 -3955.0,  -4104.5,  -4245.5,  -4377.5,  -4499.0,  -4609.5,
	 -4708.0,  -4792.5,  -4863.5,  -4919.0,  -4958.0,  -4979.5,
	 -4983.0,  -4967.5,  -4931.5,  -4875.0,  -4796.0,  -4694.5,
	 -4569.5,  -4420.0,  -4246.0,  -4046.0,  -3820.0,  -3567.0,
	  3287.0,   2979.5,   2644.0,   2280.5,   1888.0,   1467.5,
	  1018.5,    541.0,     35.0,   -499.0,  -1061.0,  -1650.0,
	 -2266.5,  -2909.0,  -3577.0,  -4270.0,  -4987.5,  -5727.5,
	 -6490.0,  -7274.0,  -8077.5,  -8899.5,  -9739.0, -10594.5,
	-11464.5, -12347.0, -13241.0, -14144.5, -15056.0, -15973.5,
	-16895.5, -17820.0, -18744.5, -19668.0, -20588.0, -21503.0,
	-22410.5, -23308.5, -24195.0, -25068.5, -25926.5, -26767.0,
	-27589.0, -28389.0, -29166.5, -29919.0, -30644.5, -31342.0,
	-32009.5, -32645.0, -33247.0, -33814.5, -34346.0, -34839.5,
	-35295.0, -35710.0, -36084.5, -36417.5, -36707.5, -36954.0,
	-37156.5, -37315.0, -37428.0, -37496.0,  37519.0,  37496.0,
	 37428.0,  37315.0,  37156.5,  36954.0,  36707.5,  36417.5,
	 36084.5,  35710.0,  35295.0,  34839.5,  34346.0,  33814.5,
	 33247.0,  32645.0,  32009.5,  31342.0,  30644.5,  29919.0,
	 29166.5,  28389.0,  27589.0,  26767.0,  25926.5,  25068.5,
	 24195.0,  23308.5,  22410.5,  21503.0,  20588.0,  19668.0,
	 18744.5,  17820.0,  16895.5,  15973.5,  15056.0,  14144.5,
	 13241.0,  12347.0,  11464.5,  10594.5,   9739.0,   8899.5,
	  8077.5,   7274.0,   6490.0,   5727.5,   4987.5,   4270.0,
	  3577.0,   2909.0,   2266.5,   1650.0,   1061.0,    499.0,
	   -35.0,   -541.0,  -1018.5,  -1467.5,  -1888.0,  -2280.5,
	 -2644.0,  -2979.5,   3287.0,   3567.0,   3820.0,   4046.0,
	  4246.0,   4420.0,   4569.5,   4694.5,   4796.0,   4875.0,
	  4931.5,   4967.5,   4983.0,   4979.5,   4958.0,   4919.0,
	  4863.5,   4792.5,   4708.0,   4609.5,   4499.0,   4377.5,
	  4245.5,   4104.5,   3955.0,   3798.5,   3635.5,   3467.5,
	  3294.5,   3118.5,   2939.5,   2758.5,   2576.5,   2394.0,
	  2212.5,   2031.5,   1852.5,   1675.5,   1502.0,   1331.5,
	  1165.0,   1003.0,    846.0,    694.0,    547.5,    407.0,
	   272.5,    144.0,     22.5,    -92.5,   -201.0,   -302.5,
	  -397.0,   -485.0,   -565.5,   -640.0,   -707.0,   -767.5,
	  -822.0,   -869.5,   -911.0,   -946.5,   -976.0,  -1000.0,
	  1018.5,   1031.5,   1040.0,   1043.5,   1042.5,   1037.5,
	  1028.5,   1016.0,   1000.5,    981.0,    959.5,    935.0,
	   908.5,    879.5,    849.0,    817.0,    783.5,    749.0,
	   714.0,    678.0,    641.5,    605.0,    568.5,    532.0,
	   495.5,    459.5,    424.0,    389.5,    355.5,    322.5,
	   290.5,    259.5,    229.5,    200.5,    173.5,    147.0,
	   122.0,     98.5,     76.5,     55.5,     36.0,     18.0,
	     1.0,    -14.5,    -28.5,    -41.5,    -53.0,    -63.5,
	   -73.0,    -81.5,    -88.5,    -94.5,   -100.0,   -104.0,
	  -107.5,   -110.5,   -112.0,   -113.5,   -114.0,   -114.0,
	  -113.5,   -112.5,   -111.0,   -109.0,    106.5,    104.0,
	   101.0,     98.0,     95.0,     91.5,     88.0,     84.5,
	    80.5,     77.0,     73.5,     69.5,     66.0,     62.5,
	    58.5,     55.5,     52.0,     48.5,     45.5,     42.5,
	    39.5,     36.5,     34.0,     31.5,     29.0,     26.5,
	    24.5,     22.5,     20.5,     19.0,     17.5,     15.5,
	    14.5,     13.0,     12.0,     10.5,      9.5,      8.5,
	     8.0,      7.0,      6.5,      5.5,      5.0,      4.5,
	     4.0,      3.5,      3.5,      3.0,      2.5,      2.5,
	     2.0,      2.0,      1.5,      1.5,      1.0,      1.0,
	     1.0,      1.0,      0.5,      0.5,      0.5,      0.5,
	     0.5,      0.5
};
|
||||
|
||||
// Quantizer lookup, step 1: bitrate classes
|
||||
const static uint8_t QUANT_LUT_STEP_1[2][16] = {
|
||||
// 32, 48, 56, 64, 80, 96,112,128,160,192,224,256,320,384 <- bitrate
|
||||
{ 0, 0, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2}, // mono
|
||||
// 16, 24, 28, 32, 40, 48, 56, 64, 80, 96,112,128,160,192 <- bitrate / chan
|
||||
{ 0, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 2} // stereo
|
||||
};
|
||||
|
||||
// Quantizer lookup, step 2: bitrate class, sample rate -> B2 table idx, sblimit
|
||||
const static uint8_t QUANT_TAB_A = (27 | 64); // Table 3-B.2a: high-rate, sblimit = 27
|
||||
const static uint8_t QUANT_TAB_B = (30 | 64); // Table 3-B.2b: high-rate, sblimit = 30
|
||||
const static uint8_t QUANT_TAB_C = 8; // Table 3-B.2c: low-rate, sblimit = 8
|
||||
const static uint8_t QUANT_TAB_D = 12; // Table 3-B.2d: low-rate, sblimit = 12
|
||||
|
||||
const static uint8_t QUANT_LUT_STEP_2[3][3] = {
|
||||
// 44.1 kHz, 48 kHz, 32 kHz
|
||||
{QUANT_TAB_C, QUANT_TAB_C, QUANT_TAB_D}, // 32 - 48 kbit/sec/ch
|
||||
{QUANT_TAB_A, QUANT_TAB_A, QUANT_TAB_A}, // 56 - 80 kbit/sec/ch
|
||||
{QUANT_TAB_B, QUANT_TAB_A, QUANT_TAB_B} // 96+ kbit/sec/ch
|
||||
};
|
||||
|
||||
// Quantizer lookup, step 3: B2 table, subband -> nbal, row index
// (upper 4 bits: nbal, lower 4 bits: row index)
// Entries not listed are zero-initialized (no allocation bits for those
// subbands); see read_allocation for how nbal/row are consumed.
const static uint8_t QUANT_LUT_STEP_3[3][32] = {
	// Low-rate table (3-B.2c and 3-B.2d)
	{
		0x44,0x44,
		0x34,0x34,0x34,0x34,0x34,0x34,0x34,0x34,0x34,0x34
	},
	// High-rate table (3-B.2a and 3-B.2b)
	{
		0x43,0x43,0x43,
		0x42,0x42,0x42,0x42,0x42,0x42,0x42,0x42,
		0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,0x31,
		0x20,0x20,0x20,0x20,0x20,0x20,0x20
	},
	// MPEG-2 LSR table (B.2 in ISO 13818-3)
	{
		0x45,0x45,0x45,0x45,
		0x34,0x34,0x34,0x34,0x34,0x34,0x34,
		0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,
		0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24,0x24
	}
};
|
||||
|
||||
// Quantizer lookup, step 4: table row, allocation[] value -> quant table index
// A value of 0 means "no allocation"; any other value is a 1-based index
// into QUANT_TAB (see read_allocation). Unlisted entries are zero.
// NOTE(review): name lacks the underscore the sibling tables use
// (QUANT_LUT_STEP_3 vs QUANT_LUT_STEP4); kept as-is since callers use it.
const static uint8_t QUANT_LUT_STEP4[6][16] = {
	{0, 1, 2, 17},
	{0, 1, 2, 3, 4, 5, 6, 17},
	{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 17},
	{0, 1, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17},
	{0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17},
	{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}
};
|
||||
|
||||
// One quantization class: how many levels a sample may take, whether three
// consecutive samples are packed into a single grouped code word, and how
// many bits that code word occupies (see read_samples for usage).
typedef struct quantizer_spec_t {
	unsigned short levels; // number of quantization steps
	unsigned char group;   // 1: three samples share one grouped code word
	unsigned char bits;    // bits per code word (grouped or per-sample)
} quantizer_spec_t;
|
||||
|
||||
// Quantizer classes, addressed 1-based via QUANT_LUT_STEP4 in
// read_allocation (index 0 there means "no allocation").
// Values correspond to the Layer II quantization table (cf. ISO 11172-3,
// Table 3-B.4).
const static quantizer_spec_t QUANT_TAB[] = {
	{.levels = 3, .group = 1, .bits = 5}, // 1
	{.levels = 5, .group = 1, .bits = 7}, // 2
	{.levels = 7, .group = 0, .bits = 3}, // 3
	{.levels = 9, .group = 1, .bits = 10}, // 4
	{.levels = 15, .group = 0, .bits = 4}, // 5
	{.levels = 31, .group = 0, .bits = 5}, // 6
	{.levels = 63, .group = 0, .bits = 6}, // 7
	{.levels = 127, .group = 0, .bits = 7}, // 8
	{.levels = 255, .group = 0, .bits = 8}, // 9
	{.levels = 511, .group = 0, .bits = 9}, // 10
	{.levels = 1023, .group = 0, .bits = 10}, // 11
	{.levels = 2047, .group = 0, .bits = 11}, // 12
	{.levels = 4095, .group = 0, .bits = 12}, // 13
	{.levels = 8191, .group = 0, .bits = 13}, // 14
	{.levels = 16383, .group = 0, .bits = 14}, // 15
	{.levels = 32767, .group = 0, .bits = 15}, // 16
	{.levels = 65535, .group = 0, .bits = 16} // 17
};
|
||||
|
||||
#define SAMPLES_PER_FRAME 1152
|
||||
|
||||
// Complete decoder state. Zero-initialized on creation (mp2_decoder_create).
typedef struct mp2_decoder_t {
	int sample_rate; // sample rate (Hz) of the last decoded frame; 44100 initially
	int v_pos;       // write position into the V ring buffer (synthesis filter)

	bit_buffer_t *bits; // input bitstream; owned by this decoder

	// Per-frame side information, indexed [channel][subband]:
	const quantizer_spec_t *allocation[2][32]; // NULL = subband not coded
	uint8_t scale_factor_info[2][32];          // 2-bit scfsi per subband
	int scale_factor[2][32][3];                // one scale factor per frame part
	int sample[2][32][3];                      // dequantized samples of one granule

	// Decoded PCM output of the last frame, one value per sample
	float channel_left[SAMPLES_PER_FRAME];
	float channel_right[SAMPLES_PER_FRAME];
	float D[1024];    // synthesis window: two back-to-back copies of the 512-tap table
	float V[2][1024]; // per-channel synthesis filter state (ring buffer)
	int U[32];        // windowing accumulator for one 32-sample output block
} mp2_decoder_t;
|
||||
|
||||
|
||||
void matrix_transform(int s[32][3], int ss, float *d, int dp);
|
||||
void read_samples(mp2_decoder_t *self, int ch, int sb, int part);
|
||||
const quantizer_spec_t *read_allocation(mp2_decoder_t *self, int sb, int tab3);
|
||||
int decode_frame(mp2_decoder_t *self);
|
||||
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Public interface
|
||||
|
||||
// Allocate and initialize an MP2 decoder with an internal bit buffer of
// buffer_size bytes in the given mode. Returns NULL on allocation failure.
mp2_decoder_t *mp2_decoder_create(unsigned int buffer_size, bit_buffer_mode_t buffer_mode) {
	// calloc zero-initializes the whole state (replacing malloc + memset)
	// and lets us detect failure instead of memset'ing a NULL pointer.
	mp2_decoder_t *self = calloc(1, sizeof(mp2_decoder_t));
	if (!self) {
		return NULL;
	}
	self->bits = bit_buffer_create(buffer_size, buffer_mode);

	self->sample_rate = 44100; // default until the first frame is decoded

	// The synthesis window D holds two consecutive copies of the 512-tap
	// table so the windowing loops can run without wrap-around handling.
	memcpy(self->D, SYNTHESIS_WINDOW, 512 * sizeof(float));
	memcpy(self->D + 512, SYNTHESIS_WINDOW, 512 * sizeof(float));

	return self;
}
|
||||
|
||||
// Release the decoder and its bit buffer. Safe to call with NULL, matching
// free()'s convention.
void mp2_decoder_destroy(mp2_decoder_t *self) {
	if (!self) {
		return;
	}
	bit_buffer_destroy(self->bits);
	free(self);
}
|
||||
|
||||
// Returns a pointer into the bit buffer where byte_size bytes of compressed
// data may be written; commit with mp2_decoder_did_write().
void *mp2_decoder_get_write_ptr(mp2_decoder_t *self, unsigned int byte_size) {
	return bit_buffer_get_write_ptr(self->bits, byte_size);
}

// Current read position in the bit buffer, in bits (see mp2_decoder_decode,
// which converts it to bytes with >> 3).
int mp2_decoder_get_index(mp2_decoder_t *self) {
	return bit_buffer_get_index(self->bits);
}

// Set the read position in the bit buffer, in bits.
void mp2_decoder_set_index(mp2_decoder_t *self, unsigned int index) {
	bit_buffer_set_index(self->bits, index);
}

// Notify the bit buffer that byte_size bytes were written via the pointer
// obtained from mp2_decoder_get_write_ptr().
void mp2_decoder_did_write(mp2_decoder_t *self, unsigned int byte_size) {
	bit_buffer_did_write(self->bits, byte_size);
}

// Sample rate (Hz) of the most recently decoded frame.
int mp2_decoder_get_sample_rate(mp2_decoder_t *self) {
	return self->sample_rate;
}

// Decoded left-channel PCM of the last frame (SAMPLES_PER_FRAME floats).
void *mp2_decoder_get_left_channel_ptr(mp2_decoder_t *self) {
	return self->channel_left;
}

// Decoded right-channel PCM of the last frame (SAMPLES_PER_FRAME floats).
void *mp2_decoder_get_right_channel_ptr(mp2_decoder_t *self) {
	return self->channel_right;
}
|
||||
|
||||
// Decode a single MP2 frame from the current read position.
// Returns the number of bytes the frame occupied, or 0 if fewer than 16
// bits (one header's worth) are available. On success the read position is
// re-aligned to the byte right after the frame.
int mp2_decoder_decode(mp2_decoder_t *self) {
	int start_byte = bit_buffer_get_index(self->bits) >> 3;

	// Bail out early when not even a frame header could be read
	if (!bit_buffer_has(self->bits, 16)) {
		return 0;
	}

	int frame_bytes = decode_frame(self);

	// Snap the bit position to the start of the next frame, regardless of
	// exactly how many bits decode_frame consumed
	bit_buffer_set_index(self->bits, (start_byte + frame_bytes) << 3);
	return frame_bytes;
}
|
||||
|
||||
|
||||
|
||||
|
||||
// Decode one complete MP2 frame: parse the header, read bit allocation,
// scale factors and samples, and run the synthesis filterbank into
// channel_left / channel_right (SAMPLES_PER_FRAME samples each).
// Assumes the bit buffer's read position is at a frame header.
// Returns the frame size in bytes, or 0 on an invalid/unsupported header.
int decode_frame(mp2_decoder_t *self) {
	// Check for valid header: syncword OK, MPEG-Audio Layer 2
	int sync = bit_buffer_read(self->bits, 11);
	int version = bit_buffer_read(self->bits, 2);
	int layer = bit_buffer_read(self->bits, 2);
	int hasCRC = !bit_buffer_read(self->bits, 1); // protection bit 0 => CRC present

	if (
		sync != FRAME_SYNC ||
		version != VERSION_MPEG_1 ||
		layer != LAYER_II
	) {
		return 0; // Invalid header or unsupported version
	}

	int bitrate_index = bit_buffer_read(self->bits, 4) - 1;
	// A raw value of 0 ('free format') yields -1 here; the original check
	// only rejected > 13 and would have indexed BIT_RATE[-1] below.
	if (bitrate_index < 0 || bitrate_index > 13) {
		return 0; // Invalid bit rate or 'free format'
	}

	int sample_rate_index = bit_buffer_read(self->bits, 2);
	int sample_rate = SAMPLE_RATE[sample_rate_index];
	if (sample_rate_index == 3) {
		return 0; // Invalid sample rate
	}
	if (version == VERSION_MPEG_2) {
		// MPEG-2 uses the second half of the rate tables
		sample_rate_index += 4;
		bitrate_index += 14;
	}
	int padding = bit_buffer_read(self->bits, 1);
	int privat = bit_buffer_read(self->bits, 1);
	int mode = bit_buffer_read(self->bits, 2);
	(void)privat; // private bit: must be consumed, but is not used

	// Parse the mode_extension, set up the stereo bound
	int bound = 0;
	if (mode == MODE_JOINT_STEREO) {
		bound = (bit_buffer_read(self->bits, 2) + 1) << 2;
	}
	else {
		bit_buffer_skip(self->bits, 2);
		bound = (mode == MODE_MONO) ? 0 : 32;
	}

	// Discard the last 4 bits of the header and the CRC value, if present
	bit_buffer_skip(self->bits, 4);
	if (hasCRC) {
		bit_buffer_skip(self->bits, 16);
	}

	// Compute the frame size:
	// 144000 = 1152 samples/frame / 8 bits * 1000 (bitrate is in kbit/s)
	int bitrate = BIT_RATE[bitrate_index];
	sample_rate = SAMPLE_RATE[sample_rate_index];
	int frame_size = (144000 * bitrate / sample_rate) + padding;

	// Prepare the quantizer table lookups
	int tab3 = 0;
	int sblimit = 0;
	if (version == VERSION_MPEG_2) {
		// MPEG-2 (LSR)
		tab3 = 2;
		sblimit = 30;
	}
	else {
		// MPEG-1
		int tab1 = (mode == MODE_MONO) ? 0 : 1;
		int tab2 = QUANT_LUT_STEP_1[tab1][bitrate_index];
		tab3 = QUANT_LUT_STEP_2[tab2][sample_rate_index];
		sblimit = tab3 & 63; // low 6 bits: subband limit
		tab3 >>= 6;          // remaining bit(s): B2 table group
	}

	if (bound > sblimit) {
		bound = sblimit;
	}

	// Read the allocation information. Below the stereo bound the channels
	// are coded independently; above it they share one allocation.
	for (int sb = 0; sb < bound; sb++) {
		self->allocation[0][sb] = read_allocation(self, sb, tab3);
		self->allocation[1][sb] = read_allocation(self, sb, tab3);
	}

	for (int sb = bound; sb < sblimit; sb++) {
		self->allocation[0][sb] =
			self->allocation[1][sb] =
			read_allocation(self, sb, tab3);
	}

	// Read scale factor selector information
	int channels = (mode == MODE_MONO) ? 1 : 2;
	for (int sb = 0; sb < sblimit; sb++) {
		for (int ch = 0; ch < channels; ch++) {
			if (self->allocation[ch][sb]) {
				self->scale_factor_info[ch][sb] = bit_buffer_read(self->bits, 2);
			}
		}
		if (mode == MODE_MONO) {
			self->scale_factor_info[1][sb] = self->scale_factor_info[0][sb];
		}
	}

	// Read scale factors. The scfsi value selects which of the three frame
	// parts share a scale factor.
	for (int sb = 0; sb < sblimit; sb++) {
		for (int ch = 0; ch < channels; ch++) {
			if (self->allocation[ch][sb]) {
				int *sf = self->scale_factor[ch][sb];
				switch (self->scale_factor_info[ch][sb]) {
					case 0: // three distinct scale factors
						sf[0] = bit_buffer_read(self->bits, 6);
						sf[1] = bit_buffer_read(self->bits, 6);
						sf[2] = bit_buffer_read(self->bits, 6);
						break;
					case 1: // parts 0 and 1 share
						sf[0] =
						sf[1] = bit_buffer_read(self->bits, 6);
						sf[2] = bit_buffer_read(self->bits, 6);
						break;
					case 2: // all three share
						sf[0] =
						sf[1] =
						sf[2] = bit_buffer_read(self->bits, 6);
						break;
					case 3: // parts 1 and 2 share
						sf[0] = bit_buffer_read(self->bits, 6);
						sf[1] =
						sf[2] = bit_buffer_read(self->bits, 6);
						break;
				}
			}
		}
		if (mode == MODE_MONO) {
			self->scale_factor[1][sb][0] = self->scale_factor[0][sb][0];
			self->scale_factor[1][sb][1] = self->scale_factor[0][sb][1];
			self->scale_factor[1][sb][2] = self->scale_factor[0][sb][2];
		}
	}

	// Coefficient input and reconstruction
	int out_pos = 0;
	for (int part = 0; part < 3; part++) {
		for (int granule = 0; granule < 4; granule++) {

			// Read the samples; above the bound, channel 1 mirrors channel 0
			for (int sb = 0; sb < bound; sb++) {
				read_samples(self, 0, sb, part);
				read_samples(self, 1, sb, part);
			}
			for (int sb = bound; sb < sblimit; sb++) {
				read_samples(self, 0, sb, part);
				self->sample[1][sb][0] = self->sample[0][sb][0];
				self->sample[1][sb][1] = self->sample[0][sb][1];
				self->sample[1][sb][2] = self->sample[0][sb][2];
			}
			for (int sb = sblimit; sb < 32; sb++) {
				self->sample[0][sb][0] = 0;
				self->sample[0][sb][1] = 0;
				self->sample[0][sb][2] = 0;
				self->sample[1][sb][0] = 0;
				self->sample[1][sb][1] = 0;
				self->sample[1][sb][2] = 0;
			}

			// Synthesis loop
			for (int p = 0; p < 3; p++) {
				// Shifting step: advance the V ring buffer by one 64-slot block
				self->v_pos = (self->v_pos - 64) & 1023;

				for (int ch = 0; ch < 2; ch++) {
					matrix_transform(self->sample[ch], p, self->V[ch], self->v_pos);

					// Build U, windowing, calculate output.
					// NOTE(review): U is int, so each float product is
					// truncated on accumulation — presumably intentional
					// (mirrors Int32Array semantics in the JS original);
					// confirm before changing.
					memset(self->U, 0, sizeof(self->U));

					int d_index = 512 - (self->v_pos >> 1);
					int v_index = (self->v_pos % 128) >> 1;
					while (v_index < 1024) {
						for (int i = 0; i < 32; ++i) {
							self->U[i] += self->D[d_index++] * self->V[ch][v_index++];
						}

						v_index += 128-32;
						d_index += 64-32;
					}

					v_index = (128-32 + 1024) - v_index;
					d_index -= (512 - 32);
					while (v_index < 1024) {
						for (int i = 0; i < 32; ++i) {
							self->U[i] += self->D[d_index++] * self->V[ch][v_index++];
						}

						v_index += 128-32;
						d_index += 64-32;
					}

					// Output samples, scaled to [-1, 1]
					float *out_channel = ch == 0
						? self->channel_left
						: self->channel_right;
					for (int j = 0; j < 32; j++) {
						out_channel[out_pos + j] = (float)self->U[j] / 2147418112.0;
					}
				} // End of synthesis channel loop
				out_pos += 32;
			} // End of synthesis sub-block loop

		} // Decoding of the granule finished
	}

	self->sample_rate = sample_rate;
	return frame_size;
}
|
||||
|
||||
// Read the bit-allocation code for subband sb and map it to a quantizer
// spec via the step-3/step-4 lookup tables. tab3 selects the B2 table
// group. Returns NULL when no bits are allocated for this subband.
const quantizer_spec_t *read_allocation(mp2_decoder_t *self, int sb, int tab3) {
	int packed = QUANT_LUT_STEP_3[tab3][sb];
	int nbal = packed >> 4;  // number of allocation bits to read
	int row = packed & 15;   // row of the step-4 lookup

	int alloc_code = bit_buffer_read(self->bits, nbal);
	int qtab = QUANT_LUT_STEP4[row][alloc_code];
	if (qtab == 0) {
		return 0; // subband carries no samples
	}
	return &QUANT_TAB[qtab - 1]; // table entries are 1-based
}
|
||||
|
||||
// Read and dequantize the three samples of one granule for channel ch,
// subband sb, into self->sample[ch][sb]. part selects which of the three
// per-frame scale factors applies. Uses fixed-point arithmetic throughout.
void read_samples(mp2_decoder_t *self, int ch, int sb, int part) {
	const quantizer_spec_t *q = self->allocation[ch][sb];
	int sf = self->scale_factor[ch][sb][part];
	int *sample = self->sample[ch][sb];
	int val = 0;

	if (!q) {
		// No bits allocated for this subband
		sample[0] = sample[1] = sample[2] = 0;
		return;
	}

	// Resolve scalefactor: split the 6-bit index into a base value and a
	// power-of-two shift, with rounding
	if (sf == 63) {
		sf = 0;
	}
	else {
		// the |0 is a no-op leftover from the JS source (int division in C)
		int shift = (sf / 3)|0;
		sf = (SCALEFACTOR_BASE[sf % 3] + ((1 << shift) >> 1)) >> shift;
	}

	// Decode samples
	int adj = q->levels;
	if (q->group) {
		// Decode grouped samples: three samples packed into one base-`adj`
		// code word
		val = bit_buffer_read(self->bits, q->bits);
		sample[0] = val % adj;
		val /= adj;
		sample[1] = val % adj;
		sample[2] = val / adj;
	}
	else {
		// Decode direct samples, q->bits each
		sample[0] = bit_buffer_read(self->bits, q->bits);
		sample[1] = bit_buffer_read(self->bits, q->bits);
		sample[2] = bit_buffer_read(self->bits, q->bits);
	}

	// Postmultiply samples: re-center around zero and apply the scalefactor
	int scale = 65536 / (adj + 1);
	adj = ((adj + 1) >> 1) - 1;

	// Fixed-point multiply by sf, split into a high and a low 12-bit half
	// with rounding on the low half
	val = (adj - sample[0]) * scale;
	sample[0] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12;

	val = (adj - sample[1]) * scale;
	sample[1] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12;

	val = (adj - sample[2]) * scale;
	sample[2] = (val * (sf >> 12) + ((val * (sf & 4095) + 2048) >> 12)) >> 12;
}
|
||||
|
||||
// Fast 32-point transform of the synthesis filterbank: transforms the 32
// subband samples of granule ss in s into 64 filter values written at
// d[dp + 0 .. dp + 63] (with mirrored/negated symmetry in the output).
// The t01..t33 temporaries implement a fixed butterfly network; the
// constants are the transform's twiddle factors.
// Change vs. original: removed the stray ';' after the closing brace
// (an empty file-scope declaration, not valid ISO C).
void matrix_transform(int s[32][3], int ss, float *d, int dp) {
	float t01, t02, t03, t04, t05, t06, t07, t08, t09, t10, t11, t12,
		t13, t14, t15, t16, t17, t18, t19, t20, t21, t22, t23, t24,
		t25, t26, t27, t28, t29, t30, t31, t32, t33;

	t01 = s[ 0][ss] + s[31][ss]; t02 = (float)(s[ 0][ss] - s[31][ss]) * 0.500602998235;
	t03 = s[ 1][ss] + s[30][ss]; t04 = (float)(s[ 1][ss] - s[30][ss]) * 0.505470959898;
	t05 = s[ 2][ss] + s[29][ss]; t06 = (float)(s[ 2][ss] - s[29][ss]) * 0.515447309923;
	t07 = s[ 3][ss] + s[28][ss]; t08 = (float)(s[ 3][ss] - s[28][ss]) * 0.53104259109;
	t09 = s[ 4][ss] + s[27][ss]; t10 = (float)(s[ 4][ss] - s[27][ss]) * 0.553103896034;
	t11 = s[ 5][ss] + s[26][ss]; t12 = (float)(s[ 5][ss] - s[26][ss]) * 0.582934968206;
	t13 = s[ 6][ss] + s[25][ss]; t14 = (float)(s[ 6][ss] - s[25][ss]) * 0.622504123036;
	t15 = s[ 7][ss] + s[24][ss]; t16 = (float)(s[ 7][ss] - s[24][ss]) * 0.674808341455;
	t17 = s[ 8][ss] + s[23][ss]; t18 = (float)(s[ 8][ss] - s[23][ss]) * 0.744536271002;
	t19 = s[ 9][ss] + s[22][ss]; t20 = (float)(s[ 9][ss] - s[22][ss]) * 0.839349645416;
	t21 = s[10][ss] + s[21][ss]; t22 = (float)(s[10][ss] - s[21][ss]) * 0.972568237862;
	t23 = s[11][ss] + s[20][ss]; t24 = (float)(s[11][ss] - s[20][ss]) * 1.16943993343;
	t25 = s[12][ss] + s[19][ss]; t26 = (float)(s[12][ss] - s[19][ss]) * 1.48416461631;
	t27 = s[13][ss] + s[18][ss]; t28 = (float)(s[13][ss] - s[18][ss]) * 2.05778100995;
	t29 = s[14][ss] + s[17][ss]; t30 = (float)(s[14][ss] - s[17][ss]) * 3.40760841847;
	t31 = s[15][ss] + s[16][ss]; t32 = (float)(s[15][ss] - s[16][ss]) * 10.1900081235;

	t33 = t01 + t31; t31 = (t01 - t31) * 0.502419286188;
	t01 = t03 + t29; t29 = (t03 - t29) * 0.52249861494;
	t03 = t05 + t27; t27 = (t05 - t27) * 0.566944034816;
	t05 = t07 + t25; t25 = (t07 - t25) * 0.64682178336;
	t07 = t09 + t23; t23 = (t09 - t23) * 0.788154623451;
	t09 = t11 + t21; t21 = (t11 - t21) * 1.06067768599;
	t11 = t13 + t19; t19 = (t13 - t19) * 1.72244709824;
	t13 = t15 + t17; t17 = (t15 - t17) * 5.10114861869;
	t15 = t33 + t13; t13 = (t33 - t13) * 0.509795579104;
	t33 = t01 + t11; t01 = (t01 - t11) * 0.601344886935;
	t11 = t03 + t09; t09 = (t03 - t09) * 0.899976223136;
	t03 = t05 + t07; t07 = (t05 - t07) * 2.56291544774;
	t05 = t15 + t03; t15 = (t15 - t03) * 0.541196100146;
	t03 = t33 + t11; t11 = (t33 - t11) * 1.30656296488;
	t33 = t05 + t03; t05 = (t05 - t03) * 0.707106781187;
	t03 = t15 + t11; t15 = (t15 - t11) * 0.707106781187;
	t03 += t15;
	t11 = t13 + t07; t13 = (t13 - t07) * 0.541196100146;
	t07 = t01 + t09; t09 = (t01 - t09) * 1.30656296488;
	t01 = t11 + t07; t07 = (t11 - t07) * 0.707106781187;
	t11 = t13 + t09; t13 = (t13 - t09) * 0.707106781187;
	t11 += t13; t01 += t11;
	t11 += t07; t07 += t13;
	t09 = t31 + t17; t31 = (t31 - t17) * 0.509795579104;
	t17 = t29 + t19; t29 = (t29 - t19) * 0.601344886935;
	t19 = t27 + t21; t21 = (t27 - t21) * 0.899976223136;
	t27 = t25 + t23; t23 = (t25 - t23) * 2.56291544774;
	t25 = t09 + t27; t09 = (t09 - t27) * 0.541196100146;
	t27 = t17 + t19; t19 = (t17 - t19) * 1.30656296488;
	t17 = t25 + t27; t27 = (t25 - t27) * 0.707106781187;
	t25 = t09 + t19; t19 = (t09 - t19) * 0.707106781187;
	t25 += t19;
	t09 = t31 + t23; t31 = (t31 - t23) * 0.541196100146;
	t23 = t29 + t21; t21 = (t29 - t21) * 1.30656296488;
	t29 = t09 + t23; t23 = (t09 - t23) * 0.707106781187;
	t09 = t31 + t21; t31 = (t31 - t21) * 0.707106781187;
	t09 += t31; t29 += t09; t09 += t23; t23 += t31;
	t17 += t29; t29 += t25; t25 += t09; t09 += t27;
	t27 += t23; t23 += t19; t19 += t31;
	t21 = t02 + t32; t02 = (t02 - t32) * 0.502419286188;
	t32 = t04 + t30; t04 = (t04 - t30) * 0.52249861494;
	t30 = t06 + t28; t28 = (t06 - t28) * 0.566944034816;
	t06 = t08 + t26; t08 = (t08 - t26) * 0.64682178336;
	t26 = t10 + t24; t10 = (t10 - t24) * 0.788154623451;
	t24 = t12 + t22; t22 = (t12 - t22) * 1.06067768599;
	t12 = t14 + t20; t20 = (t14 - t20) * 1.72244709824;
	t14 = t16 + t18; t16 = (t16 - t18) * 5.10114861869;
	t18 = t21 + t14; t14 = (t21 - t14) * 0.509795579104;
	t21 = t32 + t12; t32 = (t32 - t12) * 0.601344886935;
	t12 = t30 + t24; t24 = (t30 - t24) * 0.899976223136;
	t30 = t06 + t26; t26 = (t06 - t26) * 2.56291544774;
	t06 = t18 + t30; t18 = (t18 - t30) * 0.541196100146;
	t30 = t21 + t12; t12 = (t21 - t12) * 1.30656296488;
	t21 = t06 + t30; t30 = (t06 - t30) * 0.707106781187;
	t06 = t18 + t12; t12 = (t18 - t12) * 0.707106781187;
	t06 += t12;
	t18 = t14 + t26; t26 = (t14 - t26) * 0.541196100146;
	t14 = t32 + t24; t24 = (t32 - t24) * 1.30656296488;
	t32 = t18 + t14; t14 = (t18 - t14) * 0.707106781187;
	t18 = t26 + t24; t24 = (t26 - t24) * 0.707106781187;
	t18 += t24; t32 += t18;
	t18 += t14; t26 = t14 + t24;
	t14 = t02 + t16; t02 = (t02 - t16) * 0.509795579104;
	t16 = t04 + t20; t04 = (t04 - t20) * 0.601344886935;
	t20 = t28 + t22; t22 = (t28 - t22) * 0.899976223136;
	t28 = t08 + t10; t10 = (t08 - t10) * 2.56291544774;
	t08 = t14 + t28; t14 = (t14 - t28) * 0.541196100146;
	t28 = t16 + t20; t20 = (t16 - t20) * 1.30656296488;
	t16 = t08 + t28; t28 = (t08 - t28) * 0.707106781187;
	t08 = t14 + t20; t20 = (t14 - t20) * 0.707106781187;
	t08 += t20;
	t14 = t02 + t10; t02 = (t02 - t10) * 0.541196100146;
	t10 = t04 + t22; t22 = (t04 - t22) * 1.30656296488;
	t04 = t14 + t10; t10 = (t14 - t10) * 0.707106781187;
	t14 = t02 + t22; t02 = (t02 - t22) * 0.707106781187;
	t14 += t02; t04 += t14; t14 += t10; t10 += t02;
	t16 += t04; t04 += t08; t08 += t14; t14 += t28;
	t28 += t10; t10 += t20; t20 += t02; t21 += t16;
	t16 += t32; t32 += t04; t04 += t06; t06 += t08;
	t08 += t18; t18 += t14; t14 += t30; t30 += t28;
	t28 += t26; t26 += t10; t10 += t12; t12 += t20;
	t20 += t24; t24 += t02;

	// Scatter the results into d with the transform's mirror symmetry
	d[dp + 48] = -t33;
	d[dp + 49] = d[dp + 47] = -t21;
	d[dp + 50] = d[dp + 46] = -t17;
	d[dp + 51] = d[dp + 45] = -t16;
	d[dp + 52] = d[dp + 44] = -t01;
	d[dp + 53] = d[dp + 43] = -t32;
	d[dp + 54] = d[dp + 42] = -t29;
	d[dp + 55] = d[dp + 41] = -t04;
	d[dp + 56] = d[dp + 40] = -t03;
	d[dp + 57] = d[dp + 39] = -t06;
	d[dp + 58] = d[dp + 38] = -t25;
	d[dp + 59] = d[dp + 37] = -t08;
	d[dp + 60] = d[dp + 36] = -t11;
	d[dp + 61] = d[dp + 35] = -t18;
	d[dp + 62] = d[dp + 34] = -t09;
	d[dp + 63] = d[dp + 33] = -t14;
	d[dp + 32] = -t05;
	d[dp + 0] = t05; d[dp + 31] = -t30;
	d[dp + 1] = t30; d[dp + 30] = -t27;
	d[dp + 2] = t27; d[dp + 29] = -t28;
	d[dp + 3] = t28; d[dp + 28] = -t07;
	d[dp + 4] = t07; d[dp + 27] = -t26;
	d[dp + 5] = t26; d[dp + 26] = -t23;
	d[dp + 6] = t23; d[dp + 25] = -t10;
	d[dp + 7] = t10; d[dp + 24] = -t15;
	d[dp + 8] = t15; d[dp + 23] = -t12;
	d[dp + 9] = t12; d[dp + 22] = -t19;
	d[dp + 10] = t19; d[dp + 21] = -t20;
	d[dp + 11] = t20; d[dp + 20] = -t13;
	d[dp + 12] = t13; d[dp + 19] = -t24;
	d[dp + 13] = t24; d[dp + 18] = -t31;
	d[dp + 14] = t31; d[dp + 17] = -t02;
	d[dp + 15] = t02; d[dp + 16] = 0.0;
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#ifndef MP2_H
#define MP2_H

#include <stdbool.h>
#include <stdint.h>
#include "buffer.h"

// Opaque MP2 (MPEG-1 Audio Layer II) decoder handle; defined in mp2.c.
typedef struct mp2_decoder_t mp2_decoder_t;

// Create a decoder with an internal bit buffer of buffer_size bytes;
// destroy releases the decoder and its buffer.
mp2_decoder_t *mp2_decoder_create(unsigned int buffer_size, bit_buffer_mode_t buffer_mode);
void mp2_decoder_destroy(mp2_decoder_t *self);
// Obtain a pointer for writing byte_size bytes of compressed input.
void *mp2_decoder_get_write_ptr(mp2_decoder_t *self, unsigned int byte_size);
// Get/set the bit buffer's read position (in bits).
int mp2_decoder_get_index(mp2_decoder_t *self);
void mp2_decoder_set_index(mp2_decoder_t *self, unsigned int index);
// Commit byte_size bytes previously written via the write pointer.
void mp2_decoder_did_write(mp2_decoder_t *self, unsigned int byte_size);

// Decoded output: per-channel PCM of the last frame and its sample rate.
void *mp2_decoder_get_left_channel_ptr(mp2_decoder_t *self);
void *mp2_decoder_get_right_channel_ptr(mp2_decoder_t *self);
int mp2_decoder_get_sample_rate(mp2_decoder_t *self);
// Decode one frame; returns bytes consumed, 0 if no complete frame.
int mp2_decoder_decode(mp2_decoder_t *self);

#endif
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,27 @@
|
|||
#ifndef MPEG1_H
#define MPEG1_H

#include <stdbool.h>
#include <stdint.h>
#include "buffer.h"

// Opaque MPEG1 video decoder handle; defined in mpeg1.c (not shown here).
typedef struct mpeg1_decoder_t mpeg1_decoder_t;

// Lifecycle and bit-buffer access — mirrors the mp2_decoder_* API.
mpeg1_decoder_t *mpeg1_decoder_create(unsigned int buffer_size, bit_buffer_mode_t buffer_mode);
void mpeg1_decoder_destroy(mpeg1_decoder_t *self);
void *mpeg1_decoder_get_write_ptr(mpeg1_decoder_t *self, unsigned int byte_size);
int mpeg1_decoder_get_index(mpeg1_decoder_t *self);
void mpeg1_decoder_set_index(mpeg1_decoder_t *self, unsigned int index);
void mpeg1_decoder_did_write(mpeg1_decoder_t *self, unsigned int byte_size);

// Stream properties — presumably valid once a sequence header has been
// parsed (see has_sequence_header); confirm against mpeg1.c.
int mpeg1_decoder_has_sequence_header(mpeg1_decoder_t *self);
float mpeg1_decoder_get_frame_rate(mpeg1_decoder_t *self);
int mpeg1_decoder_get_coded_size(mpeg1_decoder_t *self);
int mpeg1_decoder_get_width(mpeg1_decoder_t *self);
int mpeg1_decoder_get_height(mpeg1_decoder_t *self);
// Planar Y/Cr/Cb output of the last decoded frame.
void *mpeg1_decoder_get_y_ptr(mpeg1_decoder_t *self);
void *mpeg1_decoder_get_cr_ptr(mpeg1_decoder_t *self);
void *mpeg1_decoder_get_cb_ptr(mpeg1_decoder_t *self);
bool mpeg1_decoder_decode(mpeg1_decoder_t *self);

#endif
|
|
@ -0,0 +1,145 @@
|
|||
JSMpeg.AudioOutput.WebAudio = (function() { "use strict";

// Audio output backend built on the Web Audio API. Decoded planar PCM is
// enqueued as AudioBuffers on a shared, cached AudioContext; a gain node
// provides volume control and instant muting.
// Changes vs. original: added the two missing statement-terminating
// semicolons (in play() and getEnqueuedTime()) instead of relying on ASI.
var WebAudioOut = function(options) {
	this.context = WebAudioOut.CachedContext =
		WebAudioOut.CachedContext ||
		new (window.AudioContext || window.webkitAudioContext)();

	this.gain = this.context.createGain();
	this.destination = this.gain;

	// Keep track of the number of connections to this AudioContext, so we
	// can safely close() it when we're the only one connected to it.
	this.gain.connect(this.context.destination);
	this.context._connections = (this.context._connections || 0) + 1;

	this.startTime = 0;
	this.buffer = null;
	this.wallclockStartTime = 0;
	this.volume = 1;
	this.enabled = true;

	// Some platforms (iOS) need a user gesture before audio may play
	this.unlocked = !WebAudioOut.NeedsUnlocking();

	Object.defineProperty(this, 'enqueuedTime', {get: this.getEnqueuedTime});
};

WebAudioOut.prototype.destroy = function() {
	this.gain.disconnect();
	this.context._connections--;

	// Only close the shared context once nobody else uses it
	if (this.context._connections === 0) {
		this.context.close();
		WebAudioOut.CachedContext = null;
	}
};

// Enqueue one frame of planar stereo PCM for gapless playback.
WebAudioOut.prototype.play = function(sampleRate, left, right) {
	if (!this.enabled) {
		return;
	}

	// If the context is not unlocked yet, we simply advance the start time
	// to "fake" actually playing audio. This will keep the video in sync.
	if (!this.unlocked) {
		var ts = JSMpeg.Now();
		if (this.wallclockStartTime < ts) {
			this.wallclockStartTime = ts;
		}
		this.wallclockStartTime += left.length / sampleRate;
		return;
	}

	this.gain.gain.value = this.volume;

	var buffer = this.context.createBuffer(2, left.length, sampleRate);
	buffer.getChannelData(0).set(left);
	buffer.getChannelData(1).set(right);

	var source = this.context.createBufferSource();
	source.buffer = buffer;
	source.connect(this.destination);

	// Schedule back-to-back with the previously enqueued buffer, or start
	// now if playback has fallen behind
	var now = this.context.currentTime;
	var duration = buffer.duration;
	if (this.startTime < now) {
		this.startTime = now;
		this.wallclockStartTime = JSMpeg.Now();
	}

	source.start(this.startTime);
	this.startTime += duration;
	this.wallclockStartTime += duration;
};

WebAudioOut.prototype.stop = function() {
	// Meh; there seems to be no simple way to get a list of currently
	// active source nodes from the Audio Context, and maintaining this
	// list ourselfs would be a pain, so we just set the gain to 0
	// to cut off all enqueued audio instantly.
	this.gain.gain.value = 0;
};

WebAudioOut.prototype.getEnqueuedTime = function() {
	// The AudioContext.currentTime is only updated every so often, so if we
	// want to get exact timing, we need to rely on the system time.
	return Math.max(this.wallclockStartTime - JSMpeg.Now(), 0);
};

WebAudioOut.prototype.resetEnqueuedTime = function() {
	this.startTime = this.context.currentTime;
	this.wallclockStartTime = JSMpeg.Now();
};

// Kick off a silent buffer, ideally in response to a user gesture; once it
// actually plays, the context counts as unlocked.
WebAudioOut.prototype.unlock = function(callback) {
	if (this.unlocked) {
		if (callback) {
			callback();
		}
		return;
	}

	this.unlockCallback = callback;

	// Create empty buffer and play it
	var buffer = this.context.createBuffer(1, 1, 22050);
	var source = this.context.createBufferSource();
	source.buffer = buffer;
	source.connect(this.destination);
	source.start(0);

	setTimeout(this.checkIfUnlocked.bind(this, source, 0), 0);
};

// Poll the silent source's playback state (up to 10 times, 100ms apart)
// to detect whether the unlock attempt succeeded.
WebAudioOut.prototype.checkIfUnlocked = function(source, attempt) {
	if (
		source.playbackState === source.PLAYING_STATE ||
		source.playbackState === source.FINISHED_STATE
	) {
		this.unlocked = true;
		if (this.unlockCallback) {
			this.unlockCallback();
			this.unlockCallback = null;
		}
	}
	else if (attempt < 10) {
		// Jeez, what a shit show. Thanks iOS!
		setTimeout(this.checkIfUnlocked.bind(this, source, attempt+1), 100);
	}
};

WebAudioOut.NeedsUnlocking = function() {
	return /iPhone|iPad|iPod/i.test(navigator.userAgent);
};

WebAudioOut.IsSupported = function() {
	return (window.AudioContext || window.webkitAudioContext);
};

WebAudioOut.CachedContext = null;

return WebAudioOut;

})();
|
||||
|
|
@ -0,0 +1,308 @@
|
|||
JSMpeg.Renderer.WebGL = (function(){ "use strict";
|
||||
|
||||
// Renders decoded Y/Cr/Cb frames into a canvas via WebGL. Creates its own
// canvas element unless options.canvas is supplied (ownsCanvasElement
// records which, so destroy() knows whether to remove it).
var WebGLRenderer = function(options) {
	if (options.canvas) {
		this.canvas = options.canvas;
		this.ownsCanvasElement = false;
	}
	else {
		this.canvas = document.createElement('canvas');
		this.ownsCanvasElement = true;
	}
	this.width = this.canvas.width;
	this.height = this.canvas.height;
	this.enabled = true;

	// Tracks which textures have received data; reset on (re)init
	this.hasTextureData = {};

	// Video output needs none of the optional buffers
	var contextCreateOptions = {
		preserveDrawingBuffer: !!options.preserveDrawingBuffer,
		alpha: false,
		depth: false,
		stencil: false,
		antialias: false,
		premultipliedAlpha: false
	};

	this.gl =
		this.canvas.getContext('webgl', contextCreateOptions) ||
		this.canvas.getContext('experimental-webgl', contextCreateOptions);

	if (!this.gl) {
		throw new Error('Failed to get WebGL Context');
	}

	// Keep bound references so destroy() can remove the listeners again
	this.handleContextLostBound = this.handleContextLost.bind(this);
	this.handleContextRestoredBound = this.handleContextRestored.bind(this);

	this.canvas.addEventListener('webglcontextlost', this.handleContextLostBound, false);
	this.canvas.addEventListener('webglcontextrestored', this.handleContextRestoredBound, false);

	this.initGL();
};
|
||||
|
||||
// (Re)create all GL resources: the vertex buffer, the two shader programs
// and the three plane textures. Called from the constructor and again when
// the WebGL context is restored after a loss.
WebGLRenderer.prototype.initGL = function() {
	this.hasTextureData = {};

	var gl = this.gl;
	var vertexAttr = null;

	gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);

	// Init buffers: the four corner coordinates of a unit quad
	this.vertexBuffer = gl.createBuffer();
	var vertexCoords = new Float32Array([0, 0, 0, 1, 1, 0, 1, 1]);
	gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
	gl.bufferData(gl.ARRAY_BUFFER, vertexCoords, gl.STATIC_DRAW);

	// Setup the main YCrCbToRGBA shader
	this.program = this.createProgram(
		WebGLRenderer.SHADER.VERTEX_IDENTITY,
		WebGLRenderer.SHADER.FRAGMENT_YCRCB_TO_RGBA
	);
	vertexAttr = gl.getAttribLocation(this.program, 'vertex');
	gl.enableVertexAttribArray(vertexAttr);
	gl.vertexAttribPointer(vertexAttr, 2, gl.FLOAT, false, 0, 0);

	// One texture per Y/Cb/Cr plane, bound to texture units 0..2
	this.textureY = this.createTexture(0, 'textureY');
	this.textureCb = this.createTexture(1, 'textureCb');
	this.textureCr = this.createTexture(2, 'textureCr');


	// Setup the loading animation shader
	this.loadingProgram = this.createProgram(
		WebGLRenderer.SHADER.VERTEX_IDENTITY,
		WebGLRenderer.SHADER.FRAGMENT_LOADING
	);
	vertexAttr = gl.getAttribLocation(this.loadingProgram, 'vertex');
	gl.enableVertexAttribArray(vertexAttr);
	gl.vertexAttribPointer(vertexAttr, 2, gl.FLOAT, false, 0, 0);

	// Some implementations reject Uint8ClampedArray texture uploads;
	// probe once so uploads can create unclamped views where needed
	this.shouldCreateUnclampedViews = !this.allowsClampedTextureData();
};
|
||||
|
||||
// Prevent the default handling of a lost WebGL context so that the
// 'webglcontextrestored' event can fire and initGL() can rebuild state.
WebGLRenderer.prototype.handleContextLost = function(ev) {
	ev.preventDefault();
};
|
||||
|
||||
// Rebuild all GL resources after the context has come back.
WebGLRenderer.prototype.handleContextRestored = function(ev) {
	this.initGL();
};
|
||||
|
||||
// Release all GL resources and event listeners; removes the canvas from
// the DOM only if this renderer created it.
WebGLRenderer.prototype.destroy = function() {
	var gl = this.gl;

	this.deleteTexture(gl.TEXTURE0, this.textureY);
	this.deleteTexture(gl.TEXTURE1, this.textureCb);
	this.deleteTexture(gl.TEXTURE2, this.textureCr);

	gl.useProgram(null);
	gl.deleteProgram(this.program);
	gl.deleteProgram(this.loadingProgram);

	gl.bindBuffer(gl.ARRAY_BUFFER, null);
	gl.deleteBuffer(this.vertexBuffer);

	this.canvas.removeEventListener('webglcontextlost', this.handleContextLostBound, false);
	this.canvas.removeEventListener('webglcontextrestored', this.handleContextRestoredBound, false);

	if (this.ownsCanvasElement) {
		this.canvas.remove();
	}
};
|
||||
|
||||
// Resize the canvas and the GL viewport to width x height (truncated to
// integers).
WebGLRenderer.prototype.resize = function(width, height) {
	this.width = width|0;
	this.height = height|0;

	this.canvas.width = this.width;
	this.canvas.height = this.height;

	this.gl.useProgram(this.program);

	// Viewport width is rounded up to the next multiple of 16 —
	// NOTE(review): presumably to match the MPEG coded picture width so
	// the padding columns land off-canvas; confirm against the decoder.
	var codedWidth = ((this.width + 15) >> 4) << 4;
	this.gl.viewport(0, 0, codedWidth, this.height);
};
|
||||
|
||||
// Create one plane texture with linear filtering and edge clamping, and
// bind the sampler uniform `name` of the main program to texture unit
// `index`. Returns the new texture handle.
WebGLRenderer.prototype.createTexture = function(index, name) {
	var gl = this.gl;
	var texture = gl.createTexture();

	gl.bindTexture(gl.TEXTURE_2D, texture);

	// Same sampling parameters for every plane.
	var parameters = [
		[gl.TEXTURE_MAG_FILTER, gl.LINEAR],
		[gl.TEXTURE_MIN_FILTER, gl.LINEAR],
		[gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE],
		[gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE]
	];
	for (var i = 0; i < parameters.length; i++) {
		gl.texParameteri(gl.TEXTURE_2D, parameters[i][0], parameters[i][1]);
	}

	gl.uniform1i(gl.getUniformLocation(this.program, name), index);

	return texture;
};
|
||||
|
||||
// Compile the two shader sources, link them into a program and make it
// the current program. Returns the linked program.
// Throws an Error carrying the program info log if linking fails —
// previously link failures were silently ignored, unlike the compile
// check in compileShader().
WebGLRenderer.prototype.createProgram = function(vsh, fsh) {
	var gl = this.gl;
	var program = gl.createProgram();

	gl.attachShader(program, this.compileShader(gl.VERTEX_SHADER, vsh));
	gl.attachShader(program, this.compileShader(gl.FRAGMENT_SHADER, fsh));
	gl.linkProgram(program);

	if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
		throw new Error(gl.getProgramInfoLog(program));
	}

	gl.useProgram(program);

	return program;
};
|
||||
|
||||
// Compile a single shader of the given type (VERTEX_SHADER or
// FRAGMENT_SHADER) from GLSL source. Throws an Error with the shader
// info log on compile failure; returns the shader handle otherwise.
WebGLRenderer.prototype.compileShader = function(type, source) {
	var gl = this.gl;
	var shader = gl.createShader(type);
	gl.shaderSource(shader, source);
	gl.compileShader(shader);

	var compiledOk = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
	if (!compiledOk) {
		throw new Error(gl.getShaderInfoLog(shader));
	}

	return shader;
};
|
||||
|
||||
// Feature-test whether this WebGL implementation accepts a
// Uint8ClampedArray as texImage2D pixel data. Some implementations
// reject it and raise a GL error, in which case render() creates plain
// Uint8Array views over the same buffers instead.
WebGLRenderer.prototype.allowsClampedTextureData = function() {
	var gl = this.gl;
	var texture = gl.createTexture();

	gl.bindTexture(gl.TEXTURE_2D, texture);
	gl.texImage2D(
		gl.TEXTURE_2D, 0, gl.LUMINANCE, 1, 1, 0,
		gl.LUMINANCE, gl.UNSIGNED_BYTE, new Uint8ClampedArray([0])
	);
	var allowed = (gl.getError() === 0);

	// Fix: the 1x1 probe texture was previously leaked. Unbind and
	// delete it now that the error state has been sampled.
	gl.bindTexture(gl.TEXTURE_2D, null);
	gl.deleteTexture(texture);

	return allowed;
};
|
||||
|
||||
// Draw the loading animation: a full-canvas quad whose fragment shader
// fills everything below the given progress value (0..1).
WebGLRenderer.prototype.renderProgress = function(progress) {
	var gl = this.gl;

	gl.useProgram(this.loadingProgram);

	var progressUniform = gl.getUniformLocation(this.loadingProgram, 'progress');
	gl.uniform1f(progressUniform, progress);

	gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
};
|
||||
|
||||
// Upload the three decoded planes and draw the frame as a fullscreen
// quad. `y`, `cb`, `cr` are the luma and the two half-resolution chroma
// planes; `isClampedArray` tells us whether they are Uint8ClampedArrays.
WebGLRenderer.prototype.render = function(y, cb, cr, isClampedArray) {
	if (!this.enabled) {
		return;
	}

	var gl = this.gl;

	// Luma plane width is padded up to a multiple of 16; chroma planes
	// are half that size in each dimension.
	var lumaWidth = ((this.width + 15) >> 4) << 4;
	var lumaHeight = this.height;
	var chromaWidth = lumaWidth >> 1;
	var chromaHeight = lumaHeight >> 1;

	// In some browsers WebGL doesn't like Uint8ClampedArrays (this is a
	// bug and should be fixed soon-ish), so we have to create a
	// Uint8Array view for each plane.
	if (isClampedArray && this.shouldCreateUnclampedViews) {
		y = new Uint8Array(y.buffer);
		cb = new Uint8Array(cb.buffer);
		cr = new Uint8Array(cr.buffer);
	}

	gl.useProgram(this.program);

	this.updateTexture(gl.TEXTURE0, this.textureY, lumaWidth, lumaHeight, y);
	this.updateTexture(gl.TEXTURE1, this.textureCb, chromaWidth, chromaHeight, cb);
	this.updateTexture(gl.TEXTURE2, this.textureCr, chromaWidth, chromaHeight, cr);

	gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
};
|
||||
|
||||
// Upload one plane's pixel data to `texture` on the given texture unit.
// The first upload per unit allocates storage with texImage2D; all
// subsequent uploads reuse that storage via the cheaper texSubImage2D.
WebGLRenderer.prototype.updateTexture = function(unit, texture, w, h, data) {
	var gl = this.gl;
	gl.activeTexture(unit);
	gl.bindTexture(gl.TEXTURE_2D, texture);

	if (!this.hasTextureData[unit]) {
		this.hasTextureData[unit] = true;
		gl.texImage2D(
			gl.TEXTURE_2D, 0, gl.LUMINANCE, w, h, 0,
			gl.LUMINANCE, gl.UNSIGNED_BYTE, data
		);
	}
	else {
		gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, w, h, gl.LUMINANCE, gl.UNSIGNED_BYTE, data);
	}
};
|
||||
|
||||
// Unbind and delete a texture on the given texture unit. Unbinding
// first ensures the handle is actually released.
WebGLRenderer.prototype.deleteTexture = function(unit, texture) {
	var gl = this.gl;
	gl.activeTexture(unit);
	gl.bindTexture(gl.TEXTURE_2D, null);
	gl.deleteTexture(texture);
};
|
||||
|
||||
// Static capability check: returns true if this browser can create a
// WebGL context, false otherwise. Never throws.
WebGLRenderer.IsSupported = function() {
	try {
		if (!window.WebGLRenderingContext) {
			return false;
		}

		var probe = document.createElement('canvas');
		var context =
			probe.getContext('webgl') ||
			probe.getContext('experimental-webgl');
		return !!context;
	}
	catch (err) {
		// Context creation can throw in some environments; treat any
		// failure as "not supported".
		return false;
	}
};
|
||||
|
||||
// GLSL shader sources, kept as joined string arrays so the player ships
// as a single JS file.
WebGLRenderer.SHADER = {
	// Fragment shader for video frames: sample the Y, Cb and Cr planes
	// and convert to RGBA with a BT.601 ("rec601") coefficient matrix.
	// Note the vec4 is assembled as (y, cr, cb, 1.0) to match the
	// matrix's layout.
	FRAGMENT_YCRCB_TO_RGBA: [
		'precision mediump float;',
		'uniform sampler2D textureY;',
		'uniform sampler2D textureCb;',
		'uniform sampler2D textureCr;',
		'varying vec2 texCoord;',

		'mat4 rec601 = mat4(',
			'1.16438, 0.00000, 1.59603, -0.87079,',
			'1.16438, -0.39176, -0.81297, 0.52959,',
			'1.16438, 2.01723, 0.00000, -1.08139,',
			'0, 0, 0, 1',
		');',

		'void main() {',
			'float y = texture2D(textureY, texCoord).r;',
			'float cb = texture2D(textureCb, texCoord).r;',
			'float cr = texture2D(textureCr, texCoord).r;',

			'gl_FragColor = vec4(y, cr, cb, 1.0) * rec601;',
		'}'
	].join('\n'),

	// Fragment shader for the loading animation: pixels below the
	// progress line come out white (1), the rest black (0).
	FRAGMENT_LOADING: [
		'precision mediump float;',
		'uniform float progress;',
		'varying vec2 texCoord;',

		'void main() {',
			'float c = ceil(progress-(1.0-texCoord.y));',
			'gl_FragColor = vec4(c,c,c,1);',
		'}'
	].join('\n'),

	// Vertex shader shared by both programs: passes the vertex through
	// as the texture coordinate and maps [0,1] to clip space, flipping
	// the Y axis.
	VERTEX_IDENTITY: [
		'attribute vec2 vertex;',
		'varying vec2 texCoord;',

		'void main() {',
			'texCoord = vertex;',
			'gl_Position = vec4((vertex * 2.0 - 1.0) * vec2(1, -1), 0.0, 1.0);',
		'}'
	].join('\n')
};
|
||||
|
||||
return WebGLRenderer;
|
||||
|
||||
})();
|
||||
|
|
@ -0,0 +1,88 @@
|
|||
JSMpeg.Source.WebSocket = (function(){ "use strict";

// Source that receives MPEG-TS data over a WebSocket and forwards each
// binary message to its destination (typically the TS demuxer).
// Automatically reconnects when the connection drops.
var WSSource = function(url, options) {
	this.url = url;
	this.options = options;
	this.socket = null;
	this.streaming = true;

	this.callbacks = {connect: [], data: []};
	this.destination = null;

	// Seconds to wait between reconnect attempts; a falsy value
	// (0/false) disables reconnecting entirely.
	this.reconnectInterval = options.reconnectInterval !== undefined
		? options.reconnectInterval
		: 5;
	this.shouldAttemptReconnect = !!this.reconnectInterval;

	this.completed = false;
	this.established = false;
	this.progress = 0;

	this.reconnectTimeoutId = 0;

	this.onEstablishedCallback = options.onSourceEstablished;
	this.onCompletedCallback = options.onSourceCompleted; // Never used
};

// Attach the sink that will receive each incoming chunk via write().
WSSource.prototype.connect = function(destination) {
	this.destination = destination;
};

WSSource.prototype.destroy = function() {
	clearTimeout(this.reconnectTimeoutId);
	this.shouldAttemptReconnect = false;
	// Fix: destroy() previously threw a TypeError when called before
	// start() had created the socket; guard against the null socket.
	if (this.socket) {
		this.socket.close();
	}
};

// Open the WebSocket connection and wire up its event handlers.
WSSource.prototype.start = function() {
	this.shouldAttemptReconnect = !!this.reconnectInterval;
	this.progress = 0;
	this.established = false;

	if (this.options.protocols) {
		this.socket = new WebSocket(this.url, this.options.protocols);
	}
	else {
		this.socket = new WebSocket(this.url);
	}
	this.socket.binaryType = 'arraybuffer';
	this.socket.onmessage = this.onMessage.bind(this);
	this.socket.onopen = this.onOpen.bind(this);
	this.socket.onerror = this.onClose.bind(this);
	this.socket.onclose = this.onClose.bind(this);
};

WSSource.prototype.resume = function(secondsHeadroom) {
	// Nothing to do here
};

WSSource.prototype.onOpen = function() {
	this.progress = 1;
};

WSSource.prototype.onClose = function() {
	// Both onerror and onclose land here; clearing the pending timeout
	// ensures only one reconnect attempt is scheduled at a time.
	if (this.shouldAttemptReconnect) {
		clearTimeout(this.reconnectTimeoutId);
		this.reconnectTimeoutId = setTimeout(function(){
			this.start();
		}.bind(this), this.reconnectInterval*1000);
	}
};

WSSource.prototype.onMessage = function(ev) {
	// The first received chunk marks the source as established.
	var isFirstChunk = !this.established;
	this.established = true;

	if (isFirstChunk && this.onEstablishedCallback) {
		this.onEstablishedCallback(this);
	}

	if (this.destination) {
		this.destination.write(ev.data);
	}
};

return WSSource;

})();
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
<!DOCTYPE html>
<!-- Minimal JSMpeg stream client: connects to a websocket-relay on
     this page's host and renders the stream into the canvas. -->
<html>
<head>
	<title>JSMpeg Stream Client</title>
	<style type="text/css">
		html, body {
			background-color: #111;
			text-align: center;
		}
	</style>

</head>
<body>
	<canvas id="video-canvas"></canvas>
	<script type="text/javascript" src="jsmpeg.min.js"></script>
	<script type="text/javascript">
		// Port 8082 is the websocket-relay's default WebSocket port.
		var canvas = document.getElementById('video-canvas');
		var url = 'ws://'+document.location.hostname+':8082/';
		var player = new JSMpeg.Player(url, {canvas: canvas});
	</script>
</body>
</html>
|
|
@ -0,0 +1,92 @@
|
|||
// Use the websocket-relay to serve a raw MPEG-TS over WebSockets. You can use
// ffmpeg to feed the relay. ffmpeg -> websocket-relay -> browser
// Example:
// node websocket-relay yoursecret 8081 8082
// ffmpeg -i <some input> -f mpegts http://localhost:8081/yoursecret

var fs = require('fs'),
	http = require('http'),
	WebSocket = require('ws');

if (process.argv.length < 3) {
	console.log(
		'Usage: \n' +
		'node websocket-relay.js <secret> [<stream-port> <websocket-port>]'
	);
	process.exit();
}

var STREAM_SECRET = process.argv[2],
	STREAM_PORT = process.argv[3] || 8081,
	WEBSOCKET_PORT = process.argv[4] || 8082,
	RECORD_STREAM = false;

// Websocket Server: fans incoming TS chunks out to all connected viewers.
var socketServer = new WebSocket.Server({port: WEBSOCKET_PORT, perMessageDeflate: false});
socketServer.connectionCount = 0;
socketServer.on('connection', function(socket, upgradeReq) {
	socketServer.connectionCount++;
	console.log(
		'New WebSocket Connection: ',
		// `upgradeReq` argument exists on newer ws versions; older ones
		// expose it as socket.upgradeReq.
		(upgradeReq || socket.upgradeReq).socket.remoteAddress,
		(upgradeReq || socket.upgradeReq).headers['user-agent'],
		'('+socketServer.connectionCount+' total)'
	);
	socket.on('close', function(code, message){
		socketServer.connectionCount--;
		console.log(
			'Disconnected WebSocket ('+socketServer.connectionCount+' total)'
		);
	});
});

// Send a chunk to every client whose socket is ready to receive.
socketServer.broadcast = function(data) {
	socketServer.clients.forEach(function each(client) {
		if (client.readyState === WebSocket.OPEN) {
			client.send(data);
		}
	});
};

// HTTP Server to accept incoming MPEG-TS Stream from ffmpeg
var streamServer = http.createServer( function(request, response) {
	var params = request.url.substr(1).split('/');

	if (params[0] !== STREAM_SECRET) {
		console.log(
			'Failed Stream Connection: '+ request.socket.remoteAddress + ':' +
			request.socket.remotePort + ' - wrong secret.'
		);
		response.end();
		// Fix: without this return, a client presenting the wrong secret
		// was still treated as a connected stream and broadcast to viewers.
		return;
	}

	// Disable the inactivity timeout so a long-running stream is never cut.
	response.connection.setTimeout(0);
	console.log(
		'Stream Connected: ' +
		request.socket.remoteAddress + ':' +
		request.socket.remotePort
	);
	request.on('data', function(data){
		socketServer.broadcast(data);
		if (request.socket.recording) {
			request.socket.recording.write(data);
		}
	});
	request.on('end',function(){
		console.log('close');
		if (request.socket.recording) {
			request.socket.recording.close();
		}
	});

	// Record the stream to a local file?
	if (RECORD_STREAM) {
		var path = 'recordings/' + Date.now() + '.ts';
		request.socket.recording = fs.createWriteStream(path);
	}
});
// Keep the socket open for streaming
streamServer.headersTimeout = 0;
streamServer.listen(STREAM_PORT);

console.log('Listening for incoming MPEG-TS Stream on http://127.0.0.1:'+STREAM_PORT+'/<secret>');
console.log('Awaiting WebSocket connections on ws://127.0.0.1:'+WEBSOCKET_PORT+'/');
|
Loading…
Reference in New Issue