Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

VideoProcessors v3.0 #2053

Open
wants to merge 12 commits into
base: master
Choose a base branch
from
Open
18 changes: 18 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,24 @@ The Twilio Programmable Video SDKs use [Semantic Versioning](http://www.semver.o

**Version 1.x reached End of Life on September 8th, 2021.** See the changelog entry [here](https://www.twilio.com/changelog/end-of-life-complete-for-unsupported-versions-of-the-programmable-video-sdk). Support for the 1.x version ended on December 4th, 2020.

2.29.0-beta.1 (in progress)
===========================

Changes
-------

- `AddProcessorOptions.outputFrameBufferContextType = 'bitmaprenderer'` is now supported on Safari and Firefox. (VBLOCKS-3643, VBLOCKS-3644)

2.29.0-preview.1 (August 13, 2024)
==================================

Changes
-------

- `AddProcessorOptions.inputFrameBufferType` now has a new value `videoframe`. On browsers that support [`VideoFrame`](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame), the `inputFrameBuffer` argument of [`VideoProcessor.processFrame()`](https://twilio.github.io/twilio-video-processors.js/classes/VirtualBackgroundProcessor.html#processFrame) will be a `VideoFrame`. On other supported browsers, it will be an `HTMLVideoElement`.
- `AddProcessorOptions.outputFrameBufferContextType` now has a new value `bitmaprenderer`. Currently, this is only **supported for Chromium-based browsers**. On other supported browsers, it falls back to `2d`.
- Patched the build script to work around [markdown-it/linkify-it#111](https://github.com/markdown-it/linkify-it/issues/111).

2.28.1 (October 3, 2023)
========================

Expand Down
68 changes: 68 additions & 0 deletions lib/media/track/capturevideoframes.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
/* globals MediaStreamTrackGenerator, MediaStreamTrackProcessor, TransformStream */
'use strict';

const { DEFAULT_FRAME_RATE } = require('../../util/constants');

/**
 * Capture frames from a video element by sampling it on a timer.
 * Fallback pipeline for browsers that do not support the Insertable
 * Streams API (MediaStreamTrackProcessor/MediaStreamTrackGenerator).
 * @param {HTMLVideoElement} videoEl - element whose srcObject holds the video MediaStreamTrack
 * @param {function(): Promise<void>} processVideoFrame - invoked once per sampled frame;
 *   the returned Promise throttles the pipeline until the frame is processed
 * @returns {function(): void} call to stop capturing frames
 */
function captureVideoFramesSetInterval(videoEl, processVideoFrame) {
  const [track] = videoEl.srcObject.getVideoTracks();
  const { frameRate = DEFAULT_FRAME_RATE } = track.getSettings();
  let sampleInterval;

  // Tick at the track's frame rate. Each tick enqueues an empty chunk whose
  // only purpose is to drive one processVideoFrame() call downstream.
  const readable = new ReadableStream({
    start(controller) {
      sampleInterval = setInterval(
        () => controller.enqueue(),
        1000 / frameRate
      );
    }
  });

  const transformer = new TransformStream({
    // Returning the Promise applies backpressure: the next chunk is not
    // transformed until the current frame finishes processing.
    transform() {
      return processVideoFrame();
    }
  });

  readable
    .pipeThrough(transformer)
    .pipeTo(new WritableStream())
    .then(() => { /* noop */ })
    // A rejection from processVideoFrame errors the pipe; without this
    // handler it would surface as an unhandled promise rejection.
    .catch(() => { /* noop */ });

  return () => {
    clearInterval(sampleInterval);
  };
}

/**
 * Capture frames using the Insertable Streams API
 * (MediaStreamTrackProcessor/MediaStreamTrackGenerator).
 * @param {HTMLVideoElement} videoEl - element whose srcObject holds the video MediaStreamTrack
 * @param {function(VideoFrame=): Promise<void>} processVideoFrame - invoked once per frame;
 *   when videoFrameType is 'videoframe' it receives the VideoFrame (and is
 *   responsible for it), otherwise the frame is closed here and the callback
 *   is invoked without arguments
 * @param {string} [videoFrameType] - 'videoframe' to hand the raw VideoFrame to the callback
 * @returns {function(): void} call to stop capturing frames
 */
function captureVideoFramesInsertableStreams(videoEl, processVideoFrame, videoFrameType) {
  const [track] = videoEl.srcObject.getVideoTracks();
  const { readable } = new MediaStreamTrackProcessor({ track });
  const generator = new MediaStreamTrackGenerator({ kind: 'video' });
  let shouldStop = false;

  const transformer = new TransformStream({
    // Nothing is ever enqueued downstream; the generator only serves as a
    // sink that keeps the pipeline flowing.
    transform(videoFrame, controller) {
      const promise = videoFrameType === 'videoframe'
        ? processVideoFrame(videoFrame)
        : Promise.resolve(videoFrame.close())
          .then(processVideoFrame);
      return promise.finally(() => {
        if (shouldStop) {
          controller.terminate();
        }
      });
    }
  });

  readable
    .pipeThrough(transformer)
    .pipeTo(generator.writable)
    .then(() => { /* noop */ })
    // controller.terminate() errors the writable side of the pipe on a
    // normal stop, and processVideoFrame rejections also propagate here;
    // without this handler both surface as unhandled promise rejections.
    .catch(() => { /* noop */ });

  return () => {
    shouldStop = true;
  };
}

// Prefer the Insertable Streams pipeline when the browser provides both
// MediaStreamTrackProcessor and MediaStreamTrackGenerator; otherwise fall
// back to timer-driven frame sampling.
module.exports = typeof MediaStreamTrackGenerator === 'function' && typeof MediaStreamTrackProcessor === 'function'
  ? captureVideoFramesInsertableStreams
  : captureVideoFramesSetInterval;
87 changes: 52 additions & 35 deletions lib/media/track/videotrack.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
'use strict';

const MediaTrack = require('./mediatrack');
const captureVideoFrames = require('./capturevideoframes');
const VideoProcessorEventObserver = require('./videoprocessoreventobserver');
const { guessBrowser } = require('../../webrtc/util');
const { DEFAULT_FRAME_RATE } = require('../../util/constants');

/**
Expand Down Expand Up @@ -34,10 +36,6 @@ class VideoTrack extends MediaTrack {
constructor(mediaTrackTransceiver, options) {
super(mediaTrackTransceiver, options);
Object.defineProperties(this, {
_captureTimeoutId: {
value: null,
writable: true
},
_isCapturing: {
value: false,
writable: true
Expand All @@ -58,6 +56,10 @@ class VideoTrack extends MediaTrack {
value: {},
writable: true,
},
_stopCapture: {
value: () => {},
writable: true
},
_unmuteHandler: {
value: null,
writable: true
Expand Down Expand Up @@ -129,30 +131,21 @@ class VideoTrack extends MediaTrack {
this._processorEventObserver.emit('start');
this._log.debug('Start capturing frames');

let startTime = Date.now();
let processFramePeriodMs;
const { inputFrameBufferType } = this._processorOptions;

this._dummyEl.play().then(() => {
const captureFrame = cb => {
clearTimeout(this._captureTimeoutId);
const { frameRate = DEFAULT_FRAME_RATE } = this.mediaStreamTrack.getSettings();
const capturePeriodMs = Math.floor(1000 / frameRate);
let delay = capturePeriodMs - processFramePeriodMs;
if (delay < 0 || typeof processFramePeriodMs !== 'number') {
delay = 0;
}
this._captureTimeoutId = setTimeout(cb, delay);
};
const process = () => {
const process = videoFrame => {
const checkResult = this._checkIfCanCaptureFrames();
if (!checkResult.canCaptureFrames) {
if (videoFrame) {
videoFrame.close();
}
this._isCapturing = false;
this._stopCapture();
this._processorEventObserver.emit('stop', checkResult.message);
this._log.debug('Cannot capture frames. Stopping capturing frames.');
return;
return Promise.resolve();
}
startTime = Date.now();

const { width = 0, height = 0 } = this.mediaStreamTrack.getSettings();
// Setting the canvas' dimension triggers a redraw.
// Only set it if it has changed.
Expand All @@ -165,32 +158,45 @@ class VideoTrack extends MediaTrack {
this._inputFrame.width = width;
this._inputFrame.height = height;
}
this._inputFrame.getContext('2d').drawImage(this._dummyEl, 0, 0, width, height);
this._inputFrame.getContext('2d').drawImage(
this._dummyEl,
0,
0,
width,
height
);
}

const input = videoFrame || (
['video', 'videoframe'].includes(inputFrameBufferType)
? this._dummyEl
: this._inputFrame
);
let result = null;

try {
const input = this._processorOptions.inputFrameBufferType === 'video' ? this._dummyEl : this._inputFrame;
result = this.processor.processFrame(input, this._outputFrame);
} catch (ex) {
this._log.debug('Exception detected after calling processFrame.', ex);
}
((result instanceof Promise) ? result : Promise.resolve(result))
return ((result instanceof Promise) ? result : Promise.resolve(result))
.then(() => {
if (this._outputFrame) {
if (typeof this.processedTrack.requestFrame === 'function') {
this.processedTrack.requestFrame();
}
this._processorEventObserver.emit('stats');
}
})
.finally(() => {
processFramePeriodMs = Date.now() - startTime;
captureFrame(process);
});
};
captureFrame(process);
}).catch(error => this._log.error('Video element cannot be played', { error, track: this }));
this._stopCapture = captureVideoFrames(
this._dummyEl,
process,
inputFrameBufferType
);
}).catch(error => this._log.error(
'Video element cannot be played',
{ error, track: this }
));
}

/**
Expand Down Expand Up @@ -295,7 +301,11 @@ class VideoTrack extends MediaTrack {
if (typeof OffscreenCanvas === 'undefined' && inputFrameBufferType === 'offscreencanvas') {
throw new Error('OffscreenCanvas is not supported by this browser.');
}
if (inputFrameBufferType && inputFrameBufferType !== 'video' && inputFrameBufferType !== 'canvas' && inputFrameBufferType !== 'offscreencanvas') {
if (inputFrameBufferType
&& inputFrameBufferType !== 'videoframe'
&& inputFrameBufferType !== 'video'
&& inputFrameBufferType !== 'canvas'
&& inputFrameBufferType !== 'offscreencanvas') {
throw new Error(`Invalid inputFrameBufferType of ${inputFrameBufferType}`);
}
if (!inputFrameBufferType) {
Expand All @@ -322,6 +332,7 @@ class VideoTrack extends MediaTrack {
// that the correct type is used and on Firefox, it throws an exception if you try to capture
// frames prior calling getContext https://bugzilla.mozilla.org/show_bug.cgi?id=1572422
outputFrameBufferContextType = outputFrameBufferContextType || '2d';

const ctx = this._outputFrame.getContext(outputFrameBufferContextType);
if (!ctx) {
throw new Error(`Cannot get outputFrameBufferContextType: ${outputFrameBufferContextType}.`);
Expand Down Expand Up @@ -484,7 +495,8 @@ class VideoTrack extends MediaTrack {

this._processorEventObserver.emit('remove');
this._log.debug('Removing VideoProcessor from the VideoTrack', processor);
clearTimeout(this._captureTimeoutId);
this._stopCapture();
this._stopCapture = () => {};
this.mediaStreamTrack.removeEventListener('unmute', this._unmuteHandler);
this._processorOptions = {};
this._unmuteHandler = null;
Expand Down Expand Up @@ -529,7 +541,7 @@ function dimensionsChanged(track, elem) {
* Any exception raised (either synchronously or asynchronously) in `processFrame` will result in the frame being dropped.
* This callback has the following signature:<br/><br/>
* <code>processFrame(</code><br/>
* &nbsp;&nbsp;<code>inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement,</code><br/>
* &nbsp;&nbsp;<code>inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement | VideoFrame,</code><br/>
* &nbsp;&nbsp;<code>outputFrameBuffer: HTMLCanvasElement</code><br/>
* <code>): Promise&lt;void&gt; | void;</code>
*
Expand All @@ -554,6 +566,10 @@ function dimensionsChanged(track, elem) {
* Possible values include the following.
* <br/>
* <br/>
* `videoframe` - Your Video Processor will receive a [VideoFrame](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame).
* On browsers that do not support `VideoFrame`, it will receive an [HTMLVideoElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement) instead.
* <br/>
* <br/>
* `offscreencanvas` - Your Video Processor will receive an [OffscreenCanvas](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas)
* which is good for canvas-related processing that can be rendered off screen.
* <br/>
Expand All @@ -567,8 +583,9 @@ function dimensionsChanged(track, elem) {
* the frame directly to your output canvas.
* @property {string} [outputFrameBufferContextType="2d"] - The SDK needs the [context type](https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/getContext)
* that your Video Processor uses in order to properly generate the processed track. For example, if your Video Processor uses WebGL2 (`canvas.getContext('webgl2')`),
* you should set `outputFrameBufferContextType` to `webgl2`. Or if you're using Canvas 2D processing (`canvas.getContext('2d')`),
* you should set `outputFrameBufferContextType` to `2d`.
* you should set `outputFrameBufferContextType` to `webgl2`. If you're using Canvas 2D processing (`canvas.getContext('2d')`),
* you should set `outputFrameBufferContextType` to `2d`. If the output frame is an [ImageBitmap](https://developer.mozilla.org/en-US/docs/Web/API/ImageBitmap),
* you should set `outputFrameBufferContextType` to `bitmaprenderer`.
*/

/**
Expand Down
6 changes: 4 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"name": "twilio-video",
"title": "Twilio Video",
"description": "Twilio Video JavaScript Library",
"version": "2.28.2-dev",
"version": "2.29.0-dev",
"homepage": "https://twilio.com",
"author": "Mark Andrus Roberts <[email protected]>",
"contributors": [
Expand Down Expand Up @@ -74,6 +74,7 @@
"node-http-server": "^8.1.2",
"npm-run-all": "^4.0.2",
"nyc": "^15.1.0",
"regex-replace": "^2.3.1",
"requirejs": "^2.3.3",
"rimraf": "^2.6.1",
"simple-git": "^1.126.0",
Expand All @@ -99,6 +100,7 @@
"lint:js": "eslint ./lib ./test/*.js ./docker/**/*.js ./test/framework/*.js ./test/lib/*.js ./test/integration/** ./test/unit/** ",
"lint:ts": "eslint ./tsdef/*.ts ./lib/**/*.ts",
"lint": "npm-run-all lint:js lint:ts",
"patch-linkifyit-111": "regex-replace \"readonly linkify: LinkifyIt.LinkifyIt;\" \"readonly linkify: typeof LinkifyIt;\" \"node_modules/@types/markdown-it/lib/index.d.ts\" --filecontents",
"printVersion": "node --version && npm --version",
"test:unit": "npm-run-all printVersion build:es5 && nyc --report-dir=./coverage --include=lib/**/* --reporter=html --reporter=lcov --reporter=text mocha -r ts-node/register ./test/unit/*",
"test:unit:quick": "nyc --report-dir=./coverage --include=lib/**/* --reporter=html --reporter=lcov mocha -r ts-node/register",
Expand Down Expand Up @@ -137,7 +139,7 @@
"test:framework:install": "npm install chromedriver && npm install selenium-webdriver && npm install geckodriver && npm install puppeteer",
"test:framework": "npm-run-all test:framework:install test:framework:no-framework test:framework:react",
"test": "npm-run-all test:unit test:integration",
"build:es5": "rimraf ./es5 && mkdir -p es5 && tsc tsdef/twilio-video-tests.ts --noEmit --lib es2018,dom && tsc",
"build:es5": "rimraf ./es5 && mkdir -p es5 && npm run patch-linkifyit-111 && tsc tsdef/twilio-video-tests.ts --noEmit --lib es2018,dom && tsc",
"build:js": "node ./scripts/build.js ./src/twilio-video.js ./LICENSE.md ./dist/twilio-video.js",
"build:min.js": "uglifyjs ./dist/twilio-video.js -o ./dist/twilio-video.min.js --comments \"/^! twilio-video.js/\" -b beautify=false,ascii_only=true",
"build": "npm-run-all clean lint docs test:unit test:integration build:es5 build:js build:min.js test:umd",
Expand Down
53 changes: 53 additions & 0 deletions test/lib/mockstreams.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
'use strict';

/**
 * Minimal stand-in for the DOM ReadableStream used by the unit tests.
 * Supports just enough of pipeThrough/pipeTo to drive
 * capturevideoframes.js: chunks enqueued by the source are routed through
 * the attached TransformStream and into the attached WritableStream.
 */
class ReadableStream {
  constructor({ start }) {
    this._transformStream = null;
    this._writableStream = null;
    // Defer start() so pipeThrough/pipeTo can attach before the source
    // begins enqueuing chunks.
    const sourceController = {
      enqueue: chunk => this._route(chunk)
    };
    setTimeout(() => start(sourceController));
  }

  // Forward a source chunk into the transform stage, if one is attached,
  // and from there into the writable stage, if one is attached.
  _route(chunk) {
    if (!this._transformStream) {
      return;
    }
    const transformController = {
      enqueue: transformed => {
        if (this._writableStream) {
          this._writableStream.write(transformed);
        }
      }
    };
    this._transformStream.transform(chunk, transformController);
  }

  pipeThrough(transformStream) {
    this._transformStream = transformStream;
    return this;
  }

  pipeTo(writableStream) {
    this._writableStream = writableStream;
    return Promise.resolve();
  }
}

/**
 * Minimal stand-in for the DOM TransformStream: merely exposes the supplied
 * transform callback for the mock ReadableStream.pipeThrough to invoke.
 */
class TransformStream {
  constructor(transformer) {
    const { transform } = transformer;
    this.transform = transform;
  }
}

/**
 * Minimal stand-in for the DOM WritableStream: write() is a no-op.
 */
class WritableStream {
  constructor() {
    this.write = () => {};
  }
}

/**
 * Install the mock stream classes (ReadableStream, TransformStream,
 * WritableStream) on the given global object.
 * @param {object} [_global] - target object; defaults to Node's `global`
 * @returns {object} the object the mocks were installed on
 */
function mockStreams(_global) {
  const target = _global || global;
  target.ReadableStream = ReadableStream;
  target.TransformStream = TransformStream;
  target.WritableStream = WritableStream;
  return target;
}

module.exports = mockStreams;
1 change: 1 addition & 0 deletions test/unit/index.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
'use strict';

if (typeof window === 'undefined') {
require('../lib/mockstreams')();
require('../lib/mockwebrtc')();
}

Expand Down
Loading