diff --git a/dist/build/selfie_segmentation_landscape.tflite b/dist/build/selfie_segmentation_landscape.tflite new file mode 100644 index 0000000..4ea3f8a Binary files /dev/null and b/dist/build/selfie_segmentation_landscape.tflite differ diff --git a/dist/build/tflite-1-0-0.js b/dist/build/tflite-1-0-0.js new file mode 100644 index 0000000..7d2e41c --- /dev/null +++ b/dist/build/tflite-1-0-0.js @@ -0,0 +1,21 @@ + +var createTwilioTFLiteModule = (function() { + var _scriptDir = typeof document !== 'undefined' && document.currentScript ? document.currentScript.src : undefined; + if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename; + return ( +function(createTwilioTFLiteModule) { + createTwilioTFLiteModule = createTwilioTFLiteModule || {}; + +var Module=typeof createTwilioTFLiteModule!=="undefined"?createTwilioTFLiteModule:{};var readyPromiseResolve,readyPromiseReject;Module["ready"]=new Promise(function(resolve,reject){readyPromiseResolve=resolve;readyPromiseReject=reject});var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram="./this.program";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window==="object";ENVIRONMENT_IS_WORKER=typeof importScripts==="function";ENVIRONMENT_IS_NODE=typeof process==="object"&&typeof process.versions==="object"&&typeof process.versions.node==="string";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var scriptDirectory="";function locateFile(path){if(Module["locateFile"]){return Module["locateFile"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require("path").dirname(scriptDirectory)+"/"}else{scriptDirectory=__dirname+"/"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require("fs");if(!nodePath)nodePath=require("path");filename=nodePath["normalize"](filename);return nodeFS["readFileSync"](filename,binary?null:"utf8")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process["argv"].length>1){thisProgram=process["argv"][1].replace(/\\/g,"/")}arguments_=process["argv"].slice(2);process["on"]("uncaughtException",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process["on"]("unhandledRejection",abort);quit_=function(status){process["exit"](status)};Module["inspect"]=function(){return"[Emscripten Module object]"}}else if(ENVIRONMENT_IS_SHELL){if(typeof read!="undefined"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer==="function"){return new Uint8Array(readbuffer(f))}data=read(f,"binary");assert(typeof data==="object");return data};if(typeof scriptArgs!="undefined"){arguments_=scriptArgs}else if(typeof arguments!="undefined"){arguments_=arguments}if(typeof quit==="function"){quit_=function(status){quit(status)}}if(typeof print!=="undefined"){if(typeof console==="undefined")console={};console.log=print;console.warn=console.error=typeof printErr!=="undefined"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(typeof 
document!=="undefined"&&document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf("blob:")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf("/")+1)}else{scriptDirectory=""}{read_=function(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.responseType="arraybuffer";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open("GET",url,true);xhr.responseType="arraybuffer";xhr.onload=function(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}var out=Module["print"]||console.log.bind(console);var err=Module["printErr"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module["arguments"])arguments_=Module["arguments"];if(Module["thisProgram"])thisProgram=Module["thisProgram"];if(Module["quit"])quit_=Module["quit"];var wasmBinary;if(Module["wasmBinary"])wasmBinary=Module["wasmBinary"];var noExitRuntime=Module["noExitRuntime"]||true;if(typeof WebAssembly!=="object"){abort("no native wasm support detected")}var wasmMemory;var ABORT=false;var EXITSTATUS;function assert(condition,text){if(!condition){abort("Assertion failed: "+text)}}var UTF8Decoder=typeof TextDecoder!=="undefined"?new TextDecoder("utf8"):undefined;function UTF8ArrayToString(heap,idx,maxBytesToRead){var endIdx=idx+maxBytesToRead;var endPtr=idx;while(heap[endPtr]&&!(endPtr>=endIdx))++endPtr;if(endPtr-idx>16&&heap.subarray&&UTF8Decoder){return UTF8Decoder.decode(heap.subarray(idx,endPtr))}else{var str="";while(idx>10,56320|ch&1023)}}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(HEAPU8,ptr,maxBytesToRead):""}function writeAsciiToMemory(str,buffer,dontAddNull){for(var i=0;i>0]=str.charCodeAt(i)}if(!dontAddNull)HEAP8[buffer>>0]=0}function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module["HEAP8"]=HEAP8=new Int8Array(buf);Module["HEAP16"]=HEAP16=new Int16Array(buf);Module["HEAP32"]=HEAP32=new Int32Array(buf);Module["HEAPU8"]=HEAPU8=new Uint8Array(buf);Module["HEAPU16"]=HEAPU16=new Uint16Array(buf);Module["HEAPU32"]=HEAPU32=new Uint32Array(buf);Module["HEAPF32"]=HEAPF32=new Float32Array(buf);Module["HEAPF64"]=HEAPF64=new Float64Array(buf)}var INITIAL_MEMORY=Module["INITIAL_MEMORY"]||16777216;var wasmTable;var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function preRun(){if(Module["preRun"]){if(typeof Module["preRun"]=="function")Module["preRun"]=[Module["preRun"]];while(Module["preRun"].length){addOnPreRun(Module["preRun"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){runtimeExited=true}function postRun(){if(Module["postRun"]){if(typeof 
Module["postRun"]=="function")Module["postRun"]=[Module["postRun"]];while(Module["postRun"].length){addOnPostRun(Module["postRun"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnInit(cb){__ATINIT__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module["preloadedImages"]={};Module["preloadedAudios"]={};function abort(what){if(Module["onAbort"]){Module["onAbort"](what)}what+="";err(what);ABORT=true;EXITSTATUS=1;what="abort("+what+"). Build with -s ASSERTIONS=1 for more info.";var e=new WebAssembly.RuntimeError(what);readyPromiseReject(e);throw e}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix="data:application/octet-stream;base64,";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix="file://";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile="tflite-1-0-0.wasm";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(file){try{if(file==wasmBinaryFile&&wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(file)}else{throw"both async and sync fetching of the wasm failed"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)){if(typeof fetch==="function"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:"same-origin"}).then(function(response){if(!response["ok"]){throw"failed to load wasm binary file at '"+wasmBinaryFile+"'"}return response["arrayBuffer"]()}).catch(function(){return getBinary(wasmBinaryFile)})}else{if(readAsync){return new Promise(function(resolve,reject){readAsync(wasmBinaryFile,function(response){resolve(new Uint8Array(response))},reject)})}}}return Promise.resolve().then(function(){return getBinary(wasmBinaryFile)})}function createWasm(){var info={"a":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module["asm"]=exports;wasmMemory=Module["asm"]["q"];updateGlobalBufferAndViews(wasmMemory.buffer);wasmTable=Module["asm"]["D"];addOnInit(Module["asm"]["r"]);removeRunDependency("wasm-instantiate")}addRunDependency("wasm-instantiate");function receiveInstantiatedSource(output){receiveInstance(output["instance"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){var result=WebAssembly.instantiate(binary,info);return result}).then(receiver,function(reason){err("failed to asynchronously prepare wasm: "+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming==="function"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch==="function"){return fetch(wasmBinaryFile,{credentials:"same-origin"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return 
result.then(receiveInstantiatedSource,function(reason){err("wasm streaming compile failed: "+reason);err("falling back to ArrayBuffer instantiation");return instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module["instantiateWasm"]){try{var exports=Module["instantiateWasm"](info,receiveInstance);return exports}catch(e){err("Module.instantiateWasm callback failed with error: "+e);return false}}instantiateAsync().catch(readyPromiseReject);return{}}function callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof callback=="function"){callback(Module);continue}var func=callback.func;if(typeof func==="number"){if(callback.arg===undefined){wasmTable.get(func)()}else{wasmTable.get(func)(callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var runtimeKeepaliveCounter=0;function keepRuntimeAlive(){return noExitRuntime||runtimeKeepaliveCounter>0}function _abort(){abort()}var _emscripten_get_now;if(ENVIRONMENT_IS_NODE){_emscripten_get_now=function(){var t=process["hrtime"]();return t[0]*1e3+t[1]/1e6}}else if(typeof dateNow!=="undefined"){_emscripten_get_now=dateNow}else _emscripten_get_now=function(){return performance.now()};var _emscripten_get_now_is_monotonic=true;function setErrNo(value){HEAP32[___errno_location()>>2]=value;return value}function _clock_gettime(clk_id,tp){var now;if(clk_id===0){now=Date.now()}else if((clk_id===1||clk_id===4)&&_emscripten_get_now_is_monotonic){now=_emscripten_get_now()}else{setErrNo(28);return-1}HEAP32[tp>>2]=now/1e3|0;HEAP32[tp+4>>2]=now%1e3*1e3*1e3|0;return 0}function _dlopen(filename,flag){abort("To use dlopen, you need to use Emscripten's linking support, see https://github.com/emscripten-core/emscripten/wiki/Linking")}function _dlsym(handle,symbol){abort("To use dlopen, you need to use Emscripten's linking support, see https://github.com/emscripten-core/emscripten/wiki/Linking")}function _emscripten_get_heap_max(){return 2147483648}function _emscripten_memcpy_big(dest,src,num){HEAPU8.copyWithin(dest,src,src+num)}function emscripten_realloc_buffer(size){try{wasmMemory.grow(size-buffer.byteLength+65535>>>16);updateGlobalBufferAndViews(wasmMemory.buffer);return 1}catch(e){}}function _emscripten_resize_heap(requestedSize){var oldSize=HEAPU8.length;var maxHeapSize=2147483648;if(requestedSize>maxHeapSize){return false}for(var cutDown=1;cutDown<=4;cutDown*=2){var overGrownHeapSize=oldSize*(1+.2/cutDown);overGrownHeapSize=Math.min(overGrownHeapSize,requestedSize+100663296);var newSize=Math.min(maxHeapSize,alignUp(Math.max(requestedSize,overGrownHeapSize),65536));var replacement=emscripten_realloc_buffer(newSize);if(replacement){return true}}return false}function _emscripten_thread_sleep(msecs){var start=_emscripten_get_now();while(_emscripten_get_now()-start0){return}preRun();if(runDependencies>0){return}function doRun(){if(calledRun)return;calledRun=true;Module["calledRun"]=true;if(ABORT)return;initRuntime();preMain();readyPromiseResolve(Module);if(Module["onRuntimeInitialized"])Module["onRuntimeInitialized"]();postRun()}if(Module["setStatus"]){Module["setStatus"]("Running...");setTimeout(function(){setTimeout(function(){Module["setStatus"]("")},1);doRun()},1)}else{doRun()}}Module["run"]=run;function exit(status,implicit){EXITSTATUS=status;if(implicit&&keepRuntimeAlive()&&status===0){return}if(keepRuntimeAlive()){}else{exitRuntime();if(Module["onExit"])Module["onExit"](status);ABORT=true}quit_(status,new 
ExitStatus(status))}if(Module["preInit"]){if(typeof Module["preInit"]=="function")Module["preInit"]=[Module["preInit"]];while(Module["preInit"].length>0){Module["preInit"].pop()()}}run(); + + + return createTwilioTFLiteModule.ready +} +); +})(); +if (typeof exports === 'object' && typeof module === 'object') + module.exports = createTwilioTFLiteModule; +else if (typeof define === 'function' && define['amd']) + define([], function() { return createTwilioTFLiteModule; }); +else if (typeof exports === 'object') + exports["createTwilioTFLiteModule"] = createTwilioTFLiteModule; diff --git a/dist/build/tflite-1-0-0.wasm b/dist/build/tflite-1-0-0.wasm new file mode 100755 index 0000000..ead8e52 Binary files /dev/null and b/dist/build/tflite-1-0-0.wasm differ diff --git a/dist/build/tflite-simd-1-0-0.js b/dist/build/tflite-simd-1-0-0.js new file mode 100644 index 0000000..d68e168 --- /dev/null +++ b/dist/build/tflite-simd-1-0-0.js @@ -0,0 +1,21 @@ + +var createTwilioTFLiteSIMDModule = (function() { + var _scriptDir = typeof document !== 'undefined' && document.currentScript ? document.currentScript.src : undefined; + if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename; + return ( +function(createTwilioTFLiteSIMDModule) { + createTwilioTFLiteSIMDModule = createTwilioTFLiteSIMDModule || {}; + +var Module=typeof createTwilioTFLiteSIMDModule!=="undefined"?createTwilioTFLiteSIMDModule:{};var readyPromiseResolve,readyPromiseReject;Module["ready"]=new Promise(function(resolve,reject){readyPromiseResolve=resolve;readyPromiseReject=reject});var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram="./this.program";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window==="object";ENVIRONMENT_IS_WORKER=typeof importScripts==="function";ENVIRONMENT_IS_NODE=typeof process==="object"&&typeof process.versions==="object"&&typeof process.versions.node==="string";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var scriptDirectory="";function locateFile(path){if(Module["locateFile"]){return Module["locateFile"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require("path").dirname(scriptDirectory)+"/"}else{scriptDirectory=__dirname+"/"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require("fs");if(!nodePath)nodePath=require("path");filename=nodePath["normalize"](filename);return nodeFS["readFileSync"](filename,binary?null:"utf8")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process["argv"].length>1){thisProgram=process["argv"][1].replace(/\\/g,"/")}arguments_=process["argv"].slice(2);process["on"]("uncaughtException",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process["on"]("unhandledRejection",abort);quit_=function(status){process["exit"](status)};Module["inspect"]=function(){return"[Emscripten Module object]"}}else if(ENVIRONMENT_IS_SHELL){if(typeof read!="undefined"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer==="function"){return new 
Uint8Array(readbuffer(f))}data=read(f,"binary");assert(typeof data==="object");return data};if(typeof scriptArgs!="undefined"){arguments_=scriptArgs}else if(typeof arguments!="undefined"){arguments_=arguments}if(typeof quit==="function"){quit_=function(status){quit(status)}}if(typeof print!=="undefined"){if(typeof console==="undefined")console={};console.log=print;console.warn=console.error=typeof printErr!=="undefined"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(typeof document!=="undefined"&&document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf("blob:")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf("/")+1)}else{scriptDirectory=""}{read_=function(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.responseType="arraybuffer";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open("GET",url,true);xhr.responseType="arraybuffer";xhr.onload=function(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}var out=Module["print"]||console.log.bind(console);var err=Module["printErr"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module["arguments"])arguments_=Module["arguments"];if(Module["thisProgram"])thisProgram=Module["thisProgram"];if(Module["quit"])quit_=Module["quit"];var wasmBinary;if(Module["wasmBinary"])wasmBinary=Module["wasmBinary"];var noExitRuntime=Module["noExitRuntime"]||true;if(typeof WebAssembly!=="object"){abort("no native wasm support detected")}var wasmMemory;var ABORT=false;var EXITSTATUS;function assert(condition,text){if(!condition){abort("Assertion failed: "+text)}}var UTF8Decoder=typeof TextDecoder!=="undefined"?new TextDecoder("utf8"):undefined;function UTF8ArrayToString(heap,idx,maxBytesToRead){var endIdx=idx+maxBytesToRead;var endPtr=idx;while(heap[endPtr]&&!(endPtr>=endIdx))++endPtr;if(endPtr-idx>16&&heap.subarray&&UTF8Decoder){return UTF8Decoder.decode(heap.subarray(idx,endPtr))}else{var str="";while(idx>10,56320|ch&1023)}}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(HEAPU8,ptr,maxBytesToRead):""}function writeAsciiToMemory(str,buffer,dontAddNull){for(var i=0;i>0]=str.charCodeAt(i)}if(!dontAddNull)HEAP8[buffer>>0]=0}function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module["HEAP8"]=HEAP8=new Int8Array(buf);Module["HEAP16"]=HEAP16=new Int16Array(buf);Module["HEAP32"]=HEAP32=new Int32Array(buf);Module["HEAPU8"]=HEAPU8=new Uint8Array(buf);Module["HEAPU16"]=HEAPU16=new Uint16Array(buf);Module["HEAPU32"]=HEAPU32=new Uint32Array(buf);Module["HEAPF32"]=HEAPF32=new Float32Array(buf);Module["HEAPF64"]=HEAPF64=new Float64Array(buf)}var INITIAL_MEMORY=Module["INITIAL_MEMORY"]||16777216;var wasmTable;var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function 
preRun(){if(Module["preRun"]){if(typeof Module["preRun"]=="function")Module["preRun"]=[Module["preRun"]];while(Module["preRun"].length){addOnPreRun(Module["preRun"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){runtimeExited=true}function postRun(){if(Module["postRun"]){if(typeof Module["postRun"]=="function")Module["postRun"]=[Module["postRun"]];while(Module["postRun"].length){addOnPostRun(Module["postRun"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnInit(cb){__ATINIT__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module["preloadedImages"]={};Module["preloadedAudios"]={};function abort(what){if(Module["onAbort"]){Module["onAbort"](what)}what+="";err(what);ABORT=true;EXITSTATUS=1;what="abort("+what+"). Build with -s ASSERTIONS=1 for more info.";var e=new WebAssembly.RuntimeError(what);readyPromiseReject(e);throw e}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix="data:application/octet-stream;base64,";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix="file://";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile="tflite-simd-1-0-0.wasm";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(file){try{if(file==wasmBinaryFile&&wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(file)}else{throw"both async and sync fetching of the wasm failed"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)){if(typeof fetch==="function"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:"same-origin"}).then(function(response){if(!response["ok"]){throw"failed to load wasm binary file at '"+wasmBinaryFile+"'"}return response["arrayBuffer"]()}).catch(function(){return getBinary(wasmBinaryFile)})}else{if(readAsync){return new Promise(function(resolve,reject){readAsync(wasmBinaryFile,function(response){resolve(new Uint8Array(response))},reject)})}}}return Promise.resolve().then(function(){return getBinary(wasmBinaryFile)})}function createWasm(){var info={"a":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module["asm"]=exports;wasmMemory=Module["asm"]["q"];updateGlobalBufferAndViews(wasmMemory.buffer);wasmTable=Module["asm"]["D"];addOnInit(Module["asm"]["r"]);removeRunDependency("wasm-instantiate")}addRunDependency("wasm-instantiate");function receiveInstantiatedSource(output){receiveInstance(output["instance"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){var result=WebAssembly.instantiate(binary,info);return 
result}).then(receiver,function(reason){err("failed to asynchronously prepare wasm: "+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming==="function"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch==="function"){return fetch(wasmBinaryFile,{credentials:"same-origin"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return result.then(receiveInstantiatedSource,function(reason){err("wasm streaming compile failed: "+reason);err("falling back to ArrayBuffer instantiation");return instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module["instantiateWasm"]){try{var exports=Module["instantiateWasm"](info,receiveInstance);return exports}catch(e){err("Module.instantiateWasm callback failed with error: "+e);return false}}instantiateAsync().catch(readyPromiseReject);return{}}function callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof callback=="function"){callback(Module);continue}var func=callback.func;if(typeof func==="number"){if(callback.arg===undefined){wasmTable.get(func)()}else{wasmTable.get(func)(callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var runtimeKeepaliveCounter=0;function keepRuntimeAlive(){return noExitRuntime||runtimeKeepaliveCounter>0}function _abort(){abort()}var _emscripten_get_now;if(ENVIRONMENT_IS_NODE){_emscripten_get_now=function(){var t=process["hrtime"]();return t[0]*1e3+t[1]/1e6}}else if(typeof dateNow!=="undefined"){_emscripten_get_now=dateNow}else _emscripten_get_now=function(){return performance.now()};var _emscripten_get_now_is_monotonic=true;function setErrNo(value){HEAP32[___errno_location()>>2]=value;return value}function _clock_gettime(clk_id,tp){var now;if(clk_id===0){now=Date.now()}else if((clk_id===1||clk_id===4)&&_emscripten_get_now_is_monotonic){now=_emscripten_get_now()}else{setErrNo(28);return-1}HEAP32[tp>>2]=now/1e3|0;HEAP32[tp+4>>2]=now%1e3*1e3*1e3|0;return 0}function _dlopen(filename,flag){abort("To use dlopen, you need to use Emscripten's linking support, see https://github.com/emscripten-core/emscripten/wiki/Linking")}function _dlsym(handle,symbol){abort("To use dlopen, you need to use Emscripten's linking support, see https://github.com/emscripten-core/emscripten/wiki/Linking")}function _emscripten_get_heap_max(){return 2147483648}function _emscripten_memcpy_big(dest,src,num){HEAPU8.copyWithin(dest,src,src+num)}function emscripten_realloc_buffer(size){try{wasmMemory.grow(size-buffer.byteLength+65535>>>16);updateGlobalBufferAndViews(wasmMemory.buffer);return 1}catch(e){}}function _emscripten_resize_heap(requestedSize){var oldSize=HEAPU8.length;var maxHeapSize=2147483648;if(requestedSize>maxHeapSize){return false}for(var cutDown=1;cutDown<=4;cutDown*=2){var overGrownHeapSize=oldSize*(1+.2/cutDown);overGrownHeapSize=Math.min(overGrownHeapSize,requestedSize+100663296);var newSize=Math.min(maxHeapSize,alignUp(Math.max(requestedSize,overGrownHeapSize),65536));var replacement=emscripten_realloc_buffer(newSize);if(replacement){return true}}return false}function _emscripten_thread_sleep(msecs){var start=_emscripten_get_now();while(_emscripten_get_now()-start0){return}preRun();if(runDependencies>0){return}function 
doRun(){if(calledRun)return;calledRun=true;Module["calledRun"]=true;if(ABORT)return;initRuntime();preMain();readyPromiseResolve(Module);if(Module["onRuntimeInitialized"])Module["onRuntimeInitialized"]();postRun()}if(Module["setStatus"]){Module["setStatus"]("Running...");setTimeout(function(){setTimeout(function(){Module["setStatus"]("")},1);doRun()},1)}else{doRun()}}Module["run"]=run;function exit(status,implicit){EXITSTATUS=status;if(implicit&&keepRuntimeAlive()&&status===0){return}if(keepRuntimeAlive()){}else{exitRuntime();if(Module["onExit"])Module["onExit"](status);ABORT=true}quit_(status,new ExitStatus(status))}if(Module["preInit"]){if(typeof Module["preInit"]=="function")Module["preInit"]=[Module["preInit"]];while(Module["preInit"].length>0){Module["preInit"].pop()()}}run(); + + + return createTwilioTFLiteSIMDModule.ready +} +); +})(); +if (typeof exports === 'object' && typeof module === 'object') + module.exports = createTwilioTFLiteSIMDModule; +else if (typeof define === 'function' && define['amd']) + define([], function() { return createTwilioTFLiteSIMDModule; }); +else if (typeof exports === 'object') + exports["createTwilioTFLiteSIMDModule"] = createTwilioTFLiteSIMDModule; diff --git a/dist/build/tflite-simd-1-0-0.wasm b/dist/build/tflite-simd-1-0-0.wasm new file mode 100755 index 0000000..bb24d2d Binary files /dev/null and b/dist/build/tflite-simd-1-0-0.wasm differ diff --git a/dist/build/twilio-video-processors.js b/dist/build/twilio-video-processors.js new file mode 100644 index 0000000..4a7a9db --- /dev/null +++ b/dist/build/twilio-video-processors.js @@ -0,0 +1,1756 @@ +/*! twilio-video-processors.js 2.2.0 + +The following license applies to all parts of this software except as +documented below. + + Copyright (C) 2022 Twilio Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + 3. Neither the name of Twilio nor the names of its contributors may + be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +*/ +(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BackgroundProcessor = void 0; +var Processor_1 = require("../Processor"); +var Benchmark_1 = require("../../utils/Benchmark"); +var TwilioTFLite_1 = require("../../utils/TwilioTFLite"); +var support_1 = require("../../utils/support"); +var types_1 = require("../../types"); +var webgl2_1 = require("../webgl2"); +var constants_1 = require("../../constants"); +/** + * @private + */ +var BackgroundProcessor = /** @class */ (function (_super) { + __extends(BackgroundProcessor, _super); + function BackgroundProcessor(options) { + var _this = _super.call(this) || this; + _this._backgroundImage = null; + _this._outputCanvas = null; + _this._outputContext = null; + _this._webgl2Pipeline = null; + _this._inferenceDimensions = constants_1.WASM_INFERENCE_DIMENSIONS; + if (typeof options.assetsPath !== 'string') { + throw new Error('assetsPath parameter is missing'); + } + var assetsPath = options.assetsPath; + if (assetsPath && assetsPath[assetsPath.length - 1] !== '/') { + assetsPath += '/'; + } + _this._assetsPath = assetsPath; + _this._debounce = typeof options.debounce === 'boolean' ? options.debounce : true; + _this._deferInputResize = typeof options.deferInputResize === 'boolean' ? options.deferInputResize : false; + _this._inferenceDimensions = options.inferenceDimensions || _this._inferenceDimensions; + _this._inputResizeMode = typeof options.inputResizeMode === 'string' + ? options.inputResizeMode + : ((0, support_1.isChromiumImageBitmap)() ? 'image-bitmap' : 'canvas'); + _this._pipeline = options.pipeline || types_1.Pipeline.WebGL2; + _this._benchmark = new Benchmark_1.Benchmark(); + _this._currentMask = null; + _this._isSimdEnabled = null; + _this._inferenceInputCanvas = typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(1, 1) : document.createElement('canvas'); + _this._inferenceInputContext = _this._inferenceInputCanvas.getContext('2d', { willReadFrequently: true }); + _this._inputFrameCanvas = typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(1, 1) : document.createElement('canvas'); + _this._inputFrameContext = _this._inputFrameCanvas.getContext('2d'); + _this._maskBlurRadius = typeof options.maskBlurRadius === 'number' ? options.maskBlurRadius : (_this._pipeline === types_1.Pipeline.WebGL2 ? constants_1.MASK_BLUR_RADIUS : (constants_1.MASK_BLUR_RADIUS / 2)); + _this._maskCanvas = typeof OffscreenCanvas !== 'undefined' ? 
new OffscreenCanvas(1, 1) : document.createElement('canvas'); + _this._maskContext = _this._maskCanvas.getContext('2d'); + return _this; + } + Object.defineProperty(BackgroundProcessor.prototype, "maskBlurRadius", { + /** + * The current blur radius when smoothing out the edges of the person's mask. + */ + get: function () { + return this._maskBlurRadius; + }, + /** + * Set a new blur radius to be used when smoothing out the edges of the person's mask. + */ + set: function (radius) { + var _a; + if (typeof radius !== 'number' || radius < 0) { + console.warn("Valid mask blur radius not found. Using ".concat(constants_1.MASK_BLUR_RADIUS, " as default.")); + radius = constants_1.MASK_BLUR_RADIUS; + } + if (this._maskBlurRadius !== radius) { + this._maskBlurRadius = radius; + (_a = this._webgl2Pipeline) === null || _a === void 0 ? void 0 : _a.updatePostProcessingConfig({ + jointBilateralFilter: { + sigmaSpace: this._maskBlurRadius + } + }); + } + }, + enumerable: false, + configurable: true + }); + /** + * Load the segmentation model. + * Call this method before attaching the processor to ensure + * video frames are processed correctly. + */ + BackgroundProcessor.prototype.loadModel = function () { + return __awaiter(this, void 0, void 0, function () { + var tflite; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + tflite = BackgroundProcessor._tflite; + if (!!tflite) return [3 /*break*/, 2]; + tflite = new TwilioTFLite_1.TwilioTFLite(); + return [4 /*yield*/, tflite.initialize(this._assetsPath, constants_1.MODEL_NAME, constants_1.TFLITE_LOADER_NAME, constants_1.TFLITE_SIMD_LOADER_NAME)]; + case 1: + _a.sent(); + BackgroundProcessor._tflite = tflite; + _a.label = 2; + case 2: + this._isSimdEnabled = tflite.isSimdEnabled; + return [2 /*return*/]; + } + }); + }); + }; + /** + * Apply a transform to the background of an input video frame, leaving + * the foreground (person(s)) untouched. Any exception detected will + * result in the frame being dropped. + * @param inputFrameBuffer - The source of the input frame to process. + *
+ *
+ * [OffscreenCanvas](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas) - Good for canvas-related processing + * that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLCanvasElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement) - This is recommended on browsers + * that don't support `OffscreenCanvas`, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLVideoElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement) - Recommended when using [[Pipeline.WebGL2]] but + * works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]]. + *
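+ *
+ * As a minimal sketch (the `track` and `processor` names here are assumed for
+ * illustration, not defined by this bundle), an HTMLVideoElement input frame
+ * buffer can be requested from twilio-video with the same options used in the
+ * class-level examples later in this file:
+ *
+ * ```ts
+ * track.addProcessor(processor, {
+ *   inputFrameBufferType: 'video',
+ *   outputFrameBufferContextType: 'webgl2',
+ * });
+ * ```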
+ * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame. + */ + BackgroundProcessor.prototype.processFrame = function (inputFrameBuffer, outputFrameBuffer) { + var _a, _b, _c; + return __awaiter(this, void 0, void 0, function () { + var _d, inferenceWidth, inferenceHeight, _e, captureWidth, captureHeight, inputFrame, personMask, ctx, _f, outputHeight, outputWidth; + return __generator(this, function (_g) { + switch (_g.label) { + case 0: + if (!BackgroundProcessor._tflite) { + return [2 /*return*/]; + } + if (!inputFrameBuffer || !outputFrameBuffer) { + throw new Error('Missing input or output frame buffer'); + } + this._benchmark.end('captureFrameDelay'); + this._benchmark.start('processFrameDelay'); + _d = this._inferenceDimensions, inferenceWidth = _d.width, inferenceHeight = _d.height; + _e = inputFrameBuffer instanceof HTMLVideoElement + ? { width: inputFrameBuffer.videoWidth, height: inputFrameBuffer.videoHeight } + : inputFrameBuffer, captureWidth = _e.width, captureHeight = _e.height; + if (this._outputCanvas !== outputFrameBuffer) { + this._outputCanvas = outputFrameBuffer; + this._outputContext = this._outputCanvas + .getContext(this._pipeline === types_1.Pipeline.Canvas2D ? '2d' : 'webgl2'); + (_a = this._webgl2Pipeline) === null || _a === void 0 ? void 0 : _a.cleanUp(); + this._webgl2Pipeline = null; + } + if (this._pipeline === types_1.Pipeline.WebGL2) { + if (!this._webgl2Pipeline) { + this._createWebGL2Pipeline(inputFrameBuffer, captureWidth, captureHeight, inferenceWidth, inferenceHeight); + } + (_b = this._webgl2Pipeline) === null || _b === void 0 ? void 0 : _b.sampleInputFrame(); + } + // Only set the canvas' dimensions if they have changed to prevent unnecessary redraw + if (this._inputFrameCanvas.width !== captureWidth) { + this._inputFrameCanvas.width = captureWidth; + } + if (this._inputFrameCanvas.height !== captureHeight) { + this._inputFrameCanvas.height = captureHeight; + } + if (this._inferenceInputCanvas.width !== inferenceWidth) { + this._inferenceInputCanvas.width = inferenceWidth; + this._maskCanvas.width = inferenceWidth; + } + if (this._inferenceInputCanvas.height !== inferenceHeight) { + this._inferenceInputCanvas.height = inferenceHeight; + this._maskCanvas.height = inferenceHeight; + } + if (inputFrameBuffer instanceof HTMLVideoElement) { + this._inputFrameContext.drawImage(inputFrameBuffer, 0, 0); + inputFrame = this._inputFrameCanvas; + } + else { + inputFrame = inputFrameBuffer; + } + return [4 /*yield*/, this._createPersonMask(inputFrame)]; + case 1: + personMask = _g.sent(); + if (this._debounce) { + this._currentMask = this._currentMask === personMask + ? null + : personMask; + } + if (this._pipeline === types_1.Pipeline.WebGL2) { + (_c = this._webgl2Pipeline) === null || _c === void 0 ? 
void 0 : _c.render(personMask.data); + } + else { + this._benchmark.start('imageCompositionDelay'); + if (!this._debounce || this._currentMask) { + this._maskContext.putImageData(personMask, 0, 0); + } + ctx = this._outputContext; + _f = this._outputCanvas, outputHeight = _f.height, outputWidth = _f.width; + ctx.save(); + ctx.filter = "blur(".concat(this._maskBlurRadius, "px)"); + ctx.globalCompositeOperation = 'copy'; + ctx.drawImage(this._maskCanvas, 0, 0, outputWidth, outputHeight); + ctx.filter = 'none'; + ctx.globalCompositeOperation = 'source-in'; + ctx.drawImage(inputFrame, 0, 0, outputWidth, outputHeight); + ctx.globalCompositeOperation = 'destination-over'; + this._setBackground(inputFrame); + ctx.restore(); + this._benchmark.end('imageCompositionDelay'); + } + this._benchmark.end('processFrameDelay'); + this._benchmark.end('totalProcessingDelay'); + // NOTE (csantos): Start the benchmark from here so we can include the delay from the Video sdk + // for a more accurate fps + this._benchmark.start('totalProcessingDelay'); + this._benchmark.start('captureFrameDelay'); + return [2 /*return*/]; + } + }); + }); + }; + BackgroundProcessor.prototype._createPersonMask = function (inputFrame) { + return __awaiter(this, void 0, void 0, function () { + var _a, height, width, stages, shouldDebounce, inferenceStage, resizeStage, resizePromise, personMaskBuffer; + var _this = this; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = this._inferenceDimensions, height = _a.height, width = _a.width; + stages = { + inference: { + false: function () { return BackgroundProcessor._tflite.runInference(); }, + true: function () { return _this._currentMask.data; } + }, + resize: { + false: function () { return __awaiter(_this, void 0, void 0, function () { return __generator(this, function (_a) { + return [2 /*return*/, this._resizeInputFrame(inputFrame)]; + }); }); }, + true: function () { return __awaiter(_this, void 0, void 0, function () { return __generator(this, function (_a) { + return [2 /*return*/]; + }); }); } + } + }; + shouldDebounce = !!this._currentMask; + inferenceStage = stages.inference["".concat(shouldDebounce)]; + resizeStage = stages.resize["".concat(shouldDebounce)]; + this._benchmark.start('inputImageResizeDelay'); + resizePromise = resizeStage(); + if (!!this._deferInputResize) return [3 /*break*/, 2]; + return [4 /*yield*/, resizePromise]; + case 1: + _b.sent(); + _b.label = 2; + case 2: + this._benchmark.end('inputImageResizeDelay'); + this._benchmark.start('segmentationDelay'); + personMaskBuffer = inferenceStage(); + this._benchmark.end('segmentationDelay'); + return [2 /*return*/, this._currentMask || new ImageData(personMaskBuffer, width, height)]; + } + }); + }); + }; + BackgroundProcessor.prototype._createWebGL2Pipeline = function (inputFrame, captureWidth, captureHeight, inferenceWidth, inferenceHeight) { + this._webgl2Pipeline = (0, webgl2_1.buildWebGL2Pipeline)({ + htmlElement: inputFrame, + width: captureWidth, + height: captureHeight, + }, this._backgroundImage, { + type: this._getWebGL2PipelineType(), + }, { + inputResolution: "".concat(inferenceWidth, "x").concat(inferenceHeight), + }, this._outputCanvas, this._benchmark, this._debounce); + this._webgl2Pipeline.updatePostProcessingConfig({ + jointBilateralFilter: { + sigmaSpace: this._maskBlurRadius, + sigmaColor: 0.1 + }, + coverage: [ + 0, + 0.99 + ], + lightWrapping: 0, + blendMode: 'screen' + }); + }; + BackgroundProcessor.prototype._resizeInputFrame = function (inputFrame) { + 
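+        // This helper shrinks the current frame down to the inference
+        // dimensions and copies the resulting RGBA pixels into the TFLite
+        // input buffer. In 'image-bitmap' mode (selected on Chromium via
+        // isChromiumImageBitmap()) the resize is performed by
+        // createImageBitmap() with resizeQuality 'pixelated'; in 'canvas'
+        // mode the frame is scaled with a plain drawImage() call before
+        // getImageData() reads the pixels back.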
return __awaiter(this, void 0, void 0, function () { + var _a, _b, resizeWidth, resizeHeight, ctx, resizeMode, resizedInputFrameBitmap, imageData; + return __generator(this, function (_c) { + switch (_c.label) { + case 0: + _a = this, _b = _a._inferenceInputCanvas, resizeWidth = _b.width, resizeHeight = _b.height, ctx = _a._inferenceInputContext, resizeMode = _a._inputResizeMode; + if (!(resizeMode === 'image-bitmap')) return [3 /*break*/, 2]; + return [4 /*yield*/, createImageBitmap(inputFrame, { + resizeWidth: resizeWidth, + resizeHeight: resizeHeight, + resizeQuality: 'pixelated' + })]; + case 1: + resizedInputFrameBitmap = _c.sent(); + ctx.drawImage(resizedInputFrameBitmap, 0, 0, resizeWidth, resizeHeight); + resizedInputFrameBitmap.close(); + return [3 /*break*/, 3]; + case 2: + ctx.drawImage(inputFrame, 0, 0, resizeWidth, resizeHeight); + _c.label = 3; + case 3: + imageData = ctx.getImageData(0, 0, resizeWidth, resizeHeight); + BackgroundProcessor._tflite.loadInputBuffer(imageData.data); + return [2 /*return*/]; + } + }); + }); + }; + BackgroundProcessor._tflite = null; + return BackgroundProcessor; +}(Processor_1.Processor)); +exports.BackgroundProcessor = BackgroundProcessor; + +},{"../../constants":1,"../../types":15,"../../utils/Benchmark":16,"../../utils/TwilioTFLite":17,"../../utils/support":18,"../Processor":3,"../webgl2":9}],5:[function(require,module,exports){ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GaussianBlurBackgroundProcessor = void 0; +var BackgroundProcessor_1 = require("./BackgroundProcessor"); +var constants_1 = require("../../constants"); +var types_1 = require("../../types"); +/** + * The GaussianBlurBackgroundProcessor, when added to a VideoTrack, + * applies a gaussian blur filter on the background in each video frame + * and leaves the foreground (person(s)) untouched. Each instance of + * GaussianBlurBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. + * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors'; + * import { simd } from 'wasm-feature-detect'; + * + * let blurBackground: GaussianBlurBackgroundProcessor; + * + * (async() => { + * const isWasmSimdSupported = await simd(); + * + * blurBackground = new GaussianBlurBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * + * // Enable debounce only if the browser does not support + * // WASM SIMD in order to retain an acceptable frame rate. 
+ * debounce: !isWasmSimdSupported, + * + * pipeline: Pipeline.WebGL2, + * }); + * await blurBackground.loadModel(); + * + * const track = await createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * + * // Any frame rate above 24 fps on desktop browsers increases CPU + * // usage without a noticeable increase in quality. + * frameRate: 24 + * }); + * track.addProcessor(blurBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * })(); + * ``` + */ +var GaussianBlurBackgroundProcessor = /** @class */ (function (_super) { + __extends(GaussianBlurBackgroundProcessor, _super); + /** + * Construct a GaussianBlurBackgroundProcessor. Default values will be used for + * any missing properties in [[GaussianBlurBackgroundProcessorOptions]], and + * invalid properties will be ignored. + */ + function GaussianBlurBackgroundProcessor(options) { + var _this = _super.call(this, options) || this; + _this._blurFilterRadius = constants_1.BLUR_FILTER_RADIUS; + // tslint:disable-next-line no-unused-variable + _this._name = 'GaussianBlurBackgroundProcessor'; + _this.blurFilterRadius = options.blurFilterRadius; + return _this; + } + Object.defineProperty(GaussianBlurBackgroundProcessor.prototype, "blurFilterRadius", { + /** + * The current background blur filter radius in pixels. + */ + get: function () { + return this._blurFilterRadius; + }, + /** + * Set a new background blur filter radius in pixels. + */ + set: function (radius) { + if (!radius) { + console.warn("Valid blur filter radius not found. Using ".concat(constants_1.BLUR_FILTER_RADIUS, " as default.")); + radius = constants_1.BLUR_FILTER_RADIUS; + } + this._blurFilterRadius = radius; + }, + enumerable: false, + configurable: true + }); + GaussianBlurBackgroundProcessor.prototype._getWebGL2PipelineType = function () { + return types_1.WebGL2PipelineType.Blur; + }; + GaussianBlurBackgroundProcessor.prototype._setBackground = function (inputFrame) { + if (!this._outputContext) { + return; + } + var ctx = this._outputContext; + ctx.filter = "blur(".concat(this._blurFilterRadius, "px)"); + ctx.drawImage(inputFrame, 0, 0); + }; + return GaussianBlurBackgroundProcessor; +}(BackgroundProcessor_1.BackgroundProcessor)); +exports.GaussianBlurBackgroundProcessor = GaussianBlurBackgroundProcessor; + +},{"../../constants":1,"../../types":15,"./BackgroundProcessor":4}],6:[function(require,module,exports){ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ?
Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VirtualBackgroundProcessor = void 0; +var BackgroundProcessor_1 = require("./BackgroundProcessor"); +var types_1 = require("../../types"); +/** + * The VirtualBackgroundProcessor, when added to a VideoTrack, + * replaces the background in each video frame with a given image, + * and leaves the foreground (person(s)) untouched. Each instance of + * VirtualBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. + * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors'; + * import { simd } from 'wasm-feature-detect'; + * + * let virtualBackground: VirtualBackgroundProcessor; + * const img = new Image(); + * + * img.onload = async () => { + * const isWasmSimdSupported = await simd(); + * + * virtualBackground = new VirtualBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * backgroundImage: img, + * + * // Enable debounce only if the browser does not support + * // WASM SIMD in order to retain an acceptable frame rate. + * debounce: !isWasmSimdSupported, + * + * pipeline: Pipeline.WebGL2, + * }); + * await virtualBackground.loadModel(); + * + * const track = await createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * + * // Any frame rate above 24 fps on desktop browsers increases CPU + * // usage without a noticeable increase in quality. + * frameRate: 24 + * }); + * track.addProcessor(virtualBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * }; + * + * img.src = '/background.jpg'; + * ``` + */ +var VirtualBackgroundProcessor = /** @class */ (function (_super) { + __extends(VirtualBackgroundProcessor, _super); + /** + * Construct a VirtualBackgroundProcessor. Default values will be used for + * any missing optional properties in [[VirtualBackgroundProcessorOptions]], + * and invalid properties will be ignored. + */ + function VirtualBackgroundProcessor(options) { + var _this = _super.call(this, options) || this; + // tslint:disable-next-line no-unused-variable + _this._name = 'VirtualBackgroundProcessor'; + _this.backgroundImage = options.backgroundImage; + _this.fitType = options.fitType; + return _this; + } + Object.defineProperty(VirtualBackgroundProcessor.prototype, "backgroundImage", { + /** + * The HTMLImageElement representing the current background image. + */ + get: function () { + return this._backgroundImage; + }, + /** + * Set an HTMLImageElement as the new background image. + * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow + * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image) + * when loading the image from a different origin. Failing to do so will result in an empty output frame. + */ + set: function (image) { + var _a; + if (!image || !image.complete || !image.naturalHeight) { + throw new Error('Invalid image. 
Make sure that the image is an HTMLImageElement and has been successfully loaded'); + } + this._backgroundImage = image; + // Triggers recreation of the pipeline in the next processFrame call + (_a = this._webgl2Pipeline) === null || _a === void 0 ? void 0 : _a.cleanUp(); + this._webgl2Pipeline = null; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(VirtualBackgroundProcessor.prototype, "fitType", { + /** + * The current [[ImageFit]] for positioning of the background image in the viewport. + */ + get: function () { + return this._fitType; + }, + /** + * Set a new [[ImageFit]] to be used for positioning the background image in the viewport. + */ + set: function (fitType) { + var validTypes = Object.keys(types_1.ImageFit); + if (!validTypes.includes(fitType)) { + console.warn("Valid fitType not found. Using '".concat(types_1.ImageFit.Fill, "' as default.")); + fitType = types_1.ImageFit.Fill; + } + this._fitType = fitType; + }, + enumerable: false, + configurable: true + }); + VirtualBackgroundProcessor.prototype._getWebGL2PipelineType = function () { + return types_1.WebGL2PipelineType.Image; + }; + VirtualBackgroundProcessor.prototype._setBackground = function () { + if (!this._outputContext || !this._outputCanvas) { + return; + } + var img = this._backgroundImage; + var imageWidth = img.naturalWidth; + var imageHeight = img.naturalHeight; + var canvasWidth = this._outputCanvas.width; + var canvasHeight = this._outputCanvas.height; + var ctx = this._outputContext; + if (this._fitType === types_1.ImageFit.Fill) { + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, 0, 0, canvasWidth, canvasHeight); + } + else if (this._fitType === types_1.ImageFit.None) { + ctx.drawImage(img, 0, 0, imageWidth, imageHeight); + } + else if (this._fitType === types_1.ImageFit.Contain) { + var _a = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, types_1.ImageFit.Contain), x = _a.x, y = _a.y, w = _a.w, h = _a.h; + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h); + } + else if (this._fitType === types_1.ImageFit.Cover) { + var _b = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, types_1.ImageFit.Cover), x = _b.x, y = _b.y, w = _b.w, h = _b.h; + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h); + } + }; + VirtualBackgroundProcessor.prototype._getFitPosition = function (contentWidth, contentHeight, viewportWidth, viewportHeight, type) { + // Calculate new content width to fit viewport width + var factor = viewportWidth / contentWidth; + var newContentWidth = viewportWidth; + var newContentHeight = factor * contentHeight; + // Scale down the resulting height and width more + // to fit viewport height if the content still exceeds it + if ((type === types_1.ImageFit.Contain && newContentHeight > viewportHeight) + || (type === types_1.ImageFit.Cover && viewportHeight > newContentHeight)) { + factor = viewportHeight / newContentHeight; + newContentWidth = factor * newContentWidth; + newContentHeight = viewportHeight; + } + // Calculate the destination top left corner to center the content + var x = (viewportWidth - newContentWidth) / 2; + var y = (viewportHeight - newContentHeight) / 2; + return { + x: x, + y: y, + w: newContentWidth, + h: newContentHeight, + }; + }; + return VirtualBackgroundProcessor; +}(BackgroundProcessor_1.BackgroundProcessor)); +exports.VirtualBackgroundProcessor = VirtualBackgroundProcessor; + +},{"../../types":15,"./BackgroundProcessor":4}],7:[function(require,module,exports){ +"use strict"; 
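+// Supported inference input resolutions for the WebGL2 pipeline: maps a
+// 'WxH' string (the format BackgroundProcessor builds from its inference
+// dimensions) to the corresponding [width, height] pair.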
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.inputResolutions = void 0; +exports.inputResolutions = { + '640x360': [640, 360], + '256x256': [256, 256], + '256x144': [256, 144], + '160x96': [160, 96], +}; + +},{}],8:[function(require,module,exports){ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createTexture = exports.compileShader = exports.createProgram = exports.createPiplelineStageProgram = exports.glsl = void 0; +/** + * Use it along with boyswan.glsl-literal VSCode extension + * to get GLSL syntax highlighting. + * https://marketplace.visualstudio.com/items?itemName=boyswan.glsl-literal + * + * On VSCode OSS, boyswan.glsl-literal requires slevesque.shader extension + * to be installed as well. + * https://marketplace.visualstudio.com/items?itemName=slevesque.shader + */ +exports.glsl = String.raw; +function createPiplelineStageProgram(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer) { + var program = createProgram(gl, vertexShader, fragmentShader); + var positionAttributeLocation = gl.getAttribLocation(program, 'a_position'); + gl.enableVertexAttribArray(positionAttributeLocation); + gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); + gl.vertexAttribPointer(positionAttributeLocation, 2, gl.FLOAT, false, 0, 0); + var texCoordAttributeLocation = gl.getAttribLocation(program, 'a_texCoord'); + gl.enableVertexAttribArray(texCoordAttributeLocation); + gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer); + gl.vertexAttribPointer(texCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0); + return program; +} +exports.createPiplelineStageProgram = createPiplelineStageProgram; +function createProgram(gl, vertexShader, fragmentShader) { + var program = gl.createProgram(); + gl.attachShader(program, vertexShader); + gl.attachShader(program, fragmentShader); + gl.linkProgram(program); + if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { + throw new Error("Could not link WebGL program: ".concat(gl.getProgramInfoLog(program))); + } + return program; +} +exports.createProgram = createProgram; +function compileShader(gl, shaderType, shaderSource) { + var shader = gl.createShader(shaderType); + gl.shaderSource(shader, shaderSource); + gl.compileShader(shader); + if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { + throw new Error("Could not compile shader: ".concat(gl.getShaderInfoLog(shader))); + } + return shader; +} +exports.compileShader = compileShader; +function createTexture(gl, internalformat, width, height, minFilter, magFilter) { + if (minFilter === void 0) { minFilter = gl.NEAREST; } + if (magFilter === void 0) { magFilter = gl.NEAREST; } + var texture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, texture); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, minFilter); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, magFilter); + gl.texStorage2D(gl.TEXTURE_2D, 1, internalformat, width, height); + return texture; +} +exports.createTexture = createTexture; + +},{}],9:[function(require,module,exports){ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildWebGL2Pipeline = void 0; +/** + * This pipeline is based on Volcomix's react project. 
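Taken together, the helpers in the webglHelper module above cover the full setup path of one WebGL2 pass: compile both shaders, link them into a program wired to shared position/texCoord attribute buffers, and allocate an immutable output texture. A hedged usage sketch; `canvas`, the GLSL sources, and the two buffers are assumed to already be in scope, and the helper names are used bare as they appear in the module:

```ts
// Assumes: canvas (HTMLCanvasElement), vertexSource/fragmentSource (GLSL strings),
// and positionBuffer/texCoordBuffer (WebGLBuffer) already exist.
const gl = canvas.getContext('webgl2')!;
const vertexShader = compileShader(gl, gl.VERTEX_SHADER, vertexSource);
const fragmentShader = compileShader(gl, gl.FRAGMENT_SHADER, fragmentSource);
// Links the program and wires 'a_position' and 'a_texCoord' to the shared buffers.
const program = createPiplelineStageProgram(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer);
// createTexture allocates immutable storage via texStorage2D, so the texture
// must later be filled with texSubImage2D rather than texImage2D.
const outputTexture = createTexture(gl, gl.RGBA8, canvas.width, canvas.height);
```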
+ * https://github.com/Volcomix/virtual-background + * It was modified and converted into a module to work with + * Twilio's Video Processor + */ +var webgl2Pipeline_1 = require("./pipelines/webgl2Pipeline"); +Object.defineProperty(exports, "buildWebGL2Pipeline", { enumerable: true, get: function () { return webgl2Pipeline_1.buildWebGL2Pipeline; } }); + +},{"./pipelines/webgl2Pipeline":14}],10:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildBackgroundBlurStage = void 0; +var webglHelper_1 = require("../helpers/webglHelper"); +function buildBackgroundBlurStage(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) { + var blurPass = buildBlurPass(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas); + var blendPass = buildBlendPass(gl, positionBuffer, texCoordBuffer, canvas); + function render() { + blurPass.render(); + blendPass.render(); + } + function updateCoverage(coverage) { + blendPass.updateCoverage(coverage); + } + function cleanUp() { + blendPass.cleanUp(); + blurPass.cleanUp(); + } + return { + render: render, + updateCoverage: updateCoverage, + cleanUp: cleanUp, + }; +} +exports.buildBackgroundBlurStage = buildBackgroundBlurStage; +function buildBlurPass(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n 
texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n "]))); + var scale = 0.5; + var outputWidth = canvas.width * scale; + var outputHeight = canvas.height * scale; + var texelWidth = 1 / outputWidth; + var texelHeight = 1 / outputHeight; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize'); + var texture1 = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight, gl.NEAREST, gl.LINEAR); + var texture2 = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight, gl.NEAREST, gl.LINEAR); + var frameBuffer1 = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture1, 0); + var frameBuffer2 = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture2, 0); + gl.useProgram(program); + gl.uniform1i(personMaskLocation, 1); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, personMaskTexture); + for (var i = 0; i < 8; i++) { + gl.uniform2f(texelSizeLocation, 0, texelHeight); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, texture1); + gl.uniform1i(inputFrameLocation, 2); + gl.uniform2f(texelSizeLocation, texelWidth, 0); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + gl.bindTexture(gl.TEXTURE_2D, texture2); + } + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer2); + gl.deleteFramebuffer(frameBuffer1); + gl.deleteTexture(texture2); + gl.deleteTexture(texture1); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { + render: render, + cleanUp: cleanUp, + }; +} +function buildBlendPass(gl, positionBuffer, texCoordBuffer, canvas) { + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_2 || (templateObject_2 = __makeTemplateObject(["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "], ["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "]))); + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_3 || (templateObject_3 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 
v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n "]))); + var outputWidth = canvas.width, outputHeight = canvas.height; + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var blurredInputFrame = gl.getUniformLocation(program, 'u_blurredInputFrame'); + var coverageLocation = gl.getUniformLocation(program, 'u_coverage'); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(personMaskLocation, 1); + gl.uniform1i(blurredInputFrame, 2); + gl.uniform2f(coverageLocation, 0, 1); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateCoverage(coverage) { + gl.useProgram(program); + gl.uniform2f(coverageLocation, coverage[0], coverage[1]); + } + function cleanUp() { + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + gl.deleteShader(vertexShader); + } + return { + render: render, + updateCoverage: updateCoverage, + cleanUp: cleanUp, + }; +} +var templateObject_1, templateObject_2, templateObject_3; + +},{"../helpers/webglHelper":8}],11:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildBackgroundImageStage = void 0; +var webglHelper_1 = require("../helpers/webglHelper"); +function buildBackgroundImageStage(gl, positionBuffer, texCoordBuffer, personMaskTexture, backgroundImage, canvas) { + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n "], 
["#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n "]))); + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_2 || (templateObject_2 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n "]))); + var outputWidth = canvas.width, outputHeight = canvas.height; + var outputRatio = outputWidth / outputHeight; + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var backgroundScaleLocation = gl.getUniformLocation(program, 'u_backgroundScale'); + var backgroundOffsetLocation = gl.getUniformLocation(program, 'u_backgroundOffset'); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var backgroundLocation = 
gl.getUniformLocation(program, 'u_background'); + var coverageLocation = gl.getUniformLocation(program, 'u_coverage'); + var lightWrappingLocation = gl.getUniformLocation(program, 'u_lightWrapping'); + var blendModeLocation = gl.getUniformLocation(program, 'u_blendMode'); + gl.useProgram(program); + gl.uniform2f(backgroundScaleLocation, 1, 1); + gl.uniform2f(backgroundOffsetLocation, 0, 0); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(personMaskLocation, 1); + gl.uniform2f(coverageLocation, 0, 1); + gl.uniform1f(lightWrappingLocation, 0); + gl.uniform1f(blendModeLocation, 0); + var backgroundTexture = null; + // TODO Find a better way to handle the background being loaded + if (backgroundImage === null || backgroundImage === void 0 ? void 0 : backgroundImage.complete) { + updateBackgroundImage(backgroundImage); + } + else if (backgroundImage) { + backgroundImage.onload = function () { + updateBackgroundImage(backgroundImage); + }; + } + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, personMaskTexture); + if (backgroundTexture !== null) { + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, backgroundTexture); + // TODO Correctly handle the background not being loaded yet + gl.uniform1i(backgroundLocation, 2); + } + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateBackgroundImage(backgroundImage) { + backgroundTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, backgroundImage.naturalWidth, backgroundImage.naturalHeight, gl.LINEAR, gl.LINEAR); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, backgroundImage.naturalWidth, backgroundImage.naturalHeight, gl.RGBA, gl.UNSIGNED_BYTE, backgroundImage); + var xOffset = 0; + var yOffset = 0; + var backgroundWidth = backgroundImage.naturalWidth; + var backgroundHeight = backgroundImage.naturalHeight; + var backgroundRatio = backgroundWidth / backgroundHeight; + if (backgroundRatio < outputRatio) { + backgroundHeight = backgroundWidth / outputRatio; + yOffset = (backgroundImage.naturalHeight - backgroundHeight) / 2; + } + else { + backgroundWidth = backgroundHeight * outputRatio; + xOffset = (backgroundImage.naturalWidth - backgroundWidth) / 2; + } + var xScale = backgroundWidth / backgroundImage.naturalWidth; + var yScale = backgroundHeight / backgroundImage.naturalHeight; + xOffset /= backgroundImage.naturalWidth; + yOffset /= backgroundImage.naturalHeight; + gl.uniform2f(backgroundScaleLocation, xScale, yScale); + gl.uniform2f(backgroundOffsetLocation, xOffset, yOffset); + } + function updateCoverage(coverage) { + gl.useProgram(program); + gl.uniform2f(coverageLocation, coverage[0], coverage[1]); + } + function updateLightWrapping(lightWrapping) { + gl.useProgram(program); + gl.uniform1f(lightWrappingLocation, lightWrapping); + } + function updateBlendMode(blendMode) { + gl.useProgram(program); + gl.uniform1f(blendModeLocation, blendMode === 'screen' ?
0 : 1); + } + function cleanUp() { + gl.deleteTexture(backgroundTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + gl.deleteShader(vertexShader); + } + return { + render: render, + updateCoverage: updateCoverage, + updateLightWrapping: updateLightWrapping, + updateBlendMode: updateBlendMode, + cleanUp: cleanUp, + }; +} +exports.buildBackgroundImageStage = buildBackgroundImageStage; +var templateObject_1, templateObject_2; + +},{"../helpers/webglHelper":8}],12:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildFastBilateralFilterStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildFastBilateralFilterStage(gl, vertexShader, positionBuffer, texCoordBuffer, inputTexture, segmentationConfig, outputTexture, canvas) { + // NOTE(mmalavalli): This is a faster approximation of the joint bilateral filter. + // For a given pixel, instead of calculating the space and color weights of all + // the pixels within the filter kernel, which would have a complexity of O(r^2), + // we calculate the space and color weights of only those pixels which form two + // diagonal lines between the two pairs of opposite corners of the filter kernel, + // which would have a complexity of O(r). This improves the overall complexity + // of this stage from O(w x h x r^2) to O(w x h x r), where: + // w => width of the output video frame + // h => height of the output video frame + // r => radius of the joint bilateral filter kernel + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n return exp(-0.5 * x * x / sigma / sigma);\n }\n\n float calculateSpaceWeight(vec2 coord) {\n float x = distance(v_texCoord, coord);\n float sigma = u_sigmaTexel;\n return gaussian(x, sigma);\n }\n\n float calculateColorWeight(vec2 coord) {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 coordColor = texture(u_inputFrame, coord).rgb;\n float x = distance(centerColor, coordColor);\n float sigma = u_sigmaColor;\n return gaussian(x, sigma);\n }\n\n void main() {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n float newVal = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(v_texCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(v_texCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(v_texCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(v_texCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, 
rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n newVal = 0.0;\n } else if (totalSegAlpha >= 4.0) {\n newVal = 1.0;\n } else {\n for (float i = 0.0; i <= u_radius - u_offset; i += u_step) {\n vec2 shift = vec2(i, i) * u_texelSize;\n vec2 coord = vec2(v_texCoord + shift);\n float spaceWeight = calculateSpaceWeight(coord);\n float colorWeight = calculateColorWeight(coord);\n float weight = spaceWeight * colorWeight;\n float alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * alpha;\n\n if (i != 0.0) {\n shift = vec2(i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a; \n }\n }\n newVal /= totalWeight;\n }\n\n outColor = vec4(vec3(0.0), newVal);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n return exp(-0.5 * x * x / sigma / sigma);\n }\n\n float calculateSpaceWeight(vec2 coord) {\n float x = distance(v_texCoord, coord);\n float sigma = u_sigmaTexel;\n return gaussian(x, sigma);\n }\n\n float calculateColorWeight(vec2 coord) {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 coordColor = texture(u_inputFrame, coord).rgb;\n float x = distance(centerColor, coordColor);\n float sigma = u_sigmaColor;\n return gaussian(x, sigma);\n }\n\n void main() {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n float newVal = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(v_texCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(v_texCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(v_texCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(v_texCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n newVal = 0.0;\n } else if (totalSegAlpha >= 4.0) {\n newVal = 1.0;\n } else {\n for (float i = 0.0; i <= u_radius - u_offset; i += u_step) {\n vec2 shift = vec2(i, i) * u_texelSize;\n vec2 coord = 
vec2(v_texCoord + shift);\n float spaceWeight = calculateSpaceWeight(coord);\n float colorWeight = calculateColorWeight(coord);\n float weight = spaceWeight * colorWeight;\n float alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * alpha;\n\n if (i != 0.0) {\n shift = vec2(i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a; \n }\n }\n newVal /= totalWeight;\n }\n\n outColor = vec4(vec3(0.0), newVal);\n }\n "]))); + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var outputWidth = canvas.width, outputHeight = canvas.height; + var texelWidth = 1 / outputWidth; + var texelHeight = 1 / outputHeight; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var segmentationMaskLocation = gl.getUniformLocation(program, 'u_segmentationMask'); + var texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize'); + var stepLocation = gl.getUniformLocation(program, 'u_step'); + var radiusLocation = gl.getUniformLocation(program, 'u_radius'); + var offsetLocation = gl.getUniformLocation(program, 'u_offset'); + var sigmaTexelLocation = gl.getUniformLocation(program, 'u_sigmaTexel'); + var sigmaColorLocation = gl.getUniformLocation(program, 'u_sigmaColor'); + var frameBuffer = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outputTexture, 0); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(segmentationMaskLocation, 1); + gl.uniform2f(texelSizeLocation, texelWidth, texelHeight); + // Ensures default values are configured to prevent infinite + // loop in fragment shader + updateSigmaSpace(0); + updateSigmaColor(0); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, inputTexture); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateSigmaSpace(sigmaSpace) { + sigmaSpace *= Math.max(outputWidth / segmentationWidth, outputHeight / segmentationHeight); + var kSparsityFactor = 0.66; // Higher is more sparse. + var sparsity = Math.max(1, Math.sqrt(sigmaSpace) * kSparsityFactor); + var step = sparsity; + var radius = sigmaSpace; + var offset = step > 1 ? 
step * 0.5 : 0; + var sigmaTexel = Math.max(texelWidth, texelHeight) * sigmaSpace; + gl.useProgram(program); + gl.uniform1f(stepLocation, step); + gl.uniform1f(radiusLocation, radius); + gl.uniform1f(offsetLocation, offset); + gl.uniform1f(sigmaTexelLocation, sigmaTexel); + } + function updateSigmaColor(sigmaColor) { + gl.useProgram(program); + gl.uniform1f(sigmaColorLocation, sigmaColor); + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, updateSigmaSpace: updateSigmaSpace, updateSigmaColor: updateSigmaColor, cleanUp: cleanUp }; +} +exports.buildFastBilateralFilterStage = buildFastBilateralFilterStage; +var templateObject_1; + +},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8}],13:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildLoadSegmentationStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildLoadSegmentationStage(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, outputTexture) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).a;\n outColor = vec4(vec3(0.0), segmentation);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).a;\n outColor = vec4(vec3(0.0), segmentation);\n }\n "]))); + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputLocation = gl.getUniformLocation(program, 'u_inputSegmentation'); + var inputTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, segmentationWidth, segmentationHeight); + var frameBuffer = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outputTexture, 0); + gl.useProgram(program); + gl.uniform1i(inputLocation, 1); + function render(segmentationData) { + gl.viewport(0, 0, segmentationWidth, segmentationHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, inputTexture); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, segmentationWidth, segmentationHeight, gl.RGBA, gl.UNSIGNED_BYTE, segmentationData); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteTexture(inputTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, cleanUp: 
cleanUp }; +} +exports.buildLoadSegmentationStage = buildLoadSegmentationStage; +var templateObject_1; + +},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8}],14:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildWebGL2Pipeline = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +var backgroundBlurStage_1 = require("./backgroundBlurStage"); +var backgroundImageStage_1 = require("./backgroundImageStage"); +var fastBilateralFilterStage_1 = require("./fastBilateralFilterStage"); +var loadSegmentationStage_1 = require("./loadSegmentationStage"); +function buildWebGL2Pipeline(sourcePlayback, backgroundImage, backgroundConfig, segmentationConfig, canvas, benchmark, debounce) { + var shouldUpscaleCurrentMask = true; + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "], ["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "]))); + var outputWidth = canvas.width, outputHeight = canvas.height; + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var gl = canvas.getContext('webgl2'); + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var vertexArray = gl.createVertexArray(); + gl.bindVertexArray(vertexArray); + var positionBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0]), gl.STATIC_DRAW); + var texCoordBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0]), gl.STATIC_DRAW); + // We don't use texStorage2D here because texImage2D seems faster + // to upload video texture than texSubImage2D even though the latter + // is supposed to be the recommended way: + // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#use_texstorage_to_create_textures + var inputFrameTexture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); + // TODO Rename segmentation and person mask to be more specific + var segmentationTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, segmentationWidth, segmentationHeight); + var personMaskTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight); + var loadSegmentationStage = (0, loadSegmentationStage_1.buildLoadSegmentationStage)(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, segmentationTexture); + var fastBilateralFilterStage = (0, fastBilateralFilterStage_1.buildFastBilateralFilterStage)(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationTexture, segmentationConfig, personMaskTexture, canvas); + var backgroundStage = backgroundConfig.type === 'blur' + ? 
(0, backgroundBlurStage_1.buildBackgroundBlurStage)(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) + : (0, backgroundImageStage_1.buildBackgroundImageStage)(gl, positionBuffer, texCoordBuffer, personMaskTexture, backgroundImage, canvas); + function sampleInputFrame() { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + gl.clearColor(0, 0, 0, 0); + gl.clear(gl.COLOR_BUFFER_BIT); + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture); + // texImage2D seems faster than texSubImage2D to upload + // video texture + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, sourcePlayback.htmlElement); + gl.bindVertexArray(vertexArray); + return [2 /*return*/]; + }); + }); + } + function render(segmentationData) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + benchmark.start('imageCompositionDelay'); + if (shouldUpscaleCurrentMask) { + loadSegmentationStage.render(segmentationData); + } + fastBilateralFilterStage.render(); + backgroundStage.render(); + if (debounce) { + shouldUpscaleCurrentMask = !shouldUpscaleCurrentMask; + } + benchmark.end('imageCompositionDelay'); + return [2 /*return*/]; + }); + }); + } + function updatePostProcessingConfig(postProcessingConfig) { + var blendMode = postProcessingConfig.blendMode, coverage = postProcessingConfig.coverage, lightWrapping = postProcessingConfig.lightWrapping, _a = postProcessingConfig.jointBilateralFilter, jointBilateralFilter = _a === void 0 ? {} : _a; + var sigmaColor = jointBilateralFilter.sigmaColor, sigmaSpace = jointBilateralFilter.sigmaSpace; + if (typeof sigmaColor === 'number') { + fastBilateralFilterStage.updateSigmaColor(sigmaColor); + } + if (typeof sigmaSpace === 'number') { + fastBilateralFilterStage.updateSigmaSpace(sigmaSpace); + } + if (Array.isArray(coverage)) { + if (backgroundConfig.type === 'blur' || backgroundConfig.type === 'image') { + backgroundStage.updateCoverage(coverage); + } + } + if (backgroundConfig.type === 'image') { + var backgroundImageStage = backgroundStage; + if (typeof lightWrapping === 'number') { + backgroundImageStage.updateLightWrapping(lightWrapping); + } + if (typeof blendMode === 'string') { + backgroundImageStage.updateBlendMode(blendMode); + } + } + else if (backgroundConfig.type !== 'blur') { + // TODO Handle no background in a separate pipeline path + var backgroundImageStage = backgroundStage; + backgroundImageStage.updateCoverage([0, 0.9999]); + backgroundImageStage.updateLightWrapping(0); + } + } + function cleanUp() { + backgroundStage.cleanUp(); + fastBilateralFilterStage.cleanUp(); + loadSegmentationStage.cleanUp(); + gl.deleteTexture(personMaskTexture); + gl.deleteTexture(segmentationTexture); + gl.deleteTexture(inputFrameTexture); + gl.deleteBuffer(texCoordBuffer); + gl.deleteBuffer(positionBuffer); + gl.deleteVertexArray(vertexArray); + gl.deleteShader(vertexShader); + } + return { render: render, sampleInputFrame: sampleInputFrame, updatePostProcessingConfig: updatePostProcessingConfig, cleanUp: cleanUp }; +} +exports.buildWebGL2Pipeline = buildWebGL2Pipeline; +var templateObject_1; + +},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8,"./backgroundBlurStage":10,"./backgroundImageStage":11,"./fastBilateralFilterStage":12,"./loadSegmentationStage":13}],15:[function(require,module,exports){ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pipeline = exports.ImageFit = 
exports.WebGL2PipelineType = void 0; +/** + * @private + */ +var WebGL2PipelineType; +(function (WebGL2PipelineType) { + WebGL2PipelineType["Blur"] = "blur"; + WebGL2PipelineType["Image"] = "image"; +})(WebGL2PipelineType || (exports.WebGL2PipelineType = WebGL2PipelineType = {})); +/** + * ImageFit specifies the positioning of an image inside a viewport. + */ +var ImageFit; +(function (ImageFit) { + /** + * Scale the image up or down to fit the viewport while preserving the aspect ratio. + * The image will be fully visible but will add empty space in the viewport if + * the aspect ratios do not match. + */ + ImageFit["Contain"] = "Contain"; + /** + * Scale the image to fill both the height and width of the viewport while preserving + * the aspect ratio, cropping the image if the aspect ratios do not match. + */ + ImageFit["Cover"] = "Cover"; + /** + * Stretch the image to fill the viewport regardless of aspect ratio. + */ + ImageFit["Fill"] = "Fill"; + /** + * Ignore height and width and use the original size. + */ + ImageFit["None"] = "None"; +})(ImageFit || (exports.ImageFit = ImageFit = {})); +/** + * Specifies which pipeline to use when processing video frames. + */ +var Pipeline; +(function (Pipeline) { + /** + * Use the canvas 2d rendering context. Some browsers such as Safari do not + * have full support for this feature. Please test your application to make sure it works as intended. See + * the [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D#browser_compatibility) + * for reference.
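For reference, a hedged configuration sketch exercising the `ImageFit` and `Pipeline` enums together with the `fitType` setter fallback shown earlier in the bundle; the option names follow the typical v2 processor options and should be treated as assumptions to be checked against the package typings:

```ts
import { ImageFit, Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors';

// backgroundImage is assumed to be an already-loaded HTMLImageElement.
const processor = new VirtualBackgroundProcessor({
  assetsPath: '/virtualbackground',   // hosts the .tflite model and wasm loaders
  backgroundImage,
  fitType: ImageFit.Cover,            // crop rather than letterbox
  pipeline: Pipeline.WebGL2,
});

// Invalid values do not throw: the fitType setter warns and falls back to ImageFit.Fill.
processor.fitType = 'Stretch' as unknown as ImageFit;
```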
+ */ + Pipeline["WebGL2"] = "WebGL2"; +})(Pipeline || (exports.Pipeline = Pipeline = {})); + +},{}],16:[function(require,module,exports){ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Benchmark = void 0; +/** + * @private + */ +var Benchmark = /** @class */ (function () { + function Benchmark() { + this._timingCache = new Map(); + this._timings = new Map(); + } + Benchmark.prototype.end = function (name) { + var timing = this._timings.get(name); + if (!timing) { + return; + } + timing.end = Date.now(); + timing.delay = timing.end - timing.start; + this._save(name, __assign({}, timing)); + }; + Benchmark.prototype.getAverageDelay = function (name) { + var timingCache = this._timingCache.get(name); + if (!timingCache || !timingCache.length) { + return; + } + return timingCache.map(function (timing) { return timing.delay; }) + .reduce(function (total, value) { return total += value; }, 0) / timingCache.length; + }; + Benchmark.prototype.getNames = function () { + return Array.from(this._timingCache.keys()); + }; + Benchmark.prototype.getRate = function (name) { + var timingCache = this._timingCache.get(name); + if (!timingCache || timingCache.length < 2) { + return; + } + var totalDelay = timingCache[timingCache.length - 1].end - timingCache[0].start; + return (timingCache.length / totalDelay) * 1000; + }; + Benchmark.prototype.start = function (name) { + var timing = this._timings.get(name); + if (!timing) { + timing = {}; + this._timings.set(name, timing); + } + timing.start = Date.now(); + delete timing.end; + delete timing.delay; + }; + Benchmark.prototype._save = function (name, timing) { + var timingCache = this._timingCache.get(name); + if (!timingCache) { + timingCache = []; + this._timingCache.set(name, timingCache); + } + timingCache.push(timing); + if (timingCache.length > Benchmark.cacheSize) { + timingCache.splice(0, timingCache.length - Benchmark.cacheSize); + } + }; + // NOTE (csantos): How many timing information to save per benchmark. + // This is about the amount of timing info generated on a 24fps input. + // Enough samples to calculate fps + Benchmark.cacheSize = 41; + return Benchmark; +}()); +exports.Benchmark = Benchmark; + +},{}],17:[function(require,module,exports){ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TwilioTFLite = void 0; +var loadedScripts = new Set(); +var model; +/** + * @private + */ +var TwilioTFLite = /** @class */ (function () { + function TwilioTFLite() { + this._inputBuffer = null; + this._isSimdEnabled = null; + this._tflite = null; + } + Object.defineProperty(TwilioTFLite.prototype, "isSimdEnabled", { + get: function () { + return this._isSimdEnabled; + }, + enumerable: false, + configurable: true + }); + TwilioTFLite.prototype.initialize = function (assetsPath, modelName, moduleLoaderName, moduleSimdLoaderName) { + return __awaiter(this, void 0, void 0, function () { + var _a, modelResponse, _b, tflite, modelBufferOffset; + return __generator(this, function (_c) { + switch (_c.label) { + case 0: + if (this._tflite) { + return [2 /*return*/]; + } + return [4 /*yield*/, Promise.all([ + this._loadWasmModule(assetsPath, moduleLoaderName, moduleSimdLoaderName), + fetch("".concat(assetsPath).concat(modelName)), + ])]; + case 1: + _a = _c.sent(), modelResponse = _a[1]; + _b = model; + if (_b) return [3 /*break*/, 3]; + return [4 /*yield*/, modelResponse.arrayBuffer()]; + case 2: + _b = (_c.sent()); + _c.label = 3; + case 3: + model = _b; + tflite = this._tflite; + modelBufferOffset = tflite._getModelBufferMemoryOffset(); + tflite.HEAPU8.set(new Uint8Array(model), modelBufferOffset); + tflite._loadModel(model.byteLength); + return [2 /*return*/]; + } + }); + }); + }; + TwilioTFLite.prototype.loadInputBuffer = function (inputBuffer) { + var tflite = this._tflite; + var height = tflite._getInputHeight(); + var width = tflite._getInputWidth(); + var pixels = width * height; + var tfliteInputMemoryOffset = tflite._getInputMemoryOffset() / 4; + for (var i = 0; i < pixels; i++) { + var curTFLiteOffset = tfliteInputMemoryOffset + 
i * 3; + var curImageBufferOffset = i * 4; + tflite.HEAPF32[curTFLiteOffset] = inputBuffer[curImageBufferOffset] / 255; + tflite.HEAPF32[curTFLiteOffset + 1] = inputBuffer[curImageBufferOffset + 1] / 255; + tflite.HEAPF32[curTFLiteOffset + 2] = inputBuffer[curImageBufferOffset + 2] / 255; + } + this._inputBuffer = inputBuffer; + }; + TwilioTFLite.prototype.runInference = function () { + var tflite = this._tflite; + var height = tflite._getInputHeight(); + var width = tflite._getInputWidth(); + var pixels = width * height; + var tfliteOutputMemoryOffset = tflite._getOutputMemoryOffset() / 4; + tflite._runInference(); + var inputBuffer = this._inputBuffer || new Uint8ClampedArray(pixels * 4); + for (var i = 0; i < pixels; i++) { + inputBuffer[i * 4 + 3] = Math.round(tflite.HEAPF32[tfliteOutputMemoryOffset + i] * 255); + } + return inputBuffer; + }; + TwilioTFLite.prototype._loadScript = function (path) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + if (loadedScripts.has(path)) { + return [2 /*return*/]; + } + return [2 /*return*/, new Promise(function (resolve, reject) { + var script = document.createElement('script'); + script.onload = function () { + loadedScripts.add(path); + resolve(); + }; + script.onerror = function () { + reject(); + }; + document.head.append(script); + script.src = path; + })]; + }); + }); + }; + TwilioTFLite.prototype._loadWasmModule = function (assetsPath, moduleLoaderName, moduleSimdLoaderName) { + return __awaiter(this, void 0, void 0, function () { + var _a, _b, _c; + return __generator(this, function (_d) { + switch (_d.label) { + case 0: + _d.trys.push([0, 3, , 6]); + return [4 /*yield*/, this._loadScript("".concat(assetsPath).concat(moduleSimdLoaderName))]; + case 1: + _d.sent(); + _a = this; + return [4 /*yield*/, createTwilioTFLiteSIMDModule()]; + case 2: + _a._tflite = _d.sent(); + this._isSimdEnabled = true; + return [3 /*break*/, 6]; + case 3: + _b = _d.sent(); + return [4 /*yield*/, this._loadScript("".concat(assetsPath).concat(moduleLoaderName))]; + case 4: + _d.sent(); + _c = this; + return [4 /*yield*/, createTwilioTFLiteModule()]; + case 5: + _c._tflite = _d.sent(); + this._isSimdEnabled = false; + return [3 /*break*/, 6]; + case 6: return [2 /*return*/]; + } + }); + }); + }; + return TwilioTFLite; +}()); +exports.TwilioTFLite = TwilioTFLite; + +},{}],18:[function(require,module,exports){ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isSupported = exports.isChromiumImageBitmap = exports.isBrowserSupported = void 0; +/** + * @private + */ +function getCanvas() { + return typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(1, 1) : document.createElement('canvas'); +} +/** + * @private + */ +function isBrowserSupported() { + if (typeof window !== 'undefined' && typeof document !== 'undefined') { + return !!(getCanvas().getContext('2d') || getCanvas().getContext('webgl2')); + } + else { + return false; + } +} +exports.isBrowserSupported = isBrowserSupported; +/** + * @private + */ +function isChromiumImageBitmap() { + return typeof chrome === 'object' + && /Chrome/.test(navigator.userAgent) + && typeof createImageBitmap === 'function'; +} +exports.isChromiumImageBitmap = isChromiumImageBitmap; +/** + * Check if the current browser is officially supported by twilio-video-processors.js.
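`loadInputBuffer` and `runInference` above share one convention: the model consumes tightly packed, normalized RGB float32 triples, while the browser hands over RGBA uint8 pixels, and the inferred person probability is written back into the alpha channel of the same RGBA buffer. An isolated sketch of that packing/unpacking; these are hypothetical standalone functions, not bundle exports:

```ts
// RGBA8 canvas pixels -> normalized RGB float32 triples at the model's input offset.
function packInput(pixelsRGBA: Uint8ClampedArray, heapF32: Float32Array, inputOffset: number): void {
  const pixels = pixelsRGBA.length / 4;
  for (let i = 0; i < pixels; i++) {
    heapF32[inputOffset + i * 3] = pixelsRGBA[i * 4] / 255;         // R
    heapF32[inputOffset + i * 3 + 1] = pixelsRGBA[i * 4 + 1] / 255; // G
    heapF32[inputOffset + i * 3 + 2] = pixelsRGBA[i * 4 + 2] / 255; // B (input alpha is dropped)
  }
}

// Model output (one float per pixel) -> alpha channel of the RGBA buffer,
// so downstream stages can read the person mask from the .a component.
function unpackMask(heapF32: Float32Array, outputOffset: number, pixelsRGBA: Uint8ClampedArray): void {
  const pixels = pixelsRGBA.length / 4;
  for (let i = 0; i < pixels; i++) {
    pixelsRGBA[i * 4 + 3] = Math.round(heapF32[outputOffset + i] * 255);
  }
}
```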
+ * This is set to `true` for browsers that support the canvas + * [2D](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D) or + * [webgl2](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext) + * rendering context. + * @example + * ```ts + * import { isSupported } from '@twilio/video-processors'; + * + * if (isSupported) { + * // Initialize the background processors + * } + * ``` + */ +exports.isSupported = isBrowserSupported(); + +},{}],19:[function(require,module,exports){ +"use strict"; +// This file is generated on build. To make changes, see scripts/version.js +Object.defineProperty(exports, "__esModule", { value: true }); +exports.version = void 0; +/** + * The current version of the library. + */ +exports.version = '2.2.0'; + +},{}]},{},[2]); diff --git a/dist/build/twilio-video-processors.min.js b/dist/build/twilio-video-processors.min.js new file mode 100644 index 0000000..7f7b345 --- /dev/null +++ b/dist/build/twilio-video-processors.min.js @@ -0,0 +1,38 @@ +/*! twilio-video-processors.js 2.2.0 + +The following license applies to all parts of this software except as +documented below. + + Copyright (C) 2022 Twilio Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + 3. Neither the name of Twilio nor the names of its contributors may + be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ +*/ +(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i0&&t[t.length-1])&&(op[0]===6||op[0]===2)){_=0;continue}if(op[0]===3&&(!t||op[1]>t[0]&&op[1]viewportHeight||type===types_1.ImageFit.Cover&&viewportHeight>newContentHeight){factor=viewportHeight/newContentHeight;newContentWidth=factor*newContentWidth;newContentHeight=viewportHeight}var x=(viewportWidth-newContentWidth)/2;var y=(viewportHeight-newContentHeight)/2;return{x:x,y:y,w:newContentWidth,h:newContentHeight}};return VirtualBackgroundProcessor}(BackgroundProcessor_1.BackgroundProcessor);exports.VirtualBackgroundProcessor=VirtualBackgroundProcessor},{"../../types":15,"./BackgroundProcessor":4}],7:[function(require,module,exports){"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.inputResolutions=void 0;exports.inputResolutions={"640x360":[640,360],"256x256":[256,256],"256x144":[256,144],"160x96":[160,96]}},{}],8:[function(require,module,exports){"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.createTexture=exports.compileShader=exports.createProgram=exports.createPiplelineStageProgram=exports.glsl=void 0;exports.glsl=String.raw;function createPiplelineStageProgram(gl,vertexShader,fragmentShader,positionBuffer,texCoordBuffer){var program=createProgram(gl,vertexShader,fragmentShader);var positionAttributeLocation=gl.getAttribLocation(program,"a_position");gl.enableVertexAttribArray(positionAttributeLocation);gl.bindBuffer(gl.ARRAY_BUFFER,positionBuffer);gl.vertexAttribPointer(positionAttributeLocation,2,gl.FLOAT,false,0,0);var texCoordAttributeLocation=gl.getAttribLocation(program,"a_texCoord");gl.enableVertexAttribArray(texCoordAttributeLocation);gl.bindBuffer(gl.ARRAY_BUFFER,texCoordBuffer);gl.vertexAttribPointer(texCoordAttributeLocation,2,gl.FLOAT,false,0,0);return program}exports.createPiplelineStageProgram=createPiplelineStageProgram;function createProgram(gl,vertexShader,fragmentShader){var program=gl.createProgram();gl.attachShader(program,vertexShader);gl.attachShader(program,fragmentShader);gl.linkProgram(program);if(!gl.getProgramParameter(program,gl.LINK_STATUS)){throw new Error("Could not link WebGL program: ".concat(gl.getProgramInfoLog(program)))}return program}exports.createProgram=createProgram;function compileShader(gl,shaderType,shaderSource){var shader=gl.createShader(shaderType);gl.shaderSource(shader,shaderSource);gl.compileShader(shader);if(!gl.getShaderParameter(shader,gl.COMPILE_STATUS)){throw new Error("Could not compile shader: ".concat(gl.getShaderInfoLog(shader)))}return shader}exports.compileShader=compileShader;function createTexture(gl,internalformat,width,height,minFilter,magFilter){if(minFilter===void 0){minFilter=gl.NEAREST}if(magFilter===void 0){magFilter=gl.NEAREST}var texture=gl.createTexture();gl.bindTexture(gl.TEXTURE_2D,texture);gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_WRAP_S,gl.CLAMP_TO_EDGE);gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_WRAP_T,gl.CLAMP_TO_EDGE);gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_MIN_FILTER,minFilter);gl.texParameteri(gl.TEXTURE_2D,gl.TEXTURE_MAG_FILTER,magFilter);gl.texStorage2D(gl.TEXTURE_2D,1,internalformat,width,height);return 
texture}exports.createTexture=createTexture},{}],9:[function(require,module,exports){"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.buildWebGL2Pipeline=void 0;var webgl2Pipeline_1=require("./pipelines/webgl2Pipeline");Object.defineProperty(exports,"buildWebGL2Pipeline",{enumerable:true,get:function(){return webgl2Pipeline_1.buildWebGL2Pipeline}})},{"./pipelines/webgl2Pipeline":14}],10:[function(require,module,exports){"use strict";var __makeTemplateObject=this&&this.__makeTemplateObject||function(cooked,raw){if(Object.defineProperty){Object.defineProperty(cooked,"raw",{value:raw})}else{cooked.raw=raw}return cooked};Object.defineProperty(exports,"__esModule",{value:true});exports.buildBackgroundBlurStage=void 0;var webglHelper_1=require("../helpers/webglHelper");function buildBackgroundBlurStage(gl,vertexShader,positionBuffer,texCoordBuffer,personMaskTexture,canvas){var blurPass=buildBlurPass(gl,vertexShader,positionBuffer,texCoordBuffer,personMaskTexture,canvas);var blendPass=buildBlendPass(gl,positionBuffer,texCoordBuffer,canvas);function render(){blurPass.render();blendPass.render()}function updateCoverage(coverage){blendPass.updateCoverage(coverage)}function cleanUp(){blendPass.cleanUp();blurPass.cleanUp()}return{render:render,updateCoverage:updateCoverage,cleanUp:cleanUp}}exports.buildBackgroundBlurStage=buildBackgroundBlurStage;function buildBlurPass(gl,vertexShader,positionBuffer,texCoordBuffer,personMaskTexture,canvas){var fragmentShaderSource=(0,webglHelper_1.glsl)(templateObject_1||(templateObject_1=__makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n "],["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n 
outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n "])));var scale=.5;var outputWidth=canvas.width*scale;var outputHeight=canvas.height*scale;var texelWidth=1/outputWidth;var texelHeight=1/outputHeight;var fragmentShader=(0,webglHelper_1.compileShader)(gl,gl.FRAGMENT_SHADER,fragmentShaderSource);var program=(0,webglHelper_1.createPiplelineStageProgram)(gl,vertexShader,fragmentShader,positionBuffer,texCoordBuffer);var inputFrameLocation=gl.getUniformLocation(program,"u_inputFrame");var personMaskLocation=gl.getUniformLocation(program,"u_personMask");var texelSizeLocation=gl.getUniformLocation(program,"u_texelSize");var texture1=(0,webglHelper_1.createTexture)(gl,gl.RGBA8,outputWidth,outputHeight,gl.NEAREST,gl.LINEAR);var texture2=(0,webglHelper_1.createTexture)(gl,gl.RGBA8,outputWidth,outputHeight,gl.NEAREST,gl.LINEAR);var frameBuffer1=gl.createFramebuffer();gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer1);gl.framebufferTexture2D(gl.FRAMEBUFFER,gl.COLOR_ATTACHMENT0,gl.TEXTURE_2D,texture1,0);var frameBuffer2=gl.createFramebuffer();gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer2);gl.framebufferTexture2D(gl.FRAMEBUFFER,gl.COLOR_ATTACHMENT0,gl.TEXTURE_2D,texture2,0);gl.useProgram(program);gl.uniform1i(personMaskLocation,1);function render(){gl.viewport(0,0,outputWidth,outputHeight);gl.useProgram(program);gl.uniform1i(inputFrameLocation,0);gl.activeTexture(gl.TEXTURE1);gl.bindTexture(gl.TEXTURE_2D,personMaskTexture);for(var i=0;i<8;i++){gl.uniform2f(texelSizeLocation,0,texelHeight);gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer1);gl.drawArrays(gl.TRIANGLE_STRIP,0,4);gl.activeTexture(gl.TEXTURE2);gl.bindTexture(gl.TEXTURE_2D,texture1);gl.uniform1i(inputFrameLocation,2);gl.uniform2f(texelSizeLocation,texelWidth,0);gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer2);gl.drawArrays(gl.TRIANGLE_STRIP,0,4);gl.bindTexture(gl.TEXTURE_2D,texture2)}}function cleanUp(){gl.deleteFramebuffer(frameBuffer2);gl.deleteFramebuffer(frameBuffer1);gl.deleteTexture(texture2);gl.deleteTexture(texture1);gl.deleteProgram(program);gl.deleteShader(fragmentShader)}return{render:render,cleanUp:cleanUp}}function buildBlendPass(gl,positionBuffer,texCoordBuffer,canvas){var vertexShaderSource=(0,webglHelper_1.glsl)(templateObject_2||(templateObject_2=__makeTemplateObject(["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "],["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "])));var fragmentShaderSource=(0,webglHelper_1.glsl)(templateObject_3||(templateObject_3=__makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n "],["#version 300 es\n\n precision highp float;\n\n uniform 
sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n "])));var outputWidth=canvas.width,outputHeight=canvas.height;var vertexShader=(0,webglHelper_1.compileShader)(gl,gl.VERTEX_SHADER,vertexShaderSource);var fragmentShader=(0,webglHelper_1.compileShader)(gl,gl.FRAGMENT_SHADER,fragmentShaderSource);var program=(0,webglHelper_1.createPiplelineStageProgram)(gl,vertexShader,fragmentShader,positionBuffer,texCoordBuffer);var inputFrameLocation=gl.getUniformLocation(program,"u_inputFrame");var personMaskLocation=gl.getUniformLocation(program,"u_personMask");var blurredInputFrame=gl.getUniformLocation(program,"u_blurredInputFrame");var coverageLocation=gl.getUniformLocation(program,"u_coverage");gl.useProgram(program);gl.uniform1i(inputFrameLocation,0);gl.uniform1i(personMaskLocation,1);gl.uniform1i(blurredInputFrame,2);gl.uniform2f(coverageLocation,0,1);function render(){gl.viewport(0,0,outputWidth,outputHeight);gl.useProgram(program);gl.bindFramebuffer(gl.FRAMEBUFFER,null);gl.drawArrays(gl.TRIANGLE_STRIP,0,4)}function updateCoverage(coverage){gl.useProgram(program);gl.uniform2f(coverageLocation,coverage[0],coverage[1])}function cleanUp(){gl.deleteProgram(program);gl.deleteShader(fragmentShader);gl.deleteShader(vertexShader)}return{render:render,updateCoverage:updateCoverage,cleanUp:cleanUp}}var templateObject_1,templateObject_2,templateObject_3},{"../helpers/webglHelper":8}],11:[function(require,module,exports){"use strict";var __makeTemplateObject=this&&this.__makeTemplateObject||function(cooked,raw){if(Object.defineProperty){Object.defineProperty(cooked,"raw",{value:raw})}else{cooked.raw=raw}return cooked};Object.defineProperty(exports,"__esModule",{value:true});exports.buildBackgroundImageStage=void 0;var webglHelper_1=require("../helpers/webglHelper");function buildBackgroundImageStage(gl,positionBuffer,texCoordBuffer,personMaskTexture,backgroundImage,canvas){var vertexShaderSource=(0,webglHelper_1.glsl)(templateObject_1||(templateObject_1=__makeTemplateObject(["#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n "],["#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n "])));var fragmentShaderSource=(0,webglHelper_1.glsl)(templateObject_2||(templateObject_2=__makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform 
sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n "],["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n "])));var outputWidth=canvas.width,outputHeight=canvas.height;var outputRatio=outputWidth/outputHeight;var vertexShader=(0,webglHelper_1.compileShader)(gl,gl.VERTEX_SHADER,vertexShaderSource);var fragmentShader=(0,webglHelper_1.compileShader)(gl,gl.FRAGMENT_SHADER,fragmentShaderSource);var program=(0,webglHelper_1.createPiplelineStageProgram)(gl,vertexShader,fragmentShader,positionBuffer,texCoordBuffer);var backgroundScaleLocation=gl.getUniformLocation(program,"u_backgroundScale");var backgroundOffsetLocation=gl.getUniformLocation(program,"u_backgroundOffset");var inputFrameLocation=gl.getUniformLocation(program,"u_inputFrame");var personMaskLocation=gl.getUniformLocation(program,"u_personMask");var backgroundLocation=gl.getUniformLocation(program,"u_background");var coverageLocation=gl.getUniformLocation(program,"u_coverage");var lightWrappingLocation=gl.getUniformLocation(program,"u_lightWrapping");var blendModeLocation=gl.getUniformLocation(program,"u_blendMode");gl.useProgram(program);gl.uniform2f(backgroundScaleLocation,1,1);gl.uniform2f(backgroundOffsetLocation,0,0);gl.uniform1i(inputFrameLocation,0);gl.uniform1i(personMaskLocation,1);gl.uniform2f(coverageLocation,0,1);gl.uniform1f(lightWrappingLocation,0);gl.uniform1f(blendModeLocation,0);var backgroundTexture=null;if(backgroundImage===null||backgroundImage===void 0?void 0:backgroundImage.complete){updateBackgroundImage(backgroundImage)}else 
if(backgroundImage){backgroundImage.onload=function(){updateBackgroundImage(backgroundImage)}}function render(){gl.viewport(0,0,outputWidth,outputHeight);gl.useProgram(program);gl.activeTexture(gl.TEXTURE1);gl.bindTexture(gl.TEXTURE_2D,personMaskTexture);if(backgroundTexture!==null){gl.activeTexture(gl.TEXTURE2);gl.bindTexture(gl.TEXTURE_2D,backgroundTexture);gl.uniform1i(backgroundLocation,2)}gl.bindFramebuffer(gl.FRAMEBUFFER,null);gl.drawArrays(gl.TRIANGLE_STRIP,0,4)}function updateBackgroundImage(backgroundImage){backgroundTexture=(0,webglHelper_1.createTexture)(gl,gl.RGBA8,backgroundImage.naturalWidth,backgroundImage.naturalHeight,gl.LINEAR,gl.LINEAR);gl.texSubImage2D(gl.TEXTURE_2D,0,0,0,backgroundImage.naturalWidth,backgroundImage.naturalHeight,gl.RGBA,gl.UNSIGNED_BYTE,backgroundImage);var xOffset=0;var yOffset=0;var backgroundWidth=backgroundImage.naturalWidth;var backgroundHeight=backgroundImage.naturalHeight;var backgroundRatio=backgroundWidth/backgroundHeight;if(backgroundRatio= 4.0) {\n newVal = 1.0;\n } else {\n for (float i = 0.0; i <= u_radius - u_offset; i += u_step) {\n vec2 shift = vec2(i, i) * u_texelSize;\n vec2 coord = vec2(v_texCoord + shift);\n float spaceWeight = calculateSpaceWeight(coord);\n float colorWeight = calculateColorWeight(coord);\n float weight = spaceWeight * colorWeight;\n float alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * alpha;\n\n if (i != 0.0) {\n shift = vec2(i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a; \n }\n }\n newVal /= totalWeight;\n }\n\n outColor = vec4(vec3(0.0), newVal);\n }\n "],["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n return exp(-0.5 * x * x / sigma / sigma);\n }\n\n float calculateSpaceWeight(vec2 coord) {\n float x = distance(v_texCoord, coord);\n float sigma = u_sigmaTexel;\n return gaussian(x, sigma);\n }\n\n float calculateColorWeight(vec2 coord) {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 coordColor = texture(u_inputFrame, coord).rgb;\n float x = distance(centerColor, coordColor);\n float sigma = u_sigmaColor;\n return gaussian(x, sigma);\n }\n\n void main() {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n float newVal = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(v_texCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(v_texCoord + vec2(u_radius, -u_radius) * u_texelSize);\n 
vec2 leftBottomCoord = vec2(v_texCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(v_texCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n newVal = 0.0;\n } else if (totalSegAlpha >= 4.0) {\n newVal = 1.0;\n } else {\n for (float i = 0.0; i <= u_radius - u_offset; i += u_step) {\n vec2 shift = vec2(i, i) * u_texelSize;\n vec2 coord = vec2(v_texCoord + shift);\n float spaceWeight = calculateSpaceWeight(coord);\n float colorWeight = calculateColorWeight(coord);\n float weight = spaceWeight * colorWeight;\n float alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * alpha;\n\n if (i != 0.0) {\n shift = vec2(i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a; \n }\n }\n newVal /= totalWeight;\n }\n\n outColor = vec4(vec3(0.0), newVal);\n }\n "])));var _a=segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution],segmentationWidth=_a[0],segmentationHeight=_a[1];var outputWidth=canvas.width,outputHeight=canvas.height;var texelWidth=1/outputWidth;var texelHeight=1/outputHeight;var fragmentShader=(0,webglHelper_1.compileShader)(gl,gl.FRAGMENT_SHADER,fragmentShaderSource);var program=(0,webglHelper_1.createPiplelineStageProgram)(gl,vertexShader,fragmentShader,positionBuffer,texCoordBuffer);var inputFrameLocation=gl.getUniformLocation(program,"u_inputFrame");var segmentationMaskLocation=gl.getUniformLocation(program,"u_segmentationMask");var texelSizeLocation=gl.getUniformLocation(program,"u_texelSize");var stepLocation=gl.getUniformLocation(program,"u_step");var radiusLocation=gl.getUniformLocation(program,"u_radius");var offsetLocation=gl.getUniformLocation(program,"u_offset");var sigmaTexelLocation=gl.getUniformLocation(program,"u_sigmaTexel");var sigmaColorLocation=gl.getUniformLocation(program,"u_sigmaColor");var frameBuffer=gl.createFramebuffer();gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer);gl.framebufferTexture2D(gl.FRAMEBUFFER,gl.COLOR_ATTACHMENT0,gl.TEXTURE_2D,outputTexture,0);gl.useProgram(program);gl.uniform1i(inputFrameLocation,0);gl.uniform1i(segmentationMaskLocation,1);gl.uniform2f(texelSizeLocation,texelWidth,texelHeight);updateSigmaSpace(0);updateSigmaColor(0);function 
render(){gl.viewport(0,0,outputWidth,outputHeight);gl.useProgram(program);gl.activeTexture(gl.TEXTURE1);gl.bindTexture(gl.TEXTURE_2D,inputTexture);gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer);gl.drawArrays(gl.TRIANGLE_STRIP,0,4)}function updateSigmaSpace(sigmaSpace){sigmaSpace*=Math.max(outputWidth/segmentationWidth,outputHeight/segmentationHeight);var kSparsityFactor=.66;var sparsity=Math.max(1,Math.sqrt(sigmaSpace)*kSparsityFactor);var step=sparsity;var radius=sigmaSpace;var offset=step>1?step*.5:0;var sigmaTexel=Math.max(texelWidth,texelHeight)*sigmaSpace;gl.useProgram(program);gl.uniform1f(stepLocation,step);gl.uniform1f(radiusLocation,radius);gl.uniform1f(offsetLocation,offset);gl.uniform1f(sigmaTexelLocation,sigmaTexel)}function updateSigmaColor(sigmaColor){gl.useProgram(program);gl.uniform1f(sigmaColorLocation,sigmaColor)}function cleanUp(){gl.deleteFramebuffer(frameBuffer);gl.deleteProgram(program);gl.deleteShader(fragmentShader)}return{render:render,updateSigmaSpace:updateSigmaSpace,updateSigmaColor:updateSigmaColor,cleanUp:cleanUp}}exports.buildFastBilateralFilterStage=buildFastBilateralFilterStage;var templateObject_1},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8}],13:[function(require,module,exports){"use strict";var __makeTemplateObject=this&&this.__makeTemplateObject||function(cooked,raw){if(Object.defineProperty){Object.defineProperty(cooked,"raw",{value:raw})}else{cooked.raw=raw}return cooked};Object.defineProperty(exports,"__esModule",{value:true});exports.buildLoadSegmentationStage=void 0;var segmentationHelper_1=require("../helpers/segmentationHelper");var webglHelper_1=require("../helpers/webglHelper");function buildLoadSegmentationStage(gl,vertexShader,positionBuffer,texCoordBuffer,segmentationConfig,outputTexture){var fragmentShaderSource=(0,webglHelper_1.glsl)(templateObject_1||(templateObject_1=__makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).a;\n outColor = vec4(vec3(0.0), segmentation);\n }\n "],["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).a;\n outColor = vec4(vec3(0.0), segmentation);\n }\n "])));var _a=segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution],segmentationWidth=_a[0],segmentationHeight=_a[1];var fragmentShader=(0,webglHelper_1.compileShader)(gl,gl.FRAGMENT_SHADER,fragmentShaderSource);var program=(0,webglHelper_1.createPiplelineStageProgram)(gl,vertexShader,fragmentShader,positionBuffer,texCoordBuffer);var inputLocation=gl.getUniformLocation(program,"u_inputSegmentation");var inputTexture=(0,webglHelper_1.createTexture)(gl,gl.RGBA8,segmentationWidth,segmentationHeight);var frameBuffer=gl.createFramebuffer();gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer);gl.framebufferTexture2D(gl.FRAMEBUFFER,gl.COLOR_ATTACHMENT0,gl.TEXTURE_2D,outputTexture,0);gl.useProgram(program);gl.uniform1i(inputLocation,1);function 
render(segmentationData){gl.viewport(0,0,segmentationWidth,segmentationHeight);gl.useProgram(program);gl.activeTexture(gl.TEXTURE1);gl.bindTexture(gl.TEXTURE_2D,inputTexture);gl.texSubImage2D(gl.TEXTURE_2D,0,0,0,segmentationWidth,segmentationHeight,gl.RGBA,gl.UNSIGNED_BYTE,segmentationData);gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer);gl.drawArrays(gl.TRIANGLE_STRIP,0,4)}function cleanUp(){gl.deleteFramebuffer(frameBuffer);gl.deleteTexture(inputTexture);gl.deleteProgram(program);gl.deleteShader(fragmentShader)}return{render:render,cleanUp:cleanUp}}exports.buildLoadSegmentationStage=buildLoadSegmentationStage;var templateObject_1},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8}],14:[function(require,module,exports){"use strict";var __makeTemplateObject=this&&this.__makeTemplateObject||function(cooked,raw){if(Object.defineProperty){Object.defineProperty(cooked,"raw",{value:raw})}else{cooked.raw=raw}return cooked};var __awaiter=this&&this.__awaiter||function(thisArg,_arguments,P,generator){function adopt(value){return value instanceof P?value:new P(function(resolve){resolve(value)})}return new(P||(P=Promise))(function(resolve,reject){function fulfilled(value){try{step(generator.next(value))}catch(e){reject(e)}}function rejected(value){try{step(generator["throw"](value))}catch(e){reject(e)}}function step(result){result.done?resolve(result.value):adopt(result.value).then(fulfilled,rejected)}step((generator=generator.apply(thisArg,_arguments||[])).next())})};var __generator=this&&this.__generator||function(thisArg,body){var _={label:0,sent:function(){if(t[0]&1)throw t[1];return t[1]},trys:[],ops:[]},f,y,t,g;return g={next:verb(0),throw:verb(1),return:verb(2)},typeof Symbol==="function"&&(g[Symbol.iterator]=function(){return this}),g;function verb(n){return function(v){return step([n,v])}}function step(op){if(f)throw new TypeError("Generator is already executing.");while(g&&(g=0,op[0]&&(_=0)),_)try{if(f=1,y&&(t=op[0]&2?y["return"]:op[0]?y["throw"]||((t=y["return"])&&t.call(y),0):y.next)&&!(t=t.call(y,op[1])).done)return t;if(y=0,t)op=[op[0]&2,t.value];switch(op[0]){case 0:case 1:t=op;break;case 4:_.label++;return{value:op[1],done:false};case 5:_.label++;y=op[1];op=[0];continue;case 7:op=_.ops.pop();_.trys.pop();continue;default:if(!(t=_.trys,t=t.length>0&&t[t.length-1])&&(op[0]===6||op[0]===2)){_=0;continue}if(op[0]===3&&(!t||op[1]>t[0]&&op[1]Benchmark.cacheSize){timingCache.splice(0,timingCache.length-Benchmark.cacheSize)}};Benchmark.cacheSize=41;return Benchmark}();exports.Benchmark=Benchmark},{}],17:[function(require,module,exports){"use strict";var __awaiter=this&&this.__awaiter||function(thisArg,_arguments,P,generator){function adopt(value){return value instanceof P?value:new P(function(resolve){resolve(value)})}return new(P||(P=Promise))(function(resolve,reject){function fulfilled(value){try{step(generator.next(value))}catch(e){reject(e)}}function rejected(value){try{step(generator["throw"](value))}catch(e){reject(e)}}function step(result){result.done?resolve(result.value):adopt(result.value).then(fulfilled,rejected)}step((generator=generator.apply(thisArg,_arguments||[])).next())})};var __generator=this&&this.__generator||function(thisArg,body){var _={label:0,sent:function(){if(t[0]&1)throw t[1];return t[1]},trys:[],ops:[]},f,y,t,g;return g={next:verb(0),throw:verb(1),return:verb(2)},typeof Symbol==="function"&&(g[Symbol.iterator]=function(){return this}),g;function verb(n){return function(v){return step([n,v])}}function step(op){if(f)throw new TypeError("Generator is 
already executing.");while(g&&(g=0,op[0]&&(_=0)),_)try{if(f=1,y&&(t=op[0]&2?y["return"]:op[0]?y["throw"]||((t=y["return"])&&t.call(y),0):y.next)&&!(t=t.call(y,op[1])).done)return t;if(y=0,t)op=[op[0]&2,t.value];switch(op[0]){case 0:case 1:t=op;break;case 4:_.label++;return{value:op[1],done:false};case 5:_.label++;y=op[1];op=[0];continue;case 7:op=_.ops.pop();_.trys.pop();continue;default:if(!(t=_.trys,t=t.length>0&&t[t.length-1])&&(op[0]===6||op[0]===2)){_=0;continue}if(op[0]===3&&(!t||op[1]>t[0]&&op[1]{var Se=Object.create;var re=Object.defineProperty;var we=Object.getOwnPropertyDescriptor;var Te=Object.getOwnPropertyNames;var ke=Object.getPrototypeOf,Qe=Object.prototype.hasOwnProperty;var Pe=(t,e)=>()=>(e||t((e={exports:{}}).exports,e),e.exports);var Ie=(t,e,r,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let i of Te(e))!Qe.call(t,i)&&i!==r&&re(t,i,{get:()=>e[i],enumerable:!(n=we(e,i))||n.enumerable});return t};var Ce=(t,e,r)=>(r=t!=null?Se(ke(t)):{},Ie(e||!t||!t.__esModule?re(r,"default",{value:t,enumerable:!0}):r,t));var ae=Pe((se,oe)=>{(function(){var t=function(e){var r=new t.Builder;return r.pipeline.add(t.trimmer,t.stopWordFilter,t.stemmer),r.searchPipeline.add(t.stemmer),e.call(r,r),r.build()};t.version="2.3.9";t.utils={},t.utils.warn=function(e){return function(r){e.console&&console.warn&&console.warn(r)}}(this),t.utils.asString=function(e){return e==null?"":e.toString()},t.utils.clone=function(e){if(e==null)return e;for(var r=Object.create(null),n=Object.keys(e),i=0;i0){var d=t.utils.clone(r)||{};d.position=[a,u],d.index=s.length,s.push(new t.Token(n.slice(a,o),d))}a=o+1}}return s},t.tokenizer.separator=/[\s\-]+/;t.Pipeline=function(){this._stack=[]},t.Pipeline.registeredFunctions=Object.create(null),t.Pipeline.registerFunction=function(e,r){r in this.registeredFunctions&&t.utils.warn("Overwriting existing registered function: "+r),e.label=r,t.Pipeline.registeredFunctions[e.label]=e},t.Pipeline.warnIfFunctionNotRegistered=function(e){var r=e.label&&e.label in this.registeredFunctions;r||t.utils.warn(`Function is not registered with pipeline. This may cause problems when serialising the index. 
+`,e)},t.Pipeline.load=function(e){var r=new t.Pipeline;return e.forEach(function(n){var i=t.Pipeline.registeredFunctions[n];if(i)r.add(i);else throw new Error("Cannot load unregistered function: "+n)}),r},t.Pipeline.prototype.add=function(){var e=Array.prototype.slice.call(arguments);e.forEach(function(r){t.Pipeline.warnIfFunctionNotRegistered(r),this._stack.push(r)},this)},t.Pipeline.prototype.after=function(e,r){t.Pipeline.warnIfFunctionNotRegistered(r);var n=this._stack.indexOf(e);if(n==-1)throw new Error("Cannot find existingFn");n=n+1,this._stack.splice(n,0,r)},t.Pipeline.prototype.before=function(e,r){t.Pipeline.warnIfFunctionNotRegistered(r);var n=this._stack.indexOf(e);if(n==-1)throw new Error("Cannot find existingFn");this._stack.splice(n,0,r)},t.Pipeline.prototype.remove=function(e){var r=this._stack.indexOf(e);r!=-1&&this._stack.splice(r,1)},t.Pipeline.prototype.run=function(e){for(var r=this._stack.length,n=0;n1&&(oe&&(n=s),o!=e);)i=n-r,s=r+Math.floor(i/2),o=this.elements[s*2];if(o==e||o>e)return s*2;if(ol?d+=2:a==l&&(r+=n[u+1]*i[d+1],u+=2,d+=2);return r},t.Vector.prototype.similarity=function(e){return this.dot(e)/this.magnitude()||0},t.Vector.prototype.toArray=function(){for(var e=new Array(this.elements.length/2),r=1,n=0;r0){var o=s.str.charAt(0),a;o in s.node.edges?a=s.node.edges[o]:(a=new t.TokenSet,s.node.edges[o]=a),s.str.length==1&&(a.final=!0),i.push({node:a,editsRemaining:s.editsRemaining,str:s.str.slice(1)})}if(s.editsRemaining!=0){if("*"in s.node.edges)var l=s.node.edges["*"];else{var l=new t.TokenSet;s.node.edges["*"]=l}if(s.str.length==0&&(l.final=!0),i.push({node:l,editsRemaining:s.editsRemaining-1,str:s.str}),s.str.length>1&&i.push({node:s.node,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)}),s.str.length==1&&(s.node.final=!0),s.str.length>=1){if("*"in s.node.edges)var u=s.node.edges["*"];else{var u=new t.TokenSet;s.node.edges["*"]=u}s.str.length==1&&(u.final=!0),i.push({node:u,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)})}if(s.str.length>1){var d=s.str.charAt(0),m=s.str.charAt(1),y;m in s.node.edges?y=s.node.edges[m]:(y=new t.TokenSet,s.node.edges[m]=y),s.str.length==1&&(y.final=!0),i.push({node:y,editsRemaining:s.editsRemaining-1,str:d+s.str.slice(2)})}}}return n},t.TokenSet.fromString=function(e){for(var r=new t.TokenSet,n=r,i=0,s=e.length;i=e;r--){var n=this.uncheckedNodes[r],i=n.child.toString();i in this.minimizedNodes?n.parent.edges[n.char]=this.minimizedNodes[i]:(n.child._str=i,this.minimizedNodes[i]=n.child),this.uncheckedNodes.pop()}};t.Index=function(e){this.invertedIndex=e.invertedIndex,this.fieldVectors=e.fieldVectors,this.tokenSet=e.tokenSet,this.fields=e.fields,this.pipeline=e.pipeline},t.Index.prototype.search=function(e){return this.query(function(r){var n=new t.QueryParser(e,r);n.parse()})},t.Index.prototype.query=function(e){for(var r=new t.Query(this.fields),n=Object.create(null),i=Object.create(null),s=Object.create(null),o=Object.create(null),a=Object.create(null),l=0;l1?this._b=1:this._b=e},t.Builder.prototype.k1=function(e){this._k1=e},t.Builder.prototype.add=function(e,r){var n=e[this._ref],i=Object.keys(this._fields);this._documents[n]=r||{},this.documentCount+=1;for(var s=0;s=this.length)return t.QueryLexer.EOS;var e=this.str.charAt(this.pos);return this.pos+=1,e},t.QueryLexer.prototype.width=function(){return 
this.pos-this.start},t.QueryLexer.prototype.ignore=function(){this.start==this.pos&&(this.pos+=1),this.start=this.pos},t.QueryLexer.prototype.backup=function(){this.pos-=1},t.QueryLexer.prototype.acceptDigitRun=function(){var e,r;do e=this.next(),r=e.charCodeAt(0);while(r>47&&r<58);e!=t.QueryLexer.EOS&&this.backup()},t.QueryLexer.prototype.more=function(){return this.pos1&&(e.backup(),e.emit(t.QueryLexer.TERM)),e.ignore(),e.more())return t.QueryLexer.lexText},t.QueryLexer.lexEditDistance=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.EDIT_DISTANCE),t.QueryLexer.lexText},t.QueryLexer.lexBoost=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.BOOST),t.QueryLexer.lexText},t.QueryLexer.lexEOS=function(e){e.width()>0&&e.emit(t.QueryLexer.TERM)},t.QueryLexer.termSeparator=t.tokenizer.separator,t.QueryLexer.lexText=function(e){for(;;){var r=e.next();if(r==t.QueryLexer.EOS)return t.QueryLexer.lexEOS;if(r.charCodeAt(0)==92){e.escapeCharacter();continue}if(r==":")return t.QueryLexer.lexField;if(r=="~")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexEditDistance;if(r=="^")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexBoost;if(r=="+"&&e.width()===1||r=="-"&&e.width()===1)return e.emit(t.QueryLexer.PRESENCE),t.QueryLexer.lexText;if(r.match(t.QueryLexer.termSeparator))return t.QueryLexer.lexTerm}},t.QueryParser=function(e,r){this.lexer=new t.QueryLexer(e),this.query=r,this.currentClause={},this.lexemeIdx=0},t.QueryParser.prototype.parse=function(){this.lexer.run(),this.lexemes=this.lexer.lexemes;for(var e=t.QueryParser.parseClause;e;)e=e(this);return this.query},t.QueryParser.prototype.peekLexeme=function(){return this.lexemes[this.lexemeIdx]},t.QueryParser.prototype.consumeLexeme=function(){var e=this.peekLexeme();return this.lexemeIdx+=1,e},t.QueryParser.prototype.nextClause=function(){var e=this.currentClause;this.query.clause(e),this.currentClause={}},t.QueryParser.parseClause=function(e){var r=e.peekLexeme();if(r!=null)switch(r.type){case t.QueryLexer.PRESENCE:return t.QueryParser.parsePresence;case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var n="expected either a field or a term, found "+r.type;throw r.str.length>=1&&(n+=" with value '"+r.str+"'"),new t.QueryParseError(n,r.start,r.end)}},t.QueryParser.parsePresence=function(e){var r=e.consumeLexeme();if(r!=null){switch(r.str){case"-":e.currentClause.presence=t.Query.presence.PROHIBITED;break;case"+":e.currentClause.presence=t.Query.presence.REQUIRED;break;default:var n="unrecognised presence operator'"+r.str+"'";throw new t.QueryParseError(n,r.start,r.end)}var i=e.peekLexeme();if(i==null){var n="expecting term or field, found nothing";throw new t.QueryParseError(n,r.start,r.end)}switch(i.type){case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var n="expecting term or field, found '"+i.type+"'";throw new t.QueryParseError(n,i.start,i.end)}}},t.QueryParser.parseField=function(e){var r=e.consumeLexeme();if(r!=null){if(e.query.allFields.indexOf(r.str)==-1){var n=e.query.allFields.map(function(o){return"'"+o+"'"}).join(", "),i="unrecognised field '"+r.str+"', possible fields: "+n;throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.fields=[r.str];var s=e.peekLexeme();if(s==null){var i="expecting term, found nothing";throw new t.QueryParseError(i,r.start,r.end)}switch(s.type){case t.QueryLexer.TERM:return 
t.QueryParser.parseTerm;default:var i="expecting term, found '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseTerm=function(e){var r=e.consumeLexeme();if(r!=null){e.currentClause.term=r.str.toLowerCase(),r.str.indexOf("*")!=-1&&(e.currentClause.usePipeline=!1);var n=e.peekLexeme();if(n==null){e.nextClause();return}switch(n.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+n.type+"'";throw new t.QueryParseError(i,n.start,n.end)}}},t.QueryParser.parseEditDistance=function(e){var r=e.consumeLexeme();if(r!=null){var n=parseInt(r.str,10);if(isNaN(n)){var i="edit distance must be numeric";throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.editDistance=n;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseBoost=function(e){var r=e.consumeLexeme();if(r!=null){var n=parseInt(r.str,10);if(isNaN(n)){var i="boost must be numeric";throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.boost=n;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},function(e,r){typeof define=="function"&&define.amd?define(r):typeof se=="object"?oe.exports=r():e.lunr=r()}(this,function(){return t})})()});var ne=[];function G(t,e){ne.push({selector:e,constructor:t})}var U=class{constructor(){this.alwaysVisibleMember=null;this.createComponents(document.body),this.ensureActivePageVisible(),this.ensureFocusedElementVisible(),this.listenForCodeCopies(),window.addEventListener("hashchange",()=>this.ensureFocusedElementVisible())}createComponents(e){ne.forEach(r=>{e.querySelectorAll(r.selector).forEach(n=>{n.dataset.hasInstance||(new r.constructor({el:n,app:this}),n.dataset.hasInstance=String(!0))})})}filterChanged(){this.ensureFocusedElementVisible()}ensureActivePageVisible(){let e=document.querySelector(".tsd-navigation .current"),r=e?.parentElement;for(;r&&!r.classList.contains(".tsd-navigation");)r instanceof HTMLDetailsElement&&(r.open=!0),r=r.parentElement;if(e){let n=e.getBoundingClientRect().top-document.documentElement.clientHeight/4;document.querySelector(".site-menu").scrollTop=n}}ensureFocusedElementVisible(){if(this.alwaysVisibleMember&&(this.alwaysVisibleMember.classList.remove("always-visible"),this.alwaysVisibleMember.firstElementChild.remove(),this.alwaysVisibleMember=null),!location.hash)return;let 
e=document.getElementById(location.hash.substring(1));if(!e)return;let r=e.parentElement;for(;r&&r.tagName!=="SECTION";)r=r.parentElement;if(r&&r.offsetParent==null){this.alwaysVisibleMember=r,r.classList.add("always-visible");let n=document.createElement("p");n.classList.add("warning"),n.textContent="This member is normally hidden due to your filter settings.",r.prepend(n)}}listenForCodeCopies(){document.querySelectorAll("pre > button").forEach(e=>{let r;e.addEventListener("click",()=>{e.previousElementSibling instanceof HTMLElement&&navigator.clipboard.writeText(e.previousElementSibling.innerText.trim()),e.textContent="Copied!",e.classList.add("visible"),clearTimeout(r),r=setTimeout(()=>{e.classList.remove("visible"),r=setTimeout(()=>{e.textContent="Copy"},100)},1e3)})})}};var ie=(t,e=100)=>{let r;return()=>{clearTimeout(r),r=setTimeout(()=>t(),e)}};var ce=Ce(ae());function de(){let t=document.getElementById("tsd-search");if(!t)return;let e=document.getElementById("tsd-search-script");t.classList.add("loading"),e&&(e.addEventListener("error",()=>{t.classList.remove("loading"),t.classList.add("failure")}),e.addEventListener("load",()=>{t.classList.remove("loading"),t.classList.add("ready")}),window.searchData&&t.classList.remove("loading"));let r=document.querySelector("#tsd-search input"),n=document.querySelector("#tsd-search .results");if(!r||!n)throw new Error("The input field or the result list wrapper was not found");let i=!1;n.addEventListener("mousedown",()=>i=!0),n.addEventListener("mouseup",()=>{i=!1,t.classList.remove("has-focus")}),r.addEventListener("focus",()=>t.classList.add("has-focus")),r.addEventListener("blur",()=>{i||(i=!1,t.classList.remove("has-focus"))});let s={base:t.dataset.base+"/"};Oe(t,n,r,s)}function Oe(t,e,r,n){r.addEventListener("input",ie(()=>{Re(t,e,r,n)},200));let i=!1;r.addEventListener("keydown",s=>{i=!0,s.key=="Enter"?Fe(e,r):s.key=="Escape"?r.blur():s.key=="ArrowUp"?ue(e,-1):s.key==="ArrowDown"?ue(e,1):i=!1}),r.addEventListener("keypress",s=>{i&&s.preventDefault()}),document.body.addEventListener("keydown",s=>{s.altKey||s.ctrlKey||s.metaKey||!r.matches(":focus")&&s.key==="/"&&(r.focus(),s.preventDefault())})}function _e(t,e){t.index||window.searchData&&(e.classList.remove("loading"),e.classList.add("ready"),t.data=window.searchData,t.index=ce.Index.load(window.searchData.index))}function Re(t,e,r,n){if(_e(n,t),!n.index||!n.data)return;e.textContent="";let i=r.value.trim(),s=i?n.index.search(`*${i}*`):[];for(let o=0;oa.score-o.score);for(let o=0,a=Math.min(10,s.length);o${le(l.parent,i)}.${u}`);let d=document.createElement("li");d.classList.value=l.classes??"";let m=document.createElement("a");m.href=n.base+l.url,m.innerHTML=u,d.append(m),e.appendChild(d)}}function ue(t,e){let r=t.querySelector(".current");if(!r)r=t.querySelector(e==1?"li:first-child":"li:last-child"),r&&r.classList.add("current");else{let n=r;if(e===1)do n=n.nextElementSibling??void 0;while(n instanceof HTMLElement&&n.offsetParent==null);else do n=n.previousElementSibling??void 0;while(n instanceof HTMLElement&&n.offsetParent==null);n&&(r.classList.remove("current"),n.classList.add("current"))}}function Fe(t,e){let r=t.querySelector(".current");if(r||(r=t.querySelector("li:first-child")),r){let n=r.querySelector("a");n&&(window.location.href=n.href),e.blur()}}function le(t,e){if(e==="")return t;let r=t.toLocaleLowerCase(),n=e.toLocaleLowerCase(),i=[],s=0,o=r.indexOf(n);for(;o!=-1;)i.push(K(t.substring(s,o)),`<b>${K(t.substring(o,o+n.length))}</b>`),s=o+n.length,o=r.indexOf(n,s);return
i.push(K(t.substring(s))),i.join("")}var Me={"&":"&amp;","<":"&lt;",">":"&gt;","'":"&#039;",'"':"&quot;"};function K(t){return t.replace(/[&<>"'"]/g,e=>Me[e])}var P=class{constructor(e){this.el=e.el,this.app=e.app}};var M="mousedown",fe="mousemove",N="mouseup",J={x:0,y:0},he=!1,ee=!1,De=!1,D=!1,pe=/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent);document.documentElement.classList.add(pe?"is-mobile":"not-mobile");pe&&"ontouchstart"in document.documentElement&&(De=!0,M="touchstart",fe="touchmove",N="touchend");document.addEventListener(M,t=>{ee=!0,D=!1;let e=M=="touchstart"?t.targetTouches[0]:t;J.y=e.pageY||0,J.x=e.pageX||0});document.addEventListener(fe,t=>{if(ee&&!D){let e=M=="touchstart"?t.targetTouches[0]:t,r=J.x-(e.pageX||0),n=J.y-(e.pageY||0);D=Math.sqrt(r*r+n*n)>10}});document.addEventListener(N,()=>{ee=!1});document.addEventListener("click",t=>{he&&(t.preventDefault(),t.stopImmediatePropagation(),he=!1)});var X=class extends P{constructor(r){super(r);this.className=this.el.dataset.toggle||"",this.el.addEventListener(N,n=>this.onPointerUp(n)),this.el.addEventListener("click",n=>n.preventDefault()),document.addEventListener(M,n=>this.onDocumentPointerDown(n)),document.addEventListener(N,n=>this.onDocumentPointerUp(n))}setActive(r){if(this.active==r)return;this.active=r,document.documentElement.classList.toggle("has-"+this.className,r),this.el.classList.toggle("active",r);let n=(this.active?"to-has-":"from-has-")+this.className;document.documentElement.classList.add(n),setTimeout(()=>document.documentElement.classList.remove(n),500)}onPointerUp(r){D||(this.setActive(!0),r.preventDefault())}onDocumentPointerDown(r){if(this.active){if(r.target.closest(".col-sidebar, .tsd-filter-group"))return;this.setActive(!1)}}onDocumentPointerUp(r){if(!D&&this.active&&r.target.closest(".col-sidebar")){let n=r.target.closest("a");if(n){let i=window.location.href;i.indexOf("#")!=-1&&(i=i.substring(0,i.indexOf("#"))),n.href.substring(0,i.length)==i&&setTimeout(()=>this.setActive(!1),250)}}}};var te;try{te=localStorage}catch{te={getItem(){return null},setItem(){}}}var Q=te;var me=document.head.appendChild(document.createElement("style"));me.dataset.for="filters";var Y=class extends P{constructor(r){super(r);this.key=`filter-${this.el.name}`,this.value=this.el.checked,this.el.addEventListener("change",()=>{this.setLocalStorage(this.el.checked)}),this.setLocalStorage(this.fromLocalStorage()),me.innerHTML+=`html:not(.${this.key}) .tsd-is-${this.el.name} { display: none; } +`}fromLocalStorage(){let r=Q.getItem(this.key);return r?r==="true":this.el.checked}setLocalStorage(r){Q.setItem(this.key,r.toString()),this.value=r,this.handleValueChange()}handleValueChange(){this.el.checked=this.value,document.documentElement.classList.toggle(this.key,this.value),this.app.filterChanged(),document.querySelectorAll(".tsd-index-section").forEach(r=>{r.style.display="block";let n=Array.from(r.querySelectorAll(".tsd-index-link")).every(i=>i.offsetParent==null);r.style.display=n?"none":"block"})}};var Z=class extends P{constructor(r){super(r);this.summary=this.el.querySelector(".tsd-accordion-summary"),this.icon=this.summary.querySelector("svg"),this.key=`tsd-accordion-${this.summary.dataset.key??this.summary.textContent.trim().replace(/\s+/g,"-").toLowerCase()}`;let
n=Q.getItem(this.key);this.el.open=n?n==="true":this.el.open,this.el.addEventListener("toggle",()=>this.update()),this.update()}update(){this.icon.style.transform=`rotate(${this.el.open?0:-90}deg)`,Q.setItem(this.key,this.el.open.toString())}};function ve(t){let e=Q.getItem("tsd-theme")||"os";t.value=e,ye(e),t.addEventListener("change",()=>{Q.setItem("tsd-theme",t.value),ye(t.value)})}function ye(t){document.documentElement.dataset.theme=t}de();G(X,"a[data-toggle]");G(Z,".tsd-index-accordion");G(Y,".tsd-filter-item input[type=checkbox]");var ge=document.getElementById("tsd-theme");ge&&ve(ge);var Ae=new U;Object.defineProperty(window,"app",{value:Ae});document.querySelectorAll("summary a").forEach(t=>{t.addEventListener("click",()=>{location.assign(t.href)})});})(); +/*! Bundled license information: + +lunr/lunr.js: + (** + * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9 + * Copyright (C) 2020 Oliver Nightingale + * @license MIT + *) + (*! + * lunr.utils + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Set + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.tokenizer + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Pipeline + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Vector + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.stemmer + * Copyright (C) 2020 Oliver Nightingale + * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt + *) + (*! + * lunr.stopWordFilter + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.trimmer + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.TokenSet + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Index + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Builder + * Copyright (C) 2020 Oliver Nightingale + *) +*/ diff --git a/dist/docs/assets/search.js b/dist/docs/assets/search.js new file mode 100644 index 0000000..9d2bd55 --- /dev/null +++ b/dist/docs/assets/search.js @@ -0,0 +1 @@ +window.searchData = 
JSON.parse("{\"rows\":[{\"kind\":128,\"name\":\"GaussianBlurBackgroundProcessor\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html\",\"classes\":\"\"},{\"kind\":512,\"name\":\"constructor\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html#constructor\",\"classes\":\"\",\"parent\":\"GaussianBlurBackgroundProcessor\"},{\"kind\":262144,\"name\":\"blurFilterRadius\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html#blurFilterRadius\",\"classes\":\"\",\"parent\":\"GaussianBlurBackgroundProcessor\"},{\"kind\":262144,\"name\":\"maskBlurRadius\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html#maskBlurRadius\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessor\"},{\"kind\":2048,\"name\":\"loadModel\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html#loadModel\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessor\"},{\"kind\":2048,\"name\":\"processFrame\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html#processFrame\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessor\"},{\"kind\":256,\"name\":\"GaussianBlurBackgroundProcessorOptions\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html\",\"classes\":\"\"},{\"kind\":1024,\"name\":\"blurFilterRadius\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html#blurFilterRadius\",\"classes\":\"\",\"parent\":\"GaussianBlurBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"assetsPath\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html#assetsPath\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"debounce\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html#debounce\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"maskBlurRadius\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html#maskBlurRadius\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"pipeline\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html#pipeline\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessorOptions\"},{\"kind\":8,\"name\":\"ImageFit\",\"url\":\"enums/ImageFit.html\",\"classes\":\"\"},{\"kind\":16,\"name\":\"Contain\",\"url\":\"enums/ImageFit.html#Contain\",\"classes\":\"\",\"parent\":\"ImageFit\"},{\"kind\":16,\"name\":\"Cover\",\"url\":\"enums/ImageFit.html#Cover\",\"classes\":\"\",\"parent\":\"ImageFit\"},{\"kind\":16,\"name\":\"Fill\",\"url\":\"enums/ImageFit.html#Fill\",\"classes\":\"\",\"parent\":\"ImageFit\"},{\"kind\":16,\"name\":\"None\",\"url\":\"enums/ImageFit.html#None\",\"classes\":\"\",\"parent\":\"ImageFit\"},{\"kind\":8,\"name\":\"Pipeline\",\"url\":\"enums/Pipeline.html\",\"classes\":\"\"},{\"kind\":16,\"name\":\"Canvas2D\",\"url\":\"enums/Pipeline.html#Canvas2D\",\"classes\":\"\",\"parent\":\"Pipeline\"},{\"kind\":16,\"name\":\"WebGL2\",\"url\":\"enums/Pipeline.html#WebGL2\",\"classes\":\"\",\"parent\":\"Pipeline\"},{\"kind\":32,\"name\":\"isSupported\",\"url\":\"variables/isSupported.html\",\"classes\":\"\"},{\"kind\":32,\"name\":\"version\",\"url\":\"variables/version.html\",\"classes\":\"\"},{\"kind\":128,\"name\":\"VirtualBackgroundProcessor\",\"url\":\"classes/VirtualBackgroundProcessor.html\",\"classes\":\"\"},{\"kind\":512,\"name\":\"constructor\",\"url\":\"classes/VirtualBackgroundProcessor.html#constructor\",\"classes\":\"\",\"parent\":\"Virt
ualBackgroundProcessor\"},{\"kind\":262144,\"name\":\"backgroundImage\",\"url\":\"classes/VirtualBackgroundProcessor.html#backgroundImage\",\"classes\":\"\",\"parent\":\"VirtualBackgroundProcessor\"},{\"kind\":262144,\"name\":\"fitType\",\"url\":\"classes/VirtualBackgroundProcessor.html#fitType\",\"classes\":\"\",\"parent\":\"VirtualBackgroundProcessor\"},{\"kind\":262144,\"name\":\"maskBlurRadius\",\"url\":\"classes/VirtualBackgroundProcessor.html#maskBlurRadius\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessor\"},{\"kind\":2048,\"name\":\"loadModel\",\"url\":\"classes/VirtualBackgroundProcessor.html#loadModel\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessor\"},{\"kind\":2048,\"name\":\"processFrame\",\"url\":\"classes/VirtualBackgroundProcessor.html#processFrame\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessor\"},{\"kind\":256,\"name\":\"VirtualBackgroundProcessorOptions\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html\",\"classes\":\"\"},{\"kind\":1024,\"name\":\"backgroundImage\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#backgroundImage\",\"classes\":\"\",\"parent\":\"VirtualBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"fitType\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#fitType\",\"classes\":\"\",\"parent\":\"VirtualBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"assetsPath\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#assetsPath\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"debounce\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#debounce\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"maskBlurRadius\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#maskBlurRadius\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"pipeline\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#pipeline\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessorOptions\"}],\"index\":{\"version\":\"2.3.9\",\"fields\":[\"name\",\"comment\"],\"fieldVectors\":[[\"name/0\",[0,32.055]],[\"comment/0\",[]],[\"name/1\",[1,26.946]],[\"comment/1\",[]],[\"name/2\",[2,26.946]],[\"comment/2\",[]],[\"name/3\",[3,21.068]],[\"comment/3\",[]],[\"name/4\",[4,26.946]],[\"comment/4\",[]],[\"name/5\",[5,26.946]],[\"comment/5\",[]],[\"name/6\",[6,32.055]],[\"comment/6\",[]],[\"name/7\",[2,26.946]],[\"comment/7\",[]],[\"name/8\",[7,26.946]],[\"comment/8\",[]],[\"name/9\",[8,26.946]],[\"comment/9\",[]],[\"name/10\",[3,21.068]],[\"comment/10\",[]],[\"name/11\",[9,23.582]],[\"comment/11\",[]],[\"name/12\",[10,32.055]],[\"comment/12\",[]],[\"name/13\",[11,32.055]],[\"comment/13\",[]],[\"name/14\",[12,32.055]],[\"comment/14\",[]],[\"name/15\",[13,32.055]],[\"comment/15\",[]],[\"name/16\",[14,32.055]],[\"comment/16\",[]],[\"name/17\",[9,23.582]],[\"comment/17\",[]],[\"name/18\",[15,32.055]],[\"comment/18\",[]],[\"name/19\",[16,32.055]],[\"comment/19\",[]],[\"name/20\",[17,32.055]],[\"comment/20\",[]],[\"name/21\",[18,32.055]],[\"comment/21\",[]],[\"name/22\",[19,32.055]],[\"comment/22\",[]],[\"name/23\",[1,26.946]],[\"comment/23\",[]],[\"name/24\",[20,26.946]],[\"comment/24\",[]],[\"name/25\",[21,26.946]],[\"comment/25\",[]],[\"name/26\",[3,21.068]],[\"comment/26\",[]],[\"name/27\",[4,26.946]],[\"comment/27\",[]],[\"name/28\",[5,26.946]],[\"co
mment/28\",[]],[\"name/29\",[22,32.055]],[\"comment/29\",[]],[\"name/30\",[20,26.946]],[\"comment/30\",[]],[\"name/31\",[21,26.946]],[\"comment/31\",[]],[\"name/32\",[7,26.946]],[\"comment/32\",[]],[\"name/33\",[8,26.946]],[\"comment/33\",[]],[\"name/34\",[3,21.068]],[\"comment/34\",[]],[\"name/35\",[9,23.582]],[\"comment/35\",[]]],\"invertedIndex\":[[\"assetspath\",{\"_index\":7,\"name\":{\"8\":{},\"32\":{}},\"comment\":{}}],[\"backgroundimage\",{\"_index\":20,\"name\":{\"24\":{},\"30\":{}},\"comment\":{}}],[\"blurfilterradius\",{\"_index\":2,\"name\":{\"2\":{},\"7\":{}},\"comment\":{}}],[\"canvas2d\",{\"_index\":15,\"name\":{\"18\":{}},\"comment\":{}}],[\"constructor\",{\"_index\":1,\"name\":{\"1\":{},\"23\":{}},\"comment\":{}}],[\"contain\",{\"_index\":11,\"name\":{\"13\":{}},\"comment\":{}}],[\"cover\",{\"_index\":12,\"name\":{\"14\":{}},\"comment\":{}}],[\"debounce\",{\"_index\":8,\"name\":{\"9\":{},\"33\":{}},\"comment\":{}}],[\"fill\",{\"_index\":13,\"name\":{\"15\":{}},\"comment\":{}}],[\"fittype\",{\"_index\":21,\"name\":{\"25\":{},\"31\":{}},\"comment\":{}}],[\"gaussianblurbackgroundprocessor\",{\"_index\":0,\"name\":{\"0\":{}},\"comment\":{}}],[\"gaussianblurbackgroundprocessoroptions\",{\"_index\":6,\"name\":{\"6\":{}},\"comment\":{}}],[\"imagefit\",{\"_index\":10,\"name\":{\"12\":{}},\"comment\":{}}],[\"issupported\",{\"_index\":17,\"name\":{\"20\":{}},\"comment\":{}}],[\"loadmodel\",{\"_index\":4,\"name\":{\"4\":{},\"27\":{}},\"comment\":{}}],[\"maskblurradius\",{\"_index\":3,\"name\":{\"3\":{},\"10\":{},\"26\":{},\"34\":{}},\"comment\":{}}],[\"none\",{\"_index\":14,\"name\":{\"16\":{}},\"comment\":{}}],[\"pipeline\",{\"_index\":9,\"name\":{\"11\":{},\"17\":{},\"35\":{}},\"comment\":{}}],[\"processframe\",{\"_index\":5,\"name\":{\"5\":{},\"28\":{}},\"comment\":{}}],[\"version\",{\"_index\":18,\"name\":{\"21\":{}},\"comment\":{}}],[\"virtualbackgroundprocessor\",{\"_index\":19,\"name\":{\"22\":{}},\"comment\":{}}],[\"virtualbackgroundprocessoroptions\",{\"_index\":22,\"name\":{\"29\":{}},\"comment\":{}}],[\"webgl2\",{\"_index\":16,\"name\":{\"19\":{}},\"comment\":{}}]],\"pipeline\":[]}}"); \ No newline at end of file diff --git a/dist/docs/assets/style.css b/dist/docs/assets/style.css new file mode 100644 index 0000000..258146f --- /dev/null +++ b/dist/docs/assets/style.css @@ -0,0 +1,1379 @@ +:root { + /* Light */ + --light-color-background: #f2f4f8; + --light-color-background-secondary: #eff0f1; + --light-color-warning-text: #222; + --light-color-background-warning: #e6e600; + --light-color-icon-background: var(--light-color-background); + --light-color-accent: #c5c7c9; + --light-color-active-menu-item: var(--light-color-accent); + --light-color-text: #222; + --light-color-text-aside: #6e6e6e; + --light-color-link: #1f70c2; + + --light-color-ts-project: #b111c9; + --light-color-ts-module: var(--light-color-ts-project); + --light-color-ts-namespace: var(--light-color-ts-project); + --light-color-ts-enum: #7e6f15; + --light-color-ts-enum-member: var(--light-color-ts-enum); + --light-color-ts-variable: #4760ec; + --light-color-ts-function: #572be7; + --light-color-ts-class: #1f70c2; + --light-color-ts-interface: #108024; + --light-color-ts-constructor: var(--light-color-ts-class); + --light-color-ts-property: var(--light-color-ts-variable); + --light-color-ts-method: var(--light-color-ts-function); + --light-color-ts-call-signature: var(--light-color-ts-method); + --light-color-ts-index-signature: var(--light-color-ts-property); + --light-color-ts-constructor-signature: 
var(--light-color-ts-constructor); + --light-color-ts-parameter: var(--light-color-ts-variable); + /* type literal not included as links will never be generated to it */ + --light-color-ts-type-parameter: var(--light-color-ts-type-alias); + --light-color-ts-accessor: var(--light-color-ts-property); + --light-color-ts-get-signature: var(--light-color-ts-accessor); + --light-color-ts-set-signature: var(--light-color-ts-accessor); + --light-color-ts-type-alias: #d51270; + /* reference not included as links will be colored with the kind that it points to */ + + --light-external-icon: url("data:image/svg+xml;utf8,"); + --light-color-scheme: light; + + /* Dark */ + --dark-color-background: #2b2e33; + --dark-color-background-secondary: #1e2024; + --dark-color-background-warning: #bebe00; + --dark-color-warning-text: #222; + --dark-color-icon-background: var(--dark-color-background-secondary); + --dark-color-accent: #9096a2; + --dark-color-active-menu-item: #5d5d6a; + --dark-color-text: #f5f5f5; + --dark-color-text-aside: #dddddd; + --dark-color-link: #00aff4; + + --dark-color-ts-project: #e358ff; + --dark-color-ts-module: var(--dark-color-ts-project); + --dark-color-ts-namespace: var(--dark-color-ts-project); + --dark-color-ts-enum: #f4d93e; + --dark-color-ts-enum-member: var(--dark-color-ts-enum); + --dark-color-ts-variable: #798dff; + --dark-color-ts-function: #a280ff; + --dark-color-ts-class: #8ac4ff; + --dark-color-ts-interface: #6cff87; + --dark-color-ts-constructor: var(--dark-color-ts-class); + --dark-color-ts-property: var(--dark-color-ts-variable); + --dark-color-ts-method: var(--dark-color-ts-function); + --dark-color-ts-call-signature: var(--dark-color-ts-method); + --dark-color-ts-index-signature: var(--dark-color-ts-property); + --dark-color-ts-constructor-signature: var(--dark-color-ts-constructor); + --dark-color-ts-parameter: var(--dark-color-ts-variable); + /* type literal not included as links will never be generated to it */ + --dark-color-ts-type-parameter: var(--dark-color-ts-type-alias); + --dark-color-ts-accessor: var(--dark-color-ts-property); + --dark-color-ts-get-signature: var(--dark-color-ts-accessor); + --dark-color-ts-set-signature: var(--dark-color-ts-accessor); + --dark-color-ts-type-alias: #ff6492; + /* reference not included as links will be colored with the kind that it points to */ + + --dark-external-icon: url("data:image/svg+xml;utf8,"); + --dark-color-scheme: dark; +} + +@media (prefers-color-scheme: light) { + :root { + --color-background: var(--light-color-background); + --color-background-secondary: var(--light-color-background-secondary); + --color-background-warning: var(--light-color-background-warning); + --color-warning-text: var(--light-color-warning-text); + --color-icon-background: var(--light-color-icon-background); + --color-accent: var(--light-color-accent); + --color-active-menu-item: var(--light-color-active-menu-item); + --color-text: var(--light-color-text); + --color-text-aside: var(--light-color-text-aside); + --color-link: var(--light-color-link); + + --color-ts-module: var(--light-color-ts-module); + --color-ts-namespace: var(--light-color-ts-namespace); + --color-ts-enum: var(--light-color-ts-enum); + --color-ts-enum-member: var(--light-color-ts-enum-member); + --color-ts-variable: var(--light-color-ts-variable); + --color-ts-function: var(--light-color-ts-function); + --color-ts-class: var(--light-color-ts-class); + --color-ts-interface: var(--light-color-ts-interface); + --color-ts-constructor: var(--light-color-ts-constructor); + 
--color-ts-property: var(--light-color-ts-property); + --color-ts-method: var(--light-color-ts-method); + --color-ts-call-signature: var(--light-color-ts-call-signature); + --color-ts-index-signature: var(--light-color-ts-index-signature); + --color-ts-constructor-signature: var( + --light-color-ts-constructor-signature + ); + --color-ts-parameter: var(--light-color-ts-parameter); + --color-ts-type-parameter: var(--light-color-ts-type-parameter); + --color-ts-accessor: var(--light-color-ts-accessor); + --color-ts-get-signature: var(--light-color-ts-get-signature); + --color-ts-set-signature: var(--light-color-ts-set-signature); + --color-ts-type-alias: var(--light-color-ts-type-alias); + + --external-icon: var(--light-external-icon); + --color-scheme: var(--light-color-scheme); + } +} + +@media (prefers-color-scheme: dark) { + :root { + --color-background: var(--dark-color-background); + --color-background-secondary: var(--dark-color-background-secondary); + --color-background-warning: var(--dark-color-background-warning); + --color-warning-text: var(--dark-color-warning-text); + --color-icon-background: var(--dark-color-icon-background); + --color-accent: var(--dark-color-accent); + --color-active-menu-item: var(--dark-color-active-menu-item); + --color-text: var(--dark-color-text); + --color-text-aside: var(--dark-color-text-aside); + --color-link: var(--dark-color-link); + + --color-ts-module: var(--dark-color-ts-module); + --color-ts-namespace: var(--dark-color-ts-namespace); + --color-ts-enum: var(--dark-color-ts-enum); + --color-ts-enum-member: var(--dark-color-ts-enum-member); + --color-ts-variable: var(--dark-color-ts-variable); + --color-ts-function: var(--dark-color-ts-function); + --color-ts-class: var(--dark-color-ts-class); + --color-ts-interface: var(--dark-color-ts-interface); + --color-ts-constructor: var(--dark-color-ts-constructor); + --color-ts-property: var(--dark-color-ts-property); + --color-ts-method: var(--dark-color-ts-method); + --color-ts-call-signature: var(--dark-color-ts-call-signature); + --color-ts-index-signature: var(--dark-color-ts-index-signature); + --color-ts-constructor-signature: var( + --dark-color-ts-constructor-signature + ); + --color-ts-parameter: var(--dark-color-ts-parameter); + --color-ts-type-parameter: var(--dark-color-ts-type-parameter); + --color-ts-accessor: var(--dark-color-ts-accessor); + --color-ts-get-signature: var(--dark-color-ts-get-signature); + --color-ts-set-signature: var(--dark-color-ts-set-signature); + --color-ts-type-alias: var(--dark-color-ts-type-alias); + + --external-icon: var(--dark-external-icon); + --color-scheme: var(--dark-color-scheme); + } +} + +html { + color-scheme: var(--color-scheme); +} + +body { + margin: 0; +} + +:root[data-theme="light"] { + --color-background: var(--light-color-background); + --color-background-secondary: var(--light-color-background-secondary); + --color-background-warning: var(--light-color-background-warning); + --color-warning-text: var(--light-color-warning-text); + --color-icon-background: var(--light-color-icon-background); + --color-accent: var(--light-color-accent); + --color-active-menu-item: var(--light-color-active-menu-item); + --color-text: var(--light-color-text); + --color-text-aside: var(--light-color-text-aside); + --color-link: var(--light-color-link); + + --color-ts-module: var(--light-color-ts-module); + --color-ts-namespace: var(--light-color-ts-namespace); + --color-ts-enum: var(--light-color-ts-enum); + --color-ts-enum-member: var(--light-color-ts-enum-member); + 
--color-ts-variable: var(--light-color-ts-variable); + --color-ts-function: var(--light-color-ts-function); + --color-ts-class: var(--light-color-ts-class); + --color-ts-interface: var(--light-color-ts-interface); + --color-ts-constructor: var(--light-color-ts-constructor); + --color-ts-property: var(--light-color-ts-property); + --color-ts-method: var(--light-color-ts-method); + --color-ts-call-signature: var(--light-color-ts-call-signature); + --color-ts-index-signature: var(--light-color-ts-index-signature); + --color-ts-constructor-signature: var( + --light-color-ts-constructor-signature + ); + --color-ts-parameter: var(--light-color-ts-parameter); + --color-ts-type-parameter: var(--light-color-ts-type-parameter); + --color-ts-accessor: var(--light-color-ts-accessor); + --color-ts-get-signature: var(--light-color-ts-get-signature); + --color-ts-set-signature: var(--light-color-ts-set-signature); + --color-ts-type-alias: var(--light-color-ts-type-alias); + + --external-icon: var(--light-external-icon); + --color-scheme: var(--light-color-scheme); +} + +:root[data-theme="dark"] { + --color-background: var(--dark-color-background); + --color-background-secondary: var(--dark-color-background-secondary); + --color-background-warning: var(--dark-color-background-warning); + --color-warning-text: var(--dark-color-warning-text); + --color-icon-background: var(--dark-color-icon-background); + --color-accent: var(--dark-color-accent); + --color-active-menu-item: var(--dark-color-active-menu-item); + --color-text: var(--dark-color-text); + --color-text-aside: var(--dark-color-text-aside); + --color-link: var(--dark-color-link); + + --color-ts-module: var(--dark-color-ts-module); + --color-ts-namespace: var(--dark-color-ts-namespace); + --color-ts-enum: var(--dark-color-ts-enum); + --color-ts-enum-member: var(--dark-color-ts-enum-member); + --color-ts-variable: var(--dark-color-ts-variable); + --color-ts-function: var(--dark-color-ts-function); + --color-ts-class: var(--dark-color-ts-class); + --color-ts-interface: var(--dark-color-ts-interface); + --color-ts-constructor: var(--dark-color-ts-constructor); + --color-ts-property: var(--dark-color-ts-property); + --color-ts-method: var(--dark-color-ts-method); + --color-ts-call-signature: var(--dark-color-ts-call-signature); + --color-ts-index-signature: var(--dark-color-ts-index-signature); + --color-ts-constructor-signature: var( + --dark-color-ts-constructor-signature + ); + --color-ts-parameter: var(--dark-color-ts-parameter); + --color-ts-type-parameter: var(--dark-color-ts-type-parameter); + --color-ts-accessor: var(--dark-color-ts-accessor); + --color-ts-get-signature: var(--dark-color-ts-get-signature); + --color-ts-set-signature: var(--dark-color-ts-set-signature); + --color-ts-type-alias: var(--dark-color-ts-type-alias); + + --external-icon: var(--dark-external-icon); + --color-scheme: var(--dark-color-scheme); +} + +.always-visible, +.always-visible .tsd-signatures { + display: inherit !important; +} + +h1, +h2, +h3, +h4, +h5, +h6 { + line-height: 1.2; +} + +h1 > a, +h2 > a, +h3 > a, +h4 > a, +h5 > a, +h6 > a { + text-decoration: none; + color: var(--color-text); +} + +h1 { + font-size: 1.875rem; + margin: 0.67rem 0; +} + +h2 { + font-size: 1.5rem; + margin: 0.83rem 0; +} + +h3 { + font-size: 1.25rem; + margin: 1rem 0; +} + +h4 { + font-size: 1.05rem; + margin: 1.33rem 0; +} + +h5 { + font-size: 1rem; + margin: 1.5rem 0; +} + +h6 { + font-size: 0.875rem; + margin: 2.33rem 0; +} + +.uppercase { + text-transform: uppercase; +} + +dl, +menu, 
+ol, +ul { + margin: 1em 0; +} + +dd { + margin: 0 0 0 40px; +} + +.container { + max-width: 1700px; + padding: 0 2rem; +} + +/* Footer */ +.tsd-generator { + border-top: 1px solid var(--color-accent); + padding-top: 1rem; + padding-bottom: 1rem; + max-height: 3.5rem; +} + +.tsd-generator > p { + margin-top: 0; + margin-bottom: 0; + padding: 0 1rem; +} + +.container-main { + margin: 0 auto; + /* toolbar, footer, margin */ + min-height: calc(100vh - 41px - 56px - 4rem); +} + +@keyframes fade-in { + from { + opacity: 0; + } + to { + opacity: 1; + } +} +@keyframes fade-out { + from { + opacity: 1; + visibility: visible; + } + to { + opacity: 0; + } +} +@keyframes fade-in-delayed { + 0% { + opacity: 0; + } + 33% { + opacity: 0; + } + 100% { + opacity: 1; + } +} +@keyframes fade-out-delayed { + 0% { + opacity: 1; + visibility: visible; + } + 66% { + opacity: 0; + } + 100% { + opacity: 0; + } +} +@keyframes pop-in-from-right { + from { + transform: translate(100%, 0); + } + to { + transform: translate(0, 0); + } +} +@keyframes pop-out-to-right { + from { + transform: translate(0, 0); + visibility: visible; + } + to { + transform: translate(100%, 0); + } +} +body { + background: var(--color-background); + font-family: "Segoe UI", sans-serif; + font-size: 16px; + color: var(--color-text); +} + +a { + color: var(--color-link); + text-decoration: none; +} +a:hover { + text-decoration: underline; +} +a.external[target="_blank"] { + background-image: var(--external-icon); + background-position: top 3px right; + background-repeat: no-repeat; + padding-right: 13px; +} + +code, +pre { + font-family: Menlo, Monaco, Consolas, "Courier New", monospace; + padding: 0.2em; + margin: 0; + font-size: 0.875rem; + border-radius: 0.8em; +} + +pre { + position: relative; + white-space: pre; + white-space: pre-wrap; + word-wrap: break-word; + padding: 10px; + border: 1px solid var(--color-accent); +} +pre code { + padding: 0; + font-size: 100%; +} +pre > button { + position: absolute; + top: 10px; + right: 10px; + opacity: 0; + transition: opacity 0.1s; + box-sizing: border-box; +} +pre:hover > button, +pre > button.visible { + opacity: 1; +} + +blockquote { + margin: 1em 0; + padding-left: 1em; + border-left: 4px solid gray; +} + +.tsd-typography { + line-height: 1.333em; +} +.tsd-typography ul { + list-style: square; + padding: 0 0 0 20px; + margin: 0; +} +.tsd-typography .tsd-index-panel h3, +.tsd-index-panel .tsd-typography h3, +.tsd-typography h4, +.tsd-typography h5, +.tsd-typography h6 { + font-size: 1em; +} +.tsd-typography h5, +.tsd-typography h6 { + font-weight: normal; +} +.tsd-typography p, +.tsd-typography ul, +.tsd-typography ol { + margin: 1em 0; +} +.tsd-typography table { + border-collapse: collapse; + border: none; +} +.tsd-typography td, +.tsd-typography th { + padding: 6px 13px; + border: 1px solid var(--color-accent); +} +.tsd-typography thead, +.tsd-typography tr:nth-child(even) { + background-color: var(--color-background-secondary); +} + +.tsd-breadcrumb { + margin: 0; + padding: 0; + color: var(--color-text-aside); +} +.tsd-breadcrumb a { + color: var(--color-text-aside); + text-decoration: none; +} +.tsd-breadcrumb a:hover { + text-decoration: underline; +} +.tsd-breadcrumb li { + display: inline; +} +.tsd-breadcrumb li:after { + content: " / "; +} + +.tsd-comment-tags { + display: flex; + flex-direction: column; +} +dl.tsd-comment-tag-group { + display: flex; + align-items: center; + overflow: hidden; + margin: 0.5em 0; +} +dl.tsd-comment-tag-group dt { + display: flex; + margin-right: 
0.5em; + font-size: 0.875em; + font-weight: normal; +} +dl.tsd-comment-tag-group dd { + margin: 0; +} +code.tsd-tag { + padding: 0.25em 0.4em; + border: 0.1em solid var(--color-accent); + margin-right: 0.25em; + font-size: 70%; +} +h1 code.tsd-tag:first-of-type { + margin-left: 0.25em; +} + +dl.tsd-comment-tag-group dd:before, +dl.tsd-comment-tag-group dd:after { + content: " "; +} +dl.tsd-comment-tag-group dd pre, +dl.tsd-comment-tag-group dd:after { + clear: both; +} +dl.tsd-comment-tag-group p { + margin: 0; +} + +.tsd-panel.tsd-comment .lead { + font-size: 1.1em; + line-height: 1.333em; + margin-bottom: 2em; +} +.tsd-panel.tsd-comment .lead:last-child { + margin-bottom: 0; +} + +.tsd-filter-visibility h4 { + font-size: 1rem; + padding-top: 0.75rem; + padding-bottom: 0.5rem; + margin: 0; +} +.tsd-filter-item:not(:last-child) { + margin-bottom: 0.5rem; +} +.tsd-filter-input { + display: flex; + width: fit-content; + width: -moz-fit-content; + align-items: center; + user-select: none; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + cursor: pointer; +} +.tsd-filter-input input[type="checkbox"] { + cursor: pointer; + position: absolute; + width: 1.5em; + height: 1.5em; + opacity: 0; +} +.tsd-filter-input input[type="checkbox"]:disabled { + pointer-events: none; +} +.tsd-filter-input svg { + cursor: pointer; + width: 1.5em; + height: 1.5em; + margin-right: 0.5em; + border-radius: 0.33em; + /* Leaving this at full opacity breaks event listeners on Firefox. + Don't remove unless you know what you're doing. */ + opacity: 0.99; +} +.tsd-filter-input input[type="checkbox"]:focus + svg { + transform: scale(0.95); +} +.tsd-filter-input input[type="checkbox"]:focus:not(:focus-visible) + svg { + transform: scale(1); +} +.tsd-checkbox-background { + fill: var(--color-accent); +} +input[type="checkbox"]:checked ~ svg .tsd-checkbox-checkmark { + stroke: var(--color-text); +} +.tsd-filter-input input:disabled ~ svg > .tsd-checkbox-background { + fill: var(--color-background); + stroke: var(--color-accent); + stroke-width: 0.25rem; +} +.tsd-filter-input input:disabled ~ svg > .tsd-checkbox-checkmark { + stroke: var(--color-accent); +} + +.tsd-theme-toggle { + padding-top: 0.75rem; +} +.tsd-theme-toggle > h4 { + display: inline; + vertical-align: middle; + margin-right: 0.75rem; +} + +.tsd-hierarchy { + list-style: square; + margin: 0; +} +.tsd-hierarchy .target { + font-weight: bold; +} + +.tsd-panel-group.tsd-index-group { + margin-bottom: 0; +} +.tsd-index-panel .tsd-index-list { + list-style: none; + line-height: 1.333em; + margin: 0; + padding: 0.25rem 0 0 0; + overflow: hidden; + display: grid; + grid-template-columns: repeat(3, 1fr); + column-gap: 1rem; + grid-template-rows: auto; +} +@media (max-width: 1024px) { + .tsd-index-panel .tsd-index-list { + grid-template-columns: repeat(2, 1fr); + } +} +@media (max-width: 768px) { + .tsd-index-panel .tsd-index-list { + grid-template-columns: repeat(1, 1fr); + } +} +.tsd-index-panel .tsd-index-list li { + -webkit-page-break-inside: avoid; + -moz-page-break-inside: avoid; + -ms-page-break-inside: avoid; + -o-page-break-inside: avoid; + page-break-inside: avoid; +} + +.tsd-flag { + display: inline-block; + padding: 0.25em 0.4em; + border-radius: 4px; + color: var(--color-comment-tag-text); + background-color: var(--color-comment-tag); + text-indent: 0; + font-size: 75%; + line-height: 1; + font-weight: normal; +} + +.tsd-anchor { + position: relative; + top: -100px; +} + +.tsd-member { + position: relative; +} +.tsd-member 
.tsd-anchor + h3 { + display: flex; + align-items: center; + margin-top: 0; + margin-bottom: 0; + border-bottom: none; +} + +.tsd-navigation.settings { + margin: 1rem 0; +} +.tsd-navigation > a, +.tsd-navigation .tsd-accordion-summary { + width: calc(100% - 0.5rem); +} +.tsd-navigation a, +.tsd-navigation summary > span, +.tsd-page-navigation a { + display: inline-flex; + align-items: center; + padding: 0.25rem; + color: var(--color-text); + text-decoration: none; + box-sizing: border-box; +} +.tsd-navigation a.current, +.tsd-page-navigation a.current { + background: var(--color-active-menu-item); +} +.tsd-navigation a:hover, +.tsd-page-navigation a:hover { + text-decoration: underline; +} +.tsd-navigation ul, +.tsd-page-navigation ul { + margin-top: 0; + margin-bottom: 0; + padding: 0; + list-style: none; +} +.tsd-navigation li, +.tsd-page-navigation li { + padding: 0; + max-width: 100%; +} +.tsd-nested-navigation { + margin-left: 3rem; +} +.tsd-nested-navigation > li > details { + margin-left: -1.5rem; +} +.tsd-small-nested-navigation { + margin-left: 1.5rem; +} +.tsd-small-nested-navigation > li > details { + margin-left: -1.5rem; +} + +.tsd-nested-navigation > li > a, +.tsd-nested-navigation > li > span { + width: calc(100% - 1.75rem - 0.5rem); +} + +.tsd-page-navigation ul { + padding-left: 1.75rem; +} + +#tsd-sidebar-links a { + margin-top: 0; + margin-bottom: 0.5rem; + line-height: 1.25rem; +} +#tsd-sidebar-links a:last-of-type { + margin-bottom: 0; +} + +a.tsd-index-link { + padding: 0.25rem 0 !important; + font-size: 1rem; + line-height: 1.25rem; + display: inline-flex; + align-items: center; + color: var(--color-text); +} +.tsd-accordion-summary { + list-style-type: none; /* hide marker on non-safari */ + outline: none; /* broken on safari, so just hide it */ +} +.tsd-accordion-summary::-webkit-details-marker { + display: none; /* hide marker on safari */ +} +.tsd-accordion-summary, +.tsd-accordion-summary a { + user-select: none; + -moz-user-select: none; + -webkit-user-select: none; + -ms-user-select: none; + + cursor: pointer; +} +.tsd-accordion-summary a { + width: calc(100% - 1.5rem); +} +.tsd-accordion-summary > * { + margin-top: 0; + margin-bottom: 0; + padding-top: 0; + padding-bottom: 0; +} +.tsd-index-accordion .tsd-accordion-summary > svg { + margin-left: 0.25rem; +} +.tsd-index-content > :not(:first-child) { + margin-top: 0.75rem; +} +.tsd-index-heading { + margin-top: 1.5rem; + margin-bottom: 0.75rem; +} + +.tsd-kind-icon { + margin-right: 0.5rem; + width: 1.25rem; + height: 1.25rem; + min-width: 1.25rem; + min-height: 1.25rem; +} +.tsd-kind-icon path { + transform-origin: center; + transform: scale(1.1); +} +.tsd-signature > .tsd-kind-icon { + margin-right: 0.8rem; +} + +.tsd-panel { + margin-bottom: 2.5rem; +} +.tsd-panel.tsd-member { + margin-bottom: 4rem; +} +.tsd-panel:empty { + display: none; +} +.tsd-panel > h1, +.tsd-panel > h2, +.tsd-panel > h3 { + margin: 1.5rem -1.5rem 0.75rem -1.5rem; + padding: 0 1.5rem 0.75rem 1.5rem; +} +.tsd-panel > h1.tsd-before-signature, +.tsd-panel > h2.tsd-before-signature, +.tsd-panel > h3.tsd-before-signature { + margin-bottom: 0; + border-bottom: none; +} + +.tsd-panel-group { + margin: 4rem 0; +} +.tsd-panel-group.tsd-index-group { + margin: 2rem 0; +} +.tsd-panel-group.tsd-index-group details { + margin: 2rem 0; +} + +#tsd-search { + transition: background-color 0.2s; +} +#tsd-search .title { + position: relative; + z-index: 2; +} +#tsd-search .field { + position: absolute; + left: 0; + top: 0; + right: 2.5rem; + height: 
100%; +} +#tsd-search .field input { + box-sizing: border-box; + position: relative; + top: -50px; + z-index: 1; + width: 100%; + padding: 0 10px; + opacity: 0; + outline: 0; + border: 0; + background: transparent; + color: var(--color-text); +} +#tsd-search .field label { + position: absolute; + overflow: hidden; + right: -40px; +} +#tsd-search .field input, +#tsd-search .title, +#tsd-toolbar-links a { + transition: opacity 0.2s; +} +#tsd-search .results { + position: absolute; + visibility: hidden; + top: 40px; + width: 100%; + margin: 0; + padding: 0; + list-style: none; + box-shadow: 0 0 4px rgba(0, 0, 0, 0.25); +} +#tsd-search .results li { + padding: 0 10px; + background-color: var(--color-background); +} +#tsd-search .results li:nth-child(even) { + background-color: var(--color-background-secondary); +} +#tsd-search .results li.state { + display: none; +} +#tsd-search .results li.current:not(.no-results), +#tsd-search .results li:hover:not(.no-results) { + background-color: var(--color-accent); +} +#tsd-search .results a { + display: block; +} +#tsd-search .results a:before { + top: 10px; +} +#tsd-search .results span.parent { + color: var(--color-text-aside); + font-weight: normal; +} +#tsd-search.has-focus { + background-color: var(--color-accent); +} +#tsd-search.has-focus .field input { + top: 0; + opacity: 1; +} +#tsd-search.has-focus .title, +#tsd-search.has-focus #tsd-toolbar-links a { + z-index: 0; + opacity: 0; +} +#tsd-search.has-focus .results { + visibility: visible; +} +#tsd-search.loading .results li.state.loading { + display: block; +} +#tsd-search.failure .results li.state.failure { + display: block; +} + +#tsd-toolbar-links { + position: absolute; + top: 0; + right: 2rem; + height: 100%; + display: flex; + align-items: center; + justify-content: flex-end; +} +#tsd-toolbar-links a { + margin-left: 1.5rem; +} +#tsd-toolbar-links a:hover { + text-decoration: underline; +} + +.tsd-signature { + margin: 0 0 1rem 0; + padding: 1rem 0.5rem; + border: 1px solid var(--color-accent); + font-family: Menlo, Monaco, Consolas, "Courier New", monospace; + font-size: 14px; + overflow-x: auto; +} + +.tsd-signature-symbol { + color: var(--color-text-aside); + font-weight: normal; +} + +.tsd-signature-type { + font-style: italic; + font-weight: normal; +} + +.tsd-signatures { + padding: 0; + margin: 0 0 1em 0; + list-style-type: none; +} +.tsd-signatures .tsd-signature { + margin: 0; + border-color: var(--color-accent); + border-width: 1px 0; + transition: background-color 0.1s; +} +.tsd-description .tsd-signatures .tsd-signature { + border-width: 1px; +} + +ul.tsd-parameter-list, +ul.tsd-type-parameter-list { + list-style: square; + margin: 0; + padding-left: 20px; +} +ul.tsd-parameter-list > li.tsd-parameter-signature, +ul.tsd-type-parameter-list > li.tsd-parameter-signature { + list-style: none; + margin-left: -20px; +} +ul.tsd-parameter-list h5, +ul.tsd-type-parameter-list h5 { + font-size: 16px; + margin: 1em 0 0.5em 0; +} +.tsd-sources { + margin-top: 1rem; + font-size: 0.875em; +} +.tsd-sources a { + color: var(--color-text-aside); + text-decoration: underline; +} +.tsd-sources ul { + list-style: none; + padding: 0; +} + +.tsd-page-toolbar { + position: sticky; + z-index: 1; + top: 0; + left: 0; + width: 100%; + color: var(--color-text); + background: var(--color-background-secondary); + border-bottom: 1px var(--color-accent) solid; + transition: transform 0.3s ease-in-out; +} +.tsd-page-toolbar a { + color: var(--color-text); + text-decoration: none; +} +.tsd-page-toolbar 
a.title { + font-weight: bold; +} +.tsd-page-toolbar a.title:hover { + text-decoration: underline; +} +.tsd-page-toolbar .tsd-toolbar-contents { + display: flex; + justify-content: space-between; + height: 2.5rem; + margin: 0 auto; +} +.tsd-page-toolbar .table-cell { + position: relative; + white-space: nowrap; + line-height: 40px; +} +.tsd-page-toolbar .table-cell:first-child { + width: 100%; +} +.tsd-page-toolbar .tsd-toolbar-icon { + box-sizing: border-box; + line-height: 0; + padding: 12px 0; +} + +.tsd-widget { + display: inline-block; + overflow: hidden; + opacity: 0.8; + height: 40px; + transition: + opacity 0.1s, + background-color 0.2s; + vertical-align: bottom; + cursor: pointer; +} +.tsd-widget:hover { + opacity: 0.9; +} +.tsd-widget.active { + opacity: 1; + background-color: var(--color-accent); +} +.tsd-widget.no-caption { + width: 40px; +} +.tsd-widget.no-caption:before { + margin: 0; +} + +.tsd-widget.options, +.tsd-widget.menu { + display: none; +} +input[type="checkbox"] + .tsd-widget:before { + background-position: -120px 0; +} +input[type="checkbox"]:checked + .tsd-widget:before { + background-position: -160px 0; +} + +img { + max-width: 100%; +} + +.tsd-anchor-icon { + display: inline-flex; + align-items: center; + margin-left: 0.5rem; + vertical-align: middle; + color: var(--color-text); +} + +.tsd-anchor-icon svg { + width: 1em; + height: 1em; + visibility: hidden; +} + +.tsd-anchor-link:hover > .tsd-anchor-icon svg { + visibility: visible; +} + +.deprecated { + text-decoration: line-through; +} + +.warning { + padding: 1rem; + color: var(--color-warning-text); + background: var(--color-background-warning); +} + +.tsd-kind-project { + color: var(--color-ts-project); +} +.tsd-kind-module { + color: var(--color-ts-module); +} +.tsd-kind-namespace { + color: var(--color-ts-namespace); +} +.tsd-kind-enum { + color: var(--color-ts-enum); +} +.tsd-kind-enum-member { + color: var(--color-ts-enum-member); +} +.tsd-kind-variable { + color: var(--color-ts-variable); +} +.tsd-kind-function { + color: var(--color-ts-function); +} +.tsd-kind-class { + color: var(--color-ts-class); +} +.tsd-kind-interface { + color: var(--color-ts-interface); +} +.tsd-kind-constructor { + color: var(--color-ts-constructor); +} +.tsd-kind-property { + color: var(--color-ts-property); +} +.tsd-kind-method { + color: var(--color-ts-method); +} +.tsd-kind-call-signature { + color: var(--color-ts-call-signature); +} +.tsd-kind-index-signature { + color: var(--color-ts-index-signature); +} +.tsd-kind-constructor-signature { + color: var(--color-ts-constructor-signature); +} +.tsd-kind-parameter { + color: var(--color-ts-parameter); +} +.tsd-kind-type-literal { + color: var(--color-ts-type-literal); +} +.tsd-kind-type-parameter { + color: var(--color-ts-type-parameter); +} +.tsd-kind-accessor { + color: var(--color-ts-accessor); +} +.tsd-kind-get-signature { + color: var(--color-ts-get-signature); +} +.tsd-kind-set-signature { + color: var(--color-ts-set-signature); +} +.tsd-kind-type-alias { + color: var(--color-ts-type-alias); +} + +/* if we have a kind icon, don't color the text by kind */ +.tsd-kind-icon ~ span { + color: var(--color-text); +} + +* { + scrollbar-width: thin; + scrollbar-color: var(--color-accent) var(--color-icon-background); +} + +*::-webkit-scrollbar { + width: 0.75rem; +} + +*::-webkit-scrollbar-track { + background: var(--color-icon-background); +} + +*::-webkit-scrollbar-thumb { + background-color: var(--color-accent); + border-radius: 999rem; + border: 0.25rem solid 
var(--color-icon-background); +} + +/* mobile */ +@media (max-width: 769px) { + .tsd-widget.options, + .tsd-widget.menu { + display: inline-block; + } + + .container-main { + display: flex; + } + html .col-content { + float: none; + max-width: 100%; + width: 100%; + } + html .col-sidebar { + position: fixed !important; + overflow-y: auto; + -webkit-overflow-scrolling: touch; + z-index: 1024; + top: 0 !important; + bottom: 0 !important; + left: auto !important; + right: 0 !important; + padding: 1.5rem 1.5rem 0 0; + width: 75vw; + visibility: hidden; + background-color: var(--color-background); + transform: translate(100%, 0); + } + html .col-sidebar > *:last-child { + padding-bottom: 20px; + } + html .overlay { + content: ""; + display: block; + position: fixed; + z-index: 1023; + top: 0; + left: 0; + right: 0; + bottom: 0; + background-color: rgba(0, 0, 0, 0.75); + visibility: hidden; + } + + .to-has-menu .overlay { + animation: fade-in 0.4s; + } + + .to-has-menu .col-sidebar { + animation: pop-in-from-right 0.4s; + } + + .from-has-menu .overlay { + animation: fade-out 0.4s; + } + + .from-has-menu .col-sidebar { + animation: pop-out-to-right 0.4s; + } + + .has-menu body { + overflow: hidden; + } + .has-menu .overlay { + visibility: visible; + } + .has-menu .col-sidebar { + visibility: visible; + transform: translate(0, 0); + display: flex; + flex-direction: column; + gap: 1.5rem; + max-height: 100vh; + padding: 1rem 2rem; + } + .has-menu .tsd-navigation { + max-height: 100%; + } +} + +/* one sidebar */ +@media (min-width: 770px) { + .container-main { + display: grid; + grid-template-columns: minmax(0, 1fr) minmax(0, 2fr); + grid-template-areas: "sidebar content"; + margin: 2rem auto; + } + + .col-sidebar { + grid-area: sidebar; + } + .col-content { + grid-area: content; + padding: 0 1rem; + } +} +@media (min-width: 770px) and (max-width: 1399px) { + .col-sidebar { + max-height: calc(100vh - 2rem - 42px); + overflow: auto; + position: sticky; + top: 42px; + padding-top: 1rem; + } + .site-menu { + margin-top: 1rem; + } +} + +/* two sidebars */ +@media (min-width: 1200px) { + .container-main { + grid-template-columns: minmax(0, 1fr) minmax(0, 2.5fr) minmax(0, 20rem); + grid-template-areas: "sidebar content toc"; + } + + .col-sidebar { + display: contents; + } + + .page-menu { + grid-area: toc; + padding-left: 1rem; + } + .site-menu { + grid-area: sidebar; + } + + .site-menu { + margin-top: 1rem 0; + } + + .page-menu, + .site-menu { + max-height: calc(100vh - 2rem - 42px); + overflow: auto; + position: sticky; + top: 42px; + } +} diff --git a/dist/docs/classes/GaussianBlurBackgroundProcessor.html b/dist/docs/classes/GaussianBlurBackgroundProcessor.html new file mode 100644 index 0000000..d2d0d22 --- /dev/null +++ b/dist/docs/classes/GaussianBlurBackgroundProcessor.html @@ -0,0 +1,202 @@ +GaussianBlurBackgroundProcessor | @twilio/video-processors
+
+ +
+
+
+
+ +

Class GaussianBlurBackgroundProcessor

+
+

The GaussianBlurBackgroundProcessor, when added to a VideoTrack, +applies a Gaussian blur filter to the background of each video frame +and leaves the foreground (person(s)) untouched. Each instance of +GaussianBlurBackgroundProcessor should be added to only one VideoTrack +at a time to prevent overlapping of image data from multiple VideoTracks.

+
+
+

Example

import { createLocalVideoTrack } from 'twilio-video';
import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors';
import { simd } from 'wasm-feature-detect';

let blurBackground: GaussianBlurBackgroundProcessor;

(async() => {
const isWasmSimdSupported = await simd();

blurBackground = new GaussianBlurBackgroundProcessor({
assetsPath: 'https://my-server-path/assets',

// Enable debounce only if the browser does not support
// WASM SIMD in order to retain an acceptable frame rate.
debounce: !isWasmSimdSupported,

pipeline: Pipeline.WebGL2,
});
await blurBackground.loadModel();

const track = await createLocalVideoTrack({
// Increasing the capture resolution decreases the output FPS
// especially on browsers that do not support SIMD
// such as desktop Safari and iOS browsers, or on Chrome
// with capture resolutions above 640x480 for webgl2.
width: 640,
height: 480,

// Any frame rate above 24 fps on desktop browsers increases CPU
// usage without a noticeable increase in quality.
frameRate: 24
});
track.addProcessor(blurBackground, {
inputFrameBufferType: 'video',
outputFrameBufferContextType: 'webgl2',
});
})(); +
+
+
+

Hierarchy

+
    +
  • BackgroundProcessor +
      +
    • GaussianBlurBackgroundProcessor
+
+
+
+ +
+
+

Constructors

+
+
+

Accessors

+
+
+

Methods

+
+
+

Constructors

+
+ +
+
+

Accessors

+
+ +
    +
  • get blurFilterRadius(): number
  • +
  • +

    The current background blur filter radius in pixels.

    +
    +

    Returns number

    +
  • +
  • set blurFilterRadius(radius): void
  • +
  • +

    Set a new background blur filter radius in pixels.

    +
    +
    +

    Parameters

    +
      +
    • +
      radius: number
    +

    Returns void

    +
+
+ +
    +
  • get maskBlurRadius(): number
  • +
  • +

    The current blur radius when smoothing out the edges of the person's mask.

    +
    +

    Returns number

    +
  • +
  • set maskBlurRadius(radius): void
  • +
  • +

    Set a new blur radius to be used when smoothing out the edges of the person's mask.

    +
    +
    +

    Parameters

    +
      +
    • +
      radius: number
    +

    Returns void

    +
+
+
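Both accessors can be used to tune the effect at runtime. A minimal sketch, assuming blurBackground was constructed and loaded as in the class example above:

blurBackground.blurFilterRadius = 25; // stronger background blur, in pixels
blurBackground.maskBlurRadius = 10; // smoother edges around the person's mask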

Methods

+
+ +
    + +
  • +

    Load the segmentation model. +Call this method before attaching the processor to ensure +video frames are processed correctly.

    +
    +

    Returns Promise<void>

    +
+
+ +
    + +
  • +

    Apply a transform to the background of an input video frame while leaving +the foreground (person(s)) untouched. Any exception detected will +result in the frame being dropped.

    +
    +
    +

    Parameters

    +
      +
    • +
      inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement
      +

      The source of the input frame to process. +
      +
      +OffscreenCanvas - Good for canvas-related processing +that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]]. +
      +
      +HTMLCanvasElement - This is recommended on browsers +that don't support OffscreenCanvas, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]]. +
      +
      +HTMLVideoElement - Recommended when using [[Pipeline.WebGL2]] but +works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]]. +

      +
    • +
    • +
      outputFrameBuffer: HTMLCanvasElement
      +

      The output frame buffer to use to draw the processed frame.

      +
    +

    Returns Promise<void>

    +
+
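Note that processFrame is normally invoked by the SDK once the processor is attached via track.addProcessor. A manual call would look roughly like the sketch below, where the input video element and output canvas are hypothetical stand-ins for the buffers the SDK supplies:

const inputVideo = document.querySelector('video') as HTMLVideoElement; // hypothetical source
const outputCanvas = document.createElement('canvas'); // hypothetical sink
await blurBackground.processFrame(inputVideo, outputCanvas); // inside an async context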
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/classes/VirtualBackgroundProcessor.html b/dist/docs/classes/VirtualBackgroundProcessor.html new file mode 100644 index 0000000..73b44cb --- /dev/null +++ b/dist/docs/classes/VirtualBackgroundProcessor.html @@ -0,0 +1,227 @@ +VirtualBackgroundProcessor | @twilio/video-processors
+
+ +
+
+
+
+ +

Class VirtualBackgroundProcessor

+
+

The VirtualBackgroundProcessor, when added to a VideoTrack, +replaces the background in each video frame with a given image, +and leaves the foreground (person(s)) untouched. Each instance of +VirtualBackgroundProcessor should be added to only one VideoTrack +at a time to prevent overlapping of image data from multiple VideoTracks.

+
+
+

Example

import { createLocalVideoTrack } from 'twilio-video';
import { Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors';
import { simd } from 'wasm-feature-detect';

let virtualBackground: VirtualBackgroundProcessor;
const img = new Image();

img.onload = async () => {
const isWasmSimdSupported = await simd();

virtualBackground = new VirtualBackgroundProcessor({
assetsPath: 'https://my-server-path/assets',
backgroundImage: img,

// Enable debounce only if the browser does not support
// WASM SIMD in order to retain an acceptable frame rate.
debounce: !isWasmSimdSupported,

pipeline: Pipeline.WebGL2,
});
await virtualBackground.loadModel();

const track = await createLocalVideoTrack({
// Increasing the capture resolution decreases the output FPS
// especially on browsers that do not support SIMD
// such as desktop Safari and iOS browsers, or on Chrome
// with capture resolutions above 640x480 for webgl2.
width: 640,
height: 480,

// Any frame rate above 24 fps on desktop browsers increases CPU
// usage without a noticeable increase in quality.
frameRate: 24
});
track.addProcessor(virtualBackground, {
inputFrameBufferType: 'video',
outputFrameBufferContextType: 'webgl2',
});
};

img.src = '/background.jpg'; +
+
+
+

Hierarchy

+
    +
  • BackgroundProcessor +
      +
    • VirtualBackgroundProcessor
+
+
+
+ +
+
+

Constructors

+
+
+

Accessors

+
+
+

Methods

+
+
+

Constructors

+
+ +
+
+

Accessors

+
+ +
    +
  • get backgroundImage(): HTMLImageElement
  • +
  • +

    The HTMLImageElement representing the current background image.

    +
    +

    Returns HTMLImageElement

    +
  • +
  • set backgroundImage(image): void
  • +
  • +

    Set an HTMLImageElement as the new background image. +An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow +security guidelines +when loading the image from a different origin. Failing to do so will result in an empty output frame.

    +
    +
    +

    Parameters

    +
      +
    • +
      image: HTMLImageElement
    +

    Returns void

    +
+
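A minimal sketch of swapping the background at runtime, assuming virtualBackground was constructed as in the class example above and that the (hypothetical) image URL is served with the appropriate CORS headers:

const nextImage = new Image();
nextImage.crossOrigin = 'anonymous'; // required when loading from a different origin
nextImage.onload = () => {
  // Safe to assign: the image is fully loaded at this point.
  virtualBackground.backgroundImage = nextImage;
};
nextImage.src = 'https://my-server-path/another-background.jpg';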
+ +
    +
  • get fitType(): ImageFit
  • +
  • +

    The current [[ImageFit]] for positioning the background image in the viewport.

    +
    +

    Returns ImageFit

    +
  • +
  • set fitType(fitType): void
  • +
  • +

    Set a new [[ImageFit]] to be used for positioning the background image in the viewport.

    +
    +
    +

    Parameters

    +
    +

    Returns void

    +
+
+ +
    +
  • get maskBlurRadius(): number
  • +
  • +

    The current blur radius when smoothing out the edges of the person's mask.

    +
    +

    Returns number

    +
  • +
  • set maskBlurRadius(radius): void
  • +
  • +

    Set a new blur radius to be used when smoothing out the edges of the person's mask.

    +
    +
    +

    Parameters

    +
      +
    • +
      radius: number
    +

    Returns void

    +
+
+

Methods

+
+ +
    + +
  • +

    Load the segmentation model. +Call this method before attaching the processor to ensure +video frames are processed correctly.

    +
    +

    Returns Promise<void>

    +
+
+ +
    + +
  • +

    Apply a transform to the background of an input video frame while leaving +the foreground (person(s)) untouched. Any exception detected will +result in the frame being dropped.

    +
    +
    +

    Parameters

    +
      +
    • +
      inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement
      +

      The source of the input frame to process. +
      +
      +OffscreenCanvas - Good for canvas-related processing +that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]]. +
      +
      +HTMLCanvasElement - This is recommended on browsers +that don't support OffscreenCanvas, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]]. +
      +
      +HTMLVideoElement - Recommended when using [[Pipeline.WebGL2]] but +works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]]. +

      +
    • +
    • +
      outputFrameBuffer: HTMLCanvasElement
      +

      The output frame buffer to use to draw the processed frame.

      +
    +

    Returns Promise<void>

    +
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/enums/ImageFit.html b/dist/docs/enums/ImageFit.html new file mode 100644 index 0000000..7d00262 --- /dev/null +++ b/dist/docs/enums/ImageFit.html @@ -0,0 +1,97 @@ +ImageFit | @twilio/video-processors
+
+ +
+
+
+
+ +

Enumeration ImageFit

+
+

ImageFit specifies the positioning of an image inside a viewport.

+
+
+
+
+
+ +
+
+

Enumeration Members

+
+
+

Enumeration Members

+
+ +
Contain: "Contain"
+

Scale the image up or down to fill the viewport while preserving the aspect ratio. +The image will be fully visible, but empty space will appear in the viewport if +the aspect ratios do not match.

+
+
+
+ +
Cover: "Cover"
+

Scale the image to fill both the height and width of the viewport while preserving +the aspect ratio, cropping the image if the aspect ratios do not match.

+
+
+
+ +
Fill: "Fill"
+

Stretch the image to fill the viewport regardless of aspect ratio.

+
+
+
+ +
None: "None"
+

Ignore height and width and use the original size.

+
+
+
+
+
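For example, a sketch that assumes a VirtualBackgroundProcessor instance named virtualBackground running the Canvas2D pipeline (the only pipeline that honors fitType):

import { ImageFit } from '@twilio/video-processors';

// Keep the whole image visible, leaving empty space if aspect ratios differ.
virtualBackground.fitType = ImageFit.Contain;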

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/enums/Pipeline.html b/dist/docs/enums/Pipeline.html new file mode 100644 index 0000000..065d555 --- /dev/null +++ b/dist/docs/enums/Pipeline.html @@ -0,0 +1,84 @@ +Pipeline | @twilio/video-processors
+
+ +
+
+
+
+ +

Enumeration Pipeline

+
+

Specifies which pipeline to use when processing video frames.

+
+
+
+
+
+ +
+
+

Enumeration Members

+
+
+

Enumeration Members

+
+ +
Canvas2D: "Canvas2D"
+

Use the Canvas 2D rendering context. Some browsers, such as Safari, do not +have full support for this feature. Please test your application to make sure it works as intended. See the +browser compatibility page +for reference.

+
+
+
+ +
WebGL2: "WebGL2"
+

Use the WebGL2 canvas rendering context. Major browsers support this feature, but it does not work +on some older browser versions. Please test your application to make sure it works as intended. See the +browser compatibility page +for reference.

+
+
+
+
+
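A sketch of selecting a pipeline with simple feature detection; the detection logic and assets URL are illustrative assumptions, not part of the library:

import { GaussianBlurBackgroundProcessor, Pipeline } from '@twilio/video-processors';

// Prefer WebGL2, falling back to Canvas2D on browsers without a WebGL2 context.
const hasWebGL2 = !!document.createElement('canvas').getContext('webgl2');
const processor = new GaussianBlurBackgroundProcessor({
  assetsPath: 'https://my-server-path/assets',
  pipeline: hasWebGL2 ? Pipeline.WebGL2 : Pipeline.Canvas2D,
});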

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/index.html b/dist/docs/index.html new file mode 100644 index 0000000..e498db5 --- /dev/null +++ b/dist/docs/index.html @@ -0,0 +1,99 @@ +@twilio/video-processors
+
+ +
+
+
+
+

@twilio/video-processors

+

Twilio Video Processors

+

[!WARNING]
We are no longer allowing new customers to onboard to Twilio Video. Effective December 5th, 2026, Twilio Video will reach End of Life (EOL) and will cease to function for all customers. Customers may transition to any video provider they choose; however, we recommend migrating to the Zoom Video SDK, and we have prepared a Migration Guide. Additional information on this EOL is available in our Help Center here.

+
+

Twilio Video Processors is a collection of video processing tools that can be used with the Twilio Video JavaScript SDK to apply transformations and filters to a VideoTrack.

+

   See it live here!

+

Features

The following Video Processors are provided to apply transformations and filters to a person's background. You can also use them as a reference for creating your own Video Processors that can be used with the Twilio Video JavaScript SDK.

+ +

Prerequisites

+

Note

The Node.js and NPM requirements do not apply if the goal is to use this library as a dependency of your project. They only apply if you want to check out the source code, build the artifacts, and/or run tests.

+

Installation

NPM

You can install directly from npm.

+
npm install @twilio/video-processors --save
+
+

Using this method, you can import twilio-video-processors like so:

+
import * as VideoProcessors from '@twilio/video-processors';
+
+

Script tag

You can also copy twilio-video-processors.js from the dist/build folder and include it directly in your web app using a <script> tag.

+
<script src="https://my-server-path/twilio-video-processors.js"></script>
+
+

Using this method, twilio-video-processors.js will set a browser global:

+
const VideoProcessors = Twilio.VideoProcessors;
+
+

Assets

In order to achieve the best performance, the VideoProcessors use WebAssembly to run TensorFlow Lite for person segmentation. You need to serve the tflite model and binaries so they can be loaded properly. These files can be downloaded from the dist/build folder. Check the API docs for details and the examples folder for reference.

+

Usage

These processors run TensorFlow Lite using the MediaPipe Selfie Segmentation Landscape Model and require WebAssembly SIMD support in order to achieve the best performance. We recommend that, when calling Video.createLocalVideoTrack, the video capture constraints be set to a 24 fps frame rate with 640x480 capture dimensions. Higher resolutions can still be used for increased accuracy, but they may degrade performance, resulting in a lower output frame rate on low-powered devices.

+
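The recommendation above translates to the following sketch; the assets URL is a placeholder for wherever the dist/build files are served:

import { createLocalVideoTrack } from 'twilio-video';
import { GaussianBlurBackgroundProcessor } from '@twilio/video-processors';

(async () => {
  const processor = new GaussianBlurBackgroundProcessor({
    assetsPath: 'https://my-server-path/assets',
  });
  await processor.loadModel();

  // Recommended capture constraints for a good accuracy/performance balance.
  const track = await createLocalVideoTrack({ width: 640, height: 480, frameRate: 24 });
  track.addProcessor(processor);
})();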

Best Practice

Please check out the following pages for best practice.

+ +
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/interfaces/GaussianBlurBackgroundProcessorOptions.html b/dist/docs/interfaces/GaussianBlurBackgroundProcessorOptions.html new file mode 100644 index 0000000..611ffc5 --- /dev/null +++ b/dist/docs/interfaces/GaussianBlurBackgroundProcessorOptions.html @@ -0,0 +1,140 @@ +GaussianBlurBackgroundProcessorOptions | @twilio/video-processors
+
+ +
+
+
+
+ +

Interface GaussianBlurBackgroundProcessorOptions

+
+

Options passed to [[GaussianBlurBackgroundProcessor]] constructor.

+
+
+
+

Hierarchy

+
    +
  • BackgroundProcessorOptions +
      +
    • GaussianBlurBackgroundProcessorOptions
+
+
+
+ +
+
+

Properties

+
+ +
assetsPath: string
+

The VideoProcessors load assets dynamically depending on certain browser features. +You need to serve all the assets and provide the root path so they can be referenced properly. +These assets can be copied from the dist/build folder; copying them can be added as a step in your deployment process.

+
+
+

Example


+
+For virtual background: +
+ +
const virtualBackground = new VirtualBackgroundProcessor({
assetsPath: 'https://my-server-path/assets',
backgroundImage: img,
});
await virtualBackground.loadModel(); +
+
+For blur background: +
+ +
const blurBackground = new GaussianBlurBackgroundProcessor({
assetsPath: 'https://my-server-path/assets'
});
await blurBackground.loadModel(); +
+
+
+ +
blurFilterRadius?: number
+

The background blur filter radius to use in pixels.

+
+
+

Default

15
+
+
+
+ +
debounce?: boolean
+

Whether to skip processing every other frame to improve the output frame rate, at the cost of reduced accuracy.

+
+
+

Default

true
+
+
+
+ +
maskBlurRadius?: number
+

The blur radius to use when smoothing out the edges of the person's mask.

+
+
+

Default

8 for WebGL2 pipeline, 4 for Canvas2D pipeline
+
+
+
+ +
pipeline?: Pipeline
+

Specifies which pipeline to use when processing video frames.

+
+
+

Default

'WebGL2'
+
+
+
+
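Putting these options together, a sketch that passes the documented defaults explicitly (the assets URL is a placeholder, and GaussianBlurBackgroundProcessor and Pipeline are assumed to be imported from @twilio/video-processors):

const blurBackground = new GaussianBlurBackgroundProcessor({
  assetsPath: 'https://my-server-path/assets',
  blurFilterRadius: 15, // background blur strength, in pixels
  debounce: true, // process every other frame
  maskBlurRadius: 8, // mask edge smoothing (WebGL2 default)
  pipeline: Pipeline.WebGL2,
});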
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/interfaces/VirtualBackgroundProcessorOptions.html b/dist/docs/interfaces/VirtualBackgroundProcessorOptions.html new file mode 100644 index 0000000..c482550 --- /dev/null +++ b/dist/docs/interfaces/VirtualBackgroundProcessorOptions.html @@ -0,0 +1,152 @@ +VirtualBackgroundProcessorOptions | @twilio/video-processors
+
+ +
+
+
+
+ +

Interface VirtualBackgroundProcessorOptions

+
+

Options passed to [[VirtualBackgroundProcessor]] constructor.

+
+
+
+

Hierarchy

+
    +
  • BackgroundProcessorOptions +
      +
    • VirtualBackgroundProcessorOptions
+
+
+
+ +
+
+

Properties

+
+ +
assetsPath: string
+

The VideoProcessors load assets dynamically depending on certain browser features. +You need to serve all the assets and provide the root path so they can be referenced properly. +These assets can be copied from the dist/build folder; copying them can be added as a step in your deployment process.

+
+
+

Example


+
+For virtual background: +
+ +
const virtualBackground = new VirtualBackgroundProcessor({
assetsPath: 'https://my-server-path/assets',
backgroundImage: img,
});
await virtualBackground.loadModel(); +
+
+For blur background: +
+ +
const blurBackground = new GaussianBlurBackgroundProcessor({
assetsPath: 'https://my-server-path/assets'
});
await blurBackground.loadModel(); +
+
+
+ +
backgroundImage: HTMLImageElement
+

The HTMLImageElement to use for background replacement. +An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow +security guidelines +when loading the image from a different origin. Failing to do so will result in an empty output frame.

+
+
+
+ +
debounce?: boolean
+

Whether to skip processing every other frame to improve the output frame rate, at the cost of reduced accuracy.

+
+
+

Default

true
+
+
+
+ +
fitType?: ImageFit
+

The [[ImageFit]] to use for positioning the background image in the viewport. Only the Canvas2D [[Pipeline]] +supports this option. WebGL2 ignores this option and falls back to Cover.

+
+
+

Default

'Fill'
+
+
+
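For example, a sketch where fitType actually takes effect; the Canvas2D pipeline is required, and img is assumed to be a fully loaded HTMLImageElement as in the class example:

const virtualBackground = new VirtualBackgroundProcessor({
  assetsPath: 'https://my-server-path/assets',
  backgroundImage: img,
  fitType: ImageFit.Contain,
  pipeline: Pipeline.Canvas2D, // required for fitType to be honored
});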
+ +
maskBlurRadius?: number
+

The blur radius to use when smoothing out the edges of the person's mask.

+
+
+

Default

8 for WebGL2 pipeline, 4 for Canvas2D pipeline
+
+
+
+ +
pipeline?: Pipeline
+

Specifies which pipeline to use when processing video frames.

+
+
+

Default

'WebGL2'
+
+
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/modules.html b/dist/docs/modules.html new file mode 100644 index 0000000..3acea24 --- /dev/null +++ b/dist/docs/modules.html @@ -0,0 +1,64 @@ +@twilio/video-processors
+
+ +
+ +
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/variables/isSupported.html b/dist/docs/variables/isSupported.html new file mode 100644 index 0000000..09fcefb --- /dev/null +++ b/dist/docs/variables/isSupported.html @@ -0,0 +1,55 @@ +isSupported | @twilio/video-processors
+
+ +
+
+
+
+ +

Variable isSupportedConst

+
isSupported: boolean = ...
+

Check if the current browser is officially supported by twilio-video-processors.js. +This is set to true for browsers that support the canvas +2D or +webgl2 +rendering context.

+
+
+

Example

import { isSupported } from '@twilio/video-processors';

if (isSupported) {
// Initialize the background processors
} +
+
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/variables/version.html b/dist/docs/variables/version.html new file mode 100644 index 0000000..ddd7af2 --- /dev/null +++ b/dist/docs/variables/version.html @@ -0,0 +1,48 @@ +version | @twilio/video-processors
+
+ +
+
+
+
+ +

Variable versionConst

+
version: string = '2.2.0'
+

The current version of the library.

+
+
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/es5/constants.d.ts b/es5/constants.d.ts new file mode 100644 index 0000000..63660fa --- /dev/null +++ b/es5/constants.d.ts @@ -0,0 +1,8 @@ +import { Dimensions } from './types'; +export declare const BLUR_FILTER_RADIUS = 15; +export declare const MASK_BLUR_RADIUS = 8; +export declare const MODEL_NAME = "selfie_segmentation_landscape.tflite"; +export declare const TFLITE_LOADER_NAME = "tflite-1-0-0.js"; +export declare const TFLITE_SIMD_LOADER_NAME = "tflite-simd-1-0-0.js"; +export declare const TFLITE_WORKER_NAME = "tflite-worker.js"; +export declare const WASM_INFERENCE_DIMENSIONS: Dimensions; diff --git a/es5/constants.js b/es5/constants.js new file mode 100644 index 0000000..f6ae7b3 --- /dev/null +++ b/es5/constants.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WASM_INFERENCE_DIMENSIONS = exports.TFLITE_WORKER_NAME = exports.TFLITE_SIMD_LOADER_NAME = exports.TFLITE_LOADER_NAME = exports.MODEL_NAME = exports.MASK_BLUR_RADIUS = exports.BLUR_FILTER_RADIUS = void 0; +exports.BLUR_FILTER_RADIUS = 15; +exports.MASK_BLUR_RADIUS = 8; +exports.MODEL_NAME = 'selfie_segmentation_landscape.tflite'; +exports.TFLITE_LOADER_NAME = 'tflite-1-0-0.js'; +exports.TFLITE_SIMD_LOADER_NAME = 'tflite-simd-1-0-0.js'; +exports.TFLITE_WORKER_NAME = 'tflite-worker.js'; +exports.WASM_INFERENCE_DIMENSIONS = { + width: 256, + height: 144, +}; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/es5/constants.js.map b/es5/constants.js.map new file mode 100644 index 0000000..de7e5e1 --- /dev/null +++ b/es5/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../lib/constants.ts"],"names":[],"mappings":";;;AAEa,QAAA,kBAAkB,GAAG,EAAE,CAAC;AACxB,QAAA,gBAAgB,GAAG,CAAC,CAAC;AACrB,QAAA,UAAU,GAAG,sCAAsC,CAAC;AACpD,QAAA,kBAAkB,GAAG,iBAAiB,CAAC;AACvC,QAAA,uBAAuB,GAAG,sBAAsB,CAAC;AACjD,QAAA,kBAAkB,GAAG,kBAAkB,CAAC;AAExC,QAAA,yBAAyB,GAAe;IACnD,KAAK,EAAE,GAAG;IACV,MAAM,EAAE,GAAG;CACZ,CAAC","sourcesContent":["import { Dimensions } from './types';\n\nexport const BLUR_FILTER_RADIUS = 15;\nexport const MASK_BLUR_RADIUS = 8;\nexport const MODEL_NAME = 'selfie_segmentation_landscape.tflite';\nexport const TFLITE_LOADER_NAME = 'tflite-1-0-0.js';\nexport const TFLITE_SIMD_LOADER_NAME = 'tflite-simd-1-0-0.js';\nexport const TFLITE_WORKER_NAME = 'tflite-worker.js';\n\nexport const WASM_INFERENCE_DIMENSIONS: Dimensions = {\n width: 256,\n height: 144,\n};\n"]} \ No newline at end of file diff --git a/es5/index.d.ts b/es5/index.d.ts new file mode 100644 index 0000000..dd49bd2 --- /dev/null +++ b/es5/index.d.ts @@ -0,0 +1,6 @@ +import { GaussianBlurBackgroundProcessor, GaussianBlurBackgroundProcessorOptions } from './processors/background/GaussianBlurBackgroundProcessor'; +import { VirtualBackgroundProcessor, VirtualBackgroundProcessorOptions } from './processors/background/VirtualBackgroundProcessor'; +import { ImageFit, Pipeline } from './types'; +import { isSupported } from './utils/support'; +import { version } from './utils/version'; +export { GaussianBlurBackgroundProcessor, GaussianBlurBackgroundProcessorOptions, ImageFit, Pipeline, isSupported, version, VirtualBackgroundProcessor, VirtualBackgroundProcessorOptions, }; diff --git a/es5/index.js b/es5/index.js new file mode 100644 index 0000000..8a60daa --- /dev/null +++ b/es5/index.js @@ -0,0 +1,30 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + 
for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VirtualBackgroundProcessor = exports.version = exports.isSupported = exports.Pipeline = exports.ImageFit = exports.GaussianBlurBackgroundProcessor = void 0; +var GaussianBlurBackgroundProcessor_1 = require("./processors/background/GaussianBlurBackgroundProcessor"); +Object.defineProperty(exports, "GaussianBlurBackgroundProcessor", { enumerable: true, get: function () { return GaussianBlurBackgroundProcessor_1.GaussianBlurBackgroundProcessor; } }); +var VirtualBackgroundProcessor_1 = require("./processors/background/VirtualBackgroundProcessor"); +Object.defineProperty(exports, "VirtualBackgroundProcessor", { enumerable: true, get: function () { return VirtualBackgroundProcessor_1.VirtualBackgroundProcessor; } }); +var types_1 = require("./types"); +Object.defineProperty(exports, "ImageFit", { enumerable: true, get: function () { return types_1.ImageFit; } }); +Object.defineProperty(exports, "Pipeline", { enumerable: true, get: function () { return types_1.Pipeline; } }); +var support_1 = require("./utils/support"); +Object.defineProperty(exports, "isSupported", { enumerable: true, get: function () { return support_1.isSupported; } }); +var version_1 = require("./utils/version"); +Object.defineProperty(exports, "version", { enumerable: true, get: function () { return version_1.version; } }); +if (typeof window !== 'undefined') { + window.Twilio = window.Twilio || {}; + window.Twilio.VideoProcessors = __assign(__assign({}, window.Twilio.VideoProcessors), { GaussianBlurBackgroundProcessor: GaussianBlurBackgroundProcessor_1.GaussianBlurBackgroundProcessor, ImageFit: types_1.ImageFit, Pipeline: types_1.Pipeline, isSupported: support_1.isSupported, version: version_1.version, VirtualBackgroundProcessor: VirtualBackgroundProcessor_1.VirtualBackgroundProcessor }); +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/es5/index.js.map b/es5/index.js.map new file mode 100644 index 0000000..e0b06e3 --- /dev/null +++ b/es5/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../lib/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,2GAAkJ;AAoBhJ,gHApBO,iEAA+B,OAoBP;AAnBjC,iGAAmI;AAyBjI,2GAzBO,uDAA0B,OAyBP;AAxB5B,iCAA6C;AAoB3C,yFApBO,gBAAQ,OAoBP;AACR,yFArBiB,gBAAQ,OAqBjB;AApBV,2CAA8C;AAqB5C,4FArBO,qBAAW,OAqBP;AApBb,2CAA0C;AAqBxC,wFArBO,iBAAO,OAqBP;AAnBT,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAC;IACpC,MAAM,CAAC,MAAM,CAAC,eAAe,yBACxB,MAAM,CAAC,MAAM,CAAC,eAAe,KAChC,+BAA+B,mEAAA,EAC/B,QAAQ,kBAAA,EACR,QAAQ,kBAAA,EACR,WAAW,uBAAA,EACX,OAAO,mBAAA,EACP,0BAA0B,yDAAA,GAC3B,CAAC;CACH","sourcesContent":["import { GaussianBlurBackgroundProcessor, GaussianBlurBackgroundProcessorOptions } from './processors/background/GaussianBlurBackgroundProcessor';\nimport { VirtualBackgroundProcessor, VirtualBackgroundProcessorOptions } from './processors/background/VirtualBackgroundProcessor';\nimport { ImageFit, Pipeline } from './types';\nimport { isSupported } from './utils/support';\nimport { version } from './utils/version';\n\nif (typeof window !== 'undefined') {\n window.Twilio = window.Twilio || {};\n window.Twilio.VideoProcessors = {\n ...window.Twilio.VideoProcessors,\n GaussianBlurBackgroundProcessor,\n ImageFit,\n 
Pipeline,\n isSupported,\n version,\n VirtualBackgroundProcessor,\n };\n}\n\nexport {\n GaussianBlurBackgroundProcessor,\n GaussianBlurBackgroundProcessorOptions,\n ImageFit,\n Pipeline,\n isSupported,\n version,\n VirtualBackgroundProcessor,\n VirtualBackgroundProcessorOptions,\n};\n"]} \ No newline at end of file diff --git a/es5/processors/Processor.d.ts b/es5/processors/Processor.d.ts new file mode 100644 index 0000000..3b5fac6 --- /dev/null +++ b/es5/processors/Processor.d.ts @@ -0,0 +1,13 @@ +/** + * @private + * The [[Processor]] is an abstract class for building your own custom processors. + */ +export declare abstract class Processor { + /** + * Applies a transform to an input frame and draw the results to an output frame buffer. + * The frame will be dropped if this method raises an exception. + * @param inputFrameBuffer - The source of the input frame to process. + * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame. + */ + abstract processFrame(inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement, outputFrameBuffer: HTMLCanvasElement): Promise | void; +} diff --git a/es5/processors/Processor.js b/es5/processors/Processor.js new file mode 100644 index 0000000..3bf6156 --- /dev/null +++ b/es5/processors/Processor.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Processor = void 0; +/** + * @private + * The [[Processor]] is an abstract class for building your own custom processors. + */ +var Processor = /** @class */ (function () { + function Processor() { + } + return Processor; +}()); +exports.Processor = Processor; +//# sourceMappingURL=Processor.js.map \ No newline at end of file diff --git a/es5/processors/Processor.js.map b/es5/processors/Processor.js.map new file mode 100644 index 0000000..ff5705d --- /dev/null +++ b/es5/processors/Processor.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Processor.js","sourceRoot":"","sources":["../../lib/processors/Processor.ts"],"names":[],"mappings":";;;AAAA;;;GAGG;AACH;IAAA;IAWA,CAAC;IAAD,gBAAC;AAAD,CAAC,AAXD,IAWC;AAXqB,8BAAS","sourcesContent":["/**\n * @private\n * The [[Processor]] is an abstract class for building your own custom processors.\n */\nexport abstract class Processor {\n\n /**\n * Applies a transform to an input frame and draw the results to an output frame buffer.\n * The frame will be dropped if this method raises an exception.\n * @param inputFrameBuffer - The source of the input frame to process.\n * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame.\n */\n abstract processFrame(\n inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement,\n outputFrameBuffer: HTMLCanvasElement): Promise | void;\n}\n"]} \ No newline at end of file diff --git a/es5/processors/background/BackgroundProcessor.d.ts b/es5/processors/background/BackgroundProcessor.d.ts new file mode 100644 index 0000000..05d9c0d --- /dev/null +++ b/es5/processors/background/BackgroundProcessor.d.ts @@ -0,0 +1,143 @@ +import { Processor } from '../Processor'; +import { Dimensions, Pipeline, WebGL2PipelineType } from '../../types'; +import { buildWebGL2Pipeline } from '../webgl2'; +type InputResizeMode = 'canvas' | 'image-bitmap'; +/** + * @private + */ +export interface BackgroundProcessorOptions { + /** + * The VideoProcessors load assets dynamically depending on certain browser features. + * You need to serve all the assets and provide the root path so they can be referenced properly. 
+ * These assets can be copied from the `dist/build` folder, and the copy step can be added to your deployment process. + * @example + *
+ *
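+ * Serving the assets (a minimal sketch: the Express server and the package
+ * path shown here are illustrative assumptions, not part of this library):
+ *
+ * ```ts
+ * import express from 'express';
+ *
+ * const app = express();
+ * // Expose the files copied from dist/build so that
+ * // assetsPath: 'https://my-server-path/assets' resolves correctly.
+ * app.use('/assets', express.static('node_modules/@twilio/video-processors/dist/build'));
+ * app.listen(3000);
+ * ```
+ *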
+ * For virtual background: + *
+ * + * ```ts + * const virtualBackground = new VirtualBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * backgroundImage: img, + * }); + * await virtualBackground.loadModel(); + * ``` + * + *
+ * For blur background: + *
+ * + * ```ts + * const blurBackground = new GaussianBlurBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets' + * }); + * await blurBackground.loadModel(); + * ``` + */ + assetsPath: string; + /** + * Whether to skip processing every other frame to improve the output frame rate, but reducing accuracy in the process. + * @default + * ```html + * true + * ``` + */ + debounce?: boolean; + /** + * @private + */ + deferInputResize?: boolean; + /** + * @private + */ + inferenceDimensions?: Dimensions; + /** + * @private + */ + inputResizeMode?: InputResizeMode; + /** + * The blur radius to use when smoothing out the edges of the person's mask. + * @default + * ```html + * 8 for WebGL2 pipeline, 4 for Canvas2D pipeline + * ``` + */ + maskBlurRadius?: number; + /** + * Specifies which pipeline to use when processing video frames. + * @default + * ```html + * 'WebGL2' + * ``` + */ + pipeline?: Pipeline; +} +/** + * @private + */ +export declare abstract class BackgroundProcessor extends Processor { + private static _tflite; + protected _backgroundImage: HTMLImageElement | null; + protected _outputCanvas: HTMLCanvasElement | null; + protected _outputContext: CanvasRenderingContext2D | WebGL2RenderingContext | null; + protected _webgl2Pipeline: ReturnType | null; + private _assetsPath; + private _benchmark; + private _currentMask; + private _debounce; + private _deferInputResize; + private _inferenceDimensions; + private _inferenceInputCanvas; + private _inferenceInputContext; + private _inputFrameCanvas; + private _inputFrameContext; + private _inputResizeMode; + private _isSimdEnabled; + private _maskBlurRadius; + private _maskCanvas; + private _maskContext; + private _pipeline; + constructor(options: BackgroundProcessorOptions); + /** + * The current blur radius when smoothing out the edges of the person's mask. + */ + get maskBlurRadius(): number; + /** + * Set a new blur radius to be used when smoothing out the edges of the person's mask. + */ + set maskBlurRadius(radius: number); + /** + * Load the segmentation model. + * Call this method before attaching the processor to ensure + * video frames are processed correctly. + */ + loadModel(): Promise; + /** + * Apply a transform to the background of an input video frame and leaving + * the foreground (person(s)) untouched. Any exception detected will + * result in the frame being dropped. + * @param inputFrameBuffer - The source of the input frame to process. + *
+ *
+ * [OffscreenCanvas](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas) - Good for canvas-related processing + * that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLCanvasElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement) - This is recommended on browsers + * that don't support `OffscreenCanvas`, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLVideoElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement) - Recommended when using [[Pipeline.WebGL2]] but + * works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]]. + *
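+ * A hedged sketch of how an input buffer is typically selected per pipeline
+ * (the `inputFrameBufferType` and `outputFrameBufferContextType` options come
+ * from twilio-video's `addProcessor` and are shown here only for illustration):
+ *
+ * ```ts
+ * // WebGL2 pipeline: pass the raw video element, render into a webgl2 context.
+ * track.addProcessor(processor, {
+ *   inputFrameBufferType: 'video',
+ *   outputFrameBufferContextType: 'webgl2',
+ * });
+ *
+ * // Canvas2D pipeline: an offscreen canvas input with a 2d output context.
+ * track.addProcessor(processor, {
+ *   inputFrameBufferType: 'offscreencanvas',
+ *   outputFrameBufferContextType: '2d',
+ * });
+ * ```
+ *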
+ * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame. + */ + processFrame(inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement, outputFrameBuffer: HTMLCanvasElement): Promise; + protected abstract _getWebGL2PipelineType(): WebGL2PipelineType; + protected abstract _setBackground(inputFrame?: OffscreenCanvas | HTMLCanvasElement): void; + private _createPersonMask; + private _createWebGL2Pipeline; + private _resizeInputFrame; +} +export {}; diff --git a/es5/processors/background/BackgroundProcessor.js b/es5/processors/background/BackgroundProcessor.js new file mode 100644 index 0000000..18487b2 --- /dev/null +++ b/es5/processors/background/BackgroundProcessor.js @@ -0,0 +1,368 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BackgroundProcessor = void 0; +var Processor_1 = require("../Processor"); +var Benchmark_1 = require("../../utils/Benchmark"); +var TwilioTFLite_1 = require("../../utils/TwilioTFLite"); +var support_1 = require("../../utils/support"); +var types_1 = require("../../types"); +var webgl2_1 = require("../webgl2"); +var constants_1 = require("../../constants"); +/** + * @private + */ +var BackgroundProcessor = /** @class */ (function (_super) { + __extends(BackgroundProcessor, _super); + function BackgroundProcessor(options) { + var _this = _super.call(this) || this; + _this._backgroundImage = null; + _this._outputCanvas = null; + _this._outputContext = null; + _this._webgl2Pipeline = null; + _this._inferenceDimensions = constants_1.WASM_INFERENCE_DIMENSIONS; + if (typeof options.assetsPath !== 'string') { + throw new Error('assetsPath parameter is missing'); + } + var assetsPath = options.assetsPath; + if (assetsPath && assetsPath[assetsPath.length - 1] !== '/') { + assetsPath += '/'; + } + _this._assetsPath = assetsPath; + _this._debounce = typeof options.debounce === 'boolean' ? options.debounce : true; + _this._deferInputResize = typeof options.deferInputResize === 'boolean' ? options.deferInputResize : false; + _this._inferenceDimensions = options.inferenceDimensions || _this._inferenceDimensions; + _this._inputResizeMode = typeof options.inputResizeMode === 'string' + ? options.inputResizeMode + : ((0, support_1.isChromiumImageBitmap)() ? 'image-bitmap' : 'canvas'); + _this._pipeline = options.pipeline || types_1.Pipeline.WebGL2; + _this._benchmark = new Benchmark_1.Benchmark(); + _this._currentMask = null; + _this._isSimdEnabled = null; + _this._inferenceInputCanvas = typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(1, 1) : document.createElement('canvas'); + _this._inferenceInputContext = _this._inferenceInputCanvas.getContext('2d', { willReadFrequently: true }); + _this._inputFrameCanvas = typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(1, 1) : document.createElement('canvas'); + _this._inputFrameContext = _this._inputFrameCanvas.getContext('2d'); + _this._maskBlurRadius = typeof options.maskBlurRadius === 'number' ? options.maskBlurRadius : (_this._pipeline === types_1.Pipeline.WebGL2 ? constants_1.MASK_BLUR_RADIUS : (constants_1.MASK_BLUR_RADIUS / 2)); + _this._maskCanvas = typeof OffscreenCanvas !== 'undefined' ? 
new OffscreenCanvas(1, 1) : document.createElement('canvas'); + _this._maskContext = _this._maskCanvas.getContext('2d'); + return _this; + } + Object.defineProperty(BackgroundProcessor.prototype, "maskBlurRadius", { + /** + * The current blur radius when smoothing out the edges of the person's mask. + */ + get: function () { + return this._maskBlurRadius; + }, + /** + * Set a new blur radius to be used when smoothing out the edges of the person's mask. + */ + set: function (radius) { + var _a; + if (typeof radius !== 'number' || radius < 0) { + console.warn("Valid mask blur radius not found. Using ".concat(constants_1.MASK_BLUR_RADIUS, " as default.")); + radius = constants_1.MASK_BLUR_RADIUS; + } + if (this._maskBlurRadius !== radius) { + this._maskBlurRadius = radius; + (_a = this._webgl2Pipeline) === null || _a === void 0 ? void 0 : _a.updatePostProcessingConfig({ + jointBilateralFilter: { + sigmaSpace: this._maskBlurRadius + } + }); + } + }, + enumerable: false, + configurable: true + }); + /** + * Load the segmentation model. + * Call this method before attaching the processor to ensure + * video frames are processed correctly. + */ + BackgroundProcessor.prototype.loadModel = function () { + return __awaiter(this, void 0, void 0, function () { + var tflite; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + tflite = BackgroundProcessor._tflite; + if (!!tflite) return [3 /*break*/, 2]; + tflite = new TwilioTFLite_1.TwilioTFLite(); + return [4 /*yield*/, tflite.initialize(this._assetsPath, constants_1.MODEL_NAME, constants_1.TFLITE_LOADER_NAME, constants_1.TFLITE_SIMD_LOADER_NAME)]; + case 1: + _a.sent(); + BackgroundProcessor._tflite = tflite; + _a.label = 2; + case 2: + this._isSimdEnabled = tflite.isSimdEnabled; + return [2 /*return*/]; + } + }); + }); + }; + /** + * Apply a transform to the background of an input video frame and leaving + * the foreground (person(s)) untouched. Any exception detected will + * result in the frame being dropped. + * @param inputFrameBuffer - The source of the input frame to process. + *
+ *
+ * [OffscreenCanvas](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas) - Good for canvas-related processing + * that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLCanvasElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement) - This is recommended on browsers + * that don't support `OffscreenCanvas`, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLVideoElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement) - Recommended when using [[Pipeline.WebGL2]] but + * works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]]. + *
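+ * For the Canvas2D pipeline, the composition performed below reduces to the
+ * following sequence (a simplified sketch of the code in this file, with
+ * placeholder parameters; it is not an additional API):
+ *
+ * ```ts
+ * function composite(
+ *   ctx: CanvasRenderingContext2D,
+ *   maskCanvas: CanvasImageSource,
+ *   inputFrame: CanvasImageSource,
+ *   w: number, h: number,
+ *   maskBlurRadius: number,
+ *   setBackground: () => void
+ * ): void {
+ *   ctx.save();
+ *   ctx.filter = `blur(${maskBlurRadius}px)`;
+ *   ctx.globalCompositeOperation = 'copy';
+ *   ctx.drawImage(maskCanvas, 0, 0, w, h);   // 1. draw the softened person mask
+ *   ctx.filter = 'none';
+ *   ctx.globalCompositeOperation = 'source-in';
+ *   ctx.drawImage(inputFrame, 0, 0, w, h);   // 2. keep only the person's pixels
+ *   ctx.globalCompositeOperation = 'destination-over';
+ *   setBackground();                         // 3. fill the rest: blur or image
+ *   ctx.restore();
+ * }
+ * ```
+ *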
+ * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame. + */ + BackgroundProcessor.prototype.processFrame = function (inputFrameBuffer, outputFrameBuffer) { + var _a, _b, _c; + return __awaiter(this, void 0, void 0, function () { + var _d, inferenceWidth, inferenceHeight, _e, captureWidth, captureHeight, inputFrame, personMask, ctx, _f, outputHeight, outputWidth; + return __generator(this, function (_g) { + switch (_g.label) { + case 0: + if (!BackgroundProcessor._tflite) { + return [2 /*return*/]; + } + if (!inputFrameBuffer || !outputFrameBuffer) { + throw new Error('Missing input or output frame buffer'); + } + this._benchmark.end('captureFrameDelay'); + this._benchmark.start('processFrameDelay'); + _d = this._inferenceDimensions, inferenceWidth = _d.width, inferenceHeight = _d.height; + _e = inputFrameBuffer instanceof HTMLVideoElement + ? { width: inputFrameBuffer.videoWidth, height: inputFrameBuffer.videoHeight } + : inputFrameBuffer, captureWidth = _e.width, captureHeight = _e.height; + if (this._outputCanvas !== outputFrameBuffer) { + this._outputCanvas = outputFrameBuffer; + this._outputContext = this._outputCanvas + .getContext(this._pipeline === types_1.Pipeline.Canvas2D ? '2d' : 'webgl2'); + (_a = this._webgl2Pipeline) === null || _a === void 0 ? void 0 : _a.cleanUp(); + this._webgl2Pipeline = null; + } + if (this._pipeline === types_1.Pipeline.WebGL2) { + if (!this._webgl2Pipeline) { + this._createWebGL2Pipeline(inputFrameBuffer, captureWidth, captureHeight, inferenceWidth, inferenceHeight); + } + (_b = this._webgl2Pipeline) === null || _b === void 0 ? void 0 : _b.sampleInputFrame(); + } + // Only set the canvas' dimensions if they have changed to prevent unnecessary redraw + if (this._inputFrameCanvas.width !== captureWidth) { + this._inputFrameCanvas.width = captureWidth; + } + if (this._inputFrameCanvas.height !== captureHeight) { + this._inputFrameCanvas.height = captureHeight; + } + if (this._inferenceInputCanvas.width !== inferenceWidth) { + this._inferenceInputCanvas.width = inferenceWidth; + this._maskCanvas.width = inferenceWidth; + } + if (this._inferenceInputCanvas.height !== inferenceHeight) { + this._inferenceInputCanvas.height = inferenceHeight; + this._maskCanvas.height = inferenceHeight; + } + if (inputFrameBuffer instanceof HTMLVideoElement) { + this._inputFrameContext.drawImage(inputFrameBuffer, 0, 0); + inputFrame = this._inputFrameCanvas; + } + else { + inputFrame = inputFrameBuffer; + } + return [4 /*yield*/, this._createPersonMask(inputFrame)]; + case 1: + personMask = _g.sent(); + if (this._debounce) { + this._currentMask = this._currentMask === personMask + ? null + : personMask; + } + if (this._pipeline === types_1.Pipeline.WebGL2) { + (_c = this._webgl2Pipeline) === null || _c === void 0 ? 
void 0 : _c.render(personMask.data); + } + else { + this._benchmark.start('imageCompositionDelay'); + if (!this._debounce || this._currentMask) { + this._maskContext.putImageData(personMask, 0, 0); + } + ctx = this._outputContext; + _f = this._outputCanvas, outputHeight = _f.height, outputWidth = _f.width; + ctx.save(); + ctx.filter = "blur(".concat(this._maskBlurRadius, "px)"); + ctx.globalCompositeOperation = 'copy'; + ctx.drawImage(this._maskCanvas, 0, 0, outputWidth, outputHeight); + ctx.filter = 'none'; + ctx.globalCompositeOperation = 'source-in'; + ctx.drawImage(inputFrame, 0, 0, outputWidth, outputHeight); + ctx.globalCompositeOperation = 'destination-over'; + this._setBackground(inputFrame); + ctx.restore(); + this._benchmark.end('imageCompositionDelay'); + } + this._benchmark.end('processFrameDelay'); + this._benchmark.end('totalProcessingDelay'); + // NOTE (csantos): Start the benchmark from here so we can include the delay from the Video sdk + // for a more accurate fps + this._benchmark.start('totalProcessingDelay'); + this._benchmark.start('captureFrameDelay'); + return [2 /*return*/]; + } + }); + }); + }; + BackgroundProcessor.prototype._createPersonMask = function (inputFrame) { + return __awaiter(this, void 0, void 0, function () { + var _a, height, width, stages, shouldDebounce, inferenceStage, resizeStage, resizePromise, personMaskBuffer; + var _this = this; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = this._inferenceDimensions, height = _a.height, width = _a.width; + stages = { + inference: { + false: function () { return BackgroundProcessor._tflite.runInference(); }, + true: function () { return _this._currentMask.data; } + }, + resize: { + false: function () { return __awaiter(_this, void 0, void 0, function () { return __generator(this, function (_a) { + return [2 /*return*/, this._resizeInputFrame(inputFrame)]; + }); }); }, + true: function () { return __awaiter(_this, void 0, void 0, function () { return __generator(this, function (_a) { + return [2 /*return*/]; + }); }); } + } + }; + shouldDebounce = !!this._currentMask; + inferenceStage = stages.inference["".concat(shouldDebounce)]; + resizeStage = stages.resize["".concat(shouldDebounce)]; + this._benchmark.start('inputImageResizeDelay'); + resizePromise = resizeStage(); + if (!!this._deferInputResize) return [3 /*break*/, 2]; + return [4 /*yield*/, resizePromise]; + case 1: + _b.sent(); + _b.label = 2; + case 2: + this._benchmark.end('inputImageResizeDelay'); + this._benchmark.start('segmentationDelay'); + personMaskBuffer = inferenceStage(); + this._benchmark.end('segmentationDelay'); + return [2 /*return*/, this._currentMask || new ImageData(personMaskBuffer, width, height)]; + } + }); + }); + }; + BackgroundProcessor.prototype._createWebGL2Pipeline = function (inputFrame, captureWidth, captureHeight, inferenceWidth, inferenceHeight) { + this._webgl2Pipeline = (0, webgl2_1.buildWebGL2Pipeline)({ + htmlElement: inputFrame, + width: captureWidth, + height: captureHeight, + }, this._backgroundImage, { + type: this._getWebGL2PipelineType(), + }, { + inputResolution: "".concat(inferenceWidth, "x").concat(inferenceHeight), + }, this._outputCanvas, this._benchmark, this._debounce); + this._webgl2Pipeline.updatePostProcessingConfig({ + jointBilateralFilter: { + sigmaSpace: this._maskBlurRadius, + sigmaColor: 0.1 + }, + coverage: [ + 0, + 0.99 + ], + lightWrapping: 0, + blendMode: 'screen' + }); + }; + BackgroundProcessor.prototype._resizeInputFrame = function (inputFrame) { + 
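+ // Shrink the current frame to the inference dimensions before segmentation.
+ // In 'image-bitmap' mode (Chromium), createImageBitmap performs the resize
+ // with 'pixelated' quality for speed; in 'canvas' mode, a plain drawImage
+ // scale is used instead. The resized pixels are then read back with
+ // getImageData and copied into the TFLite input buffer.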
return __awaiter(this, void 0, void 0, function () { + var _a, _b, resizeWidth, resizeHeight, ctx, resizeMode, resizedInputFrameBitmap, imageData; + return __generator(this, function (_c) { + switch (_c.label) { + case 0: + _a = this, _b = _a._inferenceInputCanvas, resizeWidth = _b.width, resizeHeight = _b.height, ctx = _a._inferenceInputContext, resizeMode = _a._inputResizeMode; + if (!(resizeMode === 'image-bitmap')) return [3 /*break*/, 2]; + return [4 /*yield*/, createImageBitmap(inputFrame, { + resizeWidth: resizeWidth, + resizeHeight: resizeHeight, + resizeQuality: 'pixelated' + })]; + case 1: + resizedInputFrameBitmap = _c.sent(); + ctx.drawImage(resizedInputFrameBitmap, 0, 0, resizeWidth, resizeHeight); + resizedInputFrameBitmap.close(); + return [3 /*break*/, 3]; + case 2: + ctx.drawImage(inputFrame, 0, 0, resizeWidth, resizeHeight); + _c.label = 3; + case 3: + imageData = ctx.getImageData(0, 0, resizeWidth, resizeHeight); + BackgroundProcessor._tflite.loadInputBuffer(imageData.data); + return [2 /*return*/]; + } + }); + }); + }; + BackgroundProcessor._tflite = null; + return BackgroundProcessor; +}(Processor_1.Processor)); +exports.BackgroundProcessor = BackgroundProcessor; +//# sourceMappingURL=BackgroundProcessor.js.map \ No newline at end of file diff --git a/es5/processors/background/BackgroundProcessor.js.map b/es5/processors/background/BackgroundProcessor.js.map new file mode 100644 index 0000000..bdc940b --- /dev/null +++ b/es5/processors/background/BackgroundProcessor.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BackgroundProcessor.js","sourceRoot":"","sources":["../../../lib/processors/background/BackgroundProcessor.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,0CAAyC;AACzC,mDAAkD;AAClD,yDAAwD;AACxD,+CAA4D;AAC5D,qCAAuE;AACvE,oCAAgD;AAEhD,6CAMyB;AAkFzB;;GAEG;AACH;IAAkD,uCAAS;IA0BzD,6BAAY,OAAmC;QAA/C,YACE,iBAAO,SAgCR;QAxDS,sBAAgB,GAA4B,IAAI,CAAC;QACjD,mBAAa,GAA6B,IAAI,CAAC;QAC/C,oBAAc,GAA6D,IAAI,CAAC;QAChF,qBAAe,GAAkD,IAAI,CAAC;QAOxE,0BAAoB,GAAe,qCAAyB,CAAC;QAgBnE,IAAI,OAAO,OAAO,CAAC,UAAU,KAAK,QAAQ,EAAE;YAC1C,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;QACpC,IAAI,UAAU,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,GAAG,EAAE;YAC3D,UAAU,IAAI,GAAG,CAAC;SACnB;QAED,KAAI,CAAC,WAAW,GAAG,UAAU,CAAC;QAC9B,KAAI,CAAC,SAAS,GAAG,OAAO,OAAO,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;QACjF,KAAI,CAAC,iBAAiB,GAAG,OAAO,OAAO,CAAC,gBAAgB,KAAK,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,KAAK,CAAC;QAC1G,KAAI,CAAC,oBAAoB,GAAG,OAAO,CAAC,mBAAoB,IAAI,KAAI,CAAC,oBAAoB,CAAC;QAEtF,KAAI,CAAC,gBAAgB,GAAG,OAAO,OAAO,CAAC,eAAe,KAAK,QAAQ;YACjE,CAAC,CAAC,OAAO,CAAC,eAAe;YACzB,CAAC,CAAC,CAAC,IAAA,+BAAqB,GAAE,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC;QAE1D,KAAI,CAAC,SAAS,GAAG,OAAO,CAAC,QAAS,IAAI,gBAAQ,CAAC,MAAM,CAAC;QACtD,KAAI,CAAC,UAAU,GAAG,IAAI,qBAAS,EAAE,CAAC;QAClC,KAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QACzB,KAAI,CAAC,cAAc,GAAG,IAAI,CAAC;QAC3B,KAAI,CAAC,qBAAqB,GAAG,OAAO,eAAe,KAAK,WAAW,CAAC,CAAC,CAAC,IAAI,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;QACnI,KAAI,CAAC,sBAAsB,GAAG,KAAI,CAAC,qBAAqB,CAAC,UAAU,CAAC,IAAI,EAAE,EAAE,kBAAkB,EAAE,IAAI,EAAE,CAAsC,CAAC;QAC7I,KAAI,CAAC,iBAAiB,GAAG,OAAO,eAAe,KAAK,WAAW,CAAC,CAAC,CAAC,IAAI,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;QAC/H,KAAI,CAAC,kBAAkB,GAAG,KAAI,CAAC,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAsC,CAAC;QACvG,KAAI,CAAC,eAAe,GAAG,OAAO,OAAO,CAAC,cAAc,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,cAAc
,CAAC,CAAC,CAAC,CAC3F,KAAI,CAAC,SAAS,KAAK,gBAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,4BAAgB,CAAC,CAAC,CAAC,CAAC,4BAAgB,GAAG,CAAC,CAAC,CAC/E,CAAC;QACF,KAAI,CAAC,WAAW,GAAG,OAAO,eAAe,KAAK,WAAW,CAAC,CAAC,CAAC,IAAI,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;QACzH,KAAI,CAAC,YAAY,GAAG,KAAI,CAAC,WAAW,CAAC,UAAU,CAAC,IAAI,CAAsC,CAAC;;IAC7F,CAAC;IAKD,sBAAI,+CAAc;QAHlB;;WAEG;aACH;YACE,OAAO,IAAI,CAAC,eAAe,CAAC;QAC9B,CAAC;QAED;;WAEG;aACH,UAAmB,MAAc;;YAC/B,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,GAAG,CAAC,EAAE;gBAC5C,OAAO,CAAC,IAAI,CAAC,kDAA2C,4BAAgB,iBAAc,CAAC,CAAC;gBACxF,MAAM,GAAG,4BAAgB,CAAC;aAC3B;YACD,IAAI,IAAI,CAAC,eAAe,KAAK,MAAM,EAAE;gBACnC,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC;gBAC9B,MAAA,IAAI,CAAC,eAAe,0CAAE,0BAA0B,CAAC;oBAC/C,oBAAoB,EAAE;wBACpB,UAAU,EAAE,IAAI,CAAC,eAAe;qBACjC;iBACF,CAAC,CAAC;aACJ;QACH,CAAC;;;OAlBA;IAoBD;;;;OAIG;IACG,uCAAS,GAAf;;;;;;wBACiB,MAAM,GAAK,mBAAmB,QAAxB,CAAyB;6BAC1C,CAAC,MAAM,EAAP,wBAAO;wBACT,MAAM,GAAG,IAAI,2BAAY,EAAE,CAAC;wBAC5B,qBAAM,MAAM,CAAC,UAAU,CACrB,IAAI,CAAC,WAAW,EAChB,sBAAU,EACV,8BAAkB,EAClB,mCAAuB,CACxB,EAAA;;wBALD,SAKC,CAAC;wBACF,mBAAmB,CAAC,OAAO,GAAG,MAAM,CAAC;;;wBAEvC,IAAI,CAAC,cAAc,GAAG,MAAM,CAAC,aAAa,CAAC;;;;;KAC5C;IAED;;;;;;;;;;;;;;;;;;;OAmBG;IACG,0CAAY,GAAlB,UACE,gBAAwE,EACxE,iBAAoC;;;;;;;wBAEpC,IAAI,CAAC,mBAAmB,CAAC,OAAO,EAAE;4BAChC,sBAAO;yBACR;wBACD,IAAI,CAAC,gBAAgB,IAAI,CAAC,iBAAiB,EAAE;4BAC3C,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;yBACzD;wBACD,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAC;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAC;wBAErC,KAGF,IAAI,CAAC,oBAAoB,EAFpB,cAAc,WAAA,EACb,eAAe,YAAA,CACK;wBAExB,KAGF,gBAAgB,YAAY,gBAAgB;4BAC9C,CAAC,CAAC,EAAE,KAAK,EAAE,gBAAgB,CAAC,UAAU,EAAE,MAAM,EAAE,gBAAgB,CAAC,WAAW,EAAE;4BAC9E,CAAC,CAAC,gBAAgB,EAJX,YAAY,WAAA,EACX,aAAa,YAAA,CAGF;wBAErB,IAAI,IAAI,CAAC,aAAa,KAAK,iBAAiB,EAAE;4BAC5C,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC;4BACvC,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,aAAa;iCACrC,UAAU,CAAC,IAAI,CAAC,SAAS,KAAK,gBAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CACjB,CAAC;4BACpD,MAAA,IAAI,CAAC,eAAe,0CAAE,OAAO,EAAE,CAAC;4BAChC,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC;yBAC7B;wBAED,IAAI,IAAI,CAAC,SAAS,KAAK,gBAAQ,CAAC,MAAM,EAAE;4BACtC,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE;gCACzB,IAAI,CAAC,qBAAqB,CACxB,gBAAoC,EACpC,YAAY,EACZ,aAAa,EACb,cAAc,EACd,eAAe,CAChB,CAAC;6BACH;4BACD,MAAA,IAAI,CAAC,eAAe,0CAAE,gBAAgB,EAAE,CAAC;yBAC1C;wBAED,qFAAqF;wBACrF,IAAI,IAAI,CAAC,iBAAiB,CAAC,KAAK,KAAK,YAAY,EAAE;4BACjD,IAAI,CAAC,iBAAiB,CAAC,KAAK,GAAG,YAAY,CAAC;yBAC7C;wBACD,IAAI,IAAI,CAAC,iBAAiB,CAAC,MAAM,KAAK,aAAa,EAAE;4BACnD,IAAI,CAAC,iBAAiB,CAAC,MAAM,GAAG,aAAa,CAAC;yBAC/C;wBACD,IAAI,IAAI,CAAC,qBAAqB,CAAC,KAAK,KAAK,cAAc,EAAE;4BACvD,IAAI,CAAC,qBAAqB,CAAC,KAAK,GAAG,cAAc,CAAC;4BAClD,IAAI,CAAC,WAAW,CAAC,KAAK,GAAG,cAAc,CAAC;yBACzC;wBACD,IAAI,IAAI,CAAC,qBAAqB,CAAC,MAAM,KAAK,eAAe,EAAE;4BACzD,IAAI,CAAC,qBAAqB,CAAC,MAAM,GAAG,eAAe,CAAC;4BACpD,IAAI,CAAC,WAAW,CAAC,MAAM,GAAG,eAAe,CAAC;yBAC3C;wBAGD,IAAI,gBAAgB,YAAY,gBAAgB,EAAE;4BAChD,IAAI,CAAC,kBAAkB,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;4BAC1D,UAAU,GAAG,IAAI,CAAC,iBAAiB,CAAC;yBACrC;6BAAM;4BACL,UAAU,GAAG,gBAAgB,CAAC;yBAC/B;wBAEkB,qBAAM,IAAI,CAAC,iBAAiB,CAAC,UAAU,CAAC,EAAA;;wBAArD,UAAU,GAAG,SAAwC;wBAC3D,IAAI,IAAI,CAAC,SAAS,EAAE;4BAClB,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,KAAK,UAAU;gCAClD,CAAC,CAAC,IAAI;gCACN,CAAC,CAAC,UAAU,CAAC;yBAChB;wBAED,IAAI,IAAI,CAAC,SAAS,KAAK,gBAAQ,CAAC,MAAM,EAAE;4BACtC,MAAA,IAAI,CAAC,eAAe,0CAAE,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;yBAC/C;6BACI;4BACH,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAC;4BAC/C,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,YAAY,EAAE;gCACxC,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,U
AAU,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;6BAClD;4BACK,GAAG,GAAG,IAAI,CAAC,cAA0C,CAAC;4BACtD,KAGF,IAAI,CAAC,aAAa,EAFZ,YAAY,YAAA,EACb,WAAW,WAAA,CACG;4BACvB,GAAG,CAAC,IAAI,EAAE,CAAC;4BACX,GAAG,CAAC,MAAM,GAAG,eAAQ,IAAI,CAAC,eAAe,QAAK,CAAC;4BAC/C,GAAG,CAAC,wBAAwB,GAAG,MAAM,CAAC;4BACtC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;4BACjE,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC;4BACpB,GAAG,CAAC,wBAAwB,GAAG,WAAW,CAAC;4BAC3C,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;4BAC3D,GAAG,CAAC,wBAAwB,GAAG,kBAAkB,CAAC;4BAClD,IAAI,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC;4BAChC,GAAG,CAAC,OAAO,EAAE,CAAC;4BACd,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAC;yBAC9C;wBAED,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAC;wBACzC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,sBAAsB,CAAC,CAAC;wBAE5C,+FAA+F;wBAC/F,0BAA0B;wBAC1B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAC;wBAC9C,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAC;;;;;KAC5C;IAMa,+CAAiB,GAA/B,UAAgC,UAA+C;;;;;;;wBACvE,KAAoB,IAAI,CAAC,oBAAoB,EAA3C,MAAM,YAAA,EAAE,KAAK,WAAA,CAA+B;wBAC9C,MAAM,GAAG;4BACb,SAAS,EAAE;gCACT,KAAK,EAAE,cAAM,OAAA,mBAAmB,CAAC,OAAQ,CAAC,YAAY,EAAE,EAA3C,CAA2C;gCACxD,IAAI,EAAE,cAAM,OAAA,KAAI,CAAC,YAAa,CAAC,IAAI,EAAvB,CAAuB;6BACpC;4BACD,MAAM,EAAE;gCACN,KAAK,EAAE;oCAAY,sBAAA,IAAI,CAAC,iBAAiB,CAAC,UAAU,CAAC,EAAA;yCAAA;gCACrD,IAAI,EAAE;;yCAA0B;6BACjC;yBACF,CAAC;wBACI,cAAc,GAAG,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC;wBACrC,cAAc,GAAG,MAAM,CAAC,SAAS,CAAC,UAAG,cAAc,CAAE,CAAC,CAAC;wBACvD,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,UAAG,cAAc,CAAE,CAAC,CAAC;wBAEvD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAC;wBACzC,aAAa,GAAG,WAAW,EAAE,CAAC;6BAChC,CAAC,IAAI,CAAC,iBAAiB,EAAvB,wBAAuB;wBACzB,qBAAM,aAAa,EAAA;;wBAAnB,SAAmB,CAAC;;;wBAEtB,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAC;wBAC7C,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAC;wBACrC,gBAAgB,GAAG,cAAc,EAAE,CAAC;wBAC1C,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAC;wBACzC,sBAAO,IAAI,CAAC,YAAY,IAAI,IAAI,SAAS,CAAC,gBAAgB,EAAE,KAAK,EAAE,MAAM,CAAC,EAAC;;;;KAC5E;IAEO,mDAAqB,GAA7B,UACE,UAA4B,EAC5B,YAAoB,EACpB,aAAqB,EACrB,cAAsB,EACtB,eAAuB;QAEvB,IAAI,CAAC,eAAe,GAAG,IAAA,4BAAmB,EACxC;YACE,WAAW,EAAE,UAAU;YACvB,KAAK,EAAE,YAAY;YACnB,MAAM,EAAE,aAAa;SACtB,EACD,IAAI,CAAC,gBAAgB,EACrB;YACE,IAAI,EAAE,IAAI,CAAC,sBAAsB,EAAE;SACpC,EACD;YACE,eAAe,EAAE,UAAG,cAAc,cAAI,eAAe,CAAE;SACxD,EACD,IAAI,CAAC,aAAc,EACnB,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,SAAS,CACf,CAAC;QACF,IAAI,CAAC,eAAe,CAAC,0BAA0B,CAAC;YAC9C,oBAAoB,EAAE;gBACpB,UAAU,EAAE,IAAI,CAAC,eAAe;gBAChC,UAAU,EAAE,GAAG;aAChB;YACD,QAAQ,EAAE;gBACR,CAAC;gBACD,IAAI;aACL;YACD,aAAa,EAAE,CAAC;YAChB,SAAS,EAAE,QAAQ;SACpB,CAAC,CAAC;IACL,CAAC;IAEa,+CAAiB,GAA/B,UAAgC,UAA+C;;;;;;wBACvE,KAOF,IAAI,EANN,6BAGC,EAFQ,WAAW,WAAA,EACV,YAAY,YAAA,EAEE,GAAG,4BAAA,EACT,UAAU,sBAAA,CACrB;6BACL,CAAA,UAAU,KAAK,cAAc,CAAA,EAA7B,wBAA6B;wBACC,qBAAM,iBAAiB,CAAC,UAAU,EAAE;gCAClE,WAAW,aAAA;gCACX,YAAY,cAAA;gCACZ,aAAa,EAAE,WAAW;6BAC3B,CAAC,EAAA;;wBAJI,uBAAuB,GAAG,SAI9B;wBACF,GAAG,CAAC,SAAS,CAAC,uBAAuB,EAAE,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;wBACxE,uBAAuB,CAAC,KAAK,EAAE,CAAC;;;wBAEhC,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;;;wBAEvD,SAAS,GAAG,GAAG,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;wBACpE,mBAAmB,CAAC,OAAQ,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;;;;;KAC9D;IA1Uc,2BAAO,GAAwB,IAAI,AAA5B,CAA6B;IA2UrD,0BAAC;CAAA,AA5UD,CAAkD,qBAAS,GA4U1D;AA5UqB,kDAAmB","sourcesContent":["import { Processor } from '../Processor';\nimport { Benchmark } from '../../utils/Benchmark';\nimport { TwilioTFLite } from '../../utils/TwilioTFLite';\nimport { isChromiumImageBitmap } 
from '../../utils/support';\nimport { Dimensions, Pipeline, WebGL2PipelineType } from '../../types';\nimport { buildWebGL2Pipeline } from '../webgl2';\n\nimport {\n MASK_BLUR_RADIUS,\n MODEL_NAME,\n TFLITE_LOADER_NAME,\n TFLITE_SIMD_LOADER_NAME,\n WASM_INFERENCE_DIMENSIONS,\n} from '../../constants';\n\ntype InputResizeMode = 'canvas' | 'image-bitmap';\n\n/**\n * @private\n */\nexport interface BackgroundProcessorOptions {\n /**\n * The VideoProcessors load assets dynamically depending on certain browser features.\n * You need to serve all the assets and provide the root path so they can be referenced properly.\n * These assets can be copied from the `dist/build` folder which you can add as part of your deployment process.\n * @example\n *
\n *
\n * For virtual background:\n *
\n *\n * ```ts\n * const virtualBackground = new VirtualBackgroundProcessor({\n * assetsPath: 'https://my-server-path/assets',\n * backgroundImage: img,\n * });\n * await virtualBackground.loadModel();\n * ```\n *\n *
\n * For blur background:\n *
\n *\n * ```ts\n * const blurBackground = new GaussianBlurBackgroundProcessor({\n * assetsPath: 'https://my-server-path/assets'\n * });\n * await blurBackground.loadModel();\n * ```\n */\n assetsPath: string;\n\n /**\n * Whether to skip processing every other frame to improve the output frame rate, but reducing accuracy in the process.\n * @default\n * ```html\n * true\n * ```\n */\n debounce?: boolean;\n\n /**\n * @private\n */\n deferInputResize?: boolean;\n\n /**\n * @private\n */\n inferenceDimensions?: Dimensions;\n\n /**\n * @private\n */\n inputResizeMode?: InputResizeMode;\n\n /**\n * The blur radius to use when smoothing out the edges of the person's mask.\n * @default\n * ```html\n * 8 for WebGL2 pipeline, 4 for Canvas2D pipeline\n * ```\n */\n maskBlurRadius?: number;\n\n /**\n * Specifies which pipeline to use when processing video frames.\n * @default\n * ```html\n * 'WebGL2'\n * ```\n */\n pipeline?: Pipeline;\n}\n\n/**\n * @private\n */\nexport abstract class BackgroundProcessor extends Processor {\n private static _tflite: TwilioTFLite | null = null;\n\n protected _backgroundImage: HTMLImageElement | null = null;\n protected _outputCanvas: HTMLCanvasElement | null = null;\n protected _outputContext: CanvasRenderingContext2D | WebGL2RenderingContext | null = null;\n protected _webgl2Pipeline: ReturnType | null = null;\n\n private _assetsPath: string;\n private _benchmark: Benchmark;\n private _currentMask: ImageData | null;\n private _debounce: boolean;\n private _deferInputResize: boolean;\n private _inferenceDimensions: Dimensions = WASM_INFERENCE_DIMENSIONS;\n private _inferenceInputCanvas: OffscreenCanvas | HTMLCanvasElement;\n private _inferenceInputContext: OffscreenCanvasRenderingContext2D | CanvasRenderingContext2D;\n private _inputFrameCanvas: OffscreenCanvas | HTMLCanvasElement;\n private _inputFrameContext: OffscreenCanvasRenderingContext2D | CanvasRenderingContext2D;\n private _inputResizeMode: InputResizeMode;\n // tslint:disable-next-line no-unused-variable\n private _isSimdEnabled: boolean | null;\n private _maskBlurRadius: number;\n private _maskCanvas: OffscreenCanvas | HTMLCanvasElement;\n private _maskContext: OffscreenCanvasRenderingContext2D | CanvasRenderingContext2D;\n private _pipeline: Pipeline;\n\n constructor(options: BackgroundProcessorOptions) {\n super();\n\n if (typeof options.assetsPath !== 'string') {\n throw new Error('assetsPath parameter is missing');\n }\n let assetsPath = options.assetsPath;\n if (assetsPath && assetsPath[assetsPath.length - 1] !== '/') {\n assetsPath += '/';\n }\n\n this._assetsPath = assetsPath;\n this._debounce = typeof options.debounce === 'boolean' ? options.debounce : true;\n this._deferInputResize = typeof options.deferInputResize === 'boolean' ? options.deferInputResize : false;\n this._inferenceDimensions = options.inferenceDimensions! || this._inferenceDimensions;\n\n this._inputResizeMode = typeof options.inputResizeMode === 'string'\n ? options.inputResizeMode\n : (isChromiumImageBitmap() ? 'image-bitmap' : 'canvas');\n\n this._pipeline = options.pipeline! || Pipeline.WebGL2;\n this._benchmark = new Benchmark();\n this._currentMask = null;\n this._isSimdEnabled = null;\n this._inferenceInputCanvas = typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(1, 1) : document.createElement('canvas');\n this._inferenceInputContext = this._inferenceInputCanvas.getContext('2d', { willReadFrequently: true }) as OffscreenCanvasRenderingContext2D;\n this._inputFrameCanvas = typeof OffscreenCanvas !== 'undefined' ? 
new OffscreenCanvas(1, 1) : document.createElement('canvas');\n this._inputFrameContext = this._inputFrameCanvas.getContext('2d') as OffscreenCanvasRenderingContext2D;\n this._maskBlurRadius = typeof options.maskBlurRadius === 'number' ? options.maskBlurRadius : (\n this._pipeline === Pipeline.WebGL2 ? MASK_BLUR_RADIUS : (MASK_BLUR_RADIUS / 2)\n );\n this._maskCanvas = typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(1, 1) : document.createElement('canvas');\n this._maskContext = this._maskCanvas.getContext('2d') as OffscreenCanvasRenderingContext2D;\n }\n\n /**\n * The current blur radius when smoothing out the edges of the person's mask.\n */\n get maskBlurRadius(): number {\n return this._maskBlurRadius;\n }\n\n /**\n * Set a new blur radius to be used when smoothing out the edges of the person's mask.\n */\n set maskBlurRadius(radius: number) {\n if (typeof radius !== 'number' || radius < 0) {\n console.warn(`Valid mask blur radius not found. Using ${MASK_BLUR_RADIUS} as default.`);\n radius = MASK_BLUR_RADIUS;\n }\n if (this._maskBlurRadius !== radius) {\n this._maskBlurRadius = radius;\n this._webgl2Pipeline?.updatePostProcessingConfig({\n jointBilateralFilter: {\n sigmaSpace: this._maskBlurRadius\n }\n });\n }\n }\n\n /**\n * Load the segmentation model.\n * Call this method before attaching the processor to ensure\n * video frames are processed correctly.\n */\n async loadModel(): Promise {\n let { _tflite: tflite } = BackgroundProcessor;\n if (!tflite) {\n tflite = new TwilioTFLite();\n await tflite.initialize(\n this._assetsPath,\n MODEL_NAME,\n TFLITE_LOADER_NAME,\n TFLITE_SIMD_LOADER_NAME,\n );\n BackgroundProcessor._tflite = tflite;\n }\n this._isSimdEnabled = tflite.isSimdEnabled;\n }\n\n /**\n * Apply a transform to the background of an input video frame and leaving\n * the foreground (person(s)) untouched. Any exception detected will\n * result in the frame being dropped.\n * @param inputFrameBuffer - The source of the input frame to process.\n *
\n *
\n * [OffscreenCanvas](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas) - Good for canvas-related processing\n * that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]].\n *
\n *
\n * [HTMLCanvasElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement) - This is recommended on browsers\n that don't support `OffscreenCanvas`, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]].\n *
\n *
\n * [HTMLVideoElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement) - Recommended when using [[Pipeline.WebGL2]] but\n * works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]].\n *
\n * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame.\n */\n async processFrame(\n inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement,\n outputFrameBuffer: HTMLCanvasElement\n ): Promise {\n if (!BackgroundProcessor._tflite) {\n return;\n }\n if (!inputFrameBuffer || !outputFrameBuffer) {\n throw new Error('Missing input or output frame buffer');\n }\n this._benchmark.end('captureFrameDelay');\n this._benchmark.start('processFrameDelay');\n\n const {\n width: inferenceWidth,\n height: inferenceHeight\n } = this._inferenceDimensions;\n\n const {\n width: captureWidth,\n height: captureHeight\n } = inputFrameBuffer instanceof HTMLVideoElement\n ? { width: inputFrameBuffer.videoWidth, height: inputFrameBuffer.videoHeight }\n : inputFrameBuffer;\n\n if (this._outputCanvas !== outputFrameBuffer) {\n this._outputCanvas = outputFrameBuffer;\n this._outputContext = this._outputCanvas\n .getContext(this._pipeline === Pipeline.Canvas2D ? '2d' : 'webgl2') as\n CanvasRenderingContext2D | WebGL2RenderingContext;\n this._webgl2Pipeline?.cleanUp();\n this._webgl2Pipeline = null;\n }\n\n if (this._pipeline === Pipeline.WebGL2) {\n if (!this._webgl2Pipeline) {\n this._createWebGL2Pipeline(\n inputFrameBuffer as HTMLVideoElement,\n captureWidth,\n captureHeight,\n inferenceWidth,\n inferenceHeight\n );\n }\n this._webgl2Pipeline?.sampleInputFrame();\n }\n\n // Only set the canvas' dimensions if they have changed to prevent unnecessary redraw\n if (this._inputFrameCanvas.width !== captureWidth) {\n this._inputFrameCanvas.width = captureWidth;\n }\n if (this._inputFrameCanvas.height !== captureHeight) {\n this._inputFrameCanvas.height = captureHeight;\n }\n if (this._inferenceInputCanvas.width !== inferenceWidth) {\n this._inferenceInputCanvas.width = inferenceWidth;\n this._maskCanvas.width = inferenceWidth;\n }\n if (this._inferenceInputCanvas.height !== inferenceHeight) {\n this._inferenceInputCanvas.height = inferenceHeight;\n this._maskCanvas.height = inferenceHeight;\n }\n\n let inputFrame: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement;\n if (inputFrameBuffer instanceof HTMLVideoElement) {\n this._inputFrameContext.drawImage(inputFrameBuffer, 0, 0);\n inputFrame = this._inputFrameCanvas;\n } else {\n inputFrame = inputFrameBuffer;\n }\n\n const personMask = await this._createPersonMask(inputFrame);\n if (this._debounce) {\n this._currentMask = this._currentMask === personMask\n ? 
null\n : personMask;\n }\n\n if (this._pipeline === Pipeline.WebGL2) {\n this._webgl2Pipeline?.render(personMask.data);\n }\n else {\n this._benchmark.start('imageCompositionDelay');\n if (!this._debounce || this._currentMask) {\n this._maskContext.putImageData(personMask, 0, 0);\n }\n const ctx = this._outputContext as CanvasRenderingContext2D;\n const {\n height: outputHeight,\n width: outputWidth\n } = this._outputCanvas;\n ctx.save();\n ctx.filter = `blur(${this._maskBlurRadius}px)`;\n ctx.globalCompositeOperation = 'copy';\n ctx.drawImage(this._maskCanvas, 0, 0, outputWidth, outputHeight);\n ctx.filter = 'none';\n ctx.globalCompositeOperation = 'source-in';\n ctx.drawImage(inputFrame, 0, 0, outputWidth, outputHeight);\n ctx.globalCompositeOperation = 'destination-over';\n this._setBackground(inputFrame);\n ctx.restore();\n this._benchmark.end('imageCompositionDelay');\n }\n\n this._benchmark.end('processFrameDelay');\n this._benchmark.end('totalProcessingDelay');\n\n // NOTE (csantos): Start the benchmark from here so we can include the delay from the Video sdk\n // for a more accurate fps\n this._benchmark.start('totalProcessingDelay');\n this._benchmark.start('captureFrameDelay');\n }\n\n protected abstract _getWebGL2PipelineType(): WebGL2PipelineType;\n\n protected abstract _setBackground(inputFrame?: OffscreenCanvas | HTMLCanvasElement): void;\n\n private async _createPersonMask(inputFrame: OffscreenCanvas | HTMLCanvasElement): Promise {\n const { height, width } = this._inferenceDimensions;\n const stages = {\n inference: {\n false: () => BackgroundProcessor._tflite!.runInference(),\n true: () => this._currentMask!.data\n },\n resize: {\n false: async () => this._resizeInputFrame(inputFrame),\n true: async () => { /* noop */ }\n }\n };\n const shouldDebounce = !!this._currentMask;\n const inferenceStage = stages.inference[`${shouldDebounce}`];\n const resizeStage = stages.resize[`${shouldDebounce}`];\n\n this._benchmark.start('inputImageResizeDelay');\n const resizePromise = resizeStage();\n if (!this._deferInputResize) {\n await resizePromise;\n }\n this._benchmark.end('inputImageResizeDelay');\n this._benchmark.start('segmentationDelay');\n const personMaskBuffer = inferenceStage();\n this._benchmark.end('segmentationDelay');\n return this._currentMask || new ImageData(personMaskBuffer, width, height);\n }\n\n private _createWebGL2Pipeline(\n inputFrame: HTMLVideoElement,\n captureWidth: number,\n captureHeight: number,\n inferenceWidth: number,\n inferenceHeight: number,\n ): void {\n this._webgl2Pipeline = buildWebGL2Pipeline(\n {\n htmlElement: inputFrame,\n width: captureWidth,\n height: captureHeight,\n },\n this._backgroundImage,\n {\n type: this._getWebGL2PipelineType(),\n },\n {\n inputResolution: `${inferenceWidth}x${inferenceHeight}`,\n },\n this._outputCanvas!,\n this._benchmark,\n this._debounce\n );\n this._webgl2Pipeline.updatePostProcessingConfig({\n jointBilateralFilter: {\n sigmaSpace: this._maskBlurRadius,\n sigmaColor: 0.1\n },\n coverage: [\n 0,\n 0.99\n ],\n lightWrapping: 0,\n blendMode: 'screen'\n });\n }\n\n private async _resizeInputFrame(inputFrame: OffscreenCanvas | HTMLCanvasElement): Promise {\n const {\n _inferenceInputCanvas: {\n width: resizeWidth,\n height: resizeHeight\n },\n _inferenceInputContext: ctx,\n _inputResizeMode: resizeMode\n } = this;\n if (resizeMode === 'image-bitmap') {\n const resizedInputFrameBitmap = await createImageBitmap(inputFrame, {\n resizeWidth,\n resizeHeight,\n resizeQuality: 'pixelated'\n });\n 
ctx.drawImage(resizedInputFrameBitmap, 0, 0, resizeWidth, resizeHeight);\n resizedInputFrameBitmap.close();\n } else {\n ctx.drawImage(inputFrame, 0, 0, resizeWidth, resizeHeight);\n }\n const imageData = ctx.getImageData(0, 0, resizeWidth, resizeHeight);\n BackgroundProcessor._tflite!.loadInputBuffer(imageData.data);\n }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/background/GaussianBlurBackgroundProcessor.d.ts b/es5/processors/background/GaussianBlurBackgroundProcessor.d.ts new file mode 100644 index 0000000..191c375 --- /dev/null +++ b/es5/processors/background/GaussianBlurBackgroundProcessor.d.ts @@ -0,0 +1,84 @@ +import { BackgroundProcessor, BackgroundProcessorOptions } from './BackgroundProcessor'; +import { WebGL2PipelineType } from '../../types'; +/** + * Options passed to [[GaussianBlurBackgroundProcessor]] constructor. + */ +export interface GaussianBlurBackgroundProcessorOptions extends BackgroundProcessorOptions { + /** + * The background blur filter radius to use in pixels. + * @default + * ```html + * 15 + * ``` + */ + blurFilterRadius?: number; +} +/** + * The GaussianBlurBackgroundProcessor, when added to a VideoTrack, + * applies a gaussian blur filter on the background in each video frame + * and leaves the foreground (person(s)) untouched. Each instance of + * GaussianBlurBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. + * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors'; + * import { simd } from 'wasm-feature-detect'; + * + * let blurBackground: GaussianBlurBackgroundProcessor; + * + * (async() => { + * const isWasmSimdSupported = await simd(); + * + * blurBackground = new GaussianBlurBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * + * // Enable debounce only if the browser does not support + * // WASM SIMD in order to retain an acceptable frame rate. + * debounce: !isWasmSimdSupported, + * + * pipeline: Pipeline.WebGL2, + * }); + * await blurBackground.loadModel(); + * + * const track = await createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * + * // Any frame rate above 24 fps on desktop browsers increase CPU + * // usage without noticeable increase in quality. + * frameRate: 24 + * }); + * track.addProcessor(virtualBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * })(); + * ``` + */ +export declare class GaussianBlurBackgroundProcessor extends BackgroundProcessor { + private _blurFilterRadius; + private readonly _name; + /** + * Construct a GaussianBlurBackgroundProcessor. Default values will be used for + * any missing properties in [[GaussianBlurBackgroundProcessorOptions]], and + * invalid properties will be ignored. + */ + constructor(options: GaussianBlurBackgroundProcessorOptions); + /** + * The current background blur filter radius in pixels. + */ + get blurFilterRadius(): number; + /** + * Set a new background blur filter radius in pixels. 
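+ * For example (illustrative usage; a falsy radius falls back to the default of 15):
+ *
+ * ```ts
+ * blurBackground.blurFilterRadius = 25; // strengthen the background blur at runtime
+ * ```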
+ */ + set blurFilterRadius(radius: number); + protected _getWebGL2PipelineType(): WebGL2PipelineType; + protected _setBackground(inputFrame: OffscreenCanvas | HTMLCanvasElement): void; +} diff --git a/es5/processors/background/GaussianBlurBackgroundProcessor.js b/es5/processors/background/GaussianBlurBackgroundProcessor.js new file mode 100644 index 0000000..bbe751e --- /dev/null +++ b/es5/processors/background/GaussianBlurBackgroundProcessor.js @@ -0,0 +1,120 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GaussianBlurBackgroundProcessor = void 0; +var BackgroundProcessor_1 = require("./BackgroundProcessor"); +var constants_1 = require("../../constants"); +var types_1 = require("../../types"); +/** + * The GaussianBlurBackgroundProcessor, when added to a VideoTrack, + * applies a gaussian blur filter on the background in each video frame + * and leaves the foreground (person(s)) untouched. Each instance of + * GaussianBlurBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. + * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors'; + * import { simd } from 'wasm-feature-detect'; + * + * let blurBackground: GaussianBlurBackgroundProcessor; + * + * (async() => { + * const isWasmSimdSupported = await simd(); + * + * blurBackground = new GaussianBlurBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * + * // Enable debounce only if the browser does not support + * // WASM SIMD in order to retain an acceptable frame rate. + * debounce: !isWasmSimdSupported, + * + * pipeline: Pipeline.WebGL2, + * }); + * await blurBackground.loadModel(); + * + * const track = await createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * + * // Any frame rate above 24 fps on desktop browsers increase CPU + * // usage without noticeable increase in quality. + * frameRate: 24 + * }); + * track.addProcessor(virtualBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * })(); + * ``` + */ +var GaussianBlurBackgroundProcessor = /** @class */ (function (_super) { + __extends(GaussianBlurBackgroundProcessor, _super); + /** + * Construct a GaussianBlurBackgroundProcessor. Default values will be used for + * any missing properties in [[GaussianBlurBackgroundProcessorOptions]], and + * invalid properties will be ignored. 
+ */ + function GaussianBlurBackgroundProcessor(options) { + var _this = _super.call(this, options) || this; + _this._blurFilterRadius = constants_1.BLUR_FILTER_RADIUS; + // tslint:disable-next-line no-unused-variable + _this._name = 'GaussianBlurBackgroundProcessor'; + _this.blurFilterRadius = options.blurFilterRadius; + return _this; + } + Object.defineProperty(GaussianBlurBackgroundProcessor.prototype, "blurFilterRadius", { + /** + * The current background blur filter radius in pixels. + */ + get: function () { + return this._blurFilterRadius; + }, + /** + * Set a new background blur filter radius in pixels. + */ + set: function (radius) { + if (!radius) { + console.warn("Valid blur filter radius not found. Using ".concat(constants_1.BLUR_FILTER_RADIUS, " as default.")); + radius = constants_1.BLUR_FILTER_RADIUS; + } + this._blurFilterRadius = radius; + }, + enumerable: false, + configurable: true + }); + GaussianBlurBackgroundProcessor.prototype._getWebGL2PipelineType = function () { + return types_1.WebGL2PipelineType.Blur; + }; + GaussianBlurBackgroundProcessor.prototype._setBackground = function (inputFrame) { + if (!this._outputContext) { + return; + } + var ctx = this._outputContext; + ctx.filter = "blur(".concat(this._blurFilterRadius, "px)"); + ctx.drawImage(inputFrame, 0, 0); + }; + return GaussianBlurBackgroundProcessor; +}(BackgroundProcessor_1.BackgroundProcessor)); +exports.GaussianBlurBackgroundProcessor = GaussianBlurBackgroundProcessor; +//# sourceMappingURL=GaussianBlurBackgroundProcessor.js.map \ No newline at end of file diff --git a/es5/processors/background/GaussianBlurBackgroundProcessor.js.map b/es5/processors/background/GaussianBlurBackgroundProcessor.js.map new file mode 100644 index 0000000..1f6f494 --- /dev/null +++ b/es5/processors/background/GaussianBlurBackgroundProcessor.js.map @@ -0,0 +1 @@ +{"version":3,"file":"GaussianBlurBackgroundProcessor.js","sourceRoot":"","sources":["../../../lib/processors/background/GaussianBlurBackgroundProcessor.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,6DAAwF;AACxF,6CAAqD;AACrD,qCAAiD;AAgBjD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgDG;AACH;IAAqD,mDAAmB;IAMtE;;;;OAIG;IACH,yCAAY,OAA+C;QAA3D,YACE,kBAAM,OAAO,CAAC,SAEf;QAZO,uBAAiB,GAAW,8BAAkB,CAAC;QACvD,8CAA8C;QAC7B,WAAK,GAAW,iCAAiC,CAAC;QASjE,KAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAiB,CAAC;;IACpD,CAAC;IAKD,sBAAI,6DAAgB;QAHpB;;WAEG;aACH;YACE,OAAO,IAAI,CAAC,iBAAiB,CAAC;QAChC,CAAC;QAED;;WAEG;aACH,UAAqB,MAAc;YACjC,IAAI,CAAC,MAAM,EAAE;gBACX,OAAO,CAAC,IAAI,CAAC,oDAA6C,8BAAkB,iBAAc,CAAC,CAAC;gBAC5F,MAAM,GAAG,8BAAkB,CAAC;aAC7B;YACD,IAAI,CAAC,iBAAiB,GAAG,MAAM,CAAC;QAClC,CAAC;;;OAXA;IAaS,gEAAsB,GAAhC;QACE,OAAO,0BAAkB,CAAC,IAAI,CAAC;IACjC,CAAC;IAES,wDAAc,GAAxB,UAAyB,UAA+C;QACtE,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE;YACxB,OAAO;SACR;QACD,IAAM,GAAG,GAAG,IAAI,CAAC,cAA0C,CAAC;QAC5D,GAAG,CAAC,MAAM,GAAG,eAAQ,IAAI,CAAC,iBAAiB,QAAK,CAAC;QACjD,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;IAClC,CAAC;IACH,sCAAC;AAAD,CAAC,AA9CD,CAAqD,yCAAmB,GA8CvE;AA9CY,0EAA+B","sourcesContent":["import { BackgroundProcessor, BackgroundProcessorOptions } from './BackgroundProcessor';\nimport { BLUR_FILTER_RADIUS } from '../../constants';\nimport { WebGL2PipelineType } from '../../types';\n\n/**\n * Options passed to [[GaussianBlurBackgroundProcessor]] constructor.\n */\nexport interface GaussianBlurBackgroundProcessorOptions extends BackgroundProcessorOptions {\n /**\n * The background blur filter radius to use in pixels.\n * @default\n * ```html\n * 15\n * ```\n */\n blurFilterRadius?: 
number;\n}\n\n/**\n * The GaussianBlurBackgroundProcessor, when added to a VideoTrack,\n * applies a gaussian blur filter on the background in each video frame\n * and leaves the foreground (person(s)) untouched. Each instance of\n * GaussianBlurBackgroundProcessor should be added to only one VideoTrack\n * at a time to prevent overlapping of image data from multiple VideoTracks.\n *\n * @example\n *\n * ```ts\n * import { createLocalVideoTrack } from 'twilio-video';\n * import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors';\n * import { simd } from 'wasm-feature-detect';\n *\n * let blurBackground: GaussianBlurBackgroundProcessor;\n *\n * (async() => {\n * const isWasmSimdSupported = await simd();\n *\n * blurBackground = new GaussianBlurBackgroundProcessor({\n * assetsPath: 'https://my-server-path/assets',\n *\n * // Enable debounce only if the browser does not support\n * // WASM SIMD in order to retain an acceptable frame rate.\n * debounce: !isWasmSimdSupported,\n *\n * pipeline: Pipeline.WebGL2,\n * });\n * await blurBackground.loadModel();\n *\n * const track = await createLocalVideoTrack({\n * // Increasing the capture resolution decreases the output FPS\n * // especially on browsers that do not support SIMD\n * // such as desktop Safari and iOS browsers, or on Chrome\n * // with capture resolutions above 640x480 for webgl2.\n * width: 640,\n * height: 480,\n *\n * // Any frame rate above 24 fps on desktop browsers increase CPU\n * // usage without noticeable increase in quality.\n * frameRate: 24\n * });\n * track.addProcessor(virtualBackground, {\n * inputFrameBufferType: 'video',\n * outputFrameBufferContextType: 'webgl2',\n * });\n * })();\n * ```\n */\nexport class GaussianBlurBackgroundProcessor extends BackgroundProcessor {\n\n private _blurFilterRadius: number = BLUR_FILTER_RADIUS;\n // tslint:disable-next-line no-unused-variable\n private readonly _name: string = 'GaussianBlurBackgroundProcessor';\n\n /**\n * Construct a GaussianBlurBackgroundProcessor. Default values will be used for\n * any missing properties in [[GaussianBlurBackgroundProcessorOptions]], and\n * invalid properties will be ignored.\n */\n constructor(options: GaussianBlurBackgroundProcessorOptions) {\n super(options);\n this.blurFilterRadius = options.blurFilterRadius!;\n }\n\n /**\n * The current background blur filter radius in pixels.\n */\n get blurFilterRadius(): number {\n return this._blurFilterRadius;\n }\n\n /**\n * Set a new background blur filter radius in pixels.\n */\n set blurFilterRadius(radius: number) {\n if (!radius) {\n console.warn(`Valid blur filter radius not found. 
Using ${BLUR_FILTER_RADIUS} as default.`);\n      radius = BLUR_FILTER_RADIUS;\n    }\n    this._blurFilterRadius = radius;\n  }\n\n  protected _getWebGL2PipelineType(): WebGL2PipelineType {\n    return WebGL2PipelineType.Blur;\n  }\n\n  protected _setBackground(inputFrame: OffscreenCanvas | HTMLCanvasElement): void {\n    if (!this._outputContext) {\n      return;\n    }\n    const ctx = this._outputContext as CanvasRenderingContext2D;\n    ctx.filter = `blur(${this._blurFilterRadius}px)`;\n    ctx.drawImage(inputFrame, 0, 0);\n  }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/background/VirtualBackgroundProcessor.d.ts b/es5/processors/background/VirtualBackgroundProcessor.d.ts new file mode 100644 index 0000000..e239172 --- /dev/null +++ b/es5/processors/background/VirtualBackgroundProcessor.d.ts @@ -0,0 +1,108 @@ +import { BackgroundProcessor, BackgroundProcessorOptions } from './BackgroundProcessor'; +import { ImageFit, WebGL2PipelineType } from '../../types'; +/** + * Options passed to [[VirtualBackgroundProcessor]] constructor. + */ +export interface VirtualBackgroundProcessorOptions extends BackgroundProcessorOptions { + /** + * The HTMLImageElement to use for background replacement. + * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow + * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image) + * when loading the image from a different origin. Failing to do so will result in an empty output frame. + */ + backgroundImage: HTMLImageElement; + /** + * The [[ImageFit]] to use for positioning the background image in the viewport. Only the Canvas2D [[Pipeline]] + * supports this option. WebGL2 ignores this option and falls back to Cover. + * @default + * ```html + * 'Fill' + * ``` + */ + fitType?: ImageFit; +} +/** + * The VirtualBackgroundProcessor, when added to a VideoTrack, + * replaces the background in each video frame with a given image, + * and leaves the foreground (person(s)) untouched. Each instance of + * VirtualBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. + * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors'; + * import { simd } from 'wasm-feature-detect'; + * + * let virtualBackground: VirtualBackgroundProcessor; + * const img = new Image(); + * + * img.onload = async () => { + * const isWasmSimdSupported = await simd(); + * + * virtualBackground = new VirtualBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * backgroundImage: img, + * + * // Enable debounce only if the browser does not support + * // WASM SIMD in order to retain an acceptable frame rate. + * debounce: !isWasmSimdSupported, + * + * pipeline: Pipeline.WebGL2, + * }); + * await virtualBackground.loadModel(); + * + * const track = await createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS, + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * + * // Any frame rate above 24 fps on desktop browsers increases CPU + * // usage without a noticeable increase in quality.
+ * frameRate: 24 + * }); + * track.addProcessor(virtualBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * }; + * + * img.src = '/background.jpg'; + * ``` + */ +export declare class VirtualBackgroundProcessor extends BackgroundProcessor { + private _fitType; + private readonly _name; + /** + * Construct a VirtualBackgroundProcessor. Default values will be used for + * any missing optional properties in [[VirtualBackgroundProcessorOptions]], + * and invalid properties will be ignored. + */ + constructor(options: VirtualBackgroundProcessorOptions); + /** + * The HTMLImageElement representing the current background image. + */ + get backgroundImage(): HTMLImageElement; + /** + * Set an HTMLImageElement as the new background image. + * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow + * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image) + * when loading the image from a different origin. Failing to do so will result in an empty output frame. + */ + set backgroundImage(image: HTMLImageElement); + /** + * The current [[ImageFit]] for positioning the background image in the viewport. + */ + get fitType(): ImageFit; + /** + * Set a new [[ImageFit]] to be used for positioning the background image in the viewport. + */ + set fitType(fitType: ImageFit); + protected _getWebGL2PipelineType(): WebGL2PipelineType; + protected _setBackground(): void; + private _getFitPosition; +} diff --git a/es5/processors/background/VirtualBackgroundProcessor.js b/es5/processors/background/VirtualBackgroundProcessor.js new file mode 100644 index 0000000..48b4de8 --- /dev/null +++ b/es5/processors/background/VirtualBackgroundProcessor.js @@ -0,0 +1,190 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VirtualBackgroundProcessor = void 0; +var BackgroundProcessor_1 = require("./BackgroundProcessor"); +var types_1 = require("../../types"); +/** + * The VirtualBackgroundProcessor, when added to a VideoTrack, + * replaces the background in each video frame with a given image, + * and leaves the foreground (person(s)) untouched. Each instance of + * VirtualBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks.
+ * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors'; + * import { simd } from 'wasm-feature-detect'; + * + * let virtualBackground: VirtualBackgroundProcessor; + * const img = new Image(); + * + * img.onload = async () => { + * const isWasmSimdSupported = await simd(); + * + * virtualBackground = new VirtualBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * backgroundImage: img, + * + * // Enable debounce only if the browser does not support + * // WASM SIMD in order to retain an acceptable frame rate. + * debounce: !isWasmSimdSupported, + * + * pipeline: Pipeline.WebGL2, + * }); + * await virtualBackground.loadModel(); + * + * const track = await createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS, + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * + * // Any frame rate above 24 fps on desktop browsers increases CPU + * // usage without a noticeable increase in quality. + * frameRate: 24 + * }); + * track.addProcessor(virtualBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * }; + * + * img.src = '/background.jpg'; + * ``` + */ +var VirtualBackgroundProcessor = /** @class */ (function (_super) { + __extends(VirtualBackgroundProcessor, _super); + /** + * Construct a VirtualBackgroundProcessor. Default values will be used for + * any missing optional properties in [[VirtualBackgroundProcessorOptions]], + * and invalid properties will be ignored. + */ + function VirtualBackgroundProcessor(options) { + var _this = _super.call(this, options) || this; + // tslint:disable-next-line no-unused-variable + _this._name = 'VirtualBackgroundProcessor'; + _this.backgroundImage = options.backgroundImage; + _this.fitType = options.fitType; + return _this; + } + Object.defineProperty(VirtualBackgroundProcessor.prototype, "backgroundImage", { + /** + * The HTMLImageElement representing the current background image. + */ + get: function () { + return this._backgroundImage; + }, + /** + * Set an HTMLImageElement as the new background image. + * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow + * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image) + * when loading the image from a different origin. Failing to do so will result in an empty output frame. + */ + set: function (image) { + var _a; + if (!image || !image.complete || !image.naturalHeight) { + throw new Error('Invalid image. Make sure that the image is an HTMLImageElement and has been successfully loaded'); + } + this._backgroundImage = image; + // Triggers recreation of the pipeline in the next processFrame call + (_a = this._webgl2Pipeline) === null || _a === void 0 ? void 0 : _a.cleanUp(); + this._webgl2Pipeline = null; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(VirtualBackgroundProcessor.prototype, "fitType", { + /** + * The current [[ImageFit]] for positioning the background image in the viewport. + */ + get: function () { + return this._fitType; + }, + /** + * Set a new [[ImageFit]] to be used for positioning the background image in the viewport.
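+ *
+ * A minimal sketch (reusing `virtualBackground` from the example above; any value
+ * outside of the [[ImageFit]] keys logs a warning and falls back to 'Fill'):
+ *
+ * ```ts
+ * virtualBackground.fitType = ImageFit.Contain; // center and letterbox the image
+ * ```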
+ */ + set: function (fitType) { + var validTypes = Object.keys(types_1.ImageFit); + if (!validTypes.includes(fitType)) { + console.warn("Valid fitType not found. Using '".concat(types_1.ImageFit.Fill, "' as default.")); + fitType = types_1.ImageFit.Fill; + } + this._fitType = fitType; + }, + enumerable: false, + configurable: true + }); + VirtualBackgroundProcessor.prototype._getWebGL2PipelineType = function () { + return types_1.WebGL2PipelineType.Image; + }; + VirtualBackgroundProcessor.prototype._setBackground = function () { + if (!this._outputContext || !this._outputCanvas) { + return; + } + var img = this._backgroundImage; + var imageWidth = img.naturalWidth; + var imageHeight = img.naturalHeight; + var canvasWidth = this._outputCanvas.width; + var canvasHeight = this._outputCanvas.height; + var ctx = this._outputContext; + if (this._fitType === types_1.ImageFit.Fill) { + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, 0, 0, canvasWidth, canvasHeight); + } + else if (this._fitType === types_1.ImageFit.None) { + ctx.drawImage(img, 0, 0, imageWidth, imageHeight); + } + else if (this._fitType === types_1.ImageFit.Contain) { + var _a = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, types_1.ImageFit.Contain), x = _a.x, y = _a.y, w = _a.w, h = _a.h; + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h); + } + else if (this._fitType === types_1.ImageFit.Cover) { + var _b = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, types_1.ImageFit.Cover), x = _b.x, y = _b.y, w = _b.w, h = _b.h; + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h); + } + }; + VirtualBackgroundProcessor.prototype._getFitPosition = function (contentWidth, contentHeight, viewportWidth, viewportHeight, type) { + // Calculate new content width to fit viewport width + var factor = viewportWidth / contentWidth; + var newContentWidth = viewportWidth; + var newContentHeight = factor * contentHeight; + // Scale down the resulting height and width more + // to fit viewport height if the content still exceeds it + if ((type === types_1.ImageFit.Contain && newContentHeight > viewportHeight) + || (type === types_1.ImageFit.Cover && viewportHeight > newContentHeight)) { + factor = viewportHeight / newContentHeight; + newContentWidth = factor * newContentWidth; + newContentHeight = viewportHeight; + } + // Calculate the destination top left corner to center the content + var x = (viewportWidth - newContentWidth) / 2; + var y = (viewportHeight - newContentHeight) / 2; + return { + x: x, + y: y, + w: newContentWidth, + h: newContentHeight, + }; + }; + return VirtualBackgroundProcessor; +}(BackgroundProcessor_1.BackgroundProcessor)); +exports.VirtualBackgroundProcessor = VirtualBackgroundProcessor; +//# sourceMappingURL=VirtualBackgroundProcessor.js.map \ No newline at end of file diff --git a/es5/processors/background/VirtualBackgroundProcessor.js.map b/es5/processors/background/VirtualBackgroundProcessor.js.map new file mode 100644 index 0000000..15867b3 --- /dev/null +++ b/es5/processors/background/VirtualBackgroundProcessor.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"VirtualBackgroundProcessor.js","sourceRoot":"","sources":["../../../lib/processors/background/VirtualBackgroundProcessor.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,6DAAwF;AACxF,qCAA2D;AAyB3D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAoDG;AACH;IAAgD,8CAAmB;IAMjE;;;;OAIG;IACH,oCAAY,OAA0C;QAAtD,YACE,kBAAM,OAAO,CAAC,SAGf;QAZD,8CAA8C;QAC7B,WAAK,GAAW,4BAA4B,CAAC;QAS5D,KAAI,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;QAC/C,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAQ,CAAC;;IAClC,CAAC;IAKD,sBAAI,uDAAe;QAHnB;;WAEG;aACH;YACE,OAAO,IAAI,CAAC,gBAAiB,CAAC;QAChC,CAAC;QAED;;;;;WAKG;aACH,UAAoB,KAAuB;;YACzC,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,QAAQ,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE;gBACrD,MAAM,IAAI,KAAK,CAAC,iGAAiG,CAAC,CAAC;aACpH;YACD,IAAI,CAAC,gBAAgB,GAAG,KAAK,CAAC;YAE9B,oEAAoE;YACpE,MAAA,IAAI,CAAC,eAAe,0CAAE,OAAO,EAAE,CAAC;YAChC,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC;QAC9B,CAAC;;;OAjBA;IAsBD,sBAAI,+CAAO;QAHX;;WAEG;aACH;YACE,OAAO,IAAI,CAAC,QAAQ,CAAC;QACvB,CAAC;QAED;;WAEG;aACH,UAAY,OAAiB;YAC3B,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,gBAAQ,CAAC,CAAC;YACzC,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,OAAc,CAAC,EAAE;gBACxC,OAAO,CAAC,IAAI,CAAC,0CAAmC,gBAAQ,CAAC,IAAI,kBAAe,CAAC,CAAC;gBAC9E,OAAO,GAAG,gBAAQ,CAAC,IAAI,CAAC;aACzB;YACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;QAC1B,CAAC;;;OAZA;IAcS,2DAAsB,GAAhC;QACE,OAAO,0BAAkB,CAAC,KAAK,CAAC;IAClC,CAAC;IAES,mDAAc,GAAxB;QACE,IAAI,CAAC,IAAI,CAAC,cAAc,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;YAC/C,OAAO;SACR;QACD,IAAM,GAAG,GAAG,IAAI,CAAC,gBAAiB,CAAC;QACnC,IAAM,UAAU,GAAG,GAAG,CAAC,YAAY,CAAC;QACpC,IAAM,WAAW,GAAG,GAAG,CAAC,aAAa,CAAC;QACtC,IAAM,WAAW,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;QAC7C,IAAM,YAAY,GAAG,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC;QAC/C,IAAM,GAAG,GAAG,IAAI,CAAC,cAA0C,CAAC;QAE5D,IAAI,IAAI,CAAC,QAAQ,KAAK,gBAAQ,CAAC,IAAI,EAAE;YACnC,GAAG,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,UAAU,EAAE,WAAW,EAAE,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;SACpF;aAAM,IAAI,IAAI,CAAC,QAAQ,KAAK,gBAAQ,CAAC,IAAI,EAAE;YAC1C,GAAG,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,UAAU,EAAE,WAAW,CAAC,CAAC;SACnD;aAAM,IAAI,IAAI,CAAC,QAAQ,KAAK,gBAAQ,CAAC,OAAO,EAAE;YACvC,IAAA,KAAiB,IAAI,CAAC,eAAe,CAAC,UAAU,EAAE,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,gBAAQ,CAAC,OAAO,CAAC,EAAzG,CAAC,OAAA,EAAE,CAAC,OAAA,EAAE,CAAC,OAAA,EAAE,CAAC,OAA+F,CAAC;YAClH,GAAG,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,UAAU,EAAE,WAAW,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;SAC/D;aAAM,IAAI,IAAI,CAAC,QAAQ,KAAK,gBAAQ,CAAC,KAAK,EAAE;YACrC,IAAA,KAAiB,IAAI,CAAC,eAAe,CAAC,UAAU,EAAE,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,gBAAQ,CAAC,KAAK,CAAC,EAAvG,CAAC,OAAA,EAAE,CAAC,OAAA,EAAE,CAAC,OAAA,EAAE,CAAC,OAA6F,CAAC;YAChH,GAAG,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,UAAU,EAAE,WAAW,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;SAC/D;IACH,CAAC;IAEO,oDAAe,GAAvB,UAAwB,YAAoB,EAAE,aAAqB,EACjE,aAAqB,EAAE,cAAsB,EAAE,IAAc;QAG7D,oDAAoD;QACpD,IAAI,MAAM,GAAG,aAAa,GAAG,YAAY,CAAC;QAC1C,IAAI,eAAe,GAAG,aAAa,CAAC;QACpC,IAAI,gBAAgB,GAAG,MAAM,GAAG,aAAa,CAAC;QAE9C,iDAAiD;QACjD,yDAAyD;QACzD,IAAI,CAAC,IAAI,KAAK,gBAAQ,CAAC,OAAO,IAAI,gBAAgB,GAAG,cAAc,CAAC;eAC/D,CAAC,IAAI,KAAK,gBAAQ,CAAC,KAAK,IAAI,cAAc,GAAG,gBAAgB,CAAC,EAAE;YACnE,MAAM,GAAG,cAAc,GAAG,gBAAgB,CAAC;YAC3C,eAAe,GAAG,MAAM,GAAG,eAAe,CAAC;YAC3C,gBAAgB,GAAG,cAAc,CAAC;SACnC;QAED,kEAAkE;QAClE,IAAM,CAAC,GAAG,CAAC,aAAa,GAAG,eAAe,CAAC,GAAG,CAAC,CAAC;QAChD,IAAM,CAAC,GAAG,CAAC,cAAc,GAAG,gBAAgB,CAAC,GAAG,CAAC,CAAC;QAElD,OAAO;YACL,CAAC,GAAA;YAAE,CAAC,GAAA;YACJ,CAAC,EAAE,eAAe;YAClB,CAAC,EAAE,gBAAgB;SACpB,CAAC;IACJ,CAAC;IACH,iCAAC;AAAD,CAAC,AApHD,CAAgD,yCAAmB,GAoHlE;AApHY,gEAA0B","sourcesContent":["import { BackgroundProcessor, BackgroundProcessorOptions } 
from './BackgroundProcessor';\nimport { ImageFit, WebGL2PipelineType } from '../../types';\n\n/**\n * Options passed to [[VirtualBackgroundProcessor]] constructor.\n */\nexport interface VirtualBackgroundProcessorOptions extends BackgroundProcessorOptions {\n /**\n * The HTMLImageElement to use for background replacement.\n * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow\n * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image)\n * when loading the image from a different origin. Failing to do so will result to an empty output frame.\n */\n backgroundImage: HTMLImageElement;\n\n /**\n * The [[ImageFit]] to use for positioning of the background image in the viewport. Only the Canvas2D [[Pipeline]]\n * supports this option. WebGL2 ignores this option and falls back to Cover.\n * @default\n * ```html\n * 'Fill'\n * ```\n */\n fitType?: ImageFit;\n}\n\n/**\n * The VirtualBackgroundProcessor, when added to a VideoTrack,\n * replaces the background in each video frame with a given image,\n * and leaves the foreground (person(s)) untouched. Each instance of\n * VirtualBackgroundProcessor should be added to only one VideoTrack\n * at a time to prevent overlapping of image data from multiple VideoTracks.\n *\n * @example\n *\n * ```ts\n * import { createLocalVideoTrack } from 'twilio-video';\n * import { Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors';\n * import { simd } from 'wasm-feature-detect';\n *\n * let virtualBackground: VirtualBackgroundProcessor;\n * const img = new Image();\n *\n * img.onload = async () => {\n * const isWasmSimdSupported = await simd();\n *\n * virtualBackground = new VirtualBackgroundProcessor({\n * assetsPath: 'https://my-server-path/assets',\n * backgroundImage: img,\n *\n * // Enable debounce only if the browser does not support\n * // WASM SIMD in order to retain an acceptable frame rate.\n * debounce: !isWasmSimdSupported,\n *\n * pipeline: Pipeline.WebGL2,\n * });\n * await virtualBackground.loadModel();\n *\n * const track = await createLocalVideoTrack({\n * // Increasing the capture resolution decreases the output FPS\n * // especially on browsers that do not support SIMD\n * // such as desktop Safari and iOS browsers, or on Chrome\n * // with capture resolutions above 640x480 for webgl2.\n * width: 640,\n * height: 480,\n *\n * // Any frame rate above 24 fps on desktop browsers increase CPU\n * // usage without noticeable increase in quality.\n * frameRate: 24\n * });\n * track.addProcessor(virtualBackground, {\n * inputFrameBufferType: 'video',\n * outputFrameBufferContextType: 'webgl2',\n * });\n * };\n *\n * img.src = '/background.jpg';\n * ```\n */\nexport class VirtualBackgroundProcessor extends BackgroundProcessor {\n\n private _fitType!: ImageFit;\n // tslint:disable-next-line no-unused-variable\n private readonly _name: string = 'VirtualBackgroundProcessor';\n\n /**\n * Construct a VirtualBackgroundProcessor. 
Default values will be used for\n * any missing optional properties in [[VirtualBackgroundProcessorOptions]],\n * and invalid properties will be ignored.\n */\n constructor(options: VirtualBackgroundProcessorOptions) {\n super(options);\n this.backgroundImage = options.backgroundImage;\n this.fitType = options.fitType!;\n }\n\n /**\n * The HTMLImageElement representing the current background image.\n */\n get backgroundImage(): HTMLImageElement {\n return this._backgroundImage!;\n }\n\n /**\n * Set an HTMLImageElement as the new background image.\n * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow\n * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image)\n * when loading the image from a different origin. Failing to do so will result to an empty output frame.\n */\n set backgroundImage(image: HTMLImageElement) {\n if (!image || !image.complete || !image.naturalHeight) {\n throw new Error('Invalid image. Make sure that the image is an HTMLImageElement and has been successfully loaded');\n }\n this._backgroundImage = image;\n\n // Triggers recreation of the pipeline in the next processFrame call\n this._webgl2Pipeline?.cleanUp();\n this._webgl2Pipeline = null;\n }\n\n /**\n * The current [[ImageFit]] for positioning of the background image in the viewport.\n */\n get fitType(): ImageFit {\n return this._fitType;\n }\n\n /**\n * Set a new [[ImageFit]] to be used for positioning the background image in the viewport.\n */\n set fitType(fitType: ImageFit) {\n const validTypes = Object.keys(ImageFit);\n if (!validTypes.includes(fitType as any)) {\n console.warn(`Valid fitType not found. Using '${ImageFit.Fill}' as default.`);\n fitType = ImageFit.Fill;\n }\n this._fitType = fitType;\n }\n\n protected _getWebGL2PipelineType(): WebGL2PipelineType {\n return WebGL2PipelineType.Image;\n }\n\n protected _setBackground(): void {\n if (!this._outputContext || !this._outputCanvas) {\n return;\n }\n const img = this._backgroundImage!;\n const imageWidth = img.naturalWidth;\n const imageHeight = img.naturalHeight;\n const canvasWidth = this._outputCanvas.width;\n const canvasHeight = this._outputCanvas.height;\n const ctx = this._outputContext as CanvasRenderingContext2D;\n\n if (this._fitType === ImageFit.Fill) {\n ctx.drawImage(img, 0, 0, imageWidth, imageHeight, 0, 0, canvasWidth, canvasHeight);\n } else if (this._fitType === ImageFit.None) {\n ctx.drawImage(img, 0, 0, imageWidth, imageHeight);\n } else if (this._fitType === ImageFit.Contain) {\n const { x, y, w, h } = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, ImageFit.Contain);\n ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h);\n } else if (this._fitType === ImageFit.Cover) {\n const { x, y, w, h } = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, ImageFit.Cover);\n ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h);\n }\n }\n\n private _getFitPosition(contentWidth: number, contentHeight: number,\n viewportWidth: number, viewportHeight: number, type: ImageFit)\n : { h: number, w: number, x: number, y: number } {\n\n // Calculate new content width to fit viewport width\n let factor = viewportWidth / contentWidth;\n let newContentWidth = viewportWidth;\n let newContentHeight = factor * contentHeight;\n\n // Scale down the resulting height and width more\n // to fit viewport height if the content still exceeds it\n if ((type === ImageFit.Contain && newContentHeight > viewportHeight)\n 
|| (type === ImageFit.Cover && viewportHeight > newContentHeight)) {\n factor = viewportHeight / newContentHeight;\n newContentWidth = factor * newContentWidth;\n newContentHeight = viewportHeight;\n }\n\n // Calculate the destination top left corner to center the content\n const x = (viewportWidth - newContentWidth) / 2;\n const y = (viewportHeight - newContentHeight) / 2;\n\n return {\n x, y,\n w: newContentWidth,\n h: newContentHeight,\n };\n }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/backgroundHelper.d.ts b/es5/processors/webgl2/helpers/backgroundHelper.d.ts new file mode 100644 index 0000000..46a426f --- /dev/null +++ b/es5/processors/webgl2/helpers/backgroundHelper.d.ts @@ -0,0 +1,4 @@ +export type BackgroundConfig = { + type: 'none' | 'blur' | 'image'; + url?: string; +}; diff --git a/es5/processors/webgl2/helpers/backgroundHelper.js b/es5/processors/webgl2/helpers/backgroundHelper.js new file mode 100644 index 0000000..cc5ab03 --- /dev/null +++ b/es5/processors/webgl2/helpers/backgroundHelper.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=backgroundHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/backgroundHelper.js.map b/es5/processors/webgl2/helpers/backgroundHelper.js.map new file mode 100644 index 0000000..f7a7337 --- /dev/null +++ b/es5/processors/webgl2/helpers/backgroundHelper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"backgroundHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/backgroundHelper.ts"],"names":[],"mappings":"","sourcesContent":["export type BackgroundConfig = {\n type: 'none' | 'blur' | 'image'\n url?: string\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/postProcessingHelper.d.ts b/es5/processors/webgl2/helpers/postProcessingHelper.d.ts new file mode 100644 index 0000000..6720cfe --- /dev/null +++ b/es5/processors/webgl2/helpers/postProcessingHelper.d.ts @@ -0,0 +1,11 @@ +export type BlendMode = 'screen' | 'linearDodge'; +export type PostProcessingConfig = { + jointBilateralFilter?: JointBilateralFilterConfig; + coverage?: [number, number]; + lightWrapping?: number; + blendMode?: BlendMode; +}; +export type JointBilateralFilterConfig = { + sigmaSpace?: number; + sigmaColor?: number; +}; diff --git a/es5/processors/webgl2/helpers/postProcessingHelper.js b/es5/processors/webgl2/helpers/postProcessingHelper.js new file mode 100644 index 0000000..bf9a7db --- /dev/null +++ b/es5/processors/webgl2/helpers/postProcessingHelper.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=postProcessingHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/postProcessingHelper.js.map b/es5/processors/webgl2/helpers/postProcessingHelper.js.map new file mode 100644 index 0000000..5a0f048 --- /dev/null +++ b/es5/processors/webgl2/helpers/postProcessingHelper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"postProcessingHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/postProcessingHelper.ts"],"names":[],"mappings":"","sourcesContent":["export type BlendMode = 'screen' | 'linearDodge'\n\nexport type PostProcessingConfig = {\n jointBilateralFilter?: JointBilateralFilterConfig\n coverage?: [number, number]\n lightWrapping?: number\n blendMode?: BlendMode\n}\n\nexport type JointBilateralFilterConfig = {\n sigmaSpace?: number\n sigmaColor?: number\n}\n"]} \ 
No newline at end of file diff --git a/es5/processors/webgl2/helpers/segmentationHelper.d.ts b/es5/processors/webgl2/helpers/segmentationHelper.d.ts new file mode 100644 index 0000000..01bdece --- /dev/null +++ b/es5/processors/webgl2/helpers/segmentationHelper.d.ts @@ -0,0 +1,7 @@ +export type InputResolution = '640x360' | '256x256' | '256x144' | '160x96' | string; +export declare const inputResolutions: { + [resolution in InputResolution]: [number, number]; +}; +export type SegmentationConfig = { + inputResolution: InputResolution; +}; diff --git a/es5/processors/webgl2/helpers/segmentationHelper.js b/es5/processors/webgl2/helpers/segmentationHelper.js new file mode 100644 index 0000000..a150b9e --- /dev/null +++ b/es5/processors/webgl2/helpers/segmentationHelper.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.inputResolutions = void 0; +exports.inputResolutions = { + '640x360': [640, 360], + '256x256': [256, 256], + '256x144': [256, 144], + '160x96': [160, 96], +}; +//# sourceMappingURL=segmentationHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/segmentationHelper.js.map b/es5/processors/webgl2/helpers/segmentationHelper.js.map new file mode 100644 index 0000000..e612a03 --- /dev/null +++ b/es5/processors/webgl2/helpers/segmentationHelper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"segmentationHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/segmentationHelper.ts"],"names":[],"mappings":";;;AAEa,QAAA,gBAAgB,GAEzB;IACF,SAAS,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC;IACrB,SAAS,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC;IACrB,SAAS,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC;IACrB,QAAQ,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC;CACpB,CAAA","sourcesContent":["export type InputResolution = '640x360' | '256x256' | '256x144' | '160x96' | string\n\nexport const inputResolutions: {\n [resolution in InputResolution]: [number, number]\n} = {\n '640x360': [640, 360],\n '256x256': [256, 256],\n '256x144': [256, 144],\n '160x96': [160, 96],\n}\n\nexport type SegmentationConfig = {\n inputResolution: InputResolution\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/sourceHelper.d.ts b/es5/processors/webgl2/helpers/sourceHelper.d.ts new file mode 100644 index 0000000..d673899 --- /dev/null +++ b/es5/processors/webgl2/helpers/sourceHelper.d.ts @@ -0,0 +1,5 @@ +export type SourcePlayback = { + htmlElement: HTMLImageElement | HTMLVideoElement; + width: number; + height: number; +}; diff --git a/es5/processors/webgl2/helpers/sourceHelper.js b/es5/processors/webgl2/helpers/sourceHelper.js new file mode 100644 index 0000000..747a6bd --- /dev/null +++ b/es5/processors/webgl2/helpers/sourceHelper.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=sourceHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/sourceHelper.js.map b/es5/processors/webgl2/helpers/sourceHelper.js.map new file mode 100644 index 0000000..9b3f217 --- /dev/null +++ b/es5/processors/webgl2/helpers/sourceHelper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"sourceHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/sourceHelper.ts"],"names":[],"mappings":"","sourcesContent":["export type SourcePlayback = {\n htmlElement: HTMLImageElement | HTMLVideoElement\n width: number\n height: number\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/webglHelper.d.ts 
b/es5/processors/webgl2/helpers/webglHelper.d.ts new file mode 100644 index 0000000..2721c87 --- /dev/null +++ b/es5/processors/webgl2/helpers/webglHelper.d.ts @@ -0,0 +1,16 @@ +/** + * Use it along with the boyswan.glsl-literal VSCode extension + * to get GLSL syntax highlighting. + * https://marketplace.visualstudio.com/items?itemName=boyswan.glsl-literal + * + * On VSCode OSS, boyswan.glsl-literal requires the slevesque.shader extension + * to be installed as well. + * https://marketplace.visualstudio.com/items?itemName=slevesque.shader + */ +export declare const glsl: (template: { + raw: ArrayLike<string> | readonly string[]; +}, ...substitutions: any[]) => string; +export declare function createPiplelineStageProgram(gl: WebGL2RenderingContext, vertexShader: WebGLShader, fragmentShader: WebGLShader, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer): WebGLProgram; +export declare function createProgram(gl: WebGL2RenderingContext, vertexShader: WebGLShader, fragmentShader: WebGLShader): WebGLProgram; +export declare function compileShader(gl: WebGL2RenderingContext, shaderType: number, shaderSource: string): WebGLShader; +export declare function createTexture(gl: WebGL2RenderingContext, internalformat: number, width: number, height: number, minFilter?: GLint, magFilter?: GLint): WebGLTexture | null; diff --git a/es5/processors/webgl2/helpers/webglHelper.js b/es5/processors/webgl2/helpers/webglHelper.js new file mode 100644 index 0000000..c8a0728 --- /dev/null +++ b/es5/processors/webgl2/helpers/webglHelper.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createTexture = exports.compileShader = exports.createProgram = exports.createPiplelineStageProgram = exports.glsl = void 0; +/** + * Use it along with the boyswan.glsl-literal VSCode extension + * to get GLSL syntax highlighting. + * https://marketplace.visualstudio.com/items?itemName=boyswan.glsl-literal + * + * On VSCode OSS, boyswan.glsl-literal requires the slevesque.shader extension + * to be installed as well.
+ * https://marketplace.visualstudio.com/items?itemName=slevesque.shader + */ +exports.glsl = String.raw; +function createPiplelineStageProgram(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer) { + var program = createProgram(gl, vertexShader, fragmentShader); + var positionAttributeLocation = gl.getAttribLocation(program, 'a_position'); + gl.enableVertexAttribArray(positionAttributeLocation); + gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); + gl.vertexAttribPointer(positionAttributeLocation, 2, gl.FLOAT, false, 0, 0); + var texCoordAttributeLocation = gl.getAttribLocation(program, 'a_texCoord'); + gl.enableVertexAttribArray(texCoordAttributeLocation); + gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer); + gl.vertexAttribPointer(texCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0); + return program; +} +exports.createPiplelineStageProgram = createPiplelineStageProgram; +function createProgram(gl, vertexShader, fragmentShader) { + var program = gl.createProgram(); + gl.attachShader(program, vertexShader); + gl.attachShader(program, fragmentShader); + gl.linkProgram(program); + if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { + throw new Error("Could not link WebGL program: ".concat(gl.getProgramInfoLog(program))); + } + return program; +} +exports.createProgram = createProgram; +function compileShader(gl, shaderType, shaderSource) { + var shader = gl.createShader(shaderType); + gl.shaderSource(shader, shaderSource); + gl.compileShader(shader); + if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { + throw new Error("Could not compile shader: ".concat(gl.getShaderInfoLog(shader))); + } + return shader; +} +exports.compileShader = compileShader; +function createTexture(gl, internalformat, width, height, minFilter, magFilter) { + if (minFilter === void 0) { minFilter = gl.NEAREST; } + if (magFilter === void 0) { magFilter = gl.NEAREST; } + var texture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, texture); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, minFilter); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, magFilter); + gl.texStorage2D(gl.TEXTURE_2D, 1, internalformat, width, height); + return texture; +} +exports.createTexture = createTexture; +//# sourceMappingURL=webglHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/webglHelper.js.map b/es5/processors/webgl2/helpers/webglHelper.js.map new file mode 100644 index 0000000..191dc77 --- /dev/null +++ b/es5/processors/webgl2/helpers/webglHelper.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"webglHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/webglHelper.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;GAQG;AACU,QAAA,IAAI,GAAG,MAAM,CAAC,GAAG,CAAA;AAE9B,SAAgB,2BAA2B,CACzC,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,cAA2B;IAE3B,IAAM,OAAO,GAAG,aAAa,CAAC,EAAE,EAAE,YAAY,EAAE,cAAc,CAAC,CAAA;IAE/D,IAAM,yBAAyB,GAAG,EAAE,CAAC,iBAAiB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAA;IAC7E,EAAE,CAAC,uBAAuB,CAAC,yBAAyB,CAAC,CAAA;IACrD,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,YAAY,EAAE,cAAc,CAAC,CAAA;IAC9C,EAAE,CAAC,mBAAmB,CAAC,yBAAyB,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IAE3E,IAAM,yBAAyB,GAAG,EAAE,CAAC,iBAAiB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAA;IAC7E,EAAE,CAAC,uBAAuB,CAAC,yBAAyB,CAAC,CAAA;IACrD,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,YAAY,EAAE,cAAc,CAAC,CAAA;IAC9C,EAAE,CAAC,mBAAmB,CAAC,yBAAyB,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IAE3E,OAAO,OAAO,CAAA;AAChB,CAAC;AApBD,kEAoBC;AAED,SAAgB,aAAa,CAC3B,EAA0B,EAC1B,YAAyB,EACzB,cAA2B;IAE3B,IAAM,OAAO,GAAG,EAAE,CAAC,aAAa,EAAG,CAAA;IACnC,EAAE,CAAC,YAAY,CAAC,OAAO,EAAE,YAAY,CAAC,CAAA;IACtC,EAAE,CAAC,YAAY,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACxC,EAAE,CAAC,WAAW,CAAC,OAAO,CAAC,CAAA;IACvB,IAAI,CAAC,EAAE,CAAC,mBAAmB,CAAC,OAAO,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE;QACpD,MAAM,IAAI,KAAK,CACb,wCAAiC,EAAE,CAAC,iBAAiB,CAAC,OAAO,CAAC,CAAE,CACjE,CAAA;KACF;IACD,OAAO,OAAO,CAAA;AAChB,CAAC;AAfD,sCAeC;AAED,SAAgB,aAAa,CAC3B,EAA0B,EAC1B,UAAkB,EAClB,YAAoB;IAEpB,IAAM,MAAM,GAAG,EAAE,CAAC,YAAY,CAAC,UAAU,CAAE,CAAA;IAC3C,EAAE,CAAC,YAAY,CAAC,MAAM,EAAE,YAAY,CAAC,CAAA;IACrC,EAAE,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;IACxB,IAAI,CAAC,EAAE,CAAC,kBAAkB,CAAC,MAAM,EAAE,EAAE,CAAC,cAAc,CAAC,EAAE;QACrD,MAAM,IAAI,KAAK,CAAC,oCAA6B,EAAE,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAE,CAAC,CAAA;KAC5E;IACD,OAAO,MAAM,CAAA;AACf,CAAC;AAZD,sCAYC;AAED,SAAgB,aAAa,CAC3B,EAA0B,EAC1B,cAAsB,EACtB,KAAa,EACb,MAAc,EACd,SAA6B,EAC7B,SAA6B;IAD7B,0BAAA,EAAA,YAAmB,EAAE,CAAC,OAAO;IAC7B,0BAAA,EAAA,YAAmB,EAAE,CAAC,OAAO;IAE7B,IAAM,OAAO,GAAG,EAAE,CAAC,aAAa,EAAE,CAAA;IAClC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC,CAAA;IACtC,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,cAAc,EAAE,EAAE,CAAC,aAAa,CAAC,CAAA;IACpE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,cAAc,EAAE,EAAE,CAAC,aAAa,CAAC,CAAA;IACpE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,kBAAkB,EAAE,SAAS,CAAC,CAAA;IACjE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,kBAAkB,EAAE,SAAS,CAAC,CAAA;IACjE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC,UAAU,EAAE,CAAC,EAAE,cAAc,EAAE,KAAK,EAAE,MAAM,CAAC,CAAA;IAChE,OAAO,OAAO,CAAA;AAChB,CAAC;AAhBD,sCAgBC","sourcesContent":["/**\n * Use it along with boyswan.glsl-literal VSCode extension\n * to get GLSL syntax highlighting.\n * https://marketplace.visualstudio.com/items?itemName=boyswan.glsl-literal\n *\n * On VSCode OSS, boyswan.glsl-literal requires slevesque.shader extension\n * to be installed as well.\n * https://marketplace.visualstudio.com/items?itemName=slevesque.shader\n */\nexport const glsl = String.raw\n\nexport function createPiplelineStageProgram(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n fragmentShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer\n) {\n const program = createProgram(gl, vertexShader, fragmentShader)\n\n const positionAttributeLocation = gl.getAttribLocation(program, 'a_position')\n gl.enableVertexAttribArray(positionAttributeLocation)\n gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer)\n gl.vertexAttribPointer(positionAttributeLocation, 2, gl.FLOAT, false, 0, 0)\n\n const texCoordAttributeLocation = 
gl.getAttribLocation(program, 'a_texCoord')\n gl.enableVertexAttribArray(texCoordAttributeLocation)\n gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer)\n gl.vertexAttribPointer(texCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0)\n\n return program\n}\n\nexport function createProgram(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n fragmentShader: WebGLShader\n) {\n const program = gl.createProgram()!\n gl.attachShader(program, vertexShader)\n gl.attachShader(program, fragmentShader)\n gl.linkProgram(program)\n if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {\n throw new Error(\n `Could not link WebGL program: ${gl.getProgramInfoLog(program)}`\n )\n }\n return program\n}\n\nexport function compileShader(\n gl: WebGL2RenderingContext,\n shaderType: number,\n shaderSource: string\n) {\n const shader = gl.createShader(shaderType)!\n gl.shaderSource(shader, shaderSource)\n gl.compileShader(shader)\n if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {\n throw new Error(`Could not compile shader: ${gl.getShaderInfoLog(shader)}`)\n }\n return shader\n}\n\nexport function createTexture(\n gl: WebGL2RenderingContext,\n internalformat: number,\n width: number,\n height: number,\n minFilter: GLint = gl.NEAREST,\n magFilter: GLint = gl.NEAREST\n) {\n const texture = gl.createTexture()\n gl.bindTexture(gl.TEXTURE_2D, texture)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, minFilter)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, magFilter)\n gl.texStorage2D(gl.TEXTURE_2D, 1, internalformat, width, height)\n return texture\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/index.d.ts b/es5/processors/webgl2/index.d.ts new file mode 100644 index 0000000..a13dd35 --- /dev/null +++ b/es5/processors/webgl2/index.d.ts @@ -0,0 +1,8 @@ +/** + * This pipeline is based on Volcomix's react project. + * https://github.com/Volcomix/virtual-background + * It was modified and converted into a module to work with + * Twilio's Video Processor + */ +import { buildWebGL2Pipeline } from './pipelines/webgl2Pipeline'; +export { buildWebGL2Pipeline }; diff --git a/es5/processors/webgl2/index.js b/es5/processors/webgl2/index.js new file mode 100644 index 0000000..a4bf8ef --- /dev/null +++ b/es5/processors/webgl2/index.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildWebGL2Pipeline = void 0; +/** + * This pipeline is based on Volcomix's react project. 
+ * https://github.com/Volcomix/virtual-background + * It was modified and converted into a module to work with + * Twilio's Video Processor + */ +var webgl2Pipeline_1 = require("./pipelines/webgl2Pipeline"); +Object.defineProperty(exports, "buildWebGL2Pipeline", { enumerable: true, get: function () { return webgl2Pipeline_1.buildWebGL2Pipeline; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/index.js.map b/es5/processors/webgl2/index.js.map new file mode 100644 index 0000000..175f2a1 --- /dev/null +++ b/es5/processors/webgl2/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../lib/processors/webgl2/index.ts"],"names":[],"mappings":";;;AAAA;;;;;GAKG;AACH,6DAAgE;AAEvD,oGAFA,oCAAmB,OAEA","sourcesContent":["/**\n * This pipeline is based on Volcomix's react project.\n * https://github.com/Volcomix/virtual-background\n * It was modified and converted into a module to work with\n * Twilio's Video Processor\n */\nimport { buildWebGL2Pipeline } from './pipelines/webgl2Pipeline'\n\nexport { buildWebGL2Pipeline };\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/backgroundBlurStage.d.ts b/es5/processors/webgl2/pipelines/backgroundBlurStage.d.ts new file mode 100644 index 0000000..34f572a --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundBlurStage.d.ts @@ -0,0 +1,6 @@ +export type BackgroundBlurStage = { + render(): void; + updateCoverage(coverage: [number, number]): void; + cleanUp(): void; +}; +export declare function buildBackgroundBlurStage(gl: WebGL2RenderingContext, vertexShader: WebGLShader, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer, personMaskTexture: WebGLTexture, canvas: HTMLCanvasElement): BackgroundBlurStage; diff --git a/es5/processors/webgl2/pipelines/backgroundBlurStage.js b/es5/processors/webgl2/pipelines/backgroundBlurStage.js new file mode 100644 index 0000000..c5eae59 --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundBlurStage.js @@ -0,0 +1,122 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildBackgroundBlurStage = void 0; +var webglHelper_1 = require("../helpers/webglHelper"); +function buildBackgroundBlurStage(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) { + var blurPass = buildBlurPass(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas); + var blendPass = buildBlendPass(gl, positionBuffer, texCoordBuffer, canvas); + function render() { + blurPass.render(); + blendPass.render(); + } + function updateCoverage(coverage) { + blendPass.updateCoverage(coverage); + } + function cleanUp() { + blendPass.cleanUp(); + blurPass.cleanUp(); + } + return { + render: render, + updateCoverage: updateCoverage, + cleanUp: cleanUp, + }; +} +exports.buildBackgroundBlurStage = buildBackgroundBlurStage; +function buildBlurPass(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const 
float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n "]))); + var scale = 0.5; + var outputWidth = canvas.width * scale; + var outputHeight = canvas.height * scale; + var texelWidth = 1 / outputWidth; + var texelHeight = 1 / outputHeight; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize'); + var texture1 = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight, gl.NEAREST, gl.LINEAR); + var texture2 = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight, gl.NEAREST, gl.LINEAR); + var frameBuffer1 = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture1, 0); + var frameBuffer2 = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture2, 0); + gl.useProgram(program); + gl.uniform1i(personMaskLocation, 1); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, personMaskTexture); + for (var i = 0; i < 8; i++) { + gl.uniform2f(texelSizeLocation, 0, texelHeight); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1); + gl.drawArrays(gl.TRIANGLE_STRIP, 
0, 4); + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, texture1); + gl.uniform1i(inputFrameLocation, 2); + gl.uniform2f(texelSizeLocation, texelWidth, 0); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + gl.bindTexture(gl.TEXTURE_2D, texture2); + } + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer2); + gl.deleteFramebuffer(frameBuffer1); + gl.deleteTexture(texture2); + gl.deleteTexture(texture1); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { + render: render, + cleanUp: cleanUp, + }; +} +function buildBlendPass(gl, positionBuffer, texCoordBuffer, canvas) { + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_2 || (templateObject_2 = __makeTemplateObject(["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "], ["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "]))); + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_3 || (templateObject_3 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n "]))); + var outputWidth = canvas.width, outputHeight = canvas.height; + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var blurredInputFrame = gl.getUniformLocation(program, 'u_blurredInputFrame'); + var coverageLocation = gl.getUniformLocation(program, 'u_coverage'); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(personMaskLocation, 1); + gl.uniform1i(blurredInputFrame, 2); + gl.uniform2f(coverageLocation, 0, 1); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + 
gl.bindFramebuffer(gl.FRAMEBUFFER, null); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateCoverage(coverage) { + gl.useProgram(program); + gl.uniform2f(coverageLocation, coverage[0], coverage[1]); + } + function cleanUp() { + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + gl.deleteShader(vertexShader); + } + return { + render: render, + updateCoverage: updateCoverage, + cleanUp: cleanUp, + }; +} +var templateObject_1, templateObject_2, templateObject_3; +//# sourceMappingURL=backgroundBlurStage.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/backgroundBlurStage.js.map b/es5/processors/webgl2/pipelines/backgroundBlurStage.js.map new file mode 100644 index 0000000..3ee3395 --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundBlurStage.js.map @@ -0,0 +1 @@ +{"version":3,"file":"backgroundBlurStage.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/backgroundBlurStage.ts"],"names":[],"mappings":";;;;;;;AAAA,sDAK+B;AAQ/B,SAAgB,wBAAwB,CACtC,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,iBAA+B,EAC/B,MAAyB;IAEzB,IAAM,QAAQ,GAAG,aAAa,CAC5B,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,iBAAiB,EACjB,MAAM,CACP,CAAA;IACD,IAAM,SAAS,GAAG,cAAc,CAAC,EAAE,EAAE,cAAc,EAAE,cAAc,EAAE,MAAM,CAAC,CAAA;IAE5E,SAAS,MAAM;QACb,QAAQ,CAAC,MAAM,EAAE,CAAA;QACjB,SAAS,CAAC,MAAM,EAAE,CAAA;IACpB,CAAC;IAED,SAAS,cAAc,CAAC,QAA0B;QAChD,SAAS,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAA;IACpC,CAAC;IAED,SAAS,OAAO;QACd,SAAS,CAAC,OAAO,EAAE,CAAA;QACnB,QAAQ,CAAC,OAAO,EAAE,CAAA;IACpB,CAAC;IAED,OAAO;QACL,MAAM,QAAA;QACN,cAAc,gBAAA;QACd,OAAO,SAAA;KACR,CAAA;AACH,CAAC;AArCD,4DAqCC;AAED,SAAS,aAAa,CACpB,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,iBAA+B,EAC/B,MAAyB;IAEzB,IAAM,oBAAoB,OAAG,kBAAI,yrCAAA,qnCAmChC,IAAA,CAAA;IAED,IAAM,KAAK,GAAG,GAAG,CAAA;IACjB,IAAM,WAAW,GAAG,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;IACxC,IAAM,YAAY,GAAG,MAAM,CAAC,MAAM,GAAG,KAAK,CAAA;IAC1C,IAAM,UAAU,GAAG,CAAC,GAAG,WAAW,CAAA;IAClC,IAAM,WAAW,GAAG,CAAC,GAAG,YAAY,CAAA;IAEpC,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,iBAAiB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAA;IACvE,IAAM,QAAQ,GAAG,IAAA,2BAAa,EAC5B,EAAE,EACF,EAAE,CAAC,KAAK,EACR,WAAW,EACX,YAAY,EACZ,EAAE,CAAC,OAAO,EACV,EAAE,CAAC,MAAM,CACV,CAAA;IACD,IAAM,QAAQ,GAAG,IAAA,2BAAa,EAC5B,EAAE,EACF,EAAE,CAAC,KAAK,EACR,WAAW,EACX,YAAY,EACZ,EAAE,CAAC,OAAO,EACV,EAAE,CAAC,MAAM,CACV,CAAA;IAED,IAAM,YAAY,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC3C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC,CAAA;IAChD,EAAE,CAAC,oBAAoB,CACrB,EAAE,CAAC,WAAW,EACd,EAAE,CAAC,iBAAiB,EACpB,EAAE,CAAC,UAAU,EACb,QAAQ,EACR,CAAC,CACF,CAAA;IAED,IAAM,YAAY,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC3C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC,CAAA;IAChD,EAAE,CAAC,oBAAoB,CACrB,EAAE,CAAC,WAAW,EACd,EAAE,CAAC,iBAAiB,EACpB,EAAE,CAAC,UAAU,EACb,QAAQ,EACR,CAAC,CACF,CAAA;IAED,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IAEnC,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAA;QAC5C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;QACnC,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;QAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAA;QAEhD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B
,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,CAAC,EAAE,WAAW,CAAC,CAAA;YAC/C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC,CAAA;YAChD,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;YAEtC,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;YAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAA;YACvC,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;YAEnC,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,UAAU,EAAE,CAAC,CAAC,CAAA;YAC9C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC,CAAA;YAChD,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;YAEtC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAA;SACxC;IACH,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAA;QAClC,EAAE,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAA;QAClC,EAAE,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAA;QAC1B,EAAE,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAA;QAC1B,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;IACjC,CAAC;IAED,OAAO;QACL,MAAM,QAAA;QACN,OAAO,SAAA;KACR,CAAA;AACH,CAAC;AAED,SAAS,cAAc,CACrB,EAA0B,EAC1B,cAA2B,EAC3B,cAA2B,EAC3B,MAAyB;IAEzB,IAAM,kBAAkB,OAAG,kBAAI,kWAAA,8RAY9B,IAAA,CAAA;IAED,IAAM,oBAAoB,OAAG,kBAAI,4qBAAA,wmBAoBhC,IAAA,CAAA;IAEO,IAAO,WAAW,GAA2B,MAAM,MAAjC,EAAU,YAAY,GAAK,MAAM,OAAX,CAAW;IAE3D,IAAM,YAAY,GAAG,IAAA,2BAAa,EAAC,EAAE,EAAE,EAAE,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAA;IAC5E,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,iBAAiB,GAAG,EAAE,CAAC,kBAAkB,CAC7C,OAAO,EACP,qBAAqB,CACtB,CAAA;IACD,IAAM,gBAAgB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAA;IAErE,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IACnC,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IACnC,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,CAAC,CAAC,CAAA;IAClC,EAAE,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IAEpC,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAA;QAC5C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,IAAI,CAAC,CAAA;QACxC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACxC,CAAC;IAED,SAAS,cAAc,CAAC,QAA0B;QAChD,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,gBAAgB,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;IAC1D,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;QAC/B,EAAE,CAAC,YAAY,CAAC,YAAY,CAAC,CAAA;IAC/B,CAAC;IAED,OAAO;QACL,MAAM,QAAA;QACN,cAAc,gBAAA;QACd,OAAO,SAAA;KACR,CAAA;AACH,CAAC","sourcesContent":["import {\n compileShader,\n createPiplelineStageProgram,\n createTexture,\n glsl,\n} from '../helpers/webglHelper'\n\nexport type BackgroundBlurStage = {\n render(): void\n updateCoverage(coverage: [number, number]): void\n cleanUp(): void\n}\n\nexport function buildBackgroundBlurStage(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n personMaskTexture: WebGLTexture,\n canvas: HTMLCanvasElement\n): BackgroundBlurStage {\n const blurPass = buildBlurPass(\n gl,\n vertexShader,\n positionBuffer,\n texCoordBuffer,\n personMaskTexture,\n canvas\n )\n const blendPass = buildBlendPass(gl, positionBuffer, texCoordBuffer, canvas)\n\n function render() {\n blurPass.render()\n blendPass.render()\n }\n\n function updateCoverage(coverage: [number, number]) {\n 
blendPass.updateCoverage(coverage)\n }\n\n function cleanUp() {\n blendPass.cleanUp()\n blurPass.cleanUp()\n }\n\n return {\n render,\n updateCoverage,\n cleanUp,\n }\n}\n\nfunction buildBlurPass(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n personMaskTexture: WebGLTexture,\n canvas: HTMLCanvasElement\n) {\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n `\n\n const scale = 0.5\n const outputWidth = canvas.width * scale\n const outputHeight = canvas.height * scale\n const texelWidth = 1 / outputWidth\n const texelHeight = 1 / outputHeight\n\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame')\n const personMaskLocation = gl.getUniformLocation(program, 'u_personMask')\n const texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize')\n const texture1 = createTexture(\n gl,\n gl.RGBA8,\n outputWidth,\n outputHeight,\n gl.NEAREST,\n gl.LINEAR\n )\n const texture2 = createTexture(\n gl,\n gl.RGBA8,\n outputWidth,\n outputHeight,\n gl.NEAREST,\n gl.LINEAR\n )\n\n const frameBuffer1 = gl.createFramebuffer()\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1)\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n texture1,\n 0\n )\n\n const frameBuffer2 = gl.createFramebuffer()\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2)\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n texture2,\n 0\n )\n\n gl.useProgram(program)\n gl.uniform1i(personMaskLocation, 1)\n\n function render() {\n gl.viewport(0, 0, outputWidth, outputHeight)\n gl.useProgram(program)\n gl.uniform1i(inputFrameLocation, 0)\n gl.activeTexture(gl.TEXTURE1)\n gl.bindTexture(gl.TEXTURE_2D, personMaskTexture)\n\n for (let i = 0; i < 8; i++) {\n gl.uniform2f(texelSizeLocation, 0, texelHeight)\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n\n gl.activeTexture(gl.TEXTURE2)\n gl.bindTexture(gl.TEXTURE_2D, texture1)\n gl.uniform1i(inputFrameLocation, 2)\n\n gl.uniform2f(texelSizeLocation, texelWidth, 0)\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n\n gl.bindTexture(gl.TEXTURE_2D, texture2)\n }\n }\n\n function cleanUp() {\n gl.deleteFramebuffer(frameBuffer2)\n 
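
The blurPass source embedded in this source map is a standard separable Gaussian: render() ping-pongs between two half-resolution framebuffers, running a vertical pass (texel size (0, texelHeight)) then a horizontal pass ((texelWidth, 0)) eight times. The five constants are one half of a symmetric kernel, so a quick sanity check (illustrative, not part of the build) is that the center weight plus twice each side weight sums to one, i.e. the blur preserves overall brightness:

const weight = [
  0.2270270270, 0.1945945946, 0.1216216216, 0.0540540541, 0.0162162162,
];
// The center tap counts once; each side tap is sampled at both +offset
// and -offset along the current blur axis.
const kernelSum =
  weight[0] + 2 * weight.slice(1).reduce((sum, w) => sum + w, 0);
console.log(kernelSum); // ≈ 1.0
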
gl.deleteFramebuffer(frameBuffer1)\n gl.deleteTexture(texture2)\n gl.deleteTexture(texture1)\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n }\n\n return {\n render,\n cleanUp,\n }\n}\n\nfunction buildBlendPass(\n gl: WebGL2RenderingContext,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n canvas: HTMLCanvasElement\n) {\n const vertexShaderSource = glsl`#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n `\n\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n `\n\n const { width: outputWidth, height: outputHeight } = canvas\n\n const vertexShader = compileShader(gl, gl.VERTEX_SHADER, vertexShaderSource)\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame')\n const personMaskLocation = gl.getUniformLocation(program, 'u_personMask')\n const blurredInputFrame = gl.getUniformLocation(\n program,\n 'u_blurredInputFrame'\n )\n const coverageLocation = gl.getUniformLocation(program, 'u_coverage')\n\n gl.useProgram(program)\n gl.uniform1i(inputFrameLocation, 0)\n gl.uniform1i(personMaskLocation, 1)\n gl.uniform1i(blurredInputFrame, 2)\n gl.uniform2f(coverageLocation, 0, 1)\n\n function render() {\n gl.viewport(0, 0, outputWidth, outputHeight)\n gl.useProgram(program)\n gl.bindFramebuffer(gl.FRAMEBUFFER, null)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n }\n\n function updateCoverage(coverage: [number, number]) {\n gl.useProgram(program)\n gl.uniform2f(coverageLocation, coverage[0], coverage[1])\n }\n\n function cleanUp() {\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n gl.deleteShader(vertexShader)\n }\n\n return {\n render,\n updateCoverage,\n cleanUp,\n }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/backgroundImageStage.d.ts b/es5/processors/webgl2/pipelines/backgroundImageStage.d.ts new file mode 100644 index 0000000..19799b5 --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundImageStage.d.ts @@ -0,0 +1,9 @@ +import { BlendMode } from '../helpers/postProcessingHelper'; +export type BackgroundImageStage = { + render(): void; + updateCoverage(coverage: [number, number]): void; + updateLightWrapping(lightWrapping: number): void; + updateBlendMode(blendMode: BlendMode): void; + cleanUp(): void; +}; +export declare function buildBackgroundImageStage(gl: WebGL2RenderingContext, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer, personMaskTexture: WebGLTexture, backgroundImage: HTMLImageElement | null, canvas: HTMLCanvasElement): BackgroundImageStage; diff --git a/es5/processors/webgl2/pipelines/backgroundImageStage.js 
b/es5/processors/webgl2/pipelines/backgroundImageStage.js new file mode 100644 index 0000000..939a37a --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundImageStage.js @@ -0,0 +1,108 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildBackgroundImageStage = void 0; +var webglHelper_1 = require("../helpers/webglHelper"); +function buildBackgroundImageStage(gl, positionBuffer, texCoordBuffer, personMaskTexture, backgroundImage, canvas) { + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n "], ["#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n "]))); + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_2 || (templateObject_2 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n float personMask = 
texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n "]))); + var outputWidth = canvas.width, outputHeight = canvas.height; + var outputRatio = outputWidth / outputHeight; + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var backgroundScaleLocation = gl.getUniformLocation(program, 'u_backgroundScale'); + var backgroundOffsetLocation = gl.getUniformLocation(program, 'u_backgroundOffset'); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var backgroundLocation = gl.getUniformLocation(program, 'u_background'); + var coverageLocation = gl.getUniformLocation(program, 'u_coverage'); + var lightWrappingLocation = gl.getUniformLocation(program, 'u_lightWrapping'); + var blendModeLocation = gl.getUniformLocation(program, 'u_blendMode'); + gl.useProgram(program); + gl.uniform2f(backgroundScaleLocation, 1, 1); + gl.uniform2f(backgroundOffsetLocation, 0, 0); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(personMaskLocation, 1); + gl.uniform2f(coverageLocation, 0, 1); + gl.uniform1f(lightWrappingLocation, 0); + gl.uniform1f(blendModeLocation, 0); + var backgroundTexture = null; + // TODO Find a better to handle background being loaded + if (backgroundImage === null || backgroundImage === void 0 ? 
void 0 : backgroundImage.complete) { + updateBackgroundImage(backgroundImage); + } + else if (backgroundImage) { + backgroundImage.onload = function () { + updateBackgroundImage(backgroundImage); + }; + } + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, personMaskTexture); + if (backgroundTexture !== null) { + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, backgroundTexture); + // TODO Handle correctly the background not loaded yet + gl.uniform1i(backgroundLocation, 2); + } + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateBackgroundImage(backgroundImage) { + backgroundTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, backgroundImage.naturalWidth, backgroundImage.naturalHeight, gl.LINEAR, gl.LINEAR); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, backgroundImage.naturalWidth, backgroundImage.naturalHeight, gl.RGBA, gl.UNSIGNED_BYTE, backgroundImage); + var xOffset = 0; + var yOffset = 0; + var backgroundWidth = backgroundImage.naturalWidth; + var backgroundHeight = backgroundImage.naturalHeight; + var backgroundRatio = backgroundWidth / backgroundHeight; + if (backgroundRatio < outputRatio) { + backgroundHeight = backgroundWidth / outputRatio; + yOffset = (backgroundImage.naturalHeight - backgroundHeight) / 2; + } + else { + backgroundWidth = backgroundHeight * outputRatio; + xOffset = (backgroundImage.naturalWidth - backgroundWidth) / 2; + } + var xScale = backgroundWidth / backgroundImage.naturalWidth; + var yScale = backgroundHeight / backgroundImage.naturalHeight; + xOffset /= backgroundImage.naturalWidth; + yOffset /= backgroundImage.naturalHeight; + gl.uniform2f(backgroundScaleLocation, xScale, yScale); + gl.uniform2f(backgroundOffsetLocation, xOffset, yOffset); + } + function updateCoverage(coverage) { + gl.useProgram(program); + gl.uniform2f(coverageLocation, coverage[0], coverage[1]); + } + function updateLightWrapping(lightWrapping) { + gl.useProgram(program); + gl.uniform1f(lightWrappingLocation, lightWrapping); + } + function updateBlendMode(blendMode) { + gl.useProgram(program); + gl.uniform1f(blendModeLocation, blendMode === 'screen' ? 
0 : 1); + } + function cleanUp() { + gl.deleteTexture(backgroundTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + gl.deleteShader(vertexShader); + } + return { + render: render, + updateCoverage: updateCoverage, + updateLightWrapping: updateLightWrapping, + updateBlendMode: updateBlendMode, + cleanUp: cleanUp, + }; +} +exports.buildBackgroundImageStage = buildBackgroundImageStage; +var templateObject_1, templateObject_2; +//# sourceMappingURL=backgroundImageStage.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/backgroundImageStage.js.map b/es5/processors/webgl2/pipelines/backgroundImageStage.js.map new file mode 100644 index 0000000..d262e70 --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundImageStage.js.map @@ -0,0 +1 @@ +{"version":3,"file":"backgroundImageStage.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/backgroundImageStage.ts"],"names":[],"mappings":";;;;;;;AACA,sDAK+B;AAU/B,SAAgB,yBAAyB,CACvC,EAA0B,EAC1B,cAA2B,EAC3B,cAA2B,EAC3B,iBAA+B,EAC/B,eAAwC,EACxC,MAAyB;IAEzB,IAAM,kBAAkB,OAAG,kBAAI,giBAAA,4dAiB9B,IAAA,CAAA;IAED,IAAM,oBAAoB,OAAG,kBAAI,uvCAAA,mrCAmChC,IAAA,CAAA;IAEO,IAAO,WAAW,GAA2B,MAAM,MAAjC,EAAU,YAAY,GAAK,MAAM,OAAX,CAAW;IAC3D,IAAM,WAAW,GAAG,WAAW,GAAG,YAAY,CAAA;IAE9C,IAAM,YAAY,GAAG,IAAA,2BAAa,EAAC,EAAE,EAAE,EAAE,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAA;IAC5E,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,uBAAuB,GAAG,EAAE,CAAC,kBAAkB,CACnD,OAAO,EACP,mBAAmB,CACpB,CAAA;IACD,IAAM,wBAAwB,GAAG,EAAE,CAAC,kBAAkB,CACpD,OAAO,EACP,oBAAoB,CACrB,CAAA;IACD,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,gBAAgB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAA;IACrE,IAAM,qBAAqB,GAAG,EAAE,CAAC,kBAAkB,CACjD,OAAO,EACP,iBAAiB,CAClB,CAAA;IACD,IAAM,iBAAiB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAA;IAEvE,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,uBAAuB,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IAC3C,EAAE,CAAC,SAAS,CAAC,wBAAwB,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IAC5C,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IACnC,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IACnC,EAAE,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACpC,EAAE,CAAC,SAAS,CAAC,qBAAqB,EAAE,CAAC,CAAC,CAAA;IACtC,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,CAAC,CAAC,CAAA;IAElC,IAAI,iBAAiB,GAAwB,IAAI,CAAA;IACjD,uDAAuD;IACvD,IAAI,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,QAAQ,EAAE;QAC7B,qBAAqB,CAAC,eAAe,CAAC,CAAA;KACvC;SAAM,IAAI,eAAe,EAAE;QAC1B,eAAe,CAAC,MAAM,GAAG;YACvB,qBAAqB,CAAC,eAAe,CAAC,CAAA;QACxC,CAAC,CAAA;KACF;IAED,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAA;QAC5C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;QAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAA;QAChD,IAAI,iBAAiB,KAAK,IAAI,EAAE;YAC9B,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;YAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAA;YAChD,sDAAsD;YACtD,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;SACpC;QACD,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,IAAI,CAAC,CAAA;QACxC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACxC,CAAC;IAED,SAAS,qBAAqB,CAAC,eAAiC;QAC9D,iBAAiB,GAAG,IAAA,2BAAa,EAC/B,EAAE,EACF,EAAE,CAAC,KAAK,EACR,eAAe,CAAC,YAAY,EAC5B,eAAe,CAAC,aAAa,EAC7B,EAAE,CAAC,MAAM,
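
updateBackgroundImage() in the compiled stage above crops the background image to the canvas aspect ratio, centered, which is the same behavior as CSS object-fit: cover. A standalone sketch of that math (the helper name is hypothetical; the resulting scale/offset pair is what feeds the u_backgroundScale and u_backgroundOffset uniforms):

function computeCoverCrop(
  naturalWidth: number,
  naturalHeight: number,
  outputRatio: number, // canvas width / canvas height
): { scale: [number, number]; offset: [number, number] } {
  let cropWidth = naturalWidth;
  let cropHeight = naturalHeight;
  let xOffset = 0;
  let yOffset = 0;
  if (naturalWidth / naturalHeight < outputRatio) {
    // Image is too tall for the canvas: keep full width, crop top/bottom.
    cropHeight = naturalWidth / outputRatio;
    yOffset = (naturalHeight - cropHeight) / 2;
  } else {
    // Image is too wide: keep full height, crop left/right.
    cropWidth = naturalHeight * outputRatio;
    xOffset = (naturalWidth - cropWidth) / 2;
  }
  // Normalized to texture coordinates; the vertex shader applies
  // v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset.
  return {
    scale: [cropWidth / naturalWidth, cropHeight / naturalHeight],
    offset: [xOffset / naturalWidth, yOffset / naturalHeight],
  };
}

// Example: a 1000x1000 image on a 16:9 canvas yields scale [1, 0.5625]
// and offset [0, 0.21875], i.e. a centered horizontal band of the image.
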
EACT,EAAE,CAAC,MAAM,CACV,CAAA;QACD,EAAE,CAAC,aAAa,CACd,EAAE,CAAC,UAAU,EACb,CAAC,EACD,CAAC,EACD,CAAC,EACD,eAAe,CAAC,YAAY,EAC5B,eAAe,CAAC,aAAa,EAC7B,EAAE,CAAC,IAAI,EACP,EAAE,CAAC,aAAa,EAChB,eAAe,CAChB,CAAA;QAED,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,IAAI,eAAe,GAAG,eAAe,CAAC,YAAY,CAAA;QAClD,IAAI,gBAAgB,GAAG,eAAe,CAAC,aAAa,CAAA;QACpD,IAAM,eAAe,GAAG,eAAe,GAAG,gBAAgB,CAAA;QAC1D,IAAI,eAAe,GAAG,WAAW,EAAE;YACjC,gBAAgB,GAAG,eAAe,GAAG,WAAW,CAAA;YAChD,OAAO,GAAG,CAAC,eAAe,CAAC,aAAa,GAAG,gBAAgB,CAAC,GAAG,CAAC,CAAA;SACjE;aAAM;YACL,eAAe,GAAG,gBAAgB,GAAG,WAAW,CAAA;YAChD,OAAO,GAAG,CAAC,eAAe,CAAC,YAAY,GAAG,eAAe,CAAC,GAAG,CAAC,CAAA;SAC/D;QAED,IAAM,MAAM,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,CAAA;QAC7D,IAAM,MAAM,GAAG,gBAAgB,GAAG,eAAe,CAAC,aAAa,CAAA;QAC/D,OAAO,IAAI,eAAe,CAAC,YAAY,CAAA;QACvC,OAAO,IAAI,eAAe,CAAC,aAAa,CAAA;QAExC,EAAE,CAAC,SAAS,CAAC,uBAAuB,EAAE,MAAM,EAAE,MAAM,CAAC,CAAA;QACrD,EAAE,CAAC,SAAS,CAAC,wBAAwB,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC1D,CAAC;IAED,SAAS,cAAc,CAAC,QAA0B;QAChD,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,gBAAgB,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;IAC1D,CAAC;IAED,SAAS,mBAAmB,CAAC,aAAqB;QAChD,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,qBAAqB,EAAE,aAAa,CAAC,CAAA;IACpD,CAAC;IAED,SAAS,eAAe,CAAC,SAAoB;QAC3C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IACjE,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAA;QACnC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;QAC/B,EAAE,CAAC,YAAY,CAAC,YAAY,CAAC,CAAA;IAC/B,CAAC;IAED,OAAO;QACL,MAAM,QAAA;QACN,cAAc,gBAAA;QACd,mBAAmB,qBAAA;QACnB,eAAe,iBAAA;QACf,OAAO,SAAA;KACR,CAAA;AACH,CAAC;AA5MD,8DA4MC","sourcesContent":["import { BlendMode } from '../helpers/postProcessingHelper'\nimport {\n compileShader,\n createPiplelineStageProgram,\n createTexture,\n glsl,\n} from '../helpers/webglHelper'\n\nexport type BackgroundImageStage = {\n render(): void\n updateCoverage(coverage: [number, number]): void\n updateLightWrapping(lightWrapping: number): void\n updateBlendMode(blendMode: BlendMode): void\n cleanUp(): void\n}\n\nexport function buildBackgroundImageStage(\n gl: WebGL2RenderingContext,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n personMaskTexture: WebGLTexture,\n backgroundImage: HTMLImageElement | null,\n canvas: HTMLCanvasElement\n): BackgroundImageStage {\n const vertexShaderSource = glsl`#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n `\n\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n 
float personMask = texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n `\n\n const { width: outputWidth, height: outputHeight } = canvas\n const outputRatio = outputWidth / outputHeight\n\n const vertexShader = compileShader(gl, gl.VERTEX_SHADER, vertexShaderSource)\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const backgroundScaleLocation = gl.getUniformLocation(\n program,\n 'u_backgroundScale'\n )\n const backgroundOffsetLocation = gl.getUniformLocation(\n program,\n 'u_backgroundOffset'\n )\n const inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame')\n const personMaskLocation = gl.getUniformLocation(program, 'u_personMask')\n const backgroundLocation = gl.getUniformLocation(program, 'u_background')\n const coverageLocation = gl.getUniformLocation(program, 'u_coverage')\n const lightWrappingLocation = gl.getUniformLocation(\n program,\n 'u_lightWrapping'\n )\n const blendModeLocation = gl.getUniformLocation(program, 'u_blendMode')\n\n gl.useProgram(program)\n gl.uniform2f(backgroundScaleLocation, 1, 1)\n gl.uniform2f(backgroundOffsetLocation, 0, 0)\n gl.uniform1i(inputFrameLocation, 0)\n gl.uniform1i(personMaskLocation, 1)\n gl.uniform2f(coverageLocation, 0, 1)\n gl.uniform1f(lightWrappingLocation, 0)\n gl.uniform1f(blendModeLocation, 0)\n\n let backgroundTexture: WebGLTexture | null = null\n // TODO Find a better to handle background being loaded\n if (backgroundImage?.complete) {\n updateBackgroundImage(backgroundImage)\n } else if (backgroundImage) {\n backgroundImage.onload = () => {\n updateBackgroundImage(backgroundImage)\n }\n }\n\n function render() {\n gl.viewport(0, 0, outputWidth, outputHeight)\n gl.useProgram(program)\n gl.activeTexture(gl.TEXTURE1)\n gl.bindTexture(gl.TEXTURE_2D, personMaskTexture)\n if (backgroundTexture !== null) {\n gl.activeTexture(gl.TEXTURE2)\n gl.bindTexture(gl.TEXTURE_2D, backgroundTexture)\n // TODO Handle correctly the background not loaded yet\n gl.uniform1i(backgroundLocation, 2)\n }\n gl.bindFramebuffer(gl.FRAMEBUFFER, null)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n }\n\n function updateBackgroundImage(backgroundImage: HTMLImageElement) {\n backgroundTexture = createTexture(\n gl,\n gl.RGBA8,\n backgroundImage.naturalWidth,\n backgroundImage.naturalHeight,\n gl.LINEAR,\n gl.LINEAR\n )\n gl.texSubImage2D(\n gl.TEXTURE_2D,\n 0,\n 0,\n 0,\n backgroundImage.naturalWidth,\n backgroundImage.naturalHeight,\n gl.RGBA,\n gl.UNSIGNED_BYTE,\n backgroundImage\n )\n\n let xOffset = 0\n let yOffset = 0\n let backgroundWidth = backgroundImage.naturalWidth\n let backgroundHeight = backgroundImage.naturalHeight\n const backgroundRatio = backgroundWidth / backgroundHeight\n if (backgroundRatio < outputRatio) {\n backgroundHeight = backgroundWidth / outputRatio\n yOffset = (backgroundImage.naturalHeight - backgroundHeight) / 2\n } else {\n backgroundWidth = backgroundHeight * outputRatio\n xOffset = (backgroundImage.naturalWidth - backgroundWidth) / 2\n 
}\n\n const xScale = backgroundWidth / backgroundImage.naturalWidth\n const yScale = backgroundHeight / backgroundImage.naturalHeight\n xOffset /= backgroundImage.naturalWidth\n yOffset /= backgroundImage.naturalHeight\n\n gl.uniform2f(backgroundScaleLocation, xScale, yScale)\n gl.uniform2f(backgroundOffsetLocation, xOffset, yOffset)\n }\n\n function updateCoverage(coverage: [number, number]) {\n gl.useProgram(program)\n gl.uniform2f(coverageLocation, coverage[0], coverage[1])\n }\n\n function updateLightWrapping(lightWrapping: number) {\n gl.useProgram(program)\n gl.uniform1f(lightWrappingLocation, lightWrapping)\n }\n\n function updateBlendMode(blendMode: BlendMode) {\n gl.useProgram(program)\n gl.uniform1f(blendModeLocation, blendMode === 'screen' ? 0 : 1)\n }\n\n function cleanUp() {\n gl.deleteTexture(backgroundTexture)\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n gl.deleteShader(vertexShader)\n }\n\n return {\n render,\n updateCoverage,\n updateLightWrapping,\n updateBlendMode,\n cleanUp,\n }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/fastBilateralFilterStage.d.ts b/es5/processors/webgl2/pipelines/fastBilateralFilterStage.d.ts new file mode 100644 index 0000000..1319022 --- /dev/null +++ b/es5/processors/webgl2/pipelines/fastBilateralFilterStage.d.ts @@ -0,0 +1,7 @@ +import { SegmentationConfig } from '../helpers/segmentationHelper'; +export declare function buildFastBilateralFilterStage(gl: WebGL2RenderingContext, vertexShader: WebGLShader, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer, inputTexture: WebGLTexture, segmentationConfig: SegmentationConfig, outputTexture: WebGLTexture, canvas: HTMLCanvasElement): { + render: () => void; + updateSigmaSpace: (sigmaSpace: number) => void; + updateSigmaColor: (sigmaColor: number) => void; + cleanUp: () => void; +}; diff --git a/es5/processors/webgl2/pipelines/fastBilateralFilterStage.js b/es5/processors/webgl2/pipelines/fastBilateralFilterStage.js new file mode 100644 index 0000000..fa2987a --- /dev/null +++ b/es5/processors/webgl2/pipelines/fastBilateralFilterStage.js @@ -0,0 +1,82 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildFastBilateralFilterStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildFastBilateralFilterStage(gl, vertexShader, positionBuffer, texCoordBuffer, inputTexture, segmentationConfig, outputTexture, canvas) { + // NOTE(mmalavalli): This is a faster approximation of the joint bilateral filter. + // For a given pixel, instead of calculating the space and color weights of all + // the pixels within the filter kernel, which would have a complexity of O(r^2), + // we calculate the space and color weights of only those pixels which form two + // diagonal lines between the two pairs of opposite corners of the filter kernel, + // which would have a complexity of O(r). 
This improves the overall complexity + // of this stage from O(w x h x r^2) to O(w x h x r), where: + // w => width of the output video frame + // h => height of the output video frame + // r => radius of the joint bilateral filter kernel + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n return exp(-0.5 * x * x / sigma / sigma);\n }\n\n float calculateSpaceWeight(vec2 coord) {\n float x = distance(v_texCoord, coord);\n float sigma = u_sigmaTexel;\n return gaussian(x, sigma);\n }\n\n float calculateColorWeight(vec2 coord) {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 coordColor = texture(u_inputFrame, coord).rgb;\n float x = distance(centerColor, coordColor);\n float sigma = u_sigmaColor;\n return gaussian(x, sigma);\n }\n\n void main() {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n float newVal = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(v_texCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(v_texCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(v_texCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(v_texCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n newVal = 0.0;\n } else if (totalSegAlpha >= 4.0) {\n newVal = 1.0;\n } else {\n for (float i = 0.0; i <= u_radius - u_offset; i += u_step) {\n vec2 shift = vec2(i, i) * u_texelSize;\n vec2 coord = vec2(v_texCoord + shift);\n float spaceWeight = calculateSpaceWeight(coord);\n float colorWeight = calculateColorWeight(coord);\n float weight = spaceWeight * colorWeight;\n float alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * alpha;\n\n if (i != 0.0) {\n shift = vec2(i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a; \n }\n }\n newVal /= totalWeight;\n }\n\n outColor = vec4(vec3(0.0), newVal);\n }\n "], 
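
The NOTE(mmalavalli) comment above is the key to this stage's cost: rather than weighting every texel in the (2r+1)^2 kernel, the fragment shader only visits texels on the kernel's two diagonals, at u_step increments. A sketch of that sampling pattern (a hypothetical helper mirroring the shader's loop, not part of this build):

// Enumerate the texel offsets the shader samples: (i, i) on the main
// diagonal, plus (i, -i), (-i, i) and (-i, -i) once i > 0. The sample
// count grows linearly with the radius instead of quadratically.
function diagonalOffsets(
  radius: number,
  step: number,
  offset: number,
): Array<[number, number]> {
  const coords: Array<[number, number]> = [];
  for (let i = 0; i <= radius - offset; i += step) {
    coords.push([i, i]);
    if (i !== 0) {
      coords.push([i, -i], [-i, i], [-i, -i]);
    }
  }
  return coords;
}
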
["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n return exp(-0.5 * x * x / sigma / sigma);\n }\n\n float calculateSpaceWeight(vec2 coord) {\n float x = distance(v_texCoord, coord);\n float sigma = u_sigmaTexel;\n return gaussian(x, sigma);\n }\n\n float calculateColorWeight(vec2 coord) {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 coordColor = texture(u_inputFrame, coord).rgb;\n float x = distance(centerColor, coordColor);\n float sigma = u_sigmaColor;\n return gaussian(x, sigma);\n }\n\n void main() {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n float newVal = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(v_texCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(v_texCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(v_texCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(v_texCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n newVal = 0.0;\n } else if (totalSegAlpha >= 4.0) {\n newVal = 1.0;\n } else {\n for (float i = 0.0; i <= u_radius - u_offset; i += u_step) {\n vec2 shift = vec2(i, i) * u_texelSize;\n vec2 coord = vec2(v_texCoord + shift);\n float spaceWeight = calculateSpaceWeight(coord);\n float colorWeight = calculateColorWeight(coord);\n float weight = spaceWeight * colorWeight;\n float alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * alpha;\n\n if (i != 0.0) {\n shift = vec2(i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a; \n }\n }\n newVal /= totalWeight;\n }\n\n outColor = vec4(vec3(0.0), newVal);\n }\n "]))); + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var outputWidth = canvas.width, outputHeight = canvas.height; + var texelWidth = 1 / outputWidth; + var texelHeight = 1 / outputHeight; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, 
gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var segmentationMaskLocation = gl.getUniformLocation(program, 'u_segmentationMask'); + var texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize'); + var stepLocation = gl.getUniformLocation(program, 'u_step'); + var radiusLocation = gl.getUniformLocation(program, 'u_radius'); + var offsetLocation = gl.getUniformLocation(program, 'u_offset'); + var sigmaTexelLocation = gl.getUniformLocation(program, 'u_sigmaTexel'); + var sigmaColorLocation = gl.getUniformLocation(program, 'u_sigmaColor'); + var frameBuffer = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outputTexture, 0); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(segmentationMaskLocation, 1); + gl.uniform2f(texelSizeLocation, texelWidth, texelHeight); + // Ensures default values are configured to prevent infinite + // loop in fragment shader + updateSigmaSpace(0); + updateSigmaColor(0); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, inputTexture); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateSigmaSpace(sigmaSpace) { + sigmaSpace *= Math.max(outputWidth / segmentationWidth, outputHeight / segmentationHeight); + var kSparsityFactor = 0.66; // Higher is more sparse. + var sparsity = Math.max(1, Math.sqrt(sigmaSpace) * kSparsityFactor); + var step = sparsity; + var radius = sigmaSpace; + var offset = step > 1 ? 
step * 0.5 : 0; + var sigmaTexel = Math.max(texelWidth, texelHeight) * sigmaSpace; + gl.useProgram(program); + gl.uniform1f(stepLocation, step); + gl.uniform1f(radiusLocation, radius); + gl.uniform1f(offsetLocation, offset); + gl.uniform1f(sigmaTexelLocation, sigmaTexel); + } + function updateSigmaColor(sigmaColor) { + gl.useProgram(program); + gl.uniform1f(sigmaColorLocation, sigmaColor); + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, updateSigmaSpace: updateSigmaSpace, updateSigmaColor: updateSigmaColor, cleanUp: cleanUp }; +} +exports.buildFastBilateralFilterStage = buildFastBilateralFilterStage; +var templateObject_1; +//# sourceMappingURL=fastBilateralFilterStage.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/fastBilateralFilterStage.js.map b/es5/processors/webgl2/pipelines/fastBilateralFilterStage.js.map new file mode 100644 index 0000000..e03106e --- /dev/null +++ b/es5/processors/webgl2/pipelines/fastBilateralFilterStage.js.map @@ -0,0 +1 @@ +{"version":3,"file":"fastBilateralFilterStage.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/fastBilateralFilterStage.ts"],"names":[],"mappings":";;;;;;;AAAA,oEAGsC;AACtC,sDAI+B;AAE/B,SAAgB,6BAA6B,CAC3C,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,YAA0B,EAC1B,kBAAsC,EACtC,aAA2B,EAC3B,MAAyB;IAEzB,kFAAkF;IAClF,+EAA+E;IAC/E,gFAAgF;IAChF,+EAA+E;IAC/E,iFAAiF;IACjF,8EAA8E;IAC9E,4DAA4D;IAC5D,uCAAuC;IACvC,wCAAwC;IACxC,mDAAmD;IACnD,IAAM,oBAAoB,OAAG,kBAAI,+vHAAA,2rHAiGhC,IAAA,CAAA;IAEK,IAAA,KAA0C,qCAAgB,CAC9D,kBAAkB,CAAC,eAAe,CACnC,EAFM,iBAAiB,QAAA,EAAE,kBAAkB,QAE3C,CAAA;IACO,IAAO,WAAW,GAA2B,MAAM,MAAjC,EAAU,YAAY,GAAK,MAAM,OAAX,CAAW;IAC3D,IAAM,UAAU,GAAG,CAAC,GAAG,WAAW,CAAA;IAClC,IAAM,WAAW,GAAG,CAAC,GAAG,YAAY,CAAA;IAEpC,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,wBAAwB,GAAG,EAAE,CAAC,kBAAkB,CACpD,OAAO,EACP,oBAAoB,CACrB,CAAA;IACD,IAAM,iBAAiB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAA;IACvE,IAAM,YAAY,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAA;IAC7D,IAAM,cAAc,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,UAAU,CAAC,CAAA;IACjE,IAAM,cAAc,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,UAAU,CAAC,CAAA;IACjE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IAEzE,IAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC1C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;IAC/C,EAAE,CAAC,oBAAoB,CACrB,EAAE,CAAC,WAAW,EACd,EAAE,CAAC,iBAAiB,EACpB,EAAE,CAAC,UAAU,EACb,aAAa,EACb,CAAC,CACF,CAAA;IAED,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IACnC,EAAE,CAAC,SAAS,CAAC,wBAAwB,EAAE,CAAC,CAAC,CAAA;IACzC,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,UAAU,EAAE,WAAW,CAAC,CAAA;IAExD,4DAA4D;IAC5D,0BAA0B;IAC1B,gBAAgB,CAAC,CAAC,CAAC,CAAA;IACnB,gBAAgB,CAAC,CAAC,CAAC,CAAA;IAEnB,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAA;QAC5C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;QAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,YAAY,CAAC,CAAA;QAC3C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;QAC/C,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACxC,CAAC;IAED,SAAS,gBAAgB,CAAC,UAAkB;QAC1C,UAAU,IAAI,IAAI,CAAC,GA
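
updateSigmaSpace() in the compiled stage above derives all kernel parameters from one sigma: the value is rescaled from segmentation-mask space to output space, and the sampling step grows with the square root of the sigma so that larger kernels are sampled more sparsely. A worked example under assumed dimensions (a 640x360 canvas and a 160x96 mask; both values are illustrative, not read from this build):

const inputSigma = 1; // caller-supplied sigma, in mask pixels
const sigmaSpace = inputSigma * Math.max(640 / 160, 360 / 96);     // 4
const kSparsityFactor = 0.66;                                      // higher is more sparse
const step = Math.max(1, Math.sqrt(sigmaSpace) * kSparsityFactor); // 1.32
const radius = sigmaSpace;                                         // 4
const offset = step > 1 ? step * 0.5 : 0;                          // 0.66
const sigmaTexel = Math.max(1 / 640, 1 / 360) * sigmaSpace;        // ≈ 0.0111
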
AG,CACpB,WAAW,GAAG,iBAAiB,EAC/B,YAAY,GAAG,kBAAkB,CAClC,CAAA;QAED,IAAM,eAAe,GAAG,IAAI,CAAA,CAAC,yBAAyB;QACtD,IAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC,CAAA;QACrE,IAAM,IAAI,GAAG,QAAQ,CAAA;QACrB,IAAM,MAAM,GAAG,UAAU,CAAA;QACzB,IAAM,MAAM,GAAG,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QACxC,IAAM,UAAU,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,GAAG,UAAU,CAAA;QAEjE,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,YAAY,EAAE,IAAI,CAAC,CAAA;QAChC,EAAE,CAAC,SAAS,CAAC,cAAc,EAAE,MAAM,CAAC,CAAA;QACpC,EAAE,CAAC,SAAS,CAAC,cAAc,EAAE,MAAM,CAAC,CAAA;QACpC,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,UAAU,CAAC,CAAA;IAC9C,CAAC;IAED,SAAS,gBAAgB,CAAC,UAAkB;QAC1C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,UAAU,CAAC,CAAA;IAC9C,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACjC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;IACjC,CAAC;IAED,OAAO,EAAE,MAAM,QAAA,EAAE,gBAAgB,kBAAA,EAAE,gBAAgB,kBAAA,EAAE,OAAO,SAAA,EAAE,CAAA;AAChE,CAAC;AAnND,sEAmNC","sourcesContent":["import {\n inputResolutions,\n SegmentationConfig,\n} from '../helpers/segmentationHelper'\nimport {\n compileShader,\n createPiplelineStageProgram,\n glsl,\n} from '../helpers/webglHelper'\n\nexport function buildFastBilateralFilterStage(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n inputTexture: WebGLTexture,\n segmentationConfig: SegmentationConfig,\n outputTexture: WebGLTexture,\n canvas: HTMLCanvasElement\n) {\n // NOTE(mmalavalli): This is a faster approximation of the joint bilateral filter.\n // For a given pixel, instead of calculating the space and color weights of all\n // the pixels within the filter kernel, which would have a complexity of O(r^2),\n // we calculate the space and color weights of only those pixels which form two\n // diagonal lines between the two pairs of opposite corners of the filter kernel,\n // which would have a complexity of O(r). 
This improves the overall complexity\n // of this stage from O(w x h x r^2) to O(w x h x r), where:\n // w => width of the output video frame\n // h => height of the output video frame\n // r => radius of the joint bilateral filter kernel\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n return exp(-0.5 * x * x / sigma / sigma);\n }\n\n float calculateSpaceWeight(vec2 coord) {\n float x = distance(v_texCoord, coord);\n float sigma = u_sigmaTexel;\n return gaussian(x, sigma);\n }\n\n float calculateColorWeight(vec2 coord) {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 coordColor = texture(u_inputFrame, coord).rgb;\n float x = distance(centerColor, coordColor);\n float sigma = u_sigmaColor;\n return gaussian(x, sigma);\n }\n\n void main() {\n vec3 centerColor = texture(u_inputFrame, v_texCoord).rgb;\n float newVal = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(v_texCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(v_texCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(v_texCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(v_texCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n newVal = 0.0;\n } else if (totalSegAlpha >= 4.0) {\n newVal = 1.0;\n } else {\n for (float i = 0.0; i <= u_radius - u_offset; i += u_step) {\n vec2 shift = vec2(i, i) * u_texelSize;\n vec2 coord = vec2(v_texCoord + shift);\n float spaceWeight = calculateSpaceWeight(coord);\n float colorWeight = calculateColorWeight(coord);\n float weight = spaceWeight * colorWeight;\n float alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * alpha;\n\n if (i != 0.0) {\n shift = vec2(i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a;\n \n shift = vec2(-i, -i) * u_texelSize;\n coord = vec2(v_texCoord + shift);\n colorWeight = calculateColorWeight(coord);\n weight = spaceWeight * colorWeight;\n alpha = texture(u_segmentationMask, coord).a;\n totalWeight += weight;\n newVal += weight * texture(u_segmentationMask, coord).a; \n }\n }\n newVal /= totalWeight;\n }\n\n outColor = vec4(vec3(0.0), newVal);\n }\n `\n\n const [segmentationWidth, segmentationHeight] = inputResolutions[\n 
segmentationConfig.inputResolution\n ]\n const { width: outputWidth, height: outputHeight } = canvas\n const texelWidth = 1 / outputWidth\n const texelHeight = 1 / outputHeight\n\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame')\n const segmentationMaskLocation = gl.getUniformLocation(\n program,\n 'u_segmentationMask'\n )\n const texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize')\n const stepLocation = gl.getUniformLocation(program, 'u_step')\n const radiusLocation = gl.getUniformLocation(program, 'u_radius')\n const offsetLocation = gl.getUniformLocation(program, 'u_offset')\n const sigmaTexelLocation = gl.getUniformLocation(program, 'u_sigmaTexel')\n const sigmaColorLocation = gl.getUniformLocation(program, 'u_sigmaColor')\n\n const frameBuffer = gl.createFramebuffer()\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n outputTexture,\n 0\n )\n\n gl.useProgram(program)\n gl.uniform1i(inputFrameLocation, 0)\n gl.uniform1i(segmentationMaskLocation, 1)\n gl.uniform2f(texelSizeLocation, texelWidth, texelHeight)\n\n // Ensures default values are configured to prevent infinite\n // loop in fragment shader\n updateSigmaSpace(0)\n updateSigmaColor(0)\n\n function render() {\n gl.viewport(0, 0, outputWidth, outputHeight)\n gl.useProgram(program)\n gl.activeTexture(gl.TEXTURE1)\n gl.bindTexture(gl.TEXTURE_2D, inputTexture)\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n }\n\n function updateSigmaSpace(sigmaSpace: number) {\n sigmaSpace *= Math.max(\n outputWidth / segmentationWidth,\n outputHeight / segmentationHeight\n )\n\n const kSparsityFactor = 0.66 // Higher is more sparse.\n const sparsity = Math.max(1, Math.sqrt(sigmaSpace) * kSparsityFactor)\n const step = sparsity\n const radius = sigmaSpace\n const offset = step > 1 ? 
step * 0.5 : 0\n const sigmaTexel = Math.max(texelWidth, texelHeight) * sigmaSpace\n\n gl.useProgram(program)\n gl.uniform1f(stepLocation, step)\n gl.uniform1f(radiusLocation, radius)\n gl.uniform1f(offsetLocation, offset)\n gl.uniform1f(sigmaTexelLocation, sigmaTexel)\n }\n\n function updateSigmaColor(sigmaColor: number) {\n gl.useProgram(program)\n gl.uniform1f(sigmaColorLocation, sigmaColor)\n }\n\n function cleanUp() {\n gl.deleteFramebuffer(frameBuffer)\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n }\n\n return { render, updateSigmaSpace, updateSigmaColor, cleanUp }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/loadSegmentationStage.d.ts b/es5/processors/webgl2/pipelines/loadSegmentationStage.d.ts new file mode 100644 index 0000000..592aa2f --- /dev/null +++ b/es5/processors/webgl2/pipelines/loadSegmentationStage.d.ts @@ -0,0 +1,5 @@ +import { SegmentationConfig } from '../helpers/segmentationHelper'; +export declare function buildLoadSegmentationStage(gl: WebGL2RenderingContext, vertexShader: WebGLShader, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer, segmentationConfig: SegmentationConfig, outputTexture: WebGLTexture): { + render: (segmentationData: Uint8ClampedArray) => void; + cleanUp: () => void; +}; diff --git a/es5/processors/webgl2/pipelines/loadSegmentationStage.js b/es5/processors/webgl2/pipelines/loadSegmentationStage.js new file mode 100644 index 0000000..f081b78 --- /dev/null +++ b/es5/processors/webgl2/pipelines/loadSegmentationStage.js @@ -0,0 +1,41 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildLoadSegmentationStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildLoadSegmentationStage(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, outputTexture) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).a;\n outColor = vec4(vec3(0.0), segmentation);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).a;\n outColor = vec4(vec3(0.0), segmentation);\n }\n "]))); + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputLocation = gl.getUniformLocation(program, 'u_inputSegmentation'); + var inputTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, segmentationWidth, segmentationHeight); + var frameBuffer = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, 
gl.TEXTURE_2D, outputTexture, 0); + gl.useProgram(program); + gl.uniform1i(inputLocation, 1); + function render(segmentationData) { + gl.viewport(0, 0, segmentationWidth, segmentationHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, inputTexture); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, segmentationWidth, segmentationHeight, gl.RGBA, gl.UNSIGNED_BYTE, segmentationData); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteTexture(inputTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, cleanUp: cleanUp }; +} +exports.buildLoadSegmentationStage = buildLoadSegmentationStage; +var templateObject_1; +//# sourceMappingURL=loadSegmentationStage.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/loadSegmentationStage.js.map b/es5/processors/webgl2/pipelines/loadSegmentationStage.js.map new file mode 100644 index 0000000..7240401 --- /dev/null +++ b/es5/processors/webgl2/pipelines/loadSegmentationStage.js.map @@ -0,0 +1 @@ +{"version":3,"file":"loadSegmentationStage.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/loadSegmentationStage.ts"],"names":[],"mappings":";;;;;;;AAAA,oEAGsC;AACtC,sDAK+B;AAE/B,SAAgB,0BAA0B,CACxC,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,kBAAsC,EACtC,aAA2B;IAE3B,IAAM,oBAAoB,OAAG,kBAAI,+WAAA,2SAchC,IAAA,CAAA;IACK,IAAA,KAA0C,qCAAgB,CAC9D,kBAAkB,CAAC,eAAe,CACnC,EAFM,iBAAiB,QAAA,EAAE,kBAAkB,QAE3C,CAAA;IACD,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,aAAa,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,qBAAqB,CAAC,CAAA;IAC3E,IAAM,YAAY,GAAG,IAAA,2BAAa,EAChC,EAAE,EACF,EAAE,CAAC,KAAK,EACR,iBAAiB,EACjB,kBAAkB,CACnB,CAAA;IAED,IAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC1C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;IAC/C,EAAE,CAAC,oBAAoB,CACrB,EAAE,CAAC,WAAW,EACd,EAAE,CAAC,iBAAiB,EACpB,EAAE,CAAC,UAAU,EACb,aAAa,EACb,CAAC,CACF,CAAA;IAED,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,CAAC,CAAA;IAE9B,SAAS,MAAM,CAAC,gBAAmC;QACjD,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,iBAAiB,EAAE,kBAAkB,CAAC,CAAA;QACxD,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;QAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,YAAY,CAAC,CAAA;QAC3C,EAAE,CAAC,aAAa,CACd,EAAE,CAAC,UAAU,EACb,CAAC,EACD,CAAC,EACD,CAAC,EACD,iBAAiB,EACjB,kBAAkB,EAClB,EAAE,CAAC,IAAI,EACP,EAAE,CAAC,aAAa,EAChB,gBAAgB,CACjB,CAAA;QACD,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;QAC/C,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACxC,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACjC,EAAE,CAAC,aAAa,CAAC,YAAY,CAAC,CAAA;QAC9B,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;IACjC,CAAC;IAED,OAAO,EAAE,MAAM,QAAA,EAAE,OAAO,SAAA,EAAE,CAAA;AAC5B,CAAC;AAvFD,gEAuFC","sourcesContent":["import {\n inputResolutions,\n SegmentationConfig,\n} from '../helpers/segmentationHelper'\nimport {\n compileShader,\n createPiplelineStageProgram,\n createTexture,\n glsl,\n} from '../helpers/webglHelper'\n\nexport function buildLoadSegmentationStage(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n segmentationConfig: SegmentationConfig,\n 
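
loadSegmentationStage.render() above uploads each inference result with texSubImage2D as RGBA/UNSIGNED_BYTE, and its fragment shader reads only the alpha channel, so the caller must supply one four-byte quadruplet per mask pixel with the segmentation value in the fourth byte. A sketch of that caller-side contract (the dimensions and probability value are illustrative):

// Width/height must match inputResolutions[segmentationConfig.inputResolution].
const maskWidth = 160;
const maskHeight = 96;
const segmentationData = new Uint8ClampedArray(maskWidth * maskHeight * 4);
for (let i = 0; i < maskWidth * maskHeight; i++) {
  const personProbability = 0.5; // e.g. a model output in [0, 1]
  // Only the alpha byte is sampled by the stage's fragment shader.
  segmentationData[i * 4 + 3] = Math.round(personProbability * 255);
}
// loadSegmentationStage.render(segmentationData) then copies this buffer
// into inputTexture via gl.texSubImage2D(..., gl.RGBA, gl.UNSIGNED_BYTE, ...).
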
outputTexture: WebGLTexture\n) {\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).a;\n outColor = vec4(vec3(0.0), segmentation);\n }\n `\n const [segmentationWidth, segmentationHeight] = inputResolutions[\n segmentationConfig.inputResolution\n ]\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const inputLocation = gl.getUniformLocation(program, 'u_inputSegmentation')\n const inputTexture = createTexture(\n gl,\n gl.RGBA8,\n segmentationWidth,\n segmentationHeight\n )\n\n const frameBuffer = gl.createFramebuffer()\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n outputTexture,\n 0\n )\n\n gl.useProgram(program)\n gl.uniform1i(inputLocation, 1)\n\n function render(segmentationData: Uint8ClampedArray) {\n gl.viewport(0, 0, segmentationWidth, segmentationHeight)\n gl.useProgram(program)\n gl.activeTexture(gl.TEXTURE1)\n gl.bindTexture(gl.TEXTURE_2D, inputTexture)\n gl.texSubImage2D(\n gl.TEXTURE_2D,\n 0,\n 0,\n 0,\n segmentationWidth,\n segmentationHeight,\n gl.RGBA,\n gl.UNSIGNED_BYTE,\n segmentationData\n )\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n }\n\n function cleanUp() {\n gl.deleteFramebuffer(frameBuffer)\n gl.deleteTexture(inputTexture)\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n }\n\n return { render, cleanUp }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/webgl2Pipeline.d.ts b/es5/processors/webgl2/pipelines/webgl2Pipeline.d.ts new file mode 100644 index 0000000..4c89c09 --- /dev/null +++ b/es5/processors/webgl2/pipelines/webgl2Pipeline.d.ts @@ -0,0 +1,10 @@ +import { BackgroundConfig } from '../helpers/backgroundHelper'; +import { PostProcessingConfig } from '../helpers/postProcessingHelper'; +import { SegmentationConfig } from '../helpers/segmentationHelper'; +import { SourcePlayback } from '../helpers/sourceHelper'; +export declare function buildWebGL2Pipeline(sourcePlayback: SourcePlayback, backgroundImage: HTMLImageElement | null, backgroundConfig: BackgroundConfig, segmentationConfig: SegmentationConfig, canvas: HTMLCanvasElement, benchmark: any, debounce: boolean): { + render: (segmentationData: Uint8ClampedArray) => Promise; + sampleInputFrame: () => Promise; + updatePostProcessingConfig: (postProcessingConfig: PostProcessingConfig) => void; + cleanUp: () => void; +}; diff --git a/es5/processors/webgl2/pipelines/webgl2Pipeline.js b/es5/processors/webgl2/pipelines/webgl2Pipeline.js new file mode 100644 index 0000000..1f0ba38 --- /dev/null +++ b/es5/processors/webgl2/pipelines/webgl2Pipeline.js @@ -0,0 +1,161 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildWebGL2Pipeline = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +var backgroundBlurStage_1 = require("./backgroundBlurStage"); +var backgroundImageStage_1 = require("./backgroundImageStage"); +var fastBilateralFilterStage_1 = require("./fastBilateralFilterStage"); +var loadSegmentationStage_1 = require("./loadSegmentationStage"); +function buildWebGL2Pipeline(sourcePlayback, backgroundImage, backgroundConfig, segmentationConfig, canvas, benchmark, debounce) { + var shouldUpscaleCurrentMask = true; + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "], ["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "]))); + var outputWidth = canvas.width, outputHeight = canvas.height; + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var gl = canvas.getContext('webgl2'); + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var vertexArray = gl.createVertexArray(); + gl.bindVertexArray(vertexArray); + var positionBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0]), gl.STATIC_DRAW); + var texCoordBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0]), gl.STATIC_DRAW); + // We don't use texStorage2D here because texImage2D seems faster + // to upload video texture than texSubImage2D even though the latter + // is supposed to be the recommended way: + // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#use_texstorage_to_create_textures + var inputFrameTexture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); + // TODO Rename segmentation and person mask to be more specific + var segmentationTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, segmentationWidth, segmentationHeight); + var personMaskTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight); + var loadSegmentationStage = (0, loadSegmentationStage_1.buildLoadSegmentationStage)(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, segmentationTexture); + var fastBilateralFilterStage = (0, fastBilateralFilterStage_1.buildFastBilateralFilterStage)(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationTexture, segmentationConfig, personMaskTexture, canvas); + var backgroundStage = backgroundConfig.type === 'blur' + ? 
(0, backgroundBlurStage_1.buildBackgroundBlurStage)(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) + : (0, backgroundImageStage_1.buildBackgroundImageStage)(gl, positionBuffer, texCoordBuffer, personMaskTexture, backgroundImage, canvas); + function sampleInputFrame() { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + gl.clearColor(0, 0, 0, 0); + gl.clear(gl.COLOR_BUFFER_BIT); + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture); + // texImage2D seems faster than texSubImage2D to upload + // video texture + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, sourcePlayback.htmlElement); + gl.bindVertexArray(vertexArray); + return [2 /*return*/]; + }); + }); + } + function render(segmentationData) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + benchmark.start('imageCompositionDelay'); + if (shouldUpscaleCurrentMask) { + loadSegmentationStage.render(segmentationData); + } + fastBilateralFilterStage.render(); + backgroundStage.render(); + if (debounce) { + shouldUpscaleCurrentMask = !shouldUpscaleCurrentMask; + } + benchmark.end('imageCompositionDelay'); + return [2 /*return*/]; + }); + }); + } + function updatePostProcessingConfig(postProcessingConfig) { + var blendMode = postProcessingConfig.blendMode, coverage = postProcessingConfig.coverage, lightWrapping = postProcessingConfig.lightWrapping, _a = postProcessingConfig.jointBilateralFilter, jointBilateralFilter = _a === void 0 ? {} : _a; + var sigmaColor = jointBilateralFilter.sigmaColor, sigmaSpace = jointBilateralFilter.sigmaSpace; + if (typeof sigmaColor === 'number') { + fastBilateralFilterStage.updateSigmaColor(sigmaColor); + } + if (typeof sigmaSpace === 'number') { + fastBilateralFilterStage.updateSigmaSpace(sigmaSpace); + } + if (Array.isArray(coverage)) { + if (backgroundConfig.type === 'blur' || backgroundConfig.type === 'image') { + backgroundStage.updateCoverage(coverage); + } + } + if (backgroundConfig.type === 'image') { + var backgroundImageStage = backgroundStage; + if (typeof lightWrapping === 'number') { + backgroundImageStage.updateLightWrapping(lightWrapping); + } + if (typeof blendMode === 'string') { + backgroundImageStage.updateBlendMode(blendMode); + } + } + else if (backgroundConfig.type !== 'blur') { + // TODO Handle no background in a separate pipeline path + var backgroundImageStage = backgroundStage; + backgroundImageStage.updateCoverage([0, 0.9999]); + backgroundImageStage.updateLightWrapping(0); + } + } + function cleanUp() { + backgroundStage.cleanUp(); + fastBilateralFilterStage.cleanUp(); + loadSegmentationStage.cleanUp(); + gl.deleteTexture(personMaskTexture); + gl.deleteTexture(segmentationTexture); + gl.deleteTexture(inputFrameTexture); + gl.deleteBuffer(texCoordBuffer); + gl.deleteBuffer(positionBuffer); + gl.deleteVertexArray(vertexArray); + gl.deleteShader(vertexShader); + } + return { render: render, sampleInputFrame: sampleInputFrame, updatePostProcessingConfig: updatePostProcessingConfig, cleanUp: cleanUp }; +} +exports.buildWebGL2Pipeline = buildWebGL2Pipeline; +var templateObject_1; +//# sourceMappingURL=webgl2Pipeline.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/webgl2Pipeline.js.map b/es5/processors/webgl2/pipelines/webgl2Pipeline.js.map new file mode 100644 index 0000000..66527d3 --- /dev/null +++ b/es5/processors/webgl2/pipelines/webgl2Pipeline.js.map @@ -0,0 +1 @@ 
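The `buildWebGL2Pipeline` factory above wires four stages behind a small `render`/`sampleInputFrame`/`updatePostProcessingConfig`/`cleanUp` API: video-frame upload, segmentation-mask load, fast bilateral filtering, and background blur or image compositing. A minimal sketch of the frame loop that API implies follows; the orchestrating processor lives outside this diff, so `nextMask()` and the declared inputs are assumptions for illustration only. Two details from the code are worth noting: `sampleInputFrame` re-uploads the video element with `texImage2D` every frame (the inline comment reports this measured faster than `texSubImage2D` for video), and when `debounce` is true, `render` flips `shouldUpscaleCurrentMask` so the mask-upload stage runs only on alternate frames while filtering and compositing still run each frame.

```ts
// Illustrative frame loop (assumed wiring); buildWebGL2Pipeline's signature
// is taken from webgl2Pipeline.d.ts above, and everything declared here is a
// stand-in supplied by the caller.
import { BackgroundConfig } from '../helpers/backgroundHelper';
import { SegmentationConfig } from '../helpers/segmentationHelper';
import { SourcePlayback } from '../helpers/sourceHelper';
import { buildWebGL2Pipeline } from './webgl2Pipeline';

declare const sourcePlayback: SourcePlayback;         // wraps the source <video> element
declare const backgroundConfig: BackgroundConfig;     // e.g. a blur-type config
declare const segmentationConfig: SegmentationConfig; // selects the model input resolution
declare const canvas: HTMLCanvasElement;              // output canvas (WebGL2 context)
declare const benchmark: any;                         // Benchmark instance (see es5/utils/Benchmark below)
declare function nextMask(): Uint8ClampedArray;       // assumed inference step, not part of this diff

const pipeline = buildWebGL2Pipeline(
  sourcePlayback,
  null,  // backgroundImage is only read when backgroundConfig.type === 'image'
  backgroundConfig,
  segmentationConfig,
  canvas,
  benchmark,
  true   // debounce: re-upload the mask only every other frame
);

async function onFrame(): Promise<void> {
  await pipeline.sampleInputFrame(); // texImage2D upload of the current video frame
  await pipeline.render(nextMask()); // bilateral-filter the mask, then composite
}
```

Once built, `updatePostProcessingConfig` retunes `sigmaColor`/`sigmaSpace`, coverage, light wrapping, and blend mode at runtime without rebuilding any stage.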
+{"version":3,"file":"webgl2Pipeline.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/webgl2Pipeline.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEA,oEAGsC;AAEtC,sDAA2E;AAC3E,6DAE8B;AAC9B,+DAG+B;AAC/B,uEAA0E;AAC1E,iEAAoE;AAEpE,SAAgB,mBAAmB,CACjC,cAA8B,EAC9B,eAAwC,EACxC,gBAAkC,EAClC,kBAAsC,EACtC,MAAyB,EACzB,SAAc,EACd,QAAiB;IAEjB,IAAI,wBAAwB,GAAG,IAAI,CAAA;IAEnC,IAAM,kBAAkB,OAAG,kBAAI,sRAAA,kNAW9B,IAAA,CAAA;IAEO,IAAO,WAAW,GAA2B,MAAM,MAAjC,EAAU,YAAY,GAAK,MAAM,OAAX,CAAY;IACtD,IAAA,KAA0C,qCAAgB,CAC9D,kBAAkB,CAAC,eAAe,CACnC,EAFM,iBAAiB,QAAA,EAAE,kBAAkB,QAE3C,CAAA;IAED,IAAM,EAAE,GAAG,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAE,CAAA;IAEvC,IAAM,YAAY,GAAG,IAAA,2BAAa,EAAC,EAAE,EAAE,EAAE,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAA;IAE5E,IAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC1C,EAAE,CAAC,eAAe,CAAC,WAAW,CAAC,CAAA;IAE/B,IAAM,cAAc,GAAG,EAAE,CAAC,YAAY,EAAG,CAAA;IACzC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,YAAY,EAAE,cAAc,CAAC,CAAA;IAC9C,EAAE,CAAC,UAAU,CACX,EAAE,CAAC,YAAY,EACf,IAAI,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,EAC9D,EAAE,CAAC,WAAW,CACf,CAAA;IAED,IAAM,cAAc,GAAG,EAAE,CAAC,YAAY,EAAG,CAAA;IACzC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,YAAY,EAAE,cAAc,CAAC,CAAA;IAC9C,EAAE,CAAC,UAAU,CACX,EAAE,CAAC,YAAY,EACf,IAAI,YAAY,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,EAC1D,EAAE,CAAC,WAAW,CACf,CAAA;IAED,iEAAiE;IACjE,oEAAoE;IACpE,yCAAyC;IACzC,oHAAoH;IACpH,IAAM,iBAAiB,GAAG,EAAE,CAAC,aAAa,EAAE,CAAA;IAC5C,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAA;IAChD,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,cAAc,EAAE,EAAE,CAAC,aAAa,CAAC,CAAA;IACpE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,cAAc,EAAE,EAAE,CAAC,aAAa,CAAC,CAAA;IACpE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,kBAAkB,EAAE,EAAE,CAAC,OAAO,CAAC,CAAA;IAClE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,kBAAkB,EAAE,EAAE,CAAC,OAAO,CAAC,CAAA;IAElE,+DAA+D;IAC/D,IAAM,mBAAmB,GAAG,IAAA,2BAAa,EACvC,EAAE,EACF,EAAE,CAAC,KAAK,EACR,iBAAiB,EACjB,kBAAkB,CAClB,CAAA;IACF,IAAM,iBAAiB,GAAG,IAAA,2BAAa,EACrC,EAAE,EACF,EAAE,CAAC,KAAK,EACR,WAAW,EACX,YAAY,CACZ,CAAA;IACF,IAAM,qBAAqB,GAAG,IAAA,kDAA0B,EACtD,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,kBAAkB,EAClB,mBAAmB,CACpB,CAAA;IACD,IAAM,wBAAwB,GAAG,IAAA,wDAA6B,EAC5D,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,mBAAmB,EACnB,kBAAkB,EAClB,iBAAiB,EACjB,MAAM,CACP,CAAA;IACD,IAAM,eAAe,GACnB,gBAAgB,CAAC,IAAI,KAAK,MAAM;QAC9B,CAAC,CAAC,IAAA,8CAAwB,EACtB,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,iBAAiB,EACjB,MAAM,CACP;QACH,CAAC,CAAC,IAAA,gDAAyB,EACvB,EAAE,EACF,cAAc,EACd,cAAc,EACd,iBAAiB,EACjB,eAAe,EACf,MAAM,CACP,CAAA;IAEP,SAAe,gBAAgB;;;gBAC7B,EAAE,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;gBACzB,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,gBAAgB,CAAC,CAAA;gBAE7B,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;gBAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAA;gBAEhD,uDAAuD;gBACvD,gBAAgB;gBAChB,EAAE,CAAC,UAAU,CACX,EAAE,CAAC,UAAU,EACb,CAAC,EACD,EAAE,CAAC,IAAI,EACP,EAAE,CAAC,IAAI,EACP,EAAE,CAAC,aAAa,EAChB,cAAc,CAAC,WAAW,CAC3B,CAAA;gBAED,EAAE,CAAC,eAAe,CAAC,WAAW,CAAC,CAAA;;;;KAChC;IAED,SAAe,MAAM,CAAC,gBAAmC;;;gBACvD,SAAS,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAA;gBACxC,IAAI,wBAAwB,EAAE;oBAC5B,qBAAqB,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAA;iBAC/C;gBACD,wBAAwB,CAAC,MAAM,EAAE,CAAA;gBACjC,eAAe,CAAC,MAAM,EAAE,CAAA;gBACxB,IAAI,QAAQ,EAAE;oBACZ,wBAAwB,GAAG,CAAC,wBAAwB,CAAA;iBACrD;gBACD,SAAS,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;;;;KACvC;IAED,SAAS,0BAA0B,CACjC,oBAA0C;QAGxC,IAAA,SAAS,GAIP,oBAAoB,UAJb,EACT,
QAAQ,GAGN,oBAAoB,SAHd,EACR,aAAa,GAEX,oBAAoB,cAFT,EACb,KACE,oBAAoB,qBADG,EAAzB,oBAAoB,mBAAG,EAAE,KAAA,CACH;QAGtB,IAAA,UAAU,GAER,oBAAoB,WAFZ,EACV,UAAU,GACR,oBAAoB,WADZ,CACY;QAExB,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;YAClC,wBAAwB,CAAC,gBAAgB,CAAC,UAAU,CAAC,CAAA;SACtD;QACD,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;YAClC,wBAAwB,CAAC,gBAAgB,CAAC,UAAU,CAAC,CAAA;SACtD;QACD,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;YAC3B,IAAI,gBAAgB,CAAC,IAAI,KAAK,MAAM,IAAI,gBAAgB,CAAC,IAAI,KAAK,OAAO,EAAE;gBACzE,eAAe,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAA;aACzC;SACF;QACD,IAAI,gBAAgB,CAAC,IAAI,KAAK,OAAO,EAAE;YACrC,IAAM,oBAAoB,GAAG,eAAuC,CAAA;YACpE,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;gBACrC,oBAAoB,CAAC,mBAAmB,CAAC,aAAa,CAAC,CAAA;aACxD;YACD,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;gBACjC,oBAAoB,CAAC,eAAe,CAAC,SAAS,CAAC,CAAA;aAChD;SACF;aAAM,IAAI,gBAAgB,CAAC,IAAI,KAAK,MAAM,EAAE;YAC3C,wDAAwD;YACxD,IAAM,oBAAoB,GAAG,eAAuC,CAAA;YACpE,oBAAoB,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAA;YAChD,oBAAoB,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAA;SAC5C;IACH,CAAC;IAED,SAAS,OAAO;QACd,eAAe,CAAC,OAAO,EAAE,CAAA;QACzB,wBAAwB,CAAC,OAAO,EAAE,CAAA;QAClC,qBAAqB,CAAC,OAAO,EAAE,CAAA;QAE/B,EAAE,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAA;QACnC,EAAE,CAAC,aAAa,CAAC,mBAAmB,CAAC,CAAA;QACrC,EAAE,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAA;QACnC,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;QAC/B,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;QAC/B,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACjC,EAAE,CAAC,YAAY,CAAC,YAAY,CAAC,CAAA;IAC/B,CAAC;IAED,OAAO,EAAE,MAAM,QAAA,EAAE,gBAAgB,kBAAA,EAAE,0BAA0B,4BAAA,EAAE,OAAO,SAAA,EAAE,CAAA;AAC1E,CAAC;AA5MD,kDA4MC","sourcesContent":["import { BackgroundConfig } from '../helpers/backgroundHelper'\nimport { PostProcessingConfig } from '../helpers/postProcessingHelper'\nimport {\n inputResolutions,\n SegmentationConfig,\n} from '../helpers/segmentationHelper'\nimport { SourcePlayback } from '../helpers/sourceHelper'\nimport { compileShader, createTexture, glsl } from '../helpers/webglHelper'\nimport {\n buildBackgroundBlurStage,\n} from './backgroundBlurStage'\nimport {\n BackgroundImageStage,\n buildBackgroundImageStage,\n} from './backgroundImageStage'\nimport { buildFastBilateralFilterStage } from './fastBilateralFilterStage'\nimport { buildLoadSegmentationStage } from './loadSegmentationStage'\n\nexport function buildWebGL2Pipeline(\n sourcePlayback: SourcePlayback,\n backgroundImage: HTMLImageElement | null,\n backgroundConfig: BackgroundConfig,\n segmentationConfig: SegmentationConfig,\n canvas: HTMLCanvasElement,\n benchmark: any,\n debounce: boolean\n) {\n let shouldUpscaleCurrentMask = true\n\n const vertexShaderSource = glsl`#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n `\n\n const { width: outputWidth, height: outputHeight } = canvas;\n const [segmentationWidth, segmentationHeight] = inputResolutions[\n segmentationConfig.inputResolution\n ]\n\n const gl = canvas.getContext('webgl2')!\n\n const vertexShader = compileShader(gl, gl.VERTEX_SHADER, vertexShaderSource)\n\n const vertexArray = gl.createVertexArray()\n gl.bindVertexArray(vertexArray)\n\n const positionBuffer = gl.createBuffer()!\n gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer)\n gl.bufferData(\n gl.ARRAY_BUFFER,\n new Float32Array([-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0]),\n gl.STATIC_DRAW\n )\n\n const texCoordBuffer = gl.createBuffer()!\n gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer)\n gl.bufferData(\n gl.ARRAY_BUFFER,\n new Float32Array([0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0]),\n gl.STATIC_DRAW\n )\n\n // We 
don't use texStorage2D here because texImage2D seems faster\n // to upload video texture than texSubImage2D even though the latter\n // is supposed to be the recommended way:\n // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#use_texstorage_to_create_textures\n const inputFrameTexture = gl.createTexture()\n gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST)\n\n // TODO Rename segmentation and person mask to be more specific\n const segmentationTexture = createTexture(\n gl,\n gl.RGBA8,\n segmentationWidth,\n segmentationHeight\n )!\n const personMaskTexture = createTexture(\n gl,\n gl.RGBA8,\n outputWidth,\n outputHeight\n )!\n const loadSegmentationStage = buildLoadSegmentationStage(\n gl,\n vertexShader,\n positionBuffer,\n texCoordBuffer,\n segmentationConfig,\n segmentationTexture\n )\n const fastBilateralFilterStage = buildFastBilateralFilterStage(\n gl,\n vertexShader,\n positionBuffer,\n texCoordBuffer,\n segmentationTexture,\n segmentationConfig,\n personMaskTexture,\n canvas\n )\n const backgroundStage =\n backgroundConfig.type === 'blur'\n ? buildBackgroundBlurStage(\n gl,\n vertexShader,\n positionBuffer,\n texCoordBuffer,\n personMaskTexture,\n canvas\n )\n : buildBackgroundImageStage(\n gl,\n positionBuffer,\n texCoordBuffer,\n personMaskTexture,\n backgroundImage,\n canvas\n )\n\n async function sampleInputFrame() {\n gl.clearColor(0, 0, 0, 0)\n gl.clear(gl.COLOR_BUFFER_BIT)\n\n gl.activeTexture(gl.TEXTURE0)\n gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture)\n\n // texImage2D seems faster than texSubImage2D to upload\n // video texture\n gl.texImage2D(\n gl.TEXTURE_2D,\n 0,\n gl.RGBA,\n gl.RGBA,\n gl.UNSIGNED_BYTE,\n sourcePlayback.htmlElement\n )\n\n gl.bindVertexArray(vertexArray)\n }\n\n async function render(segmentationData: Uint8ClampedArray) {\n benchmark.start('imageCompositionDelay')\n if (shouldUpscaleCurrentMask) {\n loadSegmentationStage.render(segmentationData)\n }\n fastBilateralFilterStage.render()\n backgroundStage.render()\n if (debounce) {\n shouldUpscaleCurrentMask = !shouldUpscaleCurrentMask\n }\n benchmark.end('imageCompositionDelay')\n }\n\n function updatePostProcessingConfig(\n postProcessingConfig: PostProcessingConfig\n ) {\n const {\n blendMode,\n coverage,\n lightWrapping,\n jointBilateralFilter = {}\n } = postProcessingConfig\n\n const {\n sigmaColor,\n sigmaSpace\n } = jointBilateralFilter\n\n if (typeof sigmaColor === 'number') {\n fastBilateralFilterStage.updateSigmaColor(sigmaColor)\n }\n if (typeof sigmaSpace === 'number') {\n fastBilateralFilterStage.updateSigmaSpace(sigmaSpace)\n }\n if (Array.isArray(coverage)) {\n if (backgroundConfig.type === 'blur' || backgroundConfig.type === 'image') {\n backgroundStage.updateCoverage(coverage)\n }\n }\n if (backgroundConfig.type === 'image') {\n const backgroundImageStage = backgroundStage as BackgroundImageStage\n if (typeof lightWrapping === 'number') {\n backgroundImageStage.updateLightWrapping(lightWrapping)\n }\n if (typeof blendMode === 'string') {\n backgroundImageStage.updateBlendMode(blendMode)\n }\n } else if (backgroundConfig.type !== 'blur') {\n // TODO Handle no background in a separate pipeline path\n const backgroundImageStage = backgroundStage as BackgroundImageStage\n 
backgroundImageStage.updateCoverage([0, 0.9999])\n      backgroundImageStage.updateLightWrapping(0)\n    }\n  }\n\n  function cleanUp() {\n    backgroundStage.cleanUp()\n    fastBilateralFilterStage.cleanUp()\n    loadSegmentationStage.cleanUp()\n\n    gl.deleteTexture(personMaskTexture)\n    gl.deleteTexture(segmentationTexture)\n    gl.deleteTexture(inputFrameTexture)\n    gl.deleteBuffer(texCoordBuffer)\n    gl.deleteBuffer(positionBuffer)\n    gl.deleteVertexArray(vertexArray)\n    gl.deleteShader(vertexShader)\n  }\n\n  return { render, sampleInputFrame, updatePostProcessingConfig, cleanUp }\n}\n"]} \ No newline at end of file diff --git a/es5/types.d.ts b/es5/types.d.ts new file mode 100644 index 0000000..213849b --- /dev/null +++ b/es5/types.d.ts @@ -0,0 +1,79 @@ +/** + * @private + */ +declare global { + interface Window { + chrome: any; + createTwilioTFLiteModule: () => Promise; + createTwilioTFLiteSIMDModule: () => Promise; + OffscreenCanvas: typeof OffscreenCanvas; + Twilio: Object & { + VideoProcessors?: any; + }; + } +} +/** + * @private + */ +export declare enum WebGL2PipelineType { + Blur = "blur", + Image = "image" } +/** + * @private + */ +export interface Timing { + delay?: number; + end?: number; + start?: number; +} +/** + * @private + */ +export interface Dimensions { + height: number; + width: number; +} +/** + * ImageFit specifies the positioning of an image inside a viewport. + */ +export declare enum ImageFit { + /** + * Scale the image up or down to fill the viewport while preserving the aspect ratio. + * The image will be fully visible but will add empty space in the viewport if + * aspect ratios do not match. + */ + Contain = "Contain", + /** + * Scale the image to fill both height and width of the viewport while preserving + * the aspect ratio, but will crop the image if aspect ratios do not match. + */ + Cover = "Cover", + /** + * Stretches the image to fill the viewport regardless of aspect ratio. + */ + Fill = "Fill", + /** + * Ignore height and width and use the original size. + */ + None = "None" } +/** + * Specifies which pipeline to use when processing video frames. + */ +export declare enum Pipeline { + /** + * Use canvas 2d rendering context. Some browsers such as Safari do not + * have full support of this feature. Please test your application to make sure it works as intended. See + * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D#browser_compatibility) + * for reference. + */ + Canvas2D = "Canvas2D", + /** + * Use canvas webgl2 rendering context. Major browsers have support for this feature. However, this does not work + * on some older versions of browsers. Please test your application to make sure it works as intended. See + * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext#browser_compatibility) + * for reference. + */ + WebGL2 = "WebGL2" } diff --git a/es5/types.js b/es5/types.js new file mode 100644 index 0000000..d31b53a --- /dev/null +++ b/es5/types.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pipeline = exports.ImageFit = exports.WebGL2PipelineType = void 0; +/** + * @private + */ +var WebGL2PipelineType; +(function (WebGL2PipelineType) { + WebGL2PipelineType["Blur"] = "blur"; + WebGL2PipelineType["Image"] = "image"; +})(WebGL2PipelineType || (exports.WebGL2PipelineType = WebGL2PipelineType = {})); +/** + * ImageFit specifies the positioning of an image inside a viewport. 
+ */ +var ImageFit; +(function (ImageFit) { + /** + * Scale the image up or down to fill the viewport while preserving the aspect ratio. + * The image will be fully visible but will add empty space in the viewport if + * aspect ratios do not match. + */ + ImageFit["Contain"] = "Contain"; + /** + * Scale the image to fill both height and width of the viewport while preserving + * the aspect ratio, but will crop the image if aspect ratios do not match. + */ + ImageFit["Cover"] = "Cover"; + /** + * Stretches the image to fill the viewport regardless of aspect ratio. + */ + ImageFit["Fill"] = "Fill"; + /** + * Ignore height and width and use the original size. + */ + ImageFit["None"] = "None"; +})(ImageFit || (exports.ImageFit = ImageFit = {})); +/** + * Specifies which pipeline to use when processing video frames. + */ +var Pipeline; +(function (Pipeline) { + /** + * Use canvas 2d rendering context. Some browsers such as Safari do not + * have full support of this feature. Please test your application to make sure it works as intended. See + * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D#browser_compatibility) + * for reference. + */ + Pipeline["Canvas2D"] = "Canvas2D"; + /** + * Use canvas webgl2 rendering context. Major browsers have support for this feature. However, this does not work + * on some older versions of browsers. Please test your application to make sure it works as intended. See + * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext#browser_compatibility) + * for reference. + */ + Pipeline["WebGL2"] = "WebGL2"; +})(Pipeline || (exports.Pipeline = Pipeline = {})); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/es5/types.js.map b/es5/types.js.map new file mode 100644 index 0000000..a51d73d --- /dev/null +++ b/es5/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../lib/types.ts"],"names":[],"mappings":";;;AAaA;;GAEG;AACH,IAAY,kBAGX;AAHD,WAAY,kBAAkB;IAC5B,mCAAa,CAAA;IACb,qCAAe,CAAA;AACjB,CAAC,EAHW,kBAAkB,kCAAlB,kBAAkB,QAG7B;AAmBD;;GAEG;AACH,IAAY,QAuBX;AAvBD,WAAY,QAAQ;IAClB;;;;OAIG;IACH,+BAAmB,CAAA;IAEnB;;;OAGG;IACH,2BAAe,CAAA;IAEf;;OAEG;IACH,yBAAa,CAAA;IAEb;;OAEG;IACH,yBAAa,CAAA;AACf,CAAC,EAvBW,QAAQ,wBAAR,QAAQ,QAuBnB;AAED;;GAEG;AACH,IAAY,QAgBX;AAhBD,WAAY,QAAQ;IAClB;;;;;OAKG;IACH,iCAAqB,CAAA;IAErB;;;;;OAKG;IACH,6BAAiB,CAAA;AACnB,CAAC,EAhBW,QAAQ,wBAAR,QAAQ,QAgBnB","sourcesContent":["/**\n * @private\n */\n declare global {\n interface Window {\n chrome: any;\n createTwilioTFLiteModule: () => Promise;\n createTwilioTFLiteSIMDModule: () => Promise;\n OffscreenCanvas: typeof OffscreenCanvas;\n Twilio: Object & { VideoProcessors?: any };\n }\n}\n\n/**\n * @private\n */\nexport enum WebGL2PipelineType {\n Blur = 'blur',\n Image = 'image',\n}\n\n/**\n * @private\n */\nexport interface Timing {\n delay?: number;\n end?: number;\n start?: number;\n}\n\n/**\n * @private\n */\nexport interface Dimensions {\n height: number;\n width: number;\n}\n\n/**\n * ImageFit specifies the positioning of an image inside a viewport.\n */\nexport enum ImageFit {\n /**\n * Scale the image up or down to fill the viewport while preserving the aspect ratio.\n * The image will be fully visible but will add empty space in the viewport if\n * aspect ratios do not match.\n */\n Contain = 'Contain',\n\n /**\n * Scale the image to fill both height and width of the viewport while preserving\n * the aspect ratio, but will crop the image if aspect 
ratios do not match.\n */\n Cover = 'Cover',\n\n /**\n * Stretches the image to fill the viewport regardless of aspect ratio.\n */\n Fill = 'Fill',\n\n /**\n * Ignore height and width and use the original size.\n */\n None = 'None'\n}\n\n/**\n * Specifies which pipeline to use when processing video frames.\n */\nexport enum Pipeline {\n /**\n * Use canvas 2d rendering context. Some browsers such as Safari do not\n * have full support of this feature. Please test your application to make sure it works as intended. See\n * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D#browser_compatibility)\n * for reference.\n */\n Canvas2D = 'Canvas2D',\n\n /**\n * Use canvas webgl2 rendering context. Major browsers have support for this feature. However, this does not work\n * on some older versions of browsers. Please test your application to make sure it works as intended. See\n * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext#browser_compatibility)\n * for reference.\n */\n WebGL2 = 'WebGL2'\n}\n"]} \ No newline at end of file diff --git a/es5/utils/Benchmark.d.ts b/es5/utils/Benchmark.d.ts new file mode 100644 index 0000000..7de7039 --- /dev/null +++ b/es5/utils/Benchmark.d.ts @@ -0,0 +1,15 @@ +/** + * @private + */ +export declare class Benchmark { + static readonly cacheSize = 41; + private _timingCache; + private _timings; + constructor(); + end(name: string): void; + getAverageDelay(name: string): number | undefined; + getNames(): string[]; + getRate(name: string): number | undefined; + start(name: string): void; + private _save; +} diff --git a/es5/utils/Benchmark.js b/es5/utils/Benchmark.js new file mode 100644 index 0000000..72660e5 --- /dev/null +++ b/es5/utils/Benchmark.js @@ -0,0 +1,79 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Benchmark = void 0; +/** + * @private + */ +var Benchmark = /** @class */ (function () { + function Benchmark() { + this._timingCache = new Map(); + this._timings = new Map(); + } + Benchmark.prototype.end = function (name) { + var timing = this._timings.get(name); + if (!timing) { + return; + } + timing.end = Date.now(); + timing.delay = timing.end - timing.start; + this._save(name, __assign({}, timing)); + }; + Benchmark.prototype.getAverageDelay = function (name) { + var timingCache = this._timingCache.get(name); + if (!timingCache || !timingCache.length) { + return; + } + return timingCache.map(function (timing) { return timing.delay; }) + .reduce(function (total, value) { return total += value; }, 0) / timingCache.length; + }; + Benchmark.prototype.getNames = function () { + return Array.from(this._timingCache.keys()); + }; + Benchmark.prototype.getRate = function (name) { + var timingCache = this._timingCache.get(name); + if (!timingCache || timingCache.length < 2) { + return; + } + var totalDelay = timingCache[timingCache.length - 1].end - timingCache[0].start; + return (timingCache.length / totalDelay) * 1000; + }; + Benchmark.prototype.start = function (name) { + var timing = this._timings.get(name); + if (!timing) { + timing = {}; + this._timings.set(name, timing); + } + 
timing.start = Date.now(); + delete timing.end; + delete timing.delay; + }; + Benchmark.prototype._save = function (name, timing) { + var timingCache = this._timingCache.get(name); + if (!timingCache) { + timingCache = []; + this._timingCache.set(name, timingCache); + } + timingCache.push(timing); + if (timingCache.length > Benchmark.cacheSize) { + timingCache.splice(0, timingCache.length - Benchmark.cacheSize); + } + }; + // NOTE (csantos): How much timing information to save per benchmark. + // This is about the amount of timing info generated on a 24fps input. + // Enough samples to calculate fps + Benchmark.cacheSize = 41; + return Benchmark; +}()); +exports.Benchmark = Benchmark; +//# sourceMappingURL=Benchmark.js.map \ No newline at end of file diff --git a/es5/utils/Benchmark.js.map b/es5/utils/Benchmark.js.map new file mode 100644 index 0000000..fb5a05d --- /dev/null +++ b/es5/utils/Benchmark.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Benchmark.js","sourceRoot":"","sources":["../../lib/utils/Benchmark.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAEA;;GAEG;AACH;IAUE;QACE,IAAI,CAAC,YAAY,GAAG,IAAI,GAAG,EAAE,CAAC;QAC9B,IAAI,CAAC,QAAQ,GAAG,IAAI,GAAG,EAAE,CAAC;IAC5B,CAAC;IAED,uBAAG,GAAH,UAAI,IAAY;QACd,IAAM,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,CAAC,MAAM,EAAE;YACX,OAAO;SACR;QACD,MAAM,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QACxB,MAAM,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC,KAAM,CAAC;QAC1C,IAAI,CAAC,KAAK,CAAC,IAAI,eAAM,MAAM,EAAE,CAAC;IAChC,CAAC;IAED,mCAAe,GAAf,UAAgB,IAAY;QAC1B,IAAM,WAAW,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAChD,IAAI,CAAC,WAAW,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE;YACvC,OAAO;SACR;QACD,OAAO,WAAW,CAAC,GAAG,CAAC,UAAA,MAAM,IAAI,OAAA,MAAM,CAAC,KAAM,EAAb,CAAa,CAAC;aAC5C,MAAM,CAAC,UAAC,KAAa,EAAE,KAAa,IAAK,OAAA,KAAK,IAAI,KAAK,EAAd,CAAc,EAAE,CAAC,CAAC,GAAG,WAAW,CAAC,MAAM,CAAC;IACtF,CAAC;IAED,4BAAQ,GAAR;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC;IAC9C,CAAC;IAED,2BAAO,GAAP,UAAQ,IAAY;QAClB,IAAM,WAAW,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAChD,IAAI,CAAC,WAAW,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE;YAC1C,OAAO;SACR;QACD,IAAM,UAAU,GAAG,WAAW,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAI,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC,KAAM,CAAC;QACpF,OAAO,CAAC,WAAW,CAAC,MAAM,GAAG,UAAU,CAAC,GAAG,IAAI,CAAC;IAClD,CAAC;IAED,yBAAK,GAAL,UAAM,IAAY;QAChB,IAAI,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,GAAG,EAAE,CAAC;YACZ,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACjC;QACD,MAAM,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAC1B,OAAO,MAAM,CAAC,GAAG,CAAC;QAClB,OAAO,MAAM,CAAC,KAAK,CAAC;IACtB,CAAC;IAEO,yBAAK,GAAb,UAAc,IAAY,EAAE,MAAc;QACxC,IAAI,WAAW,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAC9C,IAAI,CAAC,WAAW,EAAE;YAChB,WAAW,GAAG,EAAE,CAAC;YACjB,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;SAC1C;QAED,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAEzB,IAAI,WAAW,CAAC,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE;YAC5C,WAAW,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW,CAAC,MAAM,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC;SACjE;IACH,CAAC;IApED,qEAAqE;IACrE,sEAAsE;IACtE,kCAAkC;IAClB,mBAAS,GAAG,EAAE,CAAC;IAkEjC,gBAAC;CAAA,AAvED,IAuEC;AAvEY,8BAAS","sourcesContent":["import { Timing } from '../types';\n\n/**\n * @private\n */\nexport class Benchmark {\n\n // NOTE (csantos): How much timing information to save per benchmark.\n // This is about the amount of timing info generated on a 24fps input.\n // Enough samples to calculate fps\n static readonly cacheSize = 41;\n\n private _timingCache: Map;\n private _timings: Map;\n\n constructor() 
{\n this._timingCache = new Map();\n this._timings = new Map();\n }\n\n end(name: string) {\n const timing = this._timings.get(name);\n if (!timing) {\n return;\n }\n timing.end = Date.now();\n timing.delay = timing.end - timing.start!;\n this._save(name, {...timing});\n }\n\n getAverageDelay(name: string): number | undefined {\n const timingCache = this._timingCache.get(name);\n if (!timingCache || !timingCache.length) {\n return;\n }\n return timingCache.map(timing => timing.delay!)\n .reduce((total: number, value: number) => total += value, 0) / timingCache.length;\n }\n\n getNames(): string[] {\n return Array.from(this._timingCache.keys());\n }\n\n getRate(name: string): number | undefined {\n const timingCache = this._timingCache.get(name);\n if (!timingCache || timingCache.length < 2) {\n return;\n }\n const totalDelay = timingCache[timingCache.length - 1].end! - timingCache[0].start!;\n return (timingCache.length / totalDelay) * 1000;\n }\n\n start(name: string) {\n let timing = this._timings.get(name);\n if (!timing) {\n timing = {};\n this._timings.set(name, timing);\n }\n timing.start = Date.now();\n delete timing.end;\n delete timing.delay;\n }\n\n private _save(name: string, timing: Timing) {\n let timingCache = this._timingCache.get(name);\n if (!timingCache) {\n timingCache = [];\n this._timingCache.set(name, timingCache);\n }\n\n timingCache.push(timing);\n\n if (timingCache.length > Benchmark.cacheSize) {\n timingCache.splice(0, timingCache.length - Benchmark.cacheSize);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/es5/utils/TwilioTFLite.d.ts b/es5/utils/TwilioTFLite.d.ts new file mode 100644 index 0000000..762d5b8 --- /dev/null +++ b/es5/utils/TwilioTFLite.d.ts @@ -0,0 +1,14 @@ +/** + * @private + */ +export declare class TwilioTFLite { + private _inputBuffer; + private _isSimdEnabled; + private _tflite; + get isSimdEnabled(): boolean | null; + initialize(assetsPath: string, modelName: string, moduleLoaderName: string, moduleSimdLoaderName: string): Promise; + loadInputBuffer(inputBuffer: Uint8ClampedArray): void; + runInference(): Uint8ClampedArray; + private _loadScript; + private _loadWasmModule; +} diff --git a/es5/utils/TwilioTFLite.js b/es5/utils/TwilioTFLite.js new file mode 100644 index 0000000..6c8cc3e --- /dev/null +++ b/es5/utils/TwilioTFLite.js @@ -0,0 +1,174 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TwilioTFLite = void 0; +var loadedScripts = new Set(); +var model; +/** + * @private + */ +var TwilioTFLite = /** @class */ (function () { + function TwilioTFLite() { + this._inputBuffer = null; + this._isSimdEnabled = null; + this._tflite = null; + } + Object.defineProperty(TwilioTFLite.prototype, "isSimdEnabled", { + get: function () { + return this._isSimdEnabled; + }, + enumerable: false, + configurable: true + }); + TwilioTFLite.prototype.initialize = function (assetsPath, modelName, moduleLoaderName, moduleSimdLoaderName) { + return __awaiter(this, void 0, void 0, function () { + var _a, modelResponse, _b, tflite, modelBufferOffset; + return __generator(this, function (_c) { + switch (_c.label) { + case 0: + if (this._tflite) { + return [2 /*return*/]; + } + return [4 /*yield*/, Promise.all([ + this._loadWasmModule(assetsPath, moduleLoaderName, moduleSimdLoaderName), + fetch("".concat(assetsPath).concat(modelName)), + ])]; + case 1: + _a = _c.sent(), modelResponse = _a[1]; + _b = model; + if (_b) return [3 /*break*/, 3]; + return [4 /*yield*/, modelResponse.arrayBuffer()]; + case 2: + _b = (_c.sent()); + _c.label = 3; + case 3: + model = _b; + tflite = this._tflite; + modelBufferOffset = tflite._getModelBufferMemoryOffset(); + tflite.HEAPU8.set(new Uint8Array(model), modelBufferOffset); + tflite._loadModel(model.byteLength); + return [2 /*return*/]; + } + }); + }); + }; + TwilioTFLite.prototype.loadInputBuffer = function (inputBuffer) { + var tflite = this._tflite; + var height = tflite._getInputHeight(); + var width = tflite._getInputWidth(); + var pixels = width * height; + var tfliteInputMemoryOffset = tflite._getInputMemoryOffset() / 4; + for (var i = 0; i < pixels; i++) { + var curTFLiteOffset = tfliteInputMemoryOffset + 
i * 3; + var curImageBufferOffset = i * 4; + tflite.HEAPF32[curTFLiteOffset] = inputBuffer[curImageBufferOffset] / 255; + tflite.HEAPF32[curTFLiteOffset + 1] = inputBuffer[curImageBufferOffset + 1] / 255; + tflite.HEAPF32[curTFLiteOffset + 2] = inputBuffer[curImageBufferOffset + 2] / 255; + } + this._inputBuffer = inputBuffer; + }; + TwilioTFLite.prototype.runInference = function () { + var tflite = this._tflite; + var height = tflite._getInputHeight(); + var width = tflite._getInputWidth(); + var pixels = width * height; + var tfliteOutputMemoryOffset = tflite._getOutputMemoryOffset() / 4; + tflite._runInference(); + var inputBuffer = this._inputBuffer || new Uint8ClampedArray(pixels * 4); + for (var i = 0; i < pixels; i++) { + inputBuffer[i * 4 + 3] = Math.round(tflite.HEAPF32[tfliteOutputMemoryOffset + i] * 255); + } + return inputBuffer; + }; + TwilioTFLite.prototype._loadScript = function (path) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + if (loadedScripts.has(path)) { + return [2 /*return*/]; + } + return [2 /*return*/, new Promise(function (resolve, reject) { + var script = document.createElement('script'); + script.onload = function () { + loadedScripts.add(path); + resolve(); + }; + script.onerror = function () { + reject(); + }; + document.head.append(script); + script.src = path; + })]; + }); + }); + }; + TwilioTFLite.prototype._loadWasmModule = function (assetsPath, moduleLoaderName, moduleSimdLoaderName) { + return __awaiter(this, void 0, void 0, function () { + var _a, _b, _c; + return __generator(this, function (_d) { + switch (_d.label) { + case 0: + _d.trys.push([0, 3, , 6]); + return [4 /*yield*/, this._loadScript("".concat(assetsPath).concat(moduleSimdLoaderName))]; + case 1: + _d.sent(); + _a = this; + return [4 /*yield*/, createTwilioTFLiteSIMDModule()]; + case 2: + _a._tflite = _d.sent(); + this._isSimdEnabled = true; + return [3 /*break*/, 6]; + case 3: + _b = _d.sent(); + return [4 /*yield*/, this._loadScript("".concat(assetsPath).concat(moduleLoaderName))]; + case 4: + _d.sent(); + _c = this; + return [4 /*yield*/, createTwilioTFLiteModule()]; + case 5: + _c._tflite = _d.sent(); + this._isSimdEnabled = false; + return [3 /*break*/, 6]; + case 6: return [2 /*return*/]; + } + }); + }); + }; + return TwilioTFLite; +}()); +exports.TwilioTFLite = TwilioTFLite; +//# sourceMappingURL=TwilioTFLite.js.map \ No newline at end of file diff --git a/es5/utils/TwilioTFLite.js.map b/es5/utils/TwilioTFLite.js.map new file mode 100644 index 0000000..598c6a2 --- /dev/null +++ b/es5/utils/TwilioTFLite.js.map @@ -0,0 +1 @@ 
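Two conversions in `TwilioTFLite` above are easy to misread, so here is a worked, standalone sketch of the buffer layouts used by `loadInputBuffer` and `runInference`. Plain typed arrays stand in for the wasm heap views (`tflite.HEAPF32`), so nothing below touches a real Emscripten module. Input pixels arrive as RGBA bytes and are packed into normalized RGB floats at stride 3 (the alpha byte is dropped); inference produces one float per pixel, which is scaled back to a byte and written into the alpha channel of the same RGBA buffer, which is the same alpha that the load-segmentation stage later samples.

```ts
// Standalone illustration; inputRegion/outputRegion are assumed stand-ins
// for the module's HEAPF32 input and output regions.
const pixels = 4;                                         // a tiny 2x2 "frame"
const rgba = new Uint8ClampedArray(pixels * 4).fill(128); // mid-grey RGBA input
const inputRegion = new Float32Array(pixels * 3);         // model input: packed RGB

// loadInputBuffer: RGBA bytes -> RGB floats in [0, 1]
for (let i = 0; i < pixels; i++) {
  inputRegion[i * 3] = rgba[i * 4] / 255;         // R
  inputRegion[i * 3 + 1] = rgba[i * 4 + 1] / 255; // G
  inputRegion[i * 3 + 2] = rgba[i * 4 + 2] / 255; // B (alpha is skipped)
}

// runInference: per-pixel probability -> alpha byte of the same buffer
const outputRegion = new Float32Array(pixels).fill(0.75); // fake mask output
for (let i = 0; i < pixels; i++) {
  rgba[i * 4 + 3] = Math.round(outputRegion[i] * 255);    // 0.75 -> 191
}
```

The SIMD detection in `_loadWasmModule` is orthogonal to this: it injects the SIMD loader script first and falls back to the plain module only if that throws, recording the outcome in `isSimdEnabled`.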
+{"version":3,"file":"TwilioTFLite.js","sourceRoot":"","sources":["../../lib/utils/TwilioTFLite.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAGA,IAAM,aAAa,GAAG,IAAI,GAAG,EAAU,CAAC;AACxC,IAAI,KAAkB,CAAC;AAEvB;;GAEG;AACH;IAAA;QACU,iBAAY,GAA6B,IAAI,CAAC;QAC9C,mBAAc,GAAmB,IAAI,CAAC;QACtC,YAAO,GAAQ,IAAI,CAAC;IAgG9B,CAAC;IA9FC,sBAAI,uCAAa;aAAjB;YACE,OAAO,IAAI,CAAC,cAAc,CAAC;QAC7B,CAAC;;;OAAA;IAEK,iCAAU,GAAhB,UACE,UAAkB,EAClB,SAAiB,EACjB,gBAAwB,EACxB,oBAA4B;;;;;;wBAE5B,IAAI,IAAI,CAAC,OAAO,EAAE;4BAChB,sBAAO;yBACR;wBAC2C,qBAAM,OAAO,CAAC,GAAG,CAAC;gCAC5D,IAAI,CAAC,eAAe,CAClB,UAAU,EACV,gBAAgB,EAChB,oBAAoB,CACrB;gCACD,KAAK,CAAC,UAAG,UAAU,SAAG,SAAS,CAAE,CAAC;6BACnC,CAAC,EAAA;;wBAPI,KAAsC,SAO1C,EAPO,aAAa,QAAA;wBAQd,KAAA,KAAK,CAAA;gCAAL,wBAAK;wBAAI,qBAAM,aAAa,CAAC,WAAW,EAAE,EAAA;;8BAAjC,SAAiC;;;wBAAlD,KAAK,KAA6C,CAAC;wBAClC,MAAM,GAAK,IAAI,QAAT,CAAU;wBAC3B,iBAAiB,GAAG,MAAM,CAAC,2BAA2B,EAAE,CAAC;wBAC/D,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,iBAAiB,CAAC,CAAC;wBAC5D,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;;;;;KACrC;IAED,sCAAe,GAAf,UAAgB,WAA8B;QACpC,IAAS,MAAM,GAAK,IAAI,QAAT,CAAU;QACjC,IAAM,MAAM,GAAG,MAAM,CAAC,eAAe,EAAE,CAAC;QACxC,IAAM,KAAK,GAAG,MAAM,CAAC,cAAc,EAAE,CAAC;QACtC,IAAM,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC;QAC9B,IAAM,uBAAuB,GAAG,MAAM,CAAC,qBAAqB,EAAE,GAAG,CAAC,CAAC;QAEnE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;YAC/B,IAAM,eAAe,GAAG,uBAAuB,GAAG,CAAC,GAAG,CAAC,CAAC;YACxD,IAAM,oBAAoB,GAAG,CAAC,GAAG,CAAC,CAAC;YACnC,MAAM,CAAC,OAAO,CAAC,eAAe,CAAC,GAAG,WAAW,CAAC,oBAAoB,CAAC,GAAG,GAAG,CAAC;YAC1E,MAAM,CAAC,OAAO,CAAC,eAAe,GAAG,CAAC,CAAC,GAAG,WAAW,CAAC,oBAAoB,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;YAClF,MAAM,CAAC,OAAO,CAAC,eAAe,GAAG,CAAC,CAAC,GAAG,WAAW,CAAC,oBAAoB,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;SACnF;QACD,IAAI,CAAC,YAAY,GAAG,WAAW,CAAC;IAClC,CAAC;IAED,mCAAY,GAAZ;QACU,IAAS,MAAM,GAAK,IAAI,QAAT,CAAU;QACjC,IAAM,MAAM,GAAG,MAAM,CAAC,eAAe,EAAE,CAAC;QACxC,IAAM,KAAK,GAAG,MAAM,CAAC,cAAc,EAAE,CAAC;QACtC,IAAM,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC;QAC9B,IAAM,wBAAwB,GAAG,MAAM,CAAC,sBAAsB,EAAE,GAAG,CAAC,CAAC;QAErE,MAAM,CAAC,aAAa,EAAE,CAAC;QAEvB,IAAM,WAAW,GAAG,IAAI,CAAC,YAAY,IAAI,IAAI,iBAAiB,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAC3E,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;YAC/B,WAAY,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,OAAO,CAAC,wBAAwB,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC;SAC1F;QACD,OAAO,WAAY,CAAC;IACtB,CAAC;IAEa,kCAAW,GAAzB,UAA0B,IAAY;;;gBACpC,IAAI,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;oBAC3B,sBAAO;iBACR;gBACD,sBAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;wBACjC,IAAM,MAAM,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;wBAChD,MAAM,CAAC,MAAM,GAAG;4BACd,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;4BACxB,OAAO,EAAE,CAAC;wBACZ,CAAC,CAAC;wBACF,MAAM,CAAC,OAAO,GAAG;4BACf,MAAM,EAAE,CAAC;wBACX,CAAC,CAAC;wBACF,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;wBAC7B,MAAM,CAAC,GAAG,GAAG,IAAI,CAAC;oBACpB,CAAC,CAAC,EAAC;;;KACJ;IAEa,sCAAe,GAA7B,UACE,UAAkB,EAClB,gBAAwB,EACxB,oBAA4B;;;;;;;wBAG1B,qBAAM,IAAI,CAAC,WAAW,CAAC,UAAG,UAAU,SAAG,oBAAoB,CAAE,CAAC,EAAA;;wBAA9D,SAA8D,CAAC;wBAC/D,KAAA,IAAI,CAAA;wBAAW,qBAAM,4BAA4B,EAAE,EAAA;;wBAAnD,GAAK,OAAO,GAAG,SAAoC,CAAC;wBACpD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC;;;;wBAE3B,qBAAM,IAAI,CAAC,WAAW,CAAC,UAAG,UAAU,SAAG,gBAAgB,CAAE,CAAC,EAAA;;wBAA1D,SAA0D,CAAC;wBAC3D,KAAA,IAAI,CAAA;wBAAW,qBAAM,wBAAwB,EAAE,EAAA;;wBAA/C,GAAK,OAAO,GAAG,SAAgC,CAAC;wBAChD,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC;;;;;;KAE/B;IACH,mBAAC;AAAD,CAAC,AAnGD,IAmGC;AAnGY,oCAAY","sourcesContent":["declare function createTwilioTFLiteModule(): Promise;\ndeclare function 
createTwilioTFLiteSIMDModule(): Promise;\n\nconst loadedScripts = new Set();\nlet model: ArrayBuffer;\n\n/**\n * @private\n */\nexport class TwilioTFLite {\n private _inputBuffer: Uint8ClampedArray | null = null;\n private _isSimdEnabled: boolean | null = null;\n private _tflite: any = null;\n\n get isSimdEnabled(): boolean | null {\n return this._isSimdEnabled;\n }\n\n async initialize(\n assetsPath: string,\n modelName: string,\n moduleLoaderName: string,\n moduleSimdLoaderName: string,\n ): Promise {\n if (this._tflite) {\n return;\n }\n const [, modelResponse]: [void, Response] = await Promise.all([\n this._loadWasmModule(\n assetsPath,\n moduleLoaderName,\n moduleSimdLoaderName,\n ),\n fetch(`${assetsPath}${modelName}`),\n ]);\n model = model || await modelResponse.arrayBuffer();\n const { _tflite: tflite } = this;\n const modelBufferOffset = tflite._getModelBufferMemoryOffset();\n tflite.HEAPU8.set(new Uint8Array(model), modelBufferOffset);\n tflite._loadModel(model.byteLength);\n }\n\n loadInputBuffer(inputBuffer: Uint8ClampedArray): void {\n const { _tflite: tflite } = this;\n const height = tflite._getInputHeight();\n const width = tflite._getInputWidth();\n const pixels = width * height;\n const tfliteInputMemoryOffset = tflite._getInputMemoryOffset() / 4;\n\n for (let i = 0; i < pixels; i++) {\n const curTFLiteOffset = tfliteInputMemoryOffset + i * 3;\n const curImageBufferOffset = i * 4;\n tflite.HEAPF32[curTFLiteOffset] = inputBuffer[curImageBufferOffset] / 255;\n tflite.HEAPF32[curTFLiteOffset + 1] = inputBuffer[curImageBufferOffset + 1] / 255;\n tflite.HEAPF32[curTFLiteOffset + 2] = inputBuffer[curImageBufferOffset + 2] / 255;\n }\n this._inputBuffer = inputBuffer;\n }\n\n runInference(): Uint8ClampedArray {\n const { _tflite: tflite } = this;\n const height = tflite._getInputHeight();\n const width = tflite._getInputWidth();\n const pixels = width * height;\n const tfliteOutputMemoryOffset = tflite._getOutputMemoryOffset() / 4;\n\n tflite._runInference();\n\n const inputBuffer = this._inputBuffer || new Uint8ClampedArray(pixels * 4);\n for (let i = 0; i < pixels; i++) {\n inputBuffer![i * 4 + 3] = Math.round(tflite.HEAPF32[tfliteOutputMemoryOffset + i] * 255);\n }\n return inputBuffer!;\n }\n\n private async _loadScript(path: string): Promise {\n if (loadedScripts.has(path)) {\n return;\n }\n return new Promise((resolve, reject) => {\n const script = document.createElement('script');\n script.onload = () => {\n loadedScripts.add(path);\n resolve();\n };\n script.onerror = () => {\n reject();\n };\n document.head.append(script);\n script.src = path;\n });\n }\n\n private async _loadWasmModule(\n assetsPath: string,\n moduleLoaderName: string,\n moduleSimdLoaderName: string,\n ): Promise {\n try {\n await this._loadScript(`${assetsPath}${moduleSimdLoaderName}`);\n this._tflite = await createTwilioTFLiteSIMDModule();\n this._isSimdEnabled = true;\n } catch {\n await this._loadScript(`${assetsPath}${moduleLoaderName}`);\n this._tflite = await createTwilioTFLiteModule();\n this._isSimdEnabled = false;\n }\n }\n}\n"]} \ No newline at end of file diff --git a/es5/utils/support.d.ts b/es5/utils/support.d.ts new file mode 100644 index 0000000..31f8897 --- /dev/null +++ b/es5/utils/support.d.ts @@ -0,0 +1,24 @@ +/** + * @private + */ +export declare function isBrowserSupported(): boolean; +/** + * @private + */ +export declare function isChromiumImageBitmap(): boolean; +/** + * Check if the current browser is officially supported by twilio-video-processors.js. 
+ * This is set to `true` for browsers that support canvas + * [2D](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D) or + * [webgl2](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext) + * rendering context. + * @example + * ```ts + * import { isSupported } from '@twilio/video-processors'; + * + * if (isSupported) { + * // Initialize the background processors + * } + * ``` + */ +export declare const isSupported: boolean; diff --git a/es5/utils/support.js b/es5/utils/support.js new file mode 100644 index 0000000..948bf43 --- /dev/null +++ b/es5/utils/support.js @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isSupported = exports.isChromiumImageBitmap = exports.isBrowserSupported = void 0; +/** + * @private + */ +function getCanvas() { + return typeof OffscreenCanvas !== 'undefined' ? new OffscreenCanvas(1, 1) : document.createElement('canvas'); +} +/** + * @private + */ +function isBrowserSupported() { + if (typeof window !== 'undefined' && typeof document !== 'undefined') { + return !!(getCanvas().getContext('2d') || getCanvas().getContext('webgl2')); + } + else { + return false; + } +} +exports.isBrowserSupported = isBrowserSupported; +/** + * @private + */ +function isChromiumImageBitmap() { + return typeof chrome === 'object' + && /Chrome/.test(navigator.userAgent) + && typeof createImageBitmap === 'function'; +} +exports.isChromiumImageBitmap = isChromiumImageBitmap; +/** + * Check if the current browser is officially supported by twilio-video-processors.js. + * This is set to `true` for browsers that support canvas + * [2D](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D) or + * [webgl2](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext) + * rendering context. + * @example + * ```ts + * import { isSupported } from '@twilio/video-processors'; + * + * if (isSupported) { + * // Initialize the background processors + * } + * ``` + */ +exports.isSupported = isBrowserSupported(); +//# sourceMappingURL=support.js.map \ No newline at end of file diff --git a/es5/utils/support.js.map b/es5/utils/support.js.map new file mode 100644 index 0000000..07bb2e5 --- /dev/null +++ b/es5/utils/support.js.map @@ -0,0 +1 @@ +{"version":3,"file":"support.js","sourceRoot":"","sources":["../../lib/utils/support.ts"],"names":[],"mappings":";;;AAEA;;GAEG;AACH,SAAS,SAAS;IAChB,OAAO,OAAO,eAAe,KAAK,WAAW,CAAC,CAAC,CAAC,IAAI,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;AAC/G,CAAC;AAED;;GAEG;AACH,SAAgB,kBAAkB;IAChC,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;QACpE,OAAO,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,SAAS,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC;KAC7E;SAAM;QACL,OAAO,KAAK,CAAC;KACd;AACH,CAAC;AAND,gDAMC;AAED;;GAEG;AACH,SAAgB,qBAAqB;IACnC,OAAO,OAAO,MAAM,KAAK,QAAQ;WAC5B,QAAQ,CAAC,IAAI,CAAC,SAAS,CAAC,SAAS,CAAC;WAClC,OAAO,iBAAiB,KAAK,UAAU,CAAC;AAC/C,CAAC;AAJD,sDAIC;AAED;;;;;;;;;;;;;;GAcG;AACU,QAAA,WAAW,GAAG,kBAAkB,EAAE,CAAC","sourcesContent":["declare const chrome: any;\n\n/**\n * @private\n */\nfunction getCanvas() {\n return typeof OffscreenCanvas !== 'undefined' ? 
new OffscreenCanvas(1, 1) : document.createElement('canvas');\n}\n\n/**\n * @private\n */\nexport function isBrowserSupported() {\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n return !!(getCanvas().getContext('2d') || getCanvas().getContext('webgl2'));\n } else {\n return false;\n }\n}\n\n/**\n * @private\n */\nexport function isChromiumImageBitmap() {\n return typeof chrome === 'object'\n && /Chrome/.test(navigator.userAgent)\n && typeof createImageBitmap === 'function';\n}\n\n/**\n * Check if the current browser is officially supported by twilio-video-processors.js.\n * This is set to `true` for browsers that support canvas\n * [2D](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D) or\n * [webgl2](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext)\n * rendering context.\n * @example\n * ```ts\n * import { isSupported } from '@twilio/video-processors';\n *\n * if (isSupported) {\n * // Initialize the background processors\n * }\n * ```\n */\nexport const isSupported = isBrowserSupported();\n"]} \ No newline at end of file diff --git a/es5/utils/version.d.ts b/es5/utils/version.d.ts new file mode 100644 index 0000000..f640437 --- /dev/null +++ b/es5/utils/version.d.ts @@ -0,0 +1,4 @@ +/** + * The current version of the library. + */ +export declare const version: string; diff --git a/es5/utils/version.js b/es5/utils/version.js new file mode 100644 index 0000000..9df1d11 --- /dev/null +++ b/es5/utils/version.js @@ -0,0 +1,9 @@ +"use strict"; +// This file is generated on build. To make changes, see scripts/version.js +Object.defineProperty(exports, "__esModule", { value: true }); +exports.version = void 0; +/** + * The current version of the library. + */ +exports.version = '2.2.0'; +//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/es5/utils/version.js.map b/es5/utils/version.js.map new file mode 100644 index 0000000..9e7c3b6 --- /dev/null +++ b/es5/utils/version.js.map @@ -0,0 +1 @@ +{"version":3,"file":"version.js","sourceRoot":"","sources":["../../lib/utils/version.ts"],"names":[],"mappings":";AAAA,2EAA2E;;;AAE3E;;GAEG;AACU,QAAA,OAAO,GAAW,OAAO,CAAC","sourcesContent":["// This file is generated on build. To make changes, see scripts/version.js\n\n/**\n * The current version of the library.\n */\nexport const version: string = '2.2.0';\n"]} \ No newline at end of file diff --git a/lib/utils/version.ts b/lib/utils/version.ts new file mode 100644 index 0000000..9cbac31 --- /dev/null +++ b/lib/utils/version.ts @@ -0,0 +1,6 @@ +// This file is generated on build. To make changes, see scripts/version.js + +/** + * The current version of the library. + */ +export const version: string = '2.2.0'; diff --git a/package.json b/package.json index d81ae7c..4009173 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "@twilio/video-processors", "title": "Twilio Video Processors", "description": "Twilio Video Processors JavaScript Library", - "version": "2.2.0-dev", + "version": "2.2.0", "homepage": "https://github.com/twilio/twilio-video-processors.js#readme", "author": "Charlie Santos ", "contributors": [