diff --git a/dist/build/selfie_segmentation_landscape.tflite b/dist/build/selfie_segmentation_landscape.tflite new file mode 100644 index 0000000..4ea3f8a Binary files /dev/null and b/dist/build/selfie_segmentation_landscape.tflite differ diff --git a/dist/build/tflite-1-0-0.js b/dist/build/tflite-1-0-0.js new file mode 100644 index 0000000..7d2e41c --- /dev/null +++ b/dist/build/tflite-1-0-0.js @@ -0,0 +1,21 @@ + +var createTwilioTFLiteModule = (function() { + var _scriptDir = typeof document !== 'undefined' && document.currentScript ? document.currentScript.src : undefined; + if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename; + return ( +function(createTwilioTFLiteModule) { + createTwilioTFLiteModule = createTwilioTFLiteModule || {}; + +var Module=typeof createTwilioTFLiteModule!=="undefined"?createTwilioTFLiteModule:{};var readyPromiseResolve,readyPromiseReject;Module["ready"]=new Promise(function(resolve,reject){readyPromiseResolve=resolve;readyPromiseReject=reject});var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram="./this.program";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window==="object";ENVIRONMENT_IS_WORKER=typeof importScripts==="function";ENVIRONMENT_IS_NODE=typeof process==="object"&&typeof process.versions==="object"&&typeof process.versions.node==="string";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var scriptDirectory="";function locateFile(path){if(Module["locateFile"]){return Module["locateFile"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require("path").dirname(scriptDirectory)+"/"}else{scriptDirectory=__dirname+"/"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require("fs");if(!nodePath)nodePath=require("path");filename=nodePath["normalize"](filename);return nodeFS["readFileSync"](filename,binary?null:"utf8")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process["argv"].length>1){thisProgram=process["argv"][1].replace(/\\/g,"/")}arguments_=process["argv"].slice(2);process["on"]("uncaughtException",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process["on"]("unhandledRejection",abort);quit_=function(status){process["exit"](status)};Module["inspect"]=function(){return"[Emscripten Module object]"}}else if(ENVIRONMENT_IS_SHELL){if(typeof read!="undefined"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer==="function"){return new Uint8Array(readbuffer(f))}data=read(f,"binary");assert(typeof data==="object");return data};if(typeof scriptArgs!="undefined"){arguments_=scriptArgs}else if(typeof arguments!="undefined"){arguments_=arguments}if(typeof quit==="function"){quit_=function(status){quit(status)}}if(typeof print!=="undefined"){if(typeof console==="undefined")console={};console.log=print;console.warn=console.error=typeof printErr!=="undefined"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(typeof 
document!=="undefined"&&document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf("blob:")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf("/")+1)}else{scriptDirectory=""}{read_=function(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.responseType="arraybuffer";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open("GET",url,true);xhr.responseType="arraybuffer";xhr.onload=function(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}var out=Module["print"]||console.log.bind(console);var err=Module["printErr"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module["arguments"])arguments_=Module["arguments"];if(Module["thisProgram"])thisProgram=Module["thisProgram"];if(Module["quit"])quit_=Module["quit"];var wasmBinary;if(Module["wasmBinary"])wasmBinary=Module["wasmBinary"];var noExitRuntime=Module["noExitRuntime"]||true;if(typeof WebAssembly!=="object"){abort("no native wasm support detected")}var wasmMemory;var ABORT=false;var EXITSTATUS;function assert(condition,text){if(!condition){abort("Assertion failed: "+text)}}var UTF8Decoder=typeof TextDecoder!=="undefined"?new TextDecoder("utf8"):undefined;function UTF8ArrayToString(heap,idx,maxBytesToRead){var endIdx=idx+maxBytesToRead;var endPtr=idx;while(heap[endPtr]&&!(endPtr>=endIdx))++endPtr;if(endPtr-idx>16&&heap.subarray&&UTF8Decoder){return UTF8Decoder.decode(heap.subarray(idx,endPtr))}else{var str="";while(idx>10,56320|ch&1023)}}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(HEAPU8,ptr,maxBytesToRead):""}function writeAsciiToMemory(str,buffer,dontAddNull){for(var i=0;i>0]=str.charCodeAt(i)}if(!dontAddNull)HEAP8[buffer>>0]=0}function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module["HEAP8"]=HEAP8=new Int8Array(buf);Module["HEAP16"]=HEAP16=new Int16Array(buf);Module["HEAP32"]=HEAP32=new Int32Array(buf);Module["HEAPU8"]=HEAPU8=new Uint8Array(buf);Module["HEAPU16"]=HEAPU16=new Uint16Array(buf);Module["HEAPU32"]=HEAPU32=new Uint32Array(buf);Module["HEAPF32"]=HEAPF32=new Float32Array(buf);Module["HEAPF64"]=HEAPF64=new Float64Array(buf)}var INITIAL_MEMORY=Module["INITIAL_MEMORY"]||16777216;var wasmTable;var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function preRun(){if(Module["preRun"]){if(typeof Module["preRun"]=="function")Module["preRun"]=[Module["preRun"]];while(Module["preRun"].length){addOnPreRun(Module["preRun"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){runtimeExited=true}function postRun(){if(Module["postRun"]){if(typeof 
Module["postRun"]=="function")Module["postRun"]=[Module["postRun"]];while(Module["postRun"].length){addOnPostRun(Module["postRun"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnInit(cb){__ATINIT__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module["preloadedImages"]={};Module["preloadedAudios"]={};function abort(what){if(Module["onAbort"]){Module["onAbort"](what)}what+="";err(what);ABORT=true;EXITSTATUS=1;what="abort("+what+"). Build with -s ASSERTIONS=1 for more info.";var e=new WebAssembly.RuntimeError(what);readyPromiseReject(e);throw e}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix="data:application/octet-stream;base64,";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix="file://";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile="tflite-1-0-0.wasm";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(file){try{if(file==wasmBinaryFile&&wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(file)}else{throw"both async and sync fetching of the wasm failed"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)){if(typeof fetch==="function"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:"same-origin"}).then(function(response){if(!response["ok"]){throw"failed to load wasm binary file at '"+wasmBinaryFile+"'"}return response["arrayBuffer"]()}).catch(function(){return getBinary(wasmBinaryFile)})}else{if(readAsync){return new Promise(function(resolve,reject){readAsync(wasmBinaryFile,function(response){resolve(new Uint8Array(response))},reject)})}}}return Promise.resolve().then(function(){return getBinary(wasmBinaryFile)})}function createWasm(){var info={"a":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module["asm"]=exports;wasmMemory=Module["asm"]["q"];updateGlobalBufferAndViews(wasmMemory.buffer);wasmTable=Module["asm"]["D"];addOnInit(Module["asm"]["r"]);removeRunDependency("wasm-instantiate")}addRunDependency("wasm-instantiate");function receiveInstantiatedSource(output){receiveInstance(output["instance"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){var result=WebAssembly.instantiate(binary,info);return result}).then(receiver,function(reason){err("failed to asynchronously prepare wasm: "+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming==="function"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch==="function"){return fetch(wasmBinaryFile,{credentials:"same-origin"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return 
result.then(receiveInstantiatedSource,function(reason){err("wasm streaming compile failed: "+reason);err("falling back to ArrayBuffer instantiation");return instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module["instantiateWasm"]){try{var exports=Module["instantiateWasm"](info,receiveInstance);return exports}catch(e){err("Module.instantiateWasm callback failed with error: "+e);return false}}instantiateAsync().catch(readyPromiseReject);return{}}function callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof callback=="function"){callback(Module);continue}var func=callback.func;if(typeof func==="number"){if(callback.arg===undefined){wasmTable.get(func)()}else{wasmTable.get(func)(callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var runtimeKeepaliveCounter=0;function keepRuntimeAlive(){return noExitRuntime||runtimeKeepaliveCounter>0}function _abort(){abort()}var _emscripten_get_now;if(ENVIRONMENT_IS_NODE){_emscripten_get_now=function(){var t=process["hrtime"]();return t[0]*1e3+t[1]/1e6}}else if(typeof dateNow!=="undefined"){_emscripten_get_now=dateNow}else _emscripten_get_now=function(){return performance.now()};var _emscripten_get_now_is_monotonic=true;function setErrNo(value){HEAP32[___errno_location()>>2]=value;return value}function _clock_gettime(clk_id,tp){var now;if(clk_id===0){now=Date.now()}else if((clk_id===1||clk_id===4)&&_emscripten_get_now_is_monotonic){now=_emscripten_get_now()}else{setErrNo(28);return-1}HEAP32[tp>>2]=now/1e3|0;HEAP32[tp+4>>2]=now%1e3*1e3*1e3|0;return 0}function _dlopen(filename,flag){abort("To use dlopen, you need to use Emscripten's linking support, see https://github.com/emscripten-core/emscripten/wiki/Linking")}function _dlsym(handle,symbol){abort("To use dlopen, you need to use Emscripten's linking support, see https://github.com/emscripten-core/emscripten/wiki/Linking")}function _emscripten_get_heap_max(){return 2147483648}function _emscripten_memcpy_big(dest,src,num){HEAPU8.copyWithin(dest,src,src+num)}function emscripten_realloc_buffer(size){try{wasmMemory.grow(size-buffer.byteLength+65535>>>16);updateGlobalBufferAndViews(wasmMemory.buffer);return 1}catch(e){}}function _emscripten_resize_heap(requestedSize){var oldSize=HEAPU8.length;var maxHeapSize=2147483648;if(requestedSize>maxHeapSize){return false}for(var cutDown=1;cutDown<=4;cutDown*=2){var overGrownHeapSize=oldSize*(1+.2/cutDown);overGrownHeapSize=Math.min(overGrownHeapSize,requestedSize+100663296);var newSize=Math.min(maxHeapSize,alignUp(Math.max(requestedSize,overGrownHeapSize),65536));var replacement=emscripten_realloc_buffer(newSize);if(replacement){return true}}return false}function _emscripten_thread_sleep(msecs){var start=_emscripten_get_now();while(_emscripten_get_now()-start0){return}preRun();if(runDependencies>0){return}function doRun(){if(calledRun)return;calledRun=true;Module["calledRun"]=true;if(ABORT)return;initRuntime();preMain();readyPromiseResolve(Module);if(Module["onRuntimeInitialized"])Module["onRuntimeInitialized"]();postRun()}if(Module["setStatus"]){Module["setStatus"]("Running...");setTimeout(function(){setTimeout(function(){Module["setStatus"]("")},1);doRun()},1)}else{doRun()}}Module["run"]=run;function exit(status,implicit){EXITSTATUS=status;if(implicit&&keepRuntimeAlive()&&status===0){return}if(keepRuntimeAlive()){}else{exitRuntime();if(Module["onExit"])Module["onExit"](status);ABORT=true}quit_(status,new 
ExitStatus(status))}if(Module["preInit"]){if(typeof Module["preInit"]=="function")Module["preInit"]=[Module["preInit"]];while(Module["preInit"].length>0){Module["preInit"].pop()()}}run(); + + + return createTwilioTFLiteModule.ready +} +); +})(); +if (typeof exports === 'object' && typeof module === 'object') + module.exports = createTwilioTFLiteModule; +else if (typeof define === 'function' && define['amd']) + define([], function() { return createTwilioTFLiteModule; }); +else if (typeof exports === 'object') + exports["createTwilioTFLiteModule"] = createTwilioTFLiteModule; diff --git a/dist/build/tflite-1-0-0.wasm b/dist/build/tflite-1-0-0.wasm new file mode 100755 index 0000000..ead8e52 Binary files /dev/null and b/dist/build/tflite-1-0-0.wasm differ diff --git a/dist/build/tflite-simd-1-0-0.js b/dist/build/tflite-simd-1-0-0.js new file mode 100644 index 0000000..d68e168 --- /dev/null +++ b/dist/build/tflite-simd-1-0-0.js @@ -0,0 +1,21 @@ + +var createTwilioTFLiteSIMDModule = (function() { + var _scriptDir = typeof document !== 'undefined' && document.currentScript ? document.currentScript.src : undefined; + if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename; + return ( +function(createTwilioTFLiteSIMDModule) { + createTwilioTFLiteSIMDModule = createTwilioTFLiteSIMDModule || {}; + +var Module=typeof createTwilioTFLiteSIMDModule!=="undefined"?createTwilioTFLiteSIMDModule:{};var readyPromiseResolve,readyPromiseReject;Module["ready"]=new Promise(function(resolve,reject){readyPromiseResolve=resolve;readyPromiseReject=reject});var moduleOverrides={};var key;for(key in Module){if(Module.hasOwnProperty(key)){moduleOverrides[key]=Module[key]}}var arguments_=[];var thisProgram="./this.program";var quit_=function(status,toThrow){throw toThrow};var ENVIRONMENT_IS_WEB=false;var ENVIRONMENT_IS_WORKER=false;var ENVIRONMENT_IS_NODE=false;var ENVIRONMENT_IS_SHELL=false;ENVIRONMENT_IS_WEB=typeof window==="object";ENVIRONMENT_IS_WORKER=typeof importScripts==="function";ENVIRONMENT_IS_NODE=typeof process==="object"&&typeof process.versions==="object"&&typeof process.versions.node==="string";ENVIRONMENT_IS_SHELL=!ENVIRONMENT_IS_WEB&&!ENVIRONMENT_IS_NODE&&!ENVIRONMENT_IS_WORKER;var scriptDirectory="";function locateFile(path){if(Module["locateFile"]){return Module["locateFile"](path,scriptDirectory)}return scriptDirectory+path}var read_,readAsync,readBinary,setWindowTitle;var nodeFS;var nodePath;if(ENVIRONMENT_IS_NODE){if(ENVIRONMENT_IS_WORKER){scriptDirectory=require("path").dirname(scriptDirectory)+"/"}else{scriptDirectory=__dirname+"/"}read_=function shell_read(filename,binary){if(!nodeFS)nodeFS=require("fs");if(!nodePath)nodePath=require("path");filename=nodePath["normalize"](filename);return nodeFS["readFileSync"](filename,binary?null:"utf8")};readBinary=function readBinary(filename){var ret=read_(filename,true);if(!ret.buffer){ret=new Uint8Array(ret)}assert(ret.buffer);return ret};if(process["argv"].length>1){thisProgram=process["argv"][1].replace(/\\/g,"/")}arguments_=process["argv"].slice(2);process["on"]("uncaughtException",function(ex){if(!(ex instanceof ExitStatus)){throw ex}});process["on"]("unhandledRejection",abort);quit_=function(status){process["exit"](status)};Module["inspect"]=function(){return"[Emscripten Module object]"}}else if(ENVIRONMENT_IS_SHELL){if(typeof read!="undefined"){read_=function shell_read(f){return read(f)}}readBinary=function readBinary(f){var data;if(typeof readbuffer==="function"){return new 
Uint8Array(readbuffer(f))}data=read(f,"binary");assert(typeof data==="object");return data};if(typeof scriptArgs!="undefined"){arguments_=scriptArgs}else if(typeof arguments!="undefined"){arguments_=arguments}if(typeof quit==="function"){quit_=function(status){quit(status)}}if(typeof print!=="undefined"){if(typeof console==="undefined")console={};console.log=print;console.warn=console.error=typeof printErr!=="undefined"?printErr:print}}else if(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER){if(ENVIRONMENT_IS_WORKER){scriptDirectory=self.location.href}else if(typeof document!=="undefined"&&document.currentScript){scriptDirectory=document.currentScript.src}if(_scriptDir){scriptDirectory=_scriptDir}if(scriptDirectory.indexOf("blob:")!==0){scriptDirectory=scriptDirectory.substr(0,scriptDirectory.lastIndexOf("/")+1)}else{scriptDirectory=""}{read_=function(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.send(null);return xhr.responseText};if(ENVIRONMENT_IS_WORKER){readBinary=function(url){var xhr=new XMLHttpRequest;xhr.open("GET",url,false);xhr.responseType="arraybuffer";xhr.send(null);return new Uint8Array(xhr.response)}}readAsync=function(url,onload,onerror){var xhr=new XMLHttpRequest;xhr.open("GET",url,true);xhr.responseType="arraybuffer";xhr.onload=function(){if(xhr.status==200||xhr.status==0&&xhr.response){onload(xhr.response);return}onerror()};xhr.onerror=onerror;xhr.send(null)}}setWindowTitle=function(title){document.title=title}}else{}var out=Module["print"]||console.log.bind(console);var err=Module["printErr"]||console.warn.bind(console);for(key in moduleOverrides){if(moduleOverrides.hasOwnProperty(key)){Module[key]=moduleOverrides[key]}}moduleOverrides=null;if(Module["arguments"])arguments_=Module["arguments"];if(Module["thisProgram"])thisProgram=Module["thisProgram"];if(Module["quit"])quit_=Module["quit"];var wasmBinary;if(Module["wasmBinary"])wasmBinary=Module["wasmBinary"];var noExitRuntime=Module["noExitRuntime"]||true;if(typeof WebAssembly!=="object"){abort("no native wasm support detected")}var wasmMemory;var ABORT=false;var EXITSTATUS;function assert(condition,text){if(!condition){abort("Assertion failed: "+text)}}var UTF8Decoder=typeof TextDecoder!=="undefined"?new TextDecoder("utf8"):undefined;function UTF8ArrayToString(heap,idx,maxBytesToRead){var endIdx=idx+maxBytesToRead;var endPtr=idx;while(heap[endPtr]&&!(endPtr>=endIdx))++endPtr;if(endPtr-idx>16&&heap.subarray&&UTF8Decoder){return UTF8Decoder.decode(heap.subarray(idx,endPtr))}else{var str="";while(idx<endPtr){var u0=heap[idx++];if(!(u0&128)){str+=String.fromCharCode(u0);continue}var u1=heap[idx++]&63;if((u0&224)==192){str+=String.fromCharCode((u0&31)<<6|u1);continue}var u2=heap[idx++]&63;if((u0&240)==224){u0=(u0&15)<<12|u1<<6|u2}else{u0=(u0&7)<<18|u1<<12|u2<<6|heap[idx++]&63}if(u0<65536){str+=String.fromCharCode(u0)}else{var ch=u0-65536;str+=String.fromCharCode(55296|ch>>10,56320|ch&1023)}}}return str}function UTF8ToString(ptr,maxBytesToRead){return ptr?UTF8ArrayToString(HEAPU8,ptr,maxBytesToRead):""}function writeAsciiToMemory(str,buffer,dontAddNull){for(var i=0;i<str.length;++i){HEAP8[buffer++>>0]=str.charCodeAt(i)}if(!dontAddNull)HEAP8[buffer>>0]=0}function alignUp(x,multiple){if(x%multiple>0){x+=multiple-x%multiple}return x}var buffer,HEAP8,HEAPU8,HEAP16,HEAPU16,HEAP32,HEAPU32,HEAPF32,HEAPF64;function updateGlobalBufferAndViews(buf){buffer=buf;Module["HEAP8"]=HEAP8=new Int8Array(buf);Module["HEAP16"]=HEAP16=new Int16Array(buf);Module["HEAP32"]=HEAP32=new Int32Array(buf);Module["HEAPU8"]=HEAPU8=new Uint8Array(buf);Module["HEAPU16"]=HEAPU16=new Uint16Array(buf);Module["HEAPU32"]=HEAPU32=new Uint32Array(buf);Module["HEAPF32"]=HEAPF32=new Float32Array(buf);Module["HEAPF64"]=HEAPF64=new Float64Array(buf)}var INITIAL_MEMORY=Module["INITIAL_MEMORY"]||16777216;var wasmTable;var __ATPRERUN__=[];var __ATINIT__=[];var __ATMAIN__=[];var __ATPOSTRUN__=[];var runtimeInitialized=false;var runtimeExited=false;function
preRun(){if(Module["preRun"]){if(typeof Module["preRun"]=="function")Module["preRun"]=[Module["preRun"]];while(Module["preRun"].length){addOnPreRun(Module["preRun"].shift())}}callRuntimeCallbacks(__ATPRERUN__)}function initRuntime(){runtimeInitialized=true;callRuntimeCallbacks(__ATINIT__)}function preMain(){callRuntimeCallbacks(__ATMAIN__)}function exitRuntime(){runtimeExited=true}function postRun(){if(Module["postRun"]){if(typeof Module["postRun"]=="function")Module["postRun"]=[Module["postRun"]];while(Module["postRun"].length){addOnPostRun(Module["postRun"].shift())}}callRuntimeCallbacks(__ATPOSTRUN__)}function addOnPreRun(cb){__ATPRERUN__.unshift(cb)}function addOnInit(cb){__ATINIT__.unshift(cb)}function addOnPostRun(cb){__ATPOSTRUN__.unshift(cb)}var runDependencies=0;var runDependencyWatcher=null;var dependenciesFulfilled=null;function addRunDependency(id){runDependencies++;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}}function removeRunDependency(id){runDependencies--;if(Module["monitorRunDependencies"]){Module["monitorRunDependencies"](runDependencies)}if(runDependencies==0){if(runDependencyWatcher!==null){clearInterval(runDependencyWatcher);runDependencyWatcher=null}if(dependenciesFulfilled){var callback=dependenciesFulfilled;dependenciesFulfilled=null;callback()}}}Module["preloadedImages"]={};Module["preloadedAudios"]={};function abort(what){if(Module["onAbort"]){Module["onAbort"](what)}what+="";err(what);ABORT=true;EXITSTATUS=1;what="abort("+what+"). Build with -s ASSERTIONS=1 for more info.";var e=new WebAssembly.RuntimeError(what);readyPromiseReject(e);throw e}function hasPrefix(str,prefix){return String.prototype.startsWith?str.startsWith(prefix):str.indexOf(prefix)===0}var dataURIPrefix="data:application/octet-stream;base64,";function isDataURI(filename){return hasPrefix(filename,dataURIPrefix)}var fileURIPrefix="file://";function isFileURI(filename){return hasPrefix(filename,fileURIPrefix)}var wasmBinaryFile="tflite-simd-1-0-0.wasm";if(!isDataURI(wasmBinaryFile)){wasmBinaryFile=locateFile(wasmBinaryFile)}function getBinary(file){try{if(file==wasmBinaryFile&&wasmBinary){return new Uint8Array(wasmBinary)}if(readBinary){return readBinary(file)}else{throw"both async and sync fetching of the wasm failed"}}catch(err){abort(err)}}function getBinaryPromise(){if(!wasmBinary&&(ENVIRONMENT_IS_WEB||ENVIRONMENT_IS_WORKER)){if(typeof fetch==="function"&&!isFileURI(wasmBinaryFile)){return fetch(wasmBinaryFile,{credentials:"same-origin"}).then(function(response){if(!response["ok"]){throw"failed to load wasm binary file at '"+wasmBinaryFile+"'"}return response["arrayBuffer"]()}).catch(function(){return getBinary(wasmBinaryFile)})}else{if(readAsync){return new Promise(function(resolve,reject){readAsync(wasmBinaryFile,function(response){resolve(new Uint8Array(response))},reject)})}}}return Promise.resolve().then(function(){return getBinary(wasmBinaryFile)})}function createWasm(){var info={"a":asmLibraryArg};function receiveInstance(instance,module){var exports=instance.exports;Module["asm"]=exports;wasmMemory=Module["asm"]["q"];updateGlobalBufferAndViews(wasmMemory.buffer);wasmTable=Module["asm"]["D"];addOnInit(Module["asm"]["r"]);removeRunDependency("wasm-instantiate")}addRunDependency("wasm-instantiate");function receiveInstantiatedSource(output){receiveInstance(output["instance"])}function instantiateArrayBuffer(receiver){return getBinaryPromise().then(function(binary){var result=WebAssembly.instantiate(binary,info);return 
result}).then(receiver,function(reason){err("failed to asynchronously prepare wasm: "+reason);abort(reason)})}function instantiateAsync(){if(!wasmBinary&&typeof WebAssembly.instantiateStreaming==="function"&&!isDataURI(wasmBinaryFile)&&!isFileURI(wasmBinaryFile)&&typeof fetch==="function"){return fetch(wasmBinaryFile,{credentials:"same-origin"}).then(function(response){var result=WebAssembly.instantiateStreaming(response,info);return result.then(receiveInstantiatedSource,function(reason){err("wasm streaming compile failed: "+reason);err("falling back to ArrayBuffer instantiation");return instantiateArrayBuffer(receiveInstantiatedSource)})})}else{return instantiateArrayBuffer(receiveInstantiatedSource)}}if(Module["instantiateWasm"]){try{var exports=Module["instantiateWasm"](info,receiveInstance);return exports}catch(e){err("Module.instantiateWasm callback failed with error: "+e);return false}}instantiateAsync().catch(readyPromiseReject);return{}}function callRuntimeCallbacks(callbacks){while(callbacks.length>0){var callback=callbacks.shift();if(typeof callback=="function"){callback(Module);continue}var func=callback.func;if(typeof func==="number"){if(callback.arg===undefined){wasmTable.get(func)()}else{wasmTable.get(func)(callback.arg)}}else{func(callback.arg===undefined?null:callback.arg)}}}var runtimeKeepaliveCounter=0;function keepRuntimeAlive(){return noExitRuntime||runtimeKeepaliveCounter>0}function _abort(){abort()}var _emscripten_get_now;if(ENVIRONMENT_IS_NODE){_emscripten_get_now=function(){var t=process["hrtime"]();return t[0]*1e3+t[1]/1e6}}else if(typeof dateNow!=="undefined"){_emscripten_get_now=dateNow}else _emscripten_get_now=function(){return performance.now()};var _emscripten_get_now_is_monotonic=true;function setErrNo(value){HEAP32[___errno_location()>>2]=value;return value}function _clock_gettime(clk_id,tp){var now;if(clk_id===0){now=Date.now()}else if((clk_id===1||clk_id===4)&&_emscripten_get_now_is_monotonic){now=_emscripten_get_now()}else{setErrNo(28);return-1}HEAP32[tp>>2]=now/1e3|0;HEAP32[tp+4>>2]=now%1e3*1e3*1e3|0;return 0}function _dlopen(filename,flag){abort("To use dlopen, you need to use Emscripten's linking support, see https://github.com/emscripten-core/emscripten/wiki/Linking")}function _dlsym(handle,symbol){abort("To use dlopen, you need to use Emscripten's linking support, see https://github.com/emscripten-core/emscripten/wiki/Linking")}function _emscripten_get_heap_max(){return 2147483648}function _emscripten_memcpy_big(dest,src,num){HEAPU8.copyWithin(dest,src,src+num)}function emscripten_realloc_buffer(size){try{wasmMemory.grow(size-buffer.byteLength+65535>>>16);updateGlobalBufferAndViews(wasmMemory.buffer);return 1}catch(e){}}function _emscripten_resize_heap(requestedSize){var oldSize=HEAPU8.length;var maxHeapSize=2147483648;if(requestedSize>maxHeapSize){return false}for(var cutDown=1;cutDown<=4;cutDown*=2){var overGrownHeapSize=oldSize*(1+.2/cutDown);overGrownHeapSize=Math.min(overGrownHeapSize,requestedSize+100663296);var newSize=Math.min(maxHeapSize,alignUp(Math.max(requestedSize,overGrownHeapSize),65536));var replacement=emscripten_realloc_buffer(newSize);if(replacement){return true}}return false}function _emscripten_thread_sleep(msecs){var start=_emscripten_get_now();while(_emscripten_get_now()-start0){return}preRun();if(runDependencies>0){return}function 
doRun(){if(calledRun)return;calledRun=true;Module["calledRun"]=true;if(ABORT)return;initRuntime();preMain();readyPromiseResolve(Module);if(Module["onRuntimeInitialized"])Module["onRuntimeInitialized"]();postRun()}if(Module["setStatus"]){Module["setStatus"]("Running...");setTimeout(function(){setTimeout(function(){Module["setStatus"]("")},1);doRun()},1)}else{doRun()}}Module["run"]=run;function exit(status,implicit){EXITSTATUS=status;if(implicit&&keepRuntimeAlive()&&status===0){return}if(keepRuntimeAlive()){}else{exitRuntime();if(Module["onExit"])Module["onExit"](status);ABORT=true}quit_(status,new ExitStatus(status))}if(Module["preInit"]){if(typeof Module["preInit"]=="function")Module["preInit"]=[Module["preInit"]];while(Module["preInit"].length>0){Module["preInit"].pop()()}}run(); + + + return createTwilioTFLiteSIMDModule.ready +} +); +})(); +if (typeof exports === 'object' && typeof module === 'object') + module.exports = createTwilioTFLiteSIMDModule; +else if (typeof define === 'function' && define['amd']) + define([], function() { return createTwilioTFLiteSIMDModule; }); +else if (typeof exports === 'object') + exports["createTwilioTFLiteSIMDModule"] = createTwilioTFLiteSIMDModule; diff --git a/dist/build/tflite-simd-1-0-0.wasm b/dist/build/tflite-simd-1-0-0.wasm new file mode 100755 index 0000000..bb24d2d Binary files /dev/null and b/dist/build/tflite-simd-1-0-0.wasm differ diff --git a/dist/build/twilio-video-processors.js b/dist/build/twilio-video-processors.js new file mode 100644 index 0000000..7a6bc5f --- /dev/null +++ b/dist/build/twilio-video-processors.js @@ -0,0 +1,1726 @@ +/*! twilio-video-processors.js 2.1.0 + +The following license applies to all parts of this software except as +documented below. + + Copyright (C) 2022 Twilio Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + 3. Neither the name of Twilio nor the names of its contributors may + be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +*/ +(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BackgroundProcessor = void 0; +var Processor_1 = require("../Processor"); +var Benchmark_1 = require("../../utils/Benchmark"); +var version_1 = require("../../utils/version"); +var types_1 = require("../../types"); +var webgl2_1 = require("../webgl2"); +var constants_1 = require("../../constants"); +/** + * @private + */ +var BackgroundProcessor = /** @class */ (function (_super) { + __extends(BackgroundProcessor, _super); + function BackgroundProcessor(options) { + var _this = _super.call(this) || this; + _this._backgroundImage = null; + _this._outputCanvas = null; + _this._outputContext = null; + _this._webgl2Pipeline = null; + _this._currentMask = new Uint8ClampedArray(); + _this._debounce = true; + _this._debounceCount = constants_1.DEBOUNCE_COUNT; + _this._dummyImageData = new ImageData(1, 1); + _this._inferenceDimensions = constants_1.WASM_INFERENCE_DIMENSIONS; + _this._inputMemoryOffset = 0; + // tslint:disable-next-line no-unused-variable + _this._isSimdEnabled = null; + _this._maskBlurRadius = constants_1.MASK_BLUR_RADIUS; + _this._maskUsageCounter = 0; + _this._outputMemoryOffset = 0; + _this._personProbabilityThreshold = constants_1.PERSON_PROBABILITY_THRESHOLD; + _this._pipeline = types_1.Pipeline.WebGL2; + // tslint:disable-next-line no-unused-variable + _this._version = version_1.version; + if (typeof options.assetsPath !== 'string') { + throw new Error('assetsPath parameter is missing'); + } + var assetsPath = options.assetsPath; + if (assetsPath && assetsPath[assetsPath.length - 1] !== '/') { + assetsPath += '/'; + } + _this.maskBlurRadius = options.maskBlurRadius; + _this._assetsPath = assetsPath; + _this._debounce = typeof options.debounce === 'boolean' ? options.debounce : _this._debounce; + _this._debounceCount = _this._debounce ? _this._debounceCount : 1; + _this._inferenceDimensions = options.inferenceDimensions || _this._inferenceDimensions; + _this._historyCount = constants_1.HISTORY_COUNT_MULTIPLIER * _this._debounceCount; + _this._personProbabilityThreshold = options.personProbabilityThreshold || _this._personProbabilityThreshold; + _this._pipeline = options.pipeline || _this._pipeline; + _this._benchmark = new Benchmark_1.Benchmark(); + _this._inputCanvas = document.createElement('canvas'); + _this._inputContext = _this._inputCanvas.getContext('2d'); + _this._maskCanvas = typeof window.OffscreenCanvas !== 'undefined' ? 
new window.OffscreenCanvas(1, 1) : document.createElement('canvas'); + _this._maskContext = _this._maskCanvas.getContext('2d'); + _this._masks = []; + return _this; + } + Object.defineProperty(BackgroundProcessor.prototype, "maskBlurRadius", { + /** + * The current blur radius when smoothing out the edges of the person's mask. + */ + get: function () { + return this._maskBlurRadius; + }, + /** + * Set a new blur radius to be used when smoothing out the edges of the person's mask. + */ + set: function (radius) { + if (typeof radius !== 'number' || radius < 0) { + console.warn("Valid mask blur radius not found. Using ".concat(constants_1.MASK_BLUR_RADIUS, " as default.")); + radius = constants_1.MASK_BLUR_RADIUS; + } + this._maskBlurRadius = radius; + }, + enumerable: false, + configurable: true + }); + /** + * Load the segmentation model. + * Call this method before attaching the processor to ensure + * video frames are processed correctly. + */ + BackgroundProcessor.prototype.loadModel = function () { + return __awaiter(this, void 0, void 0, function () { + var _a, tflite, modelResponse, model, modelBufferOffset; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: return [4 /*yield*/, Promise.all([ + this._loadTwilioTfLite(), + fetch(this._assetsPath + constants_1.MODEL_NAME), + ])]; + case 1: + _a = _b.sent(), tflite = _a[0], modelResponse = _a[1]; + return [4 /*yield*/, modelResponse.arrayBuffer()]; + case 2: + model = _b.sent(); + modelBufferOffset = tflite._getModelBufferMemoryOffset(); + tflite.HEAPU8.set(new Uint8Array(model), modelBufferOffset); + tflite._loadModel(model.byteLength); + this._inputMemoryOffset = tflite._getInputMemoryOffset() / 4; + this._outputMemoryOffset = tflite._getOutputMemoryOffset() / 4; + this._tflite = tflite; + return [2 /*return*/]; + } + }); + }); + }; + /** + * Apply a transform to the background of an input video frame, leaving + * the foreground (person(s)) untouched. Any exception detected will + * result in the frame being dropped. + * @param inputFrameBuffer - The source of the input frame to process. + *
+ *
+ * [OffscreenCanvas](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas) - Good for canvas-related processing + * that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLCanvasElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement) - This is recommended on browsers + * that don't support `OffscreenCanvas`, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLVideoElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement) - Recommended when using [[Pipeline.WebGL2]] but + * works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]]. + *
+ * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame. + */ + BackgroundProcessor.prototype.processFrame = function (inputFrameBuffer, outputFrameBuffer) { + var _a, _b; + return __awaiter(this, void 0, void 0, function () { + var _c, inferenceWidth, inferenceHeight, inputFrame, captureWidth, captureHeight, reInitDummyImage, personMask, ctx; + return __generator(this, function (_d) { + switch (_d.label) { + case 0: + if (!this._tflite) { + return [2 /*return*/]; + } + if (!inputFrameBuffer || !outputFrameBuffer) { + throw new Error('Missing input or output frame buffer'); + } + this._benchmark.end('captureFrameDelay'); + this._benchmark.start('processFrameDelay'); + _c = this._inferenceDimensions, inferenceWidth = _c.width, inferenceHeight = _c.height; + inputFrame = inputFrameBuffer; + captureWidth = inputFrame.width, captureHeight = inputFrame.height; + if (inputFrame.videoWidth) { + inputFrame = inputFrame; + captureWidth = inputFrame.videoWidth; + captureHeight = inputFrame.videoHeight; + } + if (this._outputCanvas !== outputFrameBuffer) { + this._outputCanvas = outputFrameBuffer; + this._outputContext = this._outputCanvas + .getContext(this._pipeline === types_1.Pipeline.Canvas2D ? '2d' : 'webgl2'); + (_a = this._webgl2Pipeline) === null || _a === void 0 ? void 0 : _a.cleanUp(); + this._webgl2Pipeline = null; + } + if (!this._webgl2Pipeline && this._pipeline === types_1.Pipeline.WebGL2) { + this._createWebGL2Pipeline(inputFrame, captureWidth, captureHeight, inferenceWidth, inferenceHeight); + } + if (!(this._pipeline === types_1.Pipeline.WebGL2)) return [3 /*break*/, 2]; + return [4 /*yield*/, ((_b = this._webgl2Pipeline) === null || _b === void 0 ? void 0 : _b.render())]; + case 1: + _d.sent(); + return [3 /*break*/, 4]; + case 2: + reInitDummyImage = false; + if (this._inputCanvas.width !== inferenceWidth) { + this._inputCanvas.width = inferenceWidth; + this._maskCanvas.width = inferenceWidth; + reInitDummyImage = true; + } + if (this._inputCanvas.height !== inferenceHeight) { + this._inputCanvas.height = inferenceHeight; + this._maskCanvas.height = inferenceHeight; + reInitDummyImage = true; + } + if (reInitDummyImage) { + this._dummyImageData = new ImageData(new Uint8ClampedArray(inferenceWidth * inferenceHeight * 4), inferenceWidth, inferenceHeight); + } + return [4 /*yield*/, this._createPersonMask(inputFrame)]; + case 3: + personMask = _d.sent(); + ctx = this._outputContext; + this._benchmark.start('imageCompositionDelay'); + this._maskContext.putImageData(personMask, 0, 0); + ctx.save(); + ctx.filter = "blur(".concat(this._maskBlurRadius, "px)"); + ctx.globalCompositeOperation = 'copy'; + ctx.drawImage(this._maskCanvas, 0, 0, captureWidth, captureHeight); + ctx.filter = 'none'; + ctx.globalCompositeOperation = 'source-in'; + ctx.drawImage(inputFrame, 0, 0, captureWidth, captureHeight); + ctx.globalCompositeOperation = 'destination-over'; + this._setBackground(inputFrame); + ctx.restore(); + this._benchmark.end('imageCompositionDelay'); + _d.label = 4; + case 4: + this._benchmark.end('processFrameDelay'); + this._benchmark.end('totalProcessingDelay'); + // NOTE (csantos): Start the benchmark from here so we can include the delay from the Video sdk + // for a more accurate fps + this._benchmark.start('totalProcessingDelay'); + this._benchmark.start('captureFrameDelay'); + return [2 /*return*/]; + } + }); + }); + }; + BackgroundProcessor.prototype._addMask = function (mask) { + if (this._masks.length >= this._historyCount) { + 
this._masks.splice(0, this._masks.length - this._historyCount + 1); + } + this._masks.push(mask); + }; + BackgroundProcessor.prototype._applyAlpha = function (imageData) { + var weightedSum = this._masks.reduce(function (sum, mask, j) { return sum + (j + 1) * (j + 1); }, 0); + var pixels = imageData.height * imageData.width; + var _loop_1 = function (i) { + var w = this_1._masks.reduce(function (sum, mask, j) { return sum + mask[i] * (j + 1) * (j + 1); }, 0) / weightedSum; + imageData.data[i * 4 + 3] = Math.round(w * 255); + }; + var this_1 = this; + for (var i = 0; i < pixels; i++) { + _loop_1(i); + } + }; + BackgroundProcessor.prototype._createPersonMask = function (inputFrame) { + return __awaiter(this, void 0, void 0, function () { + var imageData, shouldRunInference; + return __generator(this, function (_a) { + imageData = this._dummyImageData; + shouldRunInference = this._maskUsageCounter < 1; + this._benchmark.start('inputImageResizeDelay'); + if (shouldRunInference) { + imageData = this._getResizedInputImageData(inputFrame); + } + this._benchmark.end('inputImageResizeDelay'); + this._benchmark.start('segmentationDelay'); + if (shouldRunInference) { + this._currentMask = this._runTwilioTfLiteInference(imageData); + this._maskUsageCounter = this._debounceCount; + } + this._addMask(this._currentMask); + this._applyAlpha(imageData); + this._maskUsageCounter--; + this._benchmark.end('segmentationDelay'); + return [2 /*return*/, imageData]; + }); + }); + }; + BackgroundProcessor.prototype._createWebGL2Pipeline = function (inputFrame, captureWidth, captureHeight, inferenceWidth, inferenceHeight) { + this._webgl2Pipeline = (0, webgl2_1.buildWebGL2Pipeline)({ + htmlElement: inputFrame, + width: captureWidth, + height: captureHeight, + }, this._backgroundImage, { type: this._getWebGL2PipelineType() }, { inputResolution: "".concat(inferenceWidth, "x").concat(inferenceHeight) }, this._outputCanvas, this._tflite, this._benchmark, this._debounce); + this._webgl2Pipeline.updatePostProcessingConfig({ + smoothSegmentationMask: true, + jointBilateralFilter: { + sigmaSpace: 10, + sigmaColor: 0.12 + }, + coverage: [ + 0, + 0.99 + ], + lightWrapping: 0, + blendMode: 'screen' + }); + }; + BackgroundProcessor.prototype._getResizedInputImageData = function (inputFrame) { + var _a = this._inputCanvas, width = _a.width, height = _a.height; + this._inputContext.drawImage(inputFrame, 0, 0, width, height); + var imageData = this._inputContext.getImageData(0, 0, width, height); + return imageData; + }; + BackgroundProcessor.prototype._loadJs = function (url) { + if (BackgroundProcessor._loadedScripts.includes(url)) { + return Promise.resolve(); + } + return new Promise(function (resolve, reject) { + var script = document.createElement('script'); + script.onload = function () { + BackgroundProcessor._loadedScripts.push(url); + resolve(); + }; + script.onerror = reject; + document.head.append(script); + script.src = url; + }); + }; + BackgroundProcessor.prototype._loadTwilioTfLite = function () { + return __awaiter(this, void 0, void 0, function () { + var tflite, _a; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: return [4 /*yield*/, this._loadJs(this._assetsPath + constants_1.TFLITE_SIMD_LOADER_NAME)]; + case 1: + _b.sent(); + _b.label = 2; + case 2: + _b.trys.push([2, 4, , 7]); + return [4 /*yield*/, window.createTwilioTFLiteSIMDModule()]; + case 3: + tflite = _b.sent(); + this._isSimdEnabled = true; + return [3 /*break*/, 7]; + case 4: + _a = _b.sent(); + console.warn('SIMD not 
supported. You may experience poor quality of background replacement.'); + return [4 /*yield*/, this._loadJs(this._assetsPath + constants_1.TFLITE_LOADER_NAME)]; + case 5: + _b.sent(); + return [4 /*yield*/, window.createTwilioTFLiteModule()]; + case 6: + tflite = _b.sent(); + this._isSimdEnabled = false; + return [3 /*break*/, 7]; + case 7: return [2 /*return*/, tflite]; + } + }); + }); + }; + BackgroundProcessor.prototype._runTwilioTfLiteInference = function (inputImage) { + var _a = this, _b = _a._inferenceDimensions, width = _b.width, height = _b.height, offset = _a._inputMemoryOffset, tflite = _a._tflite; + var pixels = width * height; + for (var i = 0; i < pixels; i++) { + tflite.HEAPF32[offset + i * 3] = inputImage.data[i * 4] / 255; + tflite.HEAPF32[offset + i * 3 + 1] = inputImage.data[i * 4 + 1] / 255; + tflite.HEAPF32[offset + i * 3 + 2] = inputImage.data[i * 4 + 2] / 255; + } + tflite._runInference(); + var inferenceData = new Uint8ClampedArray(pixels * 4); + for (var i = 0; i < pixels; i++) { + var personProbability = tflite.HEAPF32[this._outputMemoryOffset + i]; + inferenceData[i] = Number(personProbability >= this._personProbabilityThreshold) * personProbability; + } + return inferenceData; + }; + BackgroundProcessor._loadedScripts = []; + return BackgroundProcessor; +}(Processor_1.Processor)); +exports.BackgroundProcessor = BackgroundProcessor; + +},{"../../constants":1,"../../types":16,"../../utils/Benchmark":17,"../../utils/version":19,"../Processor":3,"../webgl2":9}],5:[function(require,module,exports){ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GaussianBlurBackgroundProcessor = void 0; +var BackgroundProcessor_1 = require("./BackgroundProcessor"); +var constants_1 = require("../../constants"); +var types_1 = require("../../types"); +/** + * The GaussianBlurBackgroundProcessor, when added to a VideoTrack, + * applies a gaussian blur filter on the background in each video frame + * and leaves the foreground (person(s)) untouched. Each instance of + * GaussianBlurBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. 
+ * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors'; + * + * const blurBackground = new GaussianBlurBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * pipeline: Pipeline.WebGL2, + * debounce: true, + * }); + * + * blurBackground.loadModel().then(() => { + * createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS, + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * // Any frame rate above 24 fps on desktop browsers increases CPU + * // usage without a noticeable increase in quality. + * frameRate: 24 + * }).then(track => { + * track.addProcessor(blurBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * }); + * }); + * ``` + */ +var GaussianBlurBackgroundProcessor = /** @class */ (function (_super) { + __extends(GaussianBlurBackgroundProcessor, _super); + /** + * Construct a GaussianBlurBackgroundProcessor. Default values will be used for + * any missing properties in [[GaussianBlurBackgroundProcessorOptions]], and + * invalid properties will be ignored. + */ + function GaussianBlurBackgroundProcessor(options) { + var _this = _super.call(this, options) || this; + _this._blurFilterRadius = constants_1.BLUR_FILTER_RADIUS; + // tslint:disable-next-line no-unused-variable + _this._name = 'GaussianBlurBackgroundProcessor'; + _this.blurFilterRadius = options.blurFilterRadius; + return _this; + } + Object.defineProperty(GaussianBlurBackgroundProcessor.prototype, "blurFilterRadius", { + /** + * The current background blur filter radius in pixels. + */ + get: function () { + return this._blurFilterRadius; + }, + /** + * Set a new background blur filter radius in pixels. + */ + set: function (radius) { + if (!radius) { + console.warn("Valid blur filter radius not found. 
Using ".concat(constants_1.BLUR_FILTER_RADIUS, " as default.")); + radius = constants_1.BLUR_FILTER_RADIUS; + } + this._blurFilterRadius = radius; + }, + enumerable: false, + configurable: true + }); + GaussianBlurBackgroundProcessor.prototype._getWebGL2PipelineType = function () { + return types_1.WebGL2PipelineType.Blur; + }; + GaussianBlurBackgroundProcessor.prototype._setBackground = function (inputFrame) { + if (!this._outputContext) { + return; + } + var ctx = this._outputContext; + ctx.filter = "blur(".concat(this._blurFilterRadius, "px)"); + ctx.drawImage(inputFrame, 0, 0); + }; + return GaussianBlurBackgroundProcessor; +}(BackgroundProcessor_1.BackgroundProcessor)); +exports.GaussianBlurBackgroundProcessor = GaussianBlurBackgroundProcessor; + +},{"../../constants":1,"../../types":16,"./BackgroundProcessor":4}],6:[function(require,module,exports){ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VirtualBackgroundProcessor = void 0; +var BackgroundProcessor_1 = require("./BackgroundProcessor"); +var types_1 = require("../../types"); +/** + * The VirtualBackgroundProcessor, when added to a VideoTrack, + * replaces the background in each video frame with a given image, + * and leaves the foreground (person(s)) untouched. Each instance of + * VirtualBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. + * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors'; + * + * let virtualBackground; + * const img = new Image(); + * + * img.onload = () => { + * virtualBackground = new VirtualBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * backgroundImage: img, + * pipeline: Pipeline.WebGL2, + * + * // Desktop Safari and iOS browsers do not support SIMD. + * // Set debounce to true to achieve acceptable performance. + * debounce: isSafari(), + * }); + * + * virtualBackground.loadModel().then(() => { + * createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS, + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * // Any frame rate above 24 fps on desktop browsers increases CPU + * // usage without a noticeable increase in quality. 
+ * frameRate: 24 + * }).then(track => { + * track.addProcessor(virtualBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * }); + * }); + * }; + * img.src = '/background.jpg'; + * ``` + */ +var VirtualBackgroundProcessor = /** @class */ (function (_super) { + __extends(VirtualBackgroundProcessor, _super); + /** + * Construct a VirtualBackgroundProcessor. Default values will be used for + * any missing optional properties in [[VirtualBackgroundProcessorOptions]], + * and invalid properties will be ignored. + */ + function VirtualBackgroundProcessor(options) { + var _this = _super.call(this, options) || this; + // tslint:disable-next-line no-unused-variable + _this._name = 'VirtualBackgroundProcessor'; + _this.backgroundImage = options.backgroundImage; + _this.fitType = options.fitType; + return _this; + } + Object.defineProperty(VirtualBackgroundProcessor.prototype, "backgroundImage", { + /** + * The HTMLImageElement representing the current background image. + */ + get: function () { + return this._backgroundImage; + }, + /** + * Set an HTMLImageElement as the new background image. + * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow + * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image) + * when loading the image from a different origin. Failing to do so will result in an empty output frame. + */ + set: function (image) { + var _a; + if (!image || !image.complete || !image.naturalHeight) { + throw new Error('Invalid image. Make sure that the image is an HTMLImageElement and has been successfully loaded'); + } + this._backgroundImage = image; + // Triggers recreation of the pipeline in the next processFrame call + (_a = this._webgl2Pipeline) === null || _a === void 0 ? void 0 : _a.cleanUp(); + this._webgl2Pipeline = null; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(VirtualBackgroundProcessor.prototype, "fitType", { + /** + * The current [[ImageFit]] for positioning of the background image in the viewport. + */ + get: function () { + return this._fitType; + }, + /** + * Set a new [[ImageFit]] to be used for positioning the background image in the viewport. + */ + set: function (fitType) { + var validTypes = Object.keys(types_1.ImageFit); + if (!validTypes.includes(fitType)) { + console.warn("Valid fitType not found. 
Using '".concat(types_1.ImageFit.Fill, "' as default.")); + fitType = types_1.ImageFit.Fill; + } + this._fitType = fitType; + }, + enumerable: false, + configurable: true + }); + VirtualBackgroundProcessor.prototype._getWebGL2PipelineType = function () { + return types_1.WebGL2PipelineType.Image; + }; + VirtualBackgroundProcessor.prototype._setBackground = function () { + if (!this._outputContext || !this._outputCanvas) { + return; + } + var img = this._backgroundImage; + var imageWidth = img.naturalWidth; + var imageHeight = img.naturalHeight; + var canvasWidth = this._outputCanvas.width; + var canvasHeight = this._outputCanvas.height; + var ctx = this._outputContext; + if (this._fitType === types_1.ImageFit.Fill) { + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, 0, 0, canvasWidth, canvasHeight); + } + else if (this._fitType === types_1.ImageFit.None) { + ctx.drawImage(img, 0, 0, imageWidth, imageHeight); + } + else if (this._fitType === types_1.ImageFit.Contain) { + var _a = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, types_1.ImageFit.Contain), x = _a.x, y = _a.y, w = _a.w, h = _a.h; + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h); + } + else if (this._fitType === types_1.ImageFit.Cover) { + var _b = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, types_1.ImageFit.Cover), x = _b.x, y = _b.y, w = _b.w, h = _b.h; + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h); + } + }; + VirtualBackgroundProcessor.prototype._getFitPosition = function (contentWidth, contentHeight, viewportWidth, viewportHeight, type) { + // Calculate new content width to fit viewport width + var factor = viewportWidth / contentWidth; + var newContentWidth = viewportWidth; + var newContentHeight = factor * contentHeight; + // Scale down the resulting height and width more + // to fit viewport height if the content still exceeds it + if ((type === types_1.ImageFit.Contain && newContentHeight > viewportHeight) + || (type === types_1.ImageFit.Cover && viewportHeight > newContentHeight)) { + factor = viewportHeight / newContentHeight; + newContentWidth = factor * newContentWidth; + newContentHeight = viewportHeight; + } + // Calculate the destination top left corner to center the content + var x = (viewportWidth - newContentWidth) / 2; + var y = (viewportHeight - newContentHeight) / 2; + return { + x: x, + y: y, + w: newContentWidth, + h: newContentHeight, + }; + }; + return VirtualBackgroundProcessor; +}(BackgroundProcessor_1.BackgroundProcessor)); +exports.VirtualBackgroundProcessor = VirtualBackgroundProcessor; + +},{"../../types":16,"./BackgroundProcessor":4}],7:[function(require,module,exports){ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.inputResolutions = void 0; +exports.inputResolutions = { + '640x360': [640, 360], + '256x256': [256, 256], + '256x144': [256, 144], + '160x96': [160, 96], +}; + +},{}],8:[function(require,module,exports){ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readPixelsAsync = exports.createTexture = exports.compileShader = exports.createProgram = exports.createPiplelineStageProgram = exports.glsl = void 0; +/** + * Use it along with boyswan.glsl-literal VSCode extension + * to get GLSL syntax highlighting. + * https://marketplace.visualstudio.com/items?itemName=boyswan.glsl-literal + * + * On VSCode OSS, boyswan.glsl-literal requires slevesque.shader extension + * to be installed as well. 
+ * https://marketplace.visualstudio.com/items?itemName=slevesque.shader + */ +exports.glsl = String.raw; +function createPiplelineStageProgram(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer) { + var program = createProgram(gl, vertexShader, fragmentShader); + var positionAttributeLocation = gl.getAttribLocation(program, 'a_position'); + gl.enableVertexAttribArray(positionAttributeLocation); + gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); + gl.vertexAttribPointer(positionAttributeLocation, 2, gl.FLOAT, false, 0, 0); + var texCoordAttributeLocation = gl.getAttribLocation(program, 'a_texCoord'); + gl.enableVertexAttribArray(texCoordAttributeLocation); + gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer); + gl.vertexAttribPointer(texCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0); + return program; +} +exports.createPiplelineStageProgram = createPiplelineStageProgram; +function createProgram(gl, vertexShader, fragmentShader) { + var program = gl.createProgram(); + gl.attachShader(program, vertexShader); + gl.attachShader(program, fragmentShader); + gl.linkProgram(program); + if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { + throw new Error("Could not link WebGL program: ".concat(gl.getProgramInfoLog(program))); + } + return program; +} +exports.createProgram = createProgram; +function compileShader(gl, shaderType, shaderSource) { + var shader = gl.createShader(shaderType); + gl.shaderSource(shader, shaderSource); + gl.compileShader(shader); + if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { + throw new Error("Could not compile shader: ".concat(gl.getShaderInfoLog(shader))); + } + return shader; +} +exports.compileShader = compileShader; +function createTexture(gl, internalformat, width, height, minFilter, magFilter) { + if (minFilter === void 0) { minFilter = gl.NEAREST; } + if (magFilter === void 0) { magFilter = gl.NEAREST; } + var texture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, texture); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, minFilter); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, magFilter); + gl.texStorage2D(gl.TEXTURE_2D, 1, internalformat, width, height); + return texture; +} +exports.createTexture = createTexture; +function readPixelsAsync(gl, x, y, width, height, format, type, dest) { + return __awaiter(this, void 0, void 0, function () { + var buf; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + buf = gl.createBuffer(); + gl.bindBuffer(gl.PIXEL_PACK_BUFFER, buf); + gl.bufferData(gl.PIXEL_PACK_BUFFER, dest.byteLength, gl.STREAM_READ); + gl.readPixels(x, y, width, height, format, type, 0); + gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null); + return [4 /*yield*/, getBufferSubDataAsync(gl, gl.PIXEL_PACK_BUFFER, buf, 0, dest)]; + case 1: + _a.sent(); + gl.deleteBuffer(buf); + return [2 /*return*/, dest]; + } + }); + }); +} +exports.readPixelsAsync = readPixelsAsync; +function getBufferSubDataAsync(gl, target, buffer, srcByteOffset, dstBuffer, dstOffset, length) { + return __awaiter(this, void 0, void 0, function () { + var sync, res; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0); + gl.flush(); + return [4 /*yield*/, clientWaitAsync(gl, sync)]; + case 1: + res = _a.sent(); + gl.deleteSync(sync); + if (res !== gl.WAIT_FAILED) { + gl.bindBuffer(target, buffer); + 
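+ // Descriptive note: clientWaitAsync resolved without WAIT_FAILED, so the GPU
+ // has finished writing into the pixel pack buffer; copying its contents out
+ // here does not stall the rendering pipeline.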
gl.getBufferSubData(target, srcByteOffset, dstBuffer, dstOffset, length); + gl.bindBuffer(target, null); + } + return [2 /*return*/]; + } + }); + }); +} +function clientWaitAsync(gl, sync) { + return new Promise(function (resolve) { + function test() { + var res = gl.clientWaitSync(sync, 0, 0); + if (res === gl.WAIT_FAILED) { + resolve(res); + return; + } + if (res === gl.TIMEOUT_EXPIRED) { + requestAnimationFrame(test); + return; + } + resolve(res); + } + requestAnimationFrame(test); + }); +} + +},{}],9:[function(require,module,exports){ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildWebGL2Pipeline = void 0; +/** + * This pipeline is based on Volcomix's react project. + * https://github.com/Volcomix/virtual-background + * It was modified and converted into a module to work with + * Twilio's Video Processor + */ +var webgl2Pipeline_1 = require("./pipelines/webgl2Pipeline"); +Object.defineProperty(exports, "buildWebGL2Pipeline", { enumerable: true, get: function () { return webgl2Pipeline_1.buildWebGL2Pipeline; } }); + +},{"./pipelines/webgl2Pipeline":15}],10:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildBackgroundBlurStage = void 0; +var webglHelper_1 = require("../helpers/webglHelper"); +function buildBackgroundBlurStage(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) { + var blurPass = buildBlurPass(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas); + var blendPass = buildBlendPass(gl, positionBuffer, texCoordBuffer, canvas); + function render() { + blurPass.render(); + blendPass.render(); + } + function updateCoverage(coverage) { + blendPass.updateCoverage(coverage); + } + function cleanUp() { + blendPass.cleanUp(); + blurPass.cleanUp(); + } + return { + render: render, + updateCoverage: updateCoverage, + cleanUp: cleanUp, + }; +} +exports.buildBackgroundBlurStage = buildBackgroundBlurStage; +function buildBlurPass(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D 
u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n "]))); + var scale = 0.5; + var outputWidth = canvas.width * scale; + var outputHeight = canvas.height * scale; + var texelWidth = 1 / outputWidth; + var texelHeight = 1 / outputHeight; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize'); + var texture1 = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight, gl.NEAREST, gl.LINEAR); + var texture2 = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight, gl.NEAREST, gl.LINEAR); + var frameBuffer1 = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture1, 0); + var frameBuffer2 = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture2, 0); + gl.useProgram(program); + gl.uniform1i(personMaskLocation, 1); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, personMaskTexture); + for (var i = 0; i < 8; i++) { + gl.uniform2f(texelSizeLocation, 0, texelHeight); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, texture1); + gl.uniform1i(inputFrameLocation, 2); + gl.uniform2f(texelSizeLocation, texelWidth, 0); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + gl.bindTexture(gl.TEXTURE_2D, texture2); + } + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer2); + gl.deleteFramebuffer(frameBuffer1); + gl.deleteTexture(texture2); + gl.deleteTexture(texture1); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { + render: render, + cleanUp: cleanUp, + }; +} +function buildBlendPass(gl, positionBuffer, texCoordBuffer, canvas) { + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_2 || (templateObject_2 = __makeTemplateObject(["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n 
// Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "], ["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "]))); + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_3 || (templateObject_3 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n "]))); + var outputWidth = canvas.width, outputHeight = canvas.height; + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var blurredInputFrame = gl.getUniformLocation(program, 'u_blurredInputFrame'); + var coverageLocation = gl.getUniformLocation(program, 'u_coverage'); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(personMaskLocation, 1); + gl.uniform1i(blurredInputFrame, 2); + gl.uniform2f(coverageLocation, 0, 1); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateCoverage(coverage) { + gl.useProgram(program); + gl.uniform2f(coverageLocation, coverage[0], coverage[1]); + } + function cleanUp() { + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + gl.deleteShader(vertexShader); + } + return { + render: render, + updateCoverage: updateCoverage, + cleanUp: cleanUp, + }; +} +var templateObject_1, templateObject_2, templateObject_3; + +},{"../helpers/webglHelper":8}],11:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildBackgroundImageStage = void 0; 
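+// Descriptive note: this stage composites the camera frame over the background
+// image. The person mask's alpha channel drives a smoothstep blend between the
+// two, and an optional "light wrapping" term bleeds background colour into the
+// person's edges using either screen or linear-dodge (add) blending.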
+var webglHelper_1 = require("../helpers/webglHelper"); +function buildBackgroundImageStage(gl, positionBuffer, texCoordBuffer, personMaskTexture, backgroundImage, canvas) { + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n "], ["#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n "]))); + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_2 || (templateObject_2 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n "]))); + var outputWidth = canvas.width, outputHeight = canvas.height; + var 
outputRatio = outputWidth / outputHeight; + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var backgroundScaleLocation = gl.getUniformLocation(program, 'u_backgroundScale'); + var backgroundOffsetLocation = gl.getUniformLocation(program, 'u_backgroundOffset'); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var backgroundLocation = gl.getUniformLocation(program, 'u_background'); + var coverageLocation = gl.getUniformLocation(program, 'u_coverage'); + var lightWrappingLocation = gl.getUniformLocation(program, 'u_lightWrapping'); + var blendModeLocation = gl.getUniformLocation(program, 'u_blendMode'); + gl.useProgram(program); + gl.uniform2f(backgroundScaleLocation, 1, 1); + gl.uniform2f(backgroundOffsetLocation, 0, 0); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(personMaskLocation, 1); + gl.uniform2f(coverageLocation, 0, 1); + gl.uniform1f(lightWrappingLocation, 0); + gl.uniform1f(blendModeLocation, 0); + var backgroundTexture = null; + // TODO Find a better way to handle the background being loaded + if (backgroundImage === null || backgroundImage === void 0 ? void 0 : backgroundImage.complete) { + updateBackgroundImage(backgroundImage); + } + else if (backgroundImage) { + backgroundImage.onload = function () { + updateBackgroundImage(backgroundImage); + }; + } + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, personMaskTexture); + if (backgroundTexture !== null) { + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, backgroundTexture); + // TODO Correctly handle the case where the background has not loaded yet + gl.uniform1i(backgroundLocation, 2); + } + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateBackgroundImage(backgroundImage) { + backgroundTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, backgroundImage.naturalWidth, backgroundImage.naturalHeight, gl.LINEAR, gl.LINEAR); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, backgroundImage.naturalWidth, backgroundImage.naturalHeight, gl.RGBA, gl.UNSIGNED_BYTE, backgroundImage); + var xOffset = 0; + var yOffset = 0; + var backgroundWidth = backgroundImage.naturalWidth; + var backgroundHeight = backgroundImage.naturalHeight; + var backgroundRatio = backgroundWidth / backgroundHeight; + if (backgroundRatio < outputRatio) { + backgroundHeight = backgroundWidth / outputRatio; + yOffset = (backgroundImage.naturalHeight - backgroundHeight) / 2; + } + else { + backgroundWidth = backgroundHeight * outputRatio; + xOffset = (backgroundImage.naturalWidth - backgroundWidth) / 2; + } + var xScale = backgroundWidth / backgroundImage.naturalWidth; + var yScale = backgroundHeight / backgroundImage.naturalHeight; + xOffset /= backgroundImage.naturalWidth; + yOffset /= backgroundImage.naturalHeight; + gl.uniform2f(backgroundScaleLocation, xScale, yScale); + gl.uniform2f(backgroundOffsetLocation, xOffset, yOffset); + } + function updateCoverage(coverage) { + gl.useProgram(program); + gl.uniform2f(coverageLocation, coverage[0], coverage[1]); + } + function updateLightWrapping(lightWrapping) { + 
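+ // A light wrapping value of 0 disables the effect; the fragment shader
+ // multiplies the wrap term by this uniform.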
gl.useProgram(program); + gl.uniform1f(lightWrappingLocation, lightWrapping); + } + function updateBlendMode(blendMode) { + gl.useProgram(program); + gl.uniform1f(blendModeLocation, blendMode === 'screen' ? 0 : 1); + } + function cleanUp() { + gl.deleteTexture(backgroundTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + gl.deleteShader(vertexShader); + } + return { + render: render, + updateCoverage: updateCoverage, + updateLightWrapping: updateLightWrapping, + updateBlendMode: updateBlendMode, + cleanUp: cleanUp, + }; +} +exports.buildBackgroundImageStage = buildBackgroundImageStage; +var templateObject_1, templateObject_2; + +},{"../helpers/webglHelper":8}],12:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildJointBilateralFilterStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildJointBilateralFilterStage(gl, vertexShader, positionBuffer, texCoordBuffer, inputTexture, segmentationConfig, outputTexture, canvas) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n float coeff = -0.5 / (sigma * sigma * 4.0 + 1.0e-6);\n return exp((x * x) * coeff);\n }\n\n void main() {\n vec2 centerCoord = v_texCoord;\n vec3 centerColor = texture(u_inputFrame, centerCoord).rgb;\n float newVal = 0.0;\n\n float spaceWeight = 0.0;\n float colorWeight = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(centerCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(centerCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(centerCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(centerCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n outColor = vec4(vec3(0.0), 0.0);\n } else if (totalSegAlpha >= 4.0) {\n outColor = vec4(vec3(0.0), 1.0);\n } else {\n for (float i = -u_radius + u_offset; i <= u_radius; i += u_step) {\n for (float j = -u_radius + u_offset; j <= u_radius; j += u_step) {\n vec2 shift = vec2(j, i) * u_texelSize;\n vec2 coord = vec2(centerCoord + shift);\n vec3 frameColor = texture(u_inputFrame, coord).rgb;\n float outVal = texture(u_segmentationMask, coord).a;\n\n spaceWeight = gaussian(distance(centerCoord, coord), u_sigmaTexel);\n colorWeight = gaussian(distance(centerColor, frameColor), u_sigmaColor);\n totalWeight += 
spaceWeight * colorWeight;\n\n newVal += spaceWeight * colorWeight * outVal;\n }\n }\n newVal /= totalWeight;\n\n outColor = vec4(vec3(0.0), newVal);\n }\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n float coeff = -0.5 / (sigma * sigma * 4.0 + 1.0e-6);\n return exp((x * x) * coeff);\n }\n\n void main() {\n vec2 centerCoord = v_texCoord;\n vec3 centerColor = texture(u_inputFrame, centerCoord).rgb;\n float newVal = 0.0;\n\n float spaceWeight = 0.0;\n float colorWeight = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(centerCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(centerCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(centerCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(centerCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n outColor = vec4(vec3(0.0), 0.0);\n } else if (totalSegAlpha >= 4.0) {\n outColor = vec4(vec3(0.0), 1.0);\n } else {\n for (float i = -u_radius + u_offset; i <= u_radius; i += u_step) {\n for (float j = -u_radius + u_offset; j <= u_radius; j += u_step) {\n vec2 shift = vec2(j, i) * u_texelSize;\n vec2 coord = vec2(centerCoord + shift);\n vec3 frameColor = texture(u_inputFrame, coord).rgb;\n float outVal = texture(u_segmentationMask, coord).a;\n\n spaceWeight = gaussian(distance(centerCoord, coord), u_sigmaTexel);\n colorWeight = gaussian(distance(centerColor, frameColor), u_sigmaColor);\n totalWeight += spaceWeight * colorWeight;\n\n newVal += spaceWeight * colorWeight * outVal;\n }\n }\n newVal /= totalWeight;\n\n outColor = vec4(vec3(0.0), newVal);\n }\n }\n "]))); + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var outputWidth = canvas.width, outputHeight = canvas.height; + var texelWidth = 1 / outputWidth; + var texelHeight = 1 / outputHeight; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var segmentationMaskLocation = gl.getUniformLocation(program, 'u_segmentationMask'); + var texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize'); + var stepLocation = gl.getUniformLocation(program, 'u_step'); + var radiusLocation = gl.getUniformLocation(program, 'u_radius'); + var offsetLocation = gl.getUniformLocation(program, 'u_offset'); + var sigmaTexelLocation = gl.getUniformLocation(program, 'u_sigmaTexel'); + var sigmaColorLocation = gl.getUniformLocation(program, 'u_sigmaColor'); + var frameBuffer = gl.createFramebuffer(); + 
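+ // The filtered result is rendered into outputTexture (the full-resolution
+ // person mask) through this framebuffer rather than to the default one.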
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outputTexture, 0); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(segmentationMaskLocation, 1); + gl.uniform2f(texelSizeLocation, texelWidth, texelHeight); + // Ensures default values are configured to prevent infinite + // loop in fragment shader + updateSigmaSpace(0); + updateSigmaColor(0); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, inputTexture); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateSigmaSpace(sigmaSpace) { + sigmaSpace *= Math.max(outputWidth / segmentationWidth, outputHeight / segmentationHeight); + var kSparsityFactor = 0.66; // Higher is more sparse. + var sparsity = Math.max(1, Math.sqrt(sigmaSpace) * kSparsityFactor); + var step = sparsity; + var radius = sigmaSpace; + var offset = step > 1 ? step * 0.5 : 0; + var sigmaTexel = Math.max(texelWidth, texelHeight) * sigmaSpace; + gl.useProgram(program); + gl.uniform1f(stepLocation, step); + gl.uniform1f(radiusLocation, radius); + gl.uniform1f(offsetLocation, offset); + gl.uniform1f(sigmaTexelLocation, sigmaTexel); + } + function updateSigmaColor(sigmaColor) { + gl.useProgram(program); + gl.uniform1f(sigmaColorLocation, sigmaColor); + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, updateSigmaSpace: updateSigmaSpace, updateSigmaColor: updateSigmaColor, cleanUp: cleanUp }; +} +exports.buildJointBilateralFilterStage = buildJointBilateralFilterStage; +var templateObject_1; + +},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8}],13:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildLoadSegmentationStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildLoadSegmentationStage(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, tflite, outputTexture) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).r;\n outColor = vec4(vec3(0.0), segmentation);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).r;\n outColor = vec4(vec3(0.0), segmentation);\n }\n " + // TFLite memory will be accessed as float32 + ]))); + // TFLite memory will be accessed as float32 + var tfliteOutputMemoryOffset = tflite._getOutputMemoryOffset() / 4; + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var fragmentShader = (0, 
webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputLocation = gl.getUniformLocation(program, 'u_inputSegmentation'); + var inputTexture = (0, webglHelper_1.createTexture)(gl, gl.R32F, segmentationWidth, segmentationHeight); + var frameBuffer = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outputTexture, 0); + gl.useProgram(program); + gl.uniform1i(inputLocation, 1); + function render() { + gl.viewport(0, 0, segmentationWidth, segmentationHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, inputTexture); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, segmentationWidth, segmentationHeight, gl.RED, gl.FLOAT, tflite.HEAPF32, tfliteOutputMemoryOffset); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteTexture(inputTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, cleanUp: cleanUp }; +} +exports.buildLoadSegmentationStage = buildLoadSegmentationStage; +var templateObject_1; + +},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8}],14:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildResizingStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildResizingStage(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, tflite) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n outColor = texture(u_inputFrame, v_texCoord);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n outColor = texture(u_inputFrame, v_texCoord);\n }\n " + // TFLite memory will be accessed as float32 + ]))); + // TFLite memory will be accessed as float32 + var tfliteInputMemoryOffset = tflite._getInputMemoryOffset() / 4; + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], outputWidth = _a[0], outputHeight = _a[1]; + var outputPixelCount = outputWidth * outputHeight; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var outputTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight); + var frameBuffer = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outputTexture, 0); + var outputPixels = new 
Uint8Array(outputPixelCount * 4); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + // Downloads pixels asynchronously from GPU while rendering the current frame + (0, webglHelper_1.readPixelsAsync)(gl, 0, 0, outputWidth, outputHeight, gl.RGBA, gl.UNSIGNED_BYTE, outputPixels); + for (var i = 0; i < outputPixelCount; i++) { + var tfliteIndex = tfliteInputMemoryOffset + i * 3; + var outputIndex = i * 4; + tflite.HEAPF32[tfliteIndex] = outputPixels[outputIndex] / 255; + tflite.HEAPF32[tfliteIndex + 1] = outputPixels[outputIndex + 1] / 255; + tflite.HEAPF32[tfliteIndex + 2] = outputPixels[outputIndex + 2] / 255; + } + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteTexture(outputTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, cleanUp: cleanUp }; +} +exports.buildResizingStage = buildResizingStage; +var templateObject_1; + +},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8}],15:[function(require,module,exports){ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildWebGL2Pipeline = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +var backgroundBlurStage_1 = require("./backgroundBlurStage"); +var backgroundImageStage_1 = require("./backgroundImageStage"); +var jointBilateralFilterStage_1 = require("./jointBilateralFilterStage"); +var loadSegmentationStage_1 = require("./loadSegmentationStage"); +var resizingStage_1 = require("./resizingStage"); +function buildWebGL2Pipeline(sourcePlayback, backgroundImage, backgroundConfig, segmentationConfig, canvas, tflite, benchmark, debounce) { + var shouldRunInference = true; + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "], ["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "]))); + var frameWidth = sourcePlayback.width, frameHeight = sourcePlayback.height; + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var gl = canvas.getContext('webgl2'); + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var vertexArray = gl.createVertexArray(); + gl.bindVertexArray(vertexArray); + var positionBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0]), gl.STATIC_DRAW); + var texCoordBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0]), gl.STATIC_DRAW); + // We don't use texStorage2D here because texImage2D seems faster + // to upload video texture than texSubImage2D even though the latter + // is supposed to be the recommended way: + // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#use_texstorage_to_create_textures + var inputFrameTexture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + 
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); + // TODO Rename segmentation and person mask to be more specific + var segmentationTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, segmentationWidth, segmentationHeight); + var personMaskTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, frameWidth, frameHeight); + var resizingStage = (0, resizingStage_1.buildResizingStage)(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, tflite); + var loadSegmentationStage = (0, loadSegmentationStage_1.buildLoadSegmentationStage)(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, tflite, segmentationTexture); + var jointBilateralFilterStage = (0, jointBilateralFilterStage_1.buildJointBilateralFilterStage)(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationTexture, segmentationConfig, personMaskTexture, canvas); + var backgroundStage = backgroundConfig.type === 'blur' + ? (0, backgroundBlurStage_1.buildBackgroundBlurStage)(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) + : (0, backgroundImageStage_1.buildBackgroundImageStage)(gl, positionBuffer, texCoordBuffer, personMaskTexture, backgroundImage, canvas); + function render() { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + benchmark.start('inputImageResizeDelay'); + gl.clearColor(0, 0, 0, 0); + gl.clear(gl.COLOR_BUFFER_BIT); + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture); + // texImage2D seems faster than texSubImage2D to upload + // video texture + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, sourcePlayback.htmlElement); + gl.bindVertexArray(vertexArray); + resizingStage.render(); + benchmark.end('inputImageResizeDelay'); + benchmark.start('segmentationDelay'); + if (shouldRunInference) { + tflite._runInference(); + } + if (debounce) { + shouldRunInference = !shouldRunInference; + } + benchmark.end('segmentationDelay'); + benchmark.start('imageCompositionDelay'); + loadSegmentationStage.render(); + jointBilateralFilterStage.render(); + backgroundStage.render(); + benchmark.end('imageCompositionDelay'); + return [2 /*return*/]; + }); + }); + } + function updatePostProcessingConfig(postProcessingConfig) { + jointBilateralFilterStage.updateSigmaSpace(postProcessingConfig.jointBilateralFilter.sigmaSpace); + jointBilateralFilterStage.updateSigmaColor(postProcessingConfig.jointBilateralFilter.sigmaColor); + if (backgroundConfig.type === 'image') { + var backgroundImageStage = backgroundStage; + backgroundImageStage.updateCoverage(postProcessingConfig.coverage); + backgroundImageStage.updateLightWrapping(postProcessingConfig.lightWrapping); + backgroundImageStage.updateBlendMode(postProcessingConfig.blendMode); + } + else if (backgroundConfig.type === 'blur') { + var backgroundBlurStage = backgroundStage; + backgroundBlurStage.updateCoverage(postProcessingConfig.coverage); + } + else { + // TODO Handle no background in a separate pipeline path + var backgroundImageStage = backgroundStage; + backgroundImageStage.updateCoverage([0, 0.9999]); + backgroundImageStage.updateLightWrapping(0); + } + } + function cleanUp() { + backgroundStage.cleanUp(); + jointBilateralFilterStage.cleanUp(); + loadSegmentationStage.cleanUp(); + resizingStage.cleanUp(); + gl.deleteTexture(personMaskTexture); + gl.deleteTexture(segmentationTexture); + gl.deleteTexture(inputFrameTexture); + 
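+ // Stage-specific resources were released above; the shared geometry buffers,
+ // the vertex array, and the common vertex shader are freed last.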
gl.deleteBuffer(texCoordBuffer); + gl.deleteBuffer(positionBuffer); + gl.deleteVertexArray(vertexArray); + gl.deleteShader(vertexShader); + } + return { render: render, updatePostProcessingConfig: updatePostProcessingConfig, cleanUp: cleanUp }; +} +exports.buildWebGL2Pipeline = buildWebGL2Pipeline; +var templateObject_1; + +},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8,"./backgroundBlurStage":10,"./backgroundImageStage":11,"./jointBilateralFilterStage":12,"./loadSegmentationStage":13,"./resizingStage":14}],16:[function(require,module,exports){ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pipeline = exports.ImageFit = exports.WebGL2PipelineType = void 0; +/** + * @private + */ +var WebGL2PipelineType; +(function (WebGL2PipelineType) { + WebGL2PipelineType["Blur"] = "blur"; + WebGL2PipelineType["Image"] = "image"; +})(WebGL2PipelineType || (exports.WebGL2PipelineType = WebGL2PipelineType = {})); +/** + * ImageFit specifies the positioning of an image inside a viewport. + */ +var ImageFit; +(function (ImageFit) { + /** + * Scale the image up or down to fit the viewport while preserving the aspect ratio. + * The image will be fully visible but will add empty space in the viewport if + * aspect ratios do not match. + */ + ImageFit["Contain"] = "Contain"; + /** + * Scale the image to fill both height and width of the viewport while preserving + * the aspect ratio, cropping the image if aspect ratios do not match. + */ + ImageFit["Cover"] = "Cover"; + /** + * Stretch the image to fill the viewport regardless of aspect ratio. + */ + ImageFit["Fill"] = "Fill"; + /** + * Ignore height and width and use the original size. + */ + ImageFit["None"] = "None"; +})(ImageFit || (exports.ImageFit = ImageFit = {})); +/** + * Specifies which pipeline to use when processing video frames. + */ +var Pipeline; +(function (Pipeline) { + /** + * Use the canvas 2d rendering context. Some browsers such as Safari do not + * have full support for this feature. Please test your application to make sure it works as intended. See + * the [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D#browser_compatibility) + * for reference. + */ + Pipeline["Canvas2D"] = "Canvas2D"; + /** + * Use the canvas webgl2 rendering context. Major browsers support this feature. However, it does not work + * on some older browser versions. Please test your application to make sure it works as intended. See + * the [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext#browser_compatibility) + * for reference. 
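+ * @example
+ * A minimal sketch of opting into this pipeline; the `assetsPath` value below
+ * is an assumption and must point to wherever this library's assets are hosted.
+ * ```ts
+ * import { GaussianBlurBackgroundProcessor, Pipeline } from '@twilio/video-processors';
+ *
+ * const blurProcessor = new GaussianBlurBackgroundProcessor({
+ *   assetsPath: 'https://my-server/virtualbackground',
+ *   pipeline: Pipeline.WebGL2,
+ * });
+ * ```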
+ */ + Pipeline["WebGL2"] = "WebGL2"; +})(Pipeline || (exports.Pipeline = Pipeline = {})); + +},{}],17:[function(require,module,exports){ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Benchmark = void 0; +/** + * @private + */ +var Benchmark = /** @class */ (function () { + function Benchmark() { + this._timingCache = new Map(); + this._timings = new Map(); + } + Benchmark.prototype.end = function (name) { + var timing = this._timings.get(name); + if (!timing) { + return; + } + timing.end = Date.now(); + timing.delay = timing.end - timing.start; + this._save(name, __assign({}, timing)); + }; + Benchmark.prototype.getAverageDelay = function (name) { + var timingCache = this._timingCache.get(name); + if (!timingCache || !timingCache.length) { + return; + } + return timingCache.map(function (timing) { return timing.delay; }) + .reduce(function (total, value) { return total += value; }, 0) / timingCache.length; + }; + Benchmark.prototype.getNames = function () { + return Array.from(this._timingCache.keys()); + }; + Benchmark.prototype.getRate = function (name) { + var timingCache = this._timingCache.get(name); + if (!timingCache || timingCache.length < 2) { + return; + } + var totalDelay = timingCache[timingCache.length - 1].end - timingCache[0].start; + return (timingCache.length / totalDelay) * 1000; + }; + Benchmark.prototype.start = function (name) { + var timing = this._timings.get(name); + if (!timing) { + timing = {}; + this._timings.set(name, timing); + } + timing.start = Date.now(); + delete timing.end; + delete timing.delay; + }; + Benchmark.prototype._save = function (name, timing) { + var timingCache = this._timingCache.get(name); + if (!timingCache) { + timingCache = []; + this._timingCache.set(name, timingCache); + } + timingCache.push(timing); + if (timingCache.length > Benchmark.cacheSize) { + timingCache.splice(0, timingCache.length - Benchmark.cacheSize); + } + }; + // NOTE (csantos): How many timing information to save per benchmark. + // This is about the amount of timing info generated on a 24fps input. + // Enough samples to calculate fps + Benchmark.cacheSize = 41; + return Benchmark; +}()); +exports.Benchmark = Benchmark; + +},{}],18:[function(require,module,exports){ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isSupported = exports.isBrowserSupported = void 0; +/** + * @private + */ +function getCanvas() { + return typeof window.OffscreenCanvas !== 'undefined' ? new window.OffscreenCanvas(1, 1) : document.createElement('canvas'); +} +/** + * @private + */ +function isBrowserSupported() { + if (typeof window !== 'undefined' && typeof document !== 'undefined') { + return !!(getCanvas().getContext('2d') || getCanvas().getContext('webgl2')); + } + else { + return false; + } +} +exports.isBrowserSupported = isBrowserSupported; +/** + * Check if the current browser is officially supported by twilio-video-procesors.js. + * This is set to `true` for browsers that supports canvas + * [2D](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D) or + * [webgl2](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext) + * rendering context. 
+ * @example + * ```ts + * import { isSupported } from '@twilio/video-processors'; + * + * if (isSupported) { + * // Initialize the background processors + * } + * ``` + */ +exports.isSupported = isBrowserSupported(); + +},{}],19:[function(require,module,exports){ +"use strict"; +// This file is generated on build. To make changes, see scripts/version.js +Object.defineProperty(exports, "__esModule", { value: true }); +exports.version = void 0; +/** + * The current version of the library. + */ +exports.version = '2.1.0'; + +},{}]},{},[2]); diff --git a/dist/build/twilio-video-processors.min.js b/dist/build/twilio-video-processors.min.js new file mode 100644 index 0000000..6e82a1a --- /dev/null +++ b/dist/build/twilio-video-processors.min.js @@ -0,0 +1,38 @@ +/*! twilio-video-processors.js 2.1.0 + +The following license applies to all parts of this software except as +documented below. + + Copyright (C) 2022 Twilio Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + 3. Neither the name of Twilio nor the names of its contributors may + be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +*/ +(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i0&&t[t.length-1])&&(op[0]===6||op[0]===2)){_=0;continue}if(op[0]===3&&(!t||op[1]>t[0]&&op[1]=this._historyCount){this._masks.splice(0,this._masks.length-this._historyCount+1)}this._masks.push(mask)};BackgroundProcessor.prototype._applyAlpha=function(imageData){var weightedSum=this._masks.reduce(function(sum,mask,j){return sum+(j+1)*(j+1)},0);var pixels=imageData.height*imageData.width;var _loop_1=function(i){var w=this_1._masks.reduce(function(sum,mask,j){return sum+mask[i]*(j+1)*(j+1)},0)/weightedSum;imageData.data[i*4+3]=Math.round(w*255)};var this_1=this;for(var i=0;i=this._personProbabilityThreshold)*personProbability}return inferenceData};BackgroundProcessor._loadedScripts=[];return BackgroundProcessor}(Processor_1.Processor);exports.BackgroundProcessor=BackgroundProcessor},{"../../constants":1,"../../types":16,"../../utils/Benchmark":17,"../../utils/version":19,"../Processor":3,"../webgl2":9}],5:[function(require,module,exports){"use strict";var __extends=this&&this.__extends||function(){var extendStatics=function(d,b){extendStatics=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(d,b){d.__proto__=b}||function(d,b){for(var p in b)if(Object.prototype.hasOwnProperty.call(b,p))d[p]=b[p]};return extendStatics(d,b)};return function(d,b){if(typeof b!=="function"&&b!==null)throw new TypeError("Class extends value "+String(b)+" is not a constructor or null");extendStatics(d,b);function __(){this.constructor=d}d.prototype=b===null?Object.create(b):(__.prototype=b.prototype,new __)}}();Object.defineProperty(exports,"__esModule",{value:true});exports.GaussianBlurBackgroundProcessor=void 0;var BackgroundProcessor_1=require("./BackgroundProcessor");var constants_1=require("../../constants");var types_1=require("../../types");var GaussianBlurBackgroundProcessor=function(_super){__extends(GaussianBlurBackgroundProcessor,_super);function GaussianBlurBackgroundProcessor(options){var _this=_super.call(this,options)||this;_this._blurFilterRadius=constants_1.BLUR_FILTER_RADIUS;_this._name="GaussianBlurBackgroundProcessor";_this.blurFilterRadius=options.blurFilterRadius;return _this}Object.defineProperty(GaussianBlurBackgroundProcessor.prototype,"blurFilterRadius",{get:function(){return this._blurFilterRadius},set:function(radius){if(!radius){console.warn("Valid blur filter radius not found. 
Using ".concat(constants_1.BLUR_FILTER_RADIUS," as default."));radius=constants_1.BLUR_FILTER_RADIUS}this._blurFilterRadius=radius},enumerable:false,configurable:true});GaussianBlurBackgroundProcessor.prototype._getWebGL2PipelineType=function(){return types_1.WebGL2PipelineType.Blur};GaussianBlurBackgroundProcessor.prototype._setBackground=function(inputFrame){if(!this._outputContext){return}var ctx=this._outputContext;ctx.filter="blur(".concat(this._blurFilterRadius,"px)");ctx.drawImage(inputFrame,0,0)};return GaussianBlurBackgroundProcessor}(BackgroundProcessor_1.BackgroundProcessor);exports.GaussianBlurBackgroundProcessor=GaussianBlurBackgroundProcessor},{"../../constants":1,"../../types":16,"./BackgroundProcessor":4}],6:[function(require,module,exports){"use strict";var __extends=this&&this.__extends||function(){var extendStatics=function(d,b){extendStatics=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(d,b){d.__proto__=b}||function(d,b){for(var p in b)if(Object.prototype.hasOwnProperty.call(b,p))d[p]=b[p]};return extendStatics(d,b)};return function(d,b){if(typeof b!=="function"&&b!==null)throw new TypeError("Class extends value "+String(b)+" is not a constructor or null");extendStatics(d,b);function __(){this.constructor=d}d.prototype=b===null?Object.create(b):(__.prototype=b.prototype,new __)}}();Object.defineProperty(exports,"__esModule",{value:true});exports.VirtualBackgroundProcessor=void 0;var BackgroundProcessor_1=require("./BackgroundProcessor");var types_1=require("../../types");var VirtualBackgroundProcessor=function(_super){__extends(VirtualBackgroundProcessor,_super);function VirtualBackgroundProcessor(options){var _this=_super.call(this,options)||this;_this._name="VirtualBackgroundProcessor";_this.backgroundImage=options.backgroundImage;_this.fitType=options.fitType;return _this}Object.defineProperty(VirtualBackgroundProcessor.prototype,"backgroundImage",{get:function(){return this._backgroundImage},set:function(image){var _a;if(!image||!image.complete||!image.naturalHeight){throw new Error("Invalid image. Make sure that the image is an HTMLImageElement and has been successfully loaded")}this._backgroundImage=image;(_a=this._webgl2Pipeline)===null||_a===void 0?void 0:_a.cleanUp();this._webgl2Pipeline=null},enumerable:false,configurable:true});Object.defineProperty(VirtualBackgroundProcessor.prototype,"fitType",{get:function(){return this._fitType},set:function(fitType){var validTypes=Object.keys(types_1.ImageFit);if(!validTypes.includes(fitType)){console.warn("Valid fitType not found. 
Using '".concat(types_1.ImageFit.Fill,"' as default."));fitType=types_1.ImageFit.Fill}this._fitType=fitType},enumerable:false,configurable:true});VirtualBackgroundProcessor.prototype._getWebGL2PipelineType=function(){return types_1.WebGL2PipelineType.Image};VirtualBackgroundProcessor.prototype._setBackground=function(){if(!this._outputContext||!this._outputCanvas){return}var img=this._backgroundImage;var imageWidth=img.naturalWidth;var imageHeight=img.naturalHeight;var canvasWidth=this._outputCanvas.width;var canvasHeight=this._outputCanvas.height;var ctx=this._outputContext;if(this._fitType===types_1.ImageFit.Fill){ctx.drawImage(img,0,0,imageWidth,imageHeight,0,0,canvasWidth,canvasHeight)}else if(this._fitType===types_1.ImageFit.None){ctx.drawImage(img,0,0,imageWidth,imageHeight)}else if(this._fitType===types_1.ImageFit.Contain){var _a=this._getFitPosition(imageWidth,imageHeight,canvasWidth,canvasHeight,types_1.ImageFit.Contain),x=_a.x,y=_a.y,w=_a.w,h=_a.h;ctx.drawImage(img,0,0,imageWidth,imageHeight,x,y,w,h)}else if(this._fitType===types_1.ImageFit.Cover){var _b=this._getFitPosition(imageWidth,imageHeight,canvasWidth,canvasHeight,types_1.ImageFit.Cover),x=_b.x,y=_b.y,w=_b.w,h=_b.h;ctx.drawImage(img,0,0,imageWidth,imageHeight,x,y,w,h)}};VirtualBackgroundProcessor.prototype._getFitPosition=function(contentWidth,contentHeight,viewportWidth,viewportHeight,type){var factor=viewportWidth/contentWidth;var newContentWidth=viewportWidth;var newContentHeight=factor*contentHeight;if(type===types_1.ImageFit.Contain&&newContentHeight>viewportHeight||type===types_1.ImageFit.Cover&&viewportHeight>newContentHeight){factor=viewportHeight/newContentHeight;newContentWidth=factor*newContentWidth;newContentHeight=viewportHeight}var x=(viewportWidth-newContentWidth)/2;var y=(viewportHeight-newContentHeight)/2;return{x:x,y:y,w:newContentWidth,h:newContentHeight}};return VirtualBackgroundProcessor}(BackgroundProcessor_1.BackgroundProcessor);exports.VirtualBackgroundProcessor=VirtualBackgroundProcessor},{"../../types":16,"./BackgroundProcessor":4}],7:[function(require,module,exports){"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.inputResolutions=void 0;exports.inputResolutions={"640x360":[640,360],"256x256":[256,256],"256x144":[256,144],"160x96":[160,96]}},{}],8:[function(require,module,exports){"use strict";var __awaiter=this&&this.__awaiter||function(thisArg,_arguments,P,generator){function adopt(value){return value instanceof P?value:new P(function(resolve){resolve(value)})}return new(P||(P=Promise))(function(resolve,reject){function fulfilled(value){try{step(generator.next(value))}catch(e){reject(e)}}function rejected(value){try{step(generator["throw"](value))}catch(e){reject(e)}}function step(result){result.done?resolve(result.value):adopt(result.value).then(fulfilled,rejected)}step((generator=generator.apply(thisArg,_arguments||[])).next())})};var __generator=this&&this.__generator||function(thisArg,body){var _={label:0,sent:function(){if(t[0]&1)throw t[1];return t[1]},trys:[],ops:[]},f,y,t,g;return g={next:verb(0),throw:verb(1),return:verb(2)},typeof Symbol==="function"&&(g[Symbol.iterator]=function(){return this}),g;function verb(n){return function(v){return step([n,v])}}function step(op){if(f)throw new TypeError("Generator is already executing.");while(g&&(g=0,op[0]&&(_=0)),_)try{if(f=1,y&&(t=op[0]&2?y["return"]:op[0]?y["throw"]||((t=y["return"])&&t.call(y),0):y.next)&&!(t=t.call(y,op[1])).done)return t;if(y=0,t)op=[op[0]&2,t.value];switch(op[0]){case 0:case 
1:t=op;break;case 4:_.label++;return{value:op[1],done:false};case 5:_.label++;y=op[1];op=[0];continue;case 7:op=_.ops.pop();_.trys.pop();continue;default:if(!(t=_.trys,t=t.length>0&&t[t.length-1])&&(op[0]===6||op[0]===2)){_=0;continue}if(op[0]===3&&(!t||op[1]>t[0]&&op[1]= 4.0) {\n outColor = vec4(vec3(0.0), 1.0);\n } else {\n for (float i = -u_radius + u_offset; i <= u_radius; i += u_step) {\n for (float j = -u_radius + u_offset; j <= u_radius; j += u_step) {\n vec2 shift = vec2(j, i) * u_texelSize;\n vec2 coord = vec2(centerCoord + shift);\n vec3 frameColor = texture(u_inputFrame, coord).rgb;\n float outVal = texture(u_segmentationMask, coord).a;\n\n spaceWeight = gaussian(distance(centerCoord, coord), u_sigmaTexel);\n colorWeight = gaussian(distance(centerColor, frameColor), u_sigmaColor);\n totalWeight += spaceWeight * colorWeight;\n\n newVal += spaceWeight * colorWeight * outVal;\n }\n }\n newVal /= totalWeight;\n\n outColor = vec4(vec3(0.0), newVal);\n }\n }\n "],["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n float coeff = -0.5 / (sigma * sigma * 4.0 + 1.0e-6);\n return exp((x * x) * coeff);\n }\n\n void main() {\n vec2 centerCoord = v_texCoord;\n vec3 centerColor = texture(u_inputFrame, centerCoord).rgb;\n float newVal = 0.0;\n\n float spaceWeight = 0.0;\n float colorWeight = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(centerCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(centerCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(centerCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(centerCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n outColor = vec4(vec3(0.0), 0.0);\n } else if (totalSegAlpha >= 4.0) {\n outColor = vec4(vec3(0.0), 1.0);\n } else {\n for (float i = -u_radius + u_offset; i <= u_radius; i += u_step) {\n for (float j = -u_radius + u_offset; j <= u_radius; j += u_step) {\n vec2 shift = vec2(j, i) * u_texelSize;\n vec2 coord = vec2(centerCoord + shift);\n vec3 frameColor = texture(u_inputFrame, coord).rgb;\n float outVal = texture(u_segmentationMask, coord).a;\n\n spaceWeight = gaussian(distance(centerCoord, coord), u_sigmaTexel);\n colorWeight = gaussian(distance(centerColor, frameColor), u_sigmaColor);\n totalWeight += spaceWeight * colorWeight;\n\n newVal += spaceWeight * colorWeight * outVal;\n }\n }\n newVal /= totalWeight;\n\n outColor = vec4(vec3(0.0), newVal);\n }\n }\n "])));var _a=segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution],segmentationWidth=_a[0],segmentationHeight=_a[1];var outputWidth=canvas.width,outputHeight=canvas.height;var texelWidth=1/outputWidth;var texelHeight=1/outputHeight;var 
fragmentShader=(0,webglHelper_1.compileShader)(gl,gl.FRAGMENT_SHADER,fragmentShaderSource);var program=(0,webglHelper_1.createPiplelineStageProgram)(gl,vertexShader,fragmentShader,positionBuffer,texCoordBuffer);var inputFrameLocation=gl.getUniformLocation(program,"u_inputFrame");var segmentationMaskLocation=gl.getUniformLocation(program,"u_segmentationMask");var texelSizeLocation=gl.getUniformLocation(program,"u_texelSize");var stepLocation=gl.getUniformLocation(program,"u_step");var radiusLocation=gl.getUniformLocation(program,"u_radius");var offsetLocation=gl.getUniformLocation(program,"u_offset");var sigmaTexelLocation=gl.getUniformLocation(program,"u_sigmaTexel");var sigmaColorLocation=gl.getUniformLocation(program,"u_sigmaColor");var frameBuffer=gl.createFramebuffer();gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer);gl.framebufferTexture2D(gl.FRAMEBUFFER,gl.COLOR_ATTACHMENT0,gl.TEXTURE_2D,outputTexture,0);gl.useProgram(program);gl.uniform1i(inputFrameLocation,0);gl.uniform1i(segmentationMaskLocation,1);gl.uniform2f(texelSizeLocation,texelWidth,texelHeight);updateSigmaSpace(0);updateSigmaColor(0);function render(){gl.viewport(0,0,outputWidth,outputHeight);gl.useProgram(program);gl.activeTexture(gl.TEXTURE1);gl.bindTexture(gl.TEXTURE_2D,inputTexture);gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer);gl.drawArrays(gl.TRIANGLE_STRIP,0,4)}function updateSigmaSpace(sigmaSpace){sigmaSpace*=Math.max(outputWidth/segmentationWidth,outputHeight/segmentationHeight);var kSparsityFactor=.66;var sparsity=Math.max(1,Math.sqrt(sigmaSpace)*kSparsityFactor);var step=sparsity;var radius=sigmaSpace;var offset=step>1?step*.5:0;var sigmaTexel=Math.max(texelWidth,texelHeight)*sigmaSpace;gl.useProgram(program);gl.uniform1f(stepLocation,step);gl.uniform1f(radiusLocation,radius);gl.uniform1f(offsetLocation,offset);gl.uniform1f(sigmaTexelLocation,sigmaTexel)}function updateSigmaColor(sigmaColor){gl.useProgram(program);gl.uniform1f(sigmaColorLocation,sigmaColor)}function cleanUp(){gl.deleteFramebuffer(frameBuffer);gl.deleteProgram(program);gl.deleteShader(fragmentShader)}return{render:render,updateSigmaSpace:updateSigmaSpace,updateSigmaColor:updateSigmaColor,cleanUp:cleanUp}}exports.buildJointBilateralFilterStage=buildJointBilateralFilterStage;var templateObject_1},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8}],13:[function(require,module,exports){"use strict";var __makeTemplateObject=this&&this.__makeTemplateObject||function(cooked,raw){if(Object.defineProperty){Object.defineProperty(cooked,"raw",{value:raw})}else{cooked.raw=raw}return cooked};Object.defineProperty(exports,"__esModule",{value:true});exports.buildLoadSegmentationStage=void 0;var segmentationHelper_1=require("../helpers/segmentationHelper");var webglHelper_1=require("../helpers/webglHelper");function buildLoadSegmentationStage(gl,vertexShader,positionBuffer,texCoordBuffer,segmentationConfig,tflite,outputTexture){var fragmentShaderSource=(0,webglHelper_1.glsl)(templateObject_1||(templateObject_1=__makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).r;\n outColor = vec4(vec3(0.0), segmentation);\n }\n "],["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).r;\n outColor = vec4(vec3(0.0), 
segmentation);\n }\n "])));var tfliteOutputMemoryOffset=tflite._getOutputMemoryOffset()/4;var _a=segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution],segmentationWidth=_a[0],segmentationHeight=_a[1];var fragmentShader=(0,webglHelper_1.compileShader)(gl,gl.FRAGMENT_SHADER,fragmentShaderSource);var program=(0,webglHelper_1.createPiplelineStageProgram)(gl,vertexShader,fragmentShader,positionBuffer,texCoordBuffer);var inputLocation=gl.getUniformLocation(program,"u_inputSegmentation");var inputTexture=(0,webglHelper_1.createTexture)(gl,gl.R32F,segmentationWidth,segmentationHeight);var frameBuffer=gl.createFramebuffer();gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer);gl.framebufferTexture2D(gl.FRAMEBUFFER,gl.COLOR_ATTACHMENT0,gl.TEXTURE_2D,outputTexture,0);gl.useProgram(program);gl.uniform1i(inputLocation,1);function render(){gl.viewport(0,0,segmentationWidth,segmentationHeight);gl.useProgram(program);gl.activeTexture(gl.TEXTURE1);gl.bindTexture(gl.TEXTURE_2D,inputTexture);gl.texSubImage2D(gl.TEXTURE_2D,0,0,0,segmentationWidth,segmentationHeight,gl.RED,gl.FLOAT,tflite.HEAPF32,tfliteOutputMemoryOffset);gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer);gl.drawArrays(gl.TRIANGLE_STRIP,0,4)}function cleanUp(){gl.deleteFramebuffer(frameBuffer);gl.deleteTexture(inputTexture);gl.deleteProgram(program);gl.deleteShader(fragmentShader)}return{render:render,cleanUp:cleanUp}}exports.buildLoadSegmentationStage=buildLoadSegmentationStage;var templateObject_1},{"../helpers/segmentationHelper":7,"../helpers/webglHelper":8}],14:[function(require,module,exports){"use strict";var __makeTemplateObject=this&&this.__makeTemplateObject||function(cooked,raw){if(Object.defineProperty){Object.defineProperty(cooked,"raw",{value:raw})}else{cooked.raw=raw}return cooked};Object.defineProperty(exports,"__esModule",{value:true});exports.buildResizingStage=void 0;var segmentationHelper_1=require("../helpers/segmentationHelper");var webglHelper_1=require("../helpers/webglHelper");function buildResizingStage(gl,vertexShader,positionBuffer,texCoordBuffer,segmentationConfig,tflite){var fragmentShaderSource=(0,webglHelper_1.glsl)(templateObject_1||(templateObject_1=__makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n outColor = texture(u_inputFrame, v_texCoord);\n }\n "],["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n outColor = texture(u_inputFrame, v_texCoord);\n }\n "])));var tfliteInputMemoryOffset=tflite._getInputMemoryOffset()/4;var _a=segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution],outputWidth=_a[0],outputHeight=_a[1];var outputPixelCount=outputWidth*outputHeight;var fragmentShader=(0,webglHelper_1.compileShader)(gl,gl.FRAGMENT_SHADER,fragmentShaderSource);var program=(0,webglHelper_1.createPiplelineStageProgram)(gl,vertexShader,fragmentShader,positionBuffer,texCoordBuffer);var inputFrameLocation=gl.getUniformLocation(program,"u_inputFrame");var outputTexture=(0,webglHelper_1.createTexture)(gl,gl.RGBA8,outputWidth,outputHeight);var frameBuffer=gl.createFramebuffer();gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer);gl.framebufferTexture2D(gl.FRAMEBUFFER,gl.COLOR_ATTACHMENT0,gl.TEXTURE_2D,outputTexture,0);var outputPixels=new Uint8Array(outputPixelCount*4);gl.useProgram(program);gl.uniform1i(inputFrameLocation,0);function 
render(){gl.viewport(0,0,outputWidth,outputHeight);gl.useProgram(program);gl.bindFramebuffer(gl.FRAMEBUFFER,frameBuffer);gl.drawArrays(gl.TRIANGLE_STRIP,0,4);(0,webglHelper_1.readPixelsAsync)(gl,0,0,outputWidth,outputHeight,gl.RGBA,gl.UNSIGNED_BYTE,outputPixels);for(var i=0;i0&&t[t.length-1])&&(op[0]===6||op[0]===2)){_=0;continue}if(op[0]===3&&(!t||op[1]>t[0]&&op[1]Benchmark.cacheSize){timingCache.splice(0,timingCache.length-Benchmark.cacheSize)}};Benchmark.cacheSize=41;return Benchmark}();exports.Benchmark=Benchmark},{}],18:[function(require,module,exports){"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.isSupported=exports.isBrowserSupported=void 0;function getCanvas(){return typeof window.OffscreenCanvas!=="undefined"?new window.OffscreenCanvas(1,1):document.createElement("canvas")}function isBrowserSupported(){if(typeof window!=="undefined"&&typeof document!=="undefined"){return!!(getCanvas().getContext("2d")||getCanvas().getContext("webgl2"))}else{return false}}exports.isBrowserSupported=isBrowserSupported;exports.isSupported=isBrowserSupported()},{}],19:[function(require,module,exports){"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.version=void 0;exports.version="2.1.0"},{}]},{},[2]); \ No newline at end of file diff --git a/dist/docs/.nojekyll b/dist/docs/.nojekyll new file mode 100644 index 0000000..e2ac661 --- /dev/null +++ b/dist/docs/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/dist/docs/assets/highlight.css b/dist/docs/assets/highlight.css new file mode 100644 index 0000000..cce54cc --- /dev/null +++ b/dist/docs/assets/highlight.css @@ -0,0 +1,120 @@ +:root { + --light-hl-0: #001080; + --dark-hl-0: #9CDCFE; + --light-hl-1: #000000; + --dark-hl-1: #D4D4D4; + --light-hl-2: #AF00DB; + --dark-hl-2: #C586C0; + --light-hl-3: #0000FF; + --dark-hl-3: #569CD6; + --light-hl-4: #A31515; + --dark-hl-4: #CE9178; + --light-hl-5: #800000; + --dark-hl-5: #808080; + --light-hl-6: #800000; + --dark-hl-6: #569CD6; + --light-hl-7: #000000FF; + --dark-hl-7: #D4D4D4; + --light-hl-8: #E50000; + --dark-hl-8: #9CDCFE; + --light-hl-9: #0000FF; + --dark-hl-9: #CE9178; + --light-hl-10: #0070C1; + --dark-hl-10: #4FC1FF; + --light-hl-11: #795E26; + --dark-hl-11: #DCDCAA; + --light-hl-12: #008000; + --dark-hl-12: #6A9955; + --light-hl-13: #098658; + --dark-hl-13: #B5CEA8; + --light-code-background: #FFFFFF; + --dark-code-background: #1E1E1E; +} + +@media (prefers-color-scheme: light) { :root { + --hl-0: var(--light-hl-0); + --hl-1: var(--light-hl-1); + --hl-2: var(--light-hl-2); + --hl-3: var(--light-hl-3); + --hl-4: var(--light-hl-4); + --hl-5: var(--light-hl-5); + --hl-6: var(--light-hl-6); + --hl-7: var(--light-hl-7); + --hl-8: var(--light-hl-8); + --hl-9: var(--light-hl-9); + --hl-10: var(--light-hl-10); + --hl-11: var(--light-hl-11); + --hl-12: var(--light-hl-12); + --hl-13: var(--light-hl-13); + --code-background: var(--light-code-background); +} } + +@media (prefers-color-scheme: dark) { :root { + --hl-0: var(--dark-hl-0); + --hl-1: var(--dark-hl-1); + --hl-2: var(--dark-hl-2); + --hl-3: var(--dark-hl-3); + --hl-4: var(--dark-hl-4); + --hl-5: var(--dark-hl-5); + --hl-6: var(--dark-hl-6); + --hl-7: var(--dark-hl-7); + --hl-8: var(--dark-hl-8); + --hl-9: var(--dark-hl-9); + --hl-10: var(--dark-hl-10); + --hl-11: var(--dark-hl-11); + --hl-12: var(--dark-hl-12); + --hl-13: 
var(--dark-hl-13); + --code-background: var(--dark-code-background); +} } + +:root[data-theme='light'] { + --hl-0: var(--light-hl-0); + --hl-1: var(--light-hl-1); + --hl-2: var(--light-hl-2); + --hl-3: var(--light-hl-3); + --hl-4: var(--light-hl-4); + --hl-5: var(--light-hl-5); + --hl-6: var(--light-hl-6); + --hl-7: var(--light-hl-7); + --hl-8: var(--light-hl-8); + --hl-9: var(--light-hl-9); + --hl-10: var(--light-hl-10); + --hl-11: var(--light-hl-11); + --hl-12: var(--light-hl-12); + --hl-13: var(--light-hl-13); + --code-background: var(--light-code-background); +} + +:root[data-theme='dark'] { + --hl-0: var(--dark-hl-0); + --hl-1: var(--dark-hl-1); + --hl-2: var(--dark-hl-2); + --hl-3: var(--dark-hl-3); + --hl-4: var(--dark-hl-4); + --hl-5: var(--dark-hl-5); + --hl-6: var(--dark-hl-6); + --hl-7: var(--dark-hl-7); + --hl-8: var(--dark-hl-8); + --hl-9: var(--dark-hl-9); + --hl-10: var(--dark-hl-10); + --hl-11: var(--dark-hl-11); + --hl-12: var(--dark-hl-12); + --hl-13: var(--dark-hl-13); + --code-background: var(--dark-code-background); +} + +.hl-0 { color: var(--hl-0); } +.hl-1 { color: var(--hl-1); } +.hl-2 { color: var(--hl-2); } +.hl-3 { color: var(--hl-3); } +.hl-4 { color: var(--hl-4); } +.hl-5 { color: var(--hl-5); } +.hl-6 { color: var(--hl-6); } +.hl-7 { color: var(--hl-7); } +.hl-8 { color: var(--hl-8); } +.hl-9 { color: var(--hl-9); } +.hl-10 { color: var(--hl-10); } +.hl-11 { color: var(--hl-11); } +.hl-12 { color: var(--hl-12); } +.hl-13 { color: var(--hl-13); } +pre, code { background: var(--code-background); } diff --git a/dist/docs/assets/main.js b/dist/docs/assets/main.js new file mode 100644 index 0000000..3cee05e --- /dev/null +++ b/dist/docs/assets/main.js @@ -0,0 +1,58 @@ +"use strict"; +"use strict";(()=>{var Se=Object.create;var re=Object.defineProperty;var we=Object.getOwnPropertyDescriptor;var Te=Object.getOwnPropertyNames;var ke=Object.getPrototypeOf,Qe=Object.prototype.hasOwnProperty;var Pe=(t,e)=>()=>(e||t((e={exports:{}}).exports,e),e.exports);var Ie=(t,e,r,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let i of Te(e))!Qe.call(t,i)&&i!==r&&re(t,i,{get:()=>e[i],enumerable:!(n=we(e,i))||n.enumerable});return t};var Ce=(t,e,r)=>(r=t!=null?Se(ke(t)):{},Ie(e||!t||!t.__esModule?re(r,"default",{value:t,enumerable:!0}):r,t));var ae=Pe((se,oe)=>{(function(){var t=function(e){var r=new t.Builder;return r.pipeline.add(t.trimmer,t.stopWordFilter,t.stemmer),r.searchPipeline.add(t.stemmer),e.call(r,r),r.build()};t.version="2.3.9";t.utils={},t.utils.warn=function(e){return function(r){e.console&&console.warn&&console.warn(r)}}(this),t.utils.asString=function(e){return e==null?"":e.toString()},t.utils.clone=function(e){if(e==null)return e;for(var r=Object.create(null),n=Object.keys(e),i=0;i0){var d=t.utils.clone(r)||{};d.position=[a,u],d.index=s.length,s.push(new t.Token(n.slice(a,o),d))}a=o+1}}return s},t.tokenizer.separator=/[\s\-]+/;t.Pipeline=function(){this._stack=[]},t.Pipeline.registeredFunctions=Object.create(null),t.Pipeline.registerFunction=function(e,r){r in this.registeredFunctions&&t.utils.warn("Overwriting existing registered function: "+r),e.label=r,t.Pipeline.registeredFunctions[e.label]=e},t.Pipeline.warnIfFunctionNotRegistered=function(e){var r=e.label&&e.label in this.registeredFunctions;r||t.utils.warn(`Function is not registered with pipeline. This may cause problems when serialising the index. 
+`,e)},t.Pipeline.load=function(e){var r=new t.Pipeline;return e.forEach(function(n){var i=t.Pipeline.registeredFunctions[n];if(i)r.add(i);else throw new Error("Cannot load unregistered function: "+n)}),r},t.Pipeline.prototype.add=function(){var e=Array.prototype.slice.call(arguments);e.forEach(function(r){t.Pipeline.warnIfFunctionNotRegistered(r),this._stack.push(r)},this)},t.Pipeline.prototype.after=function(e,r){t.Pipeline.warnIfFunctionNotRegistered(r);var n=this._stack.indexOf(e);if(n==-1)throw new Error("Cannot find existingFn");n=n+1,this._stack.splice(n,0,r)},t.Pipeline.prototype.before=function(e,r){t.Pipeline.warnIfFunctionNotRegistered(r);var n=this._stack.indexOf(e);if(n==-1)throw new Error("Cannot find existingFn");this._stack.splice(n,0,r)},t.Pipeline.prototype.remove=function(e){var r=this._stack.indexOf(e);r!=-1&&this._stack.splice(r,1)},t.Pipeline.prototype.run=function(e){for(var r=this._stack.length,n=0;n1&&(oe&&(n=s),o!=e);)i=n-r,s=r+Math.floor(i/2),o=this.elements[s*2];if(o==e||o>e)return s*2;if(ol?d+=2:a==l&&(r+=n[u+1]*i[d+1],u+=2,d+=2);return r},t.Vector.prototype.similarity=function(e){return this.dot(e)/this.magnitude()||0},t.Vector.prototype.toArray=function(){for(var e=new Array(this.elements.length/2),r=1,n=0;r0){var o=s.str.charAt(0),a;o in s.node.edges?a=s.node.edges[o]:(a=new t.TokenSet,s.node.edges[o]=a),s.str.length==1&&(a.final=!0),i.push({node:a,editsRemaining:s.editsRemaining,str:s.str.slice(1)})}if(s.editsRemaining!=0){if("*"in s.node.edges)var l=s.node.edges["*"];else{var l=new t.TokenSet;s.node.edges["*"]=l}if(s.str.length==0&&(l.final=!0),i.push({node:l,editsRemaining:s.editsRemaining-1,str:s.str}),s.str.length>1&&i.push({node:s.node,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)}),s.str.length==1&&(s.node.final=!0),s.str.length>=1){if("*"in s.node.edges)var u=s.node.edges["*"];else{var u=new t.TokenSet;s.node.edges["*"]=u}s.str.length==1&&(u.final=!0),i.push({node:u,editsRemaining:s.editsRemaining-1,str:s.str.slice(1)})}if(s.str.length>1){var d=s.str.charAt(0),m=s.str.charAt(1),y;m in s.node.edges?y=s.node.edges[m]:(y=new t.TokenSet,s.node.edges[m]=y),s.str.length==1&&(y.final=!0),i.push({node:y,editsRemaining:s.editsRemaining-1,str:d+s.str.slice(2)})}}}return n},t.TokenSet.fromString=function(e){for(var r=new t.TokenSet,n=r,i=0,s=e.length;i=e;r--){var n=this.uncheckedNodes[r],i=n.child.toString();i in this.minimizedNodes?n.parent.edges[n.char]=this.minimizedNodes[i]:(n.child._str=i,this.minimizedNodes[i]=n.child),this.uncheckedNodes.pop()}};t.Index=function(e){this.invertedIndex=e.invertedIndex,this.fieldVectors=e.fieldVectors,this.tokenSet=e.tokenSet,this.fields=e.fields,this.pipeline=e.pipeline},t.Index.prototype.search=function(e){return this.query(function(r){var n=new t.QueryParser(e,r);n.parse()})},t.Index.prototype.query=function(e){for(var r=new t.Query(this.fields),n=Object.create(null),i=Object.create(null),s=Object.create(null),o=Object.create(null),a=Object.create(null),l=0;l1?this._b=1:this._b=e},t.Builder.prototype.k1=function(e){this._k1=e},t.Builder.prototype.add=function(e,r){var n=e[this._ref],i=Object.keys(this._fields);this._documents[n]=r||{},this.documentCount+=1;for(var s=0;s=this.length)return t.QueryLexer.EOS;var e=this.str.charAt(this.pos);return this.pos+=1,e},t.QueryLexer.prototype.width=function(){return 
this.pos-this.start},t.QueryLexer.prototype.ignore=function(){this.start==this.pos&&(this.pos+=1),this.start=this.pos},t.QueryLexer.prototype.backup=function(){this.pos-=1},t.QueryLexer.prototype.acceptDigitRun=function(){var e,r;do e=this.next(),r=e.charCodeAt(0);while(r>47&&r<58);e!=t.QueryLexer.EOS&&this.backup()},t.QueryLexer.prototype.more=function(){return this.pos1&&(e.backup(),e.emit(t.QueryLexer.TERM)),e.ignore(),e.more())return t.QueryLexer.lexText},t.QueryLexer.lexEditDistance=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.EDIT_DISTANCE),t.QueryLexer.lexText},t.QueryLexer.lexBoost=function(e){return e.ignore(),e.acceptDigitRun(),e.emit(t.QueryLexer.BOOST),t.QueryLexer.lexText},t.QueryLexer.lexEOS=function(e){e.width()>0&&e.emit(t.QueryLexer.TERM)},t.QueryLexer.termSeparator=t.tokenizer.separator,t.QueryLexer.lexText=function(e){for(;;){var r=e.next();if(r==t.QueryLexer.EOS)return t.QueryLexer.lexEOS;if(r.charCodeAt(0)==92){e.escapeCharacter();continue}if(r==":")return t.QueryLexer.lexField;if(r=="~")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexEditDistance;if(r=="^")return e.backup(),e.width()>0&&e.emit(t.QueryLexer.TERM),t.QueryLexer.lexBoost;if(r=="+"&&e.width()===1||r=="-"&&e.width()===1)return e.emit(t.QueryLexer.PRESENCE),t.QueryLexer.lexText;if(r.match(t.QueryLexer.termSeparator))return t.QueryLexer.lexTerm}},t.QueryParser=function(e,r){this.lexer=new t.QueryLexer(e),this.query=r,this.currentClause={},this.lexemeIdx=0},t.QueryParser.prototype.parse=function(){this.lexer.run(),this.lexemes=this.lexer.lexemes;for(var e=t.QueryParser.parseClause;e;)e=e(this);return this.query},t.QueryParser.prototype.peekLexeme=function(){return this.lexemes[this.lexemeIdx]},t.QueryParser.prototype.consumeLexeme=function(){var e=this.peekLexeme();return this.lexemeIdx+=1,e},t.QueryParser.prototype.nextClause=function(){var e=this.currentClause;this.query.clause(e),this.currentClause={}},t.QueryParser.parseClause=function(e){var r=e.peekLexeme();if(r!=null)switch(r.type){case t.QueryLexer.PRESENCE:return t.QueryParser.parsePresence;case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var n="expected either a field or a term, found "+r.type;throw r.str.length>=1&&(n+=" with value '"+r.str+"'"),new t.QueryParseError(n,r.start,r.end)}},t.QueryParser.parsePresence=function(e){var r=e.consumeLexeme();if(r!=null){switch(r.str){case"-":e.currentClause.presence=t.Query.presence.PROHIBITED;break;case"+":e.currentClause.presence=t.Query.presence.REQUIRED;break;default:var n="unrecognised presence operator'"+r.str+"'";throw new t.QueryParseError(n,r.start,r.end)}var i=e.peekLexeme();if(i==null){var n="expecting term or field, found nothing";throw new t.QueryParseError(n,r.start,r.end)}switch(i.type){case t.QueryLexer.FIELD:return t.QueryParser.parseField;case t.QueryLexer.TERM:return t.QueryParser.parseTerm;default:var n="expecting term or field, found '"+i.type+"'";throw new t.QueryParseError(n,i.start,i.end)}}},t.QueryParser.parseField=function(e){var r=e.consumeLexeme();if(r!=null){if(e.query.allFields.indexOf(r.str)==-1){var n=e.query.allFields.map(function(o){return"'"+o+"'"}).join(", "),i="unrecognised field '"+r.str+"', possible fields: "+n;throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.fields=[r.str];var s=e.peekLexeme();if(s==null){var i="expecting term, found nothing";throw new t.QueryParseError(i,r.start,r.end)}switch(s.type){case t.QueryLexer.TERM:return 
t.QueryParser.parseTerm;default:var i="expecting term, found '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseTerm=function(e){var r=e.consumeLexeme();if(r!=null){e.currentClause.term=r.str.toLowerCase(),r.str.indexOf("*")!=-1&&(e.currentClause.usePipeline=!1);var n=e.peekLexeme();if(n==null){e.nextClause();return}switch(n.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+n.type+"'";throw new t.QueryParseError(i,n.start,n.end)}}},t.QueryParser.parseEditDistance=function(e){var r=e.consumeLexeme();if(r!=null){var n=parseInt(r.str,10);if(isNaN(n)){var i="edit distance must be numeric";throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.editDistance=n;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},t.QueryParser.parseBoost=function(e){var r=e.consumeLexeme();if(r!=null){var n=parseInt(r.str,10);if(isNaN(n)){var i="boost must be numeric";throw new t.QueryParseError(i,r.start,r.end)}e.currentClause.boost=n;var s=e.peekLexeme();if(s==null){e.nextClause();return}switch(s.type){case t.QueryLexer.TERM:return e.nextClause(),t.QueryParser.parseTerm;case t.QueryLexer.FIELD:return e.nextClause(),t.QueryParser.parseField;case t.QueryLexer.EDIT_DISTANCE:return t.QueryParser.parseEditDistance;case t.QueryLexer.BOOST:return t.QueryParser.parseBoost;case t.QueryLexer.PRESENCE:return e.nextClause(),t.QueryParser.parsePresence;default:var i="Unexpected lexeme type '"+s.type+"'";throw new t.QueryParseError(i,s.start,s.end)}}},function(e,r){typeof define=="function"&&define.amd?define(r):typeof se=="object"?oe.exports=r():e.lunr=r()}(this,function(){return t})})()});var ne=[];function G(t,e){ne.push({selector:e,constructor:t})}var U=class{constructor(){this.alwaysVisibleMember=null;this.createComponents(document.body),this.ensureActivePageVisible(),this.ensureFocusedElementVisible(),this.listenForCodeCopies(),window.addEventListener("hashchange",()=>this.ensureFocusedElementVisible())}createComponents(e){ne.forEach(r=>{e.querySelectorAll(r.selector).forEach(n=>{n.dataset.hasInstance||(new r.constructor({el:n,app:this}),n.dataset.hasInstance=String(!0))})})}filterChanged(){this.ensureFocusedElementVisible()}ensureActivePageVisible(){let e=document.querySelector(".tsd-navigation .current"),r=e?.parentElement;for(;r&&!r.classList.contains(".tsd-navigation");)r instanceof HTMLDetailsElement&&(r.open=!0),r=r.parentElement;if(e){let n=e.getBoundingClientRect().top-document.documentElement.clientHeight/4;document.querySelector(".site-menu").scrollTop=n}}ensureFocusedElementVisible(){if(this.alwaysVisibleMember&&(this.alwaysVisibleMember.classList.remove("always-visible"),this.alwaysVisibleMember.firstElementChild.remove(),this.alwaysVisibleMember=null),!location.hash)return;let 
e=document.getElementById(location.hash.substring(1));if(!e)return;let r=e.parentElement;for(;r&&r.tagName!=="SECTION";)r=r.parentElement;if(r&&r.offsetParent==null){this.alwaysVisibleMember=r,r.classList.add("always-visible");let n=document.createElement("p");n.classList.add("warning"),n.textContent="This member is normally hidden due to your filter settings.",r.prepend(n)}}listenForCodeCopies(){document.querySelectorAll("pre > button").forEach(e=>{let r;e.addEventListener("click",()=>{e.previousElementSibling instanceof HTMLElement&&navigator.clipboard.writeText(e.previousElementSibling.innerText.trim()),e.textContent="Copied!",e.classList.add("visible"),clearTimeout(r),r=setTimeout(()=>{e.classList.remove("visible"),r=setTimeout(()=>{e.textContent="Copy"},100)},1e3)})})}};var ie=(t,e=100)=>{let r;return()=>{clearTimeout(r),r=setTimeout(()=>t(),e)}};var ce=Ce(ae());function de(){let t=document.getElementById("tsd-search");if(!t)return;let e=document.getElementById("tsd-search-script");t.classList.add("loading"),e&&(e.addEventListener("error",()=>{t.classList.remove("loading"),t.classList.add("failure")}),e.addEventListener("load",()=>{t.classList.remove("loading"),t.classList.add("ready")}),window.searchData&&t.classList.remove("loading"));let r=document.querySelector("#tsd-search input"),n=document.querySelector("#tsd-search .results");if(!r||!n)throw new Error("The input field or the result list wrapper was not found");let i=!1;n.addEventListener("mousedown",()=>i=!0),n.addEventListener("mouseup",()=>{i=!1,t.classList.remove("has-focus")}),r.addEventListener("focus",()=>t.classList.add("has-focus")),r.addEventListener("blur",()=>{i||(i=!1,t.classList.remove("has-focus"))});let s={base:t.dataset.base+"/"};Oe(t,n,r,s)}function Oe(t,e,r,n){r.addEventListener("input",ie(()=>{Re(t,e,r,n)},200));let i=!1;r.addEventListener("keydown",s=>{i=!0,s.key=="Enter"?Fe(e,r):s.key=="Escape"?r.blur():s.key=="ArrowUp"?ue(e,-1):s.key==="ArrowDown"?ue(e,1):i=!1}),r.addEventListener("keypress",s=>{i&&s.preventDefault()}),document.body.addEventListener("keydown",s=>{s.altKey||s.ctrlKey||s.metaKey||!r.matches(":focus")&&s.key==="/"&&(r.focus(),s.preventDefault())})}function _e(t,e){t.index||window.searchData&&(e.classList.remove("loading"),e.classList.add("ready"),t.data=window.searchData,t.index=ce.Index.load(window.searchData.index))}function Re(t,e,r,n){if(_e(n,t),!n.index||!n.data)return;e.textContent="";let i=r.value.trim(),s=i?n.index.search(`*${i}*`):[];for(let o=0;oa.score-o.score);for(let o=0,a=Math.min(10,s.length);o${le(l.parent,i)}.${u}`);let d=document.createElement("li");d.classList.value=l.classes??"";let m=document.createElement("a");m.href=n.base+l.url,m.innerHTML=u,d.append(m),e.appendChild(d)}}function ue(t,e){let r=t.querySelector(".current");if(!r)r=t.querySelector(e==1?"li:first-child":"li:last-child"),r&&r.classList.add("current");else{let n=r;if(e===1)do n=n.nextElementSibling??void 0;while(n instanceof HTMLElement&&n.offsetParent==null);else do n=n.previousElementSibling??void 0;while(n instanceof HTMLElement&&n.offsetParent==null);n&&(r.classList.remove("current"),n.classList.add("current"))}}function Fe(t,e){let r=t.querySelector(".current");if(r||(r=t.querySelector("li:first-child")),r){let n=r.querySelector("a");n&&(window.location.href=n.href),e.blur()}}function le(t,e){if(e==="")return t;let r=t.toLocaleLowerCase(),n=e.toLocaleLowerCase(),i=[],s=0,o=r.indexOf(n);for(;o!=-1;)i.push(K(t.substring(s,o)),`${K(t.substring(o,o+n.length))}`),s=o+n.length,o=r.indexOf(n,s);return 
i.push(K(t.substring(s))),i.join("")}var Me={"&":"&","<":"<",">":">","'":"'",'"':"""};function K(t){return t.replace(/[&<>"'"]/g,e=>Me[e])}var P=class{constructor(e){this.el=e.el,this.app=e.app}};var M="mousedown",fe="mousemove",N="mouseup",J={x:0,y:0},he=!1,ee=!1,De=!1,D=!1,pe=/Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent);document.documentElement.classList.add(pe?"is-mobile":"not-mobile");pe&&"ontouchstart"in document.documentElement&&(De=!0,M="touchstart",fe="touchmove",N="touchend");document.addEventListener(M,t=>{ee=!0,D=!1;let e=M=="touchstart"?t.targetTouches[0]:t;J.y=e.pageY||0,J.x=e.pageX||0});document.addEventListener(fe,t=>{if(ee&&!D){let e=M=="touchstart"?t.targetTouches[0]:t,r=J.x-(e.pageX||0),n=J.y-(e.pageY||0);D=Math.sqrt(r*r+n*n)>10}});document.addEventListener(N,()=>{ee=!1});document.addEventListener("click",t=>{he&&(t.preventDefault(),t.stopImmediatePropagation(),he=!1)});var X=class extends P{constructor(r){super(r);this.className=this.el.dataset.toggle||"",this.el.addEventListener(N,n=>this.onPointerUp(n)),this.el.addEventListener("click",n=>n.preventDefault()),document.addEventListener(M,n=>this.onDocumentPointerDown(n)),document.addEventListener(N,n=>this.onDocumentPointerUp(n))}setActive(r){if(this.active==r)return;this.active=r,document.documentElement.classList.toggle("has-"+this.className,r),this.el.classList.toggle("active",r);let n=(this.active?"to-has-":"from-has-")+this.className;document.documentElement.classList.add(n),setTimeout(()=>document.documentElement.classList.remove(n),500)}onPointerUp(r){D||(this.setActive(!0),r.preventDefault())}onDocumentPointerDown(r){if(this.active){if(r.target.closest(".col-sidebar, .tsd-filter-group"))return;this.setActive(!1)}}onDocumentPointerUp(r){if(!D&&this.active&&r.target.closest(".col-sidebar")){let n=r.target.closest("a");if(n){let i=window.location.href;i.indexOf("#")!=-1&&(i=i.substring(0,i.indexOf("#"))),n.href.substring(0,i.length)==i&&setTimeout(()=>this.setActive(!1),250)}}}};var te;try{te=localStorage}catch{te={getItem(){return null},setItem(){}}}var Q=te;var me=document.head.appendChild(document.createElement("style"));me.dataset.for="filters";var Y=class extends P{constructor(r){super(r);this.key=`filter-${this.el.name}`,this.value=this.el.checked,this.el.addEventListener("change",()=>{this.setLocalStorage(this.el.checked)}),this.setLocalStorage(this.fromLocalStorage()),me.innerHTML+=`html:not(.${this.key}) .tsd-is-${this.el.name} { display: none; } +`}fromLocalStorage(){let r=Q.getItem(this.key);return r?r==="true":this.el.checked}setLocalStorage(r){Q.setItem(this.key,r.toString()),this.value=r,this.handleValueChange()}handleValueChange(){this.el.checked=this.value,document.documentElement.classList.toggle(this.key,this.value),this.app.filterChanged(),document.querySelectorAll(".tsd-index-section").forEach(r=>{r.style.display="block";let n=Array.from(r.querySelectorAll(".tsd-index-link")).every(i=>i.offsetParent==null);r.style.display=n?"none":"block"})}};var Z=class extends P{constructor(r){super(r);this.summary=this.el.querySelector(".tsd-accordion-summary"),this.icon=this.summary.querySelector("svg"),this.key=`tsd-accordion-${this.summary.dataset.key??this.summary.textContent.trim().replace(/\s+/g,"-").toLowerCase()}`;let 
n=Q.getItem(this.key);this.el.open=n?n==="true":this.el.open,this.el.addEventListener("toggle",()=>this.update()),this.update()}update(){this.icon.style.transform=`rotate(${this.el.open?0:-90}deg)`,Q.setItem(this.key,this.el.open.toString())}};function ve(t){let e=Q.getItem("tsd-theme")||"os";t.value=e,ye(e),t.addEventListener("change",()=>{Q.setItem("tsd-theme",t.value),ye(t.value)})}function ye(t){document.documentElement.dataset.theme=t}de();G(X,"a[data-toggle]");G(Z,".tsd-index-accordion");G(Y,".tsd-filter-item input[type=checkbox]");var ge=document.getElementById("tsd-theme");ge&&ve(ge);var Ae=new U;Object.defineProperty(window,"app",{value:Ae});document.querySelectorAll("summary a").forEach(t=>{t.addEventListener("click",()=>{location.assign(t.href)})});})(); +/*! Bundled license information: + +lunr/lunr.js: + (** + * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9 + * Copyright (C) 2020 Oliver Nightingale + * @license MIT + *) + (*! + * lunr.utils + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Set + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.tokenizer + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Pipeline + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Vector + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.stemmer + * Copyright (C) 2020 Oliver Nightingale + * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt + *) + (*! + * lunr.stopWordFilter + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.trimmer + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.TokenSet + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Index + * Copyright (C) 2020 Oliver Nightingale + *) + (*! + * lunr.Builder + * Copyright (C) 2020 Oliver Nightingale + *) +*/ diff --git a/dist/docs/assets/search.js b/dist/docs/assets/search.js new file mode 100644 index 0000000..9d2bd55 --- /dev/null +++ b/dist/docs/assets/search.js @@ -0,0 +1 @@ +window.searchData = 
JSON.parse("{\"rows\":[{\"kind\":128,\"name\":\"GaussianBlurBackgroundProcessor\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html\",\"classes\":\"\"},{\"kind\":512,\"name\":\"constructor\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html#constructor\",\"classes\":\"\",\"parent\":\"GaussianBlurBackgroundProcessor\"},{\"kind\":262144,\"name\":\"blurFilterRadius\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html#blurFilterRadius\",\"classes\":\"\",\"parent\":\"GaussianBlurBackgroundProcessor\"},{\"kind\":262144,\"name\":\"maskBlurRadius\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html#maskBlurRadius\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessor\"},{\"kind\":2048,\"name\":\"loadModel\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html#loadModel\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessor\"},{\"kind\":2048,\"name\":\"processFrame\",\"url\":\"classes/GaussianBlurBackgroundProcessor.html#processFrame\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessor\"},{\"kind\":256,\"name\":\"GaussianBlurBackgroundProcessorOptions\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html\",\"classes\":\"\"},{\"kind\":1024,\"name\":\"blurFilterRadius\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html#blurFilterRadius\",\"classes\":\"\",\"parent\":\"GaussianBlurBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"assetsPath\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html#assetsPath\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"debounce\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html#debounce\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"maskBlurRadius\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html#maskBlurRadius\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"pipeline\",\"url\":\"interfaces/GaussianBlurBackgroundProcessorOptions.html#pipeline\",\"classes\":\"tsd-is-inherited\",\"parent\":\"GaussianBlurBackgroundProcessorOptions\"},{\"kind\":8,\"name\":\"ImageFit\",\"url\":\"enums/ImageFit.html\",\"classes\":\"\"},{\"kind\":16,\"name\":\"Contain\",\"url\":\"enums/ImageFit.html#Contain\",\"classes\":\"\",\"parent\":\"ImageFit\"},{\"kind\":16,\"name\":\"Cover\",\"url\":\"enums/ImageFit.html#Cover\",\"classes\":\"\",\"parent\":\"ImageFit\"},{\"kind\":16,\"name\":\"Fill\",\"url\":\"enums/ImageFit.html#Fill\",\"classes\":\"\",\"parent\":\"ImageFit\"},{\"kind\":16,\"name\":\"None\",\"url\":\"enums/ImageFit.html#None\",\"classes\":\"\",\"parent\":\"ImageFit\"},{\"kind\":8,\"name\":\"Pipeline\",\"url\":\"enums/Pipeline.html\",\"classes\":\"\"},{\"kind\":16,\"name\":\"Canvas2D\",\"url\":\"enums/Pipeline.html#Canvas2D\",\"classes\":\"\",\"parent\":\"Pipeline\"},{\"kind\":16,\"name\":\"WebGL2\",\"url\":\"enums/Pipeline.html#WebGL2\",\"classes\":\"\",\"parent\":\"Pipeline\"},{\"kind\":32,\"name\":\"isSupported\",\"url\":\"variables/isSupported.html\",\"classes\":\"\"},{\"kind\":32,\"name\":\"version\",\"url\":\"variables/version.html\",\"classes\":\"\"},{\"kind\":128,\"name\":\"VirtualBackgroundProcessor\",\"url\":\"classes/VirtualBackgroundProcessor.html\",\"classes\":\"\"},{\"kind\":512,\"name\":\"constructor\",\"url\":\"classes/VirtualBackgroundProcessor.html#constructor\",\"classes\":\"\",\"parent\":\"Virt
ualBackgroundProcessor\"},{\"kind\":262144,\"name\":\"backgroundImage\",\"url\":\"classes/VirtualBackgroundProcessor.html#backgroundImage\",\"classes\":\"\",\"parent\":\"VirtualBackgroundProcessor\"},{\"kind\":262144,\"name\":\"fitType\",\"url\":\"classes/VirtualBackgroundProcessor.html#fitType\",\"classes\":\"\",\"parent\":\"VirtualBackgroundProcessor\"},{\"kind\":262144,\"name\":\"maskBlurRadius\",\"url\":\"classes/VirtualBackgroundProcessor.html#maskBlurRadius\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessor\"},{\"kind\":2048,\"name\":\"loadModel\",\"url\":\"classes/VirtualBackgroundProcessor.html#loadModel\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessor\"},{\"kind\":2048,\"name\":\"processFrame\",\"url\":\"classes/VirtualBackgroundProcessor.html#processFrame\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessor\"},{\"kind\":256,\"name\":\"VirtualBackgroundProcessorOptions\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html\",\"classes\":\"\"},{\"kind\":1024,\"name\":\"backgroundImage\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#backgroundImage\",\"classes\":\"\",\"parent\":\"VirtualBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"fitType\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#fitType\",\"classes\":\"\",\"parent\":\"VirtualBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"assetsPath\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#assetsPath\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"debounce\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#debounce\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"maskBlurRadius\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#maskBlurRadius\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessorOptions\"},{\"kind\":1024,\"name\":\"pipeline\",\"url\":\"interfaces/VirtualBackgroundProcessorOptions.html#pipeline\",\"classes\":\"tsd-is-inherited\",\"parent\":\"VirtualBackgroundProcessorOptions\"}],\"index\":{\"version\":\"2.3.9\",\"fields\":[\"name\",\"comment\"],\"fieldVectors\":[[\"name/0\",[0,32.055]],[\"comment/0\",[]],[\"name/1\",[1,26.946]],[\"comment/1\",[]],[\"name/2\",[2,26.946]],[\"comment/2\",[]],[\"name/3\",[3,21.068]],[\"comment/3\",[]],[\"name/4\",[4,26.946]],[\"comment/4\",[]],[\"name/5\",[5,26.946]],[\"comment/5\",[]],[\"name/6\",[6,32.055]],[\"comment/6\",[]],[\"name/7\",[2,26.946]],[\"comment/7\",[]],[\"name/8\",[7,26.946]],[\"comment/8\",[]],[\"name/9\",[8,26.946]],[\"comment/9\",[]],[\"name/10\",[3,21.068]],[\"comment/10\",[]],[\"name/11\",[9,23.582]],[\"comment/11\",[]],[\"name/12\",[10,32.055]],[\"comment/12\",[]],[\"name/13\",[11,32.055]],[\"comment/13\",[]],[\"name/14\",[12,32.055]],[\"comment/14\",[]],[\"name/15\",[13,32.055]],[\"comment/15\",[]],[\"name/16\",[14,32.055]],[\"comment/16\",[]],[\"name/17\",[9,23.582]],[\"comment/17\",[]],[\"name/18\",[15,32.055]],[\"comment/18\",[]],[\"name/19\",[16,32.055]],[\"comment/19\",[]],[\"name/20\",[17,32.055]],[\"comment/20\",[]],[\"name/21\",[18,32.055]],[\"comment/21\",[]],[\"name/22\",[19,32.055]],[\"comment/22\",[]],[\"name/23\",[1,26.946]],[\"comment/23\",[]],[\"name/24\",[20,26.946]],[\"comment/24\",[]],[\"name/25\",[21,26.946]],[\"comment/25\",[]],[\"name/26\",[3,21.068]],[\"comment/26\",[]],[\"name/27\",[4,26.946]],[\"comment/27\",[]],[\"name/28\",[5,26.946]],[\"co
mment/28\",[]],[\"name/29\",[22,32.055]],[\"comment/29\",[]],[\"name/30\",[20,26.946]],[\"comment/30\",[]],[\"name/31\",[21,26.946]],[\"comment/31\",[]],[\"name/32\",[7,26.946]],[\"comment/32\",[]],[\"name/33\",[8,26.946]],[\"comment/33\",[]],[\"name/34\",[3,21.068]],[\"comment/34\",[]],[\"name/35\",[9,23.582]],[\"comment/35\",[]]],\"invertedIndex\":[[\"assetspath\",{\"_index\":7,\"name\":{\"8\":{},\"32\":{}},\"comment\":{}}],[\"backgroundimage\",{\"_index\":20,\"name\":{\"24\":{},\"30\":{}},\"comment\":{}}],[\"blurfilterradius\",{\"_index\":2,\"name\":{\"2\":{},\"7\":{}},\"comment\":{}}],[\"canvas2d\",{\"_index\":15,\"name\":{\"18\":{}},\"comment\":{}}],[\"constructor\",{\"_index\":1,\"name\":{\"1\":{},\"23\":{}},\"comment\":{}}],[\"contain\",{\"_index\":11,\"name\":{\"13\":{}},\"comment\":{}}],[\"cover\",{\"_index\":12,\"name\":{\"14\":{}},\"comment\":{}}],[\"debounce\",{\"_index\":8,\"name\":{\"9\":{},\"33\":{}},\"comment\":{}}],[\"fill\",{\"_index\":13,\"name\":{\"15\":{}},\"comment\":{}}],[\"fittype\",{\"_index\":21,\"name\":{\"25\":{},\"31\":{}},\"comment\":{}}],[\"gaussianblurbackgroundprocessor\",{\"_index\":0,\"name\":{\"0\":{}},\"comment\":{}}],[\"gaussianblurbackgroundprocessoroptions\",{\"_index\":6,\"name\":{\"6\":{}},\"comment\":{}}],[\"imagefit\",{\"_index\":10,\"name\":{\"12\":{}},\"comment\":{}}],[\"issupported\",{\"_index\":17,\"name\":{\"20\":{}},\"comment\":{}}],[\"loadmodel\",{\"_index\":4,\"name\":{\"4\":{},\"27\":{}},\"comment\":{}}],[\"maskblurradius\",{\"_index\":3,\"name\":{\"3\":{},\"10\":{},\"26\":{},\"34\":{}},\"comment\":{}}],[\"none\",{\"_index\":14,\"name\":{\"16\":{}},\"comment\":{}}],[\"pipeline\",{\"_index\":9,\"name\":{\"11\":{},\"17\":{},\"35\":{}},\"comment\":{}}],[\"processframe\",{\"_index\":5,\"name\":{\"5\":{},\"28\":{}},\"comment\":{}}],[\"version\",{\"_index\":18,\"name\":{\"21\":{}},\"comment\":{}}],[\"virtualbackgroundprocessor\",{\"_index\":19,\"name\":{\"22\":{}},\"comment\":{}}],[\"virtualbackgroundprocessoroptions\",{\"_index\":22,\"name\":{\"29\":{}},\"comment\":{}}],[\"webgl2\",{\"_index\":16,\"name\":{\"19\":{}},\"comment\":{}}]],\"pipeline\":[]}}"); \ No newline at end of file diff --git a/dist/docs/assets/style.css b/dist/docs/assets/style.css new file mode 100644 index 0000000..258146f --- /dev/null +++ b/dist/docs/assets/style.css @@ -0,0 +1,1379 @@ +:root { + /* Light */ + --light-color-background: #f2f4f8; + --light-color-background-secondary: #eff0f1; + --light-color-warning-text: #222; + --light-color-background-warning: #e6e600; + --light-color-icon-background: var(--light-color-background); + --light-color-accent: #c5c7c9; + --light-color-active-menu-item: var(--light-color-accent); + --light-color-text: #222; + --light-color-text-aside: #6e6e6e; + --light-color-link: #1f70c2; + + --light-color-ts-project: #b111c9; + --light-color-ts-module: var(--light-color-ts-project); + --light-color-ts-namespace: var(--light-color-ts-project); + --light-color-ts-enum: #7e6f15; + --light-color-ts-enum-member: var(--light-color-ts-enum); + --light-color-ts-variable: #4760ec; + --light-color-ts-function: #572be7; + --light-color-ts-class: #1f70c2; + --light-color-ts-interface: #108024; + --light-color-ts-constructor: var(--light-color-ts-class); + --light-color-ts-property: var(--light-color-ts-variable); + --light-color-ts-method: var(--light-color-ts-function); + --light-color-ts-call-signature: var(--light-color-ts-method); + --light-color-ts-index-signature: var(--light-color-ts-property); + --light-color-ts-constructor-signature: 
var(--light-color-ts-constructor); + --light-color-ts-parameter: var(--light-color-ts-variable); + /* type literal not included as links will never be generated to it */ + --light-color-ts-type-parameter: var(--light-color-ts-type-alias); + --light-color-ts-accessor: var(--light-color-ts-property); + --light-color-ts-get-signature: var(--light-color-ts-accessor); + --light-color-ts-set-signature: var(--light-color-ts-accessor); + --light-color-ts-type-alias: #d51270; + /* reference not included as links will be colored with the kind that it points to */ + + --light-external-icon: url("data:image/svg+xml;utf8,"); + --light-color-scheme: light; + + /* Dark */ + --dark-color-background: #2b2e33; + --dark-color-background-secondary: #1e2024; + --dark-color-background-warning: #bebe00; + --dark-color-warning-text: #222; + --dark-color-icon-background: var(--dark-color-background-secondary); + --dark-color-accent: #9096a2; + --dark-color-active-menu-item: #5d5d6a; + --dark-color-text: #f5f5f5; + --dark-color-text-aside: #dddddd; + --dark-color-link: #00aff4; + + --dark-color-ts-project: #e358ff; + --dark-color-ts-module: var(--dark-color-ts-project); + --dark-color-ts-namespace: var(--dark-color-ts-project); + --dark-color-ts-enum: #f4d93e; + --dark-color-ts-enum-member: var(--dark-color-ts-enum); + --dark-color-ts-variable: #798dff; + --dark-color-ts-function: #a280ff; + --dark-color-ts-class: #8ac4ff; + --dark-color-ts-interface: #6cff87; + --dark-color-ts-constructor: var(--dark-color-ts-class); + --dark-color-ts-property: var(--dark-color-ts-variable); + --dark-color-ts-method: var(--dark-color-ts-function); + --dark-color-ts-call-signature: var(--dark-color-ts-method); + --dark-color-ts-index-signature: var(--dark-color-ts-property); + --dark-color-ts-constructor-signature: var(--dark-color-ts-constructor); + --dark-color-ts-parameter: var(--dark-color-ts-variable); + /* type literal not included as links will never be generated to it */ + --dark-color-ts-type-parameter: var(--dark-color-ts-type-alias); + --dark-color-ts-accessor: var(--dark-color-ts-property); + --dark-color-ts-get-signature: var(--dark-color-ts-accessor); + --dark-color-ts-set-signature: var(--dark-color-ts-accessor); + --dark-color-ts-type-alias: #ff6492; + /* reference not included as links will be colored with the kind that it points to */ + + --dark-external-icon: url("data:image/svg+xml;utf8,"); + --dark-color-scheme: dark; +} + +@media (prefers-color-scheme: light) { + :root { + --color-background: var(--light-color-background); + --color-background-secondary: var(--light-color-background-secondary); + --color-background-warning: var(--light-color-background-warning); + --color-warning-text: var(--light-color-warning-text); + --color-icon-background: var(--light-color-icon-background); + --color-accent: var(--light-color-accent); + --color-active-menu-item: var(--light-color-active-menu-item); + --color-text: var(--light-color-text); + --color-text-aside: var(--light-color-text-aside); + --color-link: var(--light-color-link); + + --color-ts-module: var(--light-color-ts-module); + --color-ts-namespace: var(--light-color-ts-namespace); + --color-ts-enum: var(--light-color-ts-enum); + --color-ts-enum-member: var(--light-color-ts-enum-member); + --color-ts-variable: var(--light-color-ts-variable); + --color-ts-function: var(--light-color-ts-function); + --color-ts-class: var(--light-color-ts-class); + --color-ts-interface: var(--light-color-ts-interface); + --color-ts-constructor: var(--light-color-ts-constructor); + 
--color-ts-property: var(--light-color-ts-property); + --color-ts-method: var(--light-color-ts-method); + --color-ts-call-signature: var(--light-color-ts-call-signature); + --color-ts-index-signature: var(--light-color-ts-index-signature); + --color-ts-constructor-signature: var( + --light-color-ts-constructor-signature + ); + --color-ts-parameter: var(--light-color-ts-parameter); + --color-ts-type-parameter: var(--light-color-ts-type-parameter); + --color-ts-accessor: var(--light-color-ts-accessor); + --color-ts-get-signature: var(--light-color-ts-get-signature); + --color-ts-set-signature: var(--light-color-ts-set-signature); + --color-ts-type-alias: var(--light-color-ts-type-alias); + + --external-icon: var(--light-external-icon); + --color-scheme: var(--light-color-scheme); + } +} + +@media (prefers-color-scheme: dark) { + :root { + --color-background: var(--dark-color-background); + --color-background-secondary: var(--dark-color-background-secondary); + --color-background-warning: var(--dark-color-background-warning); + --color-warning-text: var(--dark-color-warning-text); + --color-icon-background: var(--dark-color-icon-background); + --color-accent: var(--dark-color-accent); + --color-active-menu-item: var(--dark-color-active-menu-item); + --color-text: var(--dark-color-text); + --color-text-aside: var(--dark-color-text-aside); + --color-link: var(--dark-color-link); + + --color-ts-module: var(--dark-color-ts-module); + --color-ts-namespace: var(--dark-color-ts-namespace); + --color-ts-enum: var(--dark-color-ts-enum); + --color-ts-enum-member: var(--dark-color-ts-enum-member); + --color-ts-variable: var(--dark-color-ts-variable); + --color-ts-function: var(--dark-color-ts-function); + --color-ts-class: var(--dark-color-ts-class); + --color-ts-interface: var(--dark-color-ts-interface); + --color-ts-constructor: var(--dark-color-ts-constructor); + --color-ts-property: var(--dark-color-ts-property); + --color-ts-method: var(--dark-color-ts-method); + --color-ts-call-signature: var(--dark-color-ts-call-signature); + --color-ts-index-signature: var(--dark-color-ts-index-signature); + --color-ts-constructor-signature: var( + --dark-color-ts-constructor-signature + ); + --color-ts-parameter: var(--dark-color-ts-parameter); + --color-ts-type-parameter: var(--dark-color-ts-type-parameter); + --color-ts-accessor: var(--dark-color-ts-accessor); + --color-ts-get-signature: var(--dark-color-ts-get-signature); + --color-ts-set-signature: var(--dark-color-ts-set-signature); + --color-ts-type-alias: var(--dark-color-ts-type-alias); + + --external-icon: var(--dark-external-icon); + --color-scheme: var(--dark-color-scheme); + } +} + +html { + color-scheme: var(--color-scheme); +} + +body { + margin: 0; +} + +:root[data-theme="light"] { + --color-background: var(--light-color-background); + --color-background-secondary: var(--light-color-background-secondary); + --color-background-warning: var(--light-color-background-warning); + --color-warning-text: var(--light-color-warning-text); + --color-icon-background: var(--light-color-icon-background); + --color-accent: var(--light-color-accent); + --color-active-menu-item: var(--light-color-active-menu-item); + --color-text: var(--light-color-text); + --color-text-aside: var(--light-color-text-aside); + --color-link: var(--light-color-link); + + --color-ts-module: var(--light-color-ts-module); + --color-ts-namespace: var(--light-color-ts-namespace); + --color-ts-enum: var(--light-color-ts-enum); + --color-ts-enum-member: var(--light-color-ts-enum-member); + 
--color-ts-variable: var(--light-color-ts-variable); + --color-ts-function: var(--light-color-ts-function); + --color-ts-class: var(--light-color-ts-class); + --color-ts-interface: var(--light-color-ts-interface); + --color-ts-constructor: var(--light-color-ts-constructor); + --color-ts-property: var(--light-color-ts-property); + --color-ts-method: var(--light-color-ts-method); + --color-ts-call-signature: var(--light-color-ts-call-signature); + --color-ts-index-signature: var(--light-color-ts-index-signature); + --color-ts-constructor-signature: var( + --light-color-ts-constructor-signature + ); + --color-ts-parameter: var(--light-color-ts-parameter); + --color-ts-type-parameter: var(--light-color-ts-type-parameter); + --color-ts-accessor: var(--light-color-ts-accessor); + --color-ts-get-signature: var(--light-color-ts-get-signature); + --color-ts-set-signature: var(--light-color-ts-set-signature); + --color-ts-type-alias: var(--light-color-ts-type-alias); + + --external-icon: var(--light-external-icon); + --color-scheme: var(--light-color-scheme); +} + +:root[data-theme="dark"] { + --color-background: var(--dark-color-background); + --color-background-secondary: var(--dark-color-background-secondary); + --color-background-warning: var(--dark-color-background-warning); + --color-warning-text: var(--dark-color-warning-text); + --color-icon-background: var(--dark-color-icon-background); + --color-accent: var(--dark-color-accent); + --color-active-menu-item: var(--dark-color-active-menu-item); + --color-text: var(--dark-color-text); + --color-text-aside: var(--dark-color-text-aside); + --color-link: var(--dark-color-link); + + --color-ts-module: var(--dark-color-ts-module); + --color-ts-namespace: var(--dark-color-ts-namespace); + --color-ts-enum: var(--dark-color-ts-enum); + --color-ts-enum-member: var(--dark-color-ts-enum-member); + --color-ts-variable: var(--dark-color-ts-variable); + --color-ts-function: var(--dark-color-ts-function); + --color-ts-class: var(--dark-color-ts-class); + --color-ts-interface: var(--dark-color-ts-interface); + --color-ts-constructor: var(--dark-color-ts-constructor); + --color-ts-property: var(--dark-color-ts-property); + --color-ts-method: var(--dark-color-ts-method); + --color-ts-call-signature: var(--dark-color-ts-call-signature); + --color-ts-index-signature: var(--dark-color-ts-index-signature); + --color-ts-constructor-signature: var( + --dark-color-ts-constructor-signature + ); + --color-ts-parameter: var(--dark-color-ts-parameter); + --color-ts-type-parameter: var(--dark-color-ts-type-parameter); + --color-ts-accessor: var(--dark-color-ts-accessor); + --color-ts-get-signature: var(--dark-color-ts-get-signature); + --color-ts-set-signature: var(--dark-color-ts-set-signature); + --color-ts-type-alias: var(--dark-color-ts-type-alias); + + --external-icon: var(--dark-external-icon); + --color-scheme: var(--dark-color-scheme); +} + +.always-visible, +.always-visible .tsd-signatures { + display: inherit !important; +} + +h1, +h2, +h3, +h4, +h5, +h6 { + line-height: 1.2; +} + +h1 > a, +h2 > a, +h3 > a, +h4 > a, +h5 > a, +h6 > a { + text-decoration: none; + color: var(--color-text); +} + +h1 { + font-size: 1.875rem; + margin: 0.67rem 0; +} + +h2 { + font-size: 1.5rem; + margin: 0.83rem 0; +} + +h3 { + font-size: 1.25rem; + margin: 1rem 0; +} + +h4 { + font-size: 1.05rem; + margin: 1.33rem 0; +} + +h5 { + font-size: 1rem; + margin: 1.5rem 0; +} + +h6 { + font-size: 0.875rem; + margin: 2.33rem 0; +} + +.uppercase { + text-transform: uppercase; +} + +dl, +menu, 
+ol, +ul { + margin: 1em 0; +} + +dd { + margin: 0 0 0 40px; +} + +.container { + max-width: 1700px; + padding: 0 2rem; +} + +/* Footer */ +.tsd-generator { + border-top: 1px solid var(--color-accent); + padding-top: 1rem; + padding-bottom: 1rem; + max-height: 3.5rem; +} + +.tsd-generator > p { + margin-top: 0; + margin-bottom: 0; + padding: 0 1rem; +} + +.container-main { + margin: 0 auto; + /* toolbar, footer, margin */ + min-height: calc(100vh - 41px - 56px - 4rem); +} + +@keyframes fade-in { + from { + opacity: 0; + } + to { + opacity: 1; + } +} +@keyframes fade-out { + from { + opacity: 1; + visibility: visible; + } + to { + opacity: 0; + } +} +@keyframes fade-in-delayed { + 0% { + opacity: 0; + } + 33% { + opacity: 0; + } + 100% { + opacity: 1; + } +} +@keyframes fade-out-delayed { + 0% { + opacity: 1; + visibility: visible; + } + 66% { + opacity: 0; + } + 100% { + opacity: 0; + } +} +@keyframes pop-in-from-right { + from { + transform: translate(100%, 0); + } + to { + transform: translate(0, 0); + } +} +@keyframes pop-out-to-right { + from { + transform: translate(0, 0); + visibility: visible; + } + to { + transform: translate(100%, 0); + } +} +body { + background: var(--color-background); + font-family: "Segoe UI", sans-serif; + font-size: 16px; + color: var(--color-text); +} + +a { + color: var(--color-link); + text-decoration: none; +} +a:hover { + text-decoration: underline; +} +a.external[target="_blank"] { + background-image: var(--external-icon); + background-position: top 3px right; + background-repeat: no-repeat; + padding-right: 13px; +} + +code, +pre { + font-family: Menlo, Monaco, Consolas, "Courier New", monospace; + padding: 0.2em; + margin: 0; + font-size: 0.875rem; + border-radius: 0.8em; +} + +pre { + position: relative; + white-space: pre; + white-space: pre-wrap; + word-wrap: break-word; + padding: 10px; + border: 1px solid var(--color-accent); +} +pre code { + padding: 0; + font-size: 100%; +} +pre > button { + position: absolute; + top: 10px; + right: 10px; + opacity: 0; + transition: opacity 0.1s; + box-sizing: border-box; +} +pre:hover > button, +pre > button.visible { + opacity: 1; +} + +blockquote { + margin: 1em 0; + padding-left: 1em; + border-left: 4px solid gray; +} + +.tsd-typography { + line-height: 1.333em; +} +.tsd-typography ul { + list-style: square; + padding: 0 0 0 20px; + margin: 0; +} +.tsd-typography .tsd-index-panel h3, +.tsd-index-panel .tsd-typography h3, +.tsd-typography h4, +.tsd-typography h5, +.tsd-typography h6 { + font-size: 1em; +} +.tsd-typography h5, +.tsd-typography h6 { + font-weight: normal; +} +.tsd-typography p, +.tsd-typography ul, +.tsd-typography ol { + margin: 1em 0; +} +.tsd-typography table { + border-collapse: collapse; + border: none; +} +.tsd-typography td, +.tsd-typography th { + padding: 6px 13px; + border: 1px solid var(--color-accent); +} +.tsd-typography thead, +.tsd-typography tr:nth-child(even) { + background-color: var(--color-background-secondary); +} + +.tsd-breadcrumb { + margin: 0; + padding: 0; + color: var(--color-text-aside); +} +.tsd-breadcrumb a { + color: var(--color-text-aside); + text-decoration: none; +} +.tsd-breadcrumb a:hover { + text-decoration: underline; +} +.tsd-breadcrumb li { + display: inline; +} +.tsd-breadcrumb li:after { + content: " / "; +} + +.tsd-comment-tags { + display: flex; + flex-direction: column; +} +dl.tsd-comment-tag-group { + display: flex; + align-items: center; + overflow: hidden; + margin: 0.5em 0; +} +dl.tsd-comment-tag-group dt { + display: flex; + margin-right: 
0.5em; + font-size: 0.875em; + font-weight: normal; +} +dl.tsd-comment-tag-group dd { + margin: 0; +} +code.tsd-tag { + padding: 0.25em 0.4em; + border: 0.1em solid var(--color-accent); + margin-right: 0.25em; + font-size: 70%; +} +h1 code.tsd-tag:first-of-type { + margin-left: 0.25em; +} + +dl.tsd-comment-tag-group dd:before, +dl.tsd-comment-tag-group dd:after { + content: " "; +} +dl.tsd-comment-tag-group dd pre, +dl.tsd-comment-tag-group dd:after { + clear: both; +} +dl.tsd-comment-tag-group p { + margin: 0; +} + +.tsd-panel.tsd-comment .lead { + font-size: 1.1em; + line-height: 1.333em; + margin-bottom: 2em; +} +.tsd-panel.tsd-comment .lead:last-child { + margin-bottom: 0; +} + +.tsd-filter-visibility h4 { + font-size: 1rem; + padding-top: 0.75rem; + padding-bottom: 0.5rem; + margin: 0; +} +.tsd-filter-item:not(:last-child) { + margin-bottom: 0.5rem; +} +.tsd-filter-input { + display: flex; + width: fit-content; + width: -moz-fit-content; + align-items: center; + user-select: none; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + cursor: pointer; +} +.tsd-filter-input input[type="checkbox"] { + cursor: pointer; + position: absolute; + width: 1.5em; + height: 1.5em; + opacity: 0; +} +.tsd-filter-input input[type="checkbox"]:disabled { + pointer-events: none; +} +.tsd-filter-input svg { + cursor: pointer; + width: 1.5em; + height: 1.5em; + margin-right: 0.5em; + border-radius: 0.33em; + /* Leaving this at full opacity breaks event listeners on Firefox. + Don't remove unless you know what you're doing. */ + opacity: 0.99; +} +.tsd-filter-input input[type="checkbox"]:focus + svg { + transform: scale(0.95); +} +.tsd-filter-input input[type="checkbox"]:focus:not(:focus-visible) + svg { + transform: scale(1); +} +.tsd-checkbox-background { + fill: var(--color-accent); +} +input[type="checkbox"]:checked ~ svg .tsd-checkbox-checkmark { + stroke: var(--color-text); +} +.tsd-filter-input input:disabled ~ svg > .tsd-checkbox-background { + fill: var(--color-background); + stroke: var(--color-accent); + stroke-width: 0.25rem; +} +.tsd-filter-input input:disabled ~ svg > .tsd-checkbox-checkmark { + stroke: var(--color-accent); +} + +.tsd-theme-toggle { + padding-top: 0.75rem; +} +.tsd-theme-toggle > h4 { + display: inline; + vertical-align: middle; + margin-right: 0.75rem; +} + +.tsd-hierarchy { + list-style: square; + margin: 0; +} +.tsd-hierarchy .target { + font-weight: bold; +} + +.tsd-panel-group.tsd-index-group { + margin-bottom: 0; +} +.tsd-index-panel .tsd-index-list { + list-style: none; + line-height: 1.333em; + margin: 0; + padding: 0.25rem 0 0 0; + overflow: hidden; + display: grid; + grid-template-columns: repeat(3, 1fr); + column-gap: 1rem; + grid-template-rows: auto; +} +@media (max-width: 1024px) { + .tsd-index-panel .tsd-index-list { + grid-template-columns: repeat(2, 1fr); + } +} +@media (max-width: 768px) { + .tsd-index-panel .tsd-index-list { + grid-template-columns: repeat(1, 1fr); + } +} +.tsd-index-panel .tsd-index-list li { + -webkit-page-break-inside: avoid; + -moz-page-break-inside: avoid; + -ms-page-break-inside: avoid; + -o-page-break-inside: avoid; + page-break-inside: avoid; +} + +.tsd-flag { + display: inline-block; + padding: 0.25em 0.4em; + border-radius: 4px; + color: var(--color-comment-tag-text); + background-color: var(--color-comment-tag); + text-indent: 0; + font-size: 75%; + line-height: 1; + font-weight: normal; +} + +.tsd-anchor { + position: relative; + top: -100px; +} + +.tsd-member { + position: relative; +} +.tsd-member 
.tsd-anchor + h3 { + display: flex; + align-items: center; + margin-top: 0; + margin-bottom: 0; + border-bottom: none; +} + +.tsd-navigation.settings { + margin: 1rem 0; +} +.tsd-navigation > a, +.tsd-navigation .tsd-accordion-summary { + width: calc(100% - 0.5rem); +} +.tsd-navigation a, +.tsd-navigation summary > span, +.tsd-page-navigation a { + display: inline-flex; + align-items: center; + padding: 0.25rem; + color: var(--color-text); + text-decoration: none; + box-sizing: border-box; +} +.tsd-navigation a.current, +.tsd-page-navigation a.current { + background: var(--color-active-menu-item); +} +.tsd-navigation a:hover, +.tsd-page-navigation a:hover { + text-decoration: underline; +} +.tsd-navigation ul, +.tsd-page-navigation ul { + margin-top: 0; + margin-bottom: 0; + padding: 0; + list-style: none; +} +.tsd-navigation li, +.tsd-page-navigation li { + padding: 0; + max-width: 100%; +} +.tsd-nested-navigation { + margin-left: 3rem; +} +.tsd-nested-navigation > li > details { + margin-left: -1.5rem; +} +.tsd-small-nested-navigation { + margin-left: 1.5rem; +} +.tsd-small-nested-navigation > li > details { + margin-left: -1.5rem; +} + +.tsd-nested-navigation > li > a, +.tsd-nested-navigation > li > span { + width: calc(100% - 1.75rem - 0.5rem); +} + +.tsd-page-navigation ul { + padding-left: 1.75rem; +} + +#tsd-sidebar-links a { + margin-top: 0; + margin-bottom: 0.5rem; + line-height: 1.25rem; +} +#tsd-sidebar-links a:last-of-type { + margin-bottom: 0; +} + +a.tsd-index-link { + padding: 0.25rem 0 !important; + font-size: 1rem; + line-height: 1.25rem; + display: inline-flex; + align-items: center; + color: var(--color-text); +} +.tsd-accordion-summary { + list-style-type: none; /* hide marker on non-safari */ + outline: none; /* broken on safari, so just hide it */ +} +.tsd-accordion-summary::-webkit-details-marker { + display: none; /* hide marker on safari */ +} +.tsd-accordion-summary, +.tsd-accordion-summary a { + user-select: none; + -moz-user-select: none; + -webkit-user-select: none; + -ms-user-select: none; + + cursor: pointer; +} +.tsd-accordion-summary a { + width: calc(100% - 1.5rem); +} +.tsd-accordion-summary > * { + margin-top: 0; + margin-bottom: 0; + padding-top: 0; + padding-bottom: 0; +} +.tsd-index-accordion .tsd-accordion-summary > svg { + margin-left: 0.25rem; +} +.tsd-index-content > :not(:first-child) { + margin-top: 0.75rem; +} +.tsd-index-heading { + margin-top: 1.5rem; + margin-bottom: 0.75rem; +} + +.tsd-kind-icon { + margin-right: 0.5rem; + width: 1.25rem; + height: 1.25rem; + min-width: 1.25rem; + min-height: 1.25rem; +} +.tsd-kind-icon path { + transform-origin: center; + transform: scale(1.1); +} +.tsd-signature > .tsd-kind-icon { + margin-right: 0.8rem; +} + +.tsd-panel { + margin-bottom: 2.5rem; +} +.tsd-panel.tsd-member { + margin-bottom: 4rem; +} +.tsd-panel:empty { + display: none; +} +.tsd-panel > h1, +.tsd-panel > h2, +.tsd-panel > h3 { + margin: 1.5rem -1.5rem 0.75rem -1.5rem; + padding: 0 1.5rem 0.75rem 1.5rem; +} +.tsd-panel > h1.tsd-before-signature, +.tsd-panel > h2.tsd-before-signature, +.tsd-panel > h3.tsd-before-signature { + margin-bottom: 0; + border-bottom: none; +} + +.tsd-panel-group { + margin: 4rem 0; +} +.tsd-panel-group.tsd-index-group { + margin: 2rem 0; +} +.tsd-panel-group.tsd-index-group details { + margin: 2rem 0; +} + +#tsd-search { + transition: background-color 0.2s; +} +#tsd-search .title { + position: relative; + z-index: 2; +} +#tsd-search .field { + position: absolute; + left: 0; + top: 0; + right: 2.5rem; + height: 
100%; +} +#tsd-search .field input { + box-sizing: border-box; + position: relative; + top: -50px; + z-index: 1; + width: 100%; + padding: 0 10px; + opacity: 0; + outline: 0; + border: 0; + background: transparent; + color: var(--color-text); +} +#tsd-search .field label { + position: absolute; + overflow: hidden; + right: -40px; +} +#tsd-search .field input, +#tsd-search .title, +#tsd-toolbar-links a { + transition: opacity 0.2s; +} +#tsd-search .results { + position: absolute; + visibility: hidden; + top: 40px; + width: 100%; + margin: 0; + padding: 0; + list-style: none; + box-shadow: 0 0 4px rgba(0, 0, 0, 0.25); +} +#tsd-search .results li { + padding: 0 10px; + background-color: var(--color-background); +} +#tsd-search .results li:nth-child(even) { + background-color: var(--color-background-secondary); +} +#tsd-search .results li.state { + display: none; +} +#tsd-search .results li.current:not(.no-results), +#tsd-search .results li:hover:not(.no-results) { + background-color: var(--color-accent); +} +#tsd-search .results a { + display: block; +} +#tsd-search .results a:before { + top: 10px; +} +#tsd-search .results span.parent { + color: var(--color-text-aside); + font-weight: normal; +} +#tsd-search.has-focus { + background-color: var(--color-accent); +} +#tsd-search.has-focus .field input { + top: 0; + opacity: 1; +} +#tsd-search.has-focus .title, +#tsd-search.has-focus #tsd-toolbar-links a { + z-index: 0; + opacity: 0; +} +#tsd-search.has-focus .results { + visibility: visible; +} +#tsd-search.loading .results li.state.loading { + display: block; +} +#tsd-search.failure .results li.state.failure { + display: block; +} + +#tsd-toolbar-links { + position: absolute; + top: 0; + right: 2rem; + height: 100%; + display: flex; + align-items: center; + justify-content: flex-end; +} +#tsd-toolbar-links a { + margin-left: 1.5rem; +} +#tsd-toolbar-links a:hover { + text-decoration: underline; +} + +.tsd-signature { + margin: 0 0 1rem 0; + padding: 1rem 0.5rem; + border: 1px solid var(--color-accent); + font-family: Menlo, Monaco, Consolas, "Courier New", monospace; + font-size: 14px; + overflow-x: auto; +} + +.tsd-signature-symbol { + color: var(--color-text-aside); + font-weight: normal; +} + +.tsd-signature-type { + font-style: italic; + font-weight: normal; +} + +.tsd-signatures { + padding: 0; + margin: 0 0 1em 0; + list-style-type: none; +} +.tsd-signatures .tsd-signature { + margin: 0; + border-color: var(--color-accent); + border-width: 1px 0; + transition: background-color 0.1s; +} +.tsd-description .tsd-signatures .tsd-signature { + border-width: 1px; +} + +ul.tsd-parameter-list, +ul.tsd-type-parameter-list { + list-style: square; + margin: 0; + padding-left: 20px; +} +ul.tsd-parameter-list > li.tsd-parameter-signature, +ul.tsd-type-parameter-list > li.tsd-parameter-signature { + list-style: none; + margin-left: -20px; +} +ul.tsd-parameter-list h5, +ul.tsd-type-parameter-list h5 { + font-size: 16px; + margin: 1em 0 0.5em 0; +} +.tsd-sources { + margin-top: 1rem; + font-size: 0.875em; +} +.tsd-sources a { + color: var(--color-text-aside); + text-decoration: underline; +} +.tsd-sources ul { + list-style: none; + padding: 0; +} + +.tsd-page-toolbar { + position: sticky; + z-index: 1; + top: 0; + left: 0; + width: 100%; + color: var(--color-text); + background: var(--color-background-secondary); + border-bottom: 1px var(--color-accent) solid; + transition: transform 0.3s ease-in-out; +} +.tsd-page-toolbar a { + color: var(--color-text); + text-decoration: none; +} +.tsd-page-toolbar 
a.title { + font-weight: bold; +} +.tsd-page-toolbar a.title:hover { + text-decoration: underline; +} +.tsd-page-toolbar .tsd-toolbar-contents { + display: flex; + justify-content: space-between; + height: 2.5rem; + margin: 0 auto; +} +.tsd-page-toolbar .table-cell { + position: relative; + white-space: nowrap; + line-height: 40px; +} +.tsd-page-toolbar .table-cell:first-child { + width: 100%; +} +.tsd-page-toolbar .tsd-toolbar-icon { + box-sizing: border-box; + line-height: 0; + padding: 12px 0; +} + +.tsd-widget { + display: inline-block; + overflow: hidden; + opacity: 0.8; + height: 40px; + transition: + opacity 0.1s, + background-color 0.2s; + vertical-align: bottom; + cursor: pointer; +} +.tsd-widget:hover { + opacity: 0.9; +} +.tsd-widget.active { + opacity: 1; + background-color: var(--color-accent); +} +.tsd-widget.no-caption { + width: 40px; +} +.tsd-widget.no-caption:before { + margin: 0; +} + +.tsd-widget.options, +.tsd-widget.menu { + display: none; +} +input[type="checkbox"] + .tsd-widget:before { + background-position: -120px 0; +} +input[type="checkbox"]:checked + .tsd-widget:before { + background-position: -160px 0; +} + +img { + max-width: 100%; +} + +.tsd-anchor-icon { + display: inline-flex; + align-items: center; + margin-left: 0.5rem; + vertical-align: middle; + color: var(--color-text); +} + +.tsd-anchor-icon svg { + width: 1em; + height: 1em; + visibility: hidden; +} + +.tsd-anchor-link:hover > .tsd-anchor-icon svg { + visibility: visible; +} + +.deprecated { + text-decoration: line-through; +} + +.warning { + padding: 1rem; + color: var(--color-warning-text); + background: var(--color-background-warning); +} + +.tsd-kind-project { + color: var(--color-ts-project); +} +.tsd-kind-module { + color: var(--color-ts-module); +} +.tsd-kind-namespace { + color: var(--color-ts-namespace); +} +.tsd-kind-enum { + color: var(--color-ts-enum); +} +.tsd-kind-enum-member { + color: var(--color-ts-enum-member); +} +.tsd-kind-variable { + color: var(--color-ts-variable); +} +.tsd-kind-function { + color: var(--color-ts-function); +} +.tsd-kind-class { + color: var(--color-ts-class); +} +.tsd-kind-interface { + color: var(--color-ts-interface); +} +.tsd-kind-constructor { + color: var(--color-ts-constructor); +} +.tsd-kind-property { + color: var(--color-ts-property); +} +.tsd-kind-method { + color: var(--color-ts-method); +} +.tsd-kind-call-signature { + color: var(--color-ts-call-signature); +} +.tsd-kind-index-signature { + color: var(--color-ts-index-signature); +} +.tsd-kind-constructor-signature { + color: var(--color-ts-constructor-signature); +} +.tsd-kind-parameter { + color: var(--color-ts-parameter); +} +.tsd-kind-type-literal { + color: var(--color-ts-type-literal); +} +.tsd-kind-type-parameter { + color: var(--color-ts-type-parameter); +} +.tsd-kind-accessor { + color: var(--color-ts-accessor); +} +.tsd-kind-get-signature { + color: var(--color-ts-get-signature); +} +.tsd-kind-set-signature { + color: var(--color-ts-set-signature); +} +.tsd-kind-type-alias { + color: var(--color-ts-type-alias); +} + +/* if we have a kind icon, don't color the text by kind */ +.tsd-kind-icon ~ span { + color: var(--color-text); +} + +* { + scrollbar-width: thin; + scrollbar-color: var(--color-accent) var(--color-icon-background); +} + +*::-webkit-scrollbar { + width: 0.75rem; +} + +*::-webkit-scrollbar-track { + background: var(--color-icon-background); +} + +*::-webkit-scrollbar-thumb { + background-color: var(--color-accent); + border-radius: 999rem; + border: 0.25rem solid 
var(--color-icon-background); +} + +/* mobile */ +@media (max-width: 769px) { + .tsd-widget.options, + .tsd-widget.menu { + display: inline-block; + } + + .container-main { + display: flex; + } + html .col-content { + float: none; + max-width: 100%; + width: 100%; + } + html .col-sidebar { + position: fixed !important; + overflow-y: auto; + -webkit-overflow-scrolling: touch; + z-index: 1024; + top: 0 !important; + bottom: 0 !important; + left: auto !important; + right: 0 !important; + padding: 1.5rem 1.5rem 0 0; + width: 75vw; + visibility: hidden; + background-color: var(--color-background); + transform: translate(100%, 0); + } + html .col-sidebar > *:last-child { + padding-bottom: 20px; + } + html .overlay { + content: ""; + display: block; + position: fixed; + z-index: 1023; + top: 0; + left: 0; + right: 0; + bottom: 0; + background-color: rgba(0, 0, 0, 0.75); + visibility: hidden; + } + + .to-has-menu .overlay { + animation: fade-in 0.4s; + } + + .to-has-menu .col-sidebar { + animation: pop-in-from-right 0.4s; + } + + .from-has-menu .overlay { + animation: fade-out 0.4s; + } + + .from-has-menu .col-sidebar { + animation: pop-out-to-right 0.4s; + } + + .has-menu body { + overflow: hidden; + } + .has-menu .overlay { + visibility: visible; + } + .has-menu .col-sidebar { + visibility: visible; + transform: translate(0, 0); + display: flex; + flex-direction: column; + gap: 1.5rem; + max-height: 100vh; + padding: 1rem 2rem; + } + .has-menu .tsd-navigation { + max-height: 100%; + } +} + +/* one sidebar */ +@media (min-width: 770px) { + .container-main { + display: grid; + grid-template-columns: minmax(0, 1fr) minmax(0, 2fr); + grid-template-areas: "sidebar content"; + margin: 2rem auto; + } + + .col-sidebar { + grid-area: sidebar; + } + .col-content { + grid-area: content; + padding: 0 1rem; + } +} +@media (min-width: 770px) and (max-width: 1399px) { + .col-sidebar { + max-height: calc(100vh - 2rem - 42px); + overflow: auto; + position: sticky; + top: 42px; + padding-top: 1rem; + } + .site-menu { + margin-top: 1rem; + } +} + +/* two sidebars */ +@media (min-width: 1200px) { + .container-main { + grid-template-columns: minmax(0, 1fr) minmax(0, 2.5fr) minmax(0, 20rem); + grid-template-areas: "sidebar content toc"; + } + + .col-sidebar { + display: contents; + } + + .page-menu { + grid-area: toc; + padding-left: 1rem; + } + .site-menu { + grid-area: sidebar; + } + + .site-menu { + margin-top: 1rem 0; + } + + .page-menu, + .site-menu { + max-height: calc(100vh - 2rem - 42px); + overflow: auto; + position: sticky; + top: 42px; + } +} diff --git a/dist/docs/classes/GaussianBlurBackgroundProcessor.html b/dist/docs/classes/GaussianBlurBackgroundProcessor.html new file mode 100644 index 0000000..3c745d3 --- /dev/null +++ b/dist/docs/classes/GaussianBlurBackgroundProcessor.html @@ -0,0 +1,202 @@ +GaussianBlurBackgroundProcessor | @twilio/video-processors
+
+ +
+
+
+
+ +

Class GaussianBlurBackgroundProcessor

+
+

The GaussianBlurBackgroundProcessor, when added to a VideoTrack, +applies a Gaussian blur filter on the background in each video frame +and leaves the foreground (person(s)) untouched. Each instance of +GaussianBlurBackgroundProcessor should be added to only one VideoTrack +at a time to prevent overlapping of image data from multiple VideoTracks.

+
+
+

Example

import { createLocalVideoTrack } from 'twilio-video';
import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors';

const blurBackground = new GaussianBlurBackgroundProcessor({
assetsPath: 'https://my-server-path/assets',
pipeline: Pipeline.WebGL2,
debounce: true,
});

blurBackground.loadModel().then(() => {
createLocalVideoTrack({
// Increasing the capture resolution decreases the output FPS
// especially on browsers that do not support SIMD
// such as desktop Safari and iOS browsers, or on Chrome
// with capture resolutions above 640x480 for webgl2.
width: 640,
height: 480,
// Any frame rate above 24 fps on desktop browsers increases CPU
// usage without noticeable increase in quality.
frameRate: 24
}).then(track => {
track.addProcessor(blurBackground, {
inputFrameBufferType: 'video',
outputFrameBufferContextType: 'webgl2',
});
});
}); +
+
+
+

Hierarchy

+
    +
  • BackgroundProcessor +
      +
    • GaussianBlurBackgroundProcessor
+
+
+
+ +
+
+

Constructors

+
+
+

Accessors

+
+
+

Methods

+
+
+

Constructors

+
+ +
+
+

Accessors

+
+ +
    +
  • get blurFilterRadius(): number
  • +
  • +

    The current background blur filter radius in pixels.

    +
    +

    Returns number

    +
  • +
  • set blurFilterRadius(radius): void
  • +
  • +

    Set a new background blur filter radius in pixels.

    +
    +
    +

    Parameters

    +
      +
    • +
      radius: number
    +

    Returns void
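A minimal sketch of adjusting the radius at runtime, assuming a blurBackground instance constructed as in the example above (the value 30 is arbitrary):

console.log(blurBackground.blurFilterRadius); // 15 by default
blurBackground.blurFilterRadius = 30; // apply a stronger blur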

    +
+
+ +
    +
  • get maskBlurRadius(): number
  • +
  • +

    The current blur radius when smoothing out the edges of the person's mask.

    +
    +

    Returns number

    +
  • +
  • set maskBlurRadius(radius): void
  • +
  • +

    Set a new blur radius to be used when smoothing out the edges of the person's mask.

    +
    +
    +

    Parameters

    +
      +
    • +
      radius: number
    +

    Returns void

    +
+
+

Methods

+
+ +
    + +
  • +

    Load the segmentation model. +Call this method before attaching the processor to ensure +video frames are processed correctly.

    +
    +

    Returns Promise<void>

    +
+
+ +
    + +
  • +

Apply a transform to the background of an input video frame while leaving +the foreground (person(s)) untouched. Any exception detected will +result in the frame being dropped.

    +
    +
    +

    Parameters

    +
      +
    • +
      inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement
      +

      The source of the input frame to process. +
      +
      +OffscreenCanvas - Good for canvas-related processing +that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]]. +
      +
+HTMLCanvasElement - This is recommended on browsers +that don't support OffscreenCanvas, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]]. +
      +
      +HTMLVideoElement - Recommended when using [[Pipeline.WebGL2]] but +works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]]. +

      +
    • +
    • +
      outputFrameBuffer: HTMLCanvasElement
      +

      The output frame buffer to use to draw the processed frame.

      +
    +

    Returns Promise<void>

    +
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/classes/VirtualBackgroundProcessor.html b/dist/docs/classes/VirtualBackgroundProcessor.html new file mode 100644 index 0000000..b1b1c79 --- /dev/null +++ b/dist/docs/classes/VirtualBackgroundProcessor.html @@ -0,0 +1,227 @@ +VirtualBackgroundProcessor | @twilio/video-processors
+
+ +
+
+
+
+ +

Class VirtualBackgroundProcessor

+
+

The VirtualBackgroundProcessor, when added to a VideoTrack, +replaces the background in each video frame with a given image, +and leaves the foreground (person(s)) untouched. Each instance of +VirtualBackgroundProcessor should be added to only one VideoTrack +at a time to prevent overlapping of image data from multiple VideoTracks.

+
+
+

Example

import { createLocalVideoTrack } from 'twilio-video';
import { Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors';

let virtualBackground;
const img = new Image();

img.onload = () => {
virtualBackground = new VirtualBackgroundProcessor({
assetsPath: 'https://my-server-path/assets',
backgroundImage: img,
pipeline: Pipeline.WebGL2,

// Desktop Safari and iOS browsers do not support SIMD.
// Set debounce to true to achieve acceptable performance.
debounce: isSafari(),
});

virtualBackground.loadModel().then(() => {
createLocalVideoTrack({
// Increasing the capture resolution decreases the output FPS
// especially on browsers that do not support SIMD
// such as desktop Safari and iOS browsers, or on Chrome
// with capture resolutions above 640x480 for webgl2.
width: 640,
height: 480,
// Any frame rate above 24 fps on desktop browsers increases CPU
// usage without noticeable increase in quality.
frameRate: 24
}).then(track => {
track.addProcessor(virtualBackground, {
inputFrameBufferType: 'video',
outputFrameBufferContextType: 'webgl2',
});
});
});
};
img.src = '/background.jpg'; +
+
+
+

Hierarchy

+
    +
  • BackgroundProcessor +
      +
    • VirtualBackgroundProcessor
+
+
+
+ +
+
+

Constructors

+
+
+

Accessors

+
+
+

Methods

+
+
+

Constructors

+
+ +
+
+

Accessors

+
+ +
    +
  • get backgroundImage(): HTMLImageElement
  • +
  • +

    The HTMLImageElement representing the current background image.

    +
    +

    Returns HTMLImageElement

    +
  • +
  • set backgroundImage(image): void
  • +
  • +

Set an HTMLImageElement as the new background image. +An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow +security guidelines +when loaded from a different origin. Failing to do so will result in an empty output frame.

    +
    +
    +

    Parameters

    +
      +
    • +
      image: HTMLImageElement
    +

    Returns void
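A minimal sketch of swapping the background image at runtime, assuming a virtualBackground instance as in the example above; the image URL is hypothetical, and setting crossOrigin is one way to satisfy the security guidelines for cross-origin images:

const newImage = new Image();
newImage.crossOrigin = 'anonymous'; // needed only when loading from a different origin
newImage.onload = () => {
  // Assign only after the image has fully loaded; otherwise an error is raised.
  virtualBackground.backgroundImage = newImage;
};
newImage.src = 'https://my-server-path/new-background.jpg';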

    +
+
+ +
    +
  • get fitType(): ImageFit
  • +
  • +

    The current [[ImageFit]] for positioning of the background image in the viewport.

    +
    +

    Returns ImageFit

    +
  • +
  • set fitType(fitType): void
  • +
  • +

    Set a new [[ImageFit]] to be used for positioning the background image in the viewport.

    +
    +
    +

    Parameters

    +
    +

    Returns void

    +
+
+ +
    +
  • get maskBlurRadius(): number
  • +
  • +

    The current blur radius when smoothing out the edges of the person's mask.

    +
    +

    Returns number

    +
  • +
  • set maskBlurRadius(radius): void
  • +
  • +

    Set a new blur radius to be used when smoothing out the edges of the person's mask.

    +
    +
    +

    Parameters

    +
      +
    • +
      radius: number
    +

    Returns void

    +
+
+

Methods

+
+ +
    + +
  • +

    Load the segmentation model. +Call this method before attaching the processor to ensure +video frames are processed correctly.

    +
    +

    Returns Promise<void>

    +
+
+ +
    + +
  • +

Apply a transform to the background of an input video frame while leaving +the foreground (person(s)) untouched. Any exception detected will +result in the frame being dropped.

    +
    +
    +

    Parameters

    +
      +
    • +
      inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement
      +

      The source of the input frame to process. +
      +
      +OffscreenCanvas - Good for canvas-related processing +that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]]. +
      +
+HTMLCanvasElement - This is recommended on browsers +that don't support OffscreenCanvas, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]]. +
      +
      +HTMLVideoElement - Recommended when using [[Pipeline.WebGL2]] but +works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]]. +

      +
    • +
    • +
      outputFrameBuffer: HTMLCanvasElement
      +

      The output frame buffer to use to draw the processed frame.

      +
    +

    Returns Promise<void>

    +
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/enums/ImageFit.html b/dist/docs/enums/ImageFit.html new file mode 100644 index 0000000..7d00262 --- /dev/null +++ b/dist/docs/enums/ImageFit.html @@ -0,0 +1,97 @@ +ImageFit | @twilio/video-processors
+
+ +
+
+
+
+ +

Enumeration ImageFit

+
+

ImageFit specifies the positioning of an image inside a viewport.

+
+
+
+
+
+ +
+
+

Enumeration Members

+
+
+

Enumeration Members

+
+ +
Contain: "Contain"
+

Scale the image up or down to fill the viewport while preserving the aspect ratio. +The image will be fully visible, but empty space will appear in the viewport if +the aspect ratios do not match.

+
+
+
+ +
Cover: "Cover"
+

Scale the image to fill both the height and width of the viewport while preserving +the aspect ratio, cropping the image if the aspect ratios do not match.

+
+
+
+ +
Fill: "Fill"
+

Stretch the image to fill the viewport regardless of aspect ratio.

+
+
+
+ +
None: "None"
+

Ignore height and width and use the original size.
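These values are loosely analogous to the CSS object-fit keywords. A minimal sketch of selecting a fit at runtime, assuming a virtualBackground instance (per [[VirtualBackgroundProcessorOptions]], only the Canvas2D pipeline honors this setting):

import { ImageFit } from '@twilio/video-processors';

// Letterbox the background image instead of cropping it.
virtualBackground.fitType = ImageFit.Contain;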

+
+
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/enums/Pipeline.html b/dist/docs/enums/Pipeline.html new file mode 100644 index 0000000..065d555 --- /dev/null +++ b/dist/docs/enums/Pipeline.html @@ -0,0 +1,84 @@ +Pipeline | @twilio/video-processors
+
+ +
+
+
+
+ +

Enumeration Pipeline

+
+

Specifies which pipeline to use when processing video frames.

+
+
+
+
+
+ +
+
+

Enumeration Members

+
+
+

Enumeration Members

+
+ +
Canvas2D: "Canvas2D"
+

Use the canvas 2D rendering context. Some browsers, such as Safari, do not +have full support for this feature. Please test your application to make sure it works as intended. See the +browser compatibility page +for reference.

+
+
+
+ +
WebGL2: "WebGL2"
+

Use the canvas WebGL2 rendering context. Major browsers support this feature; however, it does not work +on some older browser versions. Please test your application to make sure it works as intended. See the +browser compatibility page +for reference.
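A minimal sketch of choosing a pipeline at construction time; the webgl2 context check below is one possible feature test, not the library's own detection (see isSupported):

import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors';

// Prefer WebGL2, falling back to Canvas2D when the context is unavailable.
const canUseWebGL2 = !!document.createElement('canvas').getContext('webgl2');
const processor = new GaussianBlurBackgroundProcessor({
  assetsPath: 'https://my-server-path/assets',
  pipeline: canUseWebGL2 ? Pipeline.WebGL2 : Pipeline.Canvas2D,
});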

+
+
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/index.html b/dist/docs/index.html new file mode 100644 index 0000000..644b49c --- /dev/null +++ b/dist/docs/index.html @@ -0,0 +1,99 @@ +@twilio/video-processors
+
+ +
+
+
+
+

@twilio/video-processors

+

Twilio Video Processors

+

[!WARNING]
We are no longer allowing new customers to onboard to Twilio Video. Effective December 5th, 2024, Twilio Video will reach End of Life (EOL) and will cease to function for all customers. Customers may transition to any video provider they choose; however, we recommend migrating to the Zoom Video SDK, and we have prepared a Migration Guide. Additional information on this EOL is available in our Help Center here.

+
+

Twilio Video Processors is a collection of video processing tools that can be used with the Twilio Video JavaScript SDK to apply transformations and filters to a VideoTrack.

+

   See it live here!

+

Features

The following Video Processors are provided to apply transformations and filters to a person's background. You can also use them as a reference for creating your own Video Processors that can be used with the Twilio Video JavaScript SDK.

+ +

Prerequisites

+

Note

The Node.js and NPM requirements do not apply if the goal is to use this library as a dependency of your project. They apply only if you want to check out the source code, build the artifacts, and/or run tests.

+

Installation

NPM

You can install directly from npm.

+
npm install @twilio/video-processors --save
+
+

Using this method, you can import twilio-video-processors like so:

+
import * as VideoProcessors from '@twilio/video-processors';
+
+

Script tag

You can also copy twilio-video-processors.js from the dist/build folder and include it directly in your web app using a <script> tag.

+
<script src="https://my-server-path/twilio-video-processors.js"></script>
+
+

Using this method, twilio-video-processors.js will set a browser global:

+
const VideoProcessors = Twilio.VideoProcessors;
+
+

Assets

To achieve the best performance, the VideoProcessors use WebAssembly to run TensorFlow Lite for person segmentation. You need to serve the tflite model and binaries so they can be loaded properly. These files can be downloaded from the dist/build folder. Check the API docs for details and the examples folder for reference.
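As a concrete sketch, the served assets include the segmentation model and tflite loader scripts shipped in dist/build (selfie_segmentation_landscape.tflite, tflite-1-0-0.js, and tflite-simd-1-0-0.js); the hosting path below is hypothetical:

import { GaussianBlurBackgroundProcessor } from '@twilio/video-processors';

// Serve the files copied from dist/build under this root path.
const processor = new GaussianBlurBackgroundProcessor({
  assetsPath: 'https://my-server-path/assets',
});
// loadModel() fetches the model and binaries from assetsPath.
await processor.loadModel();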

+

Usage

These processors run TensorFlow Lite using the MediaPipe Selfie Segmentation Landscape Model and require WebAssembly SIMD support to achieve the best performance. We recommend that, when calling Video.createLocalVideoTrack, the video capture constraints be set to a 24 fps frame rate with 640x480 capture dimensions. Higher resolutions can still be used for increased accuracy, but they may degrade performance, resulting in a lower output frame rate on low-powered devices.
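A minimal sketch of the recommended capture constraints; the full wiring with addProcessor appears on each processor's class page:

import { createLocalVideoTrack } from 'twilio-video';

// 640x480 at 24 fps is the recommended starting point; higher values
// can lower the output frame rate on low-powered devices.
const track = await createLocalVideoTrack({ width: 640, height: 480, frameRate: 24 });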

+

Best Practice

Please check out the following pages for best practice.

+ +
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/interfaces/GaussianBlurBackgroundProcessorOptions.html b/dist/docs/interfaces/GaussianBlurBackgroundProcessorOptions.html new file mode 100644 index 0000000..8c0f55d --- /dev/null +++ b/dist/docs/interfaces/GaussianBlurBackgroundProcessorOptions.html @@ -0,0 +1,140 @@ +GaussianBlurBackgroundProcessorOptions | @twilio/video-processors
+
+ +
+
+
+
+ +

Interface GaussianBlurBackgroundProcessorOptions

+
+

Options passed to [[GaussianBlurBackgroundProcessor]] constructor.

+
+
+
+

Hierarchy

+
    +
  • BackgroundProcessorOptions +
      +
    • GaussianBlurBackgroundProcessorOptions
+
+
+
+ +
+
+

Properties

+
+ +
assetsPath: string
+

The VideoProcessors load assets dynamically depending on certain browser features. +You need to serve all the assets and provide the root path so they can be referenced properly. +These assets can be copied from the dist/build folder; you can add this step as part of your deployment process.

+
+
+

Example


+
+For virtual background: +
+ +
const virtualBackground = new VirtualBackgroundProcessor({
assetsPath: 'https://my-server-path/assets',
backgroundImage: img,
});
await virtualBackground.loadModel(); +
+
+For blur background: +
+ +
const blurBackground = new GaussianBlurBackgroundProcessor({
assetsPath: 'https://my-server-path/assets'
});
await blurBackground.loadModel(); +
+
+
+ +
blurFilterRadius?: number
+

The background blur filter radius to use in pixels.

+
+
+

Default

15
+
+
+
+ +
debounce?: boolean
+

Whether to skip processing every other frame to improve the output frame rate, at the cost of reduced accuracy.

+
+
+

Default

true
+
+
+
+ +
maskBlurRadius?: number
+

The blur radius to use when smoothing out the edges of the person's mask.

+
+
+

Default

5
+
+
+
+ +
pipeline?: Pipeline
+

Specifies which pipeline to use when processing video frames.

+
+
+

Default

'WebGL2'
+
+
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/interfaces/VirtualBackgroundProcessorOptions.html b/dist/docs/interfaces/VirtualBackgroundProcessorOptions.html new file mode 100644 index 0000000..b53ca2d --- /dev/null +++ b/dist/docs/interfaces/VirtualBackgroundProcessorOptions.html @@ -0,0 +1,152 @@ +VirtualBackgroundProcessorOptions | @twilio/video-processors
+
+ +
+
+
+
+ +

Interface VirtualBackgroundProcessorOptions

+
+

Options passed to [[VirtualBackgroundProcessor]] constructor.

+
+
+
+

Hierarchy

+
    +
  • BackgroundProcessorOptions +
      +
    • VirtualBackgroundProcessorOptions
+
+
+
+ +
+
+

Properties

+
+ +
assetsPath: string
+

The VideoProcessors load assets dynamically depending on certain browser features. +You need to serve all the assets and provide the root path so they can be referenced properly. +These assets can be copied from the dist/build folder; you can add this step as part of your deployment process.

+
+
+

Example


+
+For virtual background: +
+ +
const virtualBackground = new VirtualBackgroundProcessor({
assetsPath: 'https://my-server-path/assets',
backgroundImage: img,
});
await virtualBackground.loadModel(); +
+
+For blur background: +
+ +
const blurBackground = new GaussianBlurBackgroundProcessor({
assetsPath: 'https://my-server-path/assets'
});
await blurBackground.loadModel(); +
+
+
+ +
backgroundImage: HTMLImageElement
+

The HTMLImageElement to use for background replacement. +An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow +security guidelines +when loaded from a different origin. Failing to do so will result in an empty output frame.

+
+
+
+ +
debounce?: boolean
+

Whether to skip processing every other frame to improve the output frame rate, at the cost of reduced accuracy.

+
+
+

Default

true
+
+
+
+ +
fitType?: ImageFit
+

The [[ImageFit]] to use for positioning of the background image in the viewport. Only the Canvas2D [[Pipeline]] +supports this option. WebGL2 ignores this option and falls back to Cover.

+
+
+

Default

'Fill'
+
+
+
+ +
maskBlurRadius?: number
+

The blur radius to use when smoothing out the edges of the person's mask.

+
+
+

Default

5
+
+
+
+ +
pipeline?: Pipeline
+

Specifies which pipeline to use when processing video frames.

+
+
+

Default

'WebGL2'
+
+
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/modules.html b/dist/docs/modules.html new file mode 100644 index 0000000..3acea24 --- /dev/null +++ b/dist/docs/modules.html @@ -0,0 +1,64 @@ +@twilio/video-processors
+
+ +
+ +
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/variables/isSupported.html b/dist/docs/variables/isSupported.html new file mode 100644 index 0000000..dd98e01 --- /dev/null +++ b/dist/docs/variables/isSupported.html @@ -0,0 +1,55 @@ +isSupported | @twilio/video-processors
+
+ +
+
+
+
+ +

Variable isSupportedConst

+
isSupported: boolean = ...
+

Check if the current browser is officially supported by twilio-video-processors.js. +This is set to true for browsers that support the canvas +2D or +webgl2 +rendering contexts.

+
+
+

Example

import { isSupported } from '@twilio/video-processors';

if (isSupported) {
// Initialize the background processors
} +
+
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/dist/docs/variables/version.html b/dist/docs/variables/version.html new file mode 100644 index 0000000..0e43dac --- /dev/null +++ b/dist/docs/variables/version.html @@ -0,0 +1,48 @@ +version | @twilio/video-processors
+
+ +
+
+
+
+ +

Variable versionConst

+
version: string = '2.1.0'
+

The current version of the library.

+
+
+
+
+

Generated using TypeDoc

+
\ No newline at end of file diff --git a/es5/constants.d.ts b/es5/constants.d.ts new file mode 100644 index 0000000..17516b1 --- /dev/null +++ b/es5/constants.d.ts @@ -0,0 +1,10 @@ +import { Dimensions } from './types'; +export declare const BLUR_FILTER_RADIUS = 15; +export declare const DEBOUNCE_COUNT = 2; +export declare const MASK_BLUR_RADIUS = 5; +export declare const HISTORY_COUNT_MULTIPLIER = 3; +export declare const PERSON_PROBABILITY_THRESHOLD = 0.4; +export declare const MODEL_NAME = "selfie_segmentation_landscape.tflite"; +export declare const TFLITE_LOADER_NAME = "tflite-1-0-0.js"; +export declare const TFLITE_SIMD_LOADER_NAME = "tflite-simd-1-0-0.js"; +export declare const WASM_INFERENCE_DIMENSIONS: Dimensions; diff --git a/es5/constants.js b/es5/constants.js new file mode 100644 index 0000000..b04d889 --- /dev/null +++ b/es5/constants.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WASM_INFERENCE_DIMENSIONS = exports.TFLITE_SIMD_LOADER_NAME = exports.TFLITE_LOADER_NAME = exports.MODEL_NAME = exports.PERSON_PROBABILITY_THRESHOLD = exports.HISTORY_COUNT_MULTIPLIER = exports.MASK_BLUR_RADIUS = exports.DEBOUNCE_COUNT = exports.BLUR_FILTER_RADIUS = void 0; +exports.BLUR_FILTER_RADIUS = 15; +exports.DEBOUNCE_COUNT = 2; +exports.MASK_BLUR_RADIUS = 5; +exports.HISTORY_COUNT_MULTIPLIER = 3; +exports.PERSON_PROBABILITY_THRESHOLD = 0.4; +exports.MODEL_NAME = 'selfie_segmentation_landscape.tflite'; +exports.TFLITE_LOADER_NAME = 'tflite-1-0-0.js'; +exports.TFLITE_SIMD_LOADER_NAME = 'tflite-simd-1-0-0.js'; +exports.WASM_INFERENCE_DIMENSIONS = { + width: 256, + height: 144, +}; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/es5/constants.js.map b/es5/constants.js.map new file mode 100644 index 0000000..2fd4005 --- /dev/null +++ b/es5/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../lib/constants.ts"],"names":[],"mappings":";;;AAEa,QAAA,kBAAkB,GAAG,EAAE,CAAC;AACxB,QAAA,cAAc,GAAG,CAAC,CAAC;AACnB,QAAA,gBAAgB,GAAG,CAAC,CAAC;AACrB,QAAA,wBAAwB,GAAG,CAAC,CAAC;AAC7B,QAAA,4BAA4B,GAAG,GAAG,CAAC;AACnC,QAAA,UAAU,GAAG,sCAAsC,CAAC;AACpD,QAAA,kBAAkB,GAAG,iBAAiB,CAAC;AACvC,QAAA,uBAAuB,GAAG,sBAAsB,CAAC;AAEjD,QAAA,yBAAyB,GAAe;IACnD,KAAK,EAAE,GAAG;IACV,MAAM,EAAE,GAAG;CACZ,CAAC","sourcesContent":["import { Dimensions } from './types';\n\nexport const BLUR_FILTER_RADIUS = 15;\nexport const DEBOUNCE_COUNT = 2;\nexport const MASK_BLUR_RADIUS = 5;\nexport const HISTORY_COUNT_MULTIPLIER = 3;\nexport const PERSON_PROBABILITY_THRESHOLD = 0.4;\nexport const MODEL_NAME = 'selfie_segmentation_landscape.tflite';\nexport const TFLITE_LOADER_NAME = 'tflite-1-0-0.js';\nexport const TFLITE_SIMD_LOADER_NAME = 'tflite-simd-1-0-0.js';\n\nexport const WASM_INFERENCE_DIMENSIONS: Dimensions = {\n width: 256,\n height: 144,\n};\n"]} \ No newline at end of file diff --git a/es5/index.d.ts b/es5/index.d.ts new file mode 100644 index 0000000..dd49bd2 --- /dev/null +++ b/es5/index.d.ts @@ -0,0 +1,6 @@ +import { GaussianBlurBackgroundProcessor, GaussianBlurBackgroundProcessorOptions } from './processors/background/GaussianBlurBackgroundProcessor'; +import { VirtualBackgroundProcessor, VirtualBackgroundProcessorOptions } from './processors/background/VirtualBackgroundProcessor'; +import { ImageFit, Pipeline } from './types'; +import { isSupported } from './utils/support'; +import { version } from './utils/version'; +export { GaussianBlurBackgroundProcessor, GaussianBlurBackgroundProcessorOptions, 
ImageFit, Pipeline, isSupported, version, VirtualBackgroundProcessor, VirtualBackgroundProcessorOptions, }; diff --git a/es5/index.js b/es5/index.js new file mode 100644 index 0000000..8a60daa --- /dev/null +++ b/es5/index.js @@ -0,0 +1,30 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VirtualBackgroundProcessor = exports.version = exports.isSupported = exports.Pipeline = exports.ImageFit = exports.GaussianBlurBackgroundProcessor = void 0; +var GaussianBlurBackgroundProcessor_1 = require("./processors/background/GaussianBlurBackgroundProcessor"); +Object.defineProperty(exports, "GaussianBlurBackgroundProcessor", { enumerable: true, get: function () { return GaussianBlurBackgroundProcessor_1.GaussianBlurBackgroundProcessor; } }); +var VirtualBackgroundProcessor_1 = require("./processors/background/VirtualBackgroundProcessor"); +Object.defineProperty(exports, "VirtualBackgroundProcessor", { enumerable: true, get: function () { return VirtualBackgroundProcessor_1.VirtualBackgroundProcessor; } }); +var types_1 = require("./types"); +Object.defineProperty(exports, "ImageFit", { enumerable: true, get: function () { return types_1.ImageFit; } }); +Object.defineProperty(exports, "Pipeline", { enumerable: true, get: function () { return types_1.Pipeline; } }); +var support_1 = require("./utils/support"); +Object.defineProperty(exports, "isSupported", { enumerable: true, get: function () { return support_1.isSupported; } }); +var version_1 = require("./utils/version"); +Object.defineProperty(exports, "version", { enumerable: true, get: function () { return version_1.version; } }); +if (typeof window !== 'undefined') { + window.Twilio = window.Twilio || {}; + window.Twilio.VideoProcessors = __assign(__assign({}, window.Twilio.VideoProcessors), { GaussianBlurBackgroundProcessor: GaussianBlurBackgroundProcessor_1.GaussianBlurBackgroundProcessor, ImageFit: types_1.ImageFit, Pipeline: types_1.Pipeline, isSupported: support_1.isSupported, version: version_1.version, VirtualBackgroundProcessor: VirtualBackgroundProcessor_1.VirtualBackgroundProcessor }); +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/es5/index.js.map b/es5/index.js.map new file mode 100644 index 0000000..e0b06e3 --- /dev/null +++ b/es5/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../lib/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,2GAAkJ;AAoBhJ,gHApBO,iEAA+B,OAoBP;AAnBjC,iGAAmI;AAyBjI,2GAzBO,uDAA0B,OAyBP;AAxB5B,iCAA6C;AAoB3C,yFApBO,gBAAQ,OAoBP;AACR,yFArBiB,gBAAQ,OAqBjB;AApBV,2CAA8C;AAqB5C,4FArBO,qBAAW,OAqBP;AApBb,2CAA0C;AAqBxC,wFArBO,iBAAO,OAqBP;AAnBT,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;IACjC,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAC;IACpC,MAAM,CAAC,MAAM,CAAC,eAAe,yBACxB,MAAM,CAAC,MAAM,CAAC,eAAe,KAChC,+BAA+B,mEAAA,EAC/B,QAAQ,kBAAA,EACR,QAAQ,kBAAA,EACR,WAAW,uBAAA,EACX,OAAO,mBAAA,EACP,0BAA0B,yDAAA,GAC3B,CAAC;CACH","sourcesContent":["import { GaussianBlurBackgroundProcessor, GaussianBlurBackgroundProcessorOptions } from './processors/background/GaussianBlurBackgroundProcessor';\nimport { VirtualBackgroundProcessor, VirtualBackgroundProcessorOptions } from 
'./processors/background/VirtualBackgroundProcessor';\nimport { ImageFit, Pipeline } from './types';\nimport { isSupported } from './utils/support';\nimport { version } from './utils/version';\n\nif (typeof window !== 'undefined') {\n window.Twilio = window.Twilio || {};\n window.Twilio.VideoProcessors = {\n ...window.Twilio.VideoProcessors,\n GaussianBlurBackgroundProcessor,\n ImageFit,\n Pipeline,\n isSupported,\n version,\n VirtualBackgroundProcessor,\n };\n}\n\nexport {\n GaussianBlurBackgroundProcessor,\n GaussianBlurBackgroundProcessorOptions,\n ImageFit,\n Pipeline,\n isSupported,\n version,\n VirtualBackgroundProcessor,\n VirtualBackgroundProcessorOptions,\n};\n"]} \ No newline at end of file diff --git a/es5/processors/Processor.d.ts b/es5/processors/Processor.d.ts new file mode 100644 index 0000000..3b5fac6 --- /dev/null +++ b/es5/processors/Processor.d.ts @@ -0,0 +1,13 @@ +/** + * @private + * The [[Processor]] is an abstract class for building your own custom processors. + */ +export declare abstract class Processor { + /** + * Applies a transform to an input frame and draw the results to an output frame buffer. + * The frame will be dropped if this method raises an exception. + * @param inputFrameBuffer - The source of the input frame to process. + * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame. + */ + abstract processFrame(inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement, outputFrameBuffer: HTMLCanvasElement): Promise | void; +} diff --git a/es5/processors/Processor.js b/es5/processors/Processor.js new file mode 100644 index 0000000..3bf6156 --- /dev/null +++ b/es5/processors/Processor.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Processor = void 0; +/** + * @private + * The [[Processor]] is an abstract class for building your own custom processors. 
+ */ +var Processor = /** @class */ (function () { + function Processor() { + } + return Processor; +}()); +exports.Processor = Processor; +//# sourceMappingURL=Processor.js.map \ No newline at end of file diff --git a/es5/processors/Processor.js.map b/es5/processors/Processor.js.map new file mode 100644 index 0000000..ff5705d --- /dev/null +++ b/es5/processors/Processor.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Processor.js","sourceRoot":"","sources":["../../lib/processors/Processor.ts"],"names":[],"mappings":";;;AAAA;;;GAGG;AACH;IAAA;IAWA,CAAC;IAAD,gBAAC;AAAD,CAAC,AAXD,IAWC;AAXqB,8BAAS","sourcesContent":["/**\n * @private\n * The [[Processor]] is an abstract class for building your own custom processors.\n */\nexport abstract class Processor {\n\n /**\n * Applies a transform to an input frame and draw the results to an output frame buffer.\n * The frame will be dropped if this method raises an exception.\n * @param inputFrameBuffer - The source of the input frame to process.\n * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame.\n */\n abstract processFrame(\n inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement,\n outputFrameBuffer: HTMLCanvasElement): Promise | void;\n}\n"]} \ No newline at end of file diff --git a/es5/processors/background/BackgroundProcessor.d.ts b/es5/processors/background/BackgroundProcessor.d.ts new file mode 100644 index 0000000..0044fc6 --- /dev/null +++ b/es5/processors/background/BackgroundProcessor.d.ts @@ -0,0 +1,148 @@ +import { Processor } from '../Processor'; +import { Dimensions, Pipeline, WebGL2PipelineType } from '../../types'; +import { buildWebGL2Pipeline } from '../webgl2'; +/** + * @private + */ +export interface BackgroundProcessorOptions { + /** + * The VideoProcessors load assets dynamically depending on certain browser features. + * You need to serve all the assets and provide the root path so they can be referenced properly. + * These assets can be copied from the `dist/build` folder which you can add as part of your deployment process. + * @example + *
+ *
+ * For virtual background: + *
+ * + * ```ts + * const virtualBackground = new VirtualBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * backgroundImage: img, + * }); + * await virtualBackground.loadModel(); + * ``` + * + *
+ * For blur background: + *
+ * + * ```ts + * const blurBackground = new GaussianBlurBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets' + * }); + * await blurBackground.loadModel(); + * ``` + */ + assetsPath: string; + /** + * Whether to skip processing every other frame to improve the output frame rate, but reducing accuracy in the process. + * @default + * ```html + * true + * ``` + */ + debounce?: boolean; + /** + * @private + */ + inferenceDimensions?: Dimensions; + /** + * The blur radius to use when smoothing out the edges of the person's mask. + * @default + * ```html + * 5 + * ``` + */ + maskBlurRadius?: number; + /** + * @private + */ + personProbabilityThreshold?: number; + /** + * Specifies which pipeline to use when processing video frames. + * @default + * ```html + * 'WebGL2' + * ``` + */ + pipeline?: Pipeline; +} +/** + * @private + */ +export declare abstract class BackgroundProcessor extends Processor { + private static _loadedScripts; + protected _backgroundImage: HTMLImageElement | null; + protected _outputCanvas: HTMLCanvasElement | null; + protected _outputContext: CanvasRenderingContext2D | WebGL2RenderingContext | null; + protected _webgl2Pipeline: ReturnType | null; + private _assetsPath; + private _benchmark; + private _currentMask; + private _debounce; + private _debounceCount; + private _dummyImageData; + private _historyCount; + private _inferenceDimensions; + private _inputCanvas; + private _inputContext; + private _inputMemoryOffset; + private _isSimdEnabled; + private _maskBlurRadius; + private _maskCanvas; + private _maskContext; + private _masks; + private _maskUsageCounter; + private _outputMemoryOffset; + private _personProbabilityThreshold; + private _pipeline; + private _tflite; + private readonly _version; + constructor(options: BackgroundProcessorOptions); + /** + * The current blur radius when smoothing out the edges of the person's mask. + */ + get maskBlurRadius(): number; + /** + * Set a new blur radius to be used when smoothing out the edges of the person's mask. + */ + set maskBlurRadius(radius: number); + /** + * Load the segmentation model. + * Call this method before attaching the processor to ensure + * video frames are processed correctly. + */ + loadModel(): Promise; + /** + * Apply a transform to the background of an input video frame and leaving + * the foreground (person(s)) untouched. Any exception detected will + * result in the frame being dropped. + * @param inputFrameBuffer - The source of the input frame to process. + *
+ *
+ * [OffscreenCanvas](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas) - Good for canvas-related processing + * that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLCanvasElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement) - This is recommended on browsers + * that don't support `OffscreenCanvas`, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLVideoElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement) - Recommended when using [[Pipeline.WebGL2]] but + * works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]]. + *
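+ *
+ * As a hedged sketch (assuming a track obtained from the `twilio-video` SDK and an
+ * already-loaded processor), the input frame buffer type is typically selected
+ * through the `addProcessor` options rather than constructed by hand:
+ *
+ * ```ts
+ * // 'video' hands processFrame an HTMLVideoElement input frame buffer, which
+ * // works with both pipelines and is the recommended pairing for Pipeline.WebGL2.
+ * track.addProcessor(processor, {
+ *   inputFrameBufferType: 'video',
+ *   outputFrameBufferContextType: 'webgl2',
+ * });
+ * ```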
+ * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame. + */ + processFrame(inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement, outputFrameBuffer: HTMLCanvasElement): Promise; + protected abstract _getWebGL2PipelineType(): WebGL2PipelineType; + protected abstract _setBackground(inputFrame: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement): void; + private _addMask; + private _applyAlpha; + private _createPersonMask; + private _createWebGL2Pipeline; + private _getResizedInputImageData; + private _loadJs; + private _loadTwilioTfLite; + private _runTwilioTfLiteInference; +} diff --git a/es5/processors/background/BackgroundProcessor.js b/es5/processors/background/BackgroundProcessor.js new file mode 100644 index 0000000..6477d1c --- /dev/null +++ b/es5/processors/background/BackgroundProcessor.js @@ -0,0 +1,398 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BackgroundProcessor = void 0; +var Processor_1 = require("../Processor"); +var Benchmark_1 = require("../../utils/Benchmark"); +var version_1 = require("../../utils/version"); +var types_1 = require("../../types"); +var webgl2_1 = require("../webgl2"); +var constants_1 = require("../../constants"); +/** + * @private + */ +var BackgroundProcessor = /** @class */ (function (_super) { + __extends(BackgroundProcessor, _super); + function BackgroundProcessor(options) { + var _this = _super.call(this) || this; + _this._backgroundImage = null; + _this._outputCanvas = null; + _this._outputContext = null; + _this._webgl2Pipeline = null; + _this._currentMask = new Uint8ClampedArray(); + _this._debounce = true; + _this._debounceCount = constants_1.DEBOUNCE_COUNT; + _this._dummyImageData = new ImageData(1, 1); + _this._inferenceDimensions = constants_1.WASM_INFERENCE_DIMENSIONS; + _this._inputMemoryOffset = 0; + // tslint:disable-next-line no-unused-variable + _this._isSimdEnabled = null; + _this._maskBlurRadius = constants_1.MASK_BLUR_RADIUS; + _this._maskUsageCounter = 0; + _this._outputMemoryOffset = 0; + _this._personProbabilityThreshold = constants_1.PERSON_PROBABILITY_THRESHOLD; + _this._pipeline = types_1.Pipeline.WebGL2; + // tslint:disable-next-line no-unused-variable + _this._version = version_1.version; + if (typeof options.assetsPath !== 'string') { + throw new Error('assetsPath parameter is missing'); + } + var assetsPath = options.assetsPath; + if (assetsPath && assetsPath[assetsPath.length - 1] !== '/') { + assetsPath += '/'; + } + _this.maskBlurRadius = options.maskBlurRadius; + _this._assetsPath = assetsPath; + _this._debounce = typeof options.debounce === 'boolean' ? options.debounce : _this._debounce; + _this._debounceCount = _this._debounce ? _this._debounceCount : 1; + _this._inferenceDimensions = options.inferenceDimensions || _this._inferenceDimensions; + _this._historyCount = constants_1.HISTORY_COUNT_MULTIPLIER * _this._debounceCount; + _this._personProbabilityThreshold = options.personProbabilityThreshold || _this._personProbabilityThreshold; + _this._pipeline = options.pipeline || _this._pipeline; + _this._benchmark = new Benchmark_1.Benchmark(); + _this._inputCanvas = document.createElement('canvas'); + _this._inputContext = _this._inputCanvas.getContext('2d'); + _this._maskCanvas = typeof window.OffscreenCanvas !== 'undefined' ? 
new window.OffscreenCanvas(1, 1) : document.createElement('canvas'); + _this._maskContext = _this._maskCanvas.getContext('2d'); + _this._masks = []; + return _this; + } + Object.defineProperty(BackgroundProcessor.prototype, "maskBlurRadius", { + /** + * The current blur radius when smoothing out the edges of the person's mask. + */ + get: function () { + return this._maskBlurRadius; + }, + /** + * Set a new blur radius to be used when smoothing out the edges of the person's mask. + */ + set: function (radius) { + if (typeof radius !== 'number' || radius < 0) { + console.warn("Valid mask blur radius not found. Using ".concat(constants_1.MASK_BLUR_RADIUS, " as default.")); + radius = constants_1.MASK_BLUR_RADIUS; + } + this._maskBlurRadius = radius; + }, + enumerable: false, + configurable: true + }); + /** + * Load the segmentation model. + * Call this method before attaching the processor to ensure + * video frames are processed correctly. + */ + BackgroundProcessor.prototype.loadModel = function () { + return __awaiter(this, void 0, void 0, function () { + var _a, tflite, modelResponse, model, modelBufferOffset; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: return [4 /*yield*/, Promise.all([ + this._loadTwilioTfLite(), + fetch(this._assetsPath + constants_1.MODEL_NAME), + ])]; + case 1: + _a = _b.sent(), tflite = _a[0], modelResponse = _a[1]; + return [4 /*yield*/, modelResponse.arrayBuffer()]; + case 2: + model = _b.sent(); + modelBufferOffset = tflite._getModelBufferMemoryOffset(); + tflite.HEAPU8.set(new Uint8Array(model), modelBufferOffset); + tflite._loadModel(model.byteLength); + this._inputMemoryOffset = tflite._getInputMemoryOffset() / 4; + this._outputMemoryOffset = tflite._getOutputMemoryOffset() / 4; + this._tflite = tflite; + return [2 /*return*/]; + } + }); + }); + }; + /** + * Apply a transform to the background of an input video frame and leaving + * the foreground (person(s)) untouched. Any exception detected will + * result in the frame being dropped. + * @param inputFrameBuffer - The source of the input frame to process. + *
+ *
+ * [OffscreenCanvas](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas) - Good for canvas-related processing + * that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLCanvasElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement) - This is recommended on browsers + * that don't support `OffscreenCanvas`, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]]. + *
+ *
+ * [HTMLVideoElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement) - Recommended when using [[Pipeline.WebGL2]] but + * works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]]. + *
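+ *
+ * For orientation, a hand-rolled driver (hypothetical; in practice the Video SDK
+ * invokes this method for you once the processor is added to a track) would look
+ * roughly like:
+ *
+ * ```ts
+ * const output = document.createElement('canvas');
+ * async function renderLoop(video: HTMLVideoElement): Promise<void> {
+ *   output.width = video.videoWidth;
+ *   output.height = video.videoHeight;
+ *   await processor.processFrame(video, output); // a throw here drops the frame
+ *   requestAnimationFrame(() => renderLoop(video));
+ * }
+ * ```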
+ * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame. + */ + BackgroundProcessor.prototype.processFrame = function (inputFrameBuffer, outputFrameBuffer) { + var _a, _b; + return __awaiter(this, void 0, void 0, function () { + var _c, inferenceWidth, inferenceHeight, inputFrame, captureWidth, captureHeight, reInitDummyImage, personMask, ctx; + return __generator(this, function (_d) { + switch (_d.label) { + case 0: + if (!this._tflite) { + return [2 /*return*/]; + } + if (!inputFrameBuffer || !outputFrameBuffer) { + throw new Error('Missing input or output frame buffer'); + } + this._benchmark.end('captureFrameDelay'); + this._benchmark.start('processFrameDelay'); + _c = this._inferenceDimensions, inferenceWidth = _c.width, inferenceHeight = _c.height; + inputFrame = inputFrameBuffer; + captureWidth = inputFrame.width, captureHeight = inputFrame.height; + if (inputFrame.videoWidth) { + inputFrame = inputFrame; + captureWidth = inputFrame.videoWidth; + captureHeight = inputFrame.videoHeight; + } + if (this._outputCanvas !== outputFrameBuffer) { + this._outputCanvas = outputFrameBuffer; + this._outputContext = this._outputCanvas + .getContext(this._pipeline === types_1.Pipeline.Canvas2D ? '2d' : 'webgl2'); + (_a = this._webgl2Pipeline) === null || _a === void 0 ? void 0 : _a.cleanUp(); + this._webgl2Pipeline = null; + } + if (!this._webgl2Pipeline && this._pipeline === types_1.Pipeline.WebGL2) { + this._createWebGL2Pipeline(inputFrame, captureWidth, captureHeight, inferenceWidth, inferenceHeight); + } + if (!(this._pipeline === types_1.Pipeline.WebGL2)) return [3 /*break*/, 2]; + return [4 /*yield*/, ((_b = this._webgl2Pipeline) === null || _b === void 0 ? void 0 : _b.render())]; + case 1: + _d.sent(); + return [3 /*break*/, 4]; + case 2: + reInitDummyImage = false; + if (this._inputCanvas.width !== inferenceWidth) { + this._inputCanvas.width = inferenceWidth; + this._maskCanvas.width = inferenceWidth; + reInitDummyImage = true; + } + if (this._inputCanvas.height !== inferenceHeight) { + this._inputCanvas.height = inferenceHeight; + this._maskCanvas.height = inferenceHeight; + reInitDummyImage = true; + } + if (reInitDummyImage) { + this._dummyImageData = new ImageData(new Uint8ClampedArray(inferenceWidth * inferenceHeight * 4), inferenceWidth, inferenceHeight); + } + return [4 /*yield*/, this._createPersonMask(inputFrame)]; + case 3: + personMask = _d.sent(); + ctx = this._outputContext; + this._benchmark.start('imageCompositionDelay'); + this._maskContext.putImageData(personMask, 0, 0); + ctx.save(); + ctx.filter = "blur(".concat(this._maskBlurRadius, "px)"); + ctx.globalCompositeOperation = 'copy'; + ctx.drawImage(this._maskCanvas, 0, 0, captureWidth, captureHeight); + ctx.filter = 'none'; + ctx.globalCompositeOperation = 'source-in'; + ctx.drawImage(inputFrame, 0, 0, captureWidth, captureHeight); + ctx.globalCompositeOperation = 'destination-over'; + this._setBackground(inputFrame); + ctx.restore(); + this._benchmark.end('imageCompositionDelay'); + _d.label = 4; + case 4: + this._benchmark.end('processFrameDelay'); + this._benchmark.end('totalProcessingDelay'); + // NOTE (csantos): Start the benchmark from here so we can include the delay from the Video sdk + // for a more accurate fps + this._benchmark.start('totalProcessingDelay'); + this._benchmark.start('captureFrameDelay'); + return [2 /*return*/]; + } + }); + }); + }; + BackgroundProcessor.prototype._addMask = function (mask) { + if (this._masks.length >= this._historyCount) { + 
this._masks.splice(0, this._masks.length - this._historyCount + 1); + } + this._masks.push(mask); + }; + BackgroundProcessor.prototype._applyAlpha = function (imageData) { + var weightedSum = this._masks.reduce(function (sum, mask, j) { return sum + (j + 1) * (j + 1); }, 0); + var pixels = imageData.height * imageData.width; + var _loop_1 = function (i) { + var w = this_1._masks.reduce(function (sum, mask, j) { return sum + mask[i] * (j + 1) * (j + 1); }, 0) / weightedSum; + imageData.data[i * 4 + 3] = Math.round(w * 255); + }; + var this_1 = this; + for (var i = 0; i < pixels; i++) { + _loop_1(i); + } + }; + BackgroundProcessor.prototype._createPersonMask = function (inputFrame) { + return __awaiter(this, void 0, void 0, function () { + var imageData, shouldRunInference; + return __generator(this, function (_a) { + imageData = this._dummyImageData; + shouldRunInference = this._maskUsageCounter < 1; + this._benchmark.start('inputImageResizeDelay'); + if (shouldRunInference) { + imageData = this._getResizedInputImageData(inputFrame); + } + this._benchmark.end('inputImageResizeDelay'); + this._benchmark.start('segmentationDelay'); + if (shouldRunInference) { + this._currentMask = this._runTwilioTfLiteInference(imageData); + this._maskUsageCounter = this._debounceCount; + } + this._addMask(this._currentMask); + this._applyAlpha(imageData); + this._maskUsageCounter--; + this._benchmark.end('segmentationDelay'); + return [2 /*return*/, imageData]; + }); + }); + }; + BackgroundProcessor.prototype._createWebGL2Pipeline = function (inputFrame, captureWidth, captureHeight, inferenceWidth, inferenceHeight) { + this._webgl2Pipeline = (0, webgl2_1.buildWebGL2Pipeline)({ + htmlElement: inputFrame, + width: captureWidth, + height: captureHeight, + }, this._backgroundImage, { type: this._getWebGL2PipelineType() }, { inputResolution: "".concat(inferenceWidth, "x").concat(inferenceHeight) }, this._outputCanvas, this._tflite, this._benchmark, this._debounce); + this._webgl2Pipeline.updatePostProcessingConfig({ + smoothSegmentationMask: true, + jointBilateralFilter: { + sigmaSpace: 10, + sigmaColor: 0.12 + }, + coverage: [ + 0, + 0.99 + ], + lightWrapping: 0, + blendMode: 'screen' + }); + }; + BackgroundProcessor.prototype._getResizedInputImageData = function (inputFrame) { + var _a = this._inputCanvas, width = _a.width, height = _a.height; + this._inputContext.drawImage(inputFrame, 0, 0, width, height); + var imageData = this._inputContext.getImageData(0, 0, width, height); + return imageData; + }; + BackgroundProcessor.prototype._loadJs = function (url) { + if (BackgroundProcessor._loadedScripts.includes(url)) { + return Promise.resolve(); + } + return new Promise(function (resolve, reject) { + var script = document.createElement('script'); + script.onload = function () { + BackgroundProcessor._loadedScripts.push(url); + resolve(); + }; + script.onerror = reject; + document.head.append(script); + script.src = url; + }); + }; + BackgroundProcessor.prototype._loadTwilioTfLite = function () { + return __awaiter(this, void 0, void 0, function () { + var tflite, _a; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: return [4 /*yield*/, this._loadJs(this._assetsPath + constants_1.TFLITE_SIMD_LOADER_NAME)]; + case 1: + _b.sent(); + _b.label = 2; + case 2: + _b.trys.push([2, 4, , 7]); + return [4 /*yield*/, window.createTwilioTFLiteSIMDModule()]; + case 3: + tflite = _b.sent(); + this._isSimdEnabled = true; + return [3 /*break*/, 7]; + case 4: + _a = _b.sent(); + console.warn('SIMD not 
supported. You may experience poor quality of background replacement.'); + return [4 /*yield*/, this._loadJs(this._assetsPath + constants_1.TFLITE_LOADER_NAME)]; + case 5: + _b.sent(); + return [4 /*yield*/, window.createTwilioTFLiteModule()]; + case 6: + tflite = _b.sent(); + this._isSimdEnabled = false; + return [3 /*break*/, 7]; + case 7: return [2 /*return*/, tflite]; + } + }); + }); + }; + BackgroundProcessor.prototype._runTwilioTfLiteInference = function (inputImage) { + var _a = this, _b = _a._inferenceDimensions, width = _b.width, height = _b.height, offset = _a._inputMemoryOffset, tflite = _a._tflite; + var pixels = width * height; + for (var i = 0; i < pixels; i++) { + tflite.HEAPF32[offset + i * 3] = inputImage.data[i * 4] / 255; + tflite.HEAPF32[offset + i * 3 + 1] = inputImage.data[i * 4 + 1] / 255; + tflite.HEAPF32[offset + i * 3 + 2] = inputImage.data[i * 4 + 2] / 255; + } + tflite._runInference(); + var inferenceData = new Uint8ClampedArray(pixels * 4); + for (var i = 0; i < pixels; i++) { + var personProbability = tflite.HEAPF32[this._outputMemoryOffset + i]; + inferenceData[i] = Number(personProbability >= this._personProbabilityThreshold) * personProbability; + } + return inferenceData; + }; + BackgroundProcessor._loadedScripts = []; + return BackgroundProcessor; +}(Processor_1.Processor)); +exports.BackgroundProcessor = BackgroundProcessor; +//# sourceMappingURL=BackgroundProcessor.js.map \ No newline at end of file diff --git a/es5/processors/background/BackgroundProcessor.js.map b/es5/processors/background/BackgroundProcessor.js.map new file mode 100644 index 0000000..c9c559b --- /dev/null +++ b/es5/processors/background/BackgroundProcessor.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BackgroundProcessor.js","sourceRoot":"","sources":["../../../lib/processors/background/BackgroundProcessor.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,0CAAyC;AACzC,mDAAkD;AAClD,+CAA8C;AAC9C,qCAAuE;AACvE,oCAAgD;AAEhD,6CASyB;AA2EzB;;GAEG;AACH;IAAkD,uCAAS;IAiCzD,6BAAY,OAAmC;QAA/C,YACE,iBAAO,SAyBR;QAxDS,sBAAgB,GAA4B,IAAI,CAAC;QACjD,mBAAa,GAA6B,IAAI,CAAC;QAC/C,oBAAc,GAA6D,IAAI,CAAC;QAChF,qBAAe,GAAkD,IAAI,CAAC;QAIxE,kBAAY,GAAmC,IAAI,iBAAiB,EAAE,CAAC;QACvE,eAAS,GAAY,IAAI,CAAC;QAC1B,oBAAc,GAAW,0BAAc,CAAC;QACxC,qBAAe,GAAc,IAAI,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QAEjD,0BAAoB,GAAe,qCAAyB,CAAC;QAG7D,wBAAkB,GAAW,CAAC,CAAC;QACvC,8CAA8C;QACtC,oBAAc,GAAmB,IAAI,CAAC;QACtC,qBAAe,GAAW,4BAAgB,CAAC;QAI3C,uBAAiB,GAAW,CAAC,CAAC;QAC9B,yBAAmB,GAAW,CAAC,CAAC;QAChC,iCAA2B,GAAW,wCAA4B,CAAC;QACnE,eAAS,GAAa,gBAAQ,CAAC,MAAM,CAAC;QAE9C,8CAA8C;QAC7B,cAAQ,GAAW,iBAAO,CAAC;QAK1C,IAAI,OAAO,OAAO,CAAC,UAAU,KAAK,QAAQ,EAAE;YAC1C,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;QACpC,IAAI,UAAU,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,GAAG,EAAE;YAC3D,UAAU,IAAI,GAAG,CAAC;SACnB;QAED,KAAI,CAAC,cAAc,GAAG,OAAO,CAAC,cAAe,CAAC;QAC9C,KAAI,CAAC,WAAW,GAAG,UAAU,CAAC;QAC9B,KAAI,CAAC,SAAS,GAAG,OAAO,OAAO,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAI,CAAC,SAAS,CAAC;QAC3F,KAAI,CAAC,cAAc,GAAG,KAAI,CAAC,SAAS,CAAC,CAAC,CAAC,KAAI,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC;QAC/D,KAAI,CAAC,oBAAoB,GAAG,OAAO,CAAC,mBAAoB,IAAI,KAAI,CAAC,oBAAoB,CAAC;QACtF,KAAI,CAAC,aAAa,GAAG,oCAAwB,GAAG,KAAI,CAAC,cAAc,CAAC;QACpE,KAAI,CAAC,2BAA2B,GAAG,OAAO,CAAC,0BAA2B,IAAI,KAAI,CAAC,2BAA2B,CAAC;QAC3G,KAAI,CAAC,SAAS,GAAG,OAAO,CAAC,QAAS,IAAI,KAAI,CAAC,SAAS,CAAC;QAErD,KAAI,CAAC,UAAU,GAAG,IAAI,qBAAS,EAAE,CAAC;QAClC,KAAI,CAAC,YAAY,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;QACrD,KAAI,
CAAC,aAAa,GAAG,KAAI,CAAC,YAAY,CAAC,UAAU,CAAC,IAAI,CAA6B,CAAC;QACpF,KAAI,CAAC,WAAW,GAAI,OAAO,MAAM,CAAC,eAAe,KAAK,WAAW,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;QACxI,KAAI,CAAC,YAAY,GAAG,KAAI,CAAC,WAAW,CAAC,UAAU,CAAC,IAAI,CAAsC,CAAC;QAC3F,KAAI,CAAC,MAAM,GAAG,EAAE,CAAC;;IACnB,CAAC;IAKD,sBAAI,+CAAc;QAHlB;;WAEG;aACH;YACE,OAAO,IAAI,CAAC,eAAe,CAAC;QAC9B,CAAC;QAED;;WAEG;aACH,UAAmB,MAAc;YAC/B,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,MAAM,GAAG,CAAC,EAAE;gBAC5C,OAAO,CAAC,IAAI,CAAC,kDAA2C,4BAAgB,iBAAc,CAAC,CAAC;gBACxF,MAAM,GAAG,4BAAgB,CAAC;aAC3B;YACD,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC;QAChC,CAAC;;;OAXA;IAaD;;;;OAIG;IACI,uCAAS,GAAf;;;;;4BACkC,qBAAM,OAAO,CAAC,GAAG,CAAC;4BACjD,IAAI,CAAC,iBAAiB,EAAE;4BACxB,KAAK,CAAC,IAAI,CAAC,WAAW,GAAG,sBAAU,CAAC;yBACrC,CAAC,EAAA;;wBAHI,KAA2B,SAG/B,EAHK,MAAM,QAAA,EAAE,aAAa,QAAA;wBAKd,qBAAM,aAAa,CAAC,WAAW,EAAE,EAAA;;wBAAzC,KAAK,GAAG,SAAiC;wBACzC,iBAAiB,GAAG,MAAM,CAAC,2BAA2B,EAAE,CAAC;wBAC/D,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,iBAAiB,CAAC,CAAC;wBAC5D,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;wBAEpC,IAAI,CAAC,kBAAkB,GAAG,MAAM,CAAC,qBAAqB,EAAE,GAAG,CAAC,CAAC;wBAC7D,IAAI,CAAC,mBAAmB,GAAG,MAAM,CAAC,sBAAsB,EAAE,GAAG,CAAC,CAAC;wBAE/D,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;;;;;KACvB;IAED;;;;;;;;;;;;;;;;;;;OAmBG;IACG,0CAAY,GAAlB,UACE,gBAAwE,EACxE,iBAAoC;;;;;;;wBAEpC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;4BACjB,sBAAO;yBACR;wBACD,IAAI,CAAC,gBAAgB,IAAI,CAAC,iBAAiB,EAAE;4BAC3C,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;yBACzD;wBACD,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAC;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAC;wBAErC,KAAqD,IAAI,CAAC,oBAAoB,EAArE,cAAc,WAAA,EAAU,eAAe,YAAA,CAA+B;wBACjF,UAAU,GAAG,gBAAgB,CAAC;wBACrB,YAAY,GAA4B,UAAU,MAAtC,EAAU,aAAa,GAAK,UAAU,OAAf,CAAgB;wBAChE,IAAK,UAA+B,CAAC,UAAU,EAAE;4BAC/C,UAAU,GAAG,UAA8B,CAAC;4BAC5C,YAAY,GAAG,UAAU,CAAC,UAAU,CAAC;4BACrC,aAAa,GAAG,UAAU,CAAC,WAAW,CAAC;yBACxC;wBACD,IAAI,IAAI,CAAC,aAAa,KAAK,iBAAiB,EAAE;4BAC5C,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC;4BACvC,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,aAAa;iCACrC,UAAU,CAAC,IAAI,CAAC,SAAS,KAAK,gBAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,QAAQ,CACjB,CAAC;4BACpD,MAAA,IAAI,CAAC,eAAe,0CAAE,OAAO,EAAE,CAAC;4BAChC,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC;yBAC7B;wBAED,IAAI,CAAC,IAAI,CAAC,eAAe,IAAI,IAAI,CAAC,SAAS,KAAK,gBAAQ,CAAC,MAAM,EAAE;4BAC/D,IAAI,CAAC,qBAAqB,CAAC,UAA8B,EAAE,YAAY,EAAE,aAAa,EAAE,cAAc,EAAE,eAAe,CAAC,CAAC;yBAC1H;6BAEG,CAAA,IAAI,CAAC,SAAS,KAAK,gBAAQ,CAAC,MAAM,CAAA,EAAlC,wBAAkC;wBACpC,qBAAM,CAAA,MAAA,IAAI,CAAC,eAAe,0CAAE,MAAM,EAAE,CAAA,EAAA;;wBAApC,SAAoC,CAAC;;;wBAGjC,gBAAgB,GAAG,KAAK,CAAC;wBAC7B,IAAI,IAAI,CAAC,YAAY,CAAC,KAAK,KAAK,cAAc,EAAE;4BAC9C,IAAI,CAAC,YAAY,CAAC,KAAK,GAAG,cAAc,CAAC;4BACzC,IAAI,CAAC,WAAW,CAAC,KAAK,GAAG,cAAc,CAAC;4BACxC,gBAAgB,GAAG,IAAI,CAAC;yBACzB;wBACD,IAAI,IAAI,CAAC,YAAY,CAAC,MAAM,KAAK,eAAe,EAAE;4BAChD,IAAI,CAAC,YAAY,CAAC,MAAM,GAAG,eAAe,CAAC;4BAC3C,IAAI,CAAC,WAAW,CAAC,MAAM,GAAG,eAAe,CAAC;4BAC1C,gBAAgB,GAAG,IAAI,CAAC;yBACzB;wBACD,IAAI,gBAAgB,EAAE;4BACpB,IAAI,CAAC,eAAe,GAAG,IAAI,SAAS,CAClC,IAAI,iBAAiB,CAAC,cAAc,GAAG,eAAe,GAAG,CAAC,CAAC,EAC3D,cAAc,EAAE,eAAe,CAAC,CAAC;yBACpC;wBAEkB,qBAAM,IAAI,CAAC,iBAAiB,CAAC,UAAU,CAAC,EAAA;;wBAArD,UAAU,GAAG,SAAwC;wBACrD,GAAG,GAAG,IAAI,CAAC,cAA0C,CAAC;wBAC5D,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAC;wBAC/C,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;wBACjD,GAAG,CAAC,IAAI,EAAE,CAAC;wBACX,GAAG,CAAC,MAAM,GAAG,eAAQ,IAAI,CAAC,eAAe,QAAK,CAAC;wBAC/C,GAAG,CAAC,wBAAwB,GAAG,MAAM,CAAC;wBACtC,GAAG,CAAC,SAAS,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,EAAE,YAAY,EAAE,aAAa,CAAC,CAAC;w
BACnE,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC;wBACpB,GAAG,CAAC,wBAAwB,GAAG,WAAW,CAAC;wBAC3C,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,EAAE,YAAY,EAAE,aAAa,CAAC,CAAC;wBAC7D,GAAG,CAAC,wBAAwB,GAAG,kBAAkB,CAAC;wBAClD,IAAI,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC;wBAChC,GAAG,CAAC,OAAO,EAAE,CAAC;wBACd,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAC;;;wBAG/C,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAC;wBACzC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,sBAAsB,CAAC,CAAC;wBAE5C,+FAA+F;wBAC/F,0BAA0B;wBAC1B,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAC;wBAC9C,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAC;;;;;KAC5C;IAMO,sCAAQ,GAAhB,UAAiB,IAAoC;QACnD,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,IAAI,IAAI,CAAC,aAAa,EAAE;YAC5C,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,CAAC;SACpE;QACD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACzB,CAAC;IAEO,yCAAW,GAAnB,UAAoB,SAAoB;QACtC,IAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,UAAC,GAAG,EAAE,IAAI,EAAE,CAAC,IAAK,OAAA,GAAG,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAvB,CAAuB,EAAE,CAAC,CAAC,CAAC;QACrF,IAAM,MAAM,GAAG,SAAS,CAAC,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC;gCACzC,CAAC;YACR,IAAM,CAAC,GAAG,OAAK,MAAM,CAAC,MAAM,CAAC,UAAC,GAAG,EAAE,IAAI,EAAE,CAAC,IAAK,OAAA,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAjC,CAAiC,EAAE,CAAC,CAAC,GAAG,WAAW,CAAC;YACnG,SAAS,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC;;;QAFlD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE;oBAAtB,CAAC;SAGT;IACH,CAAC;IAEa,+CAAiB,GAA/B,UAAgC,UAAkE;;;;gBAC5F,SAAS,GAAG,IAAI,CAAC,eAAe,CAAC;gBAC/B,kBAAkB,GAAG,IAAI,CAAC,iBAAiB,GAAG,CAAC,CAAC;gBAEtD,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAC;gBAC/C,IAAI,kBAAkB,EAAE;oBACtB,SAAS,GAAG,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,CAAC;iBACxD;gBACD,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAC;gBAE7C,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAC;gBAC3C,IAAI,kBAAkB,EAAE;oBACtB,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,yBAAyB,CAAC,SAAS,CAAC,CAAC;oBAC9D,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC,cAAc,CAAC;iBAC9C;gBACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBACjC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBAC5B,IAAI,CAAC,iBAAiB,EAAE,CAAC;gBACzB,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAC;gBAEzC,sBAAO,SAAS,EAAC;;;KAClB;IAEO,mDAAqB,GAA7B,UACE,UAA4B,EAC5B,YAAoB,EACpB,aAAqB,EACrB,cAAsB,EACtB,eAAuB;QAEvB,IAAI,CAAC,eAAe,GAAG,IAAA,4BAAmB,EACxC;YACE,WAAW,EAAE,UAAU;YACvB,KAAK,EAAE,YAAY;YACnB,MAAM,EAAE,aAAa;SACtB,EACD,IAAI,CAAC,gBAAgB,EACrB,EAAE,IAAI,EAAE,IAAI,CAAC,sBAAsB,EAAE,EAAE,EACvC,EAAE,eAAe,EAAE,UAAG,cAAc,cAAI,eAAe,CAAE,EAAE,EAC3D,IAAI,CAAC,aAAc,EACnB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,SAAS,CACf,CAAC;QACF,IAAI,CAAC,eAAe,CAAC,0BAA0B,CAAC;YAC9C,sBAAsB,EAAE,IAAI;YAC5B,oBAAoB,EAAE;gBACpB,UAAU,EAAE,EAAE;gBACd,UAAU,EAAE,IAAI;aACjB;YACD,QAAQ,EAAE;gBACR,CAAC;gBACD,IAAI;aACL;YACD,aAAa,EAAE,CAAC;YAChB,SAAS,EAAE,QAAQ;SACpB,CAAC,CAAC;IACL,CAAC;IAEO,uDAAyB,GAAjC,UAAkC,UAAkE;QAC5F,IAAA,KAAoB,IAAI,CAAC,YAAY,EAAnC,KAAK,WAAA,EAAE,MAAM,YAAsB,CAAC;QAC5C,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAM,SAAS,GAAG,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACvE,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,qCAAO,GAAf,UAAgB,GAAW;QACzB,IAAI,mBAAmB,CAAC,cAAc,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACpD,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;SAC1B;QACD,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;YACjC,IAAM,MAAM,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;YAChD,MAAM,CAAC,MAAM,GAAG;gBACd,mBAAmB,CAAC,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;gBAC7C,OAAO,
EAAE,CAAC;YACZ,CAAC,CAAC;YACF,MAAM,CAAC,OAAO,GAAG,MAAM,CAAC;YACxB,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAC7B,MAAM,CAAC,GAAG,GAAG,GAAG,CAAC;QACnB,CAAC,CAAC,CAAC;IACL,CAAC;IAEa,+CAAiB,GAA/B;;;;;4BAEE,qBAAM,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,GAAG,mCAAuB,CAAC,EAAA;;wBAA9D,SAA8D,CAAC;;;;wBAGpD,qBAAM,MAAM,CAAC,4BAA4B,EAAE,EAAA;;wBAApD,MAAM,GAAG,SAA2C,CAAC;wBACrD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC;;;;wBAE3B,OAAO,CAAC,IAAI,CAAC,gFAAgF,CAAC,CAAC;wBAC/F,qBAAM,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,GAAG,8BAAkB,CAAC,EAAA;;wBAAzD,SAAyD,CAAC;wBACjD,qBAAM,MAAM,CAAC,wBAAwB,EAAE,EAAA;;wBAAhD,MAAM,GAAG,SAAuC,CAAC;wBACjD,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC;;4BAE9B,sBAAO,MAAM,EAAC;;;;KACf;IAEO,uDAAyB,GAAjC,UAAkC,UAAqB;QAC/C,IAAA,KAA2F,IAAI,EAA7F,4BAAuC,EAAf,KAAK,WAAA,EAAE,MAAM,YAAA,EAAwB,MAAM,wBAAA,EAAW,MAAM,aAAS,CAAC;QACtG,IAAM,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC;QAE9B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;YAC/B,MAAM,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;YAC9D,MAAM,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;YACtE,MAAM,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,UAAU,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;SACvE;QAED,MAAM,CAAC,aAAa,EAAE,CAAC;QACvB,IAAM,aAAa,GAAG,IAAI,iBAAiB,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QAExD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE;YAC/B,IAAM,iBAAiB,GAAG,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,mBAAmB,GAAG,CAAC,CAAC,CAAC;YACvE,aAAa,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,iBAAiB,IAAI,IAAI,CAAC,2BAA2B,CAAC,GAAG,iBAAiB,CAAC;SACtG;QAED,OAAO,aAAa,CAAC;IACvB,CAAC;IAlVc,kCAAc,GAAa,EAAE,AAAf,CAAgB;IAmV/C,0BAAC;CAAA,AApVD,CAAkD,qBAAS,GAoV1D;AApVqB,kDAAmB","sourcesContent":["import { Processor } from '../Processor';\nimport { Benchmark } from '../../utils/Benchmark';\nimport { version } from '../../utils/version';\nimport { Dimensions, Pipeline, WebGL2PipelineType } from '../../types';\nimport { buildWebGL2Pipeline } from '../webgl2';\n\nimport {\n DEBOUNCE_COUNT,\n HISTORY_COUNT_MULTIPLIER,\n MASK_BLUR_RADIUS,\n MODEL_NAME,\n PERSON_PROBABILITY_THRESHOLD,\n TFLITE_LOADER_NAME,\n TFLITE_SIMD_LOADER_NAME,\n WASM_INFERENCE_DIMENSIONS,\n} from '../../constants';\n\n/**\n * @private\n */\nexport interface BackgroundProcessorOptions {\n /**\n * The VideoProcessors load assets dynamically depending on certain browser features.\n * You need to serve all the assets and provide the root path so they can be referenced properly.\n * These assets can be copied from the `dist/build` folder which you can add as part of your deployment process.\n * @example\n *
\n *
\n * For virtual background:\n *
\n *\n * ```ts\n * const virtualBackground = new VirtualBackgroundProcessor({\n * assetsPath: 'https://my-server-path/assets',\n * backgroundImage: img,\n * });\n * await virtualBackground.loadModel();\n * ```\n *\n *
\n * For blur background:\n *
\n *\n * ```ts\n * const blurBackground = new GaussianBlurBackgroundProcessor({\n * assetsPath: 'https://my-server-path/assets'\n * });\n * await blurBackground.loadModel();\n * ```\n */\n assetsPath: string;\n\n /**\n * Whether to skip processing every other frame to improve the output frame rate, but reducing accuracy in the process.\n * @default\n * ```html\n * true\n * ```\n */\n debounce?: boolean;\n\n /**\n * @private\n */\n inferenceDimensions?: Dimensions;\n\n /**\n * The blur radius to use when smoothing out the edges of the person's mask.\n * @default\n * ```html\n * 5\n * ```\n */\n maskBlurRadius?: number;\n\n /**\n * @private\n */\n personProbabilityThreshold?: number;\n\n /**\n * Specifies which pipeline to use when processing video frames.\n * @default\n * ```html\n * 'WebGL2'\n * ```\n */\n pipeline?: Pipeline;\n}\n\n/**\n * @private\n */\nexport abstract class BackgroundProcessor extends Processor {\n private static _loadedScripts: string[] = [];\n\n protected _backgroundImage: HTMLImageElement | null = null;\n protected _outputCanvas: HTMLCanvasElement | null = null;\n protected _outputContext: CanvasRenderingContext2D | WebGL2RenderingContext | null = null;\n protected _webgl2Pipeline: ReturnType | null = null;\n\n private _assetsPath: string;\n private _benchmark: Benchmark;\n private _currentMask: Uint8ClampedArray | Uint8Array = new Uint8ClampedArray();\n private _debounce: boolean = true;\n private _debounceCount: number = DEBOUNCE_COUNT;\n private _dummyImageData: ImageData = new ImageData(1, 1);\n private _historyCount: number;\n private _inferenceDimensions: Dimensions = WASM_INFERENCE_DIMENSIONS;\n private _inputCanvas: HTMLCanvasElement;\n private _inputContext: CanvasRenderingContext2D;\n private _inputMemoryOffset: number = 0;\n // tslint:disable-next-line no-unused-variable\n private _isSimdEnabled: boolean | null = null;\n private _maskBlurRadius: number = MASK_BLUR_RADIUS;\n private _maskCanvas: OffscreenCanvas | HTMLCanvasElement;\n private _maskContext: OffscreenCanvasRenderingContext2D | CanvasRenderingContext2D;\n private _masks: (Uint8ClampedArray | Uint8Array)[];\n private _maskUsageCounter: number = 0;\n private _outputMemoryOffset: number = 0;\n private _personProbabilityThreshold: number = PERSON_PROBABILITY_THRESHOLD;\n private _pipeline: Pipeline = Pipeline.WebGL2;\n private _tflite: any;\n // tslint:disable-next-line no-unused-variable\n private readonly _version: string = version;\n\n constructor(options: BackgroundProcessorOptions) {\n super();\n\n if (typeof options.assetsPath !== 'string') {\n throw new Error('assetsPath parameter is missing');\n }\n let assetsPath = options.assetsPath;\n if (assetsPath && assetsPath[assetsPath.length - 1] !== '/') {\n assetsPath += '/';\n }\n\n this.maskBlurRadius = options.maskBlurRadius!;\n this._assetsPath = assetsPath;\n this._debounce = typeof options.debounce === 'boolean' ? options.debounce : this._debounce;\n this._debounceCount = this._debounce ? this._debounceCount : 1;\n this._inferenceDimensions = options.inferenceDimensions! || this._inferenceDimensions;\n this._historyCount = HISTORY_COUNT_MULTIPLIER * this._debounceCount;\n this._personProbabilityThreshold = options.personProbabilityThreshold! || this._personProbabilityThreshold;\n this._pipeline = options.pipeline! 
|| this._pipeline;\n\n this._benchmark = new Benchmark();\n this._inputCanvas = document.createElement('canvas');\n this._inputContext = this._inputCanvas.getContext('2d') as CanvasRenderingContext2D;\n this._maskCanvas = typeof window.OffscreenCanvas !== 'undefined' ? new window.OffscreenCanvas(1, 1) : document.createElement('canvas');\n this._maskContext = this._maskCanvas.getContext('2d') as OffscreenCanvasRenderingContext2D;\n this._masks = [];\n }\n\n /**\n * The current blur radius when smoothing out the edges of the person's mask.\n */\n get maskBlurRadius(): number {\n return this._maskBlurRadius;\n }\n\n /**\n * Set a new blur radius to be used when smoothing out the edges of the person's mask.\n */\n set maskBlurRadius(radius: number) {\n if (typeof radius !== 'number' || radius < 0) {\n console.warn(`Valid mask blur radius not found. Using ${MASK_BLUR_RADIUS} as default.`);\n radius = MASK_BLUR_RADIUS;\n }\n this._maskBlurRadius = radius;\n }\n\n /**\n * Load the segmentation model.\n * Call this method before attaching the processor to ensure\n * video frames are processed correctly.\n */\n async loadModel() {\n const [tflite, modelResponse ] = await Promise.all([\n this._loadTwilioTfLite(),\n fetch(this._assetsPath + MODEL_NAME),\n ]);\n\n const model = await modelResponse.arrayBuffer();\n const modelBufferOffset = tflite._getModelBufferMemoryOffset();\n tflite.HEAPU8.set(new Uint8Array(model), modelBufferOffset);\n tflite._loadModel(model.byteLength);\n\n this._inputMemoryOffset = tflite._getInputMemoryOffset() / 4;\n this._outputMemoryOffset = tflite._getOutputMemoryOffset() / 4;\n\n this._tflite = tflite;\n }\n\n /**\n * Apply a transform to the background of an input video frame and leaving\n * the foreground (person(s)) untouched. Any exception detected will\n * result in the frame being dropped.\n * @param inputFrameBuffer - The source of the input frame to process.\n *
\n *
\n * [OffscreenCanvas](https://developer.mozilla.org/en-US/docs/Web/API/OffscreenCanvas) - Good for canvas-related processing\n * that can be rendered off screen. Only works when using [[Pipeline.Canvas2D]].\n *
\n *
\n * [HTMLCanvasElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement) - This is recommended on browsers\n * that don't support `OffscreenCanvas`, or if you need to render the frame on the screen. Only works when using [[Pipeline.Canvas2D]].\n *
\n *
\n * [HTMLVideoElement](https://developer.mozilla.org/en-US/docs/Web/API/HTMLVideoElement) - Recommended when using [[Pipeline.WebGL2]] but\n * works for both [[Pipeline.Canvas2D]] and [[Pipeline.WebGL2]].\n *
\n * @param outputFrameBuffer - The output frame buffer to use to draw the processed frame.\n */\n async processFrame(\n inputFrameBuffer: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement,\n outputFrameBuffer: HTMLCanvasElement\n ): Promise {\n if (!this._tflite) {\n return;\n }\n if (!inputFrameBuffer || !outputFrameBuffer) {\n throw new Error('Missing input or output frame buffer');\n }\n this._benchmark.end('captureFrameDelay');\n this._benchmark.start('processFrameDelay');\n\n const { width: inferenceWidth, height: inferenceHeight } = this._inferenceDimensions;\n let inputFrame = inputFrameBuffer;\n let { width: captureWidth, height: captureHeight } = inputFrame;\n if ((inputFrame as HTMLVideoElement).videoWidth) {\n inputFrame = inputFrame as HTMLVideoElement;\n captureWidth = inputFrame.videoWidth;\n captureHeight = inputFrame.videoHeight;\n }\n if (this._outputCanvas !== outputFrameBuffer) {\n this._outputCanvas = outputFrameBuffer;\n this._outputContext = this._outputCanvas\n .getContext(this._pipeline === Pipeline.Canvas2D ? '2d' : 'webgl2') as\n CanvasRenderingContext2D | WebGL2RenderingContext;\n this._webgl2Pipeline?.cleanUp();\n this._webgl2Pipeline = null;\n }\n\n if (!this._webgl2Pipeline && this._pipeline === Pipeline.WebGL2) {\n this._createWebGL2Pipeline(inputFrame as HTMLVideoElement, captureWidth, captureHeight, inferenceWidth, inferenceHeight);\n }\n\n if (this._pipeline === Pipeline.WebGL2) {\n await this._webgl2Pipeline?.render();\n } else {\n // Only set the canvas' dimensions if they have changed to prevent unnecessary redraw\n let reInitDummyImage = false;\n if (this._inputCanvas.width !== inferenceWidth) {\n this._inputCanvas.width = inferenceWidth;\n this._maskCanvas.width = inferenceWidth;\n reInitDummyImage = true;\n }\n if (this._inputCanvas.height !== inferenceHeight) {\n this._inputCanvas.height = inferenceHeight;\n this._maskCanvas.height = inferenceHeight;\n reInitDummyImage = true;\n }\n if (reInitDummyImage) {\n this._dummyImageData = new ImageData(\n new Uint8ClampedArray(inferenceWidth * inferenceHeight * 4),\n inferenceWidth, inferenceHeight);\n }\n\n const personMask = await this._createPersonMask(inputFrame);\n const ctx = this._outputContext as CanvasRenderingContext2D;\n this._benchmark.start('imageCompositionDelay');\n this._maskContext.putImageData(personMask, 0, 0);\n ctx.save();\n ctx.filter = `blur(${this._maskBlurRadius}px)`;\n ctx.globalCompositeOperation = 'copy';\n ctx.drawImage(this._maskCanvas, 0, 0, captureWidth, captureHeight);\n ctx.filter = 'none';\n ctx.globalCompositeOperation = 'source-in';\n ctx.drawImage(inputFrame, 0, 0, captureWidth, captureHeight);\n ctx.globalCompositeOperation = 'destination-over';\n this._setBackground(inputFrame);\n ctx.restore();\n this._benchmark.end('imageCompositionDelay');\n }\n\n this._benchmark.end('processFrameDelay');\n this._benchmark.end('totalProcessingDelay');\n\n // NOTE (csantos): Start the benchmark from here so we can include the delay from the Video sdk\n // for a more accurate fps\n this._benchmark.start('totalProcessingDelay');\n this._benchmark.start('captureFrameDelay');\n }\n\n protected abstract _getWebGL2PipelineType(): WebGL2PipelineType;\n\n protected abstract _setBackground(inputFrame: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement): void;\n\n private _addMask(mask: Uint8ClampedArray | Uint8Array) {\n if (this._masks.length >= this._historyCount) {\n this._masks.splice(0, this._masks.length - this._historyCount + 1);\n }\n this._masks.push(mask);\n }\n\n private 
_applyAlpha(imageData: ImageData) {\n const weightedSum = this._masks.reduce((sum, mask, j) => sum + (j + 1) * (j + 1), 0);\n const pixels = imageData.height * imageData.width;\n for (let i = 0; i < pixels; i++) {\n const w = this._masks.reduce((sum, mask, j) => sum + mask[i] * (j + 1) * (j + 1), 0) / weightedSum;\n imageData.data[i * 4 + 3] = Math.round(w * 255);\n }\n }\n\n private async _createPersonMask(inputFrame: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement): Promise {\n let imageData = this._dummyImageData;\n const shouldRunInference = this._maskUsageCounter < 1;\n\n this._benchmark.start('inputImageResizeDelay');\n if (shouldRunInference) {\n imageData = this._getResizedInputImageData(inputFrame);\n }\n this._benchmark.end('inputImageResizeDelay');\n\n this._benchmark.start('segmentationDelay');\n if (shouldRunInference) {\n this._currentMask = this._runTwilioTfLiteInference(imageData);\n this._maskUsageCounter = this._debounceCount;\n }\n this._addMask(this._currentMask);\n this._applyAlpha(imageData);\n this._maskUsageCounter--;\n this._benchmark.end('segmentationDelay');\n\n return imageData;\n }\n\n private _createWebGL2Pipeline(\n inputFrame: HTMLVideoElement,\n captureWidth: number,\n captureHeight: number,\n inferenceWidth: number,\n inferenceHeight: number,\n ): void {\n this._webgl2Pipeline = buildWebGL2Pipeline(\n {\n htmlElement: inputFrame,\n width: captureWidth,\n height: captureHeight,\n },\n this._backgroundImage,\n { type: this._getWebGL2PipelineType() },\n { inputResolution: `${inferenceWidth}x${inferenceHeight}` },\n this._outputCanvas!,\n this._tflite,\n this._benchmark,\n this._debounce,\n );\n this._webgl2Pipeline.updatePostProcessingConfig({\n smoothSegmentationMask: true,\n jointBilateralFilter: {\n sigmaSpace: 10,\n sigmaColor: 0.12\n },\n coverage: [\n 0,\n 0.99\n ],\n lightWrapping: 0,\n blendMode: 'screen'\n });\n }\n\n private _getResizedInputImageData(inputFrame: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement): ImageData {\n const { width, height } = this._inputCanvas;\n this._inputContext.drawImage(inputFrame, 0, 0, width, height);\n const imageData = this._inputContext.getImageData(0, 0, width, height);\n return imageData;\n }\n\n private _loadJs(url: string): Promise {\n if (BackgroundProcessor._loadedScripts.includes(url)) {\n return Promise.resolve();\n }\n return new Promise((resolve, reject) => {\n const script = document.createElement('script');\n script.onload = () => {\n BackgroundProcessor._loadedScripts.push(url);\n resolve();\n };\n script.onerror = reject;\n document.head.append(script);\n script.src = url;\n });\n }\n\n private async _loadTwilioTfLite(): Promise {\n let tflite: any;\n await this._loadJs(this._assetsPath + TFLITE_SIMD_LOADER_NAME);\n\n try {\n tflite = await window.createTwilioTFLiteSIMDModule();\n this._isSimdEnabled = true;\n } catch {\n console.warn('SIMD not supported. 
You may experience poor quality of background replacement.');\n await this._loadJs(this._assetsPath + TFLITE_LOADER_NAME);\n tflite = await window.createTwilioTFLiteModule();\n this._isSimdEnabled = false;\n }\n return tflite;\n }\n\n private _runTwilioTfLiteInference(inputImage: ImageData): Uint8ClampedArray {\n const { _inferenceDimensions: { width, height }, _inputMemoryOffset: offset, _tflite: tflite } = this;\n const pixels = width * height;\n\n for (let i = 0; i < pixels; i++) {\n tflite.HEAPF32[offset + i * 3] = inputImage.data[i * 4] / 255;\n tflite.HEAPF32[offset + i * 3 + 1] = inputImage.data[i * 4 + 1] / 255;\n tflite.HEAPF32[offset + i * 3 + 2] = inputImage.data[i * 4 + 2] / 255;\n }\n\n tflite._runInference();\n const inferenceData = new Uint8ClampedArray(pixels * 4);\n\n for (let i = 0; i < pixels; i++) {\n const personProbability = tflite.HEAPF32[this._outputMemoryOffset + i];\n inferenceData[i] = Number(personProbability >= this._personProbabilityThreshold) * personProbability;\n }\n\n return inferenceData;\n }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/background/GaussianBlurBackgroundProcessor.d.ts b/es5/processors/background/GaussianBlurBackgroundProcessor.d.ts new file mode 100644 index 0000000..9bfcdbf --- /dev/null +++ b/es5/processors/background/GaussianBlurBackgroundProcessor.d.ts @@ -0,0 +1,74 @@ +import { BackgroundProcessor, BackgroundProcessorOptions } from './BackgroundProcessor'; +import { WebGL2PipelineType } from '../../types'; +/** + * Options passed to [[GaussianBlurBackgroundProcessor]] constructor. + */ +export interface GaussianBlurBackgroundProcessorOptions extends BackgroundProcessorOptions { + /** + * The background blur filter radius to use in pixels. + * @default + * ```html + * 15 + * ``` + */ + blurFilterRadius?: number; +} +/** + * The GaussianBlurBackgroundProcessor, when added to a VideoTrack, + * applies a gaussian blur filter on the background in each video frame + * and leaves the foreground (person(s)) untouched. Each instance of + * GaussianBlurBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. + * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors'; + * + * const blurBackground = new GaussianBlurBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * pipeline: Pipeline.WebGL2, + * debounce: true, + * }); + * + * blurBackground.loadModel().then(() => { + * createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * // Any frame rate above 24 fps on desktop browsers increase CPU + * // usage without noticeable increase in quality. + * frameRate: 24 + * }).then(track => { + * track.addProcessor(blurBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * }); + * }); + * ``` + */ +export declare class GaussianBlurBackgroundProcessor extends BackgroundProcessor { + private _blurFilterRadius; + private readonly _name; + /** + * Construct a GaussianBlurBackgroundProcessor. 
Default values will be used for + * any missing properties in [[GaussianBlurBackgroundProcessorOptions]], and + * invalid properties will be ignored. + */ + constructor(options: GaussianBlurBackgroundProcessorOptions); + /** + * The current background blur filter radius in pixels. + */ + get blurFilterRadius(): number; + /** + * Set a new background blur filter radius in pixels. + */ + set blurFilterRadius(radius: number); + protected _getWebGL2PipelineType(): WebGL2PipelineType; + protected _setBackground(inputFrame: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement): void; +} diff --git a/es5/processors/background/GaussianBlurBackgroundProcessor.js b/es5/processors/background/GaussianBlurBackgroundProcessor.js new file mode 100644 index 0000000..cacd2c8 --- /dev/null +++ b/es5/processors/background/GaussianBlurBackgroundProcessor.js @@ -0,0 +1,110 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GaussianBlurBackgroundProcessor = void 0; +var BackgroundProcessor_1 = require("./BackgroundProcessor"); +var constants_1 = require("../../constants"); +var types_1 = require("../../types"); +/** + * The GaussianBlurBackgroundProcessor, when added to a VideoTrack, + * applies a gaussian blur filter on the background in each video frame + * and leaves the foreground (person(s)) untouched. Each instance of + * GaussianBlurBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. + * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors'; + * + * const blurBackground = new GaussianBlurBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * pipeline: Pipeline.WebGL2, + * debounce: true, + * }); + * + * blurBackground.loadModel().then(() => { + * createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * // Any frame rate above 24 fps on desktop browsers increase CPU + * // usage without noticeable increase in quality. + * frameRate: 24 + * }).then(track => { + * track.addProcessor(blurBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * }); + * }); + * ``` + */ +var GaussianBlurBackgroundProcessor = /** @class */ (function (_super) { + __extends(GaussianBlurBackgroundProcessor, _super); + /** + * Construct a GaussianBlurBackgroundProcessor. 
Default values will be used for + * any missing properties in [[GaussianBlurBackgroundProcessorOptions]], and + * invalid properties will be ignored. + */ + function GaussianBlurBackgroundProcessor(options) { + var _this = _super.call(this, options) || this; + _this._blurFilterRadius = constants_1.BLUR_FILTER_RADIUS; + // tslint:disable-next-line no-unused-variable + _this._name = 'GaussianBlurBackgroundProcessor'; + _this.blurFilterRadius = options.blurFilterRadius; + return _this; + } + Object.defineProperty(GaussianBlurBackgroundProcessor.prototype, "blurFilterRadius", { + /** + * The current background blur filter radius in pixels. + */ + get: function () { + return this._blurFilterRadius; + }, + /** + * Set a new background blur filter radius in pixels. + */ + set: function (radius) { + if (!radius) { + console.warn("Valid blur filter radius not found. Using ".concat(constants_1.BLUR_FILTER_RADIUS, " as default.")); + radius = constants_1.BLUR_FILTER_RADIUS; + } + this._blurFilterRadius = radius; + }, + enumerable: false, + configurable: true + }); + GaussianBlurBackgroundProcessor.prototype._getWebGL2PipelineType = function () { + return types_1.WebGL2PipelineType.Blur; + }; + GaussianBlurBackgroundProcessor.prototype._setBackground = function (inputFrame) { + if (!this._outputContext) { + return; + } + var ctx = this._outputContext; + ctx.filter = "blur(".concat(this._blurFilterRadius, "px)"); + ctx.drawImage(inputFrame, 0, 0); + }; + return GaussianBlurBackgroundProcessor; +}(BackgroundProcessor_1.BackgroundProcessor)); +exports.GaussianBlurBackgroundProcessor = GaussianBlurBackgroundProcessor; +//# sourceMappingURL=GaussianBlurBackgroundProcessor.js.map \ No newline at end of file diff --git a/es5/processors/background/GaussianBlurBackgroundProcessor.js.map b/es5/processors/background/GaussianBlurBackgroundProcessor.js.map new file mode 100644 index 0000000..fa34e02 --- /dev/null +++ b/es5/processors/background/GaussianBlurBackgroundProcessor.js.map @@ -0,0 +1 @@ +{"version":3,"file":"GaussianBlurBackgroundProcessor.js","sourceRoot":"","sources":["../../../lib/processors/background/GaussianBlurBackgroundProcessor.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,6DAAwF;AACxF,6CAAqD;AACrD,qCAAiD;AAgBjD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAsCG;AACH;IAAqD,mDAAmB;IAMtE;;;;OAIG;IACH,yCAAY,OAA+C;QAA3D,YACE,kBAAM,OAAO,CAAC,SAEf;QAZO,uBAAiB,GAAW,8BAAkB,CAAC;QACvD,8CAA8C;QAC7B,WAAK,GAAW,iCAAiC,CAAC;QASjE,KAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAiB,CAAC;;IACpD,CAAC;IAKD,sBAAI,6DAAgB;QAHpB;;WAEG;aACH;YACE,OAAO,IAAI,CAAC,iBAAiB,CAAC;QAChC,CAAC;QAED;;WAEG;aACH,UAAqB,MAAc;YACjC,IAAI,CAAC,MAAM,EAAE;gBACX,OAAO,CAAC,IAAI,CAAC,oDAA6C,8BAAkB,iBAAc,CAAC,CAAC;gBAC5F,MAAM,GAAG,8BAAkB,CAAC;aAC7B;YACD,IAAI,CAAC,iBAAiB,GAAG,MAAM,CAAC;QAClC,CAAC;;;OAXA;IAaS,gEAAsB,GAAhC;QACE,OAAO,0BAAkB,CAAC,IAAI,CAAC;IACjC,CAAC;IAES,wDAAc,GAAxB,UAAyB,UAAkE;QACzF,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE;YACxB,OAAO;SACR;QACD,IAAM,GAAG,GAAG,IAAI,CAAC,cAA0C,CAAC;QAC5D,GAAG,CAAC,MAAM,GAAG,eAAQ,IAAI,CAAC,iBAAiB,QAAK,CAAC;QACjD,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;IAClC,CAAC;IACH,sCAAC;AAAD,CAAC,AA9CD,CAAqD,yCAAmB,GA8CvE;AA9CY,0EAA+B","sourcesContent":["import { BackgroundProcessor, BackgroundProcessorOptions } from './BackgroundProcessor';\nimport { BLUR_FILTER_RADIUS } from '../../constants';\nimport { WebGL2PipelineType } from '../../types';\n\n/**\n * Options passed to [[GaussianBlurBackgroundProcessor]] constructor.\n */\nexport interface GaussianBlurBackgroundProcessorOptions extends BackgroundProcessorOptions {\n 
/**\n * The background blur filter radius to use in pixels.\n * @default\n * ```html\n * 15\n * ```\n */\n blurFilterRadius?: number;\n}\n\n/**\n * The GaussianBlurBackgroundProcessor, when added to a VideoTrack,\n * applies a gaussian blur filter on the background in each video frame\n * and leaves the foreground (person(s)) untouched. Each instance of\n * GaussianBlurBackgroundProcessor should be added to only one VideoTrack\n * at a time to prevent overlapping of image data from multiple VideoTracks.\n *\n * @example\n *\n * ```ts\n * import { createLocalVideoTrack } from 'twilio-video';\n * import { Pipeline, GaussianBlurBackgroundProcessor } from '@twilio/video-processors';\n *\n * const blurBackground = new GaussianBlurBackgroundProcessor({\n * assetsPath: 'https://my-server-path/assets',\n * pipeline: Pipeline.WebGL2,\n * debounce: true,\n * });\n *\n * blurBackground.loadModel().then(() => {\n * createLocalVideoTrack({\n * // Increasing the capture resolution decreases the output FPS\n * // especially on browsers that do not support SIMD\n * // such as desktop Safari and iOS browsers, or on Chrome\n * // with capture resolutions above 640x480 for webgl2.\n * width: 640,\n * height: 480,\n * // Any frame rate above 24 fps on desktop browsers increase CPU\n * // usage without noticeable increase in quality.\n * frameRate: 24\n * }).then(track => {\n * track.addProcessor(blurBackground, {\n * inputFrameBufferType: 'video',\n * outputFrameBufferContextType: 'webgl2',\n * });\n * });\n * });\n * ```\n */\nexport class GaussianBlurBackgroundProcessor extends BackgroundProcessor {\n\n private _blurFilterRadius: number = BLUR_FILTER_RADIUS;\n // tslint:disable-next-line no-unused-variable\n private readonly _name: string = 'GaussianBlurBackgroundProcessor';\n\n /**\n * Construct a GaussianBlurBackgroundProcessor. Default values will be used for\n * any missing properties in [[GaussianBlurBackgroundProcessorOptions]], and\n * invalid properties will be ignored.\n */\n constructor(options: GaussianBlurBackgroundProcessorOptions) {\n super(options);\n this.blurFilterRadius = options.blurFilterRadius!;\n }\n\n /**\n * The current background blur filter radius in pixels.\n */\n get blurFilterRadius(): number {\n return this._blurFilterRadius;\n }\n\n /**\n * Set a new background blur filter radius in pixels.\n */\n set blurFilterRadius(radius: number) {\n if (!radius) {\n console.warn(`Valid blur filter radius not found. Using ${BLUR_FILTER_RADIUS} as default.`);\n radius = BLUR_FILTER_RADIUS;\n }\n this._blurFilterRadius = radius;\n }\n\n protected _getWebGL2PipelineType(): WebGL2PipelineType {\n return WebGL2PipelineType.Blur;\n }\n\n protected _setBackground(inputFrame: OffscreenCanvas | HTMLCanvasElement | HTMLVideoElement): void {\n if (!this._outputContext) {\n return;\n }\n const ctx = this._outputContext as CanvasRenderingContext2D;\n ctx.filter = `blur(${this._blurFilterRadius}px)`;\n ctx.drawImage(inputFrame, 0, 0);\n }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/background/VirtualBackgroundProcessor.d.ts b/es5/processors/background/VirtualBackgroundProcessor.d.ts new file mode 100644 index 0000000..d3ab58c --- /dev/null +++ b/es5/processors/background/VirtualBackgroundProcessor.d.ts @@ -0,0 +1,104 @@ +import { BackgroundProcessor, BackgroundProcessorOptions } from './BackgroundProcessor'; +import { ImageFit, WebGL2PipelineType } from '../../types'; +/** + * Options passed to [[VirtualBackgroundProcessor]] constructor. 
+ */ +export interface VirtualBackgroundProcessorOptions extends BackgroundProcessorOptions { + /** + * The HTMLImageElement to use for background replacement. + * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow + * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image) + * when loading the image from a different origin. Failing to do so will result in an empty output frame. + */ + backgroundImage: HTMLImageElement; + /** + * The [[ImageFit]] to use for positioning of the background image in the viewport. Only the Canvas2D [[Pipeline]] + * supports this option. WebGL2 ignores this option and falls back to Cover. + * @default + * ```html + * 'Fill' + * ``` + */ + fitType?: ImageFit; +} +/** + * The VirtualBackgroundProcessor, when added to a VideoTrack, + * replaces the background in each video frame with a given image, + * and leaves the foreground (person(s)) untouched. Each instance of + * VirtualBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. + * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors'; + * + * let virtualBackground; + * const img = new Image(); + * + * img.onload = () => { + * virtualBackground = new VirtualBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * backgroundImage: img, + * pipeline: Pipeline.WebGL2, + * + * // Desktop Safari and iOS browsers do not support SIMD. + * // Set debounce to true to achieve acceptable performance. + * debounce: isSafari(), + * }); + * + * virtualBackground.loadModel().then(() => { + * createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * // Any frame rate above 24 fps on desktop browsers increases CPU + * // usage without noticeable increase in quality. + * frameRate: 24 + * }).then(track => { + * track.addProcessor(virtualBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * }); + * }); + * }; + * img.src = '/background.jpg'; + * ``` + */ +export declare class VirtualBackgroundProcessor extends BackgroundProcessor { + private _fitType; + private readonly _name; + /** + * Construct a VirtualBackgroundProcessor. Default values will be used for + * any missing optional properties in [[VirtualBackgroundProcessorOptions]], + * and invalid properties will be ignored. + */ + constructor(options: VirtualBackgroundProcessorOptions); + /** + * The HTMLImageElement representing the current background image. + */ + get backgroundImage(): HTMLImageElement; + /** + * Set an HTMLImageElement as the new background image. + * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow + * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image) + * when loading the image from a different origin. Failing to do so will result in an empty output frame. + */ + set backgroundImage(image: HTMLImageElement); + /** + * The current [[ImageFit]] for positioning of the background image in the viewport. 
+ */ + get fitType(): ImageFit; + /** + * Set a new [[ImageFit]] to be used for positioning the background image in the viewport. + */ + set fitType(fitType: ImageFit); + protected _getWebGL2PipelineType(): WebGL2PipelineType; + protected _setBackground(): void; + private _getFitPosition; +} diff --git a/es5/processors/background/VirtualBackgroundProcessor.js b/es5/processors/background/VirtualBackgroundProcessor.js new file mode 100644 index 0000000..5c8416f --- /dev/null +++ b/es5/processors/background/VirtualBackgroundProcessor.js @@ -0,0 +1,186 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VirtualBackgroundProcessor = void 0; +var BackgroundProcessor_1 = require("./BackgroundProcessor"); +var types_1 = require("../../types"); +/** + * The VirtualBackgroundProcessor, when added to a VideoTrack, + * replaces the background in each video frame with a given image, + * and leaves the foreground (person(s)) untouched. Each instance of + * VirtualBackgroundProcessor should be added to only one VideoTrack + * at a time to prevent overlapping of image data from multiple VideoTracks. + * + * @example + * + * ```ts + * import { createLocalVideoTrack } from 'twilio-video'; + * import { Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors'; + * + * let virtualBackground; + * const img = new Image(); + * + * img.onload = () => { + * virtualBackground = new VirtualBackgroundProcessor({ + * assetsPath: 'https://my-server-path/assets', + * backgroundImage: img, + * pipeline: Pipeline.WebGL2, + * + * // Desktop Safari and iOS browsers do not support SIMD. + * // Set debounce to true to achieve acceptable performance. + * debounce: isSafari(), + * }); + * + * virtualBackground.loadModel().then(() => { + * createLocalVideoTrack({ + * // Increasing the capture resolution decreases the output FPS + * // especially on browsers that do not support SIMD + * // such as desktop Safari and iOS browsers, or on Chrome + * // with capture resolutions above 640x480 for webgl2. + * width: 640, + * height: 480, + * // Any frame rate above 24 fps on desktop browsers increases CPU + * // usage without noticeable increase in quality. + * frameRate: 24 + * }).then(track => { + * track.addProcessor(virtualBackground, { + * inputFrameBufferType: 'video', + * outputFrameBufferContextType: 'webgl2', + * }); + * }); + * }); + * }; + * img.src = '/background.jpg'; + * ``` + */ +var VirtualBackgroundProcessor = /** @class */ (function (_super) { + __extends(VirtualBackgroundProcessor, _super); + /** + * Construct a VirtualBackgroundProcessor. Default values will be used for + * any missing optional properties in [[VirtualBackgroundProcessorOptions]], + * and invalid properties will be ignored. 
+ */ + function VirtualBackgroundProcessor(options) { + var _this = _super.call(this, options) || this; + // tslint:disable-next-line no-unused-variable + _this._name = 'VirtualBackgroundProcessor'; + _this.backgroundImage = options.backgroundImage; + _this.fitType = options.fitType; + return _this; + } + Object.defineProperty(VirtualBackgroundProcessor.prototype, "backgroundImage", { + /** + * The HTMLImageElement representing the current background image. + */ + get: function () { + return this._backgroundImage; + }, + /** + * Set an HTMLImageElement as the new background image. + * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow + * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image) + * when loading the image from a different origin. Failing to do so will result in an empty output frame. + */ + set: function (image) { + var _a; + if (!image || !image.complete || !image.naturalHeight) { + throw new Error('Invalid image. Make sure that the image is an HTMLImageElement and has been successfully loaded'); + } + this._backgroundImage = image; + // Triggers recreation of the pipeline in the next processFrame call + (_a = this._webgl2Pipeline) === null || _a === void 0 ? void 0 : _a.cleanUp(); + this._webgl2Pipeline = null; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(VirtualBackgroundProcessor.prototype, "fitType", { + /** + * The current [[ImageFit]] for positioning of the background image in the viewport. + */ + get: function () { + return this._fitType; + }, + /** + * Set a new [[ImageFit]] to be used for positioning the background image in the viewport. + */ + set: function (fitType) { + var validTypes = Object.keys(types_1.ImageFit); + if (!validTypes.includes(fitType)) { + console.warn("Valid fitType not found. 
Using '".concat(types_1.ImageFit.Fill, "' as default.")); + fitType = types_1.ImageFit.Fill; + } + this._fitType = fitType; + }, + enumerable: false, + configurable: true + }); + VirtualBackgroundProcessor.prototype._getWebGL2PipelineType = function () { + return types_1.WebGL2PipelineType.Image; + }; + VirtualBackgroundProcessor.prototype._setBackground = function () { + if (!this._outputContext || !this._outputCanvas) { + return; + } + var img = this._backgroundImage; + var imageWidth = img.naturalWidth; + var imageHeight = img.naturalHeight; + var canvasWidth = this._outputCanvas.width; + var canvasHeight = this._outputCanvas.height; + var ctx = this._outputContext; + if (this._fitType === types_1.ImageFit.Fill) { + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, 0, 0, canvasWidth, canvasHeight); + } + else if (this._fitType === types_1.ImageFit.None) { + ctx.drawImage(img, 0, 0, imageWidth, imageHeight); + } + else if (this._fitType === types_1.ImageFit.Contain) { + var _a = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, types_1.ImageFit.Contain), x = _a.x, y = _a.y, w = _a.w, h = _a.h; + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h); + } + else if (this._fitType === types_1.ImageFit.Cover) { + var _b = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, types_1.ImageFit.Cover), x = _b.x, y = _b.y, w = _b.w, h = _b.h; + ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h); + } + }; + VirtualBackgroundProcessor.prototype._getFitPosition = function (contentWidth, contentHeight, viewportWidth, viewportHeight, type) { + // Calculate new content width to fit viewport width + var factor = viewportWidth / contentWidth; + var newContentWidth = viewportWidth; + var newContentHeight = factor * contentHeight; + // Scale down the resulting height and width more + // to fit viewport height if the content still exceeds it + if ((type === types_1.ImageFit.Contain && newContentHeight > viewportHeight) + || (type === types_1.ImageFit.Cover && viewportHeight > newContentHeight)) { + factor = viewportHeight / newContentHeight; + newContentWidth = factor * newContentWidth; + newContentHeight = viewportHeight; + } + // Calculate the destination top left corner to center the content + var x = (viewportWidth - newContentWidth) / 2; + var y = (viewportHeight - newContentHeight) / 2; + return { + x: x, + y: y, + w: newContentWidth, + h: newContentHeight, + }; + }; + return VirtualBackgroundProcessor; +}(BackgroundProcessor_1.BackgroundProcessor)); +exports.VirtualBackgroundProcessor = VirtualBackgroundProcessor; +//# sourceMappingURL=VirtualBackgroundProcessor.js.map \ No newline at end of file diff --git a/es5/processors/background/VirtualBackgroundProcessor.js.map b/es5/processors/background/VirtualBackgroundProcessor.js.map new file mode 100644 index 0000000..b657ea4 --- /dev/null +++ b/es5/processors/background/VirtualBackgroundProcessor.js.map @@ -0,0 +1 @@ 
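The `_getFitPosition` helper compiled above implements standard letterbox math in two steps: scale the content to match the viewport width, then rescale against the viewport height if `Contain` still overflows it (or `Cover` still underfills it), and finally center the result. A standalone sketch of the same arithmetic with a worked example (the `Fit` type and function here are illustrative re-implementations, not the library's exports):

```ts
// Re-derivation of the letterboxing math in _getFitPosition above.
type Fit = 'Contain' | 'Cover';

function getFitPosition(cw: number, ch: number, vw: number, vh: number, type: Fit) {
  let factor = vw / cw;  // step 1: scale content to the viewport width
  let w = vw;
  let h = factor * ch;
  // step 2: Contain shrinks further if too tall; Cover grows further if too short
  if ((type === 'Contain' && h > vh) || (type === 'Cover' && vh > h)) {
    factor = vh / h;
    w = factor * w;
    h = vh;
  }
  return { x: (vw - w) / 2, y: (vh - h) / 2, w, h }; // center in the viewport
}

// A 1920x1080 image drawn into a 640x480 canvas:
getFitPosition(1920, 1080, 640, 480, 'Contain'); // { x: 0, y: 60, w: 640, h: 360 }  (letterboxed)
getFitPosition(1920, 1080, 640, 480, 'Cover');   // { x: -106.67, y: 0, w: 853.33, h: 480 } (cropped)
```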
+{"version":3,"file":"VirtualBackgroundProcessor.js","sourceRoot":"","sources":["../../../lib/processors/background/VirtualBackgroundProcessor.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,6DAAwF;AACxF,qCAA2D;AAyB3D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgDG;AACH;IAAgD,8CAAmB;IAMjE;;;;OAIG;IACH,oCAAY,OAA0C;QAAtD,YACE,kBAAM,OAAO,CAAC,SAGf;QAZD,8CAA8C;QAC7B,WAAK,GAAW,4BAA4B,CAAC;QAS5D,KAAI,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;QAC/C,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAQ,CAAC;;IAClC,CAAC;IAKD,sBAAI,uDAAe;QAHnB;;WAEG;aACH;YACE,OAAO,IAAI,CAAC,gBAAiB,CAAC;QAChC,CAAC;QAED;;;;;WAKG;aACH,UAAoB,KAAuB;;YACzC,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,QAAQ,IAAI,CAAC,KAAK,CAAC,aAAa,EAAE;gBACrD,MAAM,IAAI,KAAK,CAAC,iGAAiG,CAAC,CAAC;aACpH;YACD,IAAI,CAAC,gBAAgB,GAAG,KAAK,CAAC;YAE9B,oEAAoE;YACpE,MAAA,IAAI,CAAC,eAAe,0CAAE,OAAO,EAAE,CAAC;YAChC,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC;QAC9B,CAAC;;;OAjBA;IAsBD,sBAAI,+CAAO;QAHX;;WAEG;aACH;YACE,OAAO,IAAI,CAAC,QAAQ,CAAC;QACvB,CAAC;QAED;;WAEG;aACH,UAAY,OAAiB;YAC3B,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,gBAAQ,CAAC,CAAC;YACzC,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,OAAc,CAAC,EAAE;gBACxC,OAAO,CAAC,IAAI,CAAC,0CAAmC,gBAAQ,CAAC,IAAI,kBAAe,CAAC,CAAC;gBAC9E,OAAO,GAAG,gBAAQ,CAAC,IAAI,CAAC;aACzB;YACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;QAC1B,CAAC;;;OAZA;IAcS,2DAAsB,GAAhC;QACE,OAAO,0BAAkB,CAAC,KAAK,CAAC;IAClC,CAAC;IAES,mDAAc,GAAxB;QACE,IAAI,CAAC,IAAI,CAAC,cAAc,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;YAC/C,OAAO;SACR;QACD,IAAM,GAAG,GAAG,IAAI,CAAC,gBAAiB,CAAC;QACnC,IAAM,UAAU,GAAG,GAAG,CAAC,YAAY,CAAC;QACpC,IAAM,WAAW,GAAG,GAAG,CAAC,aAAa,CAAC;QACtC,IAAM,WAAW,GAAG,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC;QAC7C,IAAM,YAAY,GAAG,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC;QAC/C,IAAM,GAAG,GAAG,IAAI,CAAC,cAA0C,CAAC;QAE5D,IAAI,IAAI,CAAC,QAAQ,KAAK,gBAAQ,CAAC,IAAI,EAAE;YACnC,GAAG,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,UAAU,EAAE,WAAW,EAAE,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC;SACpF;aAAM,IAAI,IAAI,CAAC,QAAQ,KAAK,gBAAQ,CAAC,IAAI,EAAE;YAC1C,GAAG,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,UAAU,EAAE,WAAW,CAAC,CAAC;SACnD;aAAM,IAAI,IAAI,CAAC,QAAQ,KAAK,gBAAQ,CAAC,OAAO,EAAE;YACvC,IAAA,KAAiB,IAAI,CAAC,eAAe,CAAC,UAAU,EAAE,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,gBAAQ,CAAC,OAAO,CAAC,EAAzG,CAAC,OAAA,EAAE,CAAC,OAAA,EAAE,CAAC,OAAA,EAAE,CAAC,OAA+F,CAAC;YAClH,GAAG,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,UAAU,EAAE,WAAW,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;SAC/D;aAAM,IAAI,IAAI,CAAC,QAAQ,KAAK,gBAAQ,CAAC,KAAK,EAAE;YACrC,IAAA,KAAiB,IAAI,CAAC,eAAe,CAAC,UAAU,EAAE,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,gBAAQ,CAAC,KAAK,CAAC,EAAvG,CAAC,OAAA,EAAE,CAAC,OAAA,EAAE,CAAC,OAAA,EAAE,CAAC,OAA6F,CAAC;YAChH,GAAG,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,UAAU,EAAE,WAAW,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;SAC/D;IACH,CAAC;IAEO,oDAAe,GAAvB,UAAwB,YAAoB,EAAE,aAAqB,EACjE,aAAqB,EAAE,cAAsB,EAAE,IAAc;QAG7D,oDAAoD;QACpD,IAAI,MAAM,GAAG,aAAa,GAAG,YAAY,CAAC;QAC1C,IAAI,eAAe,GAAG,aAAa,CAAC;QACpC,IAAI,gBAAgB,GAAG,MAAM,GAAG,aAAa,CAAC;QAE9C,iDAAiD;QACjD,yDAAyD;QACzD,IAAI,CAAC,IAAI,KAAK,gBAAQ,CAAC,OAAO,IAAI,gBAAgB,GAAG,cAAc,CAAC;eAC/D,CAAC,IAAI,KAAK,gBAAQ,CAAC,KAAK,IAAI,cAAc,GAAG,gBAAgB,CAAC,EAAE;YACnE,MAAM,GAAG,cAAc,GAAG,gBAAgB,CAAC;YAC3C,eAAe,GAAG,MAAM,GAAG,eAAe,CAAC;YAC3C,gBAAgB,GAAG,cAAc,CAAC;SACnC;QAED,kEAAkE;QAClE,IAAM,CAAC,GAAG,CAAC,aAAa,GAAG,eAAe,CAAC,GAAG,CAAC,CAAC;QAChD,IAAM,CAAC,GAAG,CAAC,cAAc,GAAG,gBAAgB,CAAC,GAAG,CAAC,CAAC;QAElD,OAAO;YACL,CAAC,GAAA;YAAE,CAAC,GAAA;YACJ,CAAC,EAAE,eAAe;YAClB,CAAC,EAAE,gBAAgB;SACpB,CAAC;IACJ,CAAC;IACH,iCAAC;AAAD,CAAC,AApHD,CAAgD,yCAAmB,GAoHlE;AApHY,gEAA0B","sourcesContent":["import { BackgroundProcessor, BackgroundProcessorOptions } from 
'./BackgroundProcessor';\nimport { ImageFit, WebGL2PipelineType } from '../../types';\n\n/**\n * Options passed to [[VirtualBackgroundProcessor]] constructor.\n */\nexport interface VirtualBackgroundProcessorOptions extends BackgroundProcessorOptions {\n /**\n * The HTMLImageElement to use for background replacement.\n * An error will be raised if the image hasn't been fully loaded yet. Additionally, the image must follow\n * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image)\n * when loading the image from a different origin. Failing to do so will result to an empty output frame.\n */\n backgroundImage: HTMLImageElement;\n\n /**\n * The [[ImageFit]] to use for positioning of the background image in the viewport. Only the Canvas2D [[Pipeline]]\n * supports this option. WebGL2 ignores this option and falls back to Cover.\n * @default\n * ```html\n * 'Fill'\n * ```\n */\n fitType?: ImageFit;\n}\n\n/**\n * The VirtualBackgroundProcessor, when added to a VideoTrack,\n * replaces the background in each video frame with a given image,\n * and leaves the foreground (person(s)) untouched. Each instance of\n * VirtualBackgroundProcessor should be added to only one VideoTrack\n * at a time to prevent overlapping of image data from multiple VideoTracks.\n *\n * @example\n *\n * ```ts\n * import { createLocalVideoTrack } from 'twilio-video';\n * import { Pipeline, VirtualBackgroundProcessor } from '@twilio/video-processors';\n *\n * let virtualBackground;\n * const img = new Image();\n *\n * img.onload = () => {\n * virtualBackground = new VirtualBackgroundProcessor({\n * assetsPath: 'https://my-server-path/assets',\n * backgroundImage: img,\n * pipeline: Pipeline.WebGL2,\n *\n * // Desktop Safari and iOS browsers do not support SIMD.\n * // Set debounce to true to achieve an acceptable performance.\n * debounce: isSafari(),\n * });\n *\n * virtualBackground.loadModel().then(() => {\n * createLocalVideoTrack({\n * // Increasing the capture resolution decreases the output FPS\n * // especially on browsers that do not support SIMD\n * // such as desktop Safari and iOS browsers, or on Chrome\n * // with capture resolutions above 640x480 for webgl2.\n * width: 640,\n * height: 480,\n * // Any frame rate above 24 fps on desktop browsers increase CPU\n * // usage without noticeable increase in quality.\n * frameRate: 24\n * }).then(track => {\n * track.addProcessor(virtualBackground, {\n * inputFrameBufferType: 'video',\n * outputFrameBufferContextType: 'webgl2',\n * });\n * });\n * });\n * };\n * img.src = '/background.jpg';\n * ```\n */\nexport class VirtualBackgroundProcessor extends BackgroundProcessor {\n\n private _fitType!: ImageFit;\n // tslint:disable-next-line no-unused-variable\n private readonly _name: string = 'VirtualBackgroundProcessor';\n\n /**\n * Construct a VirtualBackgroundProcessor. Default values will be used for\n * any missing optional properties in [[VirtualBackgroundProcessorOptions]],\n * and invalid properties will be ignored.\n */\n constructor(options: VirtualBackgroundProcessorOptions) {\n super(options);\n this.backgroundImage = options.backgroundImage;\n this.fitType = options.fitType!;\n }\n\n /**\n * The HTMLImageElement representing the current background image.\n */\n get backgroundImage(): HTMLImageElement {\n return this._backgroundImage!;\n }\n\n /**\n * Set an HTMLImageElement as the new background image.\n * An error will be raised if the image hasn't been fully loaded yet. 
Additionally, the image must follow\n * [security guidelines](https://developer.mozilla.org/en-US/docs/Web/HTML/CORS_enabled_image)\n * when loading the image from a different origin. Failing to do so will result to an empty output frame.\n */\n set backgroundImage(image: HTMLImageElement) {\n if (!image || !image.complete || !image.naturalHeight) {\n throw new Error('Invalid image. Make sure that the image is an HTMLImageElement and has been successfully loaded');\n }\n this._backgroundImage = image;\n\n // Triggers recreation of the pipeline in the next processFrame call\n this._webgl2Pipeline?.cleanUp();\n this._webgl2Pipeline = null;\n }\n\n /**\n * The current [[ImageFit]] for positioning of the background image in the viewport.\n */\n get fitType(): ImageFit {\n return this._fitType;\n }\n\n /**\n * Set a new [[ImageFit]] to be used for positioning the background image in the viewport.\n */\n set fitType(fitType: ImageFit) {\n const validTypes = Object.keys(ImageFit);\n if (!validTypes.includes(fitType as any)) {\n console.warn(`Valid fitType not found. Using '${ImageFit.Fill}' as default.`);\n fitType = ImageFit.Fill;\n }\n this._fitType = fitType;\n }\n\n protected _getWebGL2PipelineType(): WebGL2PipelineType {\n return WebGL2PipelineType.Image;\n }\n\n protected _setBackground(): void {\n if (!this._outputContext || !this._outputCanvas) {\n return;\n }\n const img = this._backgroundImage!;\n const imageWidth = img.naturalWidth;\n const imageHeight = img.naturalHeight;\n const canvasWidth = this._outputCanvas.width;\n const canvasHeight = this._outputCanvas.height;\n const ctx = this._outputContext as CanvasRenderingContext2D;\n\n if (this._fitType === ImageFit.Fill) {\n ctx.drawImage(img, 0, 0, imageWidth, imageHeight, 0, 0, canvasWidth, canvasHeight);\n } else if (this._fitType === ImageFit.None) {\n ctx.drawImage(img, 0, 0, imageWidth, imageHeight);\n } else if (this._fitType === ImageFit.Contain) {\n const { x, y, w, h } = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, ImageFit.Contain);\n ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h);\n } else if (this._fitType === ImageFit.Cover) {\n const { x, y, w, h } = this._getFitPosition(imageWidth, imageHeight, canvasWidth, canvasHeight, ImageFit.Cover);\n ctx.drawImage(img, 0, 0, imageWidth, imageHeight, x, y, w, h);\n }\n }\n\n private _getFitPosition(contentWidth: number, contentHeight: number,\n viewportWidth: number, viewportHeight: number, type: ImageFit)\n : { h: number, w: number, x: number, y: number } {\n\n // Calculate new content width to fit viewport width\n let factor = viewportWidth / contentWidth;\n let newContentWidth = viewportWidth;\n let newContentHeight = factor * contentHeight;\n\n // Scale down the resulting height and width more\n // to fit viewport height if the content still exceeds it\n if ((type === ImageFit.Contain && newContentHeight > viewportHeight)\n || (type === ImageFit.Cover && viewportHeight > newContentHeight)) {\n factor = viewportHeight / newContentHeight;\n newContentWidth = factor * newContentWidth;\n newContentHeight = viewportHeight;\n }\n\n // Calculate the destination top left corner to center the content\n const x = (viewportWidth - newContentWidth) / 2;\n const y = (viewportHeight - newContentHeight) / 2;\n\n return {\n x, y,\n w: newContentWidth,\n h: newContentHeight,\n };\n }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/backgroundHelper.d.ts b/es5/processors/webgl2/helpers/backgroundHelper.d.ts new file mode 
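Both copies of the `backgroundImage` documentation above impose the same two preconditions: the image must be fully decoded (the setter throws unless `image.complete` and `image.naturalHeight` are truthy) and must be CORS-clean, or the output frame comes out empty. A small loader sketch that satisfies both before the processor is constructed (the function name and URL handling are illustrative, not part of the library):

```ts
// Sketch: resolve only once the image is safe to hand to backgroundImage.
function loadBackgroundImage(url: string): Promise<HTMLImageElement> {
  return new Promise((resolve, reject) => {
    const img = new Image();
    img.crossOrigin = 'anonymous'; // keeps the canvas untainted for cross-origin assets
    img.onload = () => resolve(img); // onload implies complete === true and naturalHeight > 0
    img.onerror = reject;
    img.src = url;
  });
}
```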
100644 index 0000000..46a426f --- /dev/null +++ b/es5/processors/webgl2/helpers/backgroundHelper.d.ts @@ -0,0 +1,4 @@ +export type BackgroundConfig = { + type: 'none' | 'blur' | 'image'; + url?: string; +}; diff --git a/es5/processors/webgl2/helpers/backgroundHelper.js b/es5/processors/webgl2/helpers/backgroundHelper.js new file mode 100644 index 0000000..cc5ab03 --- /dev/null +++ b/es5/processors/webgl2/helpers/backgroundHelper.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=backgroundHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/backgroundHelper.js.map b/es5/processors/webgl2/helpers/backgroundHelper.js.map new file mode 100644 index 0000000..f7a7337 --- /dev/null +++ b/es5/processors/webgl2/helpers/backgroundHelper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"backgroundHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/backgroundHelper.ts"],"names":[],"mappings":"","sourcesContent":["export type BackgroundConfig = {\n type: 'none' | 'blur' | 'image'\n url?: string\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/postProcessingHelper.d.ts b/es5/processors/webgl2/helpers/postProcessingHelper.d.ts new file mode 100644 index 0000000..595a46e --- /dev/null +++ b/es5/processors/webgl2/helpers/postProcessingHelper.d.ts @@ -0,0 +1,12 @@ +export type BlendMode = 'screen' | 'linearDodge'; +export type PostProcessingConfig = { + smoothSegmentationMask: boolean; + jointBilateralFilter: JointBilateralFilterConfig; + coverage: [number, number]; + lightWrapping: number; + blendMode: BlendMode; +}; +export type JointBilateralFilterConfig = { + sigmaSpace: number; + sigmaColor: number; +}; diff --git a/es5/processors/webgl2/helpers/postProcessingHelper.js b/es5/processors/webgl2/helpers/postProcessingHelper.js new file mode 100644 index 0000000..bf9a7db --- /dev/null +++ b/es5/processors/webgl2/helpers/postProcessingHelper.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=postProcessingHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/postProcessingHelper.js.map b/es5/processors/webgl2/helpers/postProcessingHelper.js.map new file mode 100644 index 0000000..cf92490 --- /dev/null +++ b/es5/processors/webgl2/helpers/postProcessingHelper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"postProcessingHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/postProcessingHelper.ts"],"names":[],"mappings":"","sourcesContent":["export type BlendMode = 'screen' | 'linearDodge'\n\nexport type PostProcessingConfig = {\n smoothSegmentationMask: boolean\n jointBilateralFilter: JointBilateralFilterConfig\n coverage: [number, number]\n lightWrapping: number\n blendMode: BlendMode\n}\n\nexport type JointBilateralFilterConfig = {\n sigmaSpace: number\n sigmaColor: number\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/renderingPipelineHelper.d.ts b/es5/processors/webgl2/helpers/renderingPipelineHelper.d.ts new file mode 100644 index 0000000..a1c72c4 --- /dev/null +++ b/es5/processors/webgl2/helpers/renderingPipelineHelper.d.ts @@ -0,0 +1,6 @@ +import { PostProcessingConfig } from './postProcessingHelper'; +export type RenderingPipeline = { + render(): Promise<void>; + updatePostProcessingConfig(newPostProcessingConfig: PostProcessingConfig): void; + cleanUp(): void; +}; diff --git
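For reference, a value matching the `PostProcessingConfig` shape declared above might look as follows; the concrete numbers are illustrative placeholders, not defaults taken from this library:

```ts
import { PostProcessingConfig } from './postProcessingHelper';

// Hypothetical configuration exercising every field of the type above.
const postProcessingConfig: PostProcessingConfig = {
  smoothSegmentationMask: true,
  jointBilateralFilter: { sigmaSpace: 1, sigmaColor: 0.1 }, // edge-aware mask smoothing
  coverage: [0.5, 0.75],   // smoothstep edges applied to the person mask
  lightWrapping: 0.3,      // how much background light bleeds over the person edge
  blendMode: 'screen',     // how the wrapped light is composited
};
```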
a/es5/processors/webgl2/helpers/renderingPipelineHelper.js b/es5/processors/webgl2/helpers/renderingPipelineHelper.js new file mode 100644 index 0000000..4111c62 --- /dev/null +++ b/es5/processors/webgl2/helpers/renderingPipelineHelper.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=renderingPipelineHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/renderingPipelineHelper.js.map b/es5/processors/webgl2/helpers/renderingPipelineHelper.js.map new file mode 100644 index 0000000..1d2cab3 --- /dev/null +++ b/es5/processors/webgl2/helpers/renderingPipelineHelper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"renderingPipelineHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/renderingPipelineHelper.ts"],"names":[],"mappings":"","sourcesContent":["import { PostProcessingConfig } from './postProcessingHelper'\n\nexport type RenderingPipeline = {\n render(): Promise\n updatePostProcessingConfig(\n newPostProcessingConfig: PostProcessingConfig\n ): void\n // TODO Update background image only when loaded\n // updateBackgroundImage(backgroundImage: HTMLImageElement): void\n cleanUp(): void\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/segmentationHelper.d.ts b/es5/processors/webgl2/helpers/segmentationHelper.d.ts new file mode 100644 index 0000000..01bdece --- /dev/null +++ b/es5/processors/webgl2/helpers/segmentationHelper.d.ts @@ -0,0 +1,7 @@ +export type InputResolution = '640x360' | '256x256' | '256x144' | '160x96' | string; +export declare const inputResolutions: { + [resolution in InputResolution]: [number, number]; +}; +export type SegmentationConfig = { + inputResolution: InputResolution; +}; diff --git a/es5/processors/webgl2/helpers/segmentationHelper.js b/es5/processors/webgl2/helpers/segmentationHelper.js new file mode 100644 index 0000000..a150b9e --- /dev/null +++ b/es5/processors/webgl2/helpers/segmentationHelper.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.inputResolutions = void 0; +exports.inputResolutions = { + '640x360': [640, 360], + '256x256': [256, 256], + '256x144': [256, 144], + '160x96': [160, 96], +}; +//# sourceMappingURL=segmentationHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/segmentationHelper.js.map b/es5/processors/webgl2/helpers/segmentationHelper.js.map new file mode 100644 index 0000000..e612a03 --- /dev/null +++ b/es5/processors/webgl2/helpers/segmentationHelper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"segmentationHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/segmentationHelper.ts"],"names":[],"mappings":";;;AAEa,QAAA,gBAAgB,GAEzB;IACF,SAAS,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC;IACrB,SAAS,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC;IACrB,SAAS,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC;IACrB,QAAQ,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC;CACpB,CAAA","sourcesContent":["export type InputResolution = '640x360' | '256x256' | '256x144' | '160x96' | string\n\nexport const inputResolutions: {\n [resolution in InputResolution]: [number, number]\n} = {\n '640x360': [640, 360],\n '256x256': [256, 256],\n '256x144': [256, 144],\n '160x96': [160, 96],\n}\n\nexport type SegmentationConfig = {\n inputResolution: InputResolution\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/sourceHelper.d.ts b/es5/processors/webgl2/helpers/sourceHelper.d.ts new file mode 100644 index 0000000..b0fb557 --- 
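The `inputResolutions` table compiled above maps a `SegmentationConfig` to the tensor dimensions fed to the segmentation model; `256x144`, for instance, presumably pairs with the landscape selfie-segmentation model added elsewhere in this diff. A short usage sketch (the import path assumes the helper's location in this tree):

```ts
import { inputResolutions, SegmentationConfig } from './segmentationHelper';

// Resolve a config to concrete model input dimensions.
const config: SegmentationConfig = { inputResolution: '256x144' };
const [width, height] = inputResolutions[config.inputResolution];
console.log(width * height); // 36864 mask values per inference
```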
/dev/null +++ b/es5/processors/webgl2/helpers/sourceHelper.d.ts @@ -0,0 +1,9 @@ +export type SourceConfig = { + type: 'image' | 'video' | 'camera'; + url?: string; +}; +export type SourcePlayback = { + htmlElement: HTMLImageElement | HTMLVideoElement; + width: number; + height: number; +}; diff --git a/es5/processors/webgl2/helpers/sourceHelper.js b/es5/processors/webgl2/helpers/sourceHelper.js new file mode 100644 index 0000000..747a6bd --- /dev/null +++ b/es5/processors/webgl2/helpers/sourceHelper.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=sourceHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/sourceHelper.js.map b/es5/processors/webgl2/helpers/sourceHelper.js.map new file mode 100644 index 0000000..23ae68f --- /dev/null +++ b/es5/processors/webgl2/helpers/sourceHelper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"sourceHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/sourceHelper.ts"],"names":[],"mappings":"","sourcesContent":["export type SourceConfig = {\n type: 'image' | 'video' | 'camera'\n url?: string\n}\n\nexport type SourcePlayback = {\n htmlElement: HTMLImageElement | HTMLVideoElement\n width: number\n height: number\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/webglHelper.d.ts b/es5/processors/webgl2/helpers/webglHelper.d.ts new file mode 100644 index 0000000..fd5915e --- /dev/null +++ b/es5/processors/webgl2/helpers/webglHelper.d.ts @@ -0,0 +1,17 @@ +/** + * Use it along with boyswan.glsl-literal VSCode extension + * to get GLSL syntax highlighting. + * https://marketplace.visualstudio.com/items?itemName=boyswan.glsl-literal + * + * On VSCode OSS, boyswan.glsl-literal requires slevesque.shader extension + * to be installed as well. + * https://marketplace.visualstudio.com/items?itemName=slevesque.shader + */ +export declare const glsl: (template: { + raw: ArrayLike<string> | readonly string[]; +}, ...substitutions: any[]) => string; +export declare function createPiplelineStageProgram(gl: WebGL2RenderingContext, vertexShader: WebGLShader, fragmentShader: WebGLShader, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer): WebGLProgram; +export declare function createProgram(gl: WebGL2RenderingContext, vertexShader: WebGLShader, fragmentShader: WebGLShader): WebGLProgram; +export declare function compileShader(gl: WebGL2RenderingContext, shaderType: number, shaderSource: string): WebGLShader; +export declare function createTexture(gl: WebGL2RenderingContext, internalformat: number, width: number, height: number, minFilter?: GLint, magFilter?: GLint): WebGLTexture | null; +export declare function readPixelsAsync(gl: WebGL2RenderingContext, x: number, y: number, width: number, height: number, format: number, type: number, dest: ArrayBufferView): Promise<ArrayBufferView>; diff --git a/es5/processors/webgl2/helpers/webglHelper.js b/es5/processors/webgl2/helpers/webglHelper.js new file mode 100644 index 0000000..6588466 --- /dev/null +++ b/es5/processors/webgl2/helpers/webglHelper.js @@ -0,0 +1,157 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ?
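The declarations above are thin wrappers over the raw WebGL2 API: `glsl` is just a tagged alias of `String.raw` for editor highlighting, and `compileShader`/`createProgram` throw with the driver's info log on failure instead of returning silently broken objects. A minimal sketch of linking a trivial program with them (the shader sources here are placeholders, not the library's; `gl` is assumed to be an existing WebGL2 context):

```ts
import { compileShader, createProgram, glsl } from './webglHelper';

// Smallest possible use of the helpers declared above.
function buildPassthroughProgram(gl: WebGL2RenderingContext): WebGLProgram {
  const vs = compileShader(gl, gl.VERTEX_SHADER, glsl`#version 300 es
    in vec2 a_position;
    void main() { gl_Position = vec4(a_position, 0.0, 1.0); }`);
  const fs = compileShader(gl, gl.FRAGMENT_SHADER, glsl`#version 300 es
    precision highp float;
    out vec4 outColor;
    void main() { outColor = vec4(1.0); }`);
  return createProgram(gl, vs, fs); // throws with gl.getProgramInfoLog on link failure
}
```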
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readPixelsAsync = exports.createTexture = exports.compileShader = exports.createProgram = exports.createPiplelineStageProgram = exports.glsl = void 0; +/** + * Use it along with boyswan.glsl-literal VSCode extension + * to get GLSL syntax highlighting. + * https://marketplace.visualstudio.com/items?itemName=boyswan.glsl-literal + * + * On VSCode OSS, boyswan.glsl-literal requires slevesque.shader extension + * to be installed as well. 
+ * https://marketplace.visualstudio.com/items?itemName=slevesque.shader + */ +exports.glsl = String.raw; +function createPiplelineStageProgram(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer) { + var program = createProgram(gl, vertexShader, fragmentShader); + var positionAttributeLocation = gl.getAttribLocation(program, 'a_position'); + gl.enableVertexAttribArray(positionAttributeLocation); + gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); + gl.vertexAttribPointer(positionAttributeLocation, 2, gl.FLOAT, false, 0, 0); + var texCoordAttributeLocation = gl.getAttribLocation(program, 'a_texCoord'); + gl.enableVertexAttribArray(texCoordAttributeLocation); + gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer); + gl.vertexAttribPointer(texCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0); + return program; +} +exports.createPiplelineStageProgram = createPiplelineStageProgram; +function createProgram(gl, vertexShader, fragmentShader) { + var program = gl.createProgram(); + gl.attachShader(program, vertexShader); + gl.attachShader(program, fragmentShader); + gl.linkProgram(program); + if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { + throw new Error("Could not link WebGL program: ".concat(gl.getProgramInfoLog(program))); + } + return program; +} +exports.createProgram = createProgram; +function compileShader(gl, shaderType, shaderSource) { + var shader = gl.createShader(shaderType); + gl.shaderSource(shader, shaderSource); + gl.compileShader(shader); + if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { + throw new Error("Could not compile shader: ".concat(gl.getShaderInfoLog(shader))); + } + return shader; +} +exports.compileShader = compileShader; +function createTexture(gl, internalformat, width, height, minFilter, magFilter) { + if (minFilter === void 0) { minFilter = gl.NEAREST; } + if (magFilter === void 0) { magFilter = gl.NEAREST; } + var texture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, texture); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, minFilter); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, magFilter); + gl.texStorage2D(gl.TEXTURE_2D, 1, internalformat, width, height); + return texture; +} +exports.createTexture = createTexture; +function readPixelsAsync(gl, x, y, width, height, format, type, dest) { + return __awaiter(this, void 0, void 0, function () { + var buf; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + buf = gl.createBuffer(); + gl.bindBuffer(gl.PIXEL_PACK_BUFFER, buf); + gl.bufferData(gl.PIXEL_PACK_BUFFER, dest.byteLength, gl.STREAM_READ); + gl.readPixels(x, y, width, height, format, type, 0); + gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null); + return [4 /*yield*/, getBufferSubDataAsync(gl, gl.PIXEL_PACK_BUFFER, buf, 0, dest)]; + case 1: + _a.sent(); + gl.deleteBuffer(buf); + return [2 /*return*/, dest]; + } + }); + }); +} +exports.readPixelsAsync = readPixelsAsync; +function getBufferSubDataAsync(gl, target, buffer, srcByteOffset, dstBuffer, dstOffset, length) { + return __awaiter(this, void 0, void 0, function () { + var sync, res; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0); + gl.flush(); + return [4 /*yield*/, clientWaitAsync(gl, sync)]; + case 1: + res = _a.sent(); + gl.deleteSync(sync); + if (res !== gl.WAIT_FAILED) { + gl.bindBuffer(target, buffer); + 
gl.getBufferSubData(target, srcByteOffset, dstBuffer, dstOffset, length); + gl.bindBuffer(target, null); + } + return [2 /*return*/]; + } + }); + }); +} +function clientWaitAsync(gl, sync) { + return new Promise(function (resolve) { + function test() { + var res = gl.clientWaitSync(sync, 0, 0); + if (res === gl.WAIT_FAILED) { + resolve(res); + return; + } + if (res === gl.TIMEOUT_EXPIRED) { + requestAnimationFrame(test); + return; + } + resolve(res); + } + requestAnimationFrame(test); + }); +} +//# sourceMappingURL=webglHelper.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/helpers/webglHelper.js.map b/es5/processors/webgl2/helpers/webglHelper.js.map new file mode 100644 index 0000000..e4c829c --- /dev/null +++ b/es5/processors/webgl2/helpers/webglHelper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webglHelper.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/helpers/webglHelper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;;;;;GAQG;AACU,QAAA,IAAI,GAAG,MAAM,CAAC,GAAG,CAAA;AAE9B,SAAgB,2BAA2B,CACzC,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,cAA2B;IAE3B,IAAM,OAAO,GAAG,aAAa,CAAC,EAAE,EAAE,YAAY,EAAE,cAAc,CAAC,CAAA;IAE/D,IAAM,yBAAyB,GAAG,EAAE,CAAC,iBAAiB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAA;IAC7E,EAAE,CAAC,uBAAuB,CAAC,yBAAyB,CAAC,CAAA;IACrD,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,YAAY,EAAE,cAAc,CAAC,CAAA;IAC9C,EAAE,CAAC,mBAAmB,CAAC,yBAAyB,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IAE3E,IAAM,yBAAyB,GAAG,EAAE,CAAC,iBAAiB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAA;IAC7E,EAAE,CAAC,uBAAuB,CAAC,yBAAyB,CAAC,CAAA;IACrD,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,YAAY,EAAE,cAAc,CAAC,CAAA;IAC9C,EAAE,CAAC,mBAAmB,CAAC,yBAAyB,EAAE,CAAC,EAAE,EAAE,CAAC,KAAK,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IAE3E,OAAO,OAAO,CAAA;AAChB,CAAC;AApBD,kEAoBC;AAED,SAAgB,aAAa,CAC3B,EAA0B,EAC1B,YAAyB,EACzB,cAA2B;IAE3B,IAAM,OAAO,GAAG,EAAE,CAAC,aAAa,EAAG,CAAA;IACnC,EAAE,CAAC,YAAY,CAAC,OAAO,EAAE,YAAY,CAAC,CAAA;IACtC,EAAE,CAAC,YAAY,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACxC,EAAE,CAAC,WAAW,CAAC,OAAO,CAAC,CAAA;IACvB,IAAI,CAAC,EAAE,CAAC,mBAAmB,CAAC,OAAO,EAAE,EAAE,CAAC,WAAW,CAAC,EAAE;QACpD,MAAM,IAAI,KAAK,CACb,wCAAiC,EAAE,CAAC,iBAAiB,CAAC,OAAO,CAAC,CAAE,CACjE,CAAA;KACF;IACD,OAAO,OAAO,CAAA;AAChB,CAAC;AAfD,sCAeC;AAED,SAAgB,aAAa,CAC3B,EAA0B,EAC1B,UAAkB,EAClB,YAAoB;IAEpB,IAAM,MAAM,GAAG,EAAE,CAAC,YAAY,CAAC,UAAU,CAAE,CAAA;IAC3C,EAAE,CAAC,YAAY,CAAC,MAAM,EAAE,YAAY,CAAC,CAAA;IACrC,EAAE,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;IACxB,IAAI,CAAC,EAAE,CAAC,kBAAkB,CAAC,MAAM,EAAE,EAAE,CAAC,cAAc,CAAC,EAAE;QACrD,MAAM,IAAI,KAAK,CAAC,oCAA6B,EAAE,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAE,CAAC,CAAA;KAC5E;IACD,OAAO,MAAM,CAAA;AACf,CAAC;AAZD,sCAYC;AAED,SAAgB,aAAa,CAC3B,EAA0B,EAC1B,cAAsB,EACtB,KAAa,EACb,MAAc,EACd,SAA6B,EAC7B,SAA6B;IAD7B,0BAAA,EAAA,YAAmB,EAAE,CAAC,OAAO;IAC7B,0BAAA,EAAA,YAAmB,EAAE,CAAC,OAAO;IAE7B,IAAM,OAAO,GAAG,EAAE,CAAC,aAAa,EAAE,CAAA;IAClC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC,CAAA;IACtC,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,cAAc,EAAE,EAAE,CAAC,aAAa,CAAC,CAAA;IACpE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,cAAc,EAAE,EAAE,CAAC,aAAa,CAAC,CAAA;IACpE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,kBAAkB,EAAE,SAAS,CAAC,CAAA;IACjE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,kBAAkB,EAAE,SAAS,CAAC,CAAA;IACjE,EAAE,CAAC,YAAY,CAAC,EAAE,CAAC,UAAU,EAAE,CAAC,EAAE,cAAc,EAAE,KAAK,EAAE,MAAM,CAAC,CAAA;IAChE,OAAO,OAAO,CAAA;AAChB,CAAC;AAhBD,sCAgBC;AAED,SAAsB,eAAe,CACnC,EAA0B,EAC1B,CAAS,EACT,CAAS,EACT,KAAa,EACb,MAAc,EACd,MAAc,EACd,IAAY,EACZ,IAAqB;;;;;;oBAEf,GAAG,GAAG,EAAE,CAAC,YAAY,EAAG,CAAA;oBAC9B,EAAE,
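`readPixelsAsync` above avoids the usual GPU pipeline stall: instead of calling `gl.readPixels` straight into client memory, it reads into a `PIXEL_PACK_BUFFER`, inserts a fence (`fenceSync`), polls it with `clientWaitSync` on animation frames, and only then maps the buffer with `getBufferSubData`. A sketch of reading back an RGBA8 person mask with it (the mask dimensions and import path are illustrative):

```ts
import { readPixelsAsync } from './webglHelper';

// Non-blocking readback of an RGBA8 framebuffer region.
async function readMask(
  gl: WebGL2RenderingContext,
  width: number,
  height: number,
): Promise<Uint8Array> {
  const dest = new Uint8Array(width * height * 4); // 4 bytes per RGBA pixel
  await readPixelsAsync(gl, 0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, dest);
  return dest; // resolved only after the GPU fence has signaled
}
```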
CAAC,UAAU,CAAC,EAAE,CAAC,iBAAiB,EAAE,GAAG,CAAC,CAAA;oBACxC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,iBAAiB,EAAE,IAAI,CAAC,UAAU,EAAE,EAAE,CAAC,WAAW,CAAC,CAAA;oBACpE,EAAE,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;oBACnD,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,iBAAiB,EAAE,IAAK,CAAC,CAAA;oBAE1C,qBAAM,qBAAqB,CAAC,EAAE,EAAE,EAAE,CAAC,iBAAiB,EAAE,GAAG,EAAE,CAAC,EAAE,IAAI,CAAC,EAAA;;oBAAnE,SAAmE,CAAA;oBAEnE,EAAE,CAAC,YAAY,CAAC,GAAG,CAAC,CAAA;oBACpB,sBAAO,IAAI,EAAA;;;;CACZ;AApBD,0CAoBC;AAED,SAAe,qBAAqB,CAClC,EAA0B,EAC1B,MAAc,EACd,MAAmB,EACnB,aAAqB,EACrB,SAA0B,EAC1B,SAAkB,EAClB,MAAe;;;;;;oBAET,IAAI,GAAG,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,0BAA0B,EAAE,CAAC,CAAE,CAAA;oBAC5D,EAAE,CAAC,KAAK,EAAE,CAAA;oBACE,qBAAM,eAAe,CAAC,EAAE,EAAE,IAAI,CAAC,EAAA;;oBAArC,GAAG,GAAG,SAA+B;oBAC3C,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;oBAEnB,IAAI,GAAG,KAAK,EAAE,CAAC,WAAW,EAAE;wBAC1B,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;wBAC7B,EAAE,CAAC,gBAAgB,CAAC,MAAM,EAAE,aAAa,EAAE,SAAS,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;wBACxE,EAAE,CAAC,UAAU,CAAC,MAAM,EAAE,IAAK,CAAC,CAAA;qBAC7B;;;;;CACF;AAED,SAAS,eAAe,CAAC,EAA0B,EAAE,IAAe;IAClE,OAAO,IAAI,OAAO,CAAS,UAAC,OAAO;QACjC,SAAS,IAAI;YACX,IAAM,GAAG,GAAG,EAAE,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;YACzC,IAAI,GAAG,KAAK,EAAE,CAAC,WAAW,EAAE;gBAC1B,OAAO,CAAC,GAAG,CAAC,CAAA;gBACZ,OAAM;aACP;YACD,IAAI,GAAG,KAAK,EAAE,CAAC,eAAe,EAAE;gBAC9B,qBAAqB,CAAC,IAAI,CAAC,CAAA;gBAC3B,OAAM;aACP;YACD,OAAO,CAAC,GAAG,CAAC,CAAA;QACd,CAAC;QACD,qBAAqB,CAAC,IAAI,CAAC,CAAA;IAC7B,CAAC,CAAC,CAAA;AACJ,CAAC","sourcesContent":["/**\n * Use it along with boyswan.glsl-literal VSCode extension\n * to get GLSL syntax highlighting.\n * https://marketplace.visualstudio.com/items?itemName=boyswan.glsl-literal\n *\n * On VSCode OSS, boyswan.glsl-literal requires slevesque.shader extension\n * to be installed as well.\n * https://marketplace.visualstudio.com/items?itemName=slevesque.shader\n */\nexport const glsl = String.raw\n\nexport function createPiplelineStageProgram(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n fragmentShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer\n) {\n const program = createProgram(gl, vertexShader, fragmentShader)\n\n const positionAttributeLocation = gl.getAttribLocation(program, 'a_position')\n gl.enableVertexAttribArray(positionAttributeLocation)\n gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer)\n gl.vertexAttribPointer(positionAttributeLocation, 2, gl.FLOAT, false, 0, 0)\n\n const texCoordAttributeLocation = gl.getAttribLocation(program, 'a_texCoord')\n gl.enableVertexAttribArray(texCoordAttributeLocation)\n gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer)\n gl.vertexAttribPointer(texCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0)\n\n return program\n}\n\nexport function createProgram(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n fragmentShader: WebGLShader\n) {\n const program = gl.createProgram()!\n gl.attachShader(program, vertexShader)\n gl.attachShader(program, fragmentShader)\n gl.linkProgram(program)\n if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {\n throw new Error(\n `Could not link WebGL program: ${gl.getProgramInfoLog(program)}`\n )\n }\n return program\n}\n\nexport function compileShader(\n gl: WebGL2RenderingContext,\n shaderType: number,\n shaderSource: string\n) {\n const shader = gl.createShader(shaderType)!\n gl.shaderSource(shader, shaderSource)\n gl.compileShader(shader)\n if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {\n throw new Error(`Could not compile shader: 
${gl.getShaderInfoLog(shader)}`)\n }\n return shader\n}\n\nexport function createTexture(\n gl: WebGL2RenderingContext,\n internalformat: number,\n width: number,\n height: number,\n minFilter: GLint = gl.NEAREST,\n magFilter: GLint = gl.NEAREST\n) {\n const texture = gl.createTexture()\n gl.bindTexture(gl.TEXTURE_2D, texture)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, minFilter)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, magFilter)\n gl.texStorage2D(gl.TEXTURE_2D, 1, internalformat, width, height)\n return texture\n}\n\nexport async function readPixelsAsync(\n gl: WebGL2RenderingContext,\n x: number,\n y: number,\n width: number,\n height: number,\n format: number,\n type: number,\n dest: ArrayBufferView\n) {\n const buf = gl.createBuffer()!\n gl.bindBuffer(gl.PIXEL_PACK_BUFFER, buf)\n gl.bufferData(gl.PIXEL_PACK_BUFFER, dest.byteLength, gl.STREAM_READ)\n gl.readPixels(x, y, width, height, format, type, 0)\n gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null!)\n\n await getBufferSubDataAsync(gl, gl.PIXEL_PACK_BUFFER, buf, 0, dest)\n\n gl.deleteBuffer(buf)\n return dest\n}\n\nasync function getBufferSubDataAsync(\n gl: WebGL2RenderingContext,\n target: number,\n buffer: WebGLBuffer,\n srcByteOffset: number,\n dstBuffer: ArrayBufferView,\n dstOffset?: GLuint,\n length?: number\n) {\n const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0)!\n gl.flush()\n const res = await clientWaitAsync(gl, sync)\n gl.deleteSync(sync)\n\n if (res !== gl.WAIT_FAILED) {\n gl.bindBuffer(target, buffer)\n gl.getBufferSubData(target, srcByteOffset, dstBuffer, dstOffset, length)\n gl.bindBuffer(target, null!)\n }\n}\n\nfunction clientWaitAsync(gl: WebGL2RenderingContext, sync: WebGLSync) {\n return new Promise((resolve) => {\n function test() {\n const res = gl.clientWaitSync(sync, 0, 0)\n if (res === gl.WAIT_FAILED) {\n resolve(res)\n return\n }\n if (res === gl.TIMEOUT_EXPIRED) {\n requestAnimationFrame(test)\n return\n }\n resolve(res)\n }\n requestAnimationFrame(test)\n })\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/index.d.ts b/es5/processors/webgl2/index.d.ts new file mode 100644 index 0000000..a13dd35 --- /dev/null +++ b/es5/processors/webgl2/index.d.ts @@ -0,0 +1,8 @@ +/** + * This pipeline is based on Volcomix's react project. + * https://github.com/Volcomix/virtual-background + * It was modified and converted into a module to work with + * Twilio's Video Processor + */ +import { buildWebGL2Pipeline } from './pipelines/webgl2Pipeline'; +export { buildWebGL2Pipeline }; diff --git a/es5/processors/webgl2/index.js b/es5/processors/webgl2/index.js new file mode 100644 index 0000000..a4bf8ef --- /dev/null +++ b/es5/processors/webgl2/index.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildWebGL2Pipeline = void 0; +/** + * This pipeline is based on Volcomix's react project. 
+ * https://github.com/Volcomix/virtual-background + * It was modified and converted into a module to work with + * Twilio's Video Processor + */ +var webgl2Pipeline_1 = require("./pipelines/webgl2Pipeline"); +Object.defineProperty(exports, "buildWebGL2Pipeline", { enumerable: true, get: function () { return webgl2Pipeline_1.buildWebGL2Pipeline; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/index.js.map b/es5/processors/webgl2/index.js.map new file mode 100644 index 0000000..175f2a1 --- /dev/null +++ b/es5/processors/webgl2/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../lib/processors/webgl2/index.ts"],"names":[],"mappings":";;;AAAA;;;;;GAKG;AACH,6DAAgE;AAEvD,oGAFA,oCAAmB,OAEA","sourcesContent":["/**\n * This pipeline is based on Volcomix's react project.\n * https://github.com/Volcomix/virtual-background\n * It was modified and converted into a module to work with\n * Twilio's Video Processor\n */\nimport { buildWebGL2Pipeline } from './pipelines/webgl2Pipeline'\n\nexport { buildWebGL2Pipeline };\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/backgroundBlurStage.d.ts b/es5/processors/webgl2/pipelines/backgroundBlurStage.d.ts new file mode 100644 index 0000000..34f572a --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundBlurStage.d.ts @@ -0,0 +1,6 @@ +export type BackgroundBlurStage = { + render(): void; + updateCoverage(coverage: [number, number]): void; + cleanUp(): void; +}; +export declare function buildBackgroundBlurStage(gl: WebGL2RenderingContext, vertexShader: WebGLShader, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer, personMaskTexture: WebGLTexture, canvas: HTMLCanvasElement): BackgroundBlurStage; diff --git a/es5/processors/webgl2/pipelines/backgroundBlurStage.js b/es5/processors/webgl2/pipelines/backgroundBlurStage.js new file mode 100644 index 0000000..c5eae59 --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundBlurStage.js @@ -0,0 +1,122 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildBackgroundBlurStage = void 0; +var webglHelper_1 = require("../helpers/webglHelper"); +function buildBackgroundBlurStage(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) { + var blurPass = buildBlurPass(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas); + var blendPass = buildBlendPass(gl, positionBuffer, texCoordBuffer, canvas); + function render() { + blurPass.render(); + blendPass.render(); + } + function updateCoverage(coverage) { + blendPass.updateCoverage(coverage); + } + function cleanUp() { + blendPass.cleanUp(); + blurPass.cleanUp(); + } + return { + render: render, + updateCoverage: updateCoverage, + cleanUp: cleanUp, + }; +} +exports.buildBackgroundBlurStage = buildBackgroundBlurStage; +function buildBlurPass(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const 
float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n "]))); + var scale = 0.5; + var outputWidth = canvas.width * scale; + var outputHeight = canvas.height * scale; + var texelWidth = 1 / outputWidth; + var texelHeight = 1 / outputHeight; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize'); + var texture1 = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight, gl.NEAREST, gl.LINEAR); + var texture2 = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight, gl.NEAREST, gl.LINEAR); + var frameBuffer1 = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture1, 0); + var frameBuffer2 = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture2, 0); + gl.useProgram(program); + gl.uniform1i(personMaskLocation, 1); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, personMaskTexture); + for (var i = 0; i < 8; i++) { + gl.uniform2f(texelSizeLocation, 0, texelHeight); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1); + gl.drawArrays(gl.TRIANGLE_STRIP, 
0, 4); + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, texture1); + gl.uniform1i(inputFrameLocation, 2); + gl.uniform2f(texelSizeLocation, texelWidth, 0); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + gl.bindTexture(gl.TEXTURE_2D, texture2); + } + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer2); + gl.deleteFramebuffer(frameBuffer1); + gl.deleteTexture(texture2); + gl.deleteTexture(texture1); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { + render: render, + cleanUp: cleanUp, + }; +} +function buildBlendPass(gl, positionBuffer, texCoordBuffer, canvas) { + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_2 || (templateObject_2 = __makeTemplateObject(["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "], ["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "]))); + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_3 || (templateObject_3 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n "]))); + var outputWidth = canvas.width, outputHeight = canvas.height; + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var blurredInputFrame = gl.getUniformLocation(program, 'u_blurredInputFrame'); + var coverageLocation = gl.getUniformLocation(program, 'u_coverage'); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(personMaskLocation, 1); + gl.uniform1i(blurredInputFrame, 2); + gl.uniform2f(coverageLocation, 0, 1); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + 
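The blur pass above is a separable Gaussian run at half resolution: a 5-weight kernel mirrored around the center tap (9 taps per direction), drawn vertically into one framebuffer and horizontally into a second, ping-ponged through 8 iterations, with `(1.0 - personMask)` gating each tap so the person is never smeared into the background. The hard-coded weights are the classic normalized kernel, which a quick check confirms:

```ts
// Sanity check: the mirrored 9-tap kernel from the fragment shader above sums to 1.
const weight = [0.2270270270, 0.1945945946, 0.1216216216, 0.0540540541, 0.0162162162];
const sum = weight[0] + 2 * (weight[1] + weight[2] + weight[3] + weight[4]);
console.log(sum.toFixed(10)); // "1.0000000000"
```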
gl.bindFramebuffer(gl.FRAMEBUFFER, null); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateCoverage(coverage) { + gl.useProgram(program); + gl.uniform2f(coverageLocation, coverage[0], coverage[1]); + } + function cleanUp() { + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + gl.deleteShader(vertexShader); + } + return { + render: render, + updateCoverage: updateCoverage, + cleanUp: cleanUp, + }; +} +var templateObject_1, templateObject_2, templateObject_3; +//# sourceMappingURL=backgroundBlurStage.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/backgroundBlurStage.js.map b/es5/processors/webgl2/pipelines/backgroundBlurStage.js.map new file mode 100644 index 0000000..3ee3395 --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundBlurStage.js.map @@ -0,0 +1 @@ +{"version":3,"file":"backgroundBlurStage.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/backgroundBlurStage.ts"],"names":[],"mappings":";;;;;;;AAAA,sDAK+B;AAQ/B,SAAgB,wBAAwB,CACtC,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,iBAA+B,EAC/B,MAAyB;IAEzB,IAAM,QAAQ,GAAG,aAAa,CAC5B,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,iBAAiB,EACjB,MAAM,CACP,CAAA;IACD,IAAM,SAAS,GAAG,cAAc,CAAC,EAAE,EAAE,cAAc,EAAE,cAAc,EAAE,MAAM,CAAC,CAAA;IAE5E,SAAS,MAAM;QACb,QAAQ,CAAC,MAAM,EAAE,CAAA;QACjB,SAAS,CAAC,MAAM,EAAE,CAAA;IACpB,CAAC;IAED,SAAS,cAAc,CAAC,QAA0B;QAChD,SAAS,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAA;IACpC,CAAC;IAED,SAAS,OAAO;QACd,SAAS,CAAC,OAAO,EAAE,CAAA;QACnB,QAAQ,CAAC,OAAO,EAAE,CAAA;IACpB,CAAC;IAED,OAAO;QACL,MAAM,QAAA;QACN,cAAc,gBAAA;QACd,OAAO,SAAA;KACR,CAAA;AACH,CAAC;AArCD,4DAqCC;AAED,SAAS,aAAa,CACpB,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,iBAA+B,EAC/B,MAAyB;IAEzB,IAAM,oBAAoB,OAAG,kBAAI,yrCAAA,qnCAmChC,IAAA,CAAA;IAED,IAAM,KAAK,GAAG,GAAG,CAAA;IACjB,IAAM,WAAW,GAAG,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;IACxC,IAAM,YAAY,GAAG,MAAM,CAAC,MAAM,GAAG,KAAK,CAAA;IAC1C,IAAM,UAAU,GAAG,CAAC,GAAG,WAAW,CAAA;IAClC,IAAM,WAAW,GAAG,CAAC,GAAG,YAAY,CAAA;IAEpC,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,iBAAiB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAA;IACvE,IAAM,QAAQ,GAAG,IAAA,2BAAa,EAC5B,EAAE,EACF,EAAE,CAAC,KAAK,EACR,WAAW,EACX,YAAY,EACZ,EAAE,CAAC,OAAO,EACV,EAAE,CAAC,MAAM,CACV,CAAA;IACD,IAAM,QAAQ,GAAG,IAAA,2BAAa,EAC5B,EAAE,EACF,EAAE,CAAC,KAAK,EACR,WAAW,EACX,YAAY,EACZ,EAAE,CAAC,OAAO,EACV,EAAE,CAAC,MAAM,CACV,CAAA;IAED,IAAM,YAAY,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC3C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC,CAAA;IAChD,EAAE,CAAC,oBAAoB,CACrB,EAAE,CAAC,WAAW,EACd,EAAE,CAAC,iBAAiB,EACpB,EAAE,CAAC,UAAU,EACb,QAAQ,EACR,CAAC,CACF,CAAA;IAED,IAAM,YAAY,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC3C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC,CAAA;IAChD,EAAE,CAAC,oBAAoB,CACrB,EAAE,CAAC,WAAW,EACd,EAAE,CAAC,iBAAiB,EACpB,EAAE,CAAC,UAAU,EACb,QAAQ,EACR,CAAC,CACF,CAAA;IAED,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IAEnC,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAA;QAC5C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;QACnC,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;QAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAA;QAEhD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B
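[Editor's note] buildBlendPass, completed above, composites the blurred background with the original frame using the person mask; updateCoverage sets the two smoothstep edges that decide how hard or soft the person/background boundary is. In scalar TypeScript, the per-pixel math the blend shader performs is:

    // Sketch: the blend shader's per-pixel logic, one color channel at a time.
    function smoothstep(e0: number, e1: number, x: number): number {
      const t = Math.min(Math.max((x - e0) / (e1 - e0), 0), 1);
      return t * t * (3 - 2 * t);
    }
    // Below coverage[0] the blurred background wins; above coverage[1] the
    // original frame wins; in between there is a smooth ramp.
    function blendPixel(
      frame: number, blurred: number, personMask: number,
      coverage: [number, number]
    ): number {
      const m = smoothstep(coverage[0], coverage[1], personMask);
      return blurred * (1 - m) + frame * m; // mix(blurredColor, color, m)
    }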
,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,CAAC,EAAE,WAAW,CAAC,CAAA;YAC/C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC,CAAA;YAChD,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;YAEtC,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;YAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAA;YACvC,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;YAEnC,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,UAAU,EAAE,CAAC,CAAC,CAAA;YAC9C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC,CAAA;YAChD,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;YAEtC,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAA;SACxC;IACH,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAA;QAClC,EAAE,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAA;QAClC,EAAE,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAA;QAC1B,EAAE,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAA;QAC1B,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;IACjC,CAAC;IAED,OAAO;QACL,MAAM,QAAA;QACN,OAAO,SAAA;KACR,CAAA;AACH,CAAC;AAED,SAAS,cAAc,CACrB,EAA0B,EAC1B,cAA2B,EAC3B,cAA2B,EAC3B,MAAyB;IAEzB,IAAM,kBAAkB,OAAG,kBAAI,kWAAA,8RAY9B,IAAA,CAAA;IAED,IAAM,oBAAoB,OAAG,kBAAI,4qBAAA,wmBAoBhC,IAAA,CAAA;IAEO,IAAO,WAAW,GAA2B,MAAM,MAAjC,EAAU,YAAY,GAAK,MAAM,OAAX,CAAW;IAE3D,IAAM,YAAY,GAAG,IAAA,2BAAa,EAAC,EAAE,EAAE,EAAE,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAA;IAC5E,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,iBAAiB,GAAG,EAAE,CAAC,kBAAkB,CAC7C,OAAO,EACP,qBAAqB,CACtB,CAAA;IACD,IAAM,gBAAgB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAA;IAErE,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IACnC,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IACnC,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,CAAC,CAAC,CAAA;IAClC,EAAE,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IAEpC,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAA;QAC5C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,IAAI,CAAC,CAAA;QACxC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACxC,CAAC;IAED,SAAS,cAAc,CAAC,QAA0B;QAChD,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,gBAAgB,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;IAC1D,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;QAC/B,EAAE,CAAC,YAAY,CAAC,YAAY,CAAC,CAAA;IAC/B,CAAC;IAED,OAAO;QACL,MAAM,QAAA;QACN,cAAc,gBAAA;QACd,OAAO,SAAA;KACR,CAAA;AACH,CAAC","sourcesContent":["import {\n compileShader,\n createPiplelineStageProgram,\n createTexture,\n glsl,\n} from '../helpers/webglHelper'\n\nexport type BackgroundBlurStage = {\n render(): void\n updateCoverage(coverage: [number, number]): void\n cleanUp(): void\n}\n\nexport function buildBackgroundBlurStage(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n personMaskTexture: WebGLTexture,\n canvas: HTMLCanvasElement\n): BackgroundBlurStage {\n const blurPass = buildBlurPass(\n gl,\n vertexShader,\n positionBuffer,\n texCoordBuffer,\n personMaskTexture,\n canvas\n )\n const blendPass = buildBlendPass(gl, positionBuffer, texCoordBuffer, canvas)\n\n function render() {\n blurPass.render()\n blendPass.render()\n }\n\n function updateCoverage(coverage: [number, number]) {\n 
blendPass.updateCoverage(coverage)\n }\n\n function cleanUp() {\n blendPass.cleanUp()\n blurPass.cleanUp()\n }\n\n return {\n render,\n updateCoverage,\n cleanUp,\n }\n}\n\nfunction buildBlurPass(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n personMaskTexture: WebGLTexture,\n canvas: HTMLCanvasElement\n) {\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform vec2 u_texelSize;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n const float offset[5] = float[](0.0, 1.0, 2.0, 3.0, 4.0);\n const float weight[5] = float[](0.2270270270, 0.1945945946, 0.1216216216,\n 0.0540540541, 0.0162162162);\n\n void main() {\n vec4 centerColor = texture(u_inputFrame, v_texCoord);\n float personMask = texture(u_personMask, v_texCoord).a;\n\n vec4 frameColor = centerColor * weight[0] * (1.0 - personMask);\n\n for (int i = 1; i < 5; i++) {\n vec2 offset = vec2(offset[i]) * u_texelSize;\n\n vec2 texCoord = v_texCoord + offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n\n texCoord = v_texCoord - offset;\n frameColor += texture(u_inputFrame, texCoord) * weight[i] *\n (1.0 - texture(u_personMask, texCoord).a);\n }\n outColor = vec4(frameColor.rgb + (1.0 - frameColor.a) * centerColor.rgb, 1.0);\n }\n `\n\n const scale = 0.5\n const outputWidth = canvas.width * scale\n const outputHeight = canvas.height * scale\n const texelWidth = 1 / outputWidth\n const texelHeight = 1 / outputHeight\n\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame')\n const personMaskLocation = gl.getUniformLocation(program, 'u_personMask')\n const texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize')\n const texture1 = createTexture(\n gl,\n gl.RGBA8,\n outputWidth,\n outputHeight,\n gl.NEAREST,\n gl.LINEAR\n )\n const texture2 = createTexture(\n gl,\n gl.RGBA8,\n outputWidth,\n outputHeight,\n gl.NEAREST,\n gl.LINEAR\n )\n\n const frameBuffer1 = gl.createFramebuffer()\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1)\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n texture1,\n 0\n )\n\n const frameBuffer2 = gl.createFramebuffer()\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2)\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n texture2,\n 0\n )\n\n gl.useProgram(program)\n gl.uniform1i(personMaskLocation, 1)\n\n function render() {\n gl.viewport(0, 0, outputWidth, outputHeight)\n gl.useProgram(program)\n gl.uniform1i(inputFrameLocation, 0)\n gl.activeTexture(gl.TEXTURE1)\n gl.bindTexture(gl.TEXTURE_2D, personMaskTexture)\n\n for (let i = 0; i < 8; i++) {\n gl.uniform2f(texelSizeLocation, 0, texelHeight)\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer1)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n\n gl.activeTexture(gl.TEXTURE2)\n gl.bindTexture(gl.TEXTURE_2D, texture1)\n gl.uniform1i(inputFrameLocation, 2)\n\n gl.uniform2f(texelSizeLocation, texelWidth, 0)\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer2)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n\n gl.bindTexture(gl.TEXTURE_2D, texture2)\n }\n }\n\n function cleanUp() {\n gl.deleteFramebuffer(frameBuffer2)\n 
gl.deleteFramebuffer(frameBuffer1)\n gl.deleteTexture(texture2)\n gl.deleteTexture(texture1)\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n }\n\n return {\n render,\n cleanUp,\n }\n}\n\nfunction buildBlendPass(\n gl: WebGL2RenderingContext,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n canvas: HTMLCanvasElement\n) {\n const vertexShaderSource = glsl`#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n `\n\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_blurredInputFrame;\n uniform vec2 u_coverage;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec3 color = texture(u_inputFrame, v_texCoord).rgb;\n vec3 blurredColor = texture(u_blurredInputFrame, v_texCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(mix(blurredColor, color, personMask), 1.0);\n }\n `\n\n const { width: outputWidth, height: outputHeight } = canvas\n\n const vertexShader = compileShader(gl, gl.VERTEX_SHADER, vertexShaderSource)\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame')\n const personMaskLocation = gl.getUniformLocation(program, 'u_personMask')\n const blurredInputFrame = gl.getUniformLocation(\n program,\n 'u_blurredInputFrame'\n )\n const coverageLocation = gl.getUniformLocation(program, 'u_coverage')\n\n gl.useProgram(program)\n gl.uniform1i(inputFrameLocation, 0)\n gl.uniform1i(personMaskLocation, 1)\n gl.uniform1i(blurredInputFrame, 2)\n gl.uniform2f(coverageLocation, 0, 1)\n\n function render() {\n gl.viewport(0, 0, outputWidth, outputHeight)\n gl.useProgram(program)\n gl.bindFramebuffer(gl.FRAMEBUFFER, null)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n }\n\n function updateCoverage(coverage: [number, number]) {\n gl.useProgram(program)\n gl.uniform2f(coverageLocation, coverage[0], coverage[1])\n }\n\n function cleanUp() {\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n gl.deleteShader(vertexShader)\n }\n\n return {\n render,\n updateCoverage,\n cleanUp,\n }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/backgroundImageStage.d.ts b/es5/processors/webgl2/pipelines/backgroundImageStage.d.ts new file mode 100644 index 0000000..19799b5 --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundImageStage.d.ts @@ -0,0 +1,9 @@ +import { BlendMode } from '../helpers/postProcessingHelper'; +export type BackgroundImageStage = { + render(): void; + updateCoverage(coverage: [number, number]): void; + updateLightWrapping(lightWrapping: number): void; + updateBlendMode(blendMode: BlendMode): void; + cleanUp(): void; +}; +export declare function buildBackgroundImageStage(gl: WebGL2RenderingContext, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer, personMaskTexture: WebGLTexture, backgroundImage: HTMLImageElement | null, canvas: HTMLCanvasElement): BackgroundImageStage; diff --git a/es5/processors/webgl2/pipelines/backgroundImageStage.js 
b/es5/processors/webgl2/pipelines/backgroundImageStage.js new file mode 100644 index 0000000..939a37a --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundImageStage.js @@ -0,0 +1,108 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildBackgroundImageStage = void 0; +var webglHelper_1 = require("../helpers/webglHelper"); +function buildBackgroundImageStage(gl, positionBuffer, texCoordBuffer, personMaskTexture, backgroundImage, canvas) { + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n "], ["#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n "]))); + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_2 || (templateObject_2 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n float personMask = texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n float personMask = 
texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n "]))); + var outputWidth = canvas.width, outputHeight = canvas.height; + var outputRatio = outputWidth / outputHeight; + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var backgroundScaleLocation = gl.getUniformLocation(program, 'u_backgroundScale'); + var backgroundOffsetLocation = gl.getUniformLocation(program, 'u_backgroundOffset'); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var personMaskLocation = gl.getUniformLocation(program, 'u_personMask'); + var backgroundLocation = gl.getUniformLocation(program, 'u_background'); + var coverageLocation = gl.getUniformLocation(program, 'u_coverage'); + var lightWrappingLocation = gl.getUniformLocation(program, 'u_lightWrapping'); + var blendModeLocation = gl.getUniformLocation(program, 'u_blendMode'); + gl.useProgram(program); + gl.uniform2f(backgroundScaleLocation, 1, 1); + gl.uniform2f(backgroundOffsetLocation, 0, 0); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(personMaskLocation, 1); + gl.uniform2f(coverageLocation, 0, 1); + gl.uniform1f(lightWrappingLocation, 0); + gl.uniform1f(blendModeLocation, 0); + var backgroundTexture = null; + // TODO Find a better to handle background being loaded + if (backgroundImage === null || backgroundImage === void 0 ? 
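[Editor's note] The background-image fragment shader above implements "light wrapping": near the mask's upper coverage edge it bleeds background color onto the person so the cut-out edge looks less hard, combining it with the frame via either a screen or linear-dodge blend (u_blendMode is 0 for screen, 1 for linear dodge, as set by updateBlendMode further down). A scalar sketch of that math per color channel:

    // Sketch: the light-wrap computation from the shader, per channel.
    function lightWrapChannel(
      frame: number, background: number, personMask: number,
      coverageUpper: number, lightWrapping: number, blendMode: 0 | 1
    ): number {
      const wrapMask =
        1 - Math.max(0, personMask - coverageUpper) / (1 - coverageUpper);
      const wrap = lightWrapping * wrapMask * background;
      const screen = 1 - (1 - frame) * (1 - wrap); // soft, never clips
      const linearDodge = frame + wrap;            // brighter, can clip
      return blendMode * linearDodge + (1 - blendMode) * screen;
    }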
void 0 : backgroundImage.complete) { + updateBackgroundImage(backgroundImage); + } + else if (backgroundImage) { + backgroundImage.onload = function () { + updateBackgroundImage(backgroundImage); + }; + } + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, personMaskTexture); + if (backgroundTexture !== null) { + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, backgroundTexture); + // TODO Handle correctly the background not loaded yet + gl.uniform1i(backgroundLocation, 2); + } + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateBackgroundImage(backgroundImage) { + backgroundTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, backgroundImage.naturalWidth, backgroundImage.naturalHeight, gl.LINEAR, gl.LINEAR); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, backgroundImage.naturalWidth, backgroundImage.naturalHeight, gl.RGBA, gl.UNSIGNED_BYTE, backgroundImage); + var xOffset = 0; + var yOffset = 0; + var backgroundWidth = backgroundImage.naturalWidth; + var backgroundHeight = backgroundImage.naturalHeight; + var backgroundRatio = backgroundWidth / backgroundHeight; + if (backgroundRatio < outputRatio) { + backgroundHeight = backgroundWidth / outputRatio; + yOffset = (backgroundImage.naturalHeight - backgroundHeight) / 2; + } + else { + backgroundWidth = backgroundHeight * outputRatio; + xOffset = (backgroundImage.naturalWidth - backgroundWidth) / 2; + } + var xScale = backgroundWidth / backgroundImage.naturalWidth; + var yScale = backgroundHeight / backgroundImage.naturalHeight; + xOffset /= backgroundImage.naturalWidth; + yOffset /= backgroundImage.naturalHeight; + gl.uniform2f(backgroundScaleLocation, xScale, yScale); + gl.uniform2f(backgroundOffsetLocation, xOffset, yOffset); + } + function updateCoverage(coverage) { + gl.useProgram(program); + gl.uniform2f(coverageLocation, coverage[0], coverage[1]); + } + function updateLightWrapping(lightWrapping) { + gl.useProgram(program); + gl.uniform1f(lightWrappingLocation, lightWrapping); + } + function updateBlendMode(blendMode) { + gl.useProgram(program); + gl.uniform1f(blendModeLocation, blendMode === 'screen' ? 
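[Editor's note] updateBackgroundImage, completed above, sizes the background the way CSS background-size: cover does: it selects the largest centered sub-rectangle of the image with the output's aspect ratio, then hands it to the vertex shader as a normalized texture-coordinate scale and offset. The same math as a standalone pure function:

    // Sketch: the cover-fit crop math from updateBackgroundImage.
    function coverCrop(
      imageWidth: number, imageHeight: number, outputRatio: number
    ): { xScale: number; yScale: number; xOffset: number; yOffset: number } {
      let cropWidth = imageWidth;
      let cropHeight = imageHeight;
      let xOffset = 0;
      let yOffset = 0;
      if (imageWidth / imageHeight < outputRatio) {
        cropHeight = cropWidth / outputRatio; // image too tall: trim top/bottom
        yOffset = (imageHeight - cropHeight) / 2;
      } else {
        cropWidth = cropHeight * outputRatio; // image too wide: trim the sides
        xOffset = (imageWidth - cropWidth) / 2;
      }
      return {
        xScale: cropWidth / imageWidth,
        yScale: cropHeight / imageHeight,
        xOffset: xOffset / imageWidth,
        yOffset: yOffset / imageHeight,
      };
    }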
0 : 1); + } + function cleanUp() { + gl.deleteTexture(backgroundTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + gl.deleteShader(vertexShader); + } + return { + render: render, + updateCoverage: updateCoverage, + updateLightWrapping: updateLightWrapping, + updateBlendMode: updateBlendMode, + cleanUp: cleanUp, + }; +} +exports.buildBackgroundImageStage = buildBackgroundImageStage; +var templateObject_1, templateObject_2; +//# sourceMappingURL=backgroundImageStage.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/backgroundImageStage.js.map b/es5/processors/webgl2/pipelines/backgroundImageStage.js.map new file mode 100644 index 0000000..d262e70 --- /dev/null +++ b/es5/processors/webgl2/pipelines/backgroundImageStage.js.map @@ -0,0 +1 @@ +{"version":3,"file":"backgroundImageStage.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/backgroundImageStage.ts"],"names":[],"mappings":";;;;;;;AACA,sDAK+B;AAU/B,SAAgB,yBAAyB,CACvC,EAA0B,EAC1B,cAA2B,EAC3B,cAA2B,EAC3B,iBAA+B,EAC/B,eAAwC,EACxC,MAAyB;IAEzB,IAAM,kBAAkB,OAAG,kBAAI,giBAAA,4dAiB9B,IAAA,CAAA;IAED,IAAM,oBAAoB,OAAG,kBAAI,uvCAAA,mrCAmChC,IAAA,CAAA;IAEO,IAAO,WAAW,GAA2B,MAAM,MAAjC,EAAU,YAAY,GAAK,MAAM,OAAX,CAAW;IAC3D,IAAM,WAAW,GAAG,WAAW,GAAG,YAAY,CAAA;IAE9C,IAAM,YAAY,GAAG,IAAA,2BAAa,EAAC,EAAE,EAAE,EAAE,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAA;IAC5E,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,uBAAuB,GAAG,EAAE,CAAC,kBAAkB,CACnD,OAAO,EACP,mBAAmB,CACpB,CAAA;IACD,IAAM,wBAAwB,GAAG,EAAE,CAAC,kBAAkB,CACpD,OAAO,EACP,oBAAoB,CACrB,CAAA;IACD,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,gBAAgB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAA;IACrE,IAAM,qBAAqB,GAAG,EAAE,CAAC,kBAAkB,CACjD,OAAO,EACP,iBAAiB,CAClB,CAAA;IACD,IAAM,iBAAiB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAA;IAEvE,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,uBAAuB,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IAC3C,EAAE,CAAC,SAAS,CAAC,wBAAwB,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IAC5C,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IACnC,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IACnC,EAAE,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACpC,EAAE,CAAC,SAAS,CAAC,qBAAqB,EAAE,CAAC,CAAC,CAAA;IACtC,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,CAAC,CAAC,CAAA;IAElC,IAAI,iBAAiB,GAAwB,IAAI,CAAA;IACjD,uDAAuD;IACvD,IAAI,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,QAAQ,EAAE;QAC7B,qBAAqB,CAAC,eAAe,CAAC,CAAA;KACvC;SAAM,IAAI,eAAe,EAAE;QAC1B,eAAe,CAAC,MAAM,GAAG;YACvB,qBAAqB,CAAC,eAAe,CAAC,CAAA;QACxC,CAAC,CAAA;KACF;IAED,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAA;QAC5C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;QAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAA;QAChD,IAAI,iBAAiB,KAAK,IAAI,EAAE;YAC9B,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;YAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAA;YAChD,sDAAsD;YACtD,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;SACpC;QACD,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,IAAI,CAAC,CAAA;QACxC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACxC,CAAC;IAED,SAAS,qBAAqB,CAAC,eAAiC;QAC9D,iBAAiB,GAAG,IAAA,2BAAa,EAC/B,EAAE,EACF,EAAE,CAAC,KAAK,EACR,eAAe,CAAC,YAAY,EAC5B,eAAe,CAAC,aAAa,EAC7B,EAAE,CAAC,MAAM,
EACT,EAAE,CAAC,MAAM,CACV,CAAA;QACD,EAAE,CAAC,aAAa,CACd,EAAE,CAAC,UAAU,EACb,CAAC,EACD,CAAC,EACD,CAAC,EACD,eAAe,CAAC,YAAY,EAC5B,eAAe,CAAC,aAAa,EAC7B,EAAE,CAAC,IAAI,EACP,EAAE,CAAC,aAAa,EAChB,eAAe,CAChB,CAAA;QAED,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,IAAI,eAAe,GAAG,eAAe,CAAC,YAAY,CAAA;QAClD,IAAI,gBAAgB,GAAG,eAAe,CAAC,aAAa,CAAA;QACpD,IAAM,eAAe,GAAG,eAAe,GAAG,gBAAgB,CAAA;QAC1D,IAAI,eAAe,GAAG,WAAW,EAAE;YACjC,gBAAgB,GAAG,eAAe,GAAG,WAAW,CAAA;YAChD,OAAO,GAAG,CAAC,eAAe,CAAC,aAAa,GAAG,gBAAgB,CAAC,GAAG,CAAC,CAAA;SACjE;aAAM;YACL,eAAe,GAAG,gBAAgB,GAAG,WAAW,CAAA;YAChD,OAAO,GAAG,CAAC,eAAe,CAAC,YAAY,GAAG,eAAe,CAAC,GAAG,CAAC,CAAA;SAC/D;QAED,IAAM,MAAM,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,CAAA;QAC7D,IAAM,MAAM,GAAG,gBAAgB,GAAG,eAAe,CAAC,aAAa,CAAA;QAC/D,OAAO,IAAI,eAAe,CAAC,YAAY,CAAA;QACvC,OAAO,IAAI,eAAe,CAAC,aAAa,CAAA;QAExC,EAAE,CAAC,SAAS,CAAC,uBAAuB,EAAE,MAAM,EAAE,MAAM,CAAC,CAAA;QACrD,EAAE,CAAC,SAAS,CAAC,wBAAwB,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC1D,CAAC;IAED,SAAS,cAAc,CAAC,QAA0B;QAChD,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,gBAAgB,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;IAC1D,CAAC;IAED,SAAS,mBAAmB,CAAC,aAAqB;QAChD,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,qBAAqB,EAAE,aAAa,CAAC,CAAA;IACpD,CAAC;IAED,SAAS,eAAe,CAAC,SAAoB;QAC3C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,SAAS,KAAK,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IACjE,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAA;QACnC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;QAC/B,EAAE,CAAC,YAAY,CAAC,YAAY,CAAC,CAAA;IAC/B,CAAC;IAED,OAAO;QACL,MAAM,QAAA;QACN,cAAc,gBAAA;QACd,mBAAmB,qBAAA;QACnB,eAAe,iBAAA;QACf,OAAO,SAAA;KACR,CAAA;AACH,CAAC;AA5MD,8DA4MC","sourcesContent":["import { BlendMode } from '../helpers/postProcessingHelper'\nimport {\n compileShader,\n createPiplelineStageProgram,\n createTexture,\n glsl,\n} from '../helpers/webglHelper'\n\nexport type BackgroundImageStage = {\n render(): void\n updateCoverage(coverage: [number, number]): void\n updateLightWrapping(lightWrapping: number): void\n updateBlendMode(blendMode: BlendMode): void\n cleanUp(): void\n}\n\nexport function buildBackgroundImageStage(\n gl: WebGL2RenderingContext,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n personMaskTexture: WebGLTexture,\n backgroundImage: HTMLImageElement | null,\n canvas: HTMLCanvasElement\n): BackgroundImageStage {\n const vertexShaderSource = glsl`#version 300 es\n\n uniform vec2 u_backgroundScale;\n uniform vec2 u_backgroundOffset;\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n out vec2 v_backgroundCoord;\n\n void main() {\n // Flipping Y is required when rendering to canvas\n gl_Position = vec4(a_position * vec2(1.0, -1.0), 0.0, 1.0);\n v_texCoord = a_texCoord;\n v_backgroundCoord = a_texCoord * u_backgroundScale + u_backgroundOffset;\n }\n `\n\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_personMask;\n uniform sampler2D u_background;\n uniform vec2 u_coverage;\n uniform float u_lightWrapping;\n uniform float u_blendMode;\n\n in vec2 v_texCoord;\n in vec2 v_backgroundCoord;\n\n out vec4 outColor;\n\n vec3 screen(vec3 a, vec3 b) {\n return 1.0 - (1.0 - a) * (1.0 - b);\n }\n\n vec3 linearDodge(vec3 a, vec3 b) {\n return a + b;\n }\n\n void main() {\n vec3 frameColor = texture(u_inputFrame, v_texCoord).rgb;\n vec3 backgroundColor = texture(u_background, v_backgroundCoord).rgb;\n 
float personMask = texture(u_personMask, v_texCoord).a;\n float lightWrapMask = 1.0 - max(0.0, personMask - u_coverage.y) / (1.0 - u_coverage.y);\n vec3 lightWrap = u_lightWrapping * lightWrapMask * backgroundColor;\n frameColor = u_blendMode * linearDodge(frameColor, lightWrap) +\n (1.0 - u_blendMode) * screen(frameColor, lightWrap);\n personMask = smoothstep(u_coverage.x, u_coverage.y, personMask);\n outColor = vec4(frameColor * personMask + backgroundColor * (1.0 - personMask), 1.0);\n }\n `\n\n const { width: outputWidth, height: outputHeight } = canvas\n const outputRatio = outputWidth / outputHeight\n\n const vertexShader = compileShader(gl, gl.VERTEX_SHADER, vertexShaderSource)\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const backgroundScaleLocation = gl.getUniformLocation(\n program,\n 'u_backgroundScale'\n )\n const backgroundOffsetLocation = gl.getUniformLocation(\n program,\n 'u_backgroundOffset'\n )\n const inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame')\n const personMaskLocation = gl.getUniformLocation(program, 'u_personMask')\n const backgroundLocation = gl.getUniformLocation(program, 'u_background')\n const coverageLocation = gl.getUniformLocation(program, 'u_coverage')\n const lightWrappingLocation = gl.getUniformLocation(\n program,\n 'u_lightWrapping'\n )\n const blendModeLocation = gl.getUniformLocation(program, 'u_blendMode')\n\n gl.useProgram(program)\n gl.uniform2f(backgroundScaleLocation, 1, 1)\n gl.uniform2f(backgroundOffsetLocation, 0, 0)\n gl.uniform1i(inputFrameLocation, 0)\n gl.uniform1i(personMaskLocation, 1)\n gl.uniform2f(coverageLocation, 0, 1)\n gl.uniform1f(lightWrappingLocation, 0)\n gl.uniform1f(blendModeLocation, 0)\n\n let backgroundTexture: WebGLTexture | null = null\n // TODO Find a better to handle background being loaded\n if (backgroundImage?.complete) {\n updateBackgroundImage(backgroundImage)\n } else if (backgroundImage) {\n backgroundImage.onload = () => {\n updateBackgroundImage(backgroundImage)\n }\n }\n\n function render() {\n gl.viewport(0, 0, outputWidth, outputHeight)\n gl.useProgram(program)\n gl.activeTexture(gl.TEXTURE1)\n gl.bindTexture(gl.TEXTURE_2D, personMaskTexture)\n if (backgroundTexture !== null) {\n gl.activeTexture(gl.TEXTURE2)\n gl.bindTexture(gl.TEXTURE_2D, backgroundTexture)\n // TODO Handle correctly the background not loaded yet\n gl.uniform1i(backgroundLocation, 2)\n }\n gl.bindFramebuffer(gl.FRAMEBUFFER, null)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n }\n\n function updateBackgroundImage(backgroundImage: HTMLImageElement) {\n backgroundTexture = createTexture(\n gl,\n gl.RGBA8,\n backgroundImage.naturalWidth,\n backgroundImage.naturalHeight,\n gl.LINEAR,\n gl.LINEAR\n )\n gl.texSubImage2D(\n gl.TEXTURE_2D,\n 0,\n 0,\n 0,\n backgroundImage.naturalWidth,\n backgroundImage.naturalHeight,\n gl.RGBA,\n gl.UNSIGNED_BYTE,\n backgroundImage\n )\n\n let xOffset = 0\n let yOffset = 0\n let backgroundWidth = backgroundImage.naturalWidth\n let backgroundHeight = backgroundImage.naturalHeight\n const backgroundRatio = backgroundWidth / backgroundHeight\n if (backgroundRatio < outputRatio) {\n backgroundHeight = backgroundWidth / outputRatio\n yOffset = (backgroundImage.naturalHeight - backgroundHeight) / 2\n } else {\n backgroundWidth = backgroundHeight * outputRatio\n xOffset = (backgroundImage.naturalWidth - backgroundWidth) / 2\n 
}\n\n const xScale = backgroundWidth / backgroundImage.naturalWidth\n const yScale = backgroundHeight / backgroundImage.naturalHeight\n xOffset /= backgroundImage.naturalWidth\n yOffset /= backgroundImage.naturalHeight\n\n gl.uniform2f(backgroundScaleLocation, xScale, yScale)\n gl.uniform2f(backgroundOffsetLocation, xOffset, yOffset)\n }\n\n function updateCoverage(coverage: [number, number]) {\n gl.useProgram(program)\n gl.uniform2f(coverageLocation, coverage[0], coverage[1])\n }\n\n function updateLightWrapping(lightWrapping: number) {\n gl.useProgram(program)\n gl.uniform1f(lightWrappingLocation, lightWrapping)\n }\n\n function updateBlendMode(blendMode: BlendMode) {\n gl.useProgram(program)\n gl.uniform1f(blendModeLocation, blendMode === 'screen' ? 0 : 1)\n }\n\n function cleanUp() {\n gl.deleteTexture(backgroundTexture)\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n gl.deleteShader(vertexShader)\n }\n\n return {\n render,\n updateCoverage,\n updateLightWrapping,\n updateBlendMode,\n cleanUp,\n }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/jointBilateralFilterStage.d.ts b/es5/processors/webgl2/pipelines/jointBilateralFilterStage.d.ts new file mode 100644 index 0000000..755fbbd --- /dev/null +++ b/es5/processors/webgl2/pipelines/jointBilateralFilterStage.d.ts @@ -0,0 +1,7 @@ +import { SegmentationConfig } from '../helpers/segmentationHelper'; +export declare function buildJointBilateralFilterStage(gl: WebGL2RenderingContext, vertexShader: WebGLShader, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer, inputTexture: WebGLTexture, segmentationConfig: SegmentationConfig, outputTexture: WebGLTexture, canvas: HTMLCanvasElement): { + render: () => void; + updateSigmaSpace: (sigmaSpace: number) => void; + updateSigmaColor: (sigmaColor: number) => void; + cleanUp: () => void; +}; diff --git a/es5/processors/webgl2/pipelines/jointBilateralFilterStage.js b/es5/processors/webgl2/pipelines/jointBilateralFilterStage.js new file mode 100644 index 0000000..7d7aa5c --- /dev/null +++ b/es5/processors/webgl2/pipelines/jointBilateralFilterStage.js @@ -0,0 +1,72 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildJointBilateralFilterStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildJointBilateralFilterStage(gl, vertexShader, positionBuffer, texCoordBuffer, inputTexture, segmentationConfig, outputTexture, canvas) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n float coeff = -0.5 / (sigma * sigma * 4.0 + 1.0e-6);\n return exp((x * x) * coeff);\n }\n\n void main() {\n vec2 centerCoord = v_texCoord;\n vec3 centerColor = texture(u_inputFrame, centerCoord).rgb;\n float newVal = 0.0;\n\n float spaceWeight = 0.0;\n float colorWeight = 0.0;\n float 
totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(centerCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(centerCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(centerCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(centerCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n outColor = vec4(vec3(0.0), 0.0);\n } else if (totalSegAlpha >= 4.0) {\n outColor = vec4(vec3(0.0), 1.0);\n } else {\n for (float i = -u_radius + u_offset; i <= u_radius; i += u_step) {\n for (float j = -u_radius + u_offset; j <= u_radius; j += u_step) {\n vec2 shift = vec2(j, i) * u_texelSize;\n vec2 coord = vec2(centerCoord + shift);\n vec3 frameColor = texture(u_inputFrame, coord).rgb;\n float outVal = texture(u_segmentationMask, coord).a;\n\n spaceWeight = gaussian(distance(centerCoord, coord), u_sigmaTexel);\n colorWeight = gaussian(distance(centerColor, frameColor), u_sigmaColor);\n totalWeight += spaceWeight * colorWeight;\n\n newVal += spaceWeight * colorWeight * outVal;\n }\n }\n newVal /= totalWeight;\n\n outColor = vec4(vec3(0.0), newVal);\n }\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n float coeff = -0.5 / (sigma * sigma * 4.0 + 1.0e-6);\n return exp((x * x) * coeff);\n }\n\n void main() {\n vec2 centerCoord = v_texCoord;\n vec3 centerColor = texture(u_inputFrame, centerCoord).rgb;\n float newVal = 0.0;\n\n float spaceWeight = 0.0;\n float colorWeight = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(centerCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(centerCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(centerCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(centerCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n outColor = vec4(vec3(0.0), 0.0);\n } else if (totalSegAlpha >= 4.0) {\n outColor = vec4(vec3(0.0), 1.0);\n } else {\n for (float i = -u_radius + u_offset; i <= u_radius; i += u_step) {\n for (float j = -u_radius + u_offset; j <= u_radius; j += u_step) {\n vec2 shift = vec2(j, i) * u_texelSize;\n vec2 coord = vec2(centerCoord + shift);\n vec3 frameColor = texture(u_inputFrame, coord).rgb;\n float outVal = texture(u_segmentationMask, coord).a;\n\n spaceWeight = gaussian(distance(centerCoord, coord), 
u_sigmaTexel);\n colorWeight = gaussian(distance(centerColor, frameColor), u_sigmaColor);\n totalWeight += spaceWeight * colorWeight;\n\n newVal += spaceWeight * colorWeight * outVal;\n }\n }\n newVal /= totalWeight;\n\n outColor = vec4(vec3(0.0), newVal);\n }\n }\n "]))); + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var outputWidth = canvas.width, outputHeight = canvas.height; + var texelWidth = 1 / outputWidth; + var texelHeight = 1 / outputHeight; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var segmentationMaskLocation = gl.getUniformLocation(program, 'u_segmentationMask'); + var texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize'); + var stepLocation = gl.getUniformLocation(program, 'u_step'); + var radiusLocation = gl.getUniformLocation(program, 'u_radius'); + var offsetLocation = gl.getUniformLocation(program, 'u_offset'); + var sigmaTexelLocation = gl.getUniformLocation(program, 'u_sigmaTexel'); + var sigmaColorLocation = gl.getUniformLocation(program, 'u_sigmaColor'); + var frameBuffer = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outputTexture, 0); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + gl.uniform1i(segmentationMaskLocation, 1); + gl.uniform2f(texelSizeLocation, texelWidth, texelHeight); + // Ensures default values are configured to prevent infinite + // loop in fragment shader + updateSigmaSpace(0); + updateSigmaColor(0); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, inputTexture); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function updateSigmaSpace(sigmaSpace) { + sigmaSpace *= Math.max(outputWidth / segmentationWidth, outputHeight / segmentationHeight); + var kSparsityFactor = 0.66; // Higher is more sparse. + var sparsity = Math.max(1, Math.sqrt(sigmaSpace) * kSparsityFactor); + var step = sparsity; + var radius = sigmaSpace; + var offset = step > 1 ? 
step * 0.5 : 0; + var sigmaTexel = Math.max(texelWidth, texelHeight) * sigmaSpace; + gl.useProgram(program); + gl.uniform1f(stepLocation, step); + gl.uniform1f(radiusLocation, radius); + gl.uniform1f(offsetLocation, offset); + gl.uniform1f(sigmaTexelLocation, sigmaTexel); + } + function updateSigmaColor(sigmaColor) { + gl.useProgram(program); + gl.uniform1f(sigmaColorLocation, sigmaColor); + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, updateSigmaSpace: updateSigmaSpace, updateSigmaColor: updateSigmaColor, cleanUp: cleanUp }; +} +exports.buildJointBilateralFilterStage = buildJointBilateralFilterStage; +var templateObject_1; +//# sourceMappingURL=jointBilateralFilterStage.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/jointBilateralFilterStage.js.map b/es5/processors/webgl2/pipelines/jointBilateralFilterStage.js.map new file mode 100644 index 0000000..ea44797 --- /dev/null +++ b/es5/processors/webgl2/pipelines/jointBilateralFilterStage.js.map @@ -0,0 +1 @@ +{"version":3,"file":"jointBilateralFilterStage.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/jointBilateralFilterStage.ts"],"names":[],"mappings":";;;;;;;AAAA,oEAGsC;AACtC,sDAI+B;AAE/B,SAAgB,8BAA8B,CAC5C,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,YAA0B,EAC1B,kBAAsC,EACtC,aAA2B,EAC3B,MAAyB;IAEzB,IAAM,oBAAoB,OAAG,kBAAI,wiFAAA,o+EAkEhC,IAAA,CAAA;IAEK,IAAA,KAA0C,qCAAgB,CAC9D,kBAAkB,CAAC,eAAe,CACnC,EAFM,iBAAiB,QAAA,EAAE,kBAAkB,QAE3C,CAAA;IACO,IAAO,WAAW,GAA2B,MAAM,MAAjC,EAAU,YAAY,GAAK,MAAM,OAAX,CAAW;IAC3D,IAAM,UAAU,GAAG,CAAC,GAAG,WAAW,CAAA;IAClC,IAAM,WAAW,GAAG,CAAC,GAAG,YAAY,CAAA;IAEpC,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,wBAAwB,GAAG,EAAE,CAAC,kBAAkB,CACpD,OAAO,EACP,oBAAoB,CACrB,CAAA;IACD,IAAM,iBAAiB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAA;IACvE,IAAM,YAAY,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAA;IAC7D,IAAM,cAAc,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,UAAU,CAAC,CAAA;IACjE,IAAM,cAAc,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,UAAU,CAAC,CAAA;IACjE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IAEzE,IAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC1C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;IAC/C,EAAE,CAAC,oBAAoB,CACrB,EAAE,CAAC,WAAW,EACd,EAAE,CAAC,iBAAiB,EACpB,EAAE,CAAC,UAAU,EACb,aAAa,EACb,CAAC,CACF,CAAA;IAED,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IACnC,EAAE,CAAC,SAAS,CAAC,wBAAwB,EAAE,CAAC,CAAC,CAAA;IACzC,EAAE,CAAC,SAAS,CAAC,iBAAiB,EAAE,UAAU,EAAE,WAAW,CAAC,CAAA;IAExD,4DAA4D;IAC5D,0BAA0B;IAC1B,gBAAgB,CAAC,CAAC,CAAC,CAAA;IACnB,gBAAgB,CAAC,CAAC,CAAC,CAAA;IAEnB,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAA;QAC5C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;QAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,YAAY,CAAC,CAAA;QAC3C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;QAC/C,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACxC,CAAC;IAED,SAAS,gBAAgB,CAAC,UAAkB;QAC1C,UAAU,IAAI,IAAI,CAAC,GAAG,CACpB,WAAW,GAAG,iBAAiB,EAC/B,YAAY,GAAG,kBAAkB,CAClC,CAAA;QAED,IAAM,eAAe,GAAG,IAAI,CAAA,CAAC,yBAAyB;QACtD,IAAM,QAAQ,GAAG
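[Editor's note] updateSigmaSpace, completed above, derives all of the joint bilateral filter's loop parameters on the CPU: the sigma is rescaled from segmentation-mask pixels to output pixels, and the loop step is clamped to at least 1 so the shader's `i += u_step` loops always terminate — which is why updateSigmaSpace(0) and updateSigmaColor(0) are called during setup. The derivation as a pure function:

    // Sketch: the parameter derivation inside updateSigmaSpace.
    function deriveSigmaSpaceParams(
      sigmaSpace: number,
      outputWidth: number, outputHeight: number,
      segmentationWidth: number, segmentationHeight: number,
      texelWidth: number, texelHeight: number
    ) {
      const scaled = sigmaSpace * Math.max(
        outputWidth / segmentationWidth,
        outputHeight / segmentationHeight
      );
      const kSparsityFactor = 0.66; // higher is more sparse
      const step = Math.max(1, Math.sqrt(scaled) * kSparsityFactor); // never 0
      return {
        step,
        radius: scaled,
        offset: step > 1 ? step * 0.5 : 0,
        sigmaTexel: Math.max(texelWidth, texelHeight) * scaled,
      };
    }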
,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC,CAAA;QACrE,IAAM,IAAI,GAAG,QAAQ,CAAA;QACrB,IAAM,MAAM,GAAG,UAAU,CAAA;QACzB,IAAM,MAAM,GAAG,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QACxC,IAAM,UAAU,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,GAAG,UAAU,CAAA;QAEjE,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,YAAY,EAAE,IAAI,CAAC,CAAA;QAChC,EAAE,CAAC,SAAS,CAAC,cAAc,EAAE,MAAM,CAAC,CAAA;QACpC,EAAE,CAAC,SAAS,CAAC,cAAc,EAAE,MAAM,CAAC,CAAA;QACpC,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,UAAU,CAAC,CAAA;IAC9C,CAAC;IAED,SAAS,gBAAgB,CAAC,UAAkB;QAC1C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,UAAU,CAAC,CAAA;IAC9C,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACjC,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;IACjC,CAAC;IAED,OAAO,EAAE,MAAM,QAAA,EAAE,gBAAgB,kBAAA,EAAE,gBAAgB,kBAAA,EAAE,OAAO,SAAA,EAAE,CAAA;AAChE,CAAC;AA1KD,wEA0KC","sourcesContent":["import {\n inputResolutions,\n SegmentationConfig,\n} from '../helpers/segmentationHelper'\nimport {\n compileShader,\n createPiplelineStageProgram,\n glsl,\n} from '../helpers/webglHelper'\n\nexport function buildJointBilateralFilterStage(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n inputTexture: WebGLTexture,\n segmentationConfig: SegmentationConfig,\n outputTexture: WebGLTexture,\n canvas: HTMLCanvasElement\n) {\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n uniform sampler2D u_segmentationMask;\n uniform vec2 u_texelSize;\n uniform float u_step;\n uniform float u_radius;\n uniform float u_offset;\n uniform float u_sigmaTexel;\n uniform float u_sigmaColor;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n float gaussian(float x, float sigma) {\n float coeff = -0.5 / (sigma * sigma * 4.0 + 1.0e-6);\n return exp((x * x) * coeff);\n }\n\n void main() {\n vec2 centerCoord = v_texCoord;\n vec3 centerColor = texture(u_inputFrame, centerCoord).rgb;\n float newVal = 0.0;\n\n float spaceWeight = 0.0;\n float colorWeight = 0.0;\n float totalWeight = 0.0;\n\n vec2 leftTopCoord = vec2(centerCoord + vec2(-u_radius, -u_radius) * u_texelSize);\n vec2 rightTopCoord = vec2(centerCoord + vec2(u_radius, -u_radius) * u_texelSize);\n vec2 leftBottomCoord = vec2(centerCoord + vec2(-u_radius, u_radius) * u_texelSize);\n vec2 rightBottomCoord = vec2(centerCoord + vec2(u_radius, u_radius) * u_texelSize);\n\n float leftTopSegAlpha = texture(u_segmentationMask, leftTopCoord).a;\n float rightTopSegAlpha = texture(u_segmentationMask, rightTopCoord).a;\n float leftBottomSegAlpha = texture(u_segmentationMask, leftBottomCoord).a;\n float rightBottomSegAlpha = texture(u_segmentationMask, rightBottomCoord).a;\n float totalSegAlpha = leftTopSegAlpha + rightTopSegAlpha + leftBottomSegAlpha + rightBottomSegAlpha;\n\n if (totalSegAlpha <= 0.0) {\n outColor = vec4(vec3(0.0), 0.0);\n } else if (totalSegAlpha >= 4.0) {\n outColor = vec4(vec3(0.0), 1.0);\n } else {\n for (float i = -u_radius + u_offset; i <= u_radius; i += u_step) {\n for (float j = -u_radius + u_offset; j <= u_radius; j += u_step) {\n vec2 shift = vec2(j, i) * u_texelSize;\n vec2 coord = vec2(centerCoord + shift);\n vec3 frameColor = texture(u_inputFrame, coord).rgb;\n float outVal = texture(u_segmentationMask, coord).a;\n\n spaceWeight = gaussian(distance(centerCoord, coord), u_sigmaTexel);\n colorWeight = gaussian(distance(centerColor, frameColor), 
u_sigmaColor);\n totalWeight += spaceWeight * colorWeight;\n\n newVal += spaceWeight * colorWeight * outVal;\n }\n }\n newVal /= totalWeight;\n\n outColor = vec4(vec3(0.0), newVal);\n }\n }\n `\n\n const [segmentationWidth, segmentationHeight] = inputResolutions[\n segmentationConfig.inputResolution\n ]\n const { width: outputWidth, height: outputHeight } = canvas\n const texelWidth = 1 / outputWidth\n const texelHeight = 1 / outputHeight\n\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame')\n const segmentationMaskLocation = gl.getUniformLocation(\n program,\n 'u_segmentationMask'\n )\n const texelSizeLocation = gl.getUniformLocation(program, 'u_texelSize')\n const stepLocation = gl.getUniformLocation(program, 'u_step')\n const radiusLocation = gl.getUniformLocation(program, 'u_radius')\n const offsetLocation = gl.getUniformLocation(program, 'u_offset')\n const sigmaTexelLocation = gl.getUniformLocation(program, 'u_sigmaTexel')\n const sigmaColorLocation = gl.getUniformLocation(program, 'u_sigmaColor')\n\n const frameBuffer = gl.createFramebuffer()\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n outputTexture,\n 0\n )\n\n gl.useProgram(program)\n gl.uniform1i(inputFrameLocation, 0)\n gl.uniform1i(segmentationMaskLocation, 1)\n gl.uniform2f(texelSizeLocation, texelWidth, texelHeight)\n\n // Ensures default values are configured to prevent infinite\n // loop in fragment shader\n updateSigmaSpace(0)\n updateSigmaColor(0)\n\n function render() {\n gl.viewport(0, 0, outputWidth, outputHeight)\n gl.useProgram(program)\n gl.activeTexture(gl.TEXTURE1)\n gl.bindTexture(gl.TEXTURE_2D, inputTexture)\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n }\n\n function updateSigmaSpace(sigmaSpace: number) {\n sigmaSpace *= Math.max(\n outputWidth / segmentationWidth,\n outputHeight / segmentationHeight\n )\n\n const kSparsityFactor = 0.66 // Higher is more sparse.\n const sparsity = Math.max(1, Math.sqrt(sigmaSpace) * kSparsityFactor)\n const step = sparsity\n const radius = sigmaSpace\n const offset = step > 1 ? 
step * 0.5 : 0\n const sigmaTexel = Math.max(texelWidth, texelHeight) * sigmaSpace\n\n gl.useProgram(program)\n gl.uniform1f(stepLocation, step)\n gl.uniform1f(radiusLocation, radius)\n gl.uniform1f(offsetLocation, offset)\n gl.uniform1f(sigmaTexelLocation, sigmaTexel)\n }\n\n function updateSigmaColor(sigmaColor: number) {\n gl.useProgram(program)\n gl.uniform1f(sigmaColorLocation, sigmaColor)\n }\n\n function cleanUp() {\n gl.deleteFramebuffer(frameBuffer)\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n }\n\n return { render, updateSigmaSpace, updateSigmaColor, cleanUp }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/loadSegmentationStage.d.ts b/es5/processors/webgl2/pipelines/loadSegmentationStage.d.ts new file mode 100644 index 0000000..aa65242 --- /dev/null +++ b/es5/processors/webgl2/pipelines/loadSegmentationStage.d.ts @@ -0,0 +1,5 @@ +import { SegmentationConfig } from '../helpers/segmentationHelper'; +export declare function buildLoadSegmentationStage(gl: WebGL2RenderingContext, vertexShader: WebGLShader, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer, segmentationConfig: SegmentationConfig, tflite: any, outputTexture: WebGLTexture): { + render: () => void; + cleanUp: () => void; +}; diff --git a/es5/processors/webgl2/pipelines/loadSegmentationStage.js b/es5/processors/webgl2/pipelines/loadSegmentationStage.js new file mode 100644 index 0000000..ed8a0cb --- /dev/null +++ b/es5/processors/webgl2/pipelines/loadSegmentationStage.js @@ -0,0 +1,45 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildLoadSegmentationStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildLoadSegmentationStage(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, tflite, outputTexture) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).r;\n outColor = vec4(vec3(0.0), segmentation);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).r;\n outColor = vec4(vec3(0.0), segmentation);\n }\n " + // TFLite memory will be accessed as float32 + ]))); + // TFLite memory will be accessed as float32 + var tfliteOutputMemoryOffset = tflite._getOutputMemoryOffset() / 4; + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputLocation = gl.getUniformLocation(program, 'u_inputSegmentation'); + var inputTexture = (0, webglHelper_1.createTexture)(gl, gl.R32F, segmentationWidth, segmentationHeight); + var 
frameBuffer = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outputTexture, 0); + gl.useProgram(program); + gl.uniform1i(inputLocation, 1); + function render() { + gl.viewport(0, 0, segmentationWidth, segmentationHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, inputTexture); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, segmentationWidth, segmentationHeight, gl.RED, gl.FLOAT, tflite.HEAPF32, tfliteOutputMemoryOffset); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteTexture(inputTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, cleanUp: cleanUp }; +} +exports.buildLoadSegmentationStage = buildLoadSegmentationStage; +var templateObject_1; +//# sourceMappingURL=loadSegmentationStage.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/loadSegmentationStage.js.map b/es5/processors/webgl2/pipelines/loadSegmentationStage.js.map new file mode 100644 index 0000000..2fc8086 --- /dev/null +++ b/es5/processors/webgl2/pipelines/loadSegmentationStage.js.map @@ -0,0 +1 @@ +{"version":3,"file":"loadSegmentationStage.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/loadSegmentationStage.ts"],"names":[],"mappings":";;;;;;;AAAA,oEAGsC;AACtC,sDAK+B;AAE/B,SAAgB,0BAA0B,CACxC,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,kBAAsC,EACtC,MAAW,EACX,aAA2B;IAE3B,IAAM,oBAAoB,OAAG,kBAAI,+WAAA,2SAchC;QAED,4CAA4C;QAF3C,CAAA;IAED,4CAA4C;IAC5C,IAAM,wBAAwB,GAAG,MAAM,CAAC,sBAAsB,EAAE,GAAG,CAAC,CAAA;IAE9D,IAAA,KAA0C,qCAAgB,CAC9D,kBAAkB,CAAC,eAAe,CACnC,EAFM,iBAAiB,QAAA,EAAE,kBAAkB,QAE3C,CAAA;IAED,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,aAAa,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,qBAAqB,CAAC,CAAA;IAC3E,IAAM,YAAY,GAAG,IAAA,2BAAa,EAChC,EAAE,EACF,EAAE,CAAC,IAAI,EACP,iBAAiB,EACjB,kBAAkB,CACnB,CAAA;IAED,IAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC1C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;IAC/C,EAAE,CAAC,oBAAoB,CACrB,EAAE,CAAC,WAAW,EACd,EAAE,CAAC,iBAAiB,EACpB,EAAE,CAAC,UAAU,EACb,aAAa,EACb,CAAC,CACF,CAAA;IAED,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,CAAC,CAAA;IAE9B,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,iBAAiB,EAAE,kBAAkB,CAAC,CAAA;QACxD,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;QAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,YAAY,CAAC,CAAA;QAC3C,EAAE,CAAC,aAAa,CACd,EAAE,CAAC,UAAU,EACb,CAAC,EACD,CAAC,EACD,CAAC,EACD,iBAAiB,EACjB,kBAAkB,EAClB,EAAE,CAAC,GAAG,EACN,EAAE,CAAC,KAAK,EACR,MAAM,CAAC,OAAO,EACd,wBAAwB,CACzB,CAAA;QACD,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;QAC/C,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACxC,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACjC,EAAE,CAAC,aAAa,CAAC,YAAY,CAAC,CAAA;QAC9B,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;IACjC,CAAC;IAED,OAAO,EAAE,MAAM,QAAA,EAAE,OAAO,SAAA,EAAE,CAAA;AAC5B,CAAC;AA9FD,gEA8FC","sourcesContent":["import {\n inputResolutions,\n SegmentationConfig,\n} from '../helpers/segmentationHelper'\nimport {\n compileShader,\n createPiplelineStageProgram,\n createTexture,\n glsl,\n} 
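[Editor's note] buildLoadSegmentationStage, completed above, reads the TFLite model's output mask straight out of the Emscripten module's linear memory: _getOutputMemoryOffset() returns a byte offset, and HEAPF32 is a Float32Array view over the same bytes, so the offset is divided by 4 (Float32Array.BYTES_PER_ELEMENT) before indexing, and texSubImage2D uploads from that view into an R32F texture. A minimal sketch of the same addressing on the CPU (maskValueAt is a hypothetical helper; _getOutputMemoryOffset and HEAPF32 are from the source):

    // Sketch: reading one segmentation value from the WASM heap.
    function maskValueAt(
      tflite: { _getOutputMemoryOffset(): number; HEAPF32: Float32Array },
      x: number, y: number, segmentationWidth: number
    ): number {
      const floatOffset =
        tflite._getOutputMemoryOffset() / Float32Array.BYTES_PER_ELEMENT;
      return tflite.HEAPF32[floatOffset + y * segmentationWidth + x];
    }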
from '../helpers/webglHelper'\n\nexport function buildLoadSegmentationStage(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n segmentationConfig: SegmentationConfig,\n tflite: any,\n outputTexture: WebGLTexture\n) {\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n float segmentation = texture(u_inputSegmentation, v_texCoord).r;\n outColor = vec4(vec3(0.0), segmentation);\n }\n `\n\n // TFLite memory will be accessed as float32\n const tfliteOutputMemoryOffset = tflite._getOutputMemoryOffset() / 4\n\n const [segmentationWidth, segmentationHeight] = inputResolutions[\n segmentationConfig.inputResolution\n ]\n\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const inputLocation = gl.getUniformLocation(program, 'u_inputSegmentation')\n const inputTexture = createTexture(\n gl,\n gl.R32F,\n segmentationWidth,\n segmentationHeight\n )\n\n const frameBuffer = gl.createFramebuffer()\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n outputTexture,\n 0\n )\n\n gl.useProgram(program)\n gl.uniform1i(inputLocation, 1)\n\n function render() {\n gl.viewport(0, 0, segmentationWidth, segmentationHeight)\n gl.useProgram(program)\n gl.activeTexture(gl.TEXTURE1)\n gl.bindTexture(gl.TEXTURE_2D, inputTexture)\n gl.texSubImage2D(\n gl.TEXTURE_2D,\n 0,\n 0,\n 0,\n segmentationWidth,\n segmentationHeight,\n gl.RED,\n gl.FLOAT,\n tflite.HEAPF32,\n tfliteOutputMemoryOffset\n )\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n }\n\n function cleanUp() {\n gl.deleteFramebuffer(frameBuffer)\n gl.deleteTexture(inputTexture)\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n }\n\n return { render, cleanUp }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/resizingStage.d.ts b/es5/processors/webgl2/pipelines/resizingStage.d.ts new file mode 100644 index 0000000..ea23da8 --- /dev/null +++ b/es5/processors/webgl2/pipelines/resizingStage.d.ts @@ -0,0 +1,5 @@ +import { SegmentationConfig } from '../helpers/segmentationHelper'; +export declare function buildResizingStage(gl: WebGL2RenderingContext, vertexShader: WebGLShader, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer, segmentationConfig: SegmentationConfig, tflite: any): { + render: () => void; + cleanUp: () => void; +}; diff --git a/es5/processors/webgl2/pipelines/resizingStage.js b/es5/processors/webgl2/pipelines/resizingStage.js new file mode 100644 index 0000000..8b4ea11 --- /dev/null +++ b/es5/processors/webgl2/pipelines/resizingStage.js @@ -0,0 +1,53 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildResizingStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildResizingStage(gl, vertexShader, positionBuffer, texCoordBuffer, 
segmentationConfig, tflite) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n outColor = texture(u_inputFrame, v_texCoord);\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n outColor = texture(u_inputFrame, v_texCoord);\n }\n " + // TFLite memory will be accessed as float32 + ]))); + // TFLite memory will be accessed as float32 + var tfliteInputMemoryOffset = tflite._getInputMemoryOffset() / 4; + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], outputWidth = _a[0], outputHeight = _a[1]; + var outputPixelCount = outputWidth * outputHeight; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame'); + var outputTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, outputWidth, outputHeight); + var frameBuffer = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outputTexture, 0); + var outputPixels = new Uint8Array(outputPixelCount * 4); + gl.useProgram(program); + gl.uniform1i(inputFrameLocation, 0); + function render() { + gl.viewport(0, 0, outputWidth, outputHeight); + gl.useProgram(program); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + // Downloads pixels asynchronously from GPU while rendering the current frame + (0, webglHelper_1.readPixelsAsync)(gl, 0, 0, outputWidth, outputHeight, gl.RGBA, gl.UNSIGNED_BYTE, outputPixels); + for (var i = 0; i < outputPixelCount; i++) { + var tfliteIndex = tfliteInputMemoryOffset + i * 3; + var outputIndex = i * 4; + tflite.HEAPF32[tfliteIndex] = outputPixels[outputIndex] / 255; + tflite.HEAPF32[tfliteIndex + 1] = outputPixels[outputIndex + 1] / 255; + tflite.HEAPF32[tfliteIndex + 2] = outputPixels[outputIndex + 2] / 255; + } + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteTexture(outputTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, cleanUp: cleanUp }; +} +exports.buildResizingStage = buildResizingStage; +var templateObject_1; +//# sourceMappingURL=resizingStage.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/resizingStage.js.map b/es5/processors/webgl2/pipelines/resizingStage.js.map new file mode 100644 index 0000000..3ea2784 --- /dev/null +++ b/es5/processors/webgl2/pipelines/resizingStage.js.map @@ -0,0 +1 @@ 
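[Editor's note — not part of the diff] buildResizingStage and buildLoadSegmentationStage (above) move data across the CPU/GPU boundary in opposite directions: the resizing stage renders the input frame at the model's input resolution, reads the RGBA8 pixels back, and writes them as normalized float32 RGB into the TFLite input buffer (tflite.HEAPF32 at the input offset), while the load-segmentation stage uploads the model's float output from tflite.HEAPF32 straight into an R32F texture via texSubImage2D. The readPixelsAsync helper called here lives in webglHelper, which is not part of this hunk; the sketch below shows one common WebGL2 way to implement such a helper (pixel-pack buffer plus fence sync) and is an assumption, not the library's actual code. Note also that render() does not await the readback, so the normalization loop appears to consume pixels from a previously completed transfer, trading one frame of latency for avoiding a GPU stall.

```ts
// A minimal sketch of an asynchronous WebGL2 readback helper, assuming
// the real readPixelsAsync (defined in webglHelper, not in this hunk)
// works along these lines. readPixels into a bound PIXEL_PACK_BUFFER
// returns immediately; the CPU-side copy waits on a fence instead of
// stalling the pipeline. Error handling (WAIT_FAILED) is omitted.
function readPixelsViaPBO(
  gl: WebGL2RenderingContext,
  x: number,
  y: number,
  width: number,
  height: number,
  dest: Uint8Array
): Promise<Uint8Array> {
  const pbo = gl.createBuffer()!;
  gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
  gl.bufferData(gl.PIXEL_PACK_BUFFER, dest.byteLength, gl.STREAM_READ);
  // With a PIXEL_PACK_BUFFER bound, readPixels takes a byte offset and
  // queues a GPU-side copy rather than blocking the CPU.
  gl.readPixels(x, y, width, height, gl.RGBA, gl.UNSIGNED_BYTE, 0);
  gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
  const sync = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0)!;
  gl.flush();
  return new Promise((resolve) => {
    const poll = (): void => {
      const status = gl.clientWaitSync(sync, 0, 0);
      if (status === gl.ALREADY_SIGNALED || status === gl.CONDITION_SATISFIED) {
        gl.deleteSync(sync);
        gl.bindBuffer(gl.PIXEL_PACK_BUFFER, pbo);
        gl.getBufferSubData(gl.PIXEL_PACK_BUFFER, 0, dest);
        gl.bindBuffer(gl.PIXEL_PACK_BUFFER, null);
        gl.deleteBuffer(pbo);
        resolve(dest);
      } else {
        requestAnimationFrame(poll); // fence not signaled yet; poll next frame
      }
    };
    poll();
  });
}
```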
+{"version":3,"file":"resizingStage.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/resizingStage.ts"],"names":[],"mappings":";;;;;;;AAAA,oEAGsC;AACtC,sDAM+B;AAE/B,SAAgB,kBAAkB,CAChC,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,kBAAsC,EACtC,MAAW;IAEX,IAAM,oBAAoB,OAAG,kBAAI,oSAAA,gOAahC;QAED,4CAA4C;QAF3C,CAAA;IAED,4CAA4C;IAC5C,IAAM,uBAAuB,GAAG,MAAM,CAAC,qBAAqB,EAAE,GAAG,CAAC,CAAA;IAE5D,IAAA,KAA8B,qCAAgB,CAClD,kBAAkB,CAAC,eAAe,CACnC,EAFM,WAAW,QAAA,EAAE,YAAY,QAE/B,CAAA;IACD,IAAM,gBAAgB,GAAG,WAAW,GAAG,YAAY,CAAA;IAEnD,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,kBAAkB,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAA;IACzE,IAAM,aAAa,GAAG,IAAA,2BAAa,EAAC,EAAE,EAAE,EAAE,CAAC,KAAK,EAAE,WAAW,EAAE,YAAY,CAAC,CAAA;IAE5E,IAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC1C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;IAC/C,EAAE,CAAC,oBAAoB,CACrB,EAAE,CAAC,WAAW,EACd,EAAE,CAAC,iBAAiB,EACpB,EAAE,CAAC,UAAU,EACb,aAAa,EACb,CAAC,CACF,CAAA;IACD,IAAM,YAAY,GAAG,IAAI,UAAU,CAAC,gBAAgB,GAAG,CAAC,CAAC,CAAA;IAEzD,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAA;IAEnC,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,WAAW,EAAE,YAAY,CAAC,CAAA;QAC5C,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;QAC/C,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;QAEtC,6EAA6E;QAC7E,IAAA,6BAAe,EACb,EAAE,EACF,CAAC,EACD,CAAC,EACD,WAAW,EACX,YAAY,EACZ,EAAE,CAAC,IAAI,EACP,EAAE,CAAC,aAAa,EAChB,YAAY,CACb,CAAA;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,gBAAgB,EAAE,CAAC,EAAE,EAAE;YACzC,IAAM,WAAW,GAAG,uBAAuB,GAAG,CAAC,GAAG,CAAC,CAAA;YACnD,IAAM,WAAW,GAAG,CAAC,GAAG,CAAC,CAAA;YACzB,MAAM,CAAC,OAAO,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC,WAAW,CAAC,GAAG,GAAG,CAAA;YAC7D,MAAM,CAAC,OAAO,CAAC,WAAW,GAAG,CAAC,CAAC,GAAG,YAAY,CAAC,WAAW,GAAG,CAAC,CAAC,GAAG,GAAG,CAAA;YACrE,MAAM,CAAC,OAAO,CAAC,WAAW,GAAG,CAAC,CAAC,GAAG,YAAY,CAAC,WAAW,GAAG,CAAC,CAAC,GAAG,GAAG,CAAA;SACtE;IACH,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACjC,EAAE,CAAC,aAAa,CAAC,aAAa,CAAC,CAAA;QAC/B,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;IACjC,CAAC;IAED,OAAO,EAAE,MAAM,QAAA,EAAE,OAAO,SAAA,EAAE,CAAA;AAC5B,CAAC;AA/FD,gDA+FC","sourcesContent":["import {\n inputResolutions,\n SegmentationConfig,\n} from '../helpers/segmentationHelper'\nimport {\n compileShader,\n createPiplelineStageProgram,\n createTexture,\n glsl,\n readPixelsAsync,\n} from '../helpers/webglHelper'\n\nexport function buildResizingStage(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n segmentationConfig: SegmentationConfig,\n tflite: any\n) {\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputFrame;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n outColor = texture(u_inputFrame, v_texCoord);\n }\n `\n\n // TFLite memory will be accessed as float32\n const tfliteInputMemoryOffset = tflite._getInputMemoryOffset() / 4\n\n const [outputWidth, outputHeight] = inputResolutions[\n segmentationConfig.inputResolution\n ]\n const outputPixelCount = outputWidth * outputHeight\n\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n 
positionBuffer,\n texCoordBuffer\n )\n const inputFrameLocation = gl.getUniformLocation(program, 'u_inputFrame')\n const outputTexture = createTexture(gl, gl.RGBA8, outputWidth, outputHeight)\n\n const frameBuffer = gl.createFramebuffer()\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n outputTexture,\n 0\n )\n const outputPixels = new Uint8Array(outputPixelCount * 4)\n\n gl.useProgram(program)\n gl.uniform1i(inputFrameLocation, 0)\n\n function render() {\n gl.viewport(0, 0, outputWidth, outputHeight)\n gl.useProgram(program)\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n\n // Downloads pixels asynchronously from GPU while rendering the current frame\n readPixelsAsync(\n gl,\n 0,\n 0,\n outputWidth,\n outputHeight,\n gl.RGBA,\n gl.UNSIGNED_BYTE,\n outputPixels\n )\n\n for (let i = 0; i < outputPixelCount; i++) {\n const tfliteIndex = tfliteInputMemoryOffset + i * 3\n const outputIndex = i * 4\n tflite.HEAPF32[tfliteIndex] = outputPixels[outputIndex] / 255\n tflite.HEAPF32[tfliteIndex + 1] = outputPixels[outputIndex + 1] / 255\n tflite.HEAPF32[tfliteIndex + 2] = outputPixels[outputIndex + 2] / 255\n }\n }\n\n function cleanUp() {\n gl.deleteFramebuffer(frameBuffer)\n gl.deleteTexture(outputTexture)\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n }\n\n return { render, cleanUp }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/softmaxStage.d.ts b/es5/processors/webgl2/pipelines/softmaxStage.d.ts new file mode 100644 index 0000000..4b9953c --- /dev/null +++ b/es5/processors/webgl2/pipelines/softmaxStage.d.ts @@ -0,0 +1,5 @@ +import { SegmentationConfig } from '../helpers/segmentationHelper'; +export declare function buildSoftmaxStage(gl: WebGL2RenderingContext, vertexShader: WebGLShader, positionBuffer: WebGLBuffer, texCoordBuffer: WebGLBuffer, segmentationConfig: SegmentationConfig, tflite: any, outputTexture: WebGLTexture): { + render: () => void; + cleanUp: () => void; +}; diff --git a/es5/processors/webgl2/pipelines/softmaxStage.js b/es5/processors/webgl2/pipelines/softmaxStage.js new file mode 100644 index 0000000..d77a1bc --- /dev/null +++ b/es5/processors/webgl2/pipelines/softmaxStage.js @@ -0,0 +1,45 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildSoftmaxStage = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +function buildSoftmaxStage(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, tflite, outputTexture) { + var fragmentShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec2 segmentation = texture(u_inputSegmentation, v_texCoord).rg;\n float shift = max(segmentation.r, segmentation.g);\n float backgroundExp = exp(segmentation.r - shift);\n float personExp = exp(segmentation.g - shift);\n outColor = vec4(vec3(0.0), personExp / (backgroundExp + personExp));\n }\n "], ["#version 300 es\n\n precision highp float;\n\n uniform 
sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec2 segmentation = texture(u_inputSegmentation, v_texCoord).rg;\n float shift = max(segmentation.r, segmentation.g);\n float backgroundExp = exp(segmentation.r - shift);\n float personExp = exp(segmentation.g - shift);\n outColor = vec4(vec3(0.0), personExp / (backgroundExp + personExp));\n }\n " + // TFLite memory will be accessed as float32 + ]))); + // TFLite memory will be accessed as float32 + var tfliteOutputMemoryOffset = tflite._getOutputMemoryOffset() / 4; + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var fragmentShader = (0, webglHelper_1.compileShader)(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); + var program = (0, webglHelper_1.createPiplelineStageProgram)(gl, vertexShader, fragmentShader, positionBuffer, texCoordBuffer); + var inputLocation = gl.getUniformLocation(program, 'u_inputSegmentation'); + var inputTexture = (0, webglHelper_1.createTexture)(gl, gl.RG32F, segmentationWidth, segmentationHeight); + var frameBuffer = gl.createFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, outputTexture, 0); + gl.useProgram(program); + gl.uniform1i(inputLocation, 1); + function render() { + gl.viewport(0, 0, segmentationWidth, segmentationHeight); + gl.useProgram(program); + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, inputTexture); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, segmentationWidth, segmentationHeight, gl.RG, gl.FLOAT, tflite.HEAPF32, tfliteOutputMemoryOffset); + gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer); + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + } + function cleanUp() { + gl.deleteFramebuffer(frameBuffer); + gl.deleteTexture(inputTexture); + gl.deleteProgram(program); + gl.deleteShader(fragmentShader); + } + return { render: render, cleanUp: cleanUp }; +} +exports.buildSoftmaxStage = buildSoftmaxStage; +var templateObject_1; +//# sourceMappingURL=softmaxStage.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/softmaxStage.js.map b/es5/processors/webgl2/pipelines/softmaxStage.js.map new file mode 100644 index 0000000..7e79046 --- /dev/null +++ b/es5/processors/webgl2/pipelines/softmaxStage.js.map @@ -0,0 +1 @@ 
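[Editor's note — not part of the diff] The fragment shader in buildSoftmaxStage implements a numerically stable two-class softmax: softmax is invariant under subtracting a constant from all logits, so shifting by the larger logit leaves the result unchanged while keeping exp() from overflowing. This stage serves models that emit two-channel logits (background, person); buildLoadSegmentationStage above serves models whose single RED-channel output is already a probability. A CPU mirror of the per-pixel arithmetic, for illustration only:

```ts
// CPU mirror of the shader's per-pixel math (illustration only).
// backgroundLogit/personLogit correspond to the .r/.g channels read
// from the TFLite output; the return value is the alpha written to
// outColor in the shader.
function personProbability(backgroundLogit: number, personLogit: number): number {
  const shift = Math.max(backgroundLogit, personLogit);    // max logit
  const backgroundExp = Math.exp(backgroundLogit - shift); // <= 1, no overflow
  const personExp = Math.exp(personLogit - shift);         // <= 1, no overflow
  return personExp / (backgroundExp + personExp);          // in (0, 1)
}
```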
+{"version":3,"file":"softmaxStage.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/softmaxStage.ts"],"names":[],"mappings":";;;;;;;AAAA,oEAGsC;AACtC,sDAK+B;AAE/B,SAAgB,iBAAiB,CAC/B,EAA0B,EAC1B,YAAyB,EACzB,cAA2B,EAC3B,cAA2B,EAC3B,kBAAsC,EACtC,MAAW,EACX,aAA2B;IAE3B,IAAM,oBAAoB,OAAG,kBAAI,ojBAAA,gfAiBhC;QAED,4CAA4C;QAF3C,CAAA;IAED,4CAA4C;IAC5C,IAAM,wBAAwB,GAAG,MAAM,CAAC,sBAAsB,EAAE,GAAG,CAAC,CAAA;IAE9D,IAAA,KAA0C,qCAAgB,CAC9D,kBAAkB,CAAC,eAAe,CACnC,EAFM,iBAAiB,QAAA,EAAE,kBAAkB,QAE3C,CAAA;IAED,IAAM,cAAc,GAAG,IAAA,2BAAa,EAClC,EAAE,EACF,EAAE,CAAC,eAAe,EAClB,oBAAoB,CACrB,CAAA;IACD,IAAM,OAAO,GAAG,IAAA,yCAA2B,EACzC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,cAAc,CACf,CAAA;IACD,IAAM,aAAa,GAAG,EAAE,CAAC,kBAAkB,CAAC,OAAO,EAAE,qBAAqB,CAAC,CAAA;IAC3E,IAAM,YAAY,GAAG,IAAA,2BAAa,EAChC,EAAE,EACF,EAAE,CAAC,KAAK,EACR,iBAAiB,EACjB,kBAAkB,CACnB,CAAA;IAED,IAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC1C,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;IAC/C,EAAE,CAAC,oBAAoB,CACrB,EAAE,CAAC,WAAW,EACd,EAAE,CAAC,iBAAiB,EACpB,EAAE,CAAC,UAAU,EACb,aAAa,EACb,CAAC,CACF,CAAA;IAED,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtB,EAAE,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,CAAC,CAAA;IAE9B,SAAS,MAAM;QACb,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,iBAAiB,EAAE,kBAAkB,CAAC,CAAA;QACxD,EAAE,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;QACtB,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;QAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,YAAY,CAAC,CAAA;QAC3C,EAAE,CAAC,aAAa,CACd,EAAE,CAAC,UAAU,EACb,CAAC,EACD,CAAC,EACD,CAAC,EACD,iBAAiB,EACjB,kBAAkB,EAClB,EAAE,CAAC,EAAE,EACL,EAAE,CAAC,KAAK,EACR,MAAM,CAAC,OAAO,EACd,wBAAwB,CACzB,CAAA;QACD,EAAE,CAAC,eAAe,CAAC,EAAE,CAAC,WAAW,EAAE,WAAW,CAAC,CAAA;QAC/C,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,cAAc,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACxC,CAAC;IAED,SAAS,OAAO;QACd,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACjC,EAAE,CAAC,aAAa,CAAC,YAAY,CAAC,CAAA;QAC9B,EAAE,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QACzB,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;IACjC,CAAC;IAED,OAAO,EAAE,MAAM,QAAA,EAAE,OAAO,SAAA,EAAE,CAAA;AAC5B,CAAC;AAjGD,8CAiGC","sourcesContent":["import {\n inputResolutions,\n SegmentationConfig,\n} from '../helpers/segmentationHelper'\nimport {\n compileShader,\n createPiplelineStageProgram,\n createTexture,\n glsl,\n} from '../helpers/webglHelper'\n\nexport function buildSoftmaxStage(\n gl: WebGL2RenderingContext,\n vertexShader: WebGLShader,\n positionBuffer: WebGLBuffer,\n texCoordBuffer: WebGLBuffer,\n segmentationConfig: SegmentationConfig,\n tflite: any,\n outputTexture: WebGLTexture\n) {\n const fragmentShaderSource = glsl`#version 300 es\n\n precision highp float;\n\n uniform sampler2D u_inputSegmentation;\n\n in vec2 v_texCoord;\n\n out vec4 outColor;\n\n void main() {\n vec2 segmentation = texture(u_inputSegmentation, v_texCoord).rg;\n float shift = max(segmentation.r, segmentation.g);\n float backgroundExp = exp(segmentation.r - shift);\n float personExp = exp(segmentation.g - shift);\n outColor = vec4(vec3(0.0), personExp / (backgroundExp + personExp));\n }\n `\n\n // TFLite memory will be accessed as float32\n const tfliteOutputMemoryOffset = tflite._getOutputMemoryOffset() / 4\n\n const [segmentationWidth, segmentationHeight] = inputResolutions[\n segmentationConfig.inputResolution\n ]\n\n const fragmentShader = compileShader(\n gl,\n gl.FRAGMENT_SHADER,\n fragmentShaderSource\n )\n const program = createPiplelineStageProgram(\n gl,\n vertexShader,\n fragmentShader,\n positionBuffer,\n texCoordBuffer\n )\n const inputLocation = gl.getUniformLocation(program, 'u_inputSegmentation')\n const inputTexture = createTexture(\n gl,\n 
gl.RG32F,\n segmentationWidth,\n segmentationHeight\n )\n\n const frameBuffer = gl.createFramebuffer()\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n outputTexture,\n 0\n )\n\n gl.useProgram(program)\n gl.uniform1i(inputLocation, 1)\n\n function render() {\n gl.viewport(0, 0, segmentationWidth, segmentationHeight)\n gl.useProgram(program)\n gl.activeTexture(gl.TEXTURE1)\n gl.bindTexture(gl.TEXTURE_2D, inputTexture)\n gl.texSubImage2D(\n gl.TEXTURE_2D,\n 0,\n 0,\n 0,\n segmentationWidth,\n segmentationHeight,\n gl.RG,\n gl.FLOAT,\n tflite.HEAPF32,\n tfliteOutputMemoryOffset\n )\n gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer)\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4)\n }\n\n function cleanUp() {\n gl.deleteFramebuffer(frameBuffer)\n gl.deleteTexture(inputTexture)\n gl.deleteProgram(program)\n gl.deleteShader(fragmentShader)\n }\n\n return { render, cleanUp }\n}\n"]} \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/webgl2Pipeline.d.ts b/es5/processors/webgl2/pipelines/webgl2Pipeline.d.ts new file mode 100644 index 0000000..affba13 --- /dev/null +++ b/es5/processors/webgl2/pipelines/webgl2Pipeline.d.ts @@ -0,0 +1,9 @@ +import { BackgroundConfig } from '../helpers/backgroundHelper'; +import { PostProcessingConfig } from '../helpers/postProcessingHelper'; +import { SegmentationConfig } from '../helpers/segmentationHelper'; +import { SourcePlayback } from '../helpers/sourceHelper'; +export declare function buildWebGL2Pipeline(sourcePlayback: SourcePlayback, backgroundImage: HTMLImageElement | null, backgroundConfig: BackgroundConfig, segmentationConfig: SegmentationConfig, canvas: HTMLCanvasElement, tflite: any, benchmark: any, debounce: boolean): { + render: () => Promise; + updatePostProcessingConfig: (postProcessingConfig: PostProcessingConfig) => void; + cleanUp: () => void; +}; diff --git a/es5/processors/webgl2/pipelines/webgl2Pipeline.js b/es5/processors/webgl2/pipelines/webgl2Pipeline.js new file mode 100644 index 0000000..05ba11b --- /dev/null +++ b/es5/processors/webgl2/pipelines/webgl2Pipeline.js @@ -0,0 +1,153 @@ +"use strict"; +var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.buildWebGL2Pipeline = void 0; +var segmentationHelper_1 = require("../helpers/segmentationHelper"); +var webglHelper_1 = require("../helpers/webglHelper"); +var backgroundBlurStage_1 = require("./backgroundBlurStage"); +var backgroundImageStage_1 = require("./backgroundImageStage"); +var jointBilateralFilterStage_1 = require("./jointBilateralFilterStage"); +var loadSegmentationStage_1 = require("./loadSegmentationStage"); +var resizingStage_1 = require("./resizingStage"); +function buildWebGL2Pipeline(sourcePlayback, backgroundImage, backgroundConfig, segmentationConfig, canvas, tflite, benchmark, debounce) { + var shouldRunInference = true; + var vertexShaderSource = (0, webglHelper_1.glsl)(templateObject_1 || (templateObject_1 = __makeTemplateObject(["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "], ["#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n "]))); + var frameWidth = sourcePlayback.width, frameHeight = sourcePlayback.height; + var _a = segmentationHelper_1.inputResolutions[segmentationConfig.inputResolution], segmentationWidth = _a[0], segmentationHeight = _a[1]; + var gl = canvas.getContext('webgl2'); + var vertexShader = (0, webglHelper_1.compileShader)(gl, gl.VERTEX_SHADER, vertexShaderSource); + var vertexArray = gl.createVertexArray(); + gl.bindVertexArray(vertexArray); + var positionBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 
1.0]), gl.STATIC_DRAW); + var texCoordBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0]), gl.STATIC_DRAW); + // We don't use texStorage2D here because texImage2D seems faster + // to upload video texture than texSubImage2D even though the latter + // is supposed to be the recommended way: + // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#use_texstorage_to_create_textures + var inputFrameTexture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); + // TODO Rename segmentation and person mask to be more specific + var segmentationTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, segmentationWidth, segmentationHeight); + var personMaskTexture = (0, webglHelper_1.createTexture)(gl, gl.RGBA8, frameWidth, frameHeight); + var resizingStage = (0, resizingStage_1.buildResizingStage)(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, tflite); + var loadSegmentationStage = (0, loadSegmentationStage_1.buildLoadSegmentationStage)(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationConfig, tflite, segmentationTexture); + var jointBilateralFilterStage = (0, jointBilateralFilterStage_1.buildJointBilateralFilterStage)(gl, vertexShader, positionBuffer, texCoordBuffer, segmentationTexture, segmentationConfig, personMaskTexture, canvas); + var backgroundStage = backgroundConfig.type === 'blur' + ? 
(0, backgroundBlurStage_1.buildBackgroundBlurStage)(gl, vertexShader, positionBuffer, texCoordBuffer, personMaskTexture, canvas) + : (0, backgroundImageStage_1.buildBackgroundImageStage)(gl, positionBuffer, texCoordBuffer, personMaskTexture, backgroundImage, canvas); + function render() { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + benchmark.start('inputImageResizeDelay'); + gl.clearColor(0, 0, 0, 0); + gl.clear(gl.COLOR_BUFFER_BIT); + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture); + // texImage2D seems faster than texSubImage2D to upload + // video texture + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, sourcePlayback.htmlElement); + gl.bindVertexArray(vertexArray); + resizingStage.render(); + benchmark.end('inputImageResizeDelay'); + benchmark.start('segmentationDelay'); + if (shouldRunInference) { + tflite._runInference(); + } + if (debounce) { + shouldRunInference = !shouldRunInference; + } + benchmark.end('segmentationDelay'); + benchmark.start('imageCompositionDelay'); + loadSegmentationStage.render(); + jointBilateralFilterStage.render(); + backgroundStage.render(); + benchmark.end('imageCompositionDelay'); + return [2 /*return*/]; + }); + }); + } + function updatePostProcessingConfig(postProcessingConfig) { + jointBilateralFilterStage.updateSigmaSpace(postProcessingConfig.jointBilateralFilter.sigmaSpace); + jointBilateralFilterStage.updateSigmaColor(postProcessingConfig.jointBilateralFilter.sigmaColor); + if (backgroundConfig.type === 'image') { + var backgroundImageStage = backgroundStage; + backgroundImageStage.updateCoverage(postProcessingConfig.coverage); + backgroundImageStage.updateLightWrapping(postProcessingConfig.lightWrapping); + backgroundImageStage.updateBlendMode(postProcessingConfig.blendMode); + } + else if (backgroundConfig.type === 'blur') { + var backgroundBlurStage = backgroundStage; + backgroundBlurStage.updateCoverage(postProcessingConfig.coverage); + } + else { + // TODO Handle no background in a separate pipeline path + var backgroundImageStage = backgroundStage; + backgroundImageStage.updateCoverage([0, 0.9999]); + backgroundImageStage.updateLightWrapping(0); + } + } + function cleanUp() { + backgroundStage.cleanUp(); + jointBilateralFilterStage.cleanUp(); + loadSegmentationStage.cleanUp(); + resizingStage.cleanUp(); + gl.deleteTexture(personMaskTexture); + gl.deleteTexture(segmentationTexture); + gl.deleteTexture(inputFrameTexture); + gl.deleteBuffer(texCoordBuffer); + gl.deleteBuffer(positionBuffer); + gl.deleteVertexArray(vertexArray); + gl.deleteShader(vertexShader); + } + return { render: render, updatePostProcessingConfig: updatePostProcessingConfig, cleanUp: cleanUp }; +} +exports.buildWebGL2Pipeline = buildWebGL2Pipeline; +var templateObject_1; +//# sourceMappingURL=webgl2Pipeline.js.map \ No newline at end of file diff --git a/es5/processors/webgl2/pipelines/webgl2Pipeline.js.map b/es5/processors/webgl2/pipelines/webgl2Pipeline.js.map new file mode 100644 index 0000000..79cbef6 --- /dev/null +++ b/es5/processors/webgl2/pipelines/webgl2Pipeline.js.map @@ -0,0 +1 @@ 
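[Editor's note — not part of the diff] buildWebGL2Pipeline assembles the stages in order: resize, TFLite inference, load segmentation, joint bilateral filter, then background blur or image composite, with per-stage timings recorded through the benchmark object (buildSoftmaxStage is imported in the TypeScript source but not invoked in this pipeline). When debounce is true, shouldRunInference toggles on every frame, so inference effectively runs on every other frame while composition still runs on all of them, at the cost of the mask being one frame stale on skipped frames. A hypothetical driver loop follows; the configuration literals, import path, and the tflite/benchmark objects are stand-ins, not values confirmed by this diff.

```ts
// Hypothetical wiring of the pipeline above. Everything declared here
// (video element, canvas, tflite module, benchmark) is assumed to be
// created elsewhere by the caller; '256x144' is an assumed key of
// inputResolutions, and other SegmentationConfig fields are omitted.
import { buildWebGL2Pipeline } from './webgl2Pipeline';

declare const video: HTMLVideoElement;
declare const canvas: HTMLCanvasElement;
declare const tflite: any;     // Emscripten module exposing _runInference()
declare const benchmark: any;  // Benchmark instance (see es5/utils/Benchmark)

const pipeline = buildWebGL2Pipeline(
  { htmlElement: video, width: video.videoWidth, height: video.videoHeight },
  null,                            // no background image needed for blur
  { type: 'blur' },                // selects buildBackgroundBlurStage
  { inputResolution: '256x144' },  // assumed inputResolutions key
  canvas,
  tflite,
  benchmark,
  true                             // debounce: infer on every other frame
);

async function loop(): Promise<void> {
  await pipeline.render();         // resize + inference + composition
  requestAnimationFrame(loop);
}
requestAnimationFrame(loop);
```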
+{"version":3,"file":"webgl2Pipeline.js","sourceRoot":"","sources":["../../../../lib/processors/webgl2/pipelines/webgl2Pipeline.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAEA,oEAGsC;AAEtC,sDAA2E;AAC3E,6DAG8B;AAC9B,+DAG+B;AAC/B,yEAA4E;AAC5E,iEAAoE;AACpE,iDAAoD;AAGpD,SAAgB,mBAAmB,CACjC,cAA8B,EAC9B,eAAwC,EACxC,gBAAkC,EAClC,kBAAsC,EACtC,MAAyB,EACzB,MAAW,EACX,SAAc,EACd,QAAiB;IAEjB,IAAI,kBAAkB,GAAG,IAAI,CAAC;IAE9B,IAAM,kBAAkB,OAAG,kBAAI,sRAAA,kNAW9B,IAAA,CAAA;IAEO,IAAO,UAAU,GAA0B,cAAc,MAAxC,EAAU,WAAW,GAAK,cAAc,OAAnB,CAAmB;IAC3D,IAAA,KAA0C,qCAAgB,CAC9D,kBAAkB,CAAC,eAAe,CACnC,EAFM,iBAAiB,QAAA,EAAE,kBAAkB,QAE3C,CAAA;IAED,IAAM,EAAE,GAAG,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAE,CAAA;IAEvC,IAAM,YAAY,GAAG,IAAA,2BAAa,EAAC,EAAE,EAAE,EAAE,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAA;IAE5E,IAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,EAAE,CAAA;IAC1C,EAAE,CAAC,eAAe,CAAC,WAAW,CAAC,CAAA;IAE/B,IAAM,cAAc,GAAG,EAAE,CAAC,YAAY,EAAG,CAAA;IACzC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,YAAY,EAAE,cAAc,CAAC,CAAA;IAC9C,EAAE,CAAC,UAAU,CACX,EAAE,CAAC,YAAY,EACf,IAAI,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,EAC9D,EAAE,CAAC,WAAW,CACf,CAAA;IAED,IAAM,cAAc,GAAG,EAAE,CAAC,YAAY,EAAG,CAAA;IACzC,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,YAAY,EAAE,cAAc,CAAC,CAAA;IAC9C,EAAE,CAAC,UAAU,CACX,EAAE,CAAC,YAAY,EACf,IAAI,YAAY,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,EAC1D,EAAE,CAAC,WAAW,CACf,CAAA;IAED,iEAAiE;IACjE,oEAAoE;IACpE,yCAAyC;IACzC,oHAAoH;IACpH,IAAM,iBAAiB,GAAG,EAAE,CAAC,aAAa,EAAE,CAAA;IAC5C,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAA;IAChD,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,cAAc,EAAE,EAAE,CAAC,aAAa,CAAC,CAAA;IACpE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,cAAc,EAAE,EAAE,CAAC,aAAa,CAAC,CAAA;IACpE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,kBAAkB,EAAE,EAAE,CAAC,OAAO,CAAC,CAAA;IAClE,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,UAAU,EAAE,EAAE,CAAC,kBAAkB,EAAE,EAAE,CAAC,OAAO,CAAC,CAAA;IAElE,+DAA+D;IAC/D,IAAM,mBAAmB,GAAG,IAAA,2BAAa,EACvC,EAAE,EACF,EAAE,CAAC,KAAK,EACR,iBAAiB,EACjB,kBAAkB,CAClB,CAAA;IACF,IAAM,iBAAiB,GAAG,IAAA,2BAAa,EACrC,EAAE,EACF,EAAE,CAAC,KAAK,EACR,UAAU,EACV,WAAW,CACX,CAAA;IAEF,IAAM,aAAa,GAAG,IAAA,kCAAkB,EACtC,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,kBAAkB,EAClB,MAAM,CACP,CAAA;IACD,IAAM,qBAAqB,GAAG,IAAA,kDAA0B,EACtD,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,kBAAkB,EAClB,MAAM,EACN,mBAAmB,CACpB,CAAC;IACF,IAAM,yBAAyB,GAAG,IAAA,0DAA8B,EAC9D,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,mBAAmB,EACnB,kBAAkB,EAClB,iBAAiB,EACjB,MAAM,CACP,CAAA;IACD,IAAM,eAAe,GACnB,gBAAgB,CAAC,IAAI,KAAK,MAAM;QAC9B,CAAC,CAAC,IAAA,8CAAwB,EACtB,EAAE,EACF,YAAY,EACZ,cAAc,EACd,cAAc,EACd,iBAAiB,EACjB,MAAM,CACP;QACH,CAAC,CAAC,IAAA,gDAAyB,EACvB,EAAE,EACF,cAAc,EACd,cAAc,EACd,iBAAiB,EACjB,eAAe,EACf,MAAM,CACP,CAAA;IAEP,SAAe,MAAM;;;gBACnB,SAAS,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAA;gBACxC,EAAE,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;gBACzB,EAAE,CAAC,KAAK,CAAC,EAAE,CAAC,gBAAgB,CAAC,CAAA;gBAE7B,EAAE,CAAC,aAAa,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAA;gBAC7B,EAAE,CAAC,WAAW,CAAC,EAAE,CAAC,UAAU,EAAE,iBAAiB,CAAC,CAAA;gBAEhD,uDAAuD;gBACvD,gBAAgB;gBAChB,EAAE,CAAC,UAAU,CACX,EAAE,CAAC,UAAU,EACb,CAAC,EACD,EAAE,CAAC,IAAI,EACP,EAAE,CAAC,IAAI,EACP,EAAE,CAAC,aAAa,EAChB,cAAc,CAAC,WAAW,CAC3B,CAAA;gBAED,EAAE,CAAC,eAAe,CAAC,WAAW,CAAC,CAAA;gBAE/B,aAAa,CAAC,MAAM,EAAE,CAAA;gBACtB,SAAS,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;gBAEtC,SAAS,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAA;gBACpC,IAAI,kBAAkB,EAAE;oBACtB,MAAM,CAAC,aAAa,EAAE,CAAA;iBACvB;gBACD,IAAI,QAAQ,EAAE;oBACZ,kBAAkB,GAAG,CAA
C,kBAAkB,CAAC;iBAC1C;gBACD,SAAS,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAA;gBAElC,SAAS,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAA;gBACxC,qBAAqB,CAAC,MAAM,EAAE,CAAA;gBAC9B,yBAAyB,CAAC,MAAM,EAAE,CAAA;gBAClC,eAAe,CAAC,MAAM,EAAE,CAAA;gBACxB,SAAS,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;;;;KACvC;IAED,SAAS,0BAA0B,CACjC,oBAA0C;QAE1C,yBAAyB,CAAC,gBAAgB,CACxC,oBAAoB,CAAC,oBAAoB,CAAC,UAAU,CACrD,CAAA;QACD,yBAAyB,CAAC,gBAAgB,CACxC,oBAAoB,CAAC,oBAAoB,CAAC,UAAU,CACrD,CAAA;QAED,IAAI,gBAAgB,CAAC,IAAI,KAAK,OAAO,EAAE;YACrC,IAAM,oBAAoB,GAAG,eAAuC,CAAA;YACpE,oBAAoB,CAAC,cAAc,CAAC,oBAAoB,CAAC,QAAQ,CAAC,CAAA;YAClE,oBAAoB,CAAC,mBAAmB,CACtC,oBAAoB,CAAC,aAAa,CACnC,CAAA;YACD,oBAAoB,CAAC,eAAe,CAAC,oBAAoB,CAAC,SAAS,CAAC,CAAA;SACrE;aAAM,IAAI,gBAAgB,CAAC,IAAI,KAAK,MAAM,EAAE;YAC3C,IAAM,mBAAmB,GAAG,eAAsC,CAAA;YAClE,mBAAmB,CAAC,cAAc,CAAC,oBAAoB,CAAC,QAAQ,CAAC,CAAA;SAClE;aAAM;YACL,wDAAwD;YACxD,IAAM,oBAAoB,GAAG,eAAuC,CAAA;YACpE,oBAAoB,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAA;YAChD,oBAAoB,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAA;SAC5C;IACH,CAAC;IAED,SAAS,OAAO;QACd,eAAe,CAAC,OAAO,EAAE,CAAA;QACzB,yBAAyB,CAAC,OAAO,EAAE,CAAA;QACnC,qBAAqB,CAAC,OAAO,EAAE,CAAA;QAC/B,aAAa,CAAC,OAAO,EAAE,CAAA;QAEvB,EAAE,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAA;QACnC,EAAE,CAAC,aAAa,CAAC,mBAAmB,CAAC,CAAA;QACrC,EAAE,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAA;QACnC,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;QAC/B,EAAE,CAAC,YAAY,CAAC,cAAc,CAAC,CAAA;QAC/B,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACjC,EAAE,CAAC,YAAY,CAAC,YAAY,CAAC,CAAA;IAC/B,CAAC;IAED,OAAO,EAAE,MAAM,QAAA,EAAE,0BAA0B,4BAAA,EAAE,OAAO,SAAA,EAAE,CAAA;AACxD,CAAC;AAhND,kDAgNC","sourcesContent":["import { BackgroundConfig } from '../helpers/backgroundHelper'\nimport { PostProcessingConfig } from '../helpers/postProcessingHelper'\nimport {\n inputResolutions,\n SegmentationConfig,\n} from '../helpers/segmentationHelper'\nimport { SourcePlayback } from '../helpers/sourceHelper'\nimport { compileShader, createTexture, glsl } from '../helpers/webglHelper'\nimport {\n BackgroundBlurStage,\n buildBackgroundBlurStage,\n} from './backgroundBlurStage'\nimport {\n BackgroundImageStage,\n buildBackgroundImageStage,\n} from './backgroundImageStage'\nimport { buildJointBilateralFilterStage } from './jointBilateralFilterStage'\nimport { buildLoadSegmentationStage } from './loadSegmentationStage'\nimport { buildResizingStage } from './resizingStage'\nimport { buildSoftmaxStage } from './softmaxStage'\n\nexport function buildWebGL2Pipeline(\n sourcePlayback: SourcePlayback,\n backgroundImage: HTMLImageElement | null,\n backgroundConfig: BackgroundConfig,\n segmentationConfig: SegmentationConfig,\n canvas: HTMLCanvasElement,\n tflite: any,\n benchmark: any,\n debounce: boolean,\n) {\n let shouldRunInference = true;\n\n const vertexShaderSource = glsl`#version 300 es\n\n in vec2 a_position;\n in vec2 a_texCoord;\n\n out vec2 v_texCoord;\n\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n v_texCoord = a_texCoord;\n }\n `\n\n const { width: frameWidth, height: frameHeight } = sourcePlayback\n const [segmentationWidth, segmentationHeight] = inputResolutions[\n segmentationConfig.inputResolution\n ]\n\n const gl = canvas.getContext('webgl2')!\n\n const vertexShader = compileShader(gl, gl.VERTEX_SHADER, vertexShaderSource)\n\n const vertexArray = gl.createVertexArray()\n gl.bindVertexArray(vertexArray)\n\n const positionBuffer = gl.createBuffer()!\n gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer)\n gl.bufferData(\n gl.ARRAY_BUFFER,\n new Float32Array([-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0]),\n gl.STATIC_DRAW\n )\n\n const texCoordBuffer = gl.createBuffer()!\n 
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer)\n gl.bufferData(\n gl.ARRAY_BUFFER,\n new Float32Array([0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0]),\n gl.STATIC_DRAW\n )\n\n // We don't use texStorage2D here because texImage2D seems faster\n // to upload video texture than texSubImage2D even though the latter\n // is supposed to be the recommended way:\n // https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/WebGL_best_practices#use_texstorage_to_create_textures\n const inputFrameTexture = gl.createTexture()\n gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST)\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST)\n\n // TODO Rename segmentation and person mask to be more specific\n const segmentationTexture = createTexture(\n gl,\n gl.RGBA8,\n segmentationWidth,\n segmentationHeight\n )!\n const personMaskTexture = createTexture(\n gl,\n gl.RGBA8,\n frameWidth,\n frameHeight\n )!\n\n const resizingStage = buildResizingStage(\n gl,\n vertexShader,\n positionBuffer,\n texCoordBuffer,\n segmentationConfig,\n tflite\n )\n const loadSegmentationStage = buildLoadSegmentationStage(\n gl,\n vertexShader,\n positionBuffer,\n texCoordBuffer,\n segmentationConfig,\n tflite,\n segmentationTexture\n );\n const jointBilateralFilterStage = buildJointBilateralFilterStage(\n gl,\n vertexShader,\n positionBuffer,\n texCoordBuffer,\n segmentationTexture,\n segmentationConfig,\n personMaskTexture,\n canvas\n )\n const backgroundStage =\n backgroundConfig.type === 'blur'\n ? buildBackgroundBlurStage(\n gl,\n vertexShader,\n positionBuffer,\n texCoordBuffer,\n personMaskTexture,\n canvas\n )\n : buildBackgroundImageStage(\n gl,\n positionBuffer,\n texCoordBuffer,\n personMaskTexture,\n backgroundImage,\n canvas\n )\n\n async function render() {\n benchmark.start('inputImageResizeDelay')\n gl.clearColor(0, 0, 0, 0)\n gl.clear(gl.COLOR_BUFFER_BIT)\n\n gl.activeTexture(gl.TEXTURE0)\n gl.bindTexture(gl.TEXTURE_2D, inputFrameTexture)\n\n // texImage2D seems faster than texSubImage2D to upload\n // video texture\n gl.texImage2D(\n gl.TEXTURE_2D,\n 0,\n gl.RGBA,\n gl.RGBA,\n gl.UNSIGNED_BYTE,\n sourcePlayback.htmlElement\n )\n\n gl.bindVertexArray(vertexArray)\n\n resizingStage.render()\n benchmark.end('inputImageResizeDelay')\n\n benchmark.start('segmentationDelay')\n if (shouldRunInference) {\n tflite._runInference()\n }\n if (debounce) {\n shouldRunInference = !shouldRunInference;\n }\n benchmark.end('segmentationDelay')\n\n benchmark.start('imageCompositionDelay')\n loadSegmentationStage.render()\n jointBilateralFilterStage.render()\n backgroundStage.render()\n benchmark.end('imageCompositionDelay')\n }\n\n function updatePostProcessingConfig(\n postProcessingConfig: PostProcessingConfig\n ) {\n jointBilateralFilterStage.updateSigmaSpace(\n postProcessingConfig.jointBilateralFilter.sigmaSpace\n )\n jointBilateralFilterStage.updateSigmaColor(\n postProcessingConfig.jointBilateralFilter.sigmaColor\n )\n\n if (backgroundConfig.type === 'image') {\n const backgroundImageStage = backgroundStage as BackgroundImageStage\n backgroundImageStage.updateCoverage(postProcessingConfig.coverage)\n backgroundImageStage.updateLightWrapping(\n postProcessingConfig.lightWrapping\n )\n backgroundImageStage.updateBlendMode(postProcessingConfig.blendMode)\n } else if (backgroundConfig.type === 'blur') {\n const 
backgroundBlurStage = backgroundStage as BackgroundBlurStage\n backgroundBlurStage.updateCoverage(postProcessingConfig.coverage)\n } else {\n // TODO Handle no background in a separate pipeline path\n const backgroundImageStage = backgroundStage as BackgroundImageStage\n backgroundImageStage.updateCoverage([0, 0.9999])\n backgroundImageStage.updateLightWrapping(0)\n }\n }\n\n function cleanUp() {\n backgroundStage.cleanUp()\n jointBilateralFilterStage.cleanUp()\n loadSegmentationStage.cleanUp()\n resizingStage.cleanUp()\n\n gl.deleteTexture(personMaskTexture)\n gl.deleteTexture(segmentationTexture)\n gl.deleteTexture(inputFrameTexture)\n gl.deleteBuffer(texCoordBuffer)\n gl.deleteBuffer(positionBuffer)\n gl.deleteVertexArray(vertexArray)\n gl.deleteShader(vertexShader)\n }\n\n return { render, updatePostProcessingConfig, cleanUp }\n}\n"]} \ No newline at end of file diff --git a/es5/types.d.ts b/es5/types.d.ts new file mode 100644 index 0000000..213849b --- /dev/null +++ b/es5/types.d.ts @@ -0,0 +1,79 @@ +/** + * @private + */ +declare global { + interface Window { + chrome: any; + createTwilioTFLiteModule: () => Promise; + createTwilioTFLiteSIMDModule: () => Promise; + OffscreenCanvas: typeof OffscreenCanvas; + Twilio: Object & { + VideoProcessors?: any; + }; + } +} +/** + * @private + */ +export declare enum WebGL2PipelineType { + Blur = "blur", + Image = "image" +} +/** + * @private + */ +export interface Timing { + delay?: number; + end?: number; + start?: number; +} +/** + * @private + */ +export interface Dimensions { + height: number; + width: number; +} +/** + * ImageFit specifies the positioning of an image inside a viewport. + */ +export declare enum ImageFit { + /** + * Scale the image up or down to fill the viewport while preserving the aspect ratio. + * The image will be fully visible but will add empty space in the viewport if + * aspect ratios do not match. + */ + Contain = "Contain", + /** + * Scale the image to fill both height and width of the viewport while preserving + * the aspect ratio, but will crop the image if aspect ratios do not match. + */ + Cover = "Cover", + /** + * Stretches the image to fill the viewport regardless of aspect ratio. + */ + Fill = "Fill", + /** + * Ignore height and width and use the original size. + */ + None = "None" +} +/** + * Specifies which pipeline to use when processing video frames. + */ +export declare enum Pipeline { + /** + * Use canvas 2d rendering context. Some browsers such as Safari do not + * have full support of this feature. Please test your application to make sure it works as intented. See + * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D#browser_compatibility) + * for reference. + */ + Canvas2D = "Canvas2D", + /** + * Use canvas webgl2 rendering context. Major browsers have support for this feature. However, this does not work + * on some older versions of browsers. Please test your application to make sure it works as intented. See + * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext#browser_compatibility) + * for reference. 
+ */ + WebGL2 = "WebGL2" +} diff --git a/es5/types.js new file mode 100644 index 0000000..d31b53a --- /dev/null +++ b/es5/types.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pipeline = exports.ImageFit = exports.WebGL2PipelineType = void 0; +/** + * @private + */ +var WebGL2PipelineType; +(function (WebGL2PipelineType) { + WebGL2PipelineType["Blur"] = "blur"; + WebGL2PipelineType["Image"] = "image"; +})(WebGL2PipelineType || (exports.WebGL2PipelineType = WebGL2PipelineType = {})); +/** + * ImageFit specifies the positioning of an image inside a viewport. + */ +var ImageFit; +(function (ImageFit) { + /** + * Scale the image up or down to fill the viewport while preserving the aspect ratio. + * The image will be fully visible but will add empty space in the viewport if + * aspect ratios do not match. + */ + ImageFit["Contain"] = "Contain"; + /** + * Scale the image to fill both height and width of the viewport while preserving + * the aspect ratio, but will crop the image if aspect ratios do not match. + */ + ImageFit["Cover"] = "Cover"; + /** + * Stretches the image to fill the viewport regardless of aspect ratio. + */ + ImageFit["Fill"] = "Fill"; + /** + * Ignore height and width and use the original size. + */ + ImageFit["None"] = "None"; +})(ImageFit || (exports.ImageFit = ImageFit = {})); +/** + * Specifies which pipeline to use when processing video frames. + */ +var Pipeline; +(function (Pipeline) { + /** + * Use canvas 2d rendering context. Some browsers such as Safari do not + * have full support of this feature. Please test your application to make sure it works as intended. See + * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D#browser_compatibility) + * for reference. + */ + Pipeline["Canvas2D"] = "Canvas2D"; + /** + * Use canvas webgl2 rendering context. Major browsers have support for this feature. However, this does not work + * on some older versions of browsers. Please test your application to make sure it works as intended. See + * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext#browser_compatibility) + * for reference. 
+ */ + Pipeline["WebGL2"] = "WebGL2"; +})(Pipeline || (exports.Pipeline = Pipeline = {})); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/es5/types.js.map b/es5/types.js.map new file mode 100644 index 0000000..a51d73d --- /dev/null +++ b/es5/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../lib/types.ts"],"names":[],"mappings":";;;AAaA;;GAEG;AACH,IAAY,kBAGX;AAHD,WAAY,kBAAkB;IAC5B,mCAAa,CAAA;IACb,qCAAe,CAAA;AACjB,CAAC,EAHW,kBAAkB,kCAAlB,kBAAkB,QAG7B;AAmBD;;GAEG;AACH,IAAY,QAuBX;AAvBD,WAAY,QAAQ;IAClB;;;;OAIG;IACH,+BAAmB,CAAA;IAEnB;;;OAGG;IACH,2BAAe,CAAA;IAEf;;OAEG;IACH,yBAAa,CAAA;IAEb;;OAEG;IACH,yBAAa,CAAA;AACf,CAAC,EAvBW,QAAQ,wBAAR,QAAQ,QAuBnB;AAED;;GAEG;AACH,IAAY,QAgBX;AAhBD,WAAY,QAAQ;IAClB;;;;;OAKG;IACH,iCAAqB,CAAA;IAErB;;;;;OAKG;IACH,6BAAiB,CAAA;AACnB,CAAC,EAhBW,QAAQ,wBAAR,QAAQ,QAgBnB","sourcesContent":["/**\n * @private\n */\n declare global {\n interface Window {\n chrome: any;\n createTwilioTFLiteModule: () => Promise;\n createTwilioTFLiteSIMDModule: () => Promise;\n OffscreenCanvas: typeof OffscreenCanvas;\n Twilio: Object & { VideoProcessors?: any };\n }\n}\n\n/**\n * @private\n */\nexport enum WebGL2PipelineType {\n Blur = 'blur',\n Image = 'image',\n}\n\n/**\n * @private\n */\nexport interface Timing {\n delay?: number;\n end?: number;\n start?: number;\n}\n\n/**\n * @private\n */\nexport interface Dimensions {\n height: number;\n width: number;\n}\n\n/**\n * ImageFit specifies the positioning of an image inside a viewport.\n */\nexport enum ImageFit {\n /**\n * Scale the image up or down to fill the viewport while preserving the aspect ratio.\n * The image will be fully visible but will add empty space in the viewport if\n * aspect ratios do not match.\n */\n Contain = 'Contain',\n\n /**\n * Scale the image to fill both height and width of the viewport while preserving\n * the aspect ratio, but will crop the image if aspect ratios do not match.\n */\n Cover = 'Cover',\n\n /**\n * Stretches the image to fill the viewport regardless of aspect ratio.\n */\n Fill = 'Fill',\n\n /**\n * Ignore height and width and use the original size.\n */\n None = 'None'\n}\n\n/**\n * Specifies which pipeline to use when processing video frames.\n */\nexport enum Pipeline {\n /**\n * Use canvas 2d rendering context. Some browsers such as Safari do not\n * have full support of this feature. Please test your application to make sure it works as intented. See\n * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D#browser_compatibility)\n * for reference.\n */\n Canvas2D = 'Canvas2D',\n\n /**\n * Use canvas webgl2 rendering context. Major browsers have support for this feature. However, this does not work\n * on some older versions of browsers. Please test your application to make sure it works as intented. 
See\n * [browser compatibility page](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext#browser_compatibility)\n * for reference.\n */\n WebGL2 = 'WebGL2'\n}\n"]} \ No newline at end of file diff --git a/es5/utils/Benchmark.d.ts b/es5/utils/Benchmark.d.ts new file mode 100644 index 0000000..7de7039 --- /dev/null +++ b/es5/utils/Benchmark.d.ts @@ -0,0 +1,15 @@ +/** + * @private + */ +export declare class Benchmark { + static readonly cacheSize = 41; + private _timingCache; + private _timings; + constructor(); + end(name: string): void; + getAverageDelay(name: string): number | undefined; + getNames(): string[]; + getRate(name: string): number | undefined; + start(name: string): void; + private _save; +} diff --git a/es5/utils/Benchmark.js b/es5/utils/Benchmark.js new file mode 100644 index 0000000..72660e5 --- /dev/null +++ b/es5/utils/Benchmark.js @@ -0,0 +1,79 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Benchmark = void 0; +/** + * @private + */ +var Benchmark = /** @class */ (function () { + function Benchmark() { + this._timingCache = new Map(); + this._timings = new Map(); + } + Benchmark.prototype.end = function (name) { + var timing = this._timings.get(name); + if (!timing) { + return; + } + timing.end = Date.now(); + timing.delay = timing.end - timing.start; + this._save(name, __assign({}, timing)); + }; + Benchmark.prototype.getAverageDelay = function (name) { + var timingCache = this._timingCache.get(name); + if (!timingCache || !timingCache.length) { + return; + } + return timingCache.map(function (timing) { return timing.delay; }) + .reduce(function (total, value) { return total += value; }, 0) / timingCache.length; + }; + Benchmark.prototype.getNames = function () { + return Array.from(this._timingCache.keys()); + }; + Benchmark.prototype.getRate = function (name) { + var timingCache = this._timingCache.get(name); + if (!timingCache || timingCache.length < 2) { + return; + } + var totalDelay = timingCache[timingCache.length - 1].end - timingCache[0].start; + return (timingCache.length / totalDelay) * 1000; + }; + Benchmark.prototype.start = function (name) { + var timing = this._timings.get(name); + if (!timing) { + timing = {}; + this._timings.set(name, timing); + } + timing.start = Date.now(); + delete timing.end; + delete timing.delay; + }; + Benchmark.prototype._save = function (name, timing) { + var timingCache = this._timingCache.get(name); + if (!timingCache) { + timingCache = []; + this._timingCache.set(name, timingCache); + } + timingCache.push(timing); + if (timingCache.length > Benchmark.cacheSize) { + timingCache.splice(0, timingCache.length - Benchmark.cacheSize); + } + }; + // NOTE (csantos): How many timing information to save per benchmark. + // This is about the amount of timing info generated on a 24fps input. 
+ // Enough samples to calculate fps + Benchmark.cacheSize = 41; + return Benchmark; +}()); +exports.Benchmark = Benchmark; +//# sourceMappingURL=Benchmark.js.map \ No newline at end of file diff --git a/es5/utils/Benchmark.js.map b/es5/utils/Benchmark.js.map new file mode 100644 index 0000000..fb5a05d --- /dev/null +++ b/es5/utils/Benchmark.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Benchmark.js","sourceRoot":"","sources":["../../lib/utils/Benchmark.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAEA;;GAEG;AACH;IAUE;QACE,IAAI,CAAC,YAAY,GAAG,IAAI,GAAG,EAAE,CAAC;QAC9B,IAAI,CAAC,QAAQ,GAAG,IAAI,GAAG,EAAE,CAAC;IAC5B,CAAC;IAED,uBAAG,GAAH,UAAI,IAAY;QACd,IAAM,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,CAAC,MAAM,EAAE;YACX,OAAO;SACR;QACD,MAAM,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QACxB,MAAM,CAAC,KAAK,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC,KAAM,CAAC;QAC1C,IAAI,CAAC,KAAK,CAAC,IAAI,eAAM,MAAM,EAAE,CAAC;IAChC,CAAC;IAED,mCAAe,GAAf,UAAgB,IAAY;QAC1B,IAAM,WAAW,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAChD,IAAI,CAAC,WAAW,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE;YACvC,OAAO;SACR;QACD,OAAO,WAAW,CAAC,GAAG,CAAC,UAAA,MAAM,IAAI,OAAA,MAAM,CAAC,KAAM,EAAb,CAAa,CAAC;aAC5C,MAAM,CAAC,UAAC,KAAa,EAAE,KAAa,IAAK,OAAA,KAAK,IAAI,KAAK,EAAd,CAAc,EAAE,CAAC,CAAC,GAAG,WAAW,CAAC,MAAM,CAAC;IACtF,CAAC;IAED,4BAAQ,GAAR;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC;IAC9C,CAAC;IAED,2BAAO,GAAP,UAAQ,IAAY;QAClB,IAAM,WAAW,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAChD,IAAI,CAAC,WAAW,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE;YAC1C,OAAO;SACR;QACD,IAAM,UAAU,GAAG,WAAW,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAI,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC,KAAM,CAAC;QACpF,OAAO,CAAC,WAAW,CAAC,MAAM,GAAG,UAAU,CAAC,GAAG,IAAI,CAAC;IAClD,CAAC;IAED,yBAAK,GAAL,UAAM,IAAY;QAChB,IAAI,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,GAAG,EAAE,CAAC;YACZ,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACjC;QACD,MAAM,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAC1B,OAAO,MAAM,CAAC,GAAG,CAAC;QAClB,OAAO,MAAM,CAAC,KAAK,CAAC;IACtB,CAAC;IAEO,yBAAK,GAAb,UAAc,IAAY,EAAE,MAAc;QACxC,IAAI,WAAW,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAC9C,IAAI,CAAC,WAAW,EAAE;YAChB,WAAW,GAAG,EAAE,CAAC;YACjB,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC;SAC1C;QAED,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAEzB,IAAI,WAAW,CAAC,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE;YAC5C,WAAW,CAAC,MAAM,CAAC,CAAC,EAAE,WAAW,CAAC,MAAM,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC;SACjE;IACH,CAAC;IApED,qEAAqE;IACrE,sEAAsE;IACtE,kCAAkC;IAClB,mBAAS,GAAG,EAAE,CAAC;IAkEjC,gBAAC;CAAA,AAvED,IAuEC;AAvEY,8BAAS","sourcesContent":["import { Timing } from '../types';\n\n/**\n * @private\n */\nexport class Benchmark {\n\n // NOTE (csantos): How many timing information to save per benchmark.\n // This is about the amount of timing info generated on a 24fps input.\n // Enough samples to calculate fps\n static readonly cacheSize = 41;\n\n private _timingCache: Map;\n private _timings: Map;\n\n constructor() {\n this._timingCache = new Map();\n this._timings = new Map();\n }\n\n end(name: string) {\n const timing = this._timings.get(name);\n if (!timing) {\n return;\n }\n timing.end = Date.now();\n timing.delay = timing.end - timing.start!;\n this._save(name, {...timing});\n }\n\n getAverageDelay(name: string): number | undefined {\n const timingCache = this._timingCache.get(name);\n if (!timingCache || !timingCache.length) {\n return;\n }\n return timingCache.map(timing => timing.delay!)\n .reduce((total: number, value: number) => total += value, 0) / 
timingCache.length;\n }\n\n getNames(): string[] {\n return Array.from(this._timingCache.keys());\n }\n\n getRate(name: string): number | undefined {\n const timingCache = this._timingCache.get(name);\n if (!timingCache || timingCache.length < 2) {\n return;\n }\n const totalDelay = timingCache[timingCache.length - 1].end! - timingCache[0].start!;\n return (timingCache.length / totalDelay) * 1000;\n }\n\n start(name: string) {\n let timing = this._timings.get(name);\n if (!timing) {\n timing = {};\n this._timings.set(name, timing);\n }\n timing.start = Date.now();\n delete timing.end;\n delete timing.delay;\n }\n\n private _save(name: string, timing: Timing) {\n let timingCache = this._timingCache.get(name);\n if (!timingCache) {\n timingCache = [];\n this._timingCache.set(name, timingCache);\n }\n\n timingCache.push(timing);\n\n if (timingCache.length > Benchmark.cacheSize) {\n timingCache.splice(0, timingCache.length - Benchmark.cacheSize);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/es5/utils/support.d.ts b/es5/utils/support.d.ts new file mode 100644 index 0000000..7f061a0 --- /dev/null +++ b/es5/utils/support.d.ts @@ -0,0 +1,20 @@ +/** + * @private + */ +export declare function isBrowserSupported(): boolean; +/** + * Check if the current browser is officially supported by twilio-video-procesors.js. + * This is set to `true` for browsers that supports canvas + * [2D](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D) or + * [webgl2](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext) + * rendering context. + * @example + * ```ts + * import { isSupported } from '@twilio/video-processors'; + * + * if (isSupported) { + * // Initialize the background processors + * } + * ``` + */ +export declare const isSupported: boolean; diff --git a/es5/utils/support.js b/es5/utils/support.js new file mode 100644 index 0000000..2eb3eab --- /dev/null +++ b/es5/utils/support.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isSupported = exports.isBrowserSupported = void 0; +/** + * @private + */ +function getCanvas() { + return typeof window.OffscreenCanvas !== 'undefined' ? new window.OffscreenCanvas(1, 1) : document.createElement('canvas'); +} +/** + * @private + */ +function isBrowserSupported() { + if (typeof window !== 'undefined' && typeof document !== 'undefined') { + return !!(getCanvas().getContext('2d') || getCanvas().getContext('webgl2')); + } + else { + return false; + } +} +exports.isBrowserSupported = isBrowserSupported; +/** + * Check if the current browser is officially supported by twilio-video-procesors.js. + * This is set to `true` for browsers that supports canvas + * [2D](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D) or + * [webgl2](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext) + * rendering context. 
+ * @example + * ```ts + * import { isSupported } from '@twilio/video-processors'; + * + * if (isSupported) { + * // Initialize the background processors + * } + * ``` + */ +exports.isSupported = isBrowserSupported(); +//# sourceMappingURL=support.js.map \ No newline at end of file diff --git a/es5/utils/support.js.map b/es5/utils/support.js.map new file mode 100644 index 0000000..5358f9c --- /dev/null +++ b/es5/utils/support.js.map @@ -0,0 +1 @@ +{"version":3,"file":"support.js","sourceRoot":"","sources":["../../lib/utils/support.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACH,SAAS,SAAS;IAChB,OAAO,OAAO,MAAM,CAAC,eAAe,KAAK,WAAW,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,eAAe,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;AAC7H,CAAC;AAED;;GAEG;AACH,SAAgB,kBAAkB;IAChC,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;QACpE,OAAO,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,SAAS,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC;KAC7E;SAAM;QACL,OAAO,KAAK,CAAC;KACd;AACH,CAAC;AAND,gDAMC;AAED;;;;;;;;;;;;;;GAcG;AACU,QAAA,WAAW,GAAG,kBAAkB,EAAE,CAAC","sourcesContent":["/**\n * @private\n */\nfunction getCanvas() {\n return typeof window.OffscreenCanvas !== 'undefined' ? new window.OffscreenCanvas(1, 1) : document.createElement('canvas');\n}\n\n/**\n * @private\n */\nexport function isBrowserSupported() {\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n return !!(getCanvas().getContext('2d') || getCanvas().getContext('webgl2'));\n } else {\n return false;\n }\n}\n\n/**\n * Check if the current browser is officially supported by twilio-video-procesors.js.\n * This is set to `true` for browsers that supports canvas\n * [2D](https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D) or\n * [webgl2](https://developer.mozilla.org/en-US/docs/Web/API/WebGL2RenderingContext)\n * rendering context.\n * @example\n * ```ts\n * import { isSupported } from '@twilio/video-processors';\n *\n * if (isSupported) {\n * // Initialize the background processors\n * }\n * ```\n */\nexport const isSupported = isBrowserSupported();\n"]} \ No newline at end of file diff --git a/es5/utils/version.d.ts b/es5/utils/version.d.ts new file mode 100644 index 0000000..f640437 --- /dev/null +++ b/es5/utils/version.d.ts @@ -0,0 +1,4 @@ +/** + * The current version of the library. + */ +export declare const version: string; diff --git a/es5/utils/version.js b/es5/utils/version.js new file mode 100644 index 0000000..1dc600e --- /dev/null +++ b/es5/utils/version.js @@ -0,0 +1,9 @@ +"use strict"; +// This file is generated on build. To make changes, see scripts/version.js +Object.defineProperty(exports, "__esModule", { value: true }); +exports.version = void 0; +/** + * The current version of the library. + */ +exports.version = '2.1.0'; +//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/es5/utils/version.js.map b/es5/utils/version.js.map new file mode 100644 index 0000000..7e76fc6 --- /dev/null +++ b/es5/utils/version.js.map @@ -0,0 +1 @@ +{"version":3,"file":"version.js","sourceRoot":"","sources":["../../lib/utils/version.ts"],"names":[],"mappings":";AAAA,2EAA2E;;;AAE3E;;GAEG;AACU,QAAA,OAAO,GAAW,OAAO,CAAC","sourcesContent":["// This file is generated on build. 
To make changes, see scripts/version.js\n\n/**\n * The current version of the library.\n */\nexport const version: string = '2.1.0';\n"]} \ No newline at end of file diff --git a/lib/utils/version.ts b/lib/utils/version.ts new file mode 100644 index 0000000..20d2deb --- /dev/null +++ b/lib/utils/version.ts @@ -0,0 +1,6 @@ +// This file is generated on build. To make changes, see scripts/version.js + +/** + * The current version of the library. + */ +export const version: string = '2.1.0'; diff --git a/package.json b/package.json index 4110ec3..713283e 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "@twilio/video-processors", "title": "Twilio Video Processors", "description": "Twilio Video Processors JavaScript Library", - "version": "2.1.0-dev", + "version": "2.1.0", "homepage": "https://github.com/twilio/twilio-video-processors.js#readme", "author": "Charlie Santos ", "contributors": [
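The `Benchmark` utility introduced earlier in this diff is marked `@private`, so the sketch below is illustrative only: it shows how the `start`/`end` cycle caches up to `Benchmark.cacheSize` (41) timings per label, and how `getAverageDelay` and `getRate` derive average latency and an fps figure from that cache. The `'processFrame'` label and the deep import path are hypothetical, not part of the public API:

```ts
// Hypothetical deep import; Benchmark is @private and not re-exported publicly.
import { Benchmark } from '@twilio/video-processors/es5/utils/Benchmark';

const benchmark = new Benchmark();

function handleFrame() {
  benchmark.start('processFrame'); // stamps start, clears any stale end/delay
  // ... per-frame processing work ...
  benchmark.end('processFrame');   // stamps end, computes delay, caches a copy
}

// After enough frames have accumulated (the cache keeps at most the
// 41 most recent samples, enough to compute a stable fps figure):
const avgMs = benchmark.getAverageDelay('processFrame'); // mean delay in ms
const fps = benchmark.getRate('processFrame');           // samples / elapsedMs * 1000
```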
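Together with the version bump in `package.json`, the new `support` and `version` modules round out the release's public surface. A minimal consumption sketch, assuming the package's top-level entry re-exports `isSupported` and `version` (the re-export itself is not shown in this diff; the import path follows the doc comment's own example):

```ts
import { isSupported, version } from '@twilio/video-processors';

console.log(`twilio-video-processors v${version}`); // '2.1.0' for this release

if (isSupported) {
  // The browser can produce a canvas 2d or webgl2 rendering context,
  // so the background processors can be initialized here.
} else {
  // Fall back to the unprocessed video track.
}
```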