bricks/3parties/vad.0.0.7.min.js

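/*
 * Vendored, minified browser bundle of a voice activity detection (VAD) library,
 * apparently @ricky0123/vad-web v0.0.7 (inferred from the filename and the exports
 * below). The code that follows is generated output; fixes belong in the upstream
 * source, not in this file.
 *
 * What is visible in the bundle below:
 *   - A UMD wrapper that exposes the API as `self.vad` (or via CommonJS/AMD) and
 *     depends on onnxruntime-web, read from the global `ort` when loaded as a
 *     plain <script> tag.
 *   - Webpack publicPath logic that resolves two sibling assets relative to this
 *     script's own URL: silero_vad.onnx (the Silero VAD model) and
 *     vad.worklet.bundle.min.js (an AudioWorklet bundle that frames microphone audio).
 *   - Exports: MicVAD, NonRealTimeVAD, AudioNodeVAD, FrameProcessor, Message,
 *     defaultRealTimeVADOptions, utils.
 *   - A FrameProcessor that applies positiveSpeechThreshold (default 0.5),
 *     negativeSpeechThreshold (default 0.35), preSpeechPadFrames, redemptionFrames,
 *     frameSamples (default 1536), and minSpeechFrames (default 3) before emitting
 *     SPEECH_START / SPEECH_END / VAD_MISFIRE messages.
 *
 * Typical consumption, sketched from the upstream @ricky0123/vad-web API for this
 * version; none of the calls below appear in this excerpt and the paths are
 * placeholders, so treat this as a hedged example rather than a contract. It assumes
 * onnxruntime-web (e.g. its ort.js build) is loaded first to define the global `ort`:
 *
 *   <script src="path/to/onnxruntime-web/ort.js"></script>
 *   <script src="bricks/3parties/vad.0.0.7.min.js"></script>
 *   <script>
 *     vad.MicVAD.new({
 *       onSpeechStart: () => console.log("speech started"),
 *       onSpeechEnd: (audio) => {
 *         // `audio` is a Float32Array of samples for the detected speech segment
 *       },
 *     }).then((v) => v.start());
 *   </script>
 */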
!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t(require("onnxruntime-web")):"function"==typeof define&&define.amd?define(["onnxruntime-web"],t):"object"==typeof exports?exports.vad=t(require("onnxruntime-web")):e.vad=t(e.ort)}(self,(e=>(()=>{"use strict";var t={55:(e,t,s)=>{e.exports=s.p+"silero_vad.onnx"},265:(e,t,s)=>{e.exports=s.p+"vad.worklet.bundle.min.js"},656:t=>{t.exports=e}},s={};function i(e){var o=s[e];if(void 0!==o)return o.exports;var r=s[e]={exports:{}};return t[e](r,r.exports,i),r.exports}i.m=t,i.d=(e,t)=>{for(var s in t)i.o(t,s)&&!i.o(e,s)&&Object.defineProperty(e,s,{enumerable:!0,get:t[s]})},i.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),i.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),i.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{var e;i.g.importScripts&&(e=i.g.location+"");var t=i.g.document;if(!e&&t&&(t.currentScript&&(e=t.currentScript.src),!e)){var s=t.getElementsByTagName("script");s.length&&(e=s[s.length-1].src)}if(!e)throw new Error("Automatic publicPath is not supported in this browser");e=e.replace(/#.*$/,"").replace(/\?.*$/,"").replace(/\/[^\/]+$/,"/"),i.p=e})(),i.b=document.baseURI||self.location.href;var o={};return(()=>{i.r(o),i.d(o,{AudioNodeVAD:()=>F,FrameProcessor:()=>p,Message:()=>e,MicVAD:()=>v,NonRealTimeVAD:()=>y,defaultRealTimeVADOptions:()=>w,utils:()=>b});var e,t=i(656);function s(e,t,s){for(var i=0;i<s.length;i++)e.setUint8(t+i,s.charCodeAt(i))}!function(e){e.AudioFrame="AUDIO_FRAME",e.SpeechStart="SPEECH_START",e.VADMisfire="VAD_MISFIRE",e.SpeechEnd="SPEECH_END"}(e||(e={}));const r=["error","debug","warn"].reduce(((e,t)=>(e[t]=function(e){return(...t)=>{console[e]("[VAD]",...t)}}(t),e)),{}),n=[512,1024,1536],a={positiveSpeechThreshold:.5,negativeSpeechThreshold:.35,preSpeechPadFrames:1,redemptionFrames:8,frameSamples:1536,minSpeechFrames:3};function h(e){n.includes(e.frameSamples)||r.warn("You are using an unusual frame size"),(e.positiveSpeechThreshold<0||e.negativeSpeechThreshold>1)&&r.error("postiveSpeechThreshold should be a number between 0 and 1"),(e.negativeSpeechThreshold<0||e.negativeSpeechThreshold>e.positiveSpeechThreshold)&&r.error("negativeSpeechThreshold should be between 0 and postiveSpeechThreshold"),e.preSpeechPadFrames<0&&r.error("preSpeechPadFrames should be positive"),e.redemptionFrames<0&&r.error("preSpeechPadFrames should be positive")}const c=e=>{const t=e.reduce(((e,t)=>(e.push(e.at(-1)+t.length),e)),[0]),s=new Float32Array(t.at(-1));return e.forEach(((e,i)=>{const o=t[i];s.set(e,o)})),s};class p{constructor(t,s,i){this.modelProcessFunc=t,this.modelResetFunc=s,this.options=i,this.speaking=!1,this.redemptionCounter=0,this.active=!1,this.reset=()=>{this.speaking=!1,this.audioBuffer=[],this.modelResetFunc(),this.redemptionCounter=0},this.pause=()=>{this.active=!1,this.reset()},this.resume=()=>{this.active=!0},this.endSegment=()=>{const t=this.audioBuffer;this.audioBuffer=[];const s=this.speaking;this.reset();const i=t.reduce(((e,t)=>e+ +t.isSpeech),0);if(s){if(i>=this.options.minSpeechFrames){const s=c(t.map((e=>e.frame)));return{msg:e.SpeechEnd,audio:s}}return{msg:e.VADMisfire}}return{}},this.process=async t=>{if(!this.active)return{};const s=await 
this.modelProcessFunc(t);if(this.audioBuffer.push({frame:t,isSpeech:s.isSpeech>=this.options.positiveSpeechThreshold}),s.isSpeech>=this.options.positiveSpeechThreshold&&this.redemptionCounter&&(this.redemptionCounter=0),s.isSpeech>=this.options.positiveSpeechThreshold&&!this.speaking)return this.speaking=!0,{probs:s,msg:e.SpeechStart};if(s.isSpeech<this.options.negativeSpeechThreshold&&this.speaking&&++this.redemptionCounter>=this.options.redemptionFrames){this.redemptionCounter=0,this.speaking=!1;const t=this.audioBuffer;if(this.audioBuffer=[],t.reduce(((e,t)=>e+ +t.isSpeech),0)>=this.options.minSpeechFrames){const i=c(t.map((e=>e.fr