bricks/3parties/vad.0.0.7.min.js
2024-07-13 10:37:06 +08:00

1 line
10 KiB
JavaScript

!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t(require("onnxruntime-web")):"function"==typeof define&&define.amd?define(["onnxruntime-web"],t):"object"==typeof exports?exports.vad=t(require("onnxruntime-web")):e.vad=t(e.ort)}(self,(e=>(()=>{"use strict";var t={55:(e,t,s)=>{e.exports=s.p+"silero_vad.onnx"},265:(e,t,s)=>{e.exports=s.p+"vad.worklet.bundle.min.js"},656:t=>{t.exports=e}},s={};function i(e){var o=s[e];if(void 0!==o)return o.exports;var r=s[e]={exports:{}};return t[e](r,r.exports,i),r.exports}i.m=t,i.d=(e,t)=>{for(var s in t)i.o(t,s)&&!i.o(e,s)&&Object.defineProperty(e,s,{enumerable:!0,get:t[s]})},i.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),i.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),i.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{var e;i.g.importScripts&&(e=i.g.location+"");var t=i.g.document;if(!e&&t&&(t.currentScript&&(e=t.currentScript.src),!e)){var s=t.getElementsByTagName("script");s.length&&(e=s[s.length-1].src)}if(!e)throw new Error("Automatic publicPath is not supported in this browser");e=e.replace(/#.*$/,"").replace(/\?.*$/,"").replace(/\/[^\/]+$/,"/"),i.p=e})(),i.b=document.baseURI||self.location.href;var o={};return(()=>{i.r(o),i.d(o,{AudioNodeVAD:()=>F,FrameProcessor:()=>p,Message:()=>e,MicVAD:()=>v,NonRealTimeVAD:()=>y,defaultRealTimeVADOptions:()=>w,utils:()=>b});var e,t=i(656);function s(e,t,s){for(var i=0;i<s.length;i++)e.setUint8(t+i,s.charCodeAt(i))}!function(e){e.AudioFrame="AUDIO_FRAME",e.SpeechStart="SPEECH_START",e.VADMisfire="VAD_MISFIRE",e.SpeechEnd="SPEECH_END"}(e||(e={}));const r=["error","debug","warn"].reduce(((e,t)=>(e[t]=function(e){return(...t)=>{console[e]("[VAD]",...t)}}(t),e)),{}),n=[512,1024,1536],a={positiveSpeechThreshold:.5,negativeSpeechThreshold:.35,preSpeechPadFrames:1,redemptionFrames:8,frameSamples:1536,minSpeechFrames:3};function h(e){n.includes(e.frameSamples)||r.warn("You are using an unusual frame size"),(e.positiveSpeechThreshold<0||e.negativeSpeechThreshold>1)&&r.error("postiveSpeechThreshold should be a number between 0 and 1"),(e.negativeSpeechThreshold<0||e.negativeSpeechThreshold>e.positiveSpeechThreshold)&&r.error("negativeSpeechThreshold should be between 0 and postiveSpeechThreshold"),e.preSpeechPadFrames<0&&r.error("preSpeechPadFrames should be positive"),e.redemptionFrames<0&&r.error("preSpeechPadFrames should be positive")}const c=e=>{const t=e.reduce(((e,t)=>(e.push(e.at(-1)+t.length),e)),[0]),s=new Float32Array(t.at(-1));return e.forEach(((e,i)=>{const o=t[i];s.set(e,o)})),s};class p{constructor(t,s,i){this.modelProcessFunc=t,this.modelResetFunc=s,this.options=i,this.speaking=!1,this.redemptionCounter=0,this.active=!1,this.reset=()=>{this.speaking=!1,this.audioBuffer=[],this.modelResetFunc(),this.redemptionCounter=0},this.pause=()=>{this.active=!1,this.reset()},this.resume=()=>{this.active=!0},this.endSegment=()=>{const t=this.audioBuffer;this.audioBuffer=[];const s=this.speaking;this.reset();const i=t.reduce(((e,t)=>e+ +t.isSpeech),0);if(s){if(i>=this.options.minSpeechFrames){const s=c(t.map((e=>e.frame)));return{msg:e.SpeechEnd,audio:s}}return{msg:e.VADMisfire}}return{}},this.process=async t=>{if(!this.active)return{};const s=await this.modelProcessFunc(t);if(this.audioBuffer.push({frame:t,isSpeech:s.isSpeech>=this.options.positiveSpeechThreshold}),s.isSpeech>=this.options.positiveSpeechThreshold&&this.redemptionCounter&&(this.redemptionCounter=0),s.isSpeech>=this.options.positiveSpeechThreshold&&!this.speaking)return this.speaking=!0,{probs:s,msg:e.SpeechStart};if(s.isSpeech<this.options.negativeSpeechThreshold&&this.speaking&&++this.redemptionCounter>=this.options.redemptionFrames){this.redemptionCounter=0,this.speaking=!1;const t=this.audioBuffer;if(this.audioBuffer=[],t.reduce(((e,t)=>e+ +t.isSpeech),0)>=this.options.minSpeechFrames){const i=c(t.map((e=>e.frame)));return{probs:s,msg:e.SpeechEnd,audio:i}}return{probs:s,msg:e.VADMisfire}}if(!this.speaking)for(;this.audioBuffer.length>this.options.preSpeechPadFrames;)this.audioBuffer.shift();return{probs:s}},this.audioBuffer=[],this.reset()}}class d{constructor(e,t){this.ort=e,this.modelFetcher=t,this.init=async()=>{r.debug("initializing vad");const e=await this.modelFetcher();this._session=await this.ort.InferenceSession.create(e),this._sr=new this.ort.Tensor("int64",[16000n]),this.reset_state(),r.debug("vad is initialized")},this.reset_state=()=>{const e=Array(128).fill(0);this._h=new this.ort.Tensor("float32",e,[2,1,64]),this._c=new this.ort.Tensor("float32",e,[2,1,64])},this.process=async e=>{const t={input:new this.ort.Tensor("float32",e,[1,e.length]),h:this._h,c:this._c,sr:this._sr},s=await this._session.run(t);this._h=s.hn,this._c=s.cn;const[i]=s.output.data;return{notSpeech:1-i,isSpeech:i}}}}d.new=async(e,t)=>{const s=new d(e,t);return await s.init(),s};class u{constructor(e){this.options=e,this.process=e=>{const t=[];for(const t of e)this.inputBuffer.push(t);for(;this.inputBuffer.length*this.options.targetSampleRate/this.options.nativeSampleRate>this.options.targetFrameSize;){const e=new Float32Array(this.options.targetFrameSize);let s=0,i=0;for(;s<this.options.targetFrameSize;){let t=0,o=0;for(;i<Math.min(this.inputBuffer.length,(s+1)*this.options.nativeSampleRate/this.options.targetSampleRate);)t+=this.inputBuffer[i],o++,i++;e[s]=t/o,s++}this.inputBuffer=this.inputBuffer.slice(i),t.push(e)}return t},e.nativeSampleRate<16e3&&r.error("nativeSampleRate is too low. Should have 16000 = targetSampleRate <= nativeSampleRate"),this.inputBuffer=[]}}const l={...a};class m{static async _new(e,t,s={}){const i=new this(e,t,{...l,...s});return await i.init(),i}constructor(t,s,i){this.modelFetcher=t,this.ort=s,this.options=i,this.init=async()=>{const e=await d.new(this.ort,this.modelFetcher);this.frameProcessor=new p(e.process,e.reset_state,{frameSamples:this.options.frameSamples,positiveSpeechThreshold:this.options.positiveSpeechThreshold,negativeSpeechThreshold:this.options.negativeSpeechThreshold,redemptionFrames:this.options.redemptionFrames,preSpeechPadFrames:this.options.preSpeechPadFrames,minSpeechFrames:this.options.minSpeechFrames}),this.frameProcessor.resume()},this.run=async function*(t,s){const i={nativeSampleRate:s,targetSampleRate:16e3,targetFrameSize:this.options.frameSamples},o=new u(i).process(t);let r,n;for(const t of[...Array(o.length)].keys()){const s=o[t],{msg:i,audio:a}=await this.frameProcessor.process(s);switch(i){case e.SpeechStart:r=t*this.options.frameSamples/16;break;case e.SpeechEnd:n=(t+1)*this.options.frameSamples/16,yield{audio:a,start:r,end:n}}}const{msg:a,audio:h}=this.frameProcessor.endSegment();a==e.SpeechEnd&&(yield{audio:h,start:r,end:o.length*this.options.frameSamples/16})},h(i)}}const f={minFramesForTargetMS:function(e,t,s=16e3){return Math.ceil(e*s/1e3/t)},arrayBufferToBase64:function(e){for(var t="",s=new Uint8Array(e),i=s.byteLength,o=0;o<i;o++)t+=String.fromCharCode(s[o]);return btoa(t)},encodeWAV:function(e,t=3,i=16e3,o=1,r=32){var n=r/8,a=o*n,h=new ArrayBuffer(44+e.length*n),c=new DataView(h);return s(c,0,"RIFF"),c.setUint32(4,36+e.length*n,!0),s(c,8,"WAVE"),s(c,12,"fmt "),c.setUint32(16,16,!0),c.setUint16(20,t,!0),c.setUint16(22,o,!0),c.setUint32(24,i,!0),c.setUint32(28,i*a,!0),c.setUint16(32,a,!0),c.setUint16(34,r,!0),s(c,36,"data"),c.setUint32(40,e.length*n,!0),1===t?function(e,t,s){for(var i=0;i<s.length;i++,t+=2){var o=Math.max(-1,Math.min(1,s[i]));e.setInt16(t,o<0?32768*o:32767*o,!0)}}(c,44,e):function(e,t,s){for(var i=0;i<s.length;i++,t+=4)e.setFloat32(t,s[i],!0)}(c,44,e),h}},S=new URL(i(55),i.b),g=async()=>await fetch(S).then((e=>e.arrayBuffer())),w={...a,onFrameProcessed:e=>{},onVADMisfire:()=>{r.debug("VAD misfire")},onSpeechStart:()=>{r.debug("Detected speech start")},onSpeechEnd:()=>{r.debug("Detected speech end")},workletURL:new URL(i(265),i.b).toString()};class v{static async new(e={}){const t=new v({...w,...e});return await t.init(),t}constructor(e){this.options=e,this.listening=!1,this.init=async()=>{this.stream=await navigator.mediaDevices.getUserMedia({audio:{...this.options.additionalAudioConstraints,channelCount:1,echoCancellation:!0,autoGainControl:!0,noiseSuppression:!0}}),this.audioContext=new AudioContext;const e=new MediaStreamAudioSourceNode(this.audioContext,{mediaStream:this.stream});this.audioNodeVAD=await F.new(this.audioContext,this.options),this.audioNodeVAD.receive(e)},this.pause=()=>{this.audioNodeVAD.pause(),this.listening=!1},this.start=()=>{this.audioNodeVAD.start(),this.listening=!0},h(e)}}class F{static async new(e,t={}){const s=new F(e,{...w,...t});return await s.init(),s}constructor(s,i){this.ctx=s,this.options=i,this.pause=()=>{this.frameProcessor.pause()},this.start=()=>{this.frameProcessor.resume()},this.receive=e=>{e.connect(this.entryNode)},this.processFrame=async t=>{const{probs:s,msg:i,audio:o}=await this.frameProcessor.process(t);switch(void 0!==s&&this.options.onFrameProcessed(s),i){case e.SpeechStart:this.options.onSpeechStart();break;case e.VADMisfire:this.options.onVADMisfire();break;case e.SpeechEnd:this.options.onSpeechEnd(o)}},this.init=async()=>{await this.ctx.audioWorklet.addModule(this.options.workletURL);const s=new AudioWorkletNode(this.ctx,"vad-helper-worklet",{processorOptions:{frameSamples:this.options.frameSamples}});this.entryNode=s;const i=await d.new(t,g);this.frameProcessor=new p(i.process,i.reset_state,{frameSamples:this.options.frameSamples,positiveSpeechThreshold:this.options.positiveSpeechThreshold,negativeSpeechThreshold:this.options.negativeSpeechThreshold,redemptionFrames:this.options.redemptionFrames,preSpeechPadFrames:this.options.preSpeechPadFrames,minSpeechFrames:this.options.minSpeechFrames}),s.port.onmessage=async t=>{if(t.data?.message===e.AudioFrame){const e=t.data.data,s=new Float32Array(e);await this.processFrame(s)}}},h(i)}}class y extends m{static async new(e={}){return await this._new(g,t,e)}}const b={audioFileToArray:async function(e){const t=new OfflineAudioContext(1,1,44100),s=new FileReader;let i=null;if(await new Promise((o=>{s.addEventListener("loadend",(e=>{const r=s.result;t.decodeAudioData(r,(e=>{i=e,t.startRendering().then((e=>{console.log("Rendering completed successfully"),o()})).catch((e=>{console.error(`Rendering failed: ${e}`)}))}),(e=>{console.log(`Error with decoding audio data: ${e}`)}))})),s.readAsArrayBuffer(e)})),null===i)throw Error("some shit");let o=i,r=new Float32Array(o.length);for(let e=0;e<o.length;e++)for(let t=0;t<o.numberOfChannels;t++)r[e]+=o.getChannelData(t)[e];return{audio:r,sampleRate:o.sampleRate}},...f}})(),o})()));