bugfix

commit d6bf03bb8d
parent 07d03dc421
@@ -29,13 +29,18 @@ bricks.AudioPlayer = class extends bricks.JsWidget {
this.audio.addEventListener('canplay', this.play_audio.bind(this));
}
this.audio.style.width = "100%"
this.source = this._create('source');
this.source.src = this.opts.url;
this.audio.appendChild(this.source);
this.dom_element.appendChild(this.audio);
if ( this.url ){
this.set_source(this.url);
}
}
set_source(url){
this.audio.src = url;
if (! this.source){
this.source = this._create('source');
this.source.src = url;
this.audio.appendChild(this.source);
}
this.url = this.audio.src = url;
bricks.debug(this.audio.src,' new src seted');
}
set_source_from_response(resp){
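A minimal usage sketch, not part of the commit, of the reworked AudioPlayer source handling: set_source() now creates the <source> child lazily on first use, records the url on the widget, and only swaps the src on later calls. The player variable and the urls below are hypothetical.

// Sketch only: `player` and the urls are made up for illustration.
var player = new bricks.AudioPlayer({});
player.set_source('https://example.com/clip.wav');   // first call creates the <source> child
player.set_source('https://example.com/other.wav');  // later calls only swap the src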
@@ -115,11 +115,16 @@ bricks.FormBase = class extends bricks.Layout {
label:'Cancel'
}
]
var tb_desc;
var names = [ ' submit', 'reset', 'cancel' ];
var tb_desc={};
var names = [ 'submit', 'reset', 'cancel' ];
if (this.toolbar){
tb_desc = bricks.extend(tb_desc, this.toolbar);
tb_desc.tools = tools;
tools.forEach(t => {
if (! names.includes(t.name)) {
tb_desc.tools.push(t);
}
});
this.toolbar.tools.forEach(t => {
if (! names.includes(t.name)) {
tb_desc.tools.push(t);
}
@@ -154,6 +159,8 @@ bricks.FormBase = class extends bricks.Layout {
if (params.action){
f = bricks.buildEventHandler(this, params);
if (f) f(event);
} else {
this.dispatch(params.name);
}
}
}
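A small illustration, not from the commit, of the toolbar merge above: the form starts from an empty tb_desc, copies any user-supplied toolbar options into it, installs the default tools, and then appends the user's extra tools while skipping any whose name collides with submit/reset/cancel. Object.assign stands in for bricks.extend, and the data below is made up.

// Sketch only: hypothetical default tools and user toolbar.
var tools = [ { name:'submit', label:'Submit' }, { name:'reset', label:'Reset' }, { name:'cancel', label:'Cancel' } ];
var userToolbar = { tools:[ { name:'export', label:'Export' }, { name:'reset', label:'My Reset' } ] };
var names = [ 'submit', 'reset', 'cancel' ];
var tb_desc = {};
tb_desc = Object.assign(tb_desc, userToolbar);   // copy user toolbar options
tb_desc.tools = tools;                           // start from the default tools
userToolbar.tools.forEach(t => {
    if (! names.includes(t.name)) {
        tb_desc.tools.push(t);                   // keep only extras that do not shadow a default
    }
});
// tb_desc.tools is now submit, reset, cancel, export; the conflicting user 'reset' is dropped.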
@@ -1,4 +1,57 @@
var bricks = window.bricks || {};
/** @param sampleRate {number} */
/** @param channelBuffers {Float32Array[]} */
function audioBufferToWav(channelBuffers, sampleRate) {
const totalSamples = channelBuffers[0].length * channelBuffers.length;

const buffer = new ArrayBuffer(44 + totalSamples * 2);
const view = new DataView(buffer);

const writeString = (view, offset, string) => {
for (let i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
};

/* RIFF identifier */
writeString(view, 0, "RIFF");
/* RIFF chunk length */
view.setUint32(4, 36 + totalSamples * 2, true);
/* RIFF type */
writeString(view, 8, "WAVE");
/* format chunk identifier */
writeString(view, 12, "fmt ");
/* format chunk length */
view.setUint32(16, 16, true);
/* sample format (raw) */
view.setUint16(20, 1, true);
/* channel count */
view.setUint16(22, channelBuffers.length, true);
/* sample rate */
view.setUint32(24, sampleRate, true);
/* byte rate (sample rate * block align) */
view.setUint32(28, sampleRate * 4, true);
/* block align (channel count * bytes per sample) */
view.setUint16(32, channelBuffers.length * 2, true);
/* bits per sample */
view.setUint16(34, 16, true);
/* data chunk identifier */
writeString(view, 36, "data");
/* data chunk length */
view.setUint32(40, totalSamples * 2, true);

// floatTo16BitPCM
let offset = 44;
for (let i = 0; i < channelBuffers[0].length; i++) {
for (let channel = 0; channel < channelBuffers.length; channel++) {
const s = Math.max(-1, Math.min(1, channelBuffers[channel][i]));
view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
offset += 2;
}
}

return buffer;
}

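audioBufferToWav() packs one or more Float32Array channel buffers into a 16-bit PCM WAV file: a fixed 44-byte RIFF/fmt/data header followed by the interleaved samples, two bytes each. A small usage sketch, not part of the commit, with made-up input matching how handle_audio() below calls it:

// Sketch only: a made-up 1-second mono buffer at 16 kHz.
const samples = new Float32Array(16000);         // real code gets this from the VAD callback
const wav = audioBufferToWav([samples], 16000);  // mono => a single-element channel array
console.log(wav.byteLength);                     // 44-byte header + 16000 samples * 2 bytes = 32044
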
bricks.VadText = class extends bricks.VBox {
constructor(opts){
@@ -10,7 +63,8 @@ bricks.VadText = class extends bricks.VBox {
icon:bricks_resource('imgs/speak.png')
});
this.audio = new bricks.AudioPlayer({});
var hbox = new bricks.HBox({height:'auto'});
this.audio.set_css('filler');
var hbox = new bricks.HBox({width:'100%', height:'auto'});
hbox.add_widget(this.button);
hbox.add_widget(this.audio)
this.add_widget(hbox);
@@ -61,7 +115,7 @@ bricks.VadText = class extends bricks.VBox {
}
async handle_audio(audio){
console.log('handle_audil() called', audio);
var wavBuffer = this.floatArrayToWAV(audio, 16000);
var wavBuffer = audioBufferToWav([audio], 16000);
var b64audio = this.arrayBufferToBase64(wavBuffer);
this.audio.set_url('data:audio/wav;base64,' + b64audio);
var hj = new bricks.HttpJson();
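handle_audio() then turns the WAV ArrayBuffer into a base64 string via this.arrayBufferToBase64() (only its tail is visible in the next hunk) and hands the result to the player as a data: url. A minimal sketch of how such a helper is commonly written, assuming it simply walks the bytes and calls btoa(); the real method may differ in detail:

// Sketch only; illustrates the shape of the helper, not the committed code.
function arrayBufferToBase64(buf) {
    const bytes = new Uint8Array(buf);
    let binary = '';
    for (let i = 0; i < bytes.length; i++) {
        binary += String.fromCharCode(bytes[i]); // one character per byte
    }
    return btoa(binary); // base64-encode the binary string
}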
@@ -92,17 +146,6 @@ bricks.VadText = class extends bricks.VBox {
}
return btoa(binary);
}
floatArrayToWAV(floatArray, sampleRate) {
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
const buffer = audioCtx.createBuffer(1, floatArray.length, sampleRate);
buffer.getChannelData(0).set(floatArray);

const audioData = buffer;

const wavEncoder = new WavEncoder();
const wavBlob = wavEncoder.encode(audioData);
return wavBlob;
}
getValue(){
var d = {}
d[this.name] = this.text;