JS mediaDevices: chunked speech-to-text recording and upload

The requirement: record audio on a mobile phone, transcribe it to text in real time, and, when recording ends, upload the audio and allow the recording name to be edited.

 

Pitfalls encountered:

  1. Switching tabs or apps may drop the recording and the WebSocket, depending on the phone

    1: use wakeLock to keep the screen on

    2: listen for visibilitychange and reconnect the audio and the WebSocket when the page comes back (see the sketch after this list)

  2. The audio format did not meet the third-party API's requirements, so nothing was recognized

    Use AudioWorkletNode instead of MediaRecorder

Feature            | AudioWorkletNode                   | MediaRecorder
Latency            | Very low (<50 ms)                  | Higher (100-500 ms)
Data access        | Full access to raw PCM data        | Only the encoded output
Development effort | High (encode/package it yourself)  | Low (simple API)
Output format      | Implement manually (e.g. WAV)      | Browser presets (e.g. webm)
Memory usage       | Manage buffers yourself            | Automatic chunking
Typical use        | Pro audio / real-time effects      | Simple recording / quick results
Threading model    | Dedicated audio thread             | Main thread
Browser support    | Newer browsers                     | Widely supported

  3. Long recordings produce files that are too large

    WAV files are big; MP3 is roughly 1/7 the size, so convert to MP3 chunk by chunk and then upload the file in chunks.
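
A minimal sketch of the keep-alive handling from point 1, assuming an audioStreamer object with the reconnect() method shown later in this post. The wake lock is released automatically whenever the page is hidden, so it has to be requested again when the page becomes visible:

// Keep the screen awake and re-attach audio/WebSocket after a tab switch (sketch).
let wakeLock = null;

async function keepAwake() {
    if ('wakeLock' in navigator) {
        try {
            wakeLock = await navigator.wakeLock.request('screen');
        } catch (err) {
            console.warn('Wake lock unavailable:', err.message);
        }
    }
}

document.addEventListener('visibilitychange', async () => {
    if (document.visibilityState === 'visible') {
        await keepAwake();               // the lock was dropped while the page was hidden
        await audioStreamer.reconnect(); // resume AudioContext / WebSocket (see the class below)
    }
});

keepAwake();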

 

worklet-processor.js: record audio and transcribe it
//worklet-processor.js

export default class AudioStreamer {
    constructor(obj) {
        this.audioContext = null;
        this.stream = null;
        this.workletNode = null;
        this.ws = null;
        this.wakeLock = null;
        this.analyser = null;
        this.mediaRecorder = null;
        this.audioBuffer = [];
        this.buffer = [];
        this.CHUNK_SIZE = 16000; // send threshold in samples (one second of 16 kHz audio)
        this.size = 0;
        this.inputSampleRate = 0; // input sample rate
        this.outputSampleRate = 16000; // output sample rate
        this.inputSampleBits = 16; // input sample bits: 8 or 16
        this.outputSampleBits = 16; // output sample bits: 8 or 16
        this.wsUrl = obj.wsUrl;
        this.onMessageCallback = null; // transcription message callback
        this.onAudioBlobCallback = null; // full-recording callback
        this.active = true;
        this.timer = null;
        this.audioChunks = [];
        this.readyData = [];
    }

    // Initialize the audio stream and the WebSocket
    async init(onMessageCallback, onAudioBlobCallback) {
        // Store the callbacks
        this.onMessageCallback = onMessageCallback;
        this.onAudioBlobCallback = onAudioBlobCallback;
        return new Promise(async (resolve, reject) => {
            try {
                // 0. Ask the screen to stay on
                this.requestWakeLock();

                // 1. Get the user's audio stream
                this.stream = await navigator.mediaDevices.getUserMedia({
                    audio: true
                });

                // 2. Create the audio context
                this.audioContext = new(window.AudioContext || window.webkitAudioContext)();
                this.analyser = this.audioContext.createAnalyser();
                this.analyser.fftSize = 256;
                this.inputSampleRate = this.audioContext.sampleRate;

                // 3. Open the WebSocket connection
                this.connectWebSocket();

                // 4. Register and create the AudioWorklet
                await this.setupAudioWorklet();

                // 5. Connect the audio nodes
                this.connectAudioNodes();

                // 6. Optionally save the full recording with MediaRecorder
                //this.saveAudioBlob();

                resolve({
                    analyser: this.analyser,
                    ws: this.ws
                })
                console.log('Audio streaming initialized');
            } catch (error) {
                console.error('Initialization failed:', error);
                reject(error);
            }
        });

    }

    // Reconnect (e.g. after the page becomes visible again)
    async reconnect(wsUrl) {
        //console.log(this.active, 'page resumed - reconnecting audio');
        return new Promise(async (resolve, reject) => {
            try {
                // 0. Ask the screen to stay on
                this.requestWakeLock();

                // The WebSocket was disconnected
                if (wsUrl) {
                    this.wsUrl = wsUrl;
                    this.connectWebSocket(true);
                }

                // 1. Resume the audio context
                if (this.audioContext.state === 'suspended') {
                    await this.audioContext.resume();
                }

                // 2. Re-create the AudioWorkletNode if needed
                if (!this.workletNode || this.workletNode.context !== this
                    .audioContext) {
                    await this.setupAudioWorklet();
                }

                // Reconnect the audio nodes
                this.connectAudioNodes();

                resolve({
                    ws: this.ws
                })
            } catch (error) {
                console.error('Reconnect failed:', error);
                reject(error);
            }
        })

    }

    // Save the full recording with MediaRecorder (less stable than AudioWorkletNode after switching tabs)
    saveAudioBlob() {
        const mediaRecorder = new MediaRecorder(this.stream);
        mediaRecorder.addEventListener("dataavailable", event => {
            // Skip processing while paused
            if (!this.active) {
                return;
            }
            console.log('dataavailable', event.data);
            this.audioChunks.push(event.data);
        });
        mediaRecorder.addEventListener("stop", async () => {
            console.log('audioChunks', this.audioChunks)
            const audioBlob = new Blob(this.audioChunks, {
                type: 'audio/wav'
            });
            if (this.onAudioBlobCallback && !this.active) {
                const buffer = await audioBlob.arrayBuffer();
                const decodedData = await this.audioContext.decodeAudioData(buffer);
                this.onAudioBlobCallback({
                    audioBlob,
                    duration: decodedData.duration
                });
            }
        });
        mediaRecorder.start(100);
        this.mediaRecorder = mediaRecorder;
    }

    // Ask the screen to stay on
    async requestWakeLock() {
        let wakeLock = null;
        try {
            // Check whether the API is available
            if ('wakeLock' in navigator) {
                wakeLock = await navigator.wakeLock.request('screen');
                console.log('Screen wake lock is active');
                this.wakeLock = wakeLock;
            } else {
                console.warn('Wake Lock API is not supported');
            }
        } catch (err) {
            console.error(`Failed to keep the screen awake: ${err.message}`);
        }

    }

    // Open the WebSocket connection
    connectWebSocket(isReconnect) {
        this.ws = new WebSocket(this.wsUrl);
        this.ws.binaryType = 'arraybuffer';

        this.ws.onopen = () => {
            console.log('WebSocket connection established');
            // Send the start frame
            const startFrame = {
                header: {
                    name: "StartTranscription",
                    namespace: "SpeechTranscriber",
                },
                payload: {
                    format: "pcm"
                }
            };
            this.ws.send(JSON.stringify(startFrame));
            // On reconnect, flush the audio buffered while the connection was down
            if (isReconnect) {
                this.readyData.forEach((data) => {
                    this.ws.send(data);
                })
                this.readyData = [];
            }
        };

        this.ws.onmessage = (msg) => {
            // msg.data is the received payload (see the provider's real-time streaming event docs)
            // Handle the data according to your business logic
            if (typeof msg.data === "string") {
                const dataJson = JSON.parse(msg.data);
                switch (dataJson.header.name) {
                    case "SentenceBegin": {
                        // Sentence-begin event
                        console.log("Sentence", dataJson.payload.index, "started");
                        break;
                    }
                    case "TranscriptionResultChanged":
                        // Intermediate (in-sentence) result changed event
                        // console.log(
                        //     "Sentence " + dataJson.payload.index + " partial result:",
                        //     dataJson.payload.result
                        // );
                        if (this.onMessageCallback) {
                            this.onMessageCallback({
                                ...dataJson.payload,
                                name: 'TranscriptionResultChanged'
                            });
                        }
                        break;

                    case "SentenceEnd": {
                        // Sentence-end event
                        console.log(
                            "Sentence " + dataJson.payload.index + " ended:",
                            dataJson.payload.result + dataJson.payload.stash_result.text
                        );
                        if (this.onMessageCallback) {
                            this.onMessageCallback({
                                ...dataJson.payload,
                                name: 'SentenceEnd'
                            });
                        }
                        break;
                    }
                    case "ResultTranslated": {
                        // Translation result event
                        console.log(
                            "Sentence translation result",
                            JSON.stringify(dataJson.payload.translate_result)
                        );
                        break;
                    }
                    //... 
                }
            }
        };
        this.ws.onclose = (event) => {
            console.log(`WebSocket closed: ${event.code} ${event.reason}`);
            //alert('Wsclose' + event.code);
        };
        this.ws.onerror = (error) => {
            console.error('WebSocket error:', error);
            //alert('Wserror' + error);
        };
    }

    // Set up the AudioWorklet
    async setupAudioWorklet() {
        // Register the AudioWorkletProcessor from an inline string
        const workletCode = `
      class AudioStreamProcessor extends AudioWorkletProcessor {
        process(inputs, outputs, parameters) {
            const inputData = inputs[0][0]; // take the mono channel data
            this.port.postMessage(inputData);
            return true;
        }
      }
      registerProcessor('audio-stream-processor', AudioStreamProcessor);
    `;

        // Load the worklet from a Blob URL
        const blob = new Blob([workletCode], {
            type: 'application/javascript'
        });
        const blobUrl = URL.createObjectURL(blob);

        try {
            // Register the AudioWorklet module
            await this.audioContext.audioWorklet.addModule(blobUrl);

            // Create the worklet node
            this.workletNode = new AudioWorkletNode(
                this.audioContext,
                'audio-stream-processor'
            );

            // Handle the audio data coming from the worklet
            this.workletNode.port.onmessage = (e) => {
                // Skip processing while paused
                if (!this.active) {
                    return;
                }
                this.handleAudioData(e.data);
            };

        } finally {
            URL.revokeObjectURL(blobUrl); // clean up the Blob URL
        }
    }

    // Connect the audio nodes
    connectAudioNodes() {
        const sourceNode = this.audioContext.createMediaStreamSource(this.stream);
        sourceNode.connect(this.workletNode);
        sourceNode.connect(this.analyser);
        this.workletNode.connect(this.audioContext.destination);
    }

    // Handle incoming audio data
    handleAudioData(float32Data) {
        // Append to the buffer
        this.audioBuffer = this.audioBuffer.concat(Array.from(float32Data));

        // Check whether the send threshold has been reached
        if (this.audioBuffer.length >= this.CHUNK_SIZE) {
            const chunk = this.audioBuffer.slice(0, this.CHUNK_SIZE);
            this.audioBuffer = this.audioBuffer.slice(this.CHUNK_SIZE);

            // Convert to 16-bit PCM
            //const int16Data = this.floatTo16BitPCM(chunk);
            // Keep a copy for the full recording
            this.audioChunks.push(new Float32Array(chunk));
            // Downsample and encode to PCM
            this.buffer.push(new Float32Array(chunk));
            this.size += chunk.length;
            const int16Data = this.encodePCM();
            //console.log('onmessage');
            // Send over the WebSocket
            if (this.ws.readyState === WebSocket.OPEN) {
                this.ws.send(int16Data);
            } else {
                this.readyData.push(int16Data);
            }
            this.clear();
        }
    }
    compress() { // merge the buffered chunks and downsample to the output rate
        var data = new Float32Array(this.size);
        var offset = 0;
        for (var i = 0; i < this.buffer.length; i++) {
            data.set(this.buffer[i], offset);
            offset += this.buffer[i].length;
        }
        var compression = parseInt(this.inputSampleRate / this.outputSampleRate);
        var length = data.length / compression;
        var result = new Float32Array(length);
        var index = 0,
            j = 0;
        while (index < length) {
            result[index] = data[j];
            j += compression;
            index++;
        }
        return result;
    }

    encodePCM() {
        var sampleRate = Math.min(this.inputSampleRate,
            this.outputSampleRate);
        var sampleBits = Math.min(this.inputSampleBits,
            this.outputSampleBits);
        var bytes = this.compress();
        var dataLength = bytes.length * (sampleBits / 8);
        var buffer = new ArrayBuffer(dataLength);
        var data = new DataView(buffer);
        var offset = 0;
        for (var i = 0; i < bytes.length; i++, offset += 2) {
            var s = Math.max(-1, Math.min(1, bytes[i]));
            data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
        }
        return data.buffer;
    }

    clear() {
        this.buffer = [];
        this.size = 0;
    }

    // Convert float samples to 16-bit PCM
    floatTo16BitPCM(input) {
        const output = new Int16Array(input.length);
        for (let i = 0; i < input.length; i++) {
            const s = Math.max(-1, Math.min(1, input[i]));
            output[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;
        }
        return output.buffer;
    }

    // Helper: merge Float32Arrays
    mergeArrays(arrays) {
        let totalLength = arrays.reduce((acc, value) => acc + value.length, 0);
        let result = new Float32Array(totalLength);
        let offset = 0;
        for (let array of arrays) {
            result.set(array, offset);
            offset += array.length;
        }
        return result;
    }
    // Helper: convert a Float32Array to a WAV Blob
    encodeWAV(samples) {
        const buffer = new ArrayBuffer(44 + samples.length * 2);
        const view = new DataView(buffer);

        // WAV file header
        this.writeString(view, 0, 'RIFF');
        view.setUint32(4, 36 + samples.length * 2, true);
        this.writeString(view, 8, 'WAVE');
        this.writeString(view, 12, 'fmt ');
        view.setUint32(16, 16, true); // chunk length
        view.setUint16(20, 1, true); // PCM format
        view.setUint16(22, 1, true); // mono
        view.setUint32(24, this.inputSampleRate, true);
        view.setUint32(28, this.inputSampleRate * 2, true); // byte rate
        view.setUint16(32, 2, true); // block align
        view.setUint16(34, 16, true); // bits per sample
        this.writeString(view, 36, 'data');
        view.setUint32(40, samples.length * 2, true);

        // Write the PCM data
        this.setFloatTo16BitPCM(view, 44, samples);

        return new Blob([view], {
            type: 'audio/wav'
        });
    }

    setFloatTo16BitPCM(output, offset, input) {
        for (let i = 0; i < input.length; i++, offset += 2) {
            const s = Math.max(-1, Math.min(1, input[i]));
            output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
        }
    }

    writeString(view, offset, string) {
        for (let i = 0; i < string.length; i++) {
            view.setUint8(offset + i, string.charCodeAt(i));
        }
    }

    // Stop the audio stream
    async stop() {
        this.active = false;
        if (this.workletNode) {
            this.workletNode.disconnect();
        }
        if (this.mediaRecorder) {
            this.mediaRecorder.stop();
        }
        if (this.stream) {
            this.stream.getTracks().forEach(track => track.stop());
        }
        if (this.wakeLock !== null && !this.wakeLock.released) {
            this.wakeLock.release();
            this.wakeLock = null;
        }
        const params = {
            header: {
                name: 'StopTranscription',
                namespace: 'SpeechTranscriber',
            },
            payload: {},
        };
        this.ws.send(JSON.stringify(params));
        setTimeout(() => {
            if (this.ws) {
                this.ws.close();
                this.ws = null;
            }
        }, 1000);
        console.log('Audio streaming stopped');

        // Hand the full recording to the callback
        if (this.onAudioBlobCallback && !this.active) {
            // Merge all buffered audio chunks
            const mergedArray = this.mergeArrays(this.audioChunks);

            // Encode as WAV
            const audioBlob = this.encodeWAV(mergedArray);
            const buffer = await audioBlob.arrayBuffer();
            const decodedData = await this.audioContext.decodeAudioData(buffer);
            this.onAudioBlobCallback({
                audioBlob,
                duration: decodedData.duration
            });
        }
    }
    // Pause / resume
    setActive(val) {
        this.active = val;
        if (!val) {
            // Keep the connection alive by sending a small zero-filled buffer (Buffer.alloc is Node-only, so use Uint8Array in the browser)
            const emptyBuffer = new Uint8Array(10);
            this.timer = setInterval(() => {
                this.ws && this.ws.send(emptyBuffer)
            }, 1000);
        } else {
            if (this.timer) {
                clearInterval(this.timer);
                this.timer = null;
            }
        }
    }
}

 



chunked-uploader.js: chunked MP3 conversion and upload

Use lamejs 1.2.0; version 1.2.1 throws errors.
import {
    uploadPartTask,
} from "@/api/common.api.js";
import lamejs from 'lamejs';
export default class ChunkedUploader {
    constructor(file, options = {}) {
        this.file = file;
        this.chunkSizeMB = options.chunkSizeMB || 5; // 5MB
        this.retries = options.retries || 3;
        this.concurrent = options.concurrent || 3;
        this.chunks = [];
        this.totalSize = 0;
        this.uploaded = 0;
        this.taskId = '';
        this.key = '';
        this.onSplitProgress = options.onSplitProgress;
        this.onUploadProgress = options.onUploadProgress;
        this.authorization = uni.getStorageSync('auth');
    }

    async upload() {
        const mp3Chunks = await this.compressWavToMp3(this.file, 0.3, this.onSplitProgress, this.chunkSizeMB);
        this.chunks = mp3Chunks.map((blob, index) => {
            return {
                id: `${index}-${Date.now()}`,
                index: index + 1,
                blob,
                retry: 0,
                status: 'pending'
            }
        })
        try {
            // Start the chunked-upload task on the server
            const {
                data
            } = await uploadPartTask();
            this.taskId = data.taskId;
            this.key = data.key;

            // Concurrency control
            const queue = [];
            for (let i = 0; i < this.chunks.length; i += this.concurrent) {
                const chunks = this.chunks.slice(i, i + this.concurrent);
                const results = await Promise.all(chunks.map(chunk => this.processQueue(chunk)));
                queue.push(...results);
            }
            return queue;
        } catch (err) {
            throw new Error(err);
        }
    }

    async processQueue(chunk) {
        chunk.status = 'uploading';
        try {
            const data = await this.uploadChunk(chunk);
            chunk.status = 'completed';
            this.uploaded++;
            this.onProgress();
            return data;
        } catch (err) {
            if (chunk.retry++ < this.retries) {
                chunk.status = 'pending';
                return this.processQueue(chunk);
            } else {
                chunk.status = 'failed';
                throw new Error(`Chunk ${chunk.index} failed after ${this.retries} retries`);
            }
        }
    }

    uploadChunk(chunk) {
        return new Promise(async (resolve, reject) => {
            uni.uploadFile({
                url: `/uploadPartFiles?taskId=${this.taskId}&key=${this.key}&index=${chunk.index}`,
                file: chunk.blob,
                name: 'file',
                header: {
                    authorization: this.authorization
                },
                success: (uploadFileRes) => {
                    if (uploadFileRes.statusCode === 200) {
                        const data = JSON.parse(uploadFileRes.data);
                        resolve(data.partETag);
                    } else {
                        reject('error');
                    }
                },
                fail: (err) => {
                    reject(err);
                },
            });
        })
    }

    onProgress() {
        const progress = Math.round((this.uploaded / this.chunks.length) * 100);
        console.log(`Upload progress: ${progress}%`);
        // Trigger the UI update...
        this.onUploadProgress(progress);
    }

    /**
     * Compress a WAV audio file to MP3, chunk by chunk
     * @param {Blob} wavBlob - input WAV audio Blob
     * @param {number} quality - compression quality (0.1~1.0)
     * @param {function} onProgress - progress callback (0~100)
     * @param {number} chunkSizeMB - chunk size in MB (default 5 MB)
     * @returns {Promise<Blob[]>} - array of MP3 Blobs, one per chunk
     */
    async compressWavToMp3(wavBlob, quality = 0.7, onProgress, chunkSizeMB = 5) {
        // 1. Initialize parameters
        //const chunkSize = chunkSizeMB * 1024 * 1024; // convert to bytes
        //const totalChunks = Math.ceil(wavBlob.size / chunkSize);
        const mp3Chunks = [];
        let processedChunks = 0;

        const chunks = await this.splitWavBySize(wavBlob, chunkSizeMB);

        // 2. Per-chunk processing function
        const processChunk = async (blob) => {
            // Decode the WAV chunk (with error handling)
            const audioCtx = new(window.AudioContext || window.webkitAudioContext)();
            let audioBuffer;
            try {
                const arrayBuffer = await blob.arrayBuffer();
                audioBuffer = await audioCtx.decodeAudioData(arrayBuffer);
            } catch (err) {
                console.error("WAV解码失败:", err);
                throw new Error("无效的WAV文件");
            }


            // 3. Prepare the encoder (make sure the parameters match)
            //  Derive the bitrate from the quality setting (32kbps~320kbps)
            const bitrate = Math.floor(32 + (320 - 32) * quality);
            const mp3Encoder = new lamejs.Mp3Encoder(
                audioBuffer.numberOfChannels,
                audioBuffer.sampleRate,
                bitrate // bitrate (kbps)
            );

            // 4. Get the PCM data (mono-compatible)
            const leftChannel = audioBuffer.getChannelData(0);
            const rightChannel = audioBuffer.numberOfChannels > 1 ?
                audioBuffer.getChannelData(1) :
                leftChannel;

            // 5. Convert to 16-bit integers
            const convertTo16Bit = (float32) => {
                const int16 = new Int16Array(float32.length);
                for (let i = 0; i < float32.length; i++) {
                    // Important: scale first, then clamp
                    int16[i] = Math.min(32767, Math.max(-32768, float32[i] * 32767));
                }
                return int16;
            };

            const left = convertTo16Bit(leftChannel);
            const right = convertTo16Bit(rightChannel);

            // 6. Encode block by block
            const SAMPLE_BLOCK = 1152; // standard number of samples per MP3 frame
            const mp3Data = [];

            for (let i = 0; i < left.length; i += SAMPLE_BLOCK) {
                let leftChunk = left.subarray(i, i + SAMPLE_BLOCK);
                let rightChunk = right.subarray(i, i + SAMPLE_BLOCK);

                // Keep block sizes consistent (zero padding)
                if (leftChunk.length < SAMPLE_BLOCK) {
                    const paddedLeft = new Int16Array(SAMPLE_BLOCK).fill(0);
                    paddedLeft.set(leftChunk);
                    leftChunk = paddedLeft;

                    const paddedRight = new Int16Array(SAMPLE_BLOCK).fill(0);
                    paddedRight.set(rightChunk);
                    rightChunk = paddedRight;
                }

                const mp3buf = mp3Encoder.encodeBuffer(leftChunk, rightChunk);
                if (mp3buf.length > 0) {
                    mp3Data.push(mp3buf);
                    //console.log(`encoded block ${i/SAMPLE_BLOCK}: ${mp3buf.length} bytes`);
                }
            }

            // 7. Finish encoding (flush the encoder)
            const lastChunk = mp3Encoder.flush();
            if (lastChunk.length > 0) {
                mp3Data.push(lastChunk);
                //console.log("最终块:", lastChunk.length, "字节");
            }

            // 8. Validate the output
            if (mp3Data.length === 0) {
                throw new Error("The encoder produced no data");
            }

            const totalSize = mp3Data.reduce((sum, buf) => sum + buf.length, 0);
            this.totalSize += totalSize;
            //console.log("总MP3大小:", totalSize, "bytes");

            // 9. Report progress
            processedChunks++;
            const progress = Math.floor((processedChunks / chunks.length) * 100);
            if (onProgress) onProgress(progress);

            return new Blob(mp3Data, {
                type: 'audio/mp3'
            });
        };

        // Process all chunks with limited concurrency
        const MAX_CONCURRENT = 3; // maximum concurrent chunks

        // Work in batches to avoid exhausting memory
        for (let i = 0; i < chunks.length; i += MAX_CONCURRENT) {
            const batch = chunks.slice(i, i + MAX_CONCURRENT);
            const results = await Promise.all(batch.map(chunk => processChunk(chunk)));
            mp3Chunks.push(...results);
        }
        return mp3Chunks;

        // Alternatively, merge all chunks into a single Blob:
        // return new Blob(mp3Chunks, {
        //     type: 'audio/mp3'
        // });
    }

    /**
     * Split a WAV file by size while keeping a valid WAV header on each chunk
     * @param {Blob} wavBlob - original WAV file
     * @param {number} chunkSizeMB - size of each chunk in MB
     * @return {Promise<Array<Blob>>} - array of WAV chunk Blobs
     */
    async splitWavBySize(wavBlob, chunkSizeMB = 10) {
        const CHUNK_SIZE = chunkSizeMB * 1024 * 1024;
        const arrayBuffer = await wavBlob.arrayBuffer();
        const header = this.parseWavHeader(arrayBuffer);

        // Make sure split points fall on frame boundaries
        const bytesPerSample = header.bitsPerSample / 8;
        const bytesPerFrame = bytesPerSample * header.numChannels;
        const dataStart = header.dataOffset;
        const dataSize = header.dataSize;

        // Work out a sensible chunk size
        const chunkSize = Math.max(
            CHUNK_SIZE - (CHUNK_SIZE % bytesPerFrame),
            bytesPerFrame * header.sampleRate // at least one second of audio
        );

        const chunks = [];
        let offset = dataStart;

        while (offset < dataStart + dataSize) {
            const chunkEnd = Math.min(offset + chunkSize, dataStart + dataSize);
            const chunkDataSize = chunkEnd - offset;

            // Build a new WAV header for this chunk
            const newHeader = this.createWavHeader(
                header.numChannels,
                header.sampleRate,
                header.bitsPerSample,
                chunkDataSize / bytesPerFrame
            );

            // Concatenate the header and the audio data
            const chunkArray = new Uint8Array(newHeader.byteLength + chunkDataSize);
            chunkArray.set(new Uint8Array(newHeader), 0);
            chunkArray.set(
                new Uint8Array(arrayBuffer, offset, chunkDataSize),
                newHeader.byteLength
            );

            chunks.push(new Blob([chunkArray], {
                type: 'audio/wav'
            }));
            offset = chunkEnd;
        }

        return chunks;
    }

    // Parse the WAV header
    parseWavHeader(arrayBuffer) {
        const view = new DataView(arrayBuffer);
        let offset = 0;

        const chunkID = this.readString(view, offset, 4);
        offset += 4;
        const chunkSize = view.getUint32(offset, true);
        offset += 4;
        const format = this.readString(view, offset, 4);
        offset += 4;

        // Find the fmt chunk
        let fmtOffset = 12;
        while (fmtOffset < view.byteLength) {
            const subchunkID = this.readString(view, fmtOffset, 4);
            const subchunkSize = view.getUint32(fmtOffset + 4, true);

            if (subchunkID === 'fmt ') {
                offset = fmtOffset + 8;
                break;
            }

            fmtOffset += 8 + subchunkSize;
        }

        const audioFormat = view.getUint16(offset, true);
        offset += 2;
        const numChannels = view.getUint16(offset, true);
        offset += 2;
        const sampleRate = view.getUint32(offset, true);
        offset += 4;
        const byteRate = view.getUint32(offset, true);
        offset += 4;
        const blockAlign = view.getUint16(offset, true);
        offset += 2;
        const bitsPerSample = view.getUint16(offset, true);
        offset += 2;

        // Find the data chunk
        let dataOffset = fmtOffset;
        while (dataOffset < view.byteLength) {
            const subchunkID = this.readString(view, dataOffset, 4);
            const subchunkSize = view.getUint32(dataOffset + 4, true);

            if (subchunkID === 'data') {
                return {
                    dataOffset: dataOffset + 8,
                    dataSize: subchunkSize,
                    numChannels,
                    sampleRate,
                    bitsPerSample,
                    byteRate,
                    blockAlign,
                    audioFormat
                };
            }

            dataOffset += 8 + subchunkSize;
        }

        throw new Error('Invalid WAV file: data chunk not found');
    }

    readString(view, offset, length) {
        let str = '';
        for (let i = 0; i < length; i++) {
            str += String.fromCharCode(view.getUint8(offset + i));
        }
        return str;
    }

    // Helper: build a WAV header
    createWavHeader(numChannels, sampleRate, bitDepth, numSamples) {
        const byteRate = (sampleRate * numChannels * bitDepth) / 8;
        const blockAlign = (numChannels * bitDepth) / 8;
        const dataSize = numSamples * numChannels * (bitDepth / 8);

        const buffer = new ArrayBuffer(44);
        const view = new DataView(buffer);

        // RIFF identifier
        this.writeString(view, 0, 'RIFF');
        view.setUint32(4, 36 + dataSize, true);
        this.writeString(view, 8, 'WAVE');
        // fmt sub-chunk
        this.writeString(view, 12, 'fmt ');
        view.setUint32(16, 16, true); // sub-chunk size
        view.setUint16(20, 1, true); // PCM format
        view.setUint16(22, numChannels, true);
        view.setUint32(24, sampleRate, true);
        view.setUint32(28, byteRate, true);
        view.setUint16(32, blockAlign, true);
        view.setUint16(34, bitDepth, true);
        // data sub-chunk
        this.writeString(view, 36, 'data');
        view.setUint32(40, dataSize, true);

        return buffer;
    }

    writeString(view, offset, string) {
        for (let i = 0; i < string.length; i++) {
            view.setUint8(offset + i, string.charCodeAt(i));
        }
    }

}

 

Invocation (the canvas width/height attributes need to be twice the CSS size, otherwise the drawing looks blurry; a sizing sketch follows):
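
A minimal sketch of that 2x sizing, run inside the component (for example in mounted) before visualize() is called. The .canvas selector and this.ctx match the code below; scaling by devicePixelRatio (typically 2 on phones) is an assumption, not the author's exact setup:

// Size the canvas backing store at devicePixelRatio times its CSS size (sketch).
const canvas = document.querySelector('.canvas');
const dpr = window.devicePixelRatio || 2;   // typically 2 on mobile
canvas.width = canvas.clientWidth * dpr;    // attribute (drawing-buffer) width
canvas.height = canvas.clientHeight * dpr;  // attribute height; the CSS size stays unchanged
this.ctx = canvas.getContext('2d');         // visualize() below draws with this.ctx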

// Handle page visibility changes
            visibilitychange() {
                document.addEventListener('visibilitychange', () => {
                    if (!document.hidden && this.isRecording && this.task.id) {
                        console.log('show---');
                        // Check the WebSocket state
                        if (this.ws.readyState === WebSocket.CLOSED) {
                            // Clear the intermediate state of the previous task
                            this.dataList.forEach((item) => {
                                if (item.isChange) {
                                    item.isChange = false;
                                }
                            })

                            // Create a new task
                            getRestartVoiceWs({
                                id: this.task.id
                            }, async (res) => {
                                console.log('task-------', res.data);
                                this.task = res.data;
                                const {
                                    ws
                                } = await this.audioStreamer.reconnect(res.data.ws);
                                this.ws = ws;
                            })
                        } else {
                            this.audioStreamer.reconnect();
                        }
                    } else if (document.hidden) {
                        console.log('hidden---');
                        //this.ws.close(); // simulate a disconnect
                    }
                });
            },
            // Request microphone permission and start recording
            startRecording() {
                if (!this.task.id) {
                    getVoiceWs({}, async (res) => {
                        this.isRecording = true;
                        this.task = res.data;
                        const audioStreamer = new AudioStreamer({
                            wsUrl: res.data.ws,
                        });
                        this.audioStreamer = audioStreamer;
                        const worklet = await audioStreamer.init(this.messageHandle, this.audioBlobHandle);
                        this.analyser = worklet.analyser;
                        this.ws = worklet.ws;
                        // Start the animation
                        this.visualize();

                        this.timer = setInterval(() => {
                            this.duration += 1;
                        }, 1000);
                    })
                } else {
                    this.isRecording = true;
                    this.audioStreamer.setActive(true);
                    this.timer = setInterval(() => {
                        this.duration += 1;
                    }, 1000);
                    // Start the animation
                    this.visualize();
                }
            },
            // Pause recording
            stopRecording() {
                this.isRecording = false;
                if (this.timer) {
                    clearInterval(this.timer);
                    this.timer = null;
                }
                cancelAnimationFrame(this.animationId);
                this.audioStreamer.setActive(false);
            },
            messageHandle(msg) {
                if (msg.name === 'TranscriptionResultChanged') {
                    // Intermediate result
                    const index = this.dataList.findIndex((item) => item.isChange && item.index === msg
                        .index);
                    if (index >= 0) {
                        this.resultChangedHandle(msg.result);
                    } else {
                        this.dataList.push({
                            isChange: true,
                            index: msg.index,
                            speakerId: msg.speaker_id || 0,
                            content: msg.result,
                            duration: util.secondsToHMS(0, false),
                            bg: this.colors[Number(msg.speaker_id || 0)] || ''
                        });
                    }
                } else {
                    this.lineBuffer = '';
                    // Final sentence result
                    // Remove the previous intermediate entry
                    const index = this.dataList.findIndex((item) => item.isChange && item.index === msg
                        .index);
                    index >= 0 && this.dataList.splice(index, 1);
                    this.dataList.push({
                        index: msg.index,
                        speakerId: msg.speaker_id,
                        content: msg.result,
                        duration: util.secondsToHMS(parseInt((msg.time - msg.begin_time) / 1000), false),
                        bg: this.colors[Number(msg.speaker_id)] || ''
                    });
                }

                // Scroll the chat box to the bottom
                this.$nextTick(() => {
                    const box = document.getElementById('chat-box');
                    box.scrollTo(0, box.scrollHeight)
                })
            },
            resultChangedHandle(newText) {
                let lastReceived = this.dataList.find((item) => item.isChange);
                let lastReceivedText = this.lineBuffer;
                if (lastReceivedText === '一') {
                    lastReceivedText = '';
                }
                // Find the newly added characters
                let newCharacters = '';
                for (let i = 0; i < newText.length; i++) {
                    if (newText[i] !== lastReceivedText[i]) {
                        newCharacters = newText.substring(i);
                        lastReceivedText = lastReceivedText.substring(0, i);
                        break;
                    }
                }

                // If there are new characters, append them
                if (newCharacters) {
                    this.lineBuffer = lastReceivedText + newCharacters;
                    lastReceived.content = this.lineBuffer;
                }
            },
            async audioBlobHandle(data) {
                this.task.duration = data.duration;
                const uploader = new ChunkedUploader(data.audioBlob, {
                    chunkSizeMB: 50, // 50 MB per chunk
                    //concurrent: 1,
                    onSplitProgress(progress) {
                        uni.showLoading({
                            title: `Transcoding audio ${progress}%`
                        });
                    },
                    onUploadProgress(progress) {
                        uni.showLoading({
                            title: `Uploading audio ${progress}%`
                        });
                    },
                });
                uploader.upload().then((partETags) => {
                    // Tell the server to merge the chunks
                    overUploadPartFiles({
                        taskId: uploader.taskId,
                        key: uploader.key,
                        partETags,
                        fileName: moment().format('YYYY-MM-DD HH:mm:ss') + '.mp3',
                        fileSize: uploader.totalSize
                    }, res => {
                        const data = JSON.parse(res.data);
                        uni.hideLoading();
                        this.task.voiceUrl = data.url;
                        this.saveHandle();
                    })
                });
            },
            save() {
                uni.showLoading({
                    title: `Loading`
                });
                this.isRecording && this.stopRecording();
                this.audioStreamer.stop();
            },
            saveHandle() {
                this.task.voiceTitle = moment().format('YYYY-MM-DD HH:mm:ss');
                this.showModal = true;
            },
            // Visualization animation
            visualize(isInit) {
                const _this = this;
                const canvas = document.querySelector('.canvas');
                // Get the audio data
                const bufferLength = this.analyser?.frequencyBinCount;
                const dataArray = new Uint8Array(bufferLength);

                // Animation loop
                function draw() {
                    _this.animationId = requestAnimationFrame(draw);

                    // Clear the canvas
                    _this.ctx.fillStyle = 'rgba(0, 0, 0, 0.1)';
                    _this.ctx.fillRect(0, 0, canvas.width, canvas.height);

                    // Get the frequency data
                    _this.analyser.getByteFrequencyData(dataArray);

                    // Draw
                    drawCenteredBars(dataArray, bufferLength);

                }
                if (isInit) {
                    const emptyBuffer = new Uint8Array(10); // zero-filled placeholder (Buffer is Node-only)
                    drawCenteredBars(emptyBuffer, 10);
                } else {
                    draw();
                }

                // Draw the centered bar visualization
                function drawCenteredBars(dataArray, bufferLength, displayBars = 6) {
                    const centerX = canvas.width / 2;
                    const maxBarWidth = 15;
                    const minGap = 3;

                    // Dynamically compute the bar width and the gap
                    const barWidth = Math.min(maxBarWidth,
                        (canvas.width - minGap * (displayBars - 1)) / displayBars);
                    const gap = Math.max(minGap,
                        (canvas.width - barWidth * displayBars) / (displayBars - 1));

                    const totalWidth = (barWidth + gap) * displayBars - gap;
                    const startX = centerX - totalWidth / 2;

                    _this.ctx.clearRect(0, 0, canvas.width, canvas.height);

                    // Only draw a subset of the bars (better performance)
                    const step = Math.max(1, Math.floor(bufferLength / displayBars));

                    for (let i = 0; i < displayBars; i++) {
                        const dataIndex = Math.min(i * step, bufferLength - 1);
                        const barHeight = Math.max(2, (dataArray[dataIndex] / 255) * canvas.height * 0.8);
                        const x = startX + i * (barWidth + gap);
                        const y = (canvas.height - barHeight) / 2;

                        // Gradient fill (left disabled)
                        //const gradient = _this.ctx.createLinearGradient(x, y, x, canvas.height);
                        // //gradient.addColorStop(0, getBarColor(i, displayBars, dataArray[dataIndex]));
                        //gradient.addColorStop(0, '#1F65FF');
                        //gradient.addColorStop(1, 'rgba(0,0,0,0.7)');

                        _this.ctx.fillStyle = '#1F65FF';
                        _this.ctx.fillRect(x, y, barWidth, barHeight);

                        // 高光效果
                        _this.ctx.fillStyle = 'rgba(255,255,255,0.2)';
                        _this.ctx.fillRect(x + 1, y + 1, barWidth - 2, 2);
                    }
                }
                // Helper: get a bar color
                function getBarColor(index, total, value) {
                    const hue = (index / total) * 360;
                    const saturation = 100;
                    const lightness = 30 + (value / 255) * 40;
                    return `hsl(${hue}, ${saturation}%, ${lightness}%)`;
                }
            },
        

Summary: DeepSeek is very useful and most of this came from asking it. Some of the methods it suggests may well be made up, though: they look perfectly logical, yet the audio they produce is corrupted, so you have to come at the problem from a different angle and ask about that step separately.

 