iFlytek official site: 讯飞开放平台 (the iFlytek Open Platform, xfyun.cn), an AI open platform built around voice interaction.

Install the dependencies: npm install crypto-js @originjs/vite-plugin-commonjs @vitejs/plugin-vue. (With Vite, worker-loader is not needed; the worker is created natively via new Worker(new URL(...)), as shown below.)

### vite.config.js

import { defineConfig } from "vite";
import vue from "@vitejs/plugin-vue";
import { viteCommonjs } from "@originjs/vite-plugin-commonjs";
// https://vite.dev/config/
export default defineConfig({
  plugins: [
    vue(),
    viteCommonjs({
      transformMixedEsModules: true, // allow mixing CommonJS and ES module syntax
    }),
  ],
});

Directory structure
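The files created in the rest of this post end up laid out roughly like this (only the utils part is shown):

src/
└─ utils/
   ├─ lat_xunfei/
   │  ├─ IatRecorder.js
   │  └─ transcode.worker.js
   └─ tts_xunfei/
      ├─ audio.js
      ├─ base64js.js
      └─ transcode.worker.js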

## Speech recognition (IAT)

Under utils, create a lat_xunfei folder with two files: IatRecorder.js and transcode.worker.js.

### IatRecorder.js

Fill in the credentials of the application you registered on the iFlytek console (new personal accounts get 500 free calls):

APPID = "";
API_SECRET = "";
API_KEY = "";
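If you prefer not to hard-code the keys, a minimal sketch using Vite's env-variable mechanism (the VITE_XF_* names below are made up for illustration) is a .env.local file plus import.meta.env:

// .env.local (keep it out of version control)
// VITE_XF_APPID=your_appid
// VITE_XF_API_SECRET=your_api_secret
// VITE_XF_API_KEY=your_api_key

// then, at the top of IatRecorder.js:
const APPID = import.meta.env.VITE_XF_APPID;
const API_SECRET = import.meta.env.VITE_XF_API_SECRET;
const API_KEY = import.meta.env.VITE_XF_API_KEY;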


const APPID = "";
const API_SECRET = "";
const API_KEY = "";
import CryptoJS from "crypto-js";
const transWorker = new Worker(
  new URL("./transcode.worker.js", import.meta.url)
);

console.log(transWorker);
var startTime = "";
var endTime = "";

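// build the authenticated wss URL: the host, date and request line are signed with
// HMAC-SHA256 using the API secret, and the result is packed into an authorization query parameter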
function getWebSocketUrl() {
  return new Promise((resolve, reject) => {
    // the request URL differs by language
    var url = "wss://iat-api.xfyun.cn/v2/iat";
    var host = "iat-api.xfyun.cn";
    var apiKey = API_KEY;
    var apiSecret = API_SECRET;
    var date = new Date().toUTCString();
    var algorithm = "hmac-sha256";
    var headers = "host date request-line";
    var signatureOrigin = `host: ${host}\ndate: ${date}\nGET /v2/iat HTTP/1.1`;
    var signatureSha = CryptoJS.HmacSHA256(signatureOrigin, apiSecret);
    var signature = CryptoJS.enc.Base64.stringify(signatureSha);
    var authorizationOrigin = `api_key="${apiKey}", algorithm="${algorithm}", headers="${headers}", signature="${signature}"`;
    var authorization = btoa(authorizationOrigin);
    url = `${url}?authorization=${authorization}&date=${date}&host=${host}`;
    resolve(url);
  });
}
const IatRecorder = class {
  constructor({ language, accent, appId } = {}) {
    let self = this;
    this.status = "null";
    this.language = language || "zh_cn";
    this.accent = accent || "mandarin";
    this.appId = appId || APPID;
    // buffered audio data
    this.audioData = [];
    // accumulated recognition text
    this.resultText = "";
    // with wpgs (dynamic correction) enabled, intermediate results need a temporary buffer
    this.resultTextTemp = "";
    transWorker.onmessage = function (event) {
      // console.log("in constructor", self.audioData)
      self.audioData.push(...event.data);
    };
  }

  // update the recording/recognition status
  setStatus(status) {
    this.onWillStatusChange &&
      this.status !== status &&
      this.onWillStatusChange(this.status, status);
    this.status = status;
  }
  setResultText({ resultText, resultTextTemp } = {}) {
    this.onTextChange && this.onTextChange(resultTextTemp || resultText || "");
    resultText !== undefined && (this.resultText = resultText);
    resultTextTemp !== undefined && (this.resultTextTemp = resultTextTemp);
  }
  // update recognition parameters
  setParams({ language, accent } = {}) {
    language && (this.language = language);
    accent && (this.accent = accent);
  }
  // connect to the WebSocket
  connectWebSocket() {
    return getWebSocketUrl().then((url) => {
      let iatWS;
      if ("WebSocket" in window) {
        iatWS = new WebSocket(url);
      } else if ("MozWebSocket" in window) {
        iatWS = new MozWebSocket(url);
      } else {
        alert("浏览器不支持WebSocket");
        return;
      }
      this.webSocket = iatWS;
      this.setStatus("init");
      iatWS.onopen = (e) => {
        this.setStatus("ing");
        // start sending the recorded audio shortly after the connection opens
        setTimeout(() => {
          this.webSocketSend();
        }, 500);
      };
      iatWS.onmessage = (e) => {
        this.result(e.data);
      };
      iatWS.onerror = (e) => {
        this.recorderStop();
      };
      iatWS.onclose = (e) => {
        endTime = Date.parse(new Date());
        console.log("持续时间", endTime - startTime);
        this.recorderStop();
      };
    });
  }
  // initialise browser recording
  recorderInit() {
    navigator.getUserMedia =
      navigator.getUserMedia ||
      navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia ||
      navigator.msGetUserMedia;

    // create the audio context
    try {
      this.audioContext = new (window.AudioContext ||
        window.webkitAudioContext)();
      this.audioContext.resume();
      if (!this.audioContext) {
        alert("This browser does not support the Web Audio API");
        return;
      }
    } catch (e) {
      if (!this.audioContext) {
        alert("This browser does not support the Web Audio API");
        return;
      }
    }

    // request microphone permission
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
      navigator.mediaDevices
        .getUserMedia({
          audio: true,
          video: false,
        })
        .then((stream) => {
          getMediaSuccess(stream);
        })
        .catch((e) => {
          getMediaFail(e);
        });
    } else if (navigator.getUserMedia) {
      navigator.getUserMedia(
        {
          audio: true,
          video: false,
        },
        (stream) => {
          getMediaSuccess(stream);
        },
        function (e) {
          getMediaFail(e);
        }
      );
    } else {
      if (
        navigator.userAgent.toLowerCase().match(/chrome/) &&
        location.origin.indexOf("https://") < 0
      ) {
        alert(
          "For security reasons Chrome only grants microphone access on localhost, 127.0.0.1 or https"
        );
      } else {
        alert("Recording is unavailable; please upgrade your browser or use Chrome");
      }
      this.audioContext && this.audioContext.close();
      return;
    }
    // callback when microphone permission is granted
    let getMediaSuccess = (stream) => {
      // create a ScriptProcessorNode to handle the raw audio in JavaScript
      this.scriptProcessor = this.audioContext.createScriptProcessor(0, 1, 1);
      this.scriptProcessor.onaudioprocess = (e) => {
        // hand each audio frame to the worker for transcoding
        if (this.status === "ing") {
          transWorker.postMessage(e.inputBuffer.getChannelData(0));
          //  this.audioData.push(e.inputBuffer.getChannelData(0))
        }
      };
      // create a MediaStreamAudioSourceNode so the MediaStream audio can be processed
      this.mediaSource = this.audioContext.createMediaStreamSource(stream);
      // connect the nodes
      this.mediaSource.connect(this.scriptProcessor);
      this.scriptProcessor.connect(this.audioContext.destination);
      this.connectWebSocket();
    };

    let getMediaFail = (e) => {
      this.audioContext && this.audioContext.close();
      this.audioContext = undefined;
      // close the websocket
      if (this.webSocket && this.webSocket.readyState === 1) {
        this.webSocket.close();
      }
    };
  }
  recorderStart() {
    if (!this.audioContext) {
      this.recorderInit();
    } else {
      this.audioContext.resume();
      this.connectWebSocket();
    }
  }
  // pause recording
  recorderStop() {
    // in Safari, resuming after suspend() yields blank audio, so skip suspend there
    if (
      !(
        /Safari/.test(navigator.userAgent) && !/Chrome/.test(navigator.userAgent)
      )
    ) {
      this.audioContext && this.audioContext.suspend();
    }
    this.setStatus("end");
  }
  // process audio data
  // transAudioData(audioData) {
  //   audioData = transAudioData.transaction(audioData)
  //   this.audioData.push(...audioData)
  // }
  // base64-encode the processed audio data
  toBase64(buffer) {
    var binary = "";
    var bytes = new Uint8Array(buffer);
    var len = bytes.byteLength;
    for (var i = 0; i < len; i++) {
      binary += String.fromCharCode(bytes[i]);
    }
    return window.btoa(binary);
  }
  // send data over the WebSocket
  webSocketSend() {
    if (this.webSocket.readyState !== 1) {
      return;
    }
    let audioData = this.audioData.splice(0, 1280);
    var params = {
      common: {
        app_id: this.appId,
      },
      business: {
        language: this.language, // minority languages can be enabled for trial in the console under IAT (streaming) -> dialect/language
        domain: "iat",
        accent: this.accent, // Chinese dialects can be enabled for trial in the same place
      },
      data: {
        status: 0,
        format: "audio/L16;rate=16000",
        encoding: "raw",
        audio: this.toBase64(audioData),
      },
    };
    console.log("参数language:", this.language);
    console.log("参数accent:", this.accent);
    this.webSocket.send(JSON.stringify(params));
    startTime = Date.parse(new Date());
    this.handlerInterval = setInterval(() => {
      // websocket not connected
      if (this.webSocket.readyState !== 1) {
        console.log("websocket not connected");
        this.audioData = [];
        clearInterval(this.handlerInterval);
        return;
      }
      if (this.audioData.length === 0) {
        console.log("自动关闭", this.status);
        if (this.status === "end") {
          this.webSocket.send(
            JSON.stringify({
              data: {
                status: 2,
                format: "audio/L16;rate=16000",
                encoding: "raw",
                audio: "",
              },
            })
          );
          this.audioData = [];
          clearInterval(this.handlerInterval);
        }
        return false;
      }
      audioData = this.audioData.splice(0, 1280);
      // intermediate frame
      this.webSocket.send(
        JSON.stringify({
          data: {
            status: 1,
            format: "audio/L16;rate=16000",
            encoding: "raw",
            audio: this.toBase64(audioData),
          },
        })
      );
    }, 40);
  }
  result(resultData) {
    // handle a recognition result message
    let jsonData = JSON.parse(resultData);
    if (jsonData.data && jsonData.data.result) {
      let data = jsonData.data.result;
      let str = "";
      let resultStr = "";
      let ws = data.ws;
      for (let i = 0; i < ws.length; i++) {
        str = str + ws[i].cw[0].w;
      }
      console.log("识别的结果为:", str);
      // 开启wpgs会有此字段(前提:在控制台开通动态修正功能)
      // 取值为 "apd"时表示该片结果是追加到前面的最终结果;取值为"rpl" 时表示替换前面的部分结果,替换范围为rg字段
      if (data.pgs) {
        if (data.pgs === "apd") {
          // promote resultTextTemp to resultText
          this.setResultText({
            resultText: this.resultTextTemp,
          });
        }
        // store the partial result in resultTextTemp
        this.setResultText({
          resultTextTemp: this.resultText + str,
        });
      } else {
        this.setResultText({
          resultText: this.resultText + str,
        });
      }
    }
    if (jsonData.code === 0 && jsonData.data.status === 2) {
      this.webSocket.close();
    }
    if (jsonData.code !== 0) {
      this.webSocket.close();
      console.log(`${jsonData.code}:${jsonData.message}`);
    }
  }
  start() {
    this.recorderStart();
    this.setResultText({ resultText: "", resultTextTemp: "" });
  }
  stop() {
    this.recorderStop();
  }
};

export default IatRecorder;

### transcode.worker.js

Because the project uses Vite, the worker cannot be imported with import Worker from "./transcode.worker.js" (the webpack/worker-loader style); doing so throws an error. See https://cn.vitejs.dev/guide/features#web-workers for the explanation.
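For reference, the two import styles side by side (the second form is the one already used at the top of IatRecorder.js):

// webpack / worker-loader style, fails under Vite:
// import TranscodeWorker from "./transcode.worker.js";
// const transWorker = new TranscodeWorker();

// Vite style: the worker URL is resolved and bundled by Vite itself
const transWorker = new Worker(
  new URL("./transcode.worker.js", import.meta.url)
);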

transcode.worker.js is written as follows:

self.onmessage = function (e) {
  transAudioData.transcode(e.data);
};
let transAudioData = {
  transcode(audioData) {
    let output = transAudioData.to16kHz(audioData);
    output = transAudioData.to16BitPCM(output);
    output = Array.from(new Uint8Array(output.buffer));
    self.postMessage(output);
    // return output
  },
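  // resample from the capture rate (assumed to be 44.1 kHz here) down to the 16 kHz the IAT API expects, by linear interpolation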
  to16kHz(audioData) {
    var data = new Float32Array(audioData);
    var fitCount = Math.round(data.length * (16000 / 44100));
    var newData = new Float32Array(fitCount);
    var springFactor = (data.length - 1) / (fitCount - 1);
    newData[0] = data[0];
    for (let i = 1; i < fitCount - 1; i++) {
      var tmp = i * springFactor;
      var before = Math.floor(tmp).toFixed();
      var after = Math.ceil(tmp).toFixed();
      var atPoint = tmp - before;
      newData[i] = data[before] + (data[after] - data[before]) * atPoint;
    }
    newData[fitCount - 1] = data[data.length - 1];
    return newData;
  },
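  // convert Float32 samples in [-1, 1] to 16-bit little-endian PCM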
  to16BitPCM(input) {
    var dataLength = input.length * (16 / 8);
    var dataBuffer = new ArrayBuffer(dataLength);
    var dataView = new DataView(dataBuffer);
    var offset = 0;
    for (var i = 0; i < input.length; i++, offset += 2) {
      var s = Math.max(-1, Math.min(1, input[i]));
      dataView.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
    }
    return dataView;
  },
};

### Using speech recognition

<template>
  <div class="conter">
    <button @click="translationStart">开始</button>
    <button @click="translationEnd">停止</button>
  </div>
</template>

<script setup>
import { ref } from 'vue';
import IatRecorder from "../utils/lat_xunfei/IatRecorder.js";
const searchData = ref('');
const iatRecorder = new IatRecorder({ language: "en_us", accent: "mandarin", appId: "9abbbfb0" });
const translationStart = () => {
  iatRecorder.start();
};
const translationEnd = () => {
  iatRecorder.onTextChange = (text) => {
    const inputText = text;
    searchData.value = inputText.substring(0, inputText.length - 1);
    // text post-processing: the recognized output always seems to end with '。', so drop the last character
    console.log(searchData.value);
  };
  iatRecorder.stop();
};
</script>

<style scoped></style>
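If you want the text to update while you are still speaking, the callback can also be registered once, before start(). A minimal variant of the two handlers above (same iatRecorder and searchData as in the component):

// variant: register the callback up front so partial results stream into searchData while speaking
iatRecorder.onTextChange = (text) => {
  searchData.value = text;
};
const translationStart = () => {
  iatRecorder.start();
};
const translationEnd = () => {
  iatRecorder.stop();
};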

Note: the browser will only grant microphone access in a secure context, i.e. https with a valid certificate, or on localhost / 127.0.0.1.
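If you need to test on something other than localhost, one option (an assumption on my part, not part of the original setup) is a self-signed https dev server via @vitejs/plugin-basic-ssl:

// vite.config.js (dev only): npm install -D @vitejs/plugin-basic-ssl
import { defineConfig } from "vite";
import vue from "@vitejs/plugin-vue";
import basicSsl from "@vitejs/plugin-basic-ssl";

export default defineConfig({
  // basicSsl() serves the dev site over https with a self-signed certificate
  plugins: [vue(), basicSsl()],
});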

## iFlytek speech synthesis (TTS)

Under utils, create a tts_xunfei folder with three files: audio.js, base64js.js and transcode.worker.js.

### audio.js



// Online speech synthesis (TTS) WebAPI example. API docs (required reading):
// https://www.xfyun.cn/doc/tts/online_tts/API.html

// 1. WebSocket connection: check browser support, build the websocket url (generated locally here for convenience) and connect
// 2. Send the request over the websocket and receive the returned data in real time
// 3. Convert the returned data into audio the browser can play
// 4. Play the audio
// ps: the example uses some ES6 syntax; running it in Chrome is recommended
import CryptoJS from "crypto-js";


const transWorker = new Worker(
  new URL("./transcode.worker.js", import.meta.url)
);

import { Base64 } from "./base64js.js";


// APPID, APISecret and APIKey are obtained from the console under My Apps -> TTS (streaming)
const APPID = "";
const API_SECRET = "";
const API_KEY = "";

function getWebsocketUrl() {
  return new Promise((resolve, reject) => {
    var apiKey = API_KEY;
    var apiSecret = API_SECRET;
    var url = "wss://tts-api.xfyun.cn/v2/tts";
    var host = location.host;
    var date = new Date().toUTCString();
    var algorithm = "hmac-sha256";
    var headers = "host date request-line";
    var signatureOrigin = `host: ${host}\ndate: ${date}\nGET /v2/tts HTTP/1.1`;
    var signatureSha = CryptoJS.HmacSHA256(signatureOrigin, apiSecret);
    var signature = CryptoJS.enc.Base64.stringify(signatureSha);
    var authorizationOrigin = `api_key="${apiKey}", algorithm="${algorithm}", headers="${headers}", signature="${signature}"`;
    var authorization = btoa(authorizationOrigin);
    url = `${url}?authorization=${authorization}&date=${date}&host=${host}`;
    resolve(url);
  });
}
const TTSRecorder = class {
  constructor({
    speed = 30,
    voice = 50,
    pitch = 50,
    voiceName = "xiaoyan",
    appId = APPID,
    text = "",
    tte = "UTF8",
    defaultText = "请输入您要合成的文本",
  } = {}) {
    this.speed = speed;
    this.voice = voice;
    this.pitch = pitch;
    this.voiceName = voiceName;
    this.text = text;
    this.tte = tte;
    this.defaultText = defaultText;
    this.appId = appId;
    this.audioData = [];
    this.rawAudioData = [];
    this.audioDataOffset = 0;
    this.status = "init";
    transWorker.onmessage = (e) => {
      this.audioData.push(...e.data.data);
      this.rawAudioData.push(...e.data.rawAudioData);
    };
  }
  // update the synthesis status
  setStatus(status) {
    this.onWillStatusChange && this.onWillStatusChange(this.status, status);
    this.status = status;
  }

  // set synthesis parameters
  setParams({ speed, voice, pitch, text, voiceName, tte }) {
    speed !== undefined && (this.speed = speed);
    voice !== undefined && (this.voice = voice);
    pitch !== undefined && (this.pitch = pitch);
    text && (this.text = text);
    tte && (this.tte = tte);
    voiceName && (this.voiceName = voiceName);
    this.resetAudio();
  }
  // connect to the WebSocket
  connectWebSocket() {
    this.setStatus("ttsing");
    return getWebsocketUrl().then((url) => {
      let ttsWS;
      if ("WebSocket" in window) {
        ttsWS = new WebSocket(url);
      } else if ("MozWebSocket" in window) {
        ttsWS = new MozWebSocket(url);
      } else {
        alert("浏览器不支持WebSocket");
        return;
      }
      this.ttsWS = ttsWS;
      ttsWS.onopen = (e) => {
        this.webSocketSend();
        this.playTimeout = setTimeout(() => {
          this.audioPlay();
        }, 1000);
      };
      ttsWS.onmessage = (e) => {
        this.result(e.data);
      };
      ttsWS.onerror = (e) => {
        clearTimeout(this.playTimeout);
        this.setStatus("errorTTS");
        alert("WebSocket报错,请f12查看详情");
        console.error(`详情查看:${encodeURI(url.replace("wss:", "https:"))}`);
      };
      ttsWS.onclose = (e) => {
        // console.log(e)
      };
    });
  }
  // process audio data
  transToAudioData(audioData) {}
  // send data over the WebSocket
  webSocketSend() {
    var params = {
      common: {
        app_id: this.appId, // APPID
      },
      business: {
        aue: "raw",
        // sfl= 1,
        auf: "audio/L16;rate=16000",
        vcn: this.voiceName,
        speed: this.speed,
        volume: this.voice,
        pitch: this.pitch,
        bgs: 0,
        tte: this.tte,
      },
      data: {
        status: 2,
        text: this.encodeText(
          this.text || this.defaultText,
          this.tte === "unicode" ? "base64&utf16le" : ""
        ),
      },
    };
    this.ttsWS.send(JSON.stringify(params));
  }
  encodeText(text, encoding) {
    switch (encoding) {
      case "utf16le": {
        let buf = new ArrayBuffer(text.length * 4);
        let bufView = new Uint16Array(buf);
        for (let i = 0, strlen = text.length; i < strlen; i++) {
          bufView[i] = text.charCodeAt(i);
        }
        return buf;
      }
      case "buffer2Base64": {
        let binary = "";
        let bytes = new Uint8Array(text);
        let len = bytes.byteLength;
        for (let i = 0; i < len; i++) {
          binary += String.fromCharCode(bytes[i]);
        }
        return window.btoa(binary);
      }
      case "base64&utf16le": {
        return this.encodeText(
          this.encodeText(text, "utf16le"),
          "buffer2Base64"
        );
      }
      default: {
        return Base64.encode(text);
      }
    }
  }
  // handle data received from the WebSocket
  result(resultData) {
    let jsonData = JSON.parse(resultData);
    // synthesis failed
    if (jsonData.code !== 0) {
      alert(`Synthesis failed: ${jsonData.code}:${jsonData.message}`);
      console.error(`${jsonData.code}:${jsonData.message}`);
      this.resetAudio();
      return;
    }
    transWorker.postMessage(jsonData.data.audio);

    if (jsonData.code === 0 && jsonData.data.status === 2) {
      this.ttsWS.close();
    }
  }
  // reset audio data
  resetAudio() {
    this.audioStop();
    this.setStatus("init");
    this.audioDataOffset = 0;
    this.audioData = [];
    this.rawAudioData = [];
    this.ttsWS && this.ttsWS.close();
    clearTimeout(this.playTimeout);
  }
  // initialise audio playback
  audioInit() {
    let AudioContext = window.AudioContext || window.webkitAudioContext;
    if (AudioContext) {
      this.audioContext = new AudioContext();
      this.audioContext.resume();
      this.audioDataOffset = 0;
    }
  }
  // play audio
  audioPlay() {
    this.setStatus("play");
    let audioData = this.audioData.slice(this.audioDataOffset);
    this.audioDataOffset += audioData.length;
    let audioBuffer = this.audioContext.createBuffer(
      1,
      audioData.length,
      22050
    );
    let nowBuffering = audioBuffer.getChannelData(0);
    if (audioBuffer.copyToChannel) {
      audioBuffer.copyToChannel(new Float32Array(audioData), 0, 0);
    } else {
      for (let i = 0; i < audioData.length; i++) {
        nowBuffering[i] = audioData[i];
      }
    }
    let bufferSource = (this.bufferSource =
      this.audioContext.createBufferSource());
    bufferSource.buffer = audioBuffer;
    bufferSource.connect(this.audioContext.destination);
    bufferSource.start();
    bufferSource.onended = (event) => {
      if (this.status !== "play") {
        return;
      }
      if (this.audioDataOffset < this.audioData.length) {
        this.audioPlay();
      } else {
        this.audioStop();
      }
    };
  }
  // playback finished
  audioStop() {
    this.setStatus("endPlay");
    clearTimeout(this.playTimeout);
    this.audioDataOffset = 0;
    if (this.bufferSource) {
      try {
        this.bufferSource.stop();
      } catch (e) {
        // console.log(e)
      }
    }
  }
  start() {
    if (this.audioData.length) {
      this.audioPlay();
    } else {
      if (!this.audioContext) {
        this.audioInit();
      }
      if (!this.audioContext) {
        alert("该浏览器不支持webAudioApi相关接口");
        return;
      }
      this.connectWebSocket();
    }
  }
  stop() {
    this.audioStop();
  }
};
export default TTSRecorder;

### base64js.js

'use strict';

// constants
const version = "2.5.1";
const b64chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const b64tab = function(bin) {
  const t = {};
  for (let i = 0, l = bin.length; i < l; i++) t[bin.charAt(i)] = i;
  return t;
}(b64chars);

const fromCharCode = String.fromCharCode;

// encoder stuff
const cb_utob = function(c) {
  if (c.length < 2) {
    const cc = c.charCodeAt(0);
    return cc < 0x80 ? c
      : cc < 0x800 ? (fromCharCode(0xc0 | (cc >>> 6))
      + fromCharCode(0x80 | (cc & 0x3f)))
        : (fromCharCode(0xe0 | ((cc >>> 12) & 0x0f))
        + fromCharCode(0x80 | ((cc >>>  6) & 0x3f))
        + fromCharCode(0x80 | ( cc         & 0x3f)));
  } else {
    const cc = 0x10000
      + (c.charCodeAt(0) - 0xD800) * 0x400
      + (c.charCodeAt(1) - 0xDC00);
    return (fromCharCode(0xf0 | ((cc >>> 18) & 0x07))
    + fromCharCode(0x80 | ((cc >>> 12) & 0x3f))
    + fromCharCode(0x80 | ((cc >>>  6) & 0x3f))
    + fromCharCode(0x80 | ( cc         & 0x3f)));
  }
};

const re_utob = /[\uD800-\uDBFF][\uDC00-\uDFFFF]|[^\x00-\x7F]/g;
const utob = function(u) {
  return u.replace(re_utob, cb_utob);
};

const cb_encode = function(ccc) {
  const padlen = [0, 2, 1][ccc.length % 3];
  const ord = ccc.charCodeAt(0) << 16
    | ((ccc.length > 1 ? ccc.charCodeAt(1) : 0) << 8)
    | ((ccc.length > 2 ? ccc.charCodeAt(2) : 0));
  const chars = [
    b64chars.charAt( ord >>> 18),
    b64chars.charAt((ord >>> 12) & 63),
    padlen >= 2 ? '=' : b64chars.charAt((ord >>> 6) & 63),
    padlen >= 1 ? '=' : b64chars.charAt(ord & 63)
  ];
  return chars.join('');
};

const btoa = globalThis.btoa ? function(b) {
  return globalThis.btoa(b);
} : function(b) {
  return b.replace(/[\s\S]{1,3}/g, cb_encode);
};

const _encode = function(u) { return btoa(utob(u)) };

const encode = function(u, urisafe) {
  return !urisafe ? _encode(String(u)) : _encode(String(u)).replace(/[+\/]/g, function(m0) {
    return m0 == '+' ? '-' : '_';
  }).replace(/=/g, '');
};

const encodeURI = function(u) { return encode(u, true) };

// decoder stuff
const re_btou = /[\xC0-\xDF][\x80-\xBF]|[\xE0-\xEF][\x80-\xBF]{2}|[\xF0-\xF7][\x80-\xBF]{3}/g;
const cb_btou = function(cccc) {
  switch(cccc.length) {
    case 4:
      const cp = ((0x07 & cccc.charCodeAt(0)) << 18)
        |    ((0x3f & cccc.charCodeAt(1)) << 12)
        |    ((0x3f & cccc.charCodeAt(2)) <<  6)
        |     (0x3f & cccc.charCodeAt(3)),
        offset = cp - 0x10000;
      return (fromCharCode((offset  >>> 10) + 0xD800)
      + fromCharCode((offset & 0x3FF) + 0xDC00));
    case 3:
      return fromCharCode(
        ((0x0f & cccc.charCodeAt(0)) << 12)
        | ((0x3f & cccc.charCodeAt(1)) << 6)
        |  (0x3f & cccc.charCodeAt(2))
      );
    default:
      return fromCharCode(
        ((0x1f & cccc.charCodeAt(0)) << 6)
        |  (0x3f & cccc.charCodeAt(1))
      );
  }
};

const btou = function(b) {
  return b.replace(re_btou, cb_btou);
};

const _decode = function(a) {
  return btou(_atob(a));
};

const _atob = globalThis.atob ? function(a) {
  return globalThis.atob(a);
} : function(a){
  return a.replace(/[\s\S]{1,4}/g, cb_decode);
};

const decode = function(a) {
  return _decode(String(a).replace(/[-_]/g, function(m0) { return m0 == '-' ? '+' : '/' })
    .replace(/[^A-Za-z0-9\+\/]/g, ''));
};

export const Base64 = {
  VERSION: version,
  encode: encode,
  encodeURI: encodeURI,
  decode: decode
};

### transcode.worker.js

  let minSampleRate = 22050
  self.onmessage = function(e) {
    transcode.transToAudioData(e.data)
  }
  var transcode = {
    transToAudioData: function(audioDataStr, fromRate = 16000, toRate = 22050) {
      let outputS16 = transcode.base64ToS16(audioDataStr)
      let output = transcode.transS16ToF32(outputS16)
      output = transcode.transSamplingRate(output, fromRate, toRate)
      output = Array.from(output)
      self.postMessage({
        data: output, 
        rawAudioData: Array.from(outputS16)
      })
    },
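    // linear-interpolation resampler; the default rates are only fallbacks, the real ones are passed in by transToAudioData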
    transSamplingRate: function(data, fromRate = 44100, toRate = 16000) {
      var fitCount = Math.round(data.length * (toRate / fromRate))
      var newData = new Float32Array(fitCount)
      var springFactor = (data.length - 1) / (fitCount - 1)
      newData[0] = data[0]
      for (let i = 1; i < fitCount - 1; i++) {
        var tmp = i * springFactor
        var before = Math.floor(tmp).toFixed()
        var after = Math.ceil(tmp).toFixed()
        var atPoint = tmp - before
        newData[i] = data[before] + (data[after] - data[before]) * atPoint
      }
      newData[fitCount - 1] = data[data.length - 1]
      return newData
    },
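    // convert signed 16-bit PCM samples to Float32 values in [-1, 1]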
    transS16ToF32: function(input) {
      var tmpData = []
      for (let i = 0; i < input.length; i++) {
        var d = input[i] < 0 ? input[i] / 0x8000 : input[i] / 0x7fff
        tmpData.push(d)
      }
      return new Float32Array(tmpData)
    },
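    // decode a base64 string into an Int16Array of raw PCM samples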
    base64ToS16: function(base64AudioData) {
      base64AudioData = atob(base64AudioData)
      const outputArray = new Uint8Array(base64AudioData.length)
      for (let i = 0; i < base64AudioData.length; ++i) {
        outputArray[i] = base64AudioData.charCodeAt(i)
      }
      return new Int16Array(new DataView(outputArray.buffer).buffer)
    },
  }

### Using text-to-speech

<template>
  <div class="contert">
    <button @click="play">开始合成</button>
    <button @click="pause">停止播放</button>
  </div>
</template>

<script setup>
import { ref } from 'vue';
import TtsRecorder from "../utils/tts_xunfei/audio.js";

const text = ref("我是一个合成语音");
const ttsRecorder = new TtsRecorder();

const play = () => {
  ttsRecorder.setParams({
    text: text.value,
    speed: 50,
    voice: 50,
  });
  ttsRecorder.start();
};

const pause = () => {
  ttsRecorder.stop();
};
</script>

<style scoped>

</style>
