Recently I've been building a package-pickup (errand) module. The requirement is to give the user feedback once the courier has collected the package, in the form of a voice announcement.
Since the prompt text is fixed, the idea is simply to take that fixed text and convert it to speech (TTS) for playback.
The tricky part is how the client finds out that the package has arrived.
The flow used here is: the courier confirms pickup -> the server updates the order status -> the client is notified -> the client plays the voice announcement.
But note: the client has to stay running the whole time. It can be in the background or on the lock screen, but it must be running. When the client starts up, set a timer that periodically checks the state of a flag on the server.
Once the server reports that the flag has flipped, the client plays the announcement and resets the flag; a sketch of that polling loop follows.
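A minimal sketch of the polling loop, assuming hypothetical endpoint URLs (/api/order/voice-status and its reset counterpart) and an orderId parameter that are not part of the original project; Voice is the plugin shown further below.

// Polling sketch; endpoint names and orderId are illustrative assumptions.
import Voice from '@/QS-baiduyy/QS-baiduyy.js';

let pollTimer = null;

export function startVoicePolling(orderId) {
    // Check the server-side flag every few seconds while the app stays alive.
    pollTimer = setInterval(() => {
        uni.request({
            url: 'https://example.com/api/order/voice-status', // hypothetical endpoint
            data: { orderId },
            success: (res) => {
                if (res.data && res.data.needVoice) {
                    // The courier has confirmed pickup: announce it, then reset the flag.
                    Voice('你好,您的快遞已到達,請下樓取件');
                    uni.request({
                        url: 'https://example.com/api/order/voice-status/reset', // hypothetical endpoint
                        method: 'POST',
                        data: { orderId }
                    });
                }
            }
        });
    }, 5000);
}

export function stopVoicePolling() {
    if (pollTimer) {
        clearInterval(pollTimer);
        pollTimer = null;
    }
}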
The text-to-speech code is below. It was written by a developer on the uni-app plugin marketplace (you can find it by searching there); I am only recording it here for reference.
const audioTeam = [];              // queue of pending announcements
let audioStartSwitch = false;      // true while a clip is playing
const getAudioUrl = 'https://tsn.baidu.com/text2audio';

/**
 * Call the Baidu speech-synthesis API from the client.
 * Token endpoint: https://openapi.baidu.com/oauth/2.0/token?grant_type=client_credentials&client_id=uFYiXWMCiYvx68V4EVyCGeL8j4GAzXD5&client_secret=897Mm2qCj7bC1eHYVDxaWrO38FscTOHD
 */
function getBDVoicToken() {
    return new Promise((rs, rj) => {
        console.log('About to request the TTS token');
        uni.request({
            // Strongly recommended: have the backend call this endpoint and manage token expiry,
            // otherwise the frontend refreshes the token on every call.
            // The key/secret here are only for the plugin's test preview; replace them with your own.
            url: 'https://openapi.baidu.com/oauth/2.0/token',
            method: 'POST', // POST is recommended
            // data: 'grant_type=client_credentials&client_id=nm6Os9qqOacgxXjKv8PIp45H&client_secret=BXHhGIpNU7Wi3GDYUt0AGY5cWbWklrov',
            data: 'grant_type=client_credentials&client_id=jtwoB9xzRnv3qltcfqL0pk1t&client_secret=A16UKHBKVeAh68kuGGSPqnemCVyPDmgb',
            header: {
                "content-type": "application/x-www-form-urlencoded"
            },
            success: (res) => {
                console.log('Request succeeded');
                rs(res);
            },
            fail: (err) => {
                console.log('Request failed');
                rj(err);
            }
        })
    })
}

// objs: the text to synthesize, or an options object
export default function openVoice(objs) {
    let lineUp = false;
    let returnAudio = false;
    if (typeof(objs) !== 'string') {
        if (objs && objs.lineUp === true) {
            lineUp = true;
        }
        if (objs && objs.returnAudio === true) {
            returnAudio = true;
        }
    }
    if (returnAudio) {
        // Return a Promise that resolves with the audio context instead of playing directly.
        return new Promise((resolve, reject) => {
            openVoiceFc(objs, returnAudio).then(res => {
                resolve(res);
            }).catch(err => {
                reject(err)
            });
        })
    }
    if (!audioStartSwitch || lineUp) {
        audioStartSwitch = true;
        openVoiceFc(objs);
    } else {
        // Something is already playing: queue this announcement.
        audioTeam.push(objs);
    }
}

function openVoiceFc(objs, returnAudio) {
    console.log('About to fetch the TTS token');
    if (returnAudio) {
        return new Promise((resolve, reject) => {
            getBDVoicToken().then(res => {
                console.log('Token request succeeded');
                if (res.data && res.data.access_token) {
                    console.log('token: ' + res.data.access_token);
                    resolve(tts(objs, res.data.access_token, returnAudio));
                } else {
                    console.log('Token response was empty');
                    reject('Token response was empty');
                }
            }).catch(err => {
                console.log('Token request failed');
                reject(err || 'Token request failed');
            })
        })
    } else {
        getBDVoicToken().then(res => {
            console.log('Token request succeeded');
            if (res.data && res.data.access_token) {
                console.log('token: ' + res.data.access_token);
                tts(objs, res.data.access_token);
            } else {
                console.log('Token response was empty');
            }
        }).catch(err => {
            console.log('Token request failed');
        })
    }
}

function tts(objs, tok, returnAudio) {
    if (typeof(objs) == 'string') objs = { voiceSet: { tex: objs } };
    const data = {
        tok,
        cuid: tok, // cuid just needs to be a unique client id; the plugin reuses the token here
        ctp: 1,
        lan: 'zh',
        ...objs.voiceSet
    }
    if (returnAudio) return btts(data, objs.audioSet, objs.audioCallback, objs.lineUp, returnAudio);
    btts(data, objs.audioSet, objs.audioCallback, objs.lineUp, returnAudio);
}

function setAudioSet(options, audio) {
    if (options) {
        audio.volume = options.volume || 1;
        audio.startTime = options.startTime || 0;
        audio.loop = options.loop || false;
        audio.obeyMuteSwitch = options.obeyMuteSwitch && typeof(options.obeyMuteSwitch) == 'boolean' ?
            options.obeyMuteSwitch : true; // supported on WeChat, Baidu, and Toutiao mini-programs
    }
}

function btts(param, options, audioCallback, lineUp, returnAudio) {
    let audio = uni.createInnerAudioContext();
    setAudioSet(options, audio);
    // Serialize the parameters into a query string (Baidu expects the text to be URL-encoded twice).
    let fd = [];
    for (let k in param) {
        fd.push(k + '=' + encodeURIComponent(encodeURIComponent(param[k])));
    }
    audio.src = `${getAudioUrl}?${fd.join('&')}`;
    if (returnAudio) {
        audio.onEnded(() => {
            console.log('Audio playback finished');
            console.log('Destroying audio instance');
            audio.destroy(); // destroy the audio instance
            audio = null;
        })
        audio.onError((e) => {
            if (audioCallback && audioCallback.onError && typeof(audioCallback.onError) == 'function') audioCallback.onError(e);
            console.log('Audio playback error: ' + JSON.stringify(e));
            console.log('Destroying audio instance');
            audio.destroy(); // destroy the audio instance
            audio = null;
        })
        // Hand the audio context back to the caller without starting playback.
        return audio;
    }
    audio.onPlay(() => {
        console.log('Audio playback started');
        if (audioCallback && audioCallback.onPlay && typeof(audioCallback.onPlay) == 'function') audioCallback.onPlay();
    })
    audio.onPause(() => {
        if (audioCallback && audioCallback.onPause && typeof(audioCallback.onPause) == 'function') audioCallback.onPause();
    })
    audio.onWaiting(() => {
        if (audioCallback && audioCallback.onWaiting && typeof(audioCallback.onWaiting) == 'function') audioCallback.onWaiting();
    })
    audio.onStop(() => {
        if (audioCallback && audioCallback.onStop && typeof(audioCallback.onStop) == 'function') audioCallback.onStop();
    })
    audio.onTimeUpdate(() => {
        if (audioCallback && audioCallback.onTimeUpdate && typeof(audioCallback.onTimeUpdate) == 'function') audioCallback.onTimeUpdate();
    })
    audio.onSeeking(() => {
        if (audioCallback && audioCallback.onSeeking && typeof(audioCallback.onSeeking) == 'function') audioCallback.onSeeking();
    })
    audio.onSeeked(() => {
        if (audioCallback && audioCallback.onSeeked && typeof(audioCallback.onSeeked) == 'function') audioCallback.onSeeked();
    })
    audio.onEnded(() => {
        console.log('Audio playback finished');
        console.log('Destroying audio instance');
        audio.destroy(); // destroy the audio instance
        audio = null;
        if (audioCallback && audioCallback.onEnded && typeof(audioCallback.onEnded) == 'function') audioCallback.onEnded();
        if (lineUp !== false) {
            if (audioTeam.length > 0) {
                console.log('Playing next item in queue');
                openVoiceFc(audioTeam[0]);
                audioTeam.splice(0, 1);
            } else {
                console.log('Queue is empty');
                audioStartSwitch = false;
            }
        }
    })
    audio.onError((e) => {
        if (audioCallback && audioCallback.onError && typeof(audioCallback.onError) == 'function') audioCallback.onError(e);
        console.log('Audio playback error: ' + JSON.stringify(e));
        console.log('Destroying audio instance');
        audio.destroy(); // destroy the audio instance
        audio = null;
    })
    audio.play();
}
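Judging from the plugin source above, openVoice (imported as Voice below) accepts either a plain string or an options object: voiceSet is spread into the text2audio request, audioSet configures the InnerAudioContext, and audioCallback hooks the playback events. A second call made while audio is still playing is pushed onto audioTeam and played afterwards. A small usage sketch; the spd/vol values are illustrative Baidu TTS request parameters, not from the original post:

import Voice from '@/QS-baiduyy/QS-baiduyy.js';

// Simplest form: pass the text directly.
Voice('你好,您的快遞已到達,請下樓取件');

// Object form, based on how openVoice/tts/btts read their arguments above.
Voice({
    voiceSet: { tex: '你好,您的快遞已到達,請下樓取件', spd: 5, vol: 9 }, // Baidu TTS speed/volume (illustrative)
    audioSet: { volume: 1, loop: false },                                  // InnerAudioContext settings
    audioCallback: {
        onEnded: () => console.log('announcement finished')
    }
});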
Then call it from a page:
<script>
import Voice from '@/QS-baiduyy/QS-baiduyy.js';

export default {
    data() {
        return {
            title: 'Hello'
        }
    },
    onLoad() {
        this.openVoice();
    },
    methods: {
        openVoice() {
            setInterval(function() {
                console.log('About to play the voice prompt');
                Voice('你好,您的快遞已到達,請下樓取件');
                Voice('謝謝!');
            }, 3000)
        }
    }
}
</script>
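One caveat: this demo announces unconditionally every 3 seconds, which is only useful for verifying playback, and it never keeps the interval id. A possible adjustment, assuming a hypothetical voiceTimer data property and a status check like the polling sketch earlier:

<script>
import Voice from '@/QS-baiduyy/QS-baiduyy.js';

export default {
    data() {
        return {
            voiceTimer: null // hypothetical: keep the interval id so it can be cleared
        }
    },
    onLoad() {
        this.voiceTimer = setInterval(() => {
            // In real use, check the server-side flag here (see the polling sketch above)
            // and only call Voice(...) when the flag says the package has arrived.
            this.checkPickupFlag();
        }, 3000);
    },
    onUnload() {
        clearInterval(this.voiceTimer); // stop polling when the page is destroyed
    },
    methods: {
        checkPickupFlag() {
            // Placeholder: query the server, then announce and reset the flag.
            Voice('你好,您的快遞已到達,請下樓取件');
        }
    }
}
</script>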