For a project requirement we needed to implement audio recording on the web. Two candidate approaches came up at first: embedding a recorder through an iframe, or using the HTML5 getUserMedia API. Since the recording feature did not have to support IE, we went straight for getUserMedia. The basic plan combined the official API documentation with a few solutions found online into something that fit the project. Because the recorder had to work on both the pad and the PC, we hit a few pitfalls along the way. What follows is a walkthrough of the process.
Step 1
The new API is navigator.mediaDevices.getUserMedia, which returns a promise, while the old API is navigator.getUserMedia, which takes callbacks. So the first step is a compatibility shim. The code is as follows:
// Older browsers may not implement mediaDevices at all, so start with an empty object
if (navigator.mediaDevices === undefined) {
  navigator.mediaDevices = {};
}

// Some browsers only partially support mediaDevices. We cannot simply assign getUserMedia
// onto the object, because that could overwrite an existing property. We only add it
// when the getUserMedia property is missing.
if (navigator.mediaDevices.getUserMedia === undefined) {
  navigator.mediaDevices.getUserMedia = function (constraints) {
    // First, grab the legacy getUserMedia if it exists
    let getUserMedia = navigator.getUserMedia ||
      navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia ||
      navigator.msGetUserMedia;

    // Some browsers do not implement it at all; return a rejected promise
    // so the caller still sees a uniform interface
    if (!getUserMedia) {
      return Promise.reject(new Error('getUserMedia is not implemented in this browser'));
    }

    // Otherwise, wrap the legacy navigator.getUserMedia in a promise
    return new Promise(function (resolve, reject) {
      getUserMedia.call(navigator, constraints, resolve, reject);
    });
  };
}

Step 2
The next piece is a wrapper found online called HZRecorder, which we adopted more or less as-is. Calling HZRecorder.get brings up the recording interface: you pass it a callback, it creates a new HZRecorder and invokes the callback with the instantiated object. The instance's methods then provide start, pause, stop, playback and so on.
var HZRecorder = function (stream, config) {
  config = config || {};
  config.sampleBits = config.sampleBits || 8;           // sample size: 8 or 16 bits
  config.sampleRate = config.sampleRate || (44100 / 6); // sample rate (1/6 of 44100)

  // Create an audio context
  var audioContext = window.AudioContext || window.webkitAudioContext;
  var context = new audioContext();

  // Feed the microphone stream into the context
  var audioInput = context.createMediaStreamSource(stream);

  // Volume (gain) node
  var volume = context.createGain();
  audioInput.connect(volume);

  // Buffer used to cache the sound; the second and third arguments of
  // createScriptProcessor mean two input channels and two output channels
  var bufferSize = 4096;
  var recorder = context.createScriptProcessor(bufferSize, 2, 2);

  var audioData = {
    size: 0,                              // length of the recording
    buffer: [],                           // recording cache
    inputSampleRate: context.sampleRate,  // input sample rate
    inputSampleBits: 16,                  // input sample size: 8 or 16 bits
    outputSampleRate: config.sampleRate,  // output sample rate
    oututSampleBits: config.sampleBits,   // output sample size: 8 or 16 bits
    input: function (data) {
      this.buffer.push(new Float32Array(data));
      this.size += data.length;
    },
    compress: function () { // merge and downsample
      // merge
      var data = new Float32Array(this.size);
      var offset = 0;
      for (var i = 0; i < this.buffer.length; i++) {
        data.set(this.buffer[i], offset);
        offset += this.buffer[i].length;
      }
      // downsample
      var compression = parseInt(this.inputSampleRate / this.outputSampleRate);
      var length = data.length / compression;
      var result = new Float32Array(length);
      var index = 0, j = 0;
      while (index < length) {
        result[index] = data[j];
        j += compression;
        index++;
      }
      return result;
    },
    encodeWAV: function () {
      var sampleRate = Math.min(this.inputSampleRate, this.outputSampleRate);
      var sampleBits = Math.min(this.inputSampleBits, this.oututSampleBits);
      var bytes = this.compress();
      var dataLength = bytes.length * (sampleBits / 8);
      var buffer = new ArrayBuffer(44 + dataLength);
      var data = new DataView(buffer);
      var channelCount = 1; // mono
      var offset = 0;
      var writeString = function (str) {
        for (var i = 0; i < str.length; i++) {
          data.setUint8(offset + i, str.charCodeAt(i));
        }
      };
      // resource exchange file identifier
      writeString('RIFF'); offset += 4;
      // total bytes from the next address to the end of the file, i.e. file size - 8
      data.setUint32(offset, 36 + dataLength, true); offset += 4;
      // WAV file flag
      writeString('WAVE'); offset += 4;
      // waveform format flag
      writeString('fmt '); offset += 4;
      // filter bytes, usually 0x10 = 16
      data.setUint32(offset, 16, true); offset += 4;
      // format category (PCM sample data)
      data.setUint16(offset, 1, true); offset += 2;
      // channel count
      data.setUint16(offset, channelCount, true); offset += 2;
      // sample rate, samples per second per channel
      data.setUint32(offset, sampleRate, true); offset += 4;
      // byte rate (average bytes per second): channels x sample rate x bits per sample / 8
      data.setUint32(offset, channelCount * sampleRate * (sampleBits / 8), true); offset += 4;
      // block align (bytes per sample frame): channels x bits per sample / 8
      data.setUint16(offset, channelCount * (sampleBits / 8), true); offset += 2;
      // bits per sample
      data.setUint16(offset, sampleBits, true); offset += 2;
      // data chunk identifier
      writeString('data'); offset += 4;
      // total size of the sample data, i.e. total size - 44
      data.setUint32(offset, dataLength, true); offset += 4;
      // write the sample data
      if (sampleBits === 8) {
        for (var i = 0; i < bytes.length; i++, offset++) {
          var s = Math.max(-1, Math.min(1, bytes[i]));
          var val = s < 0 ? s * 0x8000 : s * 0x7FFF;
          val = parseInt(255 / (65535 / (val + 32768)));
          data.setInt8(offset, val, true);
        }
      } else {
        for (var i = 0; i < bytes.length; i++, offset += 2) {
          var s = Math.max(-1, Math.min(1, bytes[i]));
          data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
        }
      }
      return new Blob([data], { type: 'audio/wav' });
    }
  };

  // start recording
  this.start = function () {
    audioInput.connect(recorder);
    recorder.connect(context.destination);
  };

  // stop
  this.stop = function () {
    recorder.disconnect();
  };

  // end
  this.end = function () {
    context.close();
  };

  // resume
  this.again = function () {
    recorder.connect(context.destination);
  };

  // get the audio file
  this.getBlob = function () {
    this.stop();
    return audioData.encodeWAV();
  };

  // playback
  this.play = function (audio) {
    audio.src = window.URL.createObjectURL(this.getBlob());
  };

  // upload
  this.upload = function (url, callback) {
    var fd = new FormData();
    fd.append('audioData', this.getBlob());
    var xhr = new XMLHttpRequest();
    if (callback) {
      xhr.upload.addEventListener('progress', function (e) { callback('uploading', e); }, false);
      xhr.addEventListener('load', function (e) { callback('ok', e); }, false);
      xhr.addEventListener('error', function (e) { callback('error', e); }, false);
      xhr.addEventListener('abort', function (e) { callback('cancel', e); }, false);
    }
    xhr.open('POST', url);
    xhr.send(fd);
  };

  // audio capture
  recorder.onaudioprocess = function (e) {
    audioData.input(e.inputBuffer.getChannelData(0));
    //record(e.inputBuffer.getChannelData(0));
  };
};

// throw an error
HZRecorder.throwError = function (message) {
  throw new function () { this.toString = function () { return message; }; };
};

// whether recording is supported
HZRecorder.canRecording = (navigator.getUserMedia != null);

// get a recorder
HZRecorder.get = function (callback, config) {
  if (callback) {
    navigator.mediaDevices
      .getUserMedia({ audio: true })
      .then(function (stream) {
        let rec = new HZRecorder(stream, config);
        callback(rec);
      })
      .catch(function (error) {
        HZRecorder.throwError('Cannot record. Please check the device status.');
      });
  }
};
window.HZRecorder = HZRecorder;

The above already covers most requirements; a minimal usage sketch follows.
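For context, here is a minimal usage sketch of the wrapper above. The #start/#stop buttons, the <audio id="player"> element and the upload endpoint are assumptions for illustration, not part of the original code.

// Minimal usage sketch (assumed markup: #start/#stop buttons and an <audio id="player"> element)
let recorder = null;

document.querySelector('#start').addEventListener('click', function () {
  // ask for the microphone (the browser shows a permission prompt) and start recording
  HZRecorder.get(function (rec) {
    recorder = rec;
    recorder.start();
  });
});

document.querySelector('#stop').addEventListener('click', function () {
  if (!recorder) return;
  recorder.stop();
  recorder.play(document.querySelector('#player'));    // playback via the <audio> element
  // recorder.upload('/api/audio', function (status, e) { console.log(status, e); }); // hypothetical endpoint
});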
However, we also had to make it work on the pad, and two problems there had to be solved: the recording format (the pad side needed mp3 rather than wav) and an error thrown by window.URL.createObjectURL when it is passed blob data. The following is how these two problems were solved.
Step 3
Below is my implementation for producing the recording as mp3 and for working around the error that window.URL.createObjectURL throws on the pad when given blob data.
1. Modify the audioData object code inside HZRecorder, and pull in lamejs.js, an mp3 encoder script published online. (A rough sketch of the encoder's basic flow is shown below, followed by the modified audioData object.)
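As a reference point, this is a rough sketch of how the mp3 encoder is driven: float samples are converted to 16-bit PCM, passed to encodeBuffer, and the returned mp3 chunks plus a final flush() are collected into a Blob. The constructor form shown here mirrors the wrapper used in this article; the exact API can differ between lamejs builds.

// Sketch only: assumes the same lamejs.js wrapper used below (new lamejs(), then lame.Mp3Encoder)
const lame = new lamejs();
const encoder = new lame.Mp3Encoder(1, 44100, 128); // mono, 44.1 kHz input, 128 kbps

const chunks = [];

// pcm is a Float32Array of samples in [-1, 1], e.g. from getChannelData(0)
function encodeChunk(pcm) {
  const samples = new Int16Array(pcm.length);
  for (let i = 0; i < pcm.length; i++) {
    const s = Math.max(-1, Math.min(1, pcm[i]));
    samples[i] = s < 0 ? s * 0x8000 : s * 0x7fff;   // float -> 16-bit PCM
  }
  const mp3buf = encoder.encodeBuffer(samples);      // returns encoded mp3 bytes (may be empty)
  if (mp3buf.length > 0) chunks.push(new Int8Array(mp3buf));
}

function finishEncoding() {
  const last = encoder.flush();                      // drain the encoder
  if (last.length > 0) chunks.push(new Int8Array(last));
  return new Blob(chunks, { type: 'audio/mp3' });
}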
const lame = new lamejs();
let audioData = {
  samplesMono: null,
  maxSamples: 1152,                     // mp3 frame size
  mp3Encoder: new lame.Mp3Encoder(1, context.sampleRate || 44100, config.bitRate || 128),
  dataBuffer: [],                       // encoded mp3 chunks
  size: 0,                              // length of the recording
  buffer: [],                           // recording cache
  inputSampleRate: context.sampleRate,  // input sample rate
  inputSampleBits: 16,                  // input sample size: 8 or 16 bits
  outputSampleRate: config.sampleRate,  // output sample rate
  oututSampleBits: config.sampleBits,   // output sample size: 8 or 16 bits
  convertBuffer: function (arrayBuffer) {
    let data = new Float32Array(arrayBuffer);
    let out = new Int16Array(arrayBuffer.length);
    this.floatTo16BitPCM(data, out);
    return out;
  },
  floatTo16BitPCM: function (input, output) {
    for (let i = 0; i < input.length; i++) {
      let s = Math.max(-1, Math.min(1, input[i]));
      output[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
    }
  },
  appendToBuffer: function (mp3Buf) {
    this.dataBuffer.push(new Int8Array(mp3Buf));
  },
  encode: function (arrayBuffer) {
    this.samplesMono = this.convertBuffer(arrayBuffer);
    let remaining = this.samplesMono.length;
    for (let i = 0; remaining >= 0; i += this.maxSamples) {
      let left = this.samplesMono.subarray(i, i + this.maxSamples);
      let mp3buf = this.mp3Encoder.encodeBuffer(left);
      this.appendToBuffer(mp3buf);
      remaining -= this.maxSamples;
    }
  },
  finish: function () {
    this.appendToBuffer(this.mp3Encoder.flush());
    return new Blob(this.dataBuffer, { type: 'audio/mp3' });
  },
  input: function (data) {
    this.buffer.push(new Float32Array(data));
    this.size += data.length;
  },
  compress: function () { // merge and downsample
    // merge
    let data = new Float32Array(this.size);
    let offset = 0;
    for (let i = 0; i < this.buffer.length; i++) {
      data.set(this.buffer[i], offset);
      offset += this.buffer[i].length;
    }
    // downsample
    let compression = parseInt(this.inputSampleRate / this.outputSampleRate, 10);
    let length = data.length / compression;
    let result = new Float32Array(length);
    let index = 0;
    let j = 0;
    while (index < length) {
      result[index] = data[j];
      j += compression;
      index++;
    }
    return result;
  },
  encodeWAV: function () {
    let sampleRate = Math.min(this.inputSampleRate, this.outputSampleRate);
    let sampleBits = Math.min(this.inputSampleBits, this.oututSampleBits);
    let bytes = this.compress();
    let dataLength = bytes.length * (sampleBits / 8);
    let buffer = new ArrayBuffer(44 + dataLength);
    let data = new DataView(buffer);
    let channelCount = 1; // mono
    let offset = 0;
    let writeString = function (str) {
      for (let i = 0; i < str.length; i++) {
        data.setUint8(offset + i, str.charCodeAt(i));
      }
    };
    // resource exchange file identifier
    writeString('RIFF'); offset += 4;
    // total bytes from the next address to the end of the file, i.e. file size - 8
    data.setUint32(offset, 36 + dataLength, true); offset += 4;
    // WAV file flag
    writeString('WAVE'); offset += 4;
    // waveform format flag
    writeString('fmt '); offset += 4;
    // filter bytes, usually 0x10 = 16
    data.setUint32(offset, 16, true); offset += 4;
    // format category (PCM sample data)
    data.setUint16(offset, 1, true); offset += 2;
    // channel count
    data.setUint16(offset, channelCount, true); offset += 2;
    // sample rate, samples per second per channel
    data.setUint32(offset, sampleRate, true); offset += 4;
    // byte rate (average bytes per second): channels x sample rate x bits per sample / 8
    data.setUint32(offset, channelCount * sampleRate * (sampleBits / 8), true); offset += 4;
    // block align (bytes per sample frame): channels x bits per sample / 8
    data.setUint16(offset, channelCount * (sampleBits / 8), true); offset += 2;
    // bits per sample
    data.setUint16(offset, sampleBits, true); offset += 2;
    // data chunk identifier
    writeString('data'); offset += 4;
    // total size of the sample data, i.e. total size - 44
    data.setUint32(offset, dataLength, true); offset += 4;
    // write the sample data
    if (sampleBits === 8) {
      for (let i = 0; i < bytes.length; i++, offset++) {
        const s = Math.max(-1, Math.min(1, bytes[i]));
        let val = s < 0 ? s * 0x8000 : s * 0x7fff;
        val = parseInt(255 / (65535 / (val + 32768)), 10);
        data.setInt8(offset, val, true);
      }
    } else {
      for (let i = 0; i < bytes.length; i++, offset += 2) {
        const s = Math.max(-1, Math.min(1, bytes[i]));
        data.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
      }
    }
    return new Blob([data], { type: 'audio/wav' });
  }
};

2. Modify HZRecorder's audio capture callback.
// audio capture
recorder.onaudioprocess = function (e) {
  audioData.encode(e.inputBuffer.getChannelData(0));
};

3. HZRecorder's getBlob method.
this.getBlob = function () {
  this.stop();
  return audioData.finish();
};

4. HZRecorder's play method: convert the blob to a base64 data URL instead of an object URL.
this.play = function (func) {
  readBlobAsDataURL(this.getBlob(), func);
};

function readBlobAsDataURL(data, callback) {
  let fileReader = new FileReader();
  fileReader.onload = function (e) {
    callback(e.target.result);
  };
  fileReader.readAsDataURL(data);
}

With these changes, both of the pad problems above are solved.
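For illustration, a caller could use the reworked play method like this. This is only a sketch: the <audio> element is assumed markup, and recorder is assumed to be an HZRecorder instance obtained from HZRecorder.get as in the earlier usage sketch. Reading the blob through FileReader as a data URL is what sidesteps the createObjectURL problem on the pad.

// Hypothetical playback call: the data URL produced by FileReader is handed to an <audio> element
const audioEl = document.querySelector('#player'); // assumed element

recorder.play(function (dataUrl) {
  audioEl.src = dataUrl;   // e.g. "data:audio/mp3;base64,..."
  audioEl.play();
});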
Step 4
This step covers the animation shown while recording. Our requirement for the effect was:
based on the current input volume, dynamically expand a set of arcs.
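As background for the code that follows: an AnalyserNode attached to the microphone source exposes the current frequency data, and a rough volume level can be derived from it. Below is a minimal sketch of that idea which averages all byte frequency bins; this averaging approach is my own illustration, not the project's code, and it assumes the context and audioInput variables from the HZRecorder constructor above.

// Sketch: derive a rough 0-255 volume level from an AnalyserNode
// (assumes `context` and `audioInput` from the HZRecorder constructor above)
const meterAnalyser = context.createAnalyser();
meterAnalyser.fftSize = 256;                 // a small FFT is enough for a volume meter
audioInput.connect(meterAnalyser);

const bins = new Uint8Array(meterAnalyser.frequencyBinCount);

function currentVolume() {
  meterAnalyser.getByteFrequencyData(bins);  // fill `bins` with the current spectrum
  let sum = 0;
  for (let i = 0; i < bins.length; i++) sum += bins[i];
  return sum / bins.length;                  // average magnitude, roughly tracks loudness
}

The actual animation code used in the project follows; it reads a single frequency bin as the volume value and redraws six arcs on a canvas every frame.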
// Create an analyser node to read the audio frequency data
const analyser = context.createAnalyser();
audioInput.connect(analyser);
const inputAnalyser = new Uint8Array(1);

const wrapEle = $this.refs['wrap'];  // the canvas element for the animation
let ctx = wrapEle.getContext('2d');
const width = wrapEle.width;
const height = wrapEle.height;
const center = {
  x: width / 2,
  y: height / 2
};

function drawArc(ctx, color, x, y, radius, beginAngle, endAngle) {
  ctx.beginPath();
  ctx.lineWidth = 1;
  ctx.strokeStyle = color;
  ctx.arc(x, y, radius, (Math.PI * beginAngle) / 180, (Math.PI * endAngle) / 180);
  ctx.stroke();
}

(function drawSpectrum() {
  analyser.getByteFrequencyData(inputAnalyser); // get the frequency data
  ctx.clearRect(0, 0, width, height);
  // draw the arcs
  for (let i = 0; i < 1; i++) {
    let value = inputAnalyser[i] / 3; // <=== the volume value
    let colors = [];
    if (value <= 16) {
      colors = ['#f5A631', '#f5A631', '#e4e4e4', '#e4e4e4', '#e4e4e4', '#e4e4e4'];
    } else if (value <= 32) {
      colors = ['#f5A631', '#f5A631', '#f5A631', '#f5A631', '#e4e4e4', '#e4e4e4'];
    } else {
      colors = ['#f5A631', '#f5A631', '#f5A631', '#f5A631', '#f5A631', '#f5A631'];
    }
    drawArc(ctx, colors[0], center.x, center.y, 52 + 16, -30, 30);
    drawArc(ctx, colors[1], center.x, center.y, 52 + 16, 150, 210);
    drawArc(ctx, colors[2], center.x, center.y, 52 + 32, -22.5, 22.5);
    drawArc(ctx, colors[3], center.x, center.y, 52 + 32, 157.5, 202.5);
    drawArc(ctx, colors[4], center.x, center.y, 52 + 48, -13, 13);
    drawArc(ctx, colors[5], center.x, center.y, 52 + 48, 167, 193);
  }
  // request the next frame
  requestAnimationFrame(drawSpectrum);
})();

With that, a complete HTML5 recording solution is in place. If anything is missing or seems off, feel free to leave a comment.
ps: for lamejs, refer to this github.