
Recording audio in a web page (works in Chrome and Firefox)

2017-10-16 20:50
The code consists of a Spring MVC controller (VoiceController), the JSP page test.jsp, and three scripts: main.js, recorder.js, and recorderWorker.js.

The main code of VoiceController:
@RequestMapping(value = "/voice2.do", method = RequestMethod.GET)
public ModelAndView voice(HttpServletResponse response) {
    ModelAndView model = new ModelAndView("voice/test2");
    model.addObject("name", "caoyong");
    return model;
}

@RequestMapping(value = "/upload")
public void upload(HttpServletRequest request, HttpServletResponse response) throws IOException {
    try {
        // Save the multipart field "data" from the request to a file under D:/files/
        File localFile = ExcelUtil.createLocalFile(request, "data", "D:/files/");
        log.info(localFile);
    } catch (Exception e) {
        log.error(e);
    }

    ResponseJsonUtils.responseJson("{\"result\":\"success\"}", response);
}
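Once deployed, the upload endpoint can be smoke-tested straight from the browser console. A minimal sketch, assuming the application runs under the /lenovobot context path used by main.js below:

// Post a dummy WAV blob to the upload endpoint and log the JSON reply.
var fd = new FormData();
fd.append('fname', 'test.wav');
fd.append('data', new Blob([new Uint8Array(44)], { type: 'audio/wav' }));
fetch('/lenovobot/upload.do', { method: 'POST', body: fd })
    .then(function (res) { return res.json(); })
    .then(function (obj) { console.log(obj); })      // expect {"result":"success"}
    .catch(function (err) { console.error(err); });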
test.jsp (the voice/test2 view returned by the controller) is as follows:
<%@ page language="java" contentType="text/html; charset=UTF-8"
pageEncoding="UTF-8"%>
<%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core"%>
<%
String path = request.getContextPath();
String basePath = request.getScheme() + "://" + request.getServerName() + ":" + request.getServerPort()
+ path + "/";
%>
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width,initial-scale=1">
<title>Audio Recorder</title>
<script type="text/javascript" src="${pageContext.request.contextPath}/source/js/jquery/jquery-1.10.2.js"></script>
<script type="text/javascript" src="${pageContext.request.contextPath}/source/js/voice/recorder.js"></script>
<script type="text/javascript" src="${pageContext.request.contextPath}/source/js/voice/main.js"></script>
<style>
    html { overflow: hidden; }
    body {
        font: 14pt Arial, sans-serif;
        background: lightgrey;
        display: flex;
        flex-direction: column;
        height: 100vh;
        width: 100%;
        margin: 0;
    }
    canvas {
        display: inline-block;
        background: #202020;
        width: 95%;
        height: 45%;
        box-shadow: 0px 0px 10px blue;
    }
    #controls {
        display: flex;
        flex-direction: row;
        align-items: center;
        justify-content: space-around;
        height: 20%;
        width: 100%;
    }
    #record { height: 15vh; }
    #record.recording {
        background: red;
        background: -webkit-radial-gradient(center, ellipse cover, #ff0000 0%, lightgrey 75%, lightgrey 100%, #7db9e8 100%);
        background: -moz-radial-gradient(center, ellipse cover, #ff0000 0%, lightgrey 75%, lightgrey 100%, #7db9e8 100%);
        background: radial-gradient(ellipse at center, #ff0000 0%, lightgrey 75%, lightgrey 100%, #7db9e8 100%);
    }
    #save, #save img { height: 10vh; }
    #save { opacity: 0.25; }
    #save[download] { opacity: 1; }
    #viz {
        height: 80%;
        width: 100%;
        display: flex;
        flex-direction: column;
        justify-content: space-around;
        align-items: center;
    }
    @media (orientation: landscape) {
        body { flex-direction: row; }
        #controls { flex-direction: column; height: 100%; width: 10%; }
        #viz { height: 100%; width: 90%; }
    }
</style>
</head>
<body>
<div id="viz">
<canvas id="analyser" width="1024" height="500"></canvas>
<canvas id="wavedisplay" width="1024" height="500"></canvas>
</div>
<div id="controls">
<img id="record" src="${pageContext.request.contextPath}/source/images/voice/mic128.png" onclick="toggleRecording(this);">
<a id="save" href="#"><img src="${pageContext.request.contextPath}/source/images/voice/save.svg"></a>
</div>
</body>
</html>
main.js is as follows:
window.AudioContext = window.AudioContext || window.webkitAudioContext;

var audioContext = new AudioContext();
var audioInput = null,
    realAudioInput = null,
    inputPoint = null,
    audioRecorder = null,
    analyserNode = null,
    zeroGain = null;
var rafID = null;
var analyserContext = null;
var canvasWidth, canvasHeight;
var recIndex = 0;

/* TODO:

- offer mono option
- "Monitor input" switch
*/

function saveAudio() {
    audioRecorder.exportWAV( doneEncoding );
    // could get mono instead by saying
    // audioRecorder.exportMonoWAV( doneEncoding );
}

function gotBuffers( buffers ) {
    var canvas = document.getElementById( "wavedisplay" );

    drawBuffer( canvas.width, canvas.height, canvas.getContext('2d'), buffers[0] );

    // the ONLY time gotBuffers is called is right after a new recording is completed -
    // so here's where we should set up the download.
    audioRecorder.exportWAV( doneEncoding );
}

function doneEncoding( blob ) {
    audioRecorder.upload( blob, "myRecording" + ((recIndex<10)?"0":"") + recIndex + ".wav" );
    // Recorder.setupDownload( blob, "myRecording" + ((recIndex<10)?"0":"") + recIndex + ".wav" );
    recIndex++;
}

function toggleRecording( e ) {
    if (e.classList.contains("recording")) {
        // stop recording
        audioRecorder.stop();
        e.classList.remove("recording");
        audioRecorder.getBuffers( gotBuffers );
    } else {
        // start recording
        if (!audioRecorder)
            return;
        e.classList.add("recording");
        audioRecorder.clear();
        audioRecorder.record();
    }
}

function convertToMono( input ) {
    var splitter = audioContext.createChannelSplitter(2);
    var merger = audioContext.createChannelMerger(2);

    input.connect( splitter );
    splitter.connect( merger, 0, 0 );
    splitter.connect( merger, 0, 1 );
    return merger;
}

function cancelAnalyserUpdates() {
    window.cancelAnimationFrame( rafID );
    rafID = null;
}

function updateAnalysers(time) {
    if (!analyserContext) {
        var canvas = document.getElementById("analyser");
        canvasWidth = canvas.width;
        canvasHeight = canvas.height;
        analyserContext = canvas.getContext('2d');
    }

    // analyzer draw code here
    {
        var SPACING = 3;
        var BAR_WIDTH = 1;
        var numBars = Math.round(canvasWidth / SPACING);
        var freqByteData = new Uint8Array(analyserNode.frequencyBinCount);

        analyserNode.getByteFrequencyData(freqByteData);

        analyserContext.clearRect(0, 0, canvasWidth, canvasHeight);
        analyserContext.fillStyle = '#F6D565';
        analyserContext.lineCap = 'round';
        var multiplier = analyserNode.frequencyBinCount / numBars;

        // Draw rectangle for each frequency bin.
        for (var i = 0; i < numBars; ++i) {
            var magnitude = 0;
            var offset = Math.floor( i * multiplier );
            // gotta sum/average the block, or we miss narrow-bandwidth spikes
            for (var j = 0; j < multiplier; j++)
                magnitude += freqByteData[offset + j];
            magnitude = magnitude / multiplier;
            analyserContext.fillStyle = "hsl( " + Math.round((i*360)/numBars) + ", 100%, 50%)";
            analyserContext.fillRect(i * SPACING, canvasHeight, BAR_WIDTH, -magnitude);
        }
    }

    rafID = window.requestAnimationFrame( updateAnalysers );
}

function toggleMono() {
    if (audioInput != realAudioInput) {
        audioInput.disconnect();
        realAudioInput.disconnect();
        audioInput = realAudioInput;
    } else {
        realAudioInput.disconnect();
        audioInput = convertToMono( realAudioInput );
    }

    audioInput.connect(inputPoint);
}

function gotStream(stream) {
    inputPoint = audioContext.createGain();

    // Create an AudioNode from the stream.
    realAudioInput = audioContext.createMediaStreamSource(stream);
    audioInput = realAudioInput;
    audioInput.connect(inputPoint);

    // audioInput = convertToMono( input );

    analyserNode = audioContext.createAnalyser();
    analyserNode.fftSize = 2048;
    inputPoint.connect(analyserNode);

    var config = {
        "workerPath" : "source/js/voice/recorderWorker.js",
        "uploadUrl" : "/lenovobot/upload.do"
    };
    audioRecorder = new Recorder(inputPoint, config);

    // A muted gain node keeps the graph connected to a destination
    // without playing the microphone input back to the speakers.
    zeroGain = audioContext.createGain();
    zeroGain.gain.value = 0.0;
    inputPoint.connect(zeroGain);
    zeroGain.connect(audioContext.destination);
    updateAnalysers();
}

function initAudio() {
    if (!navigator.getUserMedia)
        navigator.getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
    // requestAnimationFrame/cancelAnimationFrame live on window, not navigator
    if (!window.cancelAnimationFrame)
        window.cancelAnimationFrame = window.webkitCancelAnimationFrame || window.mozCancelAnimationFrame;
    if (!window.requestAnimationFrame)
        window.requestAnimationFrame = window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame;

    navigator.getUserMedia(
        {
            "audio": {
                "mandatory": {
                    "googEchoCancellation": "false",
                    "googAutoGainControl": "false",
                    "googNoiseSuppression": "false",
                    "googHighpassFilter": "false"
                },
                "optional": []
            }
        }, gotStream, function(e) {
            alert('Error getting audio: ' + e.name);
        });
}

function drawBuffer( width, height, context, data ) {
    var step = Math.ceil( data.length / width );
    var amp = height / 2;
    context.fillStyle = "silver";
    context.clearRect(0, 0, width, height);
    for (var i = 0; i < width; i++) {
        var min = 1.0;
        var max = -1.0;
        for (var j = 0; j < step; j++) {
            var datum = data[(i*step)+j];
            if (datum < min)
                min = datum;
            if (datum > max)
                max = datum;
        }
        context.fillRect(i, (1+min)*amp, 1, Math.max(1, (max-min)*amp));
    }
}

window.addEventListener('load', initAudio );
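
Note that navigator.getUserMedia is deprecated: current Chrome and Firefox expose the promise-based navigator.mediaDevices.getUserMedia instead, and Chrome only grants microphone access on secure origins (HTTPS or localhost). A sketch of an equivalent entry point built on the modern API (an adaptation, not part of the original demo):

// Modern replacement for initAudio(); reuses the gotStream handler above.
function initAudioModern() {
    if (!window.isSecureContext) {
        alert('Microphone access requires HTTPS or localhost.');
        return;
    }
    navigator.mediaDevices.getUserMedia({
        audio: {
            echoCancellation: false,
            autoGainControl: false,
            noiseSuppression: false
        }
    })
    .then(gotStream)
    .catch(function (e) { alert('Error getting audio: ' + e.name); });
}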
recorder.js is as follows:
var Recorder = function(source, cfg){
    var config = cfg || {};
    var bufferLen = config.bufferLen || 4096;
    this.context = source.context;
    if (!this.context.createScriptProcessor){
        this.node = this.context.createJavaScriptNode(bufferLen, 2, 2);
    } else {
        this.node = this.context.createScriptProcessor(bufferLen, 2, 2);
    }

    var worker = new Worker(config.workerPath);
    worker.postMessage({
        command: 'init',
        config: {
            sampleRate: this.context.sampleRate
        }
    });
    var recording = false,
        currCallback;

    this.node.onaudioprocess = function(e){
        if (!recording) return;
        worker.postMessage({
            command: 'record',
            buffer: [
                e.inputBuffer.getChannelData(0),
                e.inputBuffer.getChannelData(1)
            ]
        });
    }

    this.configure = function(cfg){
        for (var prop in cfg){
            if (cfg.hasOwnProperty(prop)){
                config[prop] = cfg[prop];
            }
        }
    }

    this.record = function(){
        recording = true;
    }

    this.stop = function(){
        recording = false;
    }

    this.clear = function(){
        worker.postMessage({ command: 'clear' });
    }

    this.getBuffers = function(cb) {
        currCallback = cb || config.callback;
        worker.postMessage({ command: 'getBuffers' })
    }

    this.exportWAV = function(cb, type){
        currCallback = cb || config.callback;
        type = type || config.type || 'audio/wav';
        if (!currCallback) throw new Error('Callback not set');
        worker.postMessage({
            command: 'exportWAV',
            type: type
        });
    }

    this.exportMonoWAV = function(cb, type){
        currCallback = cb || config.callback;
        type = type || config.type || 'audio/wav';
        if (!currCallback) throw new Error('Callback not set');
        worker.postMessage({
            command: 'exportMonoWAV',
            type: type
        });
    }

    worker.onmessage = function(e){
        var blob = e.data;
        currCallback(blob);
    }

    source.connect(this.node);
    this.node.connect(this.context.destination); // if the script node is not connected to an output the "onaudioprocess" event is not triggered in chrome.

    this.upload = function(soundBlob, filename){
        var fd = new FormData();
        fd.append('fname', filename);
        fd.append('data', soundBlob);
        var options = {
            url : config.uploadUrl,
            type : 'post',
            data : fd,
            processData : false,
            contentType : false,
            dataType : 'json',
            success : function(obj) {
                console.log(obj);
            },
            error : function(jqXHR, textStatus, errorThrown) {
                alert(textStatus + "---" + errorThrown);
            }
        };
        $.ajax(options);
    }

    this.setupDownload = function(blob, filename){
        var url = (window.URL || window.webkitURL).createObjectURL(blob);
        var link = document.getElementById("save");
        link.href = url;
        link.download = filename || 'output.wav';
    }
};
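
The upload helper above relies on jQuery's $.ajax (jQuery is already included by the JSP). If you would rather avoid that dependency, an equivalent upload can be written against the Fetch API; a minimal sketch, posting the same fname/data fields the controller expects:

// Standalone upload without jQuery; pass the blob produced by exportWAV.
function uploadBlob(uploadUrl, soundBlob, filename) {
    var fd = new FormData();
    fd.append('fname', filename);
    fd.append('data', soundBlob);
    fetch(uploadUrl, { method: 'POST', body: fd })
        .then(function (res) { return res.json(); })
        .then(function (obj) { console.log(obj); })
        .catch(function (err) { alert('upload failed: ' + err); });
}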

recorderWorker.js is as follows:
/*License (MIT)

Copyright © 2013 Matt Diamond

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of
the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/

var recLength = 0,
recBuffersL = [],
recBuffersR = [],
sampleRate;

this.onmessage = function(e){
    switch(e.data.command){
        case 'init':
            init(e.data.config);
            break;
        case 'record':
            record(e.data.buffer);
            break;
        case 'exportWAV':
            exportWAV(e.data.type);
            break;
        case 'exportMonoWAV':
            exportMonoWAV(e.data.type);
            break;
        case 'getBuffers':
            getBuffers();
            break;
        case 'clear':
            clear();
            break;
    }
};

function init(config){
    sampleRate = config.sampleRate;
}

function record(inputBuffer){
    recBuffersL.push(inputBuffer[0]);
    recBuffersR.push(inputBuffer[1]);
    recLength += inputBuffer[0].length;
}

function exportWAV(type){
    var bufferL = mergeBuffers(recBuffersL, recLength);
    var bufferR = mergeBuffers(recBuffersR, recLength);
    var interleaved = interleave(bufferL, bufferR);
    var dataview = encodeWAV(interleaved);
    var audioBlob = new Blob([dataview], { type: type });

    this.postMessage(audioBlob);
}

function exportMonoWAV(type){
    var bufferL = mergeBuffers(recBuffersL, recLength);
    var dataview = encodeWAV(bufferL, true);
    var audioBlob = new Blob([dataview], { type: type });

    this.postMessage(audioBlob);
}

function getBuffers() {
    var buffers = [];
    buffers.push( mergeBuffers(recBuffersL, recLength) );
    buffers.push( mergeBuffers(recBuffersR, recLength) );
    this.postMessage(buffers);
}

function clear(){
    recLength = 0;
    recBuffersL = [];
    recBuffersR = [];
}

function mergeBuffers(recBuffers, recLength){
    var result = new Float32Array(recLength);
    var offset = 0;
    for (var i = 0; i < recBuffers.length; i++){
        result.set(recBuffers[i], offset);
        offset += recBuffers[i].length;
    }
    return result;
}

function interleave(inputL, inputR){
    var length = inputL.length + inputR.length;
    var result = new Float32Array(length);

    var index = 0,
        inputIndex = 0;

    while (index < length){
        result[index++] = inputL[inputIndex];
        result[index++] = inputR[inputIndex];
        inputIndex++;
    }
    return result;
}

function floatTo16BitPCM(output, offset, input){
    for (var i = 0; i < input.length; i++, offset += 2){
        // clamp to [-1, 1], then scale to signed 16-bit range
        var s = Math.max(-1, Math.min(1, input[i]));
        output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
    }
}

function writeString(view, offset, string){
    for (var i = 0; i < string.length; i++){
        view.setUint8(offset + i, string.charCodeAt(i));
    }
}

function encodeWAV(samples, mono){
    var buffer = new ArrayBuffer(44 + samples.length * 2);
    var view = new DataView(buffer);
    var numChannels = mono ? 1 : 2;

    /* RIFF identifier */
    writeString(view, 0, 'RIFF');
    /* RIFF chunk length (file length minus the 8 bytes already written) */
    view.setUint32(4, 36 + samples.length * 2, true);
    /* RIFF type */
    writeString(view, 8, 'WAVE');
    /* format chunk identifier */
    writeString(view, 12, 'fmt ');
    /* format chunk length */
    view.setUint32(16, 16, true);
    /* sample format (raw PCM) */
    view.setUint16(20, 1, true);
    /* channel count */
    view.setUint16(22, numChannels, true);
    /* sample rate */
    view.setUint32(24, sampleRate, true);
    /* byte rate (sample rate * block align) */
    view.setUint32(28, sampleRate * numChannels * 2, true);
    /* block align (channel count * bytes per sample) */
    view.setUint16(32, numChannels * 2, true);
    /* bits per sample */
    view.setUint16(34, 16, true);
    /* data chunk identifier */
    writeString(view, 36, 'data');
    /* data chunk length */
    view.setUint32(40, samples.length * 2, true);

    floatTo16BitPCM(view, 44, samples);

    return view;
}
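
As a sanity check on encodeWAV, the header fields can be read back from the DataView it returns. For 44100 Hz stereo 16-bit audio, block align is 2 channels × 2 bytes = 4 and byte rate is 44100 × 4 = 176400. A minimal sketch (test code, not part of the worker; assumes init({sampleRate: 44100}) has already run):

// Inspect the RIFF header produced by encodeWAV for 1024 interleaved samples.
var view = encodeWAV(new Float32Array(1024));
console.log(view.getUint16(22, true));   // 2      (channels)
console.log(view.getUint32(24, true));   // 44100  (sample rate)
console.log(view.getUint32(28, true));   // 176400 (byte rate)
console.log(view.getUint16(32, true));   // 4      (block align)
console.log(view.getUint32(40, true));   // 2048   (data bytes: 1024 samples * 2)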