Begin to add recorder.

Fangjun Kuang 2022-05-06 23:34:40 +08:00
parent 9b5c18438d
commit 30b2626172
5 changed files with 258 additions and 3 deletions

View File

@@ -14,7 +14,7 @@
 <script src="https://code.jquery.com/jquery-3.6.0.min.js" integrity="sha256-/xUj+3OJU5yExlq6GSYGSHk7tPXikynS7ogEvDej/m4=" crossorigin="anonymous"></script>
-<title>Hello next-gen Kaldi</title>
+<title>Next-gen Kaldi demo</title>
 </head>
@@ -37,7 +37,7 @@
 <li class="media">
 <div class="media-body">
 <h5 class="mt-0 mb-1">Record</h5>
-<p>Recognition from real-time recording</p>
+<p>Recognition from real-time recordings</p>
 </div>
 </li>
 </ul>

View File

@@ -14,7 +14,9 @@
 </li>
 <li class="nav-item">
+<a class="nav-link" href="record.html">Record</a>
 </li>
 </ul>
 </div>
 </nav>

View File

@@ -0,0 +1,64 @@
<!doctype html>
<html lang="en">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Bootstrap CSS -->
<link rel="stylesheet"
href="https://cdn.jsdelivr.net/npm/bootstrap@4.3.1/dist/css/bootstrap.min.css"
integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T"
crossorigin="anonymous">
<script src="https://code.jquery.com/jquery-3.6.0.min.js" integrity="sha256-/xUj+3OJU5yExlq6GSYGSHk7tPXikynS7ogEvDej/m4=" crossorigin="anonymous"></script>
<title>Next-gen Kaldi demo (Recognition from real-time recordings)</title>
</head>
<body>
<div id="nav"></div>
<script>
$(function(){
$("#nav").load("nav-partial.html");
});
</script>
<h3>Recognition from real-time recordings</h3>
<div class="container">
<div class="row">
<div class="col-12">
<canvas id="canvas" height="60" style="display: block; margin-bottom: 0.5rem;"></canvas>
</div>
</div>
<div class="row">
<div class="col">
<button class="btn btn-primary btn-block" id="record">Record</button>
</div>
<div class="col">
<button class="btn btn-primary btn-block" id="stop">Stop</button>
</div>
</div>
</div>
<section id="sound-clips" style="flex: 1; overflow: auto;">
</section>
<!-- Optional JavaScript -->
<!-- jQuery first, then Popper.js, then Bootstrap JS -->
<script src="https://cdn.jsdelivr.net/npm/popper.js@1.14.7/dist/umd/popper.min.js"
integrity="sha384-UO2eT0CpHqdSJQ6hJty5KVphtPhzWj9WO1clHTMGa3JDZwrnQq4sF86dIHNDz0W1"
crossorigin="anonymous">
</script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@4.3.1/dist/js/bootstrap.min.js"
integrity="sha384-JjSmVgyd0p3pXB1rRibZUAYoIIy6OrQ6VrjIEaFf/nJGzIxFDsf4x0xIM+B07jRM"
crossorigin="anonymous">
</script>
<script src="./record.js"> </script>
</body>
</html>

View File

@@ -0,0 +1,189 @@
// see https://mdn.github.io/web-dictaphone/scripts/app.js
// and https://gist.github.com/meziantou/edb7217fddfbb70e899e
const record = document.getElementById('record');
const stop = document.getElementById('stop');
const soundClips = document.getElementById('sound-clips');
const canvas = document.getElementById('canvas');
soundClips.innerHTML = "hello";
stop.disabled = true;
let audioCtx;
const canvasCtx = canvas.getContext("2d");
let sampleRate;
if (navigator.mediaDevices.getUserMedia) {
console.log('getUserMedia supported.');
// see https://w3c.github.io/mediacapture-main/#dom-mediadevices-getusermedia
const constraints = {
// does not work
// audio : {sampleRate : 16000, sampleSize : 16, channelCount : 1}
audio : true,
};
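// The sampleRate constraint above is commented out because browsers generally
// capture at the hardware rate (typically 44.1 kHz or 48 kHz) and ignore a
// requested 16 kHz rate. If 16 kHz samples are needed later for recognition,
// one possible approach is to resample a decoded AudioBuffer with an
// OfflineAudioContext; the helper below is a hypothetical sketch and is not
// wired into the recorder yet.
async function resampleTo16kHz(audioBuffer) {
  const targetSampleRate = 16000;
  const length = Math.ceil(audioBuffer.duration * targetSampleRate);
  const offlineCtx = new OfflineAudioContext(1, length, targetSampleRate);
  const source = offlineCtx.createBufferSource();
  source.buffer = audioBuffer;
  source.connect(offlineCtx.destination);
  source.start();
  // startRendering() resolves with an AudioBuffer whose sampleRate is 16000.
  return offlineCtx.startRendering();
}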
let chunks = [];
let onSuccess = function(stream) {
var settings = stream.getAudioTracks()[0].getSettings();
sampleRate = settings.sampleRate;
console.log(settings);
console.log('sample rate ' + settings.sampleRate);
console.log('channel count ' + settings.channelCount);
console.log('sample size ' + settings.sampleSize);
const mediaRecorder = new MediaRecorder(stream);
console.log('mime type ' + mediaRecorder.mimeType);
console.log('audio bits per second ' + mediaRecorder.audioBitsPerSecond);
console.log(mediaRecorder)
visualize(stream);
record.onclick = function() {
mediaRecorder.start(10); // timeslice: fire a dataavailable event roughly every 10 ms
console.log(mediaRecorder.state);
console.log("recorder started");
record.style.background = "red";
stop.disabled = false;
record.disabled = true;
};
stop.onclick = function() {
mediaRecorder.stop();
console.log(mediaRecorder.state);
console.log("recorder stopped");
record.style.background = "";
record.style.color = "";
// mediaRecorder.requestData();
stop.disabled = true;
record.disabled = false;
};
mediaRecorder.onstop = function(e) {
console.log("data available after MediaRecorder.stop() called.");
const clipName =
prompt('Enter a name for your sound clip?', 'My unnamed clip');
const clipContainer = document.createElement('article');
const clipLabel = document.createElement('p');
const audio = document.createElement('audio');
const deleteButton = document.createElement('button');
clipContainer.classList.add('clip');
audio.setAttribute('controls', '');
deleteButton.textContent = 'Delete';
deleteButton.className = 'delete';
if (clipName === null) {
clipLabel.textContent = 'My unnamed clip';
} else {
clipLabel.textContent = clipName;
}
clipContainer.appendChild(audio);
clipContainer.appendChild(clipLabel);
clipContainer.appendChild(deleteButton);
soundClips.appendChild(clipContainer);
audio.controls = true;
// Note: setting the type here does not transcode; the data is whatever
// MediaRecorder produced (see mediaRecorder.mimeType). The 'audio/ogg; codecs=opus'
// value follows the MDN web-dictaphone example.
const blob = new Blob(chunks, {'type' : 'audio/ogg; codecs=opus'});
chunks = [];
const audioURL = window.URL.createObjectURL(blob);
audio.src = audioURL;
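// Possible follow-up (sketch): decode the recorded blob and resample it to
// 16 kHz with the hypothetical resampleTo16kHz() helper above before sending
// it to a recognizer, e.g.
//   blob.arrayBuffer()
//       .then((buf) => audioCtx.decodeAudioData(buf))
//       .then((decoded) => resampleTo16kHz(decoded))
//       .then((resampled) => console.log('16 kHz samples:', resampled.getChannelData(0)));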
console.log("recorder stopped");
deleteButton.onclick =
function(e) {
let evtTgt = e.target;
evtTgt.parentNode.parentNode.removeChild(evtTgt.parentNode);
}
clipLabel.onclick = function() {
const existingName = clipLabel.textContent;
const newClipName = prompt('Enter a new name for your sound clip?');
if (newClipName === null) {
clipLabel.textContent = existingName;
} else {
clipLabel.textContent = newClipName;
}
}
};
mediaRecorder.ondataavailable = function(e) {
console.log('size ' + e.data.size);
console.log(e.data);
chunks.push(e.data);
}
};
let onError = function(err) {
console.log('The following error occurred: ' + err);
};
navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
} else {
console.log('getUserMedia not supported on your browser!');
}
function visualize(stream) {
if (!audioCtx) {
audioCtx = new AudioContext();
}
const source = audioCtx.createMediaStreamSource(stream);
const analyser = audioCtx.createAnalyser();
analyser.fftSize = 2048;
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
source.connect(analyser);
// analyser.connect(audioCtx.destination);
draw()
function draw() {
const WIDTH = canvas.width
const HEIGHT = canvas.height;
requestAnimationFrame(draw);
analyser.getByteTimeDomainData(dataArray);
canvasCtx.fillStyle = 'rgb(200, 200, 200)';
canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
canvasCtx.beginPath();
let sliceWidth = WIDTH * 1.0 / bufferLength;
let x = 0;
for (let i = 0; i < bufferLength; i++) {
let v = dataArray[i] / 128.0;
let y = v * HEIGHT / 2;
if (i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
}
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
}
}
// Keep the canvas as wide as its containing column when the window is resized.
window.onresize = function() { canvas.width = canvas.parentElement.offsetWidth; };
window.onresize();

View File

@@ -14,7 +14,7 @@
 <script src="https://code.jquery.com/jquery-3.6.0.min.js" integrity="sha256-/xUj+3OJU5yExlq6GSYGSHk7tPXikynS7ogEvDej/m4=" crossorigin="anonymous"></script>
-<title>Hello next-gen Kaldi (Upload file for recognition)</title>
+<title>Next-gen Kaldi demo (Upload file for recognition)</title>
 </head>