こちらをzipで落としてきて使いました。
mdn/voice-change-o-matic
## 完成品
始めるを押すと、canvasの中にバーを表示する
## ソースコード
html
<!DOCTYPE html>
<html lang="en-us">
<head>
<meta charset="utf-8">
<!-- Fix: a <title> is required for valid HTML -->
<title>Sound bar visualizer</title>
</head>
<body>
<div class="wrapper">
<!-- This div forcibly constrains the canvas width (see note at the bottom of the post) -->
<div style="width:200px;overflow: hidden;">
<canvas class="visualizer" width="200" height="100"></canvas>
<div>
<a id="startWave">始める</a>
</div>
<!-- Fix: the inner width div and .wrapper were never closed; browsers
     auto-close them, but the markup was invalid -->
</div>
</div>
<script src="scripts/sound-bar.js"></script>
</body>
</html>
js
// Clicking the 始める ("start") link boots the whole visualizer.
// `const` — the element handle is never reassigned.
const start = document.getElementById('startWave');
start.onclick = startWaveAnimation;
// Entry point, fired by clicking the 始める link: polyfills getUserMedia,
// builds the Web Audio node graph, and (further below) starts the
// frequency-bar visualizer.
function startWaveAnimation() {
// Older browsers might not implement mediaDevices at all, so we set an empty object first
if (navigator.mediaDevices === undefined) {
navigator.mediaDevices = {};
}
// Some browsers partially implement mediaDevices. We can't just assign an object
// with getUserMedia as it would overwrite existing properties.
// Here, we will just add the getUserMedia property if it's missing.
if (navigator.mediaDevices.getUserMedia === undefined) {
navigator.mediaDevices.getUserMedia = function(constraints) {
// First get ahold of the legacy getUserMedia, if present
var getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
// Some browsers just don't implement it - return a rejected promise with an error
// to keep a consistent interface
if (!getUserMedia) {
return Promise.reject(new Error('getUserMedia is not implemented in this browser'));
}
// Otherwise, wrap the call to the old navigator.getUserMedia with a Promise
return new Promise(function(resolve, reject) {
getUserMedia.call(navigator, constraints, resolve, reject);
});
}
}
// set up forked web audio context, for multiple browsers
// window. is needed otherwise Safari explodes
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var source;
var stream; // NOTE(review): declared but never used in this function — leftover from the MDN demo
// grab the mute button to use below
// NOTE(review): no element with class "mute" exists in this page's HTML, so
// `mute` is null; it is never used below — leftover from the MDN demo.
var mute = document.querySelector('.mute');
//set up the different audio nodes we will use for the app
var analyser = audioCtx.createAnalyser();
analyser.minDecibels = -90;
analyser.maxDecibels = -10;
analyser.smoothingTimeConstant = 0.85; // time-smoothing of the analysis frames (0-1)
var distortion = audioCtx.createWaveShaper();
var gainNode = audioCtx.createGain();
var biquadFilter = audioCtx.createBiquadFilter();
var convolver = audioCtx.createConvolver();
// Build the transfer curve for the WaveShaper distortion effect
// (the standard MDN curve). `amount` sets the clipping strength; any
// non-number argument falls back to a default strength of 50.
// Returns a Float32Array of 44100 samples over the input range [-1, 1).
function makeDistortionCurve(amount) {
const k = typeof amount === 'number' ? amount : 50;
const sampleCount = 44100;
const degToRad = Math.PI / 180;
const curve = new Float32Array(sampleCount);
for (let i = 0; i < sampleCount; i++) {
const x = i * 2 / sampleCount - 1;
curve[i] = (3 + k) * x * 20 * degToRad / (Math.PI + k * Math.abs(x));
}
return curve;
}
// set up canvas context for visualizer
var canvas = document.querySelector('.visualizer');
var canvasCtx = canvas.getContext("2d");
// Stretch the canvas to the wrapper's width (the wrapper div is what
// actually constrains the size — see the note at the bottom of the post).
var intendedWidth = document.querySelector('.wrapper').clientWidth;
canvas.setAttribute('width',intendedWidth);
// NOTE(review): no element with id "visual" exists in this page's HTML,
// so visualSelect is null and never used — leftover from the MDN demo.
var visualSelect = document.getElementById("visual");
var drawVisual; // requestAnimationFrame id, assigned inside visualize()
//main block for doing the audio recording
if (navigator.mediaDevices.getUserMedia) {
console.log('getUserMedia supported.');
var constraints = {audio: true}
navigator.mediaDevices.getUserMedia (constraints)
.then(
function(stream) {
// Wire the graph: mic -> distortion -> biquad -> gain -> analyser -> speakers.
// (convolver only joins the chain when voiceChange() splices it in.)
source = audioCtx.createMediaStreamSource(stream);
source.connect(distortion);
distortion.connect(biquadFilter);
biquadFilter.connect(gainNode);
convolver.connect(gainNode);
gainNode.connect(analyser);
analyser.connect(audioCtx.destination);
visualize();
voiceChange();
})
.catch( function(err) { console.log('The following gUM error occured: ' + err);})
} else {
console.log('getUserMedia not supported on your browser!');
}
// Draw the frequency-bar visualization into the canvas on every animation
// frame (until cancelled externally via cancelAnimationFrame(drawVisual)).
function visualize() {
// Fix: WIDTH and HEIGHT were implicit globals; keep them local.
var WIDTH = canvas.width;
var HEIGHT = canvas.height;
analyser.fftSize = 256;
var bufferLengthAlt = analyser.frequencyBinCount; // = fftSize / 2 = 128 bins
console.log(bufferLengthAlt);
var dataArrayAlt = new Uint8Array(bufferLengthAlt);
canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
var drawAlt = function() {
drawVisual = requestAnimationFrame(drawAlt);
analyser.getByteFrequencyData(dataArrayAlt);
canvasCtx.fillStyle = '#fff';
canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
var barWidth = 20; // width in px of each individual bar
var barHeight;
var x = 0;
for(var i = 0; i < bufferLengthAlt; i++) {
// Fix: dropped the chained `barHeight = barColor = ...` of the
// original, which leaked `barColor` as an implicit global that
// nothing ever read (barColer() declares its own local).
barHeight = dataArrayAlt[i];
canvasCtx.fillStyle = barColer(barHeight);
canvasCtx.fillRect(x, HEIGHT - barHeight / 2, barWidth, barHeight / 2);
x += barWidth + 1;
}
};
drawAlt();
}
// Pick the fill colour for one bar (pink palette, per the design request).
// The green channel fades from 200 down to 0 as the bar grows; bars taller
// than 200 get a fixed fallback pink so the channel never goes negative.
// NOTE: the name keeps the original spelling ("barColer") because
// visualize() calls it by this name.
// @param {number} barHeight - byte frequency value, 0-255.
// @returns {string} CSS rgb() colour string.
function barColer(barHeight){
// Fix: the original tested `barHeight > 200 || (200 - barHeight) < 0`,
// which are the same condition; one check suffices.
if (barHeight > 200) {
return 'rgb(255,157,207)';
}
return 'rgb(255, ' + (200 - barHeight) + ' ,177)';
}
function voiceChange() {
distortion.oversample = '4x';
biquadFilter.gain.setTargetAtTime(0, audioCtx.currentTime, 0)
var voiceSetting = voiceSelect.value;
console.log(voiceSetting);
//when convolver is selected it is connected back into the audio path
if(voiceSetting == "convolver") {
biquadFilter.disconnect(0);
biquadFilter.connect(convolver);
} else {
biquadFilter.disconnect(0);
biquadFilter.connect(gainNode);
if(voiceSetting == "distortion") {
distortion.curve = makeDistortionCurve(400);
} else if(voiceSetting == "biquad") {
biquadFilter.type = "lowshelf";
biquadFilter.frequency.setTargetAtTime(1000, audioCtx.currentTime, 0)
biquadFilter.gain.setTargetAtTime(25, audioCtx.currentTime, 0)
} else if(voiceSetting == "off") {
console.log("Voice settings turned off");
}
}
}
}
canvasのサイズがDeviceのwidthとなっているので、とりあえずdivで無理やり制御。
いらない記述が残っているかもしれませんが、まだそこまでじっくり見ていません。
console.logとかは消してもいいだろうなー
エラーの出し方はもっと工夫するべき。
名前空間作ろうね。