<!-- Reference: https://juejin.cn/post/6844903604013252621#heading-11 -->
<template>
<div class="page">
<canvas id="waveCanvas" height="250"></canvas>
<audio src="./assets/1.mp3" controls id="audio">亲 您的浏览器不支持html5的audio标签</audio>
<button @click="play">play</button>
</div>
</template>
<script lang="ts">
import { defineComponent, ref, onMounted } from 'vue'
export default defineComponent({
name: 'Home',
// No child components are registered.
components: {
},
// Empty options-API data(); all state lives in setup() below.
data() {
return {
}
},
setup() {
// Handles shared by the closures below; all start null and are filled in
// lazily by initCanvas()/init(). Left as `any` to match the file's style.
let oAudio: any = null;            // the <audio id="audio"> element
// Web Audio graph handles (built once on first play)
let audioSrc: any = null;          // MediaElementAudioSourceNode wrapping oAudio
let analyser: any = null;          // AnalyserNode feeding the visualiser
let voiceHeight: any = null;       // Uint8Array reused for frequency snapshots
// True once the requestAnimationFrame loop has been started.
let isAnimating = false;
/**
 * Click handler for the "play" button: starts audio playback (init) and the
 * drawing loop (animation).
 *
 * Bug fix: previously every click called animation() again, stacking an
 * additional requestAnimationFrame loop per click. The flag ensures only one
 * loop ever runs.
 */
function play() {
init()
if (!isAnimating) {
isAnimating = true;
animation()
}
}
/**
 * One-time setup run from onMounted: caches the <audio> element and paints
 * the static base rectangle at the bottom of the canvas.
 *
 * Improvements: typed DOM lookups instead of `any`, and a guard for a null
 * 2D context instead of an unchecked call.
 */
function initCanvas() {
// Cache the <audio> element for the other closures (init/play).
oAudio = document.getElementById('audio')
const canvas = document.getElementById('waveCanvas') as HTMLCanvasElement;
const ctx = canvas.getContext('2d');
if (!ctx) return;
// Size the drawing buffer from the element's layout width; height is fixed
// to match the height="250" attribute in the template.
const width = canvas.offsetWidth;
const height = 250;
canvas.width = width;
canvas.height = height;
// baseY is the waveform baseline, 50px above the bottom edge.
const baseY = height - 50;
// Fill the opaque base band between the baseline and the bottom edge.
ctx.beginPath();
ctx.fillStyle = 'rgba(31,165,23,0.6)';
ctx.moveTo(0, baseY);
ctx.lineTo(width, baseY);
ctx.lineTo(width, height);
ctx.lineTo(0, height);
ctx.fill();
}
/**
 * Starts playback and lazily builds the Web Audio analysis graph:
 * <audio> element -> media source -> analyser -> destination (speakers).
 * The graph is created exactly once; later calls only resume playback.
 */
function init () {
oAudio.play()
// Graph already wired up on a previous click — nothing more to do.
if (audioSrc) {
return
}
const audioCtx = new window.AudioContext();
// Wrap the <audio> element as a media source node.
audioSrc = audioCtx.createMediaElementSource(oAudio);
// The analyser exposes per-frame frequency data for the visualiser.
analyser = audioCtx.createAnalyser();
audioSrc.connect(analyser);
// Route the analysed signal on to the output device so it stays audible.
analyser.connect(audioCtx.destination);
// Reusable buffer sized to the analyser's frequency-bin count.
voiceHeight = new Uint8Array(analyser.frequencyBinCount);
}
/**
 * Renders one wave as a filled polygon: the sample points in `arr` are
 * interpolated with a Catmull-Rom spline (the 0.5*(2*p1 + ...) blend below
 * is the standard uniform Catmull-Rom formula, 100 steps per segment),
 * then the shape is closed along the bottom of the canvas and filled.
 *
 * Extracted from draw(), which previously contained this logic twice,
 * duplicated verbatim except for the array, step width and fill colour.
 *
 * @param ctx       target 2D context
 * @param arr       sample heights in px above the baseline
 * @param width     canvas width in px
 * @param height    canvas height in px
 * @param baseY     y coordinate of the waveform baseline
 * @param fillStyle fill colour for this wave
 */
function drawWave(ctx: CanvasRenderingContext2D, arr: number[], width: number, height: number, baseY: number, fillStyle: string) {
const waveWidth = Math.ceil(width / (arr.length - 3));
ctx.beginPath();
ctx.fillStyle = fillStyle;
// Start off-canvas on the left so the curve enters smoothly.
ctx.moveTo(-waveWidth * 2, baseY - arr[0]);
for (let i = 1; i < arr.length - 2; i++) {
// Four control points of the current Catmull-Rom segment.
const p0 = { x: (i - 2) * waveWidth, y: arr[i - 1] };
const p1 = { x: (i - 1) * waveWidth, y: arr[i] };
const p2 = { x: i * waveWidth, y: arr[i + 1] };
const p3 = { x: (i + 1) * waveWidth, y: arr[i + 2] };
for (let j = 0; j < 100; j++) {
const t = j * (1.0 / 100);
const tt = t * t;
const ttt = tt * t;
const x = 0.5 * (2 * p1.x + (p2.x - p0.x) * t + (2 * p0.x - 5 * p1.x + 4 * p2.x - p3.x) * tt + (3 * p1.x - p0.x - 3 * p2.x + p3.x) * ttt);
const y = 0.5 * (2 * p1.y + (p2.y - p0.y) * t + (2 * p0.y - 5 * p1.y + 4 * p2.y - p3.y) * tt + (3 * p1.y - p0.y - 3 * p2.y + p3.y) * ttt);
ctx.lineTo(x, baseY - y);
}
ctx.lineTo(p2.x, baseY - p2.y);
}
// Run out past the right edge, then close the polygon along the bottom.
ctx.lineTo(arr.length * waveWidth, baseY - arr[arr.length - 1]);
ctx.lineTo(width + waveWidth * 2, baseY);
ctx.lineTo(width + waveWidth * 2, height);
ctx.lineTo(-2 * waveWidth, height);
ctx.fill();
}
/**
 * Draws one animation frame: a faint wide wave (waveArr1) and a stronger
 * narrow wave (waveArr2) above the baseline.
 *
 * Note: the previous version re-sampled the analyser here
 * (getByteFrequencyData) — useless, since the arrays passed in were already
 * computed from the sample taken in animation() and voiceHeight is not read
 * in this function. That redundant per-frame call has been removed.
 */
function draw(waveArr1: any, waveArr2: any) {
const canvas = document.getElementById('waveCanvas') as HTMLCanvasElement;
const ctx = canvas.getContext('2d');
if (!ctx) return;
const width = canvas.offsetWidth;
const height = 250;
const baseY = height - 50;
// Re-assigning width/height also clears the previous frame.
canvas.width = width;
canvas.height = height;
drawWave(ctx, waveArr1, width, height, baseY, 'rgba(31,165,23,0.2)');
drawWave(ctx, waveArr2, width, height, baseY, 'rgba(31,165,23,0.5)');
}
/**
 * Per-frame loop: samples the analyser, down-samples the frequency bins into
 * two small wave arrays, draws them, and schedules the next frame.
 *
 * - waveTemp: up to 9 bins, one every 51 bins, scaled by /2.6 (centre wave)
 * - leftTemp/rightTemp: up to 5 bins each, every 71st/91st bin, /4.8 (edges)
 *
 * Fixes: Uint8Array.map was used purely for side effects (allocating a
 * throwaway array every frame) — replaced with forEach; waveArr2 was scaled
 * by mutating the array inside .map — now uses map's return value.
 *
 * NOTE(review): this loop never stops, even when the audio is paused or
 * ended — consider cancelling the rAF on pause.
 */
function animation() {
// Take this frame's frequency snapshot (0–255 per bin).
analyser.getByteFrequencyData(voiceHeight);
const waveTemp: any[] = [];
const leftTemp: any[] = [];
const rightTemp: any[] = [];
// Counters start mid-cycle so the very first qualifying bin is picked up.
let waveStep = 50;
let leftStep = 70;
let rightStep = 90;
voiceHeight.forEach((data: any) => {
if (waveStep == 50 && waveTemp.length < 9) {
waveTemp.push(data / 2.6);
waveStep = 0;
} else {
waveStep++;
}
if (leftStep == 0 && leftTemp.length < 5) {
leftTemp.unshift(Math.floor(data / 4.8));
leftStep = 70;
} else {
leftStep--;
}
if (rightStep == 0 && rightTemp.length < 5) {
rightTemp.push(Math.floor(data / 4.8));
rightStep = 90;
} else {
rightStep--;
}
});
// Wave 1: edge samples flanking the centre samples; wave 2: edges only,
// amplified 1.8x.
const waveArr1 = leftTemp.concat(waveTemp).concat(rightTemp);
const waveArr2 = leftTemp.concat(rightTemp).map((data) => data * 1.8);
draw(waveArr1, waveArr2)
window.requestAnimationFrame(animation);
}
onMounted(() => {
// Paint the static base rectangle once the canvas exists in the DOM.
initCanvas()
})
// Only `play` is exposed to the template (the button's @click handler).
return {
play
}
},
})
</script>