How can I create a moving waveform for an audio file/tag in HTML? When the play button is clicked, the HTML audio element must start playing and a corresponding moving waveform must be generated for it. How can this be implemented?
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<title id='title'>HTML Page setup Tutorial</title>
<script src='riffwave.js'></script>
<script type="text/javascript">
function myFunction()
{
var data = []; // just an array
for (var i=0; i<10000; i++) data[i] = /*Math.round(255 * Math.random())*/i; // fill data with a simple ramp (the random-sample version is commented out)
var wave = new RIFFWAVE(data); // create the wave file
var audio = new Audio(wave.dataURI); // create the HTML5 audio element
audio.play();
}
</script>
</head>
<body>
<button type="button" onclick="myFunction()">Click Me!</button>
</body>
</html>
Best Answer
Same as the examples below, but using CanvasJS:
Demo: http://seapip.com/canvas/visualizer4/
/*
Speed has to be bigger than refresh!
*/
//Speed to move from right to left, also the visible amount of time on the x axis (in milliseconds)
var speed = 10000;
//Time in milliseconds to redraw chart
var refresh = 30;
//Without var to make it a global variable accessible by the html onclick attribute
audioElement = document.getElementById('audioElement');
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var audioSrc = audioCtx.createMediaElementSource(audioElement);
var analyser = audioCtx.createAnalyser();
// Bind our analyser to the media element source.
audioSrc.connect(analyser);
audioSrc.connect(audioCtx.destination);
//Get frequency data
var frequencyData = new Uint8Array(analyser.frequencyBinCount);
//The animation reference
var animation;
//Create chart
var dps = []; // dataPoints
var chart = new CanvasJS.Chart("chart", {
interactivityEnabled: false,
width: 500,
height: 200,
axisX: {
title: "Time",
valueFormatString: "mm:ss"
},
axisY: {
title: "dB"
},
data: [{
type: "line",
dataPoints: dps
}]
});
chart.render();
//On play
audioElement.onplay = function() {
//Start drawing
animation = setInterval(function() {
drawWave();
}, refresh);
};
//On pause
audioElement.onpause = function() {
//Stop drawing
clearInterval(animation);
};
//On ended
audioElement.onended = function() {
//Stop drawing
clearInterval(animation);
//Reset time
time = 0;
//Reset dataPoints
dps = [];
//Prevent audio from looping (you can remove this if you want it to loop)
audioElement.pause();
};
//Max dB
var max = analyser.maxDecibels;
//Min dB
var min = analyser.minDecibels;
//Time
var time = 0;
//Our drawing method
function drawWave() {
// Copy frequency data to frequencyData array.
analyser.getByteFrequencyData(frequencyData);
//Total loudness of all frequencies in frequencyData
var totalLoudness = 0;
for(var i = 0; i < frequencyData.length; i++) {
totalLoudness += frequencyData[i];
}
//Average loudness of all frequencies in frequencyData on scale from 0 to 255
var averageLoudness = totalLoudness / frequencyData.length / 255;
//Decibels
var decibels = min + averageLoudness * Math.abs(min - max);
//Increase time
time += refresh;
//Add to chart
dps.push({
x: new Date(time),
y: decibels
});
//Maximum number of x values to draw, based on speed and refresh
if(dps.length > speed / refresh) {
dps.shift();
}
//Draw new chart
chart.render();
}
<audio id="audioElement" src="audio/Odesza - Above The Middle.mp3"></audio>
<div id="chart"></div>
<div>
<button onclick="audioElement.play()">Play the Audio</button>
<button onclick="audioElement.pause()">Pause the Audio</button>
<button onclick="audioElement.volume+=0.1">Increase Volume</button>
<button onclick="audioElement.volume-=0.1">Decrease Volume</button>
</div>
Keep in mind that #chart is a div and not a canvas element; it took me a few minutes to figure out why the chart wasn't showing up at first :P
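The chart version above also depends on the CanvasJS library already being loaded on the page before the script runs. A minimal page skeleton might look like the sketch below; the CDN URL and the visualizer.js file name are assumptions, so substitute whatever copy of CanvasJS and whatever script file you actually use:
<!-- Assumed CanvasJS include (any local or CDN copy works) -->
<script src="https://canvasjs.com/assets/script/canvasjs.min.js"></script>
<audio id="audioElement" src="audio/Odesza - Above The Middle.mp3"></audio>
<!-- The chart target is a plain div, not a canvas -->
<div id="chart"></div>
<!-- Assumed file name for the visualizer code shown above -->
<script src="visualizer.js"></script>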
Same as the example below, but drawn from right to left. The stepSize variable sets both the animation speed and the step size: if you want to draw with a larger step it has to move faster, and with a smaller step it has to move slower.
Demo: http://seapip.com/canvas/visualizer3
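As a quick sanity check on that speed/step relationship (derived from the code below, not stated in the original answer), the visible history on screen is roughly canvas.width / stepSize redraw intervals wide:
// Derived example: with canvas.width = 1000 device pixels (500 CSS px doubled
// for high-dpi), stepSize = 0.5 px per step and a 20 ms redraw interval,
// the canvas shows about (1000 / 0.5) * 20 ms = 40,000 ms = 40 s of history.
var visibleHistoryMs = (1000 / 0.5) * 20; // 40000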
//Step size (pixels per 20ms)
var stepSize = 0.5;
//Without var to make it a global variable accessible by the html onclick attribute
audioElement = document.getElementById('audioElement');
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var audioSrc = audioCtx.createMediaElementSource(audioElement);
var analyser = audioCtx.createAnalyser();
// Bind our analyser to the media element source.
audioSrc.connect(analyser);
audioSrc.connect(audioCtx.destination);
//Get frequency data (only the first 400 bins)
var frequencyData = new Uint8Array(400);
//Use below to show all frequencies
//var frequencyData = new Uint8Array(analyser.frequencyBinCount);
//Create canvas
var canvas = document.getElementById("wave");
canvas.style.width = "500px";
canvas.style.height = "100px";
//High dpi stuff
canvas.width = parseInt(canvas.style.width) * 2;
canvas.height = parseInt(canvas.style.height) * 2;
//Get canvas context
var ctx = canvas.getContext("2d");
//Stroke color
ctx.strokeStyle = "#ffff00";
//Draw thicker lines due to high dpi scaling
ctx.lineWidth = 2;
//Store y values
var drawY = [canvas.height];
//The animation reference
var animation;
//On play
audioElement.onplay = function() {
//Start drawing
animation = setInterval(function() {
drawWave();
}, 20);
};
//On pause
audioElement.onpause = function() {
//Stop drawing
clearInterval(animation);
};
//On ended
audioElement.onended = function() {
//Stop drawing
clearInterval(animation);
//Clear previous y values
drawY = [canvas.height];
//Prevent audio from looping (you can remove this if you want it to loop)
audioElement.pause();
};
//Our drawing method
function drawWave() {
// Copy frequency data to frequencyData array.
analyser.getByteFrequencyData(frequencyData);
//Total loudness of all frequencies in frequencyData
var totalLoudness = 0;
for(var i = 0; i < frequencyData.length; i++) {
totalLoudness += frequencyData[i];
}
//Average loudness of all frequencies in frequencyData
var averageLoudness = totalLoudness / frequencyData.length;
//Scale of average loudness from (0 to 1), frequency loudness scale is (0 to 255)
var y = averageLoudness / 255;
//Multiply with canvas height to get scale from (0 to canvas height)
y *= canvas.height;
//Since a canvas y axis is inverted from a normal y axis we have to flip it to get a normal y axis value
y = canvas.height - y;
//Store new y value
drawY.push(y);
//Clear previous drawing
ctx.clearRect(0, 0, canvas.width, canvas.height);
//Draw line
for(var i = drawY.length - 1; i > 0; i--) {
//calculate x values
var x1 = canvas.width - (drawY.length - i - 1) * stepSize;
var x2 = canvas.width - (drawY.length - i) * stepSize;
//Stop drawing y values if the x value is outside the canvas
if(x2 < 0) {
break;
}
ctx.beginPath();
ctx.moveTo(x1, drawY[i - 1]);
ctx.lineTo(x2, drawY[i]);
ctx.stroke();
}
}
<audio id="audioElement" src="audio/Odesza - Above The Middle.mp3"></audio>
<canvas id="wave"></canvas>
<div>
<button onclick="audioElement.play()">Play the Audio</button>
<button onclick="audioElement.pause()">Pause the Audio</button>
<button onclick="audioElement.volume+=0.1">Increase Volume</button>
<button onclick="audioElement.volume-=0.1">Decrease Volume</button>
</div>
This is what I think you're after: the x axis is time and the y axis is the average loudness across all frequencies. Keep in mind that browsers such as Chrome won't draw the chart correctly in a background tab, because they throttle the refresh interval and the audio analyser output; a possible workaround is sketched after this example.
Demo: http://seapip.com/canvas/visualizer2
//Without var to make it a global variable accessible by the html onclick attribute
audioElement = document.getElementById('audioElement');
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var audioSrc = audioCtx.createMediaElementSource(audioElement);
var analyser = audioCtx.createAnalyser();
// Bind our analyser to the media element source.
audioSrc.connect(analyser);
audioSrc.connect(audioCtx.destination);
//Get frequency data (only the first 400 bins)
var frequencyData = new Uint8Array(400);
//Use below to show all frequencies
//var frequencyData = new Uint8Array(analyser.frequencyBinCount);
//Create canvas
var canvas = document.getElementById("wave");
canvas.style.width = "1000px";
canvas.style.height = "100px";
//High dpi stuff
canvas.width = parseInt(canvas.style.width) * 2;
canvas.height = parseInt(canvas.style.height) * 2;
//Get canvas context
var ctx = canvas.getContext("2d");
//Set stroke color to yellow
ctx.strokeStyle = "#ffff00";
//Draw twice as thick lines due to high dpi scaling
ctx.lineWidth = 2;
//Save x and y from the previous drawing
var drawX = 0;
var drawY = 0;
//Total duration (Seconds)
var duration;
//The animation reference
var animation;
//Audio is loaded
audioElement.oncanplaythrough = function() {
//Get duration
duration = audioElement.duration;
//On play
audioElement.onplay = function() {
//Start drawing
drawWave();
};
//On pause
audioElement.onpause = function() {
//Stop drawing
cancelAnimationFrame(animation);
};
//On ended
audioElement.onended = function() {
//Stop drawing
cancelAnimationFrame(animation);
//Clear previous drawing
ctx.clearRect(0, 0, canvas.width, canvas.height);
//Clear previous x and y values
drawX = 0;
drawY = 0;
//Prevent audio from looping (you can remove this if you want it to loop)
audioElement.pause();
};
};
//Our drawing method
function drawWave() {
//Current time (seconds)
var currentTime = audioElement.currentTime;
// Copy frequency data to frequencyData array.
analyser.getByteFrequencyData(frequencyData);
//Total loudness of all frequencies in frequencyData
var totalLoudness = 0;
for(var i = 0; i < frequencyData.length; i++) {
totalLoudness += frequencyData[i];
}
//Average loudness of all frequencies in frequencyData
var averageLoudness = totalLoudness / frequencyData.length;
//Get the previous x axis value
var previousDrawX = drawX;
//Scale of progress in song (from 0 to 1)
drawX = currentTime / duration;
//Multiply with canvas width to get x axis value
drawX *= canvas.width;
//Get the previous y axis value
var previousDrawY = drawY;
//Scale of average loudness from (0 to 1), frequency loudness scale is (0 to 255)
drawY = averageLoudness / 255;
//Multiply with canvas height to get scale from (0 to canvas height)
drawY *= canvas.height;
//Since a canvas y axis is inverted from a normal y axis we have to flip it to get a normal y axis value
drawY = canvas.height - drawY;
//Draw line
ctx.beginPath();
ctx.moveTo(previousDrawX, previousDrawY);
ctx.lineTo(drawX, drawY);
ctx.stroke();
//Animate
animation = requestAnimationFrame(drawWave);
}
<audio id="audioElement" src="audio/Odesza - Above The Middle.mp3"></audio>
<canvas id="wave"></canvas>
<div>
<button onclick="audioElement.play()">Play the Audio</button>
<button onclick="audioElement.pause()">Pause the Audio</button>
<button onclick="audioElement.volume+=0.1">Increase Volume</button>
<button onclick="audioElement.volume-=0.1">Decrease Volume</button>
</div>
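One way to work around the background-tab throttling mentioned above is simply to stop drawing while the tab is hidden and pick the loop back up once it becomes visible again. This is only a hedged sketch using the Page Visibility API on top of the example above; it is not part of the original answer:
// Pause the drawing loop while the tab is hidden so throttled timers and
// stale analyser data don't distort the plot; resume when visible again.
document.addEventListener("visibilitychange", function() {
    if (document.hidden) {
        cancelAnimationFrame(animation);
    } else if (!audioElement.paused) {
        // Restart the requestAnimationFrame loop at the current playback position.
        drawWave();
    }
});
Because the x position in this example is derived from audioElement.currentTime, the plot simply skips over the hidden interval instead of drawing distorted data.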
Canvas visualizer example
Demo: http://seapip.com/canvas/visualizer/
//Without var to make it a global variable accessible by the html onclick attribute
audioElement = document.getElementById('audioElement');
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var audioSrc = audioCtx.createMediaElementSource(audioElement);
var analyser = audioCtx.createAnalyser();
// Bind our analyser to the media element source.
audioSrc.connect(analyser);
audioSrc.connect(audioCtx.destination);
//Get frequency data (only the first 400 bins)
var frequencyData = new Uint8Array(400);
//Use below to show all frequencies
//var frequencyData = new Uint8Array(analyser.frequencyBinCount);
//Create canvas
var canvas = document.getElementById("wave");
canvas.style.width = "500px";
canvas.style.height = "100px";
//High dpi stuff
canvas.width = parseInt(canvas.style.width) * 2;
canvas.height = parseInt(canvas.style.height) * 2;
//Get canvas context
var ctx = canvas.getContext("2d");
//Set stroke color
ctx.strokeStyle = "#ffff00"
//Draw twice as thick lines due to high dpi scaling
ctx.lineWidth = 2;
//Animation reference
var animation;
//On play
audioElement.onplay = function() {
drawWave();
};
//On pause
audioElement.onpause = function() {
cancelAnimationFrame(animation);
};
//On ended
audioElement.onended = function() {
cancelAnimationFrame(animation);
};
//Our drawing method
function drawWave() {
// Copy frequency data to frequencyData array.
analyser.getByteFrequencyData(frequencyData);
//Draw the wave
ctx.clearRect(0, 0, canvas.width, canvas.height);
for(var i = 1; i < frequencyData.length; i++) {
var x1 = canvas.width / (frequencyData.length - 1) * (i - 1);
var x2 = canvas.width / (frequencyData.length - 1) * i;
var y1 = canvas.height - frequencyData[i - 1] / 255 * canvas.height;
var y2 = canvas.height - frequencyData[i] / 255 * canvas.height;
if(x1 && y1 && x2 && y2) {
ctx.beginPath();
ctx.moveTo(x1, y1);
ctx.lineTo(x2, y2);
ctx.stroke();
}
}
//Animate
animation = requestAnimationFrame(drawWave);
}
<audio id="audioElement" src="audio/Odesza - Above The Middle.mp3"></audio>
<canvas id="wave"></canvas>
<div>
<button onclick="document.getElementById('audioElement').play()">Play the Audio</button>
<button onclick="document.getElementById('audioElement').pause()">Pause the Audio</button>
<button onclick="document.getElementById('audioElement').volume+=0.1">Increase Volume</button>
<button onclick="document.getElementById('audioElement').volume-=0.1">Decrease Volume</button>
</div>
Plugins and tutorials on audio visualization:
https://www.bignerdranch.com/blog/music-visualization-with-d3-js/
https://github.com/wayou/HTML5_Audio_Visualizer
https://www.patrick-wied.at/blog/how-to-create-audio-visualizations-with-javascript-html
https://p5js.org/examples/examples/Sound_Frequency_Spectrum.php
This answer comes from a similar question on Stack Overflow ("javascript - playing a moving waveform for a wav audio file in HTML"): https://stackoverflow.com/questions/38727741/