Creating a static waveform using webaudio

I am trying to create a static waveform, for example, in audio editing applications using webaudio and canvas. Right now I am downloading mp3, creating a buffer, iterating over the data returned by getChannelData.

The problem is that I do not understand what it is returning.

  • What does getChannelData return — is it suitable for drawing the waveform?
  • How do I configure it (the sample size?) to get one peak per second?
  • Why are ~50% of the values negative?

    // NOTE(review): this is the asker's snippet quoted as-is — it is incomplete
    // (the decodeAudioData callback and the for-loop are never closed).
    ctx.decodeAudioData(req.response, function(buffer) {
      buf = buffer;
    
    src = ctx.createBufferSource();
    src.buffer = buf;
    
    // Create an analyser node ("fft").
    fft = ctx.createAnalyser();
    
    // NOTE(review): getByteFrequencyData reads the analyser's *current*
    // frequency snapshot from the live graph, not the decoded buffer's
    // samples — presumably not what a static waveform needs; compare with
    // the answer below, which reads getChannelData directly.
    var data = new Uint8Array(samples);
      fft.getByteFrequencyData(data);
    
    // Raw PCM samples of channel 0 (values oscillate around zero, hence
    // the ~50% negative values the question asks about).
    bufferL = buf.getChannelData(0)
      // NOTE(review): bufferL[i*1000] skips 999 of every 1000 samples and
      // reads past the end of bufferL once i*1000 >= buf.length, yielding
      // undefined and NaN coordinates for the line.
      for(var i = 0; i<buf.length; i++){
        n = bufferL[i*(1000)]
          gfx.beginPath();
          gfx.moveTo(i +0.5, 300);
          gfx.lineTo(i +0.5, 300 + (-n*100));
          gfx.stroke();
    

What I'm generating:

What I would like to generate:

Thanks!

+4
source share
1 answer

There is a demo of doing exactly this — https://github.com/cwilso/Audio-Buffer-Draw. It is a minimal example, but it shows the approach.

1) Yes, getChannelData returns the raw sample values, and it is suitable for drawing the waveform. 2) There is no "sample size" to configure: you get one value per sample at the audio sample rate, so to draw one peak per unit of time you have to scan a chunk of samples (one chunk per output pixel, as the "step" in the code below does) and take its minimum and maximum. 3) The samples range from -1 to +1; audio oscillates around zero, so roughly half the values are negative — the "peaks" you want come from the min/max of each chunk, not from individual samples.

Here is the code:

// Page-wide Web Audio context; used below by initAudio() to decode the download.
var audioContext = new AudioContext();

function drawBuffer( width, height, context, buffer ) {
    var data = buffer.getChannelData( 0 );
    var step = Math.ceil( data.length / width );
    var amp = height / 2;
    for(var i=0; i < width; i++){
        var min = 1.0;
        var max = -1.0;
        for (var j=0; j<step; j++) {
            var datum = data[(i*step)+j]; 
            if (datum < min)
                min = datum;
            if (datum > max)
                max = datum;
        }
        context.fillRect(i,(1+min)*amp,1,Math.max(1,(max-min)*amp));
    }
}

/**
 * Fetch an audio file via XHR, decode it with the shared audioContext, and
 * draw its waveform onto the canvas with id "view1" using drawBuffer().
 *
 * Failures (network error or undecodable audio) are logged to the console;
 * the original version swallowed both silently.
 */
function initAudio() {
    var audioRequest = new XMLHttpRequest();
    audioRequest.open("GET", "sounds/fightclub.ogg", true);
    audioRequest.responseType = "arraybuffer";
    audioRequest.onload = function() {
        audioContext.decodeAudioData( audioRequest.response,
            function(buffer) {
                var canvas = document.getElementById("view1");
                drawBuffer( canvas.width, canvas.height, canvas.getContext('2d'), buffer );
            },
            function(err) {
                // Corrupt or unsupported audio data; surface it instead of
                // failing silently.
                console.error("decodeAudioData failed:", err);
            } );
    };
    audioRequest.onerror = function() {
        // Without this handler a failed download dies silently.
        console.error("Could not load sounds/fightclub.ogg");
    };
    audioRequest.send();
}

// Kick off the download/decode/draw pipeline once the page has loaded.
window.addEventListener('load', initAudio );
+7

All Articles