JS: Displaying an Audio Waveform on Canvas with AudioContext

Introduction

  • AudioContext is part of the browser's Web Audio API and is available in all modern browsers (it is a browser API, not an ECMAScript/ES7 language feature).
  • The main APIs used here are createScriptProcessor, onaudioprocess, and getChannelData.
  • Note: onaudioprocess is deprecated; new code should use AnalyserNode instead (see MDN: AnalyserNode). A minimal AnalyserNode sketch follows this list.
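
For reference, here is a minimal sketch of the modern AnalyserNode approach. It is not part of the original code; it assumes an existing AudioContext named audioCtx, a source node already created from your audio, and the same 400×300 canvas context ctx used below:

const analyser = audioCtx.createAnalyser();
analyser.fftSize = 2048;                    // 2048 time-domain samples per read
source.connect(analyser);
analyser.connect(audioCtx.destination);

const data = new Float32Array(analyser.fftSize);

function draw() {
    requestAnimationFrame(draw);            // poll every frame instead of onaudioprocess
    analyser.getFloatTimeDomainData(data);  // samples in [-1, 1]
    ctx.clearRect(0, 0, 400, 300);
    ctx.beginPath();
    ctx.moveTo(0, 150);
    for (let i = 0, mix = Math.min(400, data.length); i < mix; i++){
        ctx.lineTo(i, data[i] * 150 + 150); // map [-1, 1] around the canvas midline
    }
    ctx.stroke();
}
draw();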

Demo

(Screenshot: the red audio waveform rendered on the canvas while the track plays)

Code

index.html

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Canvas Audio</title>
    <style>
        body, div {
            margin: 0px;
            padding: 0px;
            text-align: center;
        }
        #cav {
            border: 1px solid black;
            border-radius: 4px;
            margin: 10px auto;
        }
    </style>
</head>
<body>
<canvas id="cav" width="400" height="300"></canvas>
<script src="Aud.js"></script>
<script>
let ctx = document.getElementById('cav').getContext('2d');
let SampleSize = 1024;

window.onload = function (){
    init();
};

function init() {
    AudManager.urls = ["test.mp3"];
    AudManager.init();
    AudManager.load(()=>{
        let jNode = AudManager.createNode(2, [SampleSize, 1, 1]); // ScriptProcessorNode: 1024-sample buffer, 1 input / 1 output channel
        let pNode = AudManager.createNode(); // no type given, so the default branch returns a GainNode

        ctx.strokeStyle = "red";
        jNode.onaudioprocess = function (e){
            renderWave(e);
        };

        let sound1 = AudManager.items[0];

        sound1.addNode(pNode);    // group 0: src -> gain -> destination (audible path)
        sound1.addNode(jNode, 1); // group 1: src -> script processor -> destination (analysis path)
        sound1.play();
    });
}

function renderWave(e){
    ctx.clearRect(0,0,400,300);

    // raw PCM samples of channel 0, values in [-1, 1]
    let bufData = e.inputBuffer.getChannelData(0);

    ctx.beginPath();
    ctx.moveTo(0,150);

    // draw at most 400 samples, one per horizontal pixel,
    // mapping [-1, 1] onto the canvas around the midline (y = 150)
    for (let i = 0, len = bufData.length, mix = Math.min(400, len); i < mix; i++){
        ctx.lineTo(i, bufData[i]*150+150);
    }

    ctx.stroke();
}
</script>
</body>
</html>
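
One caveat: current browsers start an AudioContext in the "suspended" state unless it is created inside a user gesture, so the window.onload auto-play above may stay silent. A minimal workaround sketch (assuming you add a hypothetical <button id="play"> to the page and drop the onload call):

document.getElementById('play').addEventListener('click', () => {
    init();
    AudManager.ctx.resume(); // resume in case the context was created suspended
});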

Aud.js

let Aud = function (ctx, url) {
    this.ctx = ctx;
    this.url = url;

//    source node: plays the decoded audio buffer
    this.src = ctx.createBufferSource();

//    groups of processing nodes; each group is connected as its own chain in output()
    this.pNode = [];
};

Aud.prototype = {
    // connect src -> node1 -> node2 -> ... -> destination for every node group
    output(){
        for (let i = 0; i < this.pNode.length; i++){
            let tNode = this.src;
            for (let j = 0; j < this.pNode[i].length; j++){
                tNode.connect(this.pNode[i][j]);
                tNode = this.pNode[i][j];
            }
            tNode.connect(this.ctx.destination);
        }
    },

    // note: an AudioBufferSourceNode can be started only once
    play(loop){
        this.src.loop = loop || false;
        this.output();
        this.src.start(0);
    },

    stop() {
        this.src.stop();
    },

    // append a node to the chain in the given group (default group 0)
    addNode(node, groupIdx = 0){
        this.pNode[groupIdx] = this.pNode[groupIdx] || [];
        this.pNode[groupIdx].push(node);
    }
};

// node type constants
Aud.NODETYPE = {
    GNODE: 0 // gain node (the default in AudManager.createNode)
};

// Aud manager object
const AudManager = {
    urls: [],
    items: [],
    ctx: null,
    init(){
        try{
            this.ctx = new AudioContext();
        }catch (e) {
            console.log(`${e}`);
        }
    },
    // load every URL; the callback fires once all files are decoded
    load(callback){
        for (let i = 0; i < this.urls.length; i++){
            this.loadSingle(this.urls[i], callback);
        }
    },

    loadSingle(url, callback){
        let req = new XMLHttpRequest();
        req.open('GET', url, true);
        req.responseType = 'arraybuffer';
        let self = this;
        req.onload = function () {
            self.ctx.decodeAudioData(this.response)
                .then(
                    buf => {
                        let aud = new Aud(self.ctx, url);
                        aud.src.buffer = buf;
                        self.items.push(aud);

                        if (self.items.length === self.urls.length){
                            callback();
                        }
                    },
                    err => {
                        console.log(`decode error:${err}`);
                    }
                )
        };

        req.send();
    },

    // nodeType 1 = PannerNode, 2 = ScriptProcessorNode (param = [bufferSize, inChannels, outChannels]),
    // anything else = GainNode (the default)
    createNode(nodeType, param){
        let node = null;
        switch (nodeType) {
            case 1:
                node = this.ctx.createPanner();
                break;
            case 2:
                node = this.ctx.createScriptProcessor(param[0], param[1], param[2]);
                break;
            default:
                node = this.ctx.createGain();
        }
        return node;
    }
};
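
As a quick usage sketch (hypothetical, not from the original article): routing a sound through a half-volume gain node shows how the node-group chaining in Aud.addNode works:

AudManager.urls = ["test.mp3"];
AudManager.init();
AudManager.load(() => {
    let gain = AudManager.createNode(); // default branch: GainNode
    gain.gain.value = 0.5;              // halve the volume

    let sound = AudManager.items[0];
    sound.addNode(gain);                // group 0: src -> gain -> destination
    sound.play(true);                   // loop playback
});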
