
Sound with three.js

Overview

Shadertoy's mainSound looked like magic to me, so I dug into how it works and reimplemented it with three.js.
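The trick, in short: mainSound is a shader function that is evaluated once per output audio sample; it receives the playback time in seconds and returns the stereo amplitude in [-1, 1]. As a minimal sketch of that idea (just a 440 Hz sine on both channels, using the same signature as the code below):

vec2 mainSound(float time)
{
    // 440 Hz sine, identical on left and right; 6.2831 ≈ 2 * PI
    return vec2(sin(6.2831 * 440.0 * time));
}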

References

https://qiita.com/tatmos/items/9ed8b47a44823f96dddf
https://codepen.io/fand/pen/baVdpK

Sample code

const DURATION = 6;
const WIDTH = 512;
const HEIGHT = 512;
const fragmentShader = `
precision mediump float;
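// iSampleRate: output sample rate in Hz; iBlockOffset: start time (in seconds) of the block being rendered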
uniform float iSampleRate;
uniform float iBlockOffset;
#define PI              3.1415926535
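// semitone offsets of the natural notes, counted up from A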
#define A               0.0
#define B               2.0
#define C               3.0
#define D               5.0
#define E               7.0
#define F               8.0
#define G               10.0
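// one-liner hash noise, used below to randomize the kick trigger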
float rand(vec2 co)
{
    return fract(sin(dot(co, vec2(12.9898, 78.233))) * 43758.5453);
}
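// semitone offset -> pitch around a 441 Hz reference, pre-multiplied by PI
// so the result can be fed straight into sin(time * f) / rect(time * f)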
float calcHertz(float scale)
{
    return 441.0 * pow(2.0, scale / 12.0) * PI;
}
float calcHertz(float octave, float note)
{
    return calcHertz(octave * 12.0 + note);
}
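// square / pulse waves; the thresholds 0.5, 0.25 and 0.125 give different duty cycles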
float rect(float time)
{
    return sign(fract(time / PI / 2.0) - 0.5);
}
float rect2(float time)
{
    return sign(fract(time / PI / 2.0) - 0.25);
}
float rect3(float time)
{
    return sign(fract(time / PI / 2.0) - 0.125);
}
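// exponential decay envelope; larger gate = faster decay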
float eg(float time, float gate)
{
    return  exp(-gate * (time));
}
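// voice macros: each one mixes an oscillator into `sound`;
// BD_Rect1 sweeps the pitch down with the envelope for a kick-like thump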
#define Sin1(u, v)          sound += clamp(sin(time * calcHertz(u, v)) * (1.0 - localTime2 + sin(time * 80.0) * 0.1), -0.3, 0.3);
#define Rect1(u, v, l)      sound += rect(time * calcHertz(u, v)) * l;
#define Rect2(u, v, l)      sound += rect2(time * calcHertz(u, v)) * l;
#define Rect3(u, v, l)      sound += rect3(time * calcHertz(u, v)) * l;
#define BD_Rect1(u, v, l)   sound += rect(time * calcHertz(u, v +  eg(localTime2, 2.0))) * l;
vec2 mainSound(float time)
{
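    // localTime: position in the looping 8-unit pattern (playback runs at half speed)
    // localTime2: phase within the current note, reassigned by each section below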
    float localTime = mod(time * 0.5 , 8.0);
    float localTime2 = mod(localTime , 1.0);
    vec2 sound = vec2(0.0);
    if (rand(vec2 (localTime2)) * 2.0 > 0.2)
    {
        localTime2 = mod(localTime * 16.0, 1.0);
        BD_Rect1(0.0, C, eg(localTime2, 8.0));
    }
    if (localTime < 2.0)
    {
        localTime2 = mod(localTime * 2.0, 1.0);
        float op0 = sin(1.0 * calcHertz(1.0, C) * time) * eg(localTime2, 1.0);
        float op1 = sin(1.14 * op0) * eg(localTime2, 2.0);
        float op2 = sin(1.5 * calcHertz(1.0, C) * time) * eg(localTime2, 5.0);
        float op3 = sin(2.14 * op2) * eg(localTime2, 2.0);
        sound += vec2(op1 * 0.5 + op3 * 0.5);
    } 
    else if (localTime < 4.0)
    {
        localTime2 = mod(localTime * 2.0, 1.0);
        float op0 = sin(8.0 * calcHertz(1.0, C) * time) * eg(localTime2, 3.0);
        float op1 = sin(6.5 * op0) *  eg(localTime2, 1.0);
        float op2 = sin(3.14 * calcHertz(1.0, C) * time) * eg(localTime2, 1.0);
        float op3 = sin(6.5 * op2) *  eg(localTime2, 1.0);
        sound += vec2(op1 * 0.5 + op3 * 0.5);
    } 
    else if (localTime < 6.0)
    {
        localTime2 = mod(localTime * 1.0, 1.0);
        Rect1(0.0, C, eg(localTime2, 1.0));
        Rect1(1.0, E, eg(localTime2, 1.0));
        Rect1(1.0, G, eg(localTime2, 1.0));
        Rect1(1.0, B, eg(localTime2, 1.0));
        sound *= 0.25;
    } 
    else if (localTime < 7.0)
    {
        localTime2 = mod(localTime * 3.0, 1.0);
        Rect2(0.0, C, eg(localTime2, 1.0));
        Rect2(1.0, E, eg(localTime2, 1.0));
        Rect2(1.0, F, eg(localTime2, 1.0));
        Rect2(1.0, A, eg(localTime2, 1.0));
        sound *= 0.25;
    }
    else if (localTime < 8.0)
    {
        localTime2 = mod(localTime * 6.0, 1.0);
        Rect3(0.0, G, eg(localTime2, 2.0));
        Rect3(2.0, E, eg(localTime2, 2.0));
        Rect3(1.0, F, eg(localTime2, 3.0));
        Rect3(1.0, B, eg(localTime2, 3.0));
        sound *= 0.25;
    }
    return vec2(sound);
}
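// Each fragment encodes one stereo sample: t is reconstructed from the fragment's
// position within the 512x512 block, the [-1, 1] sample is mapped to a 16-bit value,
// and its low/high bytes are packed into RGBA (R/G = left, B/A = right).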
void main()
{
    float t = iBlockOffset + ((gl_FragCoord.x - 0.5) + (gl_FragCoord.y - 0.5) * 512.0) / iSampleRate;
    vec2 y = mainSound(t);
    vec2 v  = floor((0.5 + 0.5 * y) * 65536.0);
    vec2 vl = mod(v, 256.0) / 255.0;
    vec2 vh = floor(v / 256.0) / 255.0;
    gl_FragColor = vec4(vl.x, vh.x, vl.y, vh.y);
}`;
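// Web Audio setup: a looping BufferSource will play back the rendered samples.
// (Recent browsers suspend an AudioContext created without a user gesture,
// so you may need to call ctx.resume() from a click handler.)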
const ctx = new window.AudioContext();
const node = ctx.createBufferSource();
node.connect(ctx.destination);
node.loop = true;
const audioBuffer = ctx.createBuffer(2, ctx.sampleRate * DURATION, ctx.sampleRate);
const canvas = document.createElement('canvas');
canvas.width = WIDTH;
canvas.height = HEIGHT;
const renderer = new THREE.WebGLRenderer({ 
    canvas,
    alpha: true
});
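// keep the raw WebGL context so the rendered pixels can be read back with readPixels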
const wctx = renderer.getContext();
const uniforms = {
    iBlockOffset: {
        type: 'f', 
        value: 0.0
    },
    iSampleRate: {
        type: 'f', 
        value: ctx.sampleRate
    },
};
const geometry = new THREE.PlaneGeometry(2, 2);
const material = new THREE.ShaderMaterial({ 
    uniforms, 
    fragmentShader
});
const plane = new THREE.Mesh(geometry, material);
const scene = new THREE.Scene();
const camera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0.1, 10);
camera.position.set(0, 0, 1);
camera.lookAt(scene.position);
scene.add(plane);
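// Render offscreen in blocks of WIDTH * HEIGHT samples until DURATION seconds are filled.
// (The last block may overrun the buffer slightly; out-of-range writes to a Float32Array are ignored.)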
const target = new THREE.WebGLRenderTarget(WIDTH, HEIGHT);
const samples = WIDTH * HEIGHT;
const numBlocks = (ctx.sampleRate * DURATION) / samples;
for (let i = 0; i < numBlocks; i++) 
{
    uniforms.iBlockOffset.value = i * samples / ctx.sampleRate;
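    // render this block into the offscreen target
    // (the 4-argument render() is the older three.js API; recent versions use
    // renderer.setRenderTarget(target) followed by renderer.render(scene, camera))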
    renderer.render(scene, camera, target, true);
    const pixels = new Uint8Array(WIDTH * HEIGHT * 4);
    wctx.readPixels(0, 0, WIDTH, HEIGHT, wctx.RGBA, wctx.UNSIGNED_BYTE, pixels);
    const outputDataL = audioBuffer.getChannelData(0);
    const outputDataR = audioBuffer.getChannelData(1);
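    // reassemble each 16-bit sample from its low/high bytes and map back to [-1, 1]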
    for (let j = 0; j < samples; j++)
    {
        outputDataL[i * samples + j] = (pixels[j * 4 + 0] + 256 * pixels[j * 4 + 1]) / 65535 * 2 - 1;
        outputDataR[i * samples + j] = (pixels[j * 4 + 2] + 256 * pixels[j * 4 + 3]) / 65535 * 2 - 1;
    }
}
node.buffer = audioBuffer;
node.start(0);


Result

That's all.
