Please write HTML, CSS, and JS code that shows a blob — an interactive blob that reacts to the sound played within it.
Creating an interactive blob that reacts to sound involves several steps. We'll use HTML for the structure, CSS for styling, and JavaScript for the interactivity and sound analysis. The Web Audio API will help us analyze the sound, and we can use the Canvas API to draw the blob.
Here's a simple example:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Interactive Sound Blob</title>
<link rel="stylesheet" href="styles.css">
</head>
<body>
<div class="container">
<canvas id="blobCanvas"></canvas>
<input type="file" id="audioInput" accept="audio/*">
</div>
<script src="script.js"></script>
</body>
</html>
body, html {
margin: 0;
padding: 0;
width: 100%;
height: 100%;
display: flex;
justify-content: center;
align-items: center;
background-color: #282c34;
color: white;
font-family: Arial, sans-serif;
}
.container {
text-align: center;
}
canvas {
border: 1px solid #fff;
}
const canvas = document.getElementById('blobCanvas');
const ctx = canvas.getContext('2d');
const audioInput = document.getElementById('audioInput');
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
let audioContext, analyser, source, dataArray, bufferLength;
audioInput.addEventListener('change', handleFiles, false);
function handleFiles(event) {
const file = event.target.files[0];
if (file) {
const audio = new Audio(URL.createObjectURL(file));
audioContext = new (window.AudioContext || window.webkitAudioContext)();
analyser = audioContext.createAnalyser();
source = audioContext.createMediaElementSource(audio);
source.connect(analyser);
analyser.connect(audioContext.destination);
analyser.fftSize = 256;
bufferLength = analyser.frequencyBinCount;
dataArray = new Uint8Array(bufferLength);
audio.play();
draw();
}
}
function draw() {
requestAnimationFrame(draw);
analyser.getByteFrequencyData(dataArray);
ctx.clearRect(0, 0, canvas.width, canvas.height);
const radius = Math.min(canvas.width, canvas.height) / 4;
const centerX = canvas.width / 2;
const centerY = canvas.height / 2;
ctx.beginPath();
for (let i = 0; i < bufferLength; i++) {
const angle = (i / bufferLength) * 2 * Math.PI;
const distance = radius + dataArray[i] / 2;
const x = centerX + distance * Math.cos(angle);
const y = centerY + distance * Math.sin(angle);
if (i === 0) {
ctx.moveTo(x, y);
} else {
ctx.lineTo(x, y);
}
}
ctx.closePath();
ctx.fillStyle = 'rgba(0, 150, 255, 0.5)';
ctx.fill();
ctx.strokeStyle = 'rgba(0, 150, 255, 1)';
ctx.stroke();
}
An `AudioContext` and an `AnalyserNode` are used to get frequency data from the audio. The `draw` function is called repeatedly using `requestAnimationFrame` to update the canvas with the current frequency data. This code provides a basic interactive blob that reacts to the sound played. You can further enhance it by adding more visual effects or improving the blob's shape and animation.