let fileInput = document.querySelector('input[type="file"]');
fileInput.addEventListener('change', function(event) {
let file = event.target.files[0];
let reader = new FileReader();
reader.onload = function() {
let arrayBuffer = reader.result;
let uint8Array = new Uint8Array(arrayBuffer);
console.log(uint8Array);
};
reader.readAsArrayBuffer(file);
});
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Text File Manipulation</title>
</head>
<body>
<input type="file" id="fileInput" />
<pre id="output"></pre>
<script>
document.getElementById('fileInput').addEventListener('change', function(event) {
let file = event.target.files[0];
if (!file) return;
let reader = new FileReader();
reader.onload = function() {
let arrayBuffer = reader.result;
let decoder = new TextDecoder('utf-8');
let text = decoder.decode(arrayBuffer);
// Convert the text to uppercase
let upperText = text.toUpperCase();
document.getElementById('output').textContent = upperText;
};
reader.readAsArrayBuffer(file);
});
</script>
</body>
</html>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Image Manipulation</title>
</head>
<body>
<input type="file" id="fileInput" />
<canvas id="canvas"></canvas>
<script>
document.getElementById('fileInput').addEventListener('change', function (event) {
let file = event.target.files[0];
if (!file) return;
let reader = new FileReader();
reader.onload = function () {
let arrayBuffer = reader.result;
let blob = new Blob([arrayBuffer], { type: file.type }); // use the selected file's actual MIME type
let url = URL.createObjectURL(blob);
let img = new Image();
img.src = url;
img.onload = function () {
let canvas = document.getElementById('canvas');
let ctx = canvas.getContext('2d');
canvas.width = img.width;
canvas.height = img.height;
ctx.drawImage(img, 0, 0);
let imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
let data = imageData.data;
// Invert the color of each pixel
for (let i = 0; i < data.length; i += 4) {
data[i] = 255 - data[i]; // Red
data[i + 1] = 255 - data[i + 1]; // Green
data[i + 2] = 255 - data[i + 2]; // Blue
// Leave the alpha value unchanged
}
ctx.putImageData(imageData, 0, 0);
URL.revokeObjectURL(url); // release the temporary object URL
};
};
reader.readAsArrayBuffer(file);
});
</script>
</body>
</html>
ArrayBuffer is used to send and receive binary data over WebSockets, XMLHttpRequest, the Fetch API, and similar interfaces.
let socket = new WebSocket('wss://example.com/socket');
socket.binaryType = 'arraybuffer'; // receive binary messages as ArrayBuffer
// Send binary data once the connection is open
socket.onopen = function() {
let buffer = new ArrayBuffer(10);
let view = new Uint8Array(buffer);
for (let i = 0; i < view.length; i++) {
view[i] = i;
}
socket.send(buffer);
};
// Handle received data
socket.onmessage = function(event) {
let arrayBuffer = event.data;
let uint8Array = new Uint8Array(arrayBuffer);
console.log(uint8Array);
};
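The same pattern applies to the Fetch API. Here is a minimal sketch of reading a binary response body into an ArrayBuffer; the URL below is a placeholder, not a real endpoint.
// Fetch a binary resource and read the response body as an ArrayBuffer
fetch('https://example.com/data.bin')
  .then(function (response) {
    return response.arrayBuffer();
  })
  .then(function (arrayBuffer) {
    let uint8Array = new Uint8Array(arrayBuffer);
    console.log(uint8Array);
  })
  .catch(function (error) {
    console.error('Failed to fetch binary data:', error);
  });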
ArrayBuffer is essential when rendering graphics with WebGL. Vertex data is stored in typed arrays backed by an ArrayBuffer and then passed to the GPU.
<!DOCTYPE html>
<html lang="en">
<body>
<canvas id="canvas" width="400" height="400"></canvas>
<script>
function main() {
const canvas = document.getElementById('canvas');
const gl = canvas.getContext('webgl');
if (!gl) {
alert('WebGL is not supported in this browser.');
return;
}
// Vertex shader program
const vsSource = `
attribute vec4 aVertexPosition;
void main(void) {
gl_Position = aVertexPosition;
}
`;
// Fragment shader program
const fsSource = `
void main(void) {
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); // Red color
}
`;
// Initialize a shader program
const shaderProgram = initShaderProgram(gl, vsSource, fsSource);
// Collect shader attributes
const programInfo = {
program: shaderProgram,
attribLocations: {
vertexPosition: gl.getAttribLocation(shaderProgram, 'aVertexPosition'),
},
};
// Vertex positions for a triangle
const positions = new Float32Array([
0.0, 1.0,
-1.0, -1.0,
1.0, -1.0,
]);
// Create a buffer and put the positions in it
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
// Clear the canvas
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
// Tell WebGL how to pull out the positions from the position buffer
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.vertexAttribPointer(
programInfo.attribLocations.vertexPosition,
2, // numComponents
gl.FLOAT, // type
false, // normalize
0, // stride
0 // offset
);
gl.enableVertexAttribArray(programInfo.attribLocations.vertexPosition);
// Use the shader program
gl.useProgram(programInfo.program);
// Draw the triangle
gl.drawArrays(gl.TRIANGLES, 0, 3);
}
function initShaderProgram(gl, vsSource, fsSource) {
const vertexShader = loadShader(gl, gl.VERTEX_SHADER, vsSource);
const fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fsSource);
const shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
return null;
}
return shaderProgram;
}
function loadShader(gl, type, source) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
}
return shader;
}
window.onload = main;
</script>
</body>
</html>
ArrayBuffer is also used when manipulating audio data with the Web Audio API, for example when reading an audio file and processing it.
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Web Audio Processing</title>
</head>
<body>
<input type="file" id="fileInput" />
<button id="playButton">Play</button>
<button id="fasterButton">Faster</button>
<button id="slowerButton">Slower</button>
<script>
let audioContext = new (window.AudioContext || window.webkitAudioContext)();
let audioBuffer;
let source;
document.getElementById('fileInput').addEventListener('change', function (event) {
const file = event.target.files[0];
if (!file) return;
const reader = new FileReader();
reader.onload = function () {
const arrayBuffer = reader.result;
audioContext.decodeAudioData(arrayBuffer, function (buffer) {
audioBuffer = buffer;
console.log('Audio file loaded and decoded successfully.');
}, function (error) {
console.error('Failed to decode audio data:', error);
});
};
reader.readAsArrayBuffer(file);
});
document.getElementById('playButton').addEventListener('click', function () {
if (!audioBuffer) {
alert('Please load an audio file first.');
return;
}
// Resume the AudioContext if the browser suspended it (autoplay policy)
if (audioContext.state === 'suspended') {
audioContext.resume();
}
// Stop any currently playing audio
if (source) {
source.stop();
}
source = audioContext.createBufferSource();
source.buffer = audioBuffer;
source.connect(audioContext.destination);
source.start();
});
document.getElementById('fasterButton').addEventListener('click', function () {
if (source) {
source.playbackRate.value *= 1.25;
}
});
document.getElementById('slowerButton').addEventListener('click', function () {
if (source) {
source.playbackRate.value *= 0.8;
}
});
</script>
</body>
</html>
❔ Do I always have to use an ArrayBuffer when using a TypedArray?
✔️ Yes, fundamentally a TypedArray requires an ArrayBuffer. A TypedArray is a typed view laid over an ArrayBuffer; it is the means of reading and writing the buffer's data, which makes it possible to manipulate fixed-size numeric data efficiently.
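A minimal sketch of that relationship (variable names are illustrative): creating a TypedArray directly still allocates an ArrayBuffer behind the scenes, and several typed views can share one buffer.
// A TypedArray created directly still allocates an ArrayBuffer under the hood
let bytes = new Uint8Array(8);
console.log(bytes.buffer instanceof ArrayBuffer); // true

// Several typed views can share the same ArrayBuffer
let shared = new ArrayBuffer(8);
let asBytes = new Uint8Array(shared);
let asFloat = new Float64Array(shared);
asFloat[0] = 1.5;
console.log(asBytes); // the raw IEEE-754 bytes of 1.5, seen through the byte view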
- Conversation with ChatGPT
- https://developer.mozilla.org/ko/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer