javascript - Why does setting the format in gl.texImage2D to gl.LUMINANCE instead of gl.RGB make the blob created from the canvas only ~5% smaller in file size?


While exploring javascript I ran into a rather puzzling problem. Some background first: I convert images of various mime types (mostly pngs/jpgs) into bitmaps via the ImageBitmap interface, then transfer them to a worker to be converted into blobs on a separate thread (for which I first draw them into an offscreen canvas context) and saved to IDB, while the main thread keeps loading new images. While doing this, to broaden my horizons, I decided to use the webgl2 rendering context in the canvas, since I had never touched GL before.
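For context, a minimal sketch of that pipeline might look like the following. This is my own simplified illustration, not the actual code: the worker file name 'convert-worker.js' is hypothetical, the worker uses a 2d context for brevity (the question uses webgl2), and the IndexedDB write is elided.

// main thread: decode to an ImageBitmap and hand it off to a worker (transferable, zero-copy)
const worker = new Worker('convert-worker.js'); // hypothetical worker script

async function queueImage(file) {
  const bitmap = await createImageBitmap(file, {
    premultiplyAlpha: 'none',
    colorSpaceConversion: 'none',
  });
  worker.postMessage({ bitmap }, [bitmap]); // transfer ownership to the worker
}

// convert-worker.js: draw the bitmap into an OffscreenCanvas, re-encode, store it
self.onmessage = async ({ data: { bitmap } }) => {
  const canvas = new OffscreenCanvas(bitmap.width, bitmap.height);
  canvas.getContext('2d').drawImage(bitmap, 0, 0); // the question draws via webgl2 instead
  const blob = await canvas.convertToBlob({ type: 'image/webp' });
  // ...save `blob` into IndexedDB here...
};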

To apply the bitmap to the canvas I used the texImage2D function, which I don't seem to understand. There I can specify the format of the data held in memory (which should be RGB, right? since the bitmap was created without alpha premultiplication), the internal format and the type. Since the valid format/internal format/type combinations are defined by the spec, I tried to take advantage of their variety and pick the best one for my purposes (quality/file size). Since the images being converted to bitmaps are mostly black and white, I figured luminance was what I needed. But first I used the standard RGB format:

gl.texImage2D(
  gl.TEXTURE_2D, 0, gl.RGB, bitmap.width, bitmap.height, 0, gl.RGB, gl.UNSIGNED_BYTE, bitmap
);

Then I used RGB565 with the UNSIGNED_SHORT_5_6_5 data type and noticed no quality loss, while the blob size dropped by about 30% compared to RGB. My understanding is that it shrinks because RGB565 is one unsigned short (2 bytes) per pixel, right? Then I tried RGBA with UNSIGNED_SHORT_5_5_5_1, and the blob was about 43% smaller than with standard RGB — even smaller than with RGB565! But the gradients in the image became banded, so I dropped 5551 RGBA. The big size difference between 5551 RGBA and RGB565 is what I don't understand. Even more confusing: when using luminance with the type/format/internal format combination from the spec, the reduction relative to standard RGB is only ~5%. Why does RGB565 give a whopping ~30% size reduction while luma gives only ~5%?
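For reference, the in-memory cost per pixel of the upload formats mentioned so far works out as follows. This comparison is my own addition and only describes the upload format handed to texImage2D, not the size of the encoded blob (the accepted answer below explains why those two are unrelated), and the packRGB565 helper is a hypothetical illustration of the bit packing.

// Approximate bytes per pixel of the upload formats discussed
// (the driver is allowed to expand these internally, see the answer below).
const bytesPerPixel = {
  'RGB / UNSIGNED_BYTE': 3,            // 8+8+8 bits
  'RGB / UNSIGNED_SHORT_5_6_5': 2,     // one 16-bit short per pixel
  'RGBA / UNSIGNED_SHORT_5_5_5_1': 2,  // one 16-bit short per pixel
  'RGBA / UNSIGNED_SHORT_4_4_4_4': 2,  // one 16-bit short per pixel
  'LUMINANCE / UNSIGNED_BYTE': 1,      // single 8-bit channel
};

// Packing one 8-bit-per-channel pixel into a single RGB565 short (hypothetical helper):
function packRGB565(r, g, b) {
  return ((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3);
}
console.log(packRGB565(200, 100, 50).toString(16)); // one 16-bit value per pixel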

For all of these I used the same float sampler in the fragment shader:

#version 300 es
precision mediump float;
precision mediump sampler2D;

uniform sampler2D sampler;
uniform vec2 dimensions;

out vec4 color;

void main() {
  color = texture(sampler, vec2(gl_FragCoord.x / dimensions.x, 1.0 - (gl_FragCoord.y / dimensions.y)));
}

And the same pixelStorei and texParameteri settings:

gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

As the snippet below shows, if the image is black and white, luminance doesn't change the blob's file size at all, while for a colored image it shrinks it noticeably — yet still less than RGBA4 does. That is very counterintuitive, considering RGBA4 stores 2 bytes per pixel while LUMA stores 1.

(async() => {
function createImage(src) {
return new Promise((rs, rj) => {
var img = new Image();
img.crossOrigin = 'anonymous';
img.src = src;
img.onload = () => rs(img);
img.onerror = e => rj(e);
});
};

var jpeg = await createImage('https://upload.wikimedia.org/wikipedia/commons/a/aa/5inchHowitzerFiringGallipoli1915.jpeg');

var png = await createImage('https://upload.wikimedia.org/wikipedia/commons/2/2c/6.d%C3%ADl_html_m2fdede78.png');

var jpgClr = await createImage('https://upload.wikimedia.org/wikipedia/commons/thumb/e/ed/%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg/117px-%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg');


var format = {
standard: {
internalFormat: 'RGB8',
format: 'RGB',
type: 'UNSIGNED_BYTE',
},
rgb565: {
internalFormat: 'RGB565',
format: 'RGB',
type: 'UNSIGNED_SHORT_5_6_5',
},
rgb9e5: {
internalFormat: 'RGB9_E5',
format: 'RGB',
type: 'FLOAT',
},
srgb: {
internalFormat: 'SRGB8',
format: 'RGB',
type: 'UNSIGNED_BYTE',
},
rgba32f: {
internalFormat: 'RGB32F',
format: 'RGB',
type: 'FLOAT',
},
rgba4: {
internalFormat: 'RGBA4',
format: 'RGBA',
type: 'UNSIGNED_SHORT_4_4_4_4',
},
rgb5a1: {
internalFormat: 'RGB5_A1',
format: 'RGBA',
type: 'UNSIGNED_SHORT_5_5_5_1',
},
luma: {
internalFormat: 'LUMINANCE',
format: 'LUMINANCE',
type: 'UNSIGNED_BYTE',
},
};


function compareFormatSize(image) {
return new Promise((r, _) => {
createImageBitmap(image, {
premultiplyAlpha: 'none',
colorSpaceConversion: 'none',
}).then(async bitmap => {
var text = String(image.src.match(/(?<=\.)\w{3,4}$/)).toUpperCase();
console.log(`${text === 'JPG' ? 'Colored jpg' : text}:`);
for (let val of Object.values(format)) {
await logBlobSize(bitmap, val);
if(val.format === 'LUMINANCE') r();
}
}).catch(console.warn);
});
};

compareFormatSize(jpeg).then(_ => compareFormatSize(png)).then(_ => compareFormatSize(jpgClr));


function logBlobSize(bitmap, { internalFormat, format, type }) {
return new Promise(r => {
drawCanvas(bitmap, internalFormat, format, type).convertToBlob({
type: `image/webp`
}).then(blob => { console.log(`Blob from ${internalFormat} is ${blob.size}b`); r(); });
})
}


function drawCanvas(bitmap, internalFormat, format, type) {
const gl = (new OffscreenCanvas(bitmap.width, bitmap.height)).getContext("webgl2", {
antialias: false,
alpha: false,
depth: false,
});

function createShader(gl, type, glsl) {
const shader = gl.createShader(type);
gl.shaderSource(shader, glsl)
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return;
}
return shader;
}


const vs = createShader(
gl,
gl.VERTEX_SHADER,
`#version 300 es
#define POSITION_LOCATION 0
layout(location = POSITION_LOCATION) in vec2 position;

void main()
{
gl_Position = vec4(position, 0.0, 1.0);
}`,
);

const fs = createShader(
gl,
gl.FRAGMENT_SHADER,
`#version 300 es
precision mediump float;
precision mediump sampler2D;

uniform sampler2D sampler;
uniform vec2 dimensions;

out vec4 color;

void main()
{
color = texture(sampler, vec2(gl_FragCoord.x/dimensions.x, 1.0 - (gl_FragCoord.y/dimensions.y)));
}`,
);

const program = gl.createProgram();
gl.attachShader(program, vs);
gl.attachShader(program, fs);
gl.linkProgram(program);

const sampler = gl.getUniformLocation(program, 'sampler');
const dimensions = gl.getUniformLocation(program, 'dimensions');
const position = 0; // GLSL location


const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
gl.enableVertexAttribArray(position);
const vxBuffer = gl.createBuffer();
const vertices = new Float32Array([
-1.0,-1.0,
1.0,-1.0,
-1.0, 1.0,
1.0, 1.0,
]);
gl.bindBuffer(gl.ARRAY_BUFFER, vxBuffer);
gl.vertexAttribPointer(position, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);

const texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl[internalFormat],
bitmap.width,
bitmap.height,
0,
gl[format],
gl[type],
bitmap
);

gl.useProgram(program);
gl.uniform1i(sampler, 0);
gl.uniform2f(dimensions, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

gl.deleteTexture(texture);
gl.deleteVertexArray(vao);
gl.deleteBuffer(vxBuffer);
gl.deleteProgram(program);

return gl.canvas;
}
})()

Thanks in advance!

Best Answer

A canvas is always RGBA 8-bit (32-bit color). There has been talk of adding options for deeper-bit-depth canvases to support high color-depth displays, but nothing has shipped yet.

So calling canvas.convertToBlob will always give you an RGBA 32-bit png (or jpeg). Creating a LUMINANCE texture will give you a black-and-white texture, but it gets drawn into an RGBA 32-bit canvas. There is no option to get a 1-channel PNG.
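A quick way to convince yourself of that (a sketch of my own, not from the original answer): decode the resulting blob again and inspect a pixel; whatever the texture format was, you always get back 4 channels of 8-bit data.

// Assumes `blob` came from OffscreenCanvas.convertToBlob() after drawing the LUMINANCE texture.
async function inspectBlob(blob) {
  const bitmap = await createImageBitmap(blob);
  const ctx = new OffscreenCanvas(bitmap.width, bitmap.height).getContext('2d');
  ctx.drawImage(bitmap, 0, 0);
  const { data } = ctx.getImageData(0, 0, 1, 1); // Uint8ClampedArray of length 4
  console.log('channels per pixel:', data.length); // always 4: R, G, B, A
}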

As for RGB565, RGBA5551, etc., those formats may or may not be supported directly by the hardware. The spec allows the driver to choose a higher-resolution format, and my guess is that most desktops expand the data to RGBA8 when you upload it, so it doesn't save any memory.

On the other hand, when uploading as RGB565 or RGBA5551, the WebGL spec requires that an image you pass in is first converted to that format, so the browser takes your image and effectively quantizes it down to those color depths, which means you lose colors. You then draw the quantized image back into the canvas and save it, and of course it compresses better because there are more similar colors.

From the WebGL spec, for the version of texImage2D that takes an ImageBitmap:

The source image data is conceptually first converted to the data type and format specified by the format and type arguments, and then transferred to the WebGL implementation. Format conversion is performed according to the following table. If a packed pixel format is specified which would imply loss of bits of precision from the image data, this loss of precision must occur.
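As a concrete illustration of that precision loss (my own numbers, not part of the spec): an 8-bit channel value only survives a 5-bit round trip approximately, and many neighboring input values collapse onto the same output value, which is exactly what helps the encoder later.

// Round-trip one 8-bit red value through 5 bits, the way the conversion table implies.
function roundTrip5(v) {
  return Math.round((v >> 3) * 255 / 31);
}
console.log(roundTrip5(200));                      // 206 - the original 200 is gone
console.log([200, 201, 202, 203].map(roundTrip5)); // [206, 206, 206, 206] - neighbours collapse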

Let's try it without WebGL:

(async() => {
function createImage(src) {
return new Promise((rs, rj) => {
const img = new Image();
img.crossOrigin = 'anonymous';
img.src = src;
img.onload = () => rs(img);
img.onerror = rj;
});
};

const jpeg = await createImage('https://upload.wikimedia.org/wikipedia/commons/a/aa/5inchHowitzerFiringGallipoli1915.jpeg');

const png = await createImage('https://upload.wikimedia.org/wikipedia/commons/2/2c/6.d%C3%ADl_html_m2fdede78.png');

const jpgClr = await createImage('https://upload.wikimedia.org/wikipedia/commons/thumb/e/ed/%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg/117px-%22Good_bye%2C_sweetheart%22%2C_tobacco_label%2C_ca._1865.jpg');


const format = {
standard: {
internalFormat: 'RGB8',
format: 'RGB',
type: 'UNSIGNED_BYTE',
fn: p => [p[0], p[1], p[2], 255],
},
rgb565: {
internalFormat: 'RGB565',
format: 'RGB',
type: 'UNSIGNED_SHORT_5_6_5',
fn: p => [
(p[0] >> 3) * 255 / 31,
(p[1] >> 2) * 255 / 63,
(p[2] >> 3) * 255 / 31,
255,
],
},
rgba4: {
internalFormat: 'RGBA4',
format: 'RGBA',
type: 'UNSIGNED_SHORT_4_4_4_4',
fn: p => [
(p[0] >> 4) * 255 / 15,
(p[1] >> 4) * 255 / 15,
(p[2] >> 4) * 255 / 15,
(p[3] >> 4) * 255 / 15,
],
},
rgb5a1: {
internalFormat: 'RGB5_A1',
format: 'RGBA',
type: 'UNSIGNED_SHORT_5_5_5_1',
fn: p => [
(p[0] >> 3) * 255 / 31,
(p[1] >> 3) * 255 / 31,
(p[2] >> 3) * 255 / 31,
(p[3] >> 7) * 255 / 1,
],
},
luma: {
internalFormat: 'LUMINANCE',
format: 'LUMINANCE',
type: 'UNSIGNED_BYTE',
fn: p => [p[0], p[0], p[0], 255],
},
};


async function compareFormatSize(image) {
const bitmap = await createImageBitmap(image, {
premultiplyAlpha: 'none',
colorSpaceConversion: 'none',
});
const text = String(image.src.match(/(?<=\.)\w{3,4}$/)).toUpperCase();
log(`${text === 'JPG' ? 'Colored jpg' : text}:`);
for (const val of Object.values(format)) {
await logBlobSize(bitmap, val);
}
};

await compareFormatSize(jpeg);
await compareFormatSize(png);
await compareFormatSize(jpgClr);

async function logBlobSize(bitmap, {
internalFormat,
format,
type,
fn,
}) {
const canvas = drawCanvas(bitmap, internalFormat, format, type);
const blob = await canvas.convertToBlob({
type: `image/webp`
});
const canvas2 = drawFn(bitmap, fn);
const blob2 = await canvas2.convertToBlob({
type: `image/webp`
});
log(`Blob from ${internalFormat} is ${blob.size}b(webgl) vs ${blob2.size}b(code)`);
if (false) {
const img = new Image();
img.src = URL.createObjectURL(blob);
document.body.appendChild(img);
const img2 = new Image();
img2.src = URL.createObjectURL(blob2);
document.body.appendChild(img2);
}
}

function drawFn(bitmap, fn) {
const ctx = (new OffscreenCanvas(bitmap.width, bitmap.height)).getContext("2d");
ctx.drawImage(bitmap, 0, 0);
const imageData = ctx.getImageData(0, 0, bitmap.width, bitmap.height);
const pixels = imageData.data;
for (let i = 0; i < pixels.length; i += 4) {
const n = fn(pixels.subarray(i, i + 4));
pixels.set(n, i);
}
ctx.putImageData(imageData, 0, 0);
return ctx.canvas;
}

function drawCanvas(bitmap, internalFormat, format, type) {
const gl = (new OffscreenCanvas(bitmap.width, bitmap.height)).getContext("webgl2", {
antialias: false,
alpha: false,
depth: false,
});

function createShader(gl, type, glsl) {
const shader = gl.createShader(type);
gl.shaderSource(shader, glsl)
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return;
}
return shader;
}


const vs = createShader(
gl,
gl.VERTEX_SHADER,
`#version 300 es
#define POSITION_LOCATION 0
layout(location = POSITION_LOCATION) in vec2 position;

void main()
{
gl_Position = vec4(position, 0.0, 1.0);
}`,
);

const fs = createShader(
gl,
gl.FRAGMENT_SHADER,
`#version 300 es
precision mediump float;
precision mediump sampler2D;

uniform sampler2D sampler;
uniform vec2 dimensions;

out vec4 color;

void main()
{
color = texture(sampler, vec2(gl_FragCoord.x/dimensions.x, 1.0 - (gl_FragCoord.y/dimensions.y)));
}`,
);

const program = gl.createProgram();
gl.attachShader(program, vs);
gl.attachShader(program, fs);
gl.linkProgram(program);

const sampler = gl.getUniformLocation(program, 'sampler');
const dimensions = gl.getUniformLocation(program, 'dimensions');
const position = 0; // GLSL location


const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
gl.enableVertexAttribArray(position);
const vxBuffer = gl.createBuffer();
const vertices = new Float32Array([
-1.0, -1.0,
1.0, -1.0,
-1.0, 1.0,
1.0, 1.0,
]);
gl.bindBuffer(gl.ARRAY_BUFFER, vxBuffer);
gl.vertexAttribPointer(position, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);

const texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, false);
gl.pixelStorei(gl.UNPACK_COLORSPACE_CONVERSION_WEBGL, gl.NONE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl[internalFormat],
bitmap.width,
bitmap.height,
0,
gl[format],
gl[type],
bitmap
);

gl.useProgram(program);
gl.uniform1i(sampler, 0);
gl.uniform2f(dimensions, gl.canvas.width, gl.canvas.height);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

gl.deleteTexture(texture);
gl.deleteVertexArray(vao);
gl.deleteBuffer(vxBuffer);
gl.deleteProgram(program);

return gl.canvas;
}
})()

function log(...args) {
const elem = document.createElement('pre');
elem.textContent = [...args].join(' ');
document.body.appendChild(elem);
}

"Why does setting the format in gl.texImage2D to gl.LUMINANCE instead of gl.RGB make the blob created from the canvas only ~5% smaller in file size?"

I don't see those results. In your example, the black-and-white images stay the same size with RGB and LUMINANCE, and the colored image drops to half the size. But of course, whether a black-and-white 32-bit image compresses to a smaller file than a colored 32-bit image is up to the compression algorithm, since in every case the canvas is 32 bits when convertToBlob is called.

The original question can be found on Stack Overflow: https://stackoverflow.com/questions/56430401/
