How to do batching without UBOs?


I'm trying to implement batching for a WebGL renderer that is struggling with too many draw calls from lots of small objects. My idea was to batch objects by the shader they use and then draw several at once, uploading each object's material parameters and model matrix as uniforms.

My problem is that the size limit for non-UBO uniforms is pretty low, as low as 256 floats at the minimum. If my material uses 8 floats, then once you factor in the model matrices I barely have enough uniforms to draw 10 models per batch, which really isn't enough.
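
For reference, the actual limits can be queried at runtime. Here is a minimal sketch of working out a per-batch budget; the 6-vec4s-per-model figure (4 for the matrix, 2 for material parameters) is just an illustrative assumption:

const gl = document.querySelector('canvas').getContext('webgl');
// guaranteed minimums: 128 vec4s (WebGL1) / 256 vec4s (WebGL2) for the vertex stage
const maxVertexVec4s = gl.getParameter(gl.MAX_VERTEX_UNIFORM_VECTORS);
const maxFragmentVec4s = gl.getParameter(gl.MAX_FRAGMENT_UNIFORM_VECTORS);
// illustrative budget: 4 vec4s for the model matrix + 2 vec4s of material parameters
const vec4sPerModel = 4 + 2;
const modelsPerBatch = Math.floor(maxVertexVec4s / vec4sPerModel);
console.log({maxVertexVec4s, maxFragmentVec4s, modelsPerBatch});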

Is there any hope of making this work without UBOs? Are textures an option? How do people do batching without WebGL2 UBOs?

More details: I have no skinning or complex animation. I only have a few shaders (diffuse, Cook-Torrance, etc.), and every model has different material settings, e.g. color, roughness, index of refraction, which the user can change dynamically (so baking them into the vertex arrays isn't realistic, since we have some high-poly data, the user can also switch shaders, and not all shaders have the same number of parameters), plus material maps obviously. The geometry itself is static, and each model has one linear transform. Most of the meshes are distinct, so geometry instancing won't help much, but I can look into that later.

Thanks

optimization 3d webgl
1 Answer

I don't know whether this is actually faster than lots of draw calls, but here are 4 models drawn with a single draw call.

It works by adding an id per model. So, for every vertex in model #0 put a 0, for every vertex in model #1 put a 1, and so on.

Then the model id is used to index data in a texture. The simplest way is for the model id to select a row of the texture; all the data for that model can then be pulled from that row.

For WebGL1:

attribute float modelId;

...

#define TEXTURE_WIDTH ??
#define COLOR_OFFSET    ((0.0 + 0.5) / TEXTURE_WIDTH)
#define MATERIAL_OFFSET ((1.0 + 0.5) / TEXTURE_WIDTH)

float modelOffset = (modelId + .5) / textureHeight;
vec4 color = texture2D(perModelData, vec2(COLOR_OFFSET, modelOffset));
vec4 roughnessIndexOfRefraction = texture2D(perModelData, 
                                            vec2(MATERIAL_OFFSET, modelOffset));

etc...

This works as long as you don't draw more than gl.getParameter(gl.MAX_TEXTURE_SIZE) models. If you have more, either use more draw calls or change the texture-coordinate calculation so there are multiple models per row.
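
As a sketch of that texture-coordinate change, assume a hypothetical layout where MODELS_PER_ROW models are packed side by side and each model occupies PIXELS_PER_MODEL pixels; both constants, and pixelOffset, are placeholders you would define to match your texture:

// hypothetical layout: MODELS_PER_ROW models per row, PIXELS_PER_MODEL pixels each
float col = mod(modelId, MODELS_PER_ROW);
float row = floor(modelId / MODELS_PER_ROW);
// pixelOffset is 0.0 for the color pixel, 1.0 for the material pixel, etc.
float u = (col * PIXELS_PER_MODEL + pixelOffset + 0.5) / TEXTURE_WIDTH;
float v = (row + 0.5) / textureHeight;
vec4 data = texture2D(perModelData, vec2(u, v));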

In WebGL2 you'd change the code to use texelFetch and unsigned integers:

in uint modelId;

...

#define COLOR_OFFSET    0
#define MATERIAL_OFFSET 1

vec4 color = texelFetch(perModelData, ivec2(COLOR_OFFSET, int(modelId)), 0);
vec4 roughnessIndexOfRefraction = texelFetch(perModelData, 
                                             ivec2(MATERIAL_OFFSET, int(modelId)), 0);
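
One related WebGL2 note: there is no OES_texture_float extension there; float textures are created with a sized internal format instead. A minimal sketch, reusing the variable names from the example below:

const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
// RGBA32F is core in WebGL2; NEAREST sampling needs no extra extension
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA32F, textureWidth, numModels, 0,
              gl.RGBA, gl.FLOAT, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);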

Here is an example of 4 models drawn with 1 draw call. For each model, the model matrix and the color are stored in a texture.

const m4 = twgl.m4;
const v3 = twgl.v3;
const gl = document.querySelector('canvas').getContext('webgl');
const ext = gl.getExtension('OES_texture_float');
if (!ext) {
  alert('need OES_texture_float');
}

const COMMON_STUFF = `
#define TEXTURE_WIDTH 5.0
#define MATRIX_ROW_0_OFFSET ((0. + 0.5) / TEXTURE_WIDTH)
#define MATRIX_ROW_1_OFFSET ((1. + 0.5) / TEXTURE_WIDTH)
#define MATRIX_ROW_2_OFFSET ((2. + 0.5) / TEXTURE_WIDTH)
#define MATRIX_ROW_3_OFFSET ((3. + 0.5) / TEXTURE_WIDTH)
#define COLOR_OFFSET        ((4. + 0.5) / TEXTURE_WIDTH)
`;

const vs = `
attribute vec4 position;
attribute vec3 normal;
attribute float modelId;

uniform float textureHeight;
uniform sampler2D perModelDataTexture;
uniform mat4 projection;
uniform mat4 view;

varying vec3 v_normal;
varying float v_modelId;

${COMMON_STUFF}

void main() {
  v_modelId = modelId;  // pass to fragment shader

  float modelOffset = (modelId + 0.5) / textureHeight;

  // note: in WebGL2 better to use texelFetch
  mat4 model = mat4(
    texture2D(perModelDataTexture, vec2(MATRIX_ROW_0_OFFSET, modelOffset)),
    texture2D(perModelDataTexture, vec2(MATRIX_ROW_1_OFFSET, modelOffset)),
    texture2D(perModelDataTexture, vec2(MATRIX_ROW_2_OFFSET, modelOffset)),
    texture2D(perModelDataTexture, vec2(MATRIX_ROW_3_OFFSET, modelOffset)));
  
  gl_Position = projection * view * model * position;
  v_normal = mat3(view) * mat3(model) * normal;
}
`;

const fs = `
precision highp float;

varying vec3 v_normal;
varying float v_modelId;

uniform float textureHeight;
uniform sampler2D perModelDataTexture;
uniform vec3 lightDirection;

${COMMON_STUFF}

void main() {
  float modelOffset = (v_modelId + 0.5) / textureHeight;

  vec4 color = texture2D(perModelDataTexture, vec2(COLOR_OFFSET, modelOffset));
  
  float l = dot(lightDirection, normalize(v_normal)) * .5 + .5;
  
  gl_FragColor = vec4(color.rgb * l, color.a);
}
`;

// compile shader, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);

// make some vertex data
const modelVerts = [
  twgl.primitives.createSphereVertices(1, 6, 4),
  twgl.primitives.createCubeVertices(1, 1, 1),
  twgl.primitives.createCylinderVertices(1, 1, 10, 1),
  twgl.primitives.createTorusVertices(1, .2, 16, 8),
];
// merge all the vertices into one
const arrays = twgl.primitives.concatVertices(modelVerts);
// fill an array so each vertex of each model has a modelId
const modelIds = new Uint16Array(arrays.position.length / 3);
let offset = 0;
modelVerts.forEach((verts, modelId) => {
  const end = offset + verts.position.length / 3;
  while(offset < end) {
    modelIds[offset++] = modelId;
  }
});
arrays.modelId = { numComponents: 1, data: modelIds };
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData
const bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);

const numModels = modelVerts.length;
const tex = gl.createTexture();
const textureWidth = 5; // 4 pixels for the 4x4 matrix + 1 pixel for the color
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, textureWidth, numModels, 0, gl.RGBA, gl.FLOAT, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

// this data is for the texture, one row per model
// the first 4 pixels are the model matrix, the 5th pixel is the color
const perModelData = new Float32Array(textureWidth * numModels * 4);
const stride = textureWidth * 4;
const modelOffset = 0;
const colorOffset = 16;

// set the colors at init time
for (let modelId = 0; modelId < numModels; ++modelId) {
  perModelData.set([r(), r(), r(), 1], modelId * stride + colorOffset);
}

function r() {
  return Math.random();
}

function render(time) {
  time *= 0.001;  // seconds
  
  twgl.resizeCanvasToDisplaySize(gl.canvas);
  
  gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
  gl.enable(gl.DEPTH_TEST);
  gl.enable(gl.CULL_FACE);

  const fov = Math.PI * 0.25;
  const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
  const near = 0.1;
  const far = 20;
  const projection = m4.perspective(fov, aspect, near, far);
  
  const eye = [0, 0, 10];
  const target = [0, 0, 0];
  const up = [0, 1, 0];
  const camera = m4.lookAt(eye, target, up);
  const view = m4.inverse(camera);

  // set the matrix for each model in the texture data
  const mat = m4.identity();
  for (let modelId = 0; modelId < numModels; ++modelId) {
    const t = time * (modelId + 1) * 0.3;
    m4.identity(mat);
    m4.rotateX(mat, t, mat);
    m4.rotateY(mat, t, mat);
    m4.translate(mat, [0, 0, Math.sin(t * 1.1) * 4], mat);
    m4.rotateZ(mat, t, mat);
    
    perModelData.set(mat, modelId * stride + modelOffset);
  }
  
  // upload the texture data
  gl.bindTexture(gl.TEXTURE_2D, tex);
  gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, textureWidth, numModels, 
                   gl.RGBA, gl.FLOAT, perModelData);
  
  gl.useProgram(programInfo.program);
  
  // calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
  twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
  
  // calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
  twgl.setUniforms(programInfo, {
    lightDirection: v3.normalize([1, 2, 3]),
    perModelDataTexture: tex,
    textureHeight: numModels,
    projection,
    view,
  });  
  
  // calls gl.drawArrays or gl.drawElements
  twgl.drawBufferInfo(gl, bufferInfo);

  requestAnimationFrame(render);
}
requestAnimationFrame(render);
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>

Here are 2000 models in one draw call:

https://jsfiddle.net/greggman/g2tcadho/
