Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

glBitmap rendering is fuzzy #3

Open
codefrau opened this issue Oct 2, 2024 · 0 comments
Open

glBitmap rendering is fuzzy #3

codefrau opened this issue Oct 2, 2024 · 0 comments

Comments

@codefrau
Copy link
Owner

codefrau commented Oct 2, 2024

The orange status text (enabled via the TeapotMorph's red-halo-handle menu) is almost unreadable:
image

Each character is drawn using glBitmap(). WebGL does not have bitmap rendering, so I emulated it using a texture, a vertex/fragment shader, and drawing a quad. This is the implementation that needs fixing/reimplementing:

jasmine/jasmine-opengl.js

Lines 292 to 447 in 7b134fa

glBitmap: function(width, height, xorig, yorig, xmove, ymove, bitmap) {
// bitmap is supposed to be declared as "GLubyte*" per OpenGL spec,
// which the FFI would convert to Uint8Array for us. However, the
// image FFI declaration uses "void*", probably because it makes no
// difference in C, a pointer is a pointer. In JS, we get an
// ArrayBuffer for "void*" so we need to convert it to Uint8Array
// ourselves.
if (!bitmap.buffer) bitmap = new Uint8Array(bitmap);
if (gl.listMode && this.addToList("glBitmap", [width, height, xorig, yorig, xmove, ymove, bitmap])) return;
DEBUG > 1 && console.log("glBitmap", width, height, xorig, yorig, xmove, ymove, bitmap);
if (width > 0 && height > 0) {
// we need to convert the 1-bit deep bitmap to a 1-byte
// per pixel texture in ALPHA format, with the bitmap
// mapping 0-bits to transparent, 1-bits to opaque,
// and then draw it as a textured quad covering the viewport
var texels = new Uint8Array(width * height);
var bytesPerRow = Math.ceil(width / 32) * 4;
for (var y = 0; y < height; y++) {
var byteIndex = y * bytesPerRow;
var bitIndex = 7;
for (var x = 0; x < width; x++) {
var bit = bitmap[byteIndex] & (1 << bitIndex);
if (bit) texels[y * width + x] = 255;
bitIndex--;
if (bitIndex < 0) {
byteIndex++;
bitIndex = 7;
}
}
}
// debug: print bitmap
// s=''; for (y = height -1 ; y >= 0; y--) { for (x = 0; x < width; x++) s += texels[y * width + x] ? '⬛️' : '⬜️'; s+='\n'}; console.log(s)
var texture = gl.bitmapTexture;
if (!texture) {
texture = gl.bitmapTexture = webgl.createTexture();
webgl.bindTexture(webgl.TEXTURE_2D, texture);
webgl.texParameteri(webgl.TEXTURE_2D, webgl.TEXTURE_MIN_FILTER, webgl.LINEAR);
webgl.texParameteri(webgl.TEXTURE_2D, webgl.TEXTURE_MAG_FILTER, webgl.LINEAR);
webgl.texParameteri(webgl.TEXTURE_2D, webgl.TEXTURE_WRAP_S, webgl.CLAMP_TO_EDGE);
webgl.texParameteri(webgl.TEXTURE_2D, webgl.TEXTURE_WRAP_T, webgl.CLAMP_TO_EDGE);
} else {
webgl.bindTexture(webgl.TEXTURE_2D, texture);
}
webgl.pixelStorei(webgl.UNPACK_ALIGNMENT, 1);
webgl.texImage2D(webgl.TEXTURE_2D, 0, webgl.ALPHA, width, height, 0, webgl.ALPHA, webgl.UNSIGNED_BYTE, texels);
webgl.pixelStorei(webgl.UNPACK_ALIGNMENT, 4);
webgl.disable(webgl.CULL_FACE);
webgl.disable(webgl.DEPTH_TEST);
webgl.disable(webgl.BLEND);
webgl.colorMask(true, true, true, true);
webgl.viewport(0, 0, webgl.drawingBufferWidth, webgl.drawingBufferHeight);
var vertexBuffer = gl.bitmapVertexBuffer;
if (!vertexBuffer) {
var vertices = new Float32Array([
0, 0,
1, 0,
0, 1,
1, 1,
]);
vertexBuffer = gl.bitmapVertexBuffer = webgl.createBuffer();
webgl.bindBuffer(webgl.ARRAY_BUFFER, vertexBuffer);
webgl.bufferData(webgl.ARRAY_BUFFER, vertices, webgl.STATIC_DRAW);
} else {
webgl.bindBuffer(webgl.ARRAY_BUFFER, vertexBuffer);
}
var shader = gl.bitmapShader;
if (!shader.program) {
shader.program = webgl.createProgram();
var vs = webgl.createShader(webgl.VERTEX_SHADER);
webgl.shaderSource(vs, `
attribute vec2 a_position;
uniform vec3 u_raster;
uniform vec2 u_rasterOffset;
uniform vec2 u_rasterScale;
uniform vec2 u_translate;
uniform vec2 u_scale;
varying vec2 v_texcoord;
void main() {
vec2 raster = u_raster.xy * u_rasterScale + u_rasterOffset;
vec2 pos = (a_position + raster) * u_scale + u_translate;
gl_Position = vec4(pos, u_raster.z, 1);
v_texcoord = a_position;
}
`);
webgl.compileShader(vs);
if (!webgl.getShaderParameter(vs, webgl.COMPILE_STATUS)) {
console.error("OpenGL: vertex shader compile error: " + webgl.getShaderInfoLog(vs));
debugger;
return;
}
var fs = webgl.createShader(webgl.FRAGMENT_SHADER);
webgl.shaderSource(fs, `
precision mediump float;
uniform sampler2D u_texture;
uniform vec4 u_color;
varying vec2 v_texcoord;
void main() {
float alpha = texture2D(u_texture, v_texcoord).a;
if (alpha < 0.5) discard;
gl_FragColor = u_color;
}
`);
webgl.compileShader(fs);
if (!webgl.getShaderParameter(fs, webgl.COMPILE_STATUS)) {
console.error("OpenGL: fragment shader compile error: " + webgl.getShaderInfoLog(fs));
debugger;
return;
}
webgl.attachShader(shader.program, vs);
webgl.attachShader(shader.program, fs);
webgl.linkProgram(shader.program);
if (!webgl.getProgramParameter(shader.program, webgl.LINK_STATUS)) {
console.error("OpenGL: shader link error: " + webgl.getProgramInfoLog(shader.program));
debugger
return;
}
shader.locations = {
a_position: webgl.getAttribLocation(shader.program, "a_position"),
u_texture: webgl.getUniformLocation(shader.program, "u_texture"),
u_color: webgl.getUniformLocation(shader.program, "u_color"),
u_raster: webgl.getUniformLocation(shader.program, "u_raster"),
u_rasterOffset: webgl.getUniformLocation(shader.program, "u_rasterOffset"),
u_rasterScale: webgl.getUniformLocation(shader.program, "u_rasterScale"),
u_translate: webgl.getUniformLocation(shader.program, "u_translate"),
u_scale: webgl.getUniformLocation(shader.program, "u_scale"),
};
}
webgl.useProgram(shader.program);
webgl.enableVertexAttribArray(shader.locations.a_position);
webgl.vertexAttribPointer(shader.locations.a_position, 2, webgl.FLOAT, false, 0, 0);
webgl.uniform1i(shader.locations.u_texture, 0);
webgl.uniform4fv(shader.locations.u_color, gl.rasterColor);
// these seem to work for 640x480... I can't figure out the right transform yet
if (!this.bitmapScale) this.bitmapScale = [0.0311, 0.0419];
if (!this.bitmapTranslate) this.bitmapTranslate = [-1, -1];
if (!this.bitmapRasterOffset) this.bitmapRasterOffset = [0, 0];
if (!this.bitmapRasterScale) this.bitmapRasterScale = [0.1, 0.1];
// these properties allow intereactive debugging
webgl.uniform3f(shader.locations.u_raster, gl.rasterPos[0] + xorig, gl.rasterPos[1] + yorig, gl.rasterPos[2]);
webgl.uniform2fv(shader.locations.u_rasterOffset, this.bitmapRasterOffset);
webgl.uniform2fv(shader.locations.u_rasterScale, this.bitmapRasterScale);
webgl.uniform2fv(shader.locations.u_translate, this.bitmapTranslate);
webgl.uniform2fv(shader.locations.u_scale, this.bitmapScale);
webgl.drawArrays(webgl.TRIANGLE_STRIP, 0, 4);
webgl.disableVertexAttribArray(shader.locations.a_position);
webgl.bindBuffer(webgl.ARRAY_BUFFER, null);
webgl.useProgram(null);
webgl.bindTexture(webgl.TEXTURE_2D, null);
webgl.enable(webgl.CULL_FACE);
webgl.enable(webgl.DEPTH_TEST);
webgl.enable(webgl.BLEND);
}
gl.rasterPos[0] += xmove;
gl.rasterPos[1] += ymove;
},

However, you'll notice the Morphic overlay in the screenshot above looks fine. It uses primitiveCompositeTexture rather than glBitmap().

b3dxCompositeTexture: function(texture, x, y, w, h, translucent) {
// Composite a 2D texture (e.g. the Morphic overlay) over the current frame
// as a screen-aligned quad positioned in window pixel coordinates, with
// optional alpha blending. Returns false if `texture` is not a live texture
// name, true on success.
DEBUG > 1 && console.log("B3DAccel: b3dxCompositeTexture", texture, x, y, w, h, translucent);
if (!OpenGL.glIsTexture(texture)) return false;
// Save both matrix stacks and build a projection that maps window pixel
// coordinates (origin at top-left, y pointing down — note the negated y
// scale) onto normalized device coordinates.
OpenGL.glMatrixMode(GL.MODELVIEW);
OpenGL.glPushMatrix();
OpenGL.glLoadIdentity();
OpenGL.glMatrixMode(GL.PROJECTION);
OpenGL.glPushMatrix();
OpenGL.glLoadIdentity();
var width = currentRenderer.viewport.w;
var height = currentRenderer.viewport.h;
OpenGL.glViewport(0, 0, width, height);
OpenGL.glScaled(2.0/width, -2.0/height, 1.0);
OpenGL.glTranslated(width*-0.5, height*-0.5, 0.0);
//We haven't implemented glPushAttrib and glPopAttrib yet
//OpenGL.glPushAttrib(GL.ALL_ATTRIB_BITS);
// OpenGL.glShadeModel(GL.FLAT); // not implemented
// Set up flat, unlit, undepth-tested texturing so the quad is copied to the
// screen exactly as-is (state is re-enabled by hand below).
OpenGL.glEnable(GL.TEXTURE_2D);
// OpenGL.glDisable(GL.COLOR_MATERIAL); // not implemented
// OpenGL.glDisable(GL.DITHER); //
OpenGL.glDisable(GL.LIGHTING);
OpenGL.glDisable(GL.DEPTH_TEST);
OpenGL.glDisable(GL.BLEND);
OpenGL.glDisable(GL.CULL_FACE);
OpenGL.glDepthMask(GL.FALSE);
// white modulate color, so texel colors pass through unchanged
OpenGL.glColor4d(1.0, 1.0, 1.0, 1.0);
if (translucent) {
OpenGL.glEnable(GL.BLEND);
OpenGL.glBlendFunc(GL.SRC_ALPHA, GL.ONE_MINUS_SRC_ALPHA);
}
// subtract top and left position of canvas
x -= currentRenderer.viewport.x;
y -= currentRenderer.viewport.y;
// Draw the texture as one quad covering the (x, y)-(x+w, y+h) pixel rect;
// texture coordinates span the full texture.
OpenGL.glBindTexture(GL.TEXTURE_2D, texture);
OpenGL.glBegin(GL.QUADS);
OpenGL.glTexCoord2d(0.0, 0.0);
OpenGL.glVertex2i(x, y);
OpenGL.glTexCoord2d(1.0, 0.0);
OpenGL.glVertex2i(x+w, y);
OpenGL.glTexCoord2d(1.0, 1.0);
OpenGL.glVertex2i(x+w, y+h);
OpenGL.glTexCoord2d(0.0, 1.0);
OpenGL.glVertex2i(x, y+h);
OpenGL.glEnd();
// instead of this ...
// OpenGL.glPopAttrib();
// we do this:
// NOTE(review): restores a fixed state set rather than the state that was
// actually active before the call — confirm the renderer relies on exactly
// these being enabled between frames.
OpenGL.glDepthMask(GL.TRUE);
OpenGL.glEnable(GL.DEPTH_TEST);
OpenGL.glEnable(GL.CULL_FACE);
OpenGL.glDisable(GL.BLEND);
// OpenGL.glEnable(GL.COLOR_MATERIAL); // not implemented
// OpenGL.glEnable(GL.DITHER); // not implemented
OpenGL.glDisable(GL.TEXTURE_2D);
// OpenGL.glShadeModel(GL.SMOOTH); // not implemented
// restore both matrix stacks (PROJECTION is current here, MODELVIEW after)
OpenGL.glPopMatrix();
OpenGL.glMatrixMode(GL.MODELVIEW);
OpenGL.glPopMatrix();
return true;
},

These implementations are very different, which may be the reason why one looks so much better than the other.

It's clear that in glBitmap() the rasterPos math is wrong (e.g. the constant for bitmapScale, which is hand-tuned for 640x480 and really needs to be calculated from the screen size and the bitmap dimensions). It might be best to scrap my glBitmap() altogether and start over from scratch.

Any ideas / code improvements welcome.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

1 participant