From 2fa995afde7af07f652427256a0147381e313ca2 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Fri, 23 May 2025 16:01:49 -0400 Subject: [PATCH 01/69] Factor out a base 3D renderer --- src/core/p5.Renderer3D.js | 1370 ++++++++++++++++++++++++++++++ src/webgl/p5.RendererGL.js | 1380 +------------------------------ src/webgpu/p5.RendererWebGPU.js | 27 + 3 files changed, 1432 insertions(+), 1345 deletions(-) create mode 100644 src/core/p5.Renderer3D.js create mode 100644 src/webgpu/p5.RendererWebGPU.js diff --git a/src/core/p5.Renderer3D.js b/src/core/p5.Renderer3D.js new file mode 100644 index 0000000000..b9e275c977 --- /dev/null +++ b/src/core/p5.Renderer3D.js @@ -0,0 +1,1370 @@ +import * as constants from "../core/constants"; +import { Renderer } from './p5.Renderer'; +import GeometryBuilder from "../webgl/GeometryBuilder"; +import { Matrix } from "../math/p5.Matrix"; +import { Camera } from "../webgl/p5.Camera"; +import { Vector } from "../math/p5.Vector"; +import { ShapeBuilder } from "../webgl/ShapeBuilder"; +import { GeometryBufferCache } from "../webgl/GeometryBufferCache"; +import { filterParamDefaults } from "../image/const"; +import { PrimitiveToVerticesConverter } from "../shape/custom_shapes"; +import { Color } from "../color/p5.Color"; +import { Element } from "../dom/p5.Element"; +import { Framebuffer } from "../webgl/p5.Framebuffer"; + +export const STROKE_CAP_ENUM = {}; +export const STROKE_JOIN_ENUM = {}; +export let lineDefs = ""; +const defineStrokeCapEnum = function (key, val) { + lineDefs += `#define STROKE_CAP_${key} ${val}\n`; + STROKE_CAP_ENUM[constants[key]] = val; +}; +const defineStrokeJoinEnum = function (key, val) { + lineDefs += `#define STROKE_JOIN_${key} ${val}\n`; + STROKE_JOIN_ENUM[constants[key]] = val; +}; + +// Define constants in line shaders for each type of cap/join, and also record +// the values in JS objects +defineStrokeCapEnum("ROUND", 0); +defineStrokeCapEnum("PROJECT", 1); +defineStrokeCapEnum("SQUARE", 2); +defineStrokeJoinEnum("ROUND", 0); +defineStrokeJoinEnum("MITER", 1); +defineStrokeJoinEnum("BEVEL", 2); + +export class Renderer3D extends Renderer { + constructor(pInst, w, h, isMainCanvas, elt) { + super(pInst, w, h, isMainCanvas); + + // Create new canvas + this.canvas = this.elt = elt || document.createElement("canvas"); + this.setupContext(); + + if (this._isMainCanvas) { + // for pixel method sharing with pimage + this._pInst._curElement = this; + this._pInst.canvas = this.canvas; + } else { + // hide if offscreen buffer by default + this.canvas.style.display = "none"; + } + this.elt.id = "defaultCanvas0"; + this.elt.classList.add("p5Canvas"); + + // Set and return p5.Element + this.wrappedElt = new Element(this.elt, this._pInst); + + // Extend renderer with methods of p5.Element with getters + for (const p of Object.getOwnPropertyNames(Element.prototype)) { + if (p !== 'constructor' && p[0] !== '_') { + Object.defineProperty(this, p, { + get() { + return this.wrappedElt[p]; + } + }) + } + } + + const dimensions = this._adjustDimensions(w, h); + w = dimensions.adjustedWidth; + h = dimensions.adjustedHeight; + + this.width = w; + this.height = h; + + // Set canvas size + this.elt.width = w * this._pixelDensity; + this.elt.height = h * this._pixelDensity; + this.elt.style.width = `${w}px`; + this.elt.style.height = `${h}px`; + this._origViewport = { + width: this.GL.drawingBufferWidth, + height: this.GL.drawingBufferHeight, + }; + this.viewport(this._origViewport.width, this._origViewport.height); + + // Attach canvas element to DOM + if 
(this._pInst._userNode) { + // user input node case + this._pInst._userNode.appendChild(this.elt); + } else { + //create main element + if (document.getElementsByTagName("main").length === 0) { + let m = document.createElement("main"); + document.body.appendChild(m); + } + //append canvas to main + document.getElementsByTagName("main")[0].appendChild(this.elt); + } + + this.isP3D = true; //lets us know we're in 3d mode + + // When constructing a new Geometry, this will represent the builder + this.geometryBuilder = undefined; + + // Push/pop state + this.states.uModelMatrix = new Matrix(4); + this.states.uViewMatrix = new Matrix(4); + this.states.uPMatrix = new Matrix(4); + + this.states.curCamera = new Camera(this); + this.states.uPMatrix.set(this.states.curCamera.projMatrix); + this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); + + this.states.enableLighting = false; + this.states.ambientLightColors = []; + this.states.specularColors = [1, 1, 1]; + this.states.directionalLightDirections = []; + this.states.directionalLightDiffuseColors = []; + this.states.directionalLightSpecularColors = []; + this.states.pointLightPositions = []; + this.states.pointLightDiffuseColors = []; + this.states.pointLightSpecularColors = []; + this.states.spotLightPositions = []; + this.states.spotLightDirections = []; + this.states.spotLightDiffuseColors = []; + this.states.spotLightSpecularColors = []; + this.states.spotLightAngle = []; + this.states.spotLightConc = []; + this.states.activeImageLight = null; + + this.states.curFillColor = [1, 1, 1, 1]; + this.states.curAmbientColor = [1, 1, 1, 1]; + this.states.curSpecularColor = [0, 0, 0, 0]; + this.states.curEmissiveColor = [0, 0, 0, 0]; + this.states.curStrokeColor = [0, 0, 0, 1]; + + this.states.curBlendMode = constants.BLEND; + + this.states._hasSetAmbient = false; + this.states._useSpecularMaterial = false; + this.states._useEmissiveMaterial = false; + this.states._useNormalMaterial = false; + this.states._useShininess = 1; + this.states._useMetalness = 0; + + this.states.tint = [255, 255, 255, 255]; + + this.states.constantAttenuation = 1; + this.states.linearAttenuation = 0; + this.states.quadraticAttenuation = 0; + + this.states._currentNormal = new Vector(0, 0, 1); + + this.states.drawMode = constants.FILL; + + this.states._tex = null; + this.states.textureMode = constants.IMAGE; + this.states.textureWrapX = constants.CLAMP; + this.states.textureWrapY = constants.CLAMP; + + // erasing + this._isErasing = false; + + // simple lines + this._simpleLines = false; + + // clipping + this._clipDepths = []; + this._isClipApplied = false; + this._stencilTestOn = false; + + this.mixedAmbientLight = []; + this.mixedSpecularColor = []; + + // p5.framebuffer for this are calculated in getDiffusedTexture function + this.diffusedTextures = new Map(); + // p5.framebuffer for this are calculated in getSpecularTexture function + this.specularTextures = new Map(); + + this.preEraseBlend = undefined; + this._cachedBlendMode = undefined; + this._cachedFillStyle = [1, 1, 1, 1]; + this._cachedStrokeStyle = [0, 0, 0, 1]; + this._isBlending = false; + + this._useLineColor = false; + this._useVertexColor = false; + + this.registerEnabled = new Set(); + + // Camera + this.states.curCamera._computeCameraDefaultSettings(); + this.states.curCamera._setDefaultCamera(); + + // FilterCamera + this.filterCamera = new Camera(this); + this.filterCamera._computeCameraDefaultSettings(); + this.filterCamera._setDefaultCamera(); + // Information about the previous frame's 
touch object + // for executing orbitControl() + this.prevTouches = []; + // Velocity variable for use with orbitControl() + this.zoomVelocity = 0; + this.rotateVelocity = new Vector(0, 0); + this.moveVelocity = new Vector(0, 0); + // Flags for recording the state of zooming, rotation and moving + this.executeZoom = false; + this.executeRotateAndMove = false; + + this._drawingFilter = false; + this._drawingImage = false; + + this.specularShader = undefined; + this.sphereMapping = undefined; + this.diffusedShader = undefined; + this._baseFilterShader = undefined; + this._defaultLightShader = undefined; + this._defaultImmediateModeShader = undefined; + this._defaultNormalShader = undefined; + this._defaultColorShader = undefined; + this._defaultPointShader = undefined; + + this.states.userFillShader = undefined; + this.states.userStrokeShader = undefined; + this.states.userPointShader = undefined; + this.states.userImageShader = undefined; + + this.states.curveDetail = 1 / 4; + + // Used by beginShape/endShape functions to construct a p5.Geometry + this.shapeBuilder = new ShapeBuilder(this); + + this.geometryBufferCache = new GeometryBufferCache(this); + + this.curStrokeCap = constants.ROUND; + this.curStrokeJoin = constants.ROUND; + + // map of texture sources to textures created in this gl context via this.getTexture(src) + this.textures = new Map(); + + // set of framebuffers in use + this.framebuffers = new Set(); + // stack of active framebuffers + this.activeFramebuffers = []; + + // for post processing step + this.states.filterShader = undefined; + this.filterLayer = undefined; + this.filterLayerTemp = undefined; + this.defaultFilterShaders = {}; + + this.fontInfos = {}; + + this._curShader = undefined; + this.drawShapeCount = 1; + + this.scratchMat3 = new Matrix(3); + } + + remove() { + this.wrappedElt.remove(); + this.wrappedElt = null; + this.canvas = null; + this.elt = null; + } + + ////////////////////////////////////////////// + // Geometry Building + ////////////////////////////////////////////// + + /** + * Starts creating a new p5.Geometry. Subsequent shapes drawn will be added + * to the geometry and then returned when + * endGeometry() is called. One can also use + * buildGeometry() to pass a function that + * draws shapes. + * + * If you need to draw complex shapes every frame which don't change over time, + * combining them upfront with `beginGeometry()` and `endGeometry()` and then + * drawing that will run faster than repeatedly drawing the individual pieces. + * @private + */ + beginGeometry() { + if (this.geometryBuilder) { + throw new Error( + "It looks like `beginGeometry()` is being called while another p5.Geometry is already being build." + ); + } + this.geometryBuilder = new GeometryBuilder(this); + this.geometryBuilder.prevFillColor = this.states.fillColor; + this.fill(new Color([-1, -1, -1, -1])); + } + + /** + * Finishes creating a new p5.Geometry that was + * started using beginGeometry(). One can also + * use buildGeometry() to pass a function that + * draws shapes. + * @private + * + * @returns {p5.Geometry} The model that was built. + */ + endGeometry() { + if (!this.geometryBuilder) { + throw new Error( + "Make sure you call beginGeometry() before endGeometry()!" + ); + } + const geometry = this.geometryBuilder.finish(); + this.fill(this.geometryBuilder.prevFillColor); + this.geometryBuilder = undefined; + return geometry; + } + + /** + * Creates a new p5.Geometry that contains all + * the shapes drawn in a provided callback function. 
The returned combined shape + * can then be drawn all at once using model(). + * + * If you need to draw complex shapes every frame which don't change over time, + * combining them with `buildGeometry()` once and then drawing that will run + * faster than repeatedly drawing the individual pieces. + * + * One can also draw shapes directly between + * beginGeometry() and + * endGeometry() instead of using a callback + * function. + * @param {Function} callback A function that draws shapes. + * @returns {p5.Geometry} The model that was built from the callback function. + */ + buildGeometry(callback) { + this.beginGeometry(); + callback(); + return this.endGeometry(); + } + + ////////////////////////////////////////////// + // Shape drawing + ////////////////////////////////////////////// + + beginShape(...args) { + super.beginShape(...args); + // TODO remove when shape refactor is complete + // this.shapeBuilder.beginShape(...args); + } + + curveDetail(d) { + if (d === undefined) { + return this.states.curveDetail; + } else { + this.states.setValue("curveDetail", d); + } + } + + drawShape(shape) { + const visitor = new PrimitiveToVerticesConverter({ + curveDetail: this.states.curveDetail, + }); + shape.accept(visitor); + this.shapeBuilder.constructFromContours(shape, visitor.contours); + + if (this.geometryBuilder) { + this.geometryBuilder.addImmediate( + this.shapeBuilder.geometry, + this.shapeBuilder.shapeMode + ); + } else if (this.states.fillColor || this.states.strokeColor) { + if (this.shapeBuilder.shapeMode === constants.POINTS) { + this._drawPoints( + this.shapeBuilder.geometry.vertices, + this.buffers.point + ); + } else { + this._drawGeometry(this.shapeBuilder.geometry, { + mode: this.shapeBuilder.shapeMode, + count: this.drawShapeCount, + }); + } + } + this.drawShapeCount = 1; + } + + endShape(mode, count) { + this.drawShapeCount = count; + super.endShape(mode, count); + } + + vertexProperty(...args) { + this.currentShape.vertexProperty(...args); + } + + normal(xorv, y, z) { + if (xorv instanceof Vector) { + this.states.setValue("_currentNormal", xorv); + } else { + this.states.setValue("_currentNormal", new Vector(xorv, y, z)); + } + this.updateShapeVertexProperties(); + } + + model(model, count = 1) { + if (model.vertices.length > 0) { + if (this.geometryBuilder) { + this.geometryBuilder.addRetained(model); + } else { + if (!this.geometryInHash(model.gid)) { + model._edgesToVertices(); + this._getOrMakeCachedBuffers(model); + } + + this._drawGeometry(model, { count }); + } + } + } + + ////////////////////////////////////////////// + // Rendering + ////////////////////////////////////////////// + + _drawGeometryScaled(model, scaleX, scaleY, scaleZ) { + let originalModelMatrix = this.states.uModelMatrix; + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + try { + this.states.uModelMatrix.scale(scaleX, scaleY, scaleZ); + + if (this.geometryBuilder) { + this.geometryBuilder.addRetained(model); + } else { + this._drawGeometry(model); + } + } finally { + this.states.setValue("uModelMatrix", originalModelMatrix); + } + } + + _update() { + // reset model view and apply initial camera transform + // (containing only look at info; no projection). + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + this.states.uModelMatrix.reset(); + this.states.setValue("uViewMatrix", this.states.uViewMatrix.clone()); + this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); + + // reset light data for new frame. 
+ + this.states.setValue("ambientLightColors", []); + this.states.setValue("specularColors", [1, 1, 1]); + + this.states.setValue("directionalLightDirections", []); + this.states.setValue("directionalLightDiffuseColors", []); + this.states.setValue("directionalLightSpecularColors", []); + + this.states.setValue("pointLightPositions", []); + this.states.setValue("pointLightDiffuseColors", []); + this.states.setValue("pointLightSpecularColors", []); + + this.states.setValue("spotLightPositions", []); + this.states.setValue("spotLightDirections", []); + this.states.setValue("spotLightDiffuseColors", []); + this.states.setValue("spotLightSpecularColors", []); + this.states.setValue("spotLightAngle", []); + this.states.setValue("spotLightConc", []); + + this.states.setValue("enableLighting", false); + + //reset tint value for new frame + this.states.setValue("tint", [255, 255, 255, 255]); + + //Clear depth every frame + this._resetBuffersBeforeDraw() + } + + background(...args) { + const _col = this._pInst.color(...args); + this.clear(..._col._getRGBA()); + } + + ////////////////////////////////////////////// + // Positioning + ////////////////////////////////////////////// + + get uModelMatrix() { + return this.states.uModelMatrix; + } + + get uViewMatrix() { + return this.states.uViewMatrix; + } + + get uPMatrix() { + return this.states.uPMatrix; + } + + get uMVMatrix() { + const m = this.uModelMatrix.copy(); + m.mult(this.uViewMatrix); + return m; + } + + /** + * Get a matrix from world-space to screen-space + */ + getWorldToScreenMatrix() { + const modelMatrix = this.states.uModelMatrix; + const viewMatrix = this.states.uViewMatrix; + const projectionMatrix = this.states.uPMatrix; + const projectedToScreenMatrix = new Matrix(4); + projectedToScreenMatrix.scale(this.width, this.height, 1); + projectedToScreenMatrix.translate([0.5, 0.5, 0.5]); + projectedToScreenMatrix.scale(0.5, -0.5, 0.5); + + const modelViewMatrix = modelMatrix.copy().mult(viewMatrix); + const modelViewProjectionMatrix = modelViewMatrix.mult(projectionMatrix); + const worldToScreenMatrix = modelViewProjectionMatrix.mult(projectedToScreenMatrix); + return worldToScreenMatrix; + } + + ////////////////////////////////////////////// + // COLOR + ////////////////////////////////////////////// + /** + * Basic fill material for geometry with a given color + * @param {Number|Number[]|String|p5.Color} v1 gray value, + * red or hue value (depending on the current color mode), + * or color Array, or CSS color string + * @param {Number} [v2] green or saturation value + * @param {Number} [v3] blue or brightness value + * @param {Number} [a] opacity + * @chainable + * @example + *
+ * + * function setup() { + * createCanvas(200, 200, WEBGL); + * } + * + * function draw() { + * background(0); + * noStroke(); + * fill(100, 100, 240); + * rotateX(frameCount * 0.01); + * rotateY(frameCount * 0.01); + * box(75, 75, 75); + * } + * + *
+ * + * @alt + * black canvas with purple cube spinning + */ + fill(...args) { + super.fill(...args); + //see material.js for more info on color blending in webgl + // const color = fn.color.apply(this._pInst, arguments); + const color = this.states.fillColor; + this.states.setValue("curFillColor", color._array); + this.states.setValue("drawMode", constants.FILL); + this.states.setValue("_useNormalMaterial", false); + this.states.setValue("_tex", null); + } + + /** + * Basic stroke material for geometry with a given color + * @param {Number|Number[]|String|p5.Color} v1 gray value, + * red or hue value (depending on the current color mode), + * or color Array, or CSS color string + * @param {Number} [v2] green or saturation value + * @param {Number} [v3] blue or brightness value + * @param {Number} [a] opacity + * @example + *
+ * + * function setup() { + * createCanvas(200, 200, WEBGL); + * } + * + * function draw() { + * background(0); + * stroke(240, 150, 150); + * fill(100, 100, 240); + * rotateX(frameCount * 0.01); + * rotateY(frameCount * 0.01); + * box(75, 75, 75); + * } + * + *
+ * + * @alt + * black canvas with purple cube with pink outline spinning + */ + stroke(...args) { + super.stroke(...args); + // const color = fn.color.apply(this._pInst, arguments); + this.states.setValue("curStrokeColor", this.states.strokeColor._array); + } + + getCommonVertexProperties() { + return { + ...super.getCommonVertexProperties(), + stroke: this.states.strokeColor, + fill: this.states.fillColor, + normal: this.states._currentNormal, + }; + } + + getSupportedIndividualVertexProperties() { + return { + textureCoordinates: true, + }; + } + + strokeCap(cap) { + this.curStrokeCap = cap; + } + + strokeJoin(join) { + this.curStrokeJoin = join; + } + getFilterLayer() { + if (!this.filterLayer) { + this.filterLayer = new Framebuffer(this); + } + return this.filterLayer; + } + getFilterLayerTemp() { + if (!this.filterLayerTemp) { + this.filterLayerTemp = new Framebuffer(this); + } + return this.filterLayerTemp; + } + matchSize(fboToMatch, target) { + if ( + fboToMatch.width !== target.width || + fboToMatch.height !== target.height + ) { + fboToMatch.resize(target.width, target.height); + } + + if (fboToMatch.pixelDensity() !== target.pixelDensity()) { + fboToMatch.pixelDensity(target.pixelDensity()); + } + } + filter(...args) { + let fbo = this.getFilterLayer(); + + // use internal shader for filter constants BLUR, INVERT, etc + let filterParameter = undefined; + let operation = undefined; + if (typeof args[0] === "string") { + operation = args[0]; + let useDefaultParam = + operation in filterParamDefaults && args[1] === undefined; + filterParameter = useDefaultParam + ? filterParamDefaults[operation] + : args[1]; + + // Create and store shader for constants once on initial filter call. + // Need to store multiple in case user calls different filters, + // eg. filter(BLUR) then filter(GRAY) + if (!(operation in this.defaultFilterShaders)) { + this.defaultFilterShaders[operation] = this._makeFilterShader(fbo.renderer, operation); + } + this.states.setValue( + "filterShader", + this.defaultFilterShaders[operation] + ); + } + // use custom user-supplied shader + else { + this.states.setValue("filterShader", args[0]); + } + + // Setting the target to the framebuffer when applying a filter to a framebuffer. + + const target = this.activeFramebuffer() || this; + + // Resize the framebuffer 'fbo' and adjust its pixel density if it doesn't match the target. + this.matchSize(fbo, target); + + fbo.draw(() => this.clear()); // prevent undesirable feedback effects accumulating secretly. + + let texelSize = [ + 1 / (target.width * target.pixelDensity()), + 1 / (target.height * target.pixelDensity()), + ]; + + // apply blur shader with multiple passes. + if (operation === constants.BLUR) { + // Treating 'tmp' as a framebuffer. + const tmp = this.getFilterLayerTemp(); + // Resize the framebuffer 'tmp' and adjust its pixel density if it doesn't match the target. 
+ this.matchSize(tmp, target); + // setup + this.push(); + this.states.setValue("strokeColor", null); + this.blendMode(constants.BLEND); + + // draw main to temp buffer + this.shader(this.states.filterShader); + this.states.filterShader.setUniform("texelSize", texelSize); + this.states.filterShader.setUniform("canvasSize", [ + target.width, + target.height, + ]); + this.states.filterShader.setUniform( + "radius", + Math.max(1, filterParameter) + ); + + // Horiz pass: draw `target` to `tmp` + tmp.draw(() => { + this.states.filterShader.setUniform("direction", [1, 0]); + this.states.filterShader.setUniform("tex0", target); + this.clear(); + this.shader(this.states.filterShader); + this.noLights(); + this.plane(target.width, target.height); + }); + + // Vert pass: draw `tmp` to `fbo` + fbo.draw(() => { + this.states.filterShader.setUniform("direction", [0, 1]); + this.states.filterShader.setUniform("tex0", tmp); + this.clear(); + this.shader(this.states.filterShader); + this.noLights(); + this.plane(target.width, target.height); + }); + + this.pop(); + } + // every other non-blur shader uses single pass + else { + fbo.draw(() => { + this.states.setValue("strokeColor", null); + this.blendMode(constants.BLEND); + this.shader(this.states.filterShader); + this.states.filterShader.setUniform("tex0", target); + this.states.filterShader.setUniform("texelSize", texelSize); + this.states.filterShader.setUniform("canvasSize", [ + target.width, + target.height, + ]); + // filterParameter uniform only used for POSTERIZE, and THRESHOLD + // but shouldn't hurt to always set + this.states.filterShader.setUniform("filterParameter", filterParameter); + this.noLights(); + this.plane(target.width, target.height); + }); + } + // draw fbo contents onto main renderer. + this.push(); + this.states.setValue("strokeColor", null); + this.clear(); + this.push(); + this.states.setValue("imageMode", constants.CORNER); + this.blendMode(constants.BLEND); + target.filterCamera._resize(); + this.setCamera(target.filterCamera); + this.resetMatrix(); + this._drawingFilter = true; + this.image( + fbo, + 0, + 0, + this.width, + this.height, + -target.width / 2, + -target.height / 2, + target.width, + target.height + ); + this._drawingFilter = false; + this.clearDepth(); + this.pop(); + this.pop(); + } + + // Pass this off to the host instance so that we can treat a renderer and a + // framebuffer the same in filter() + + pixelDensity(newDensity) { + if (newDensity) { + return this._pInst.pixelDensity(newDensity); + } + return this._pInst.pixelDensity(); + } + + blendMode(mode) { + if ( + mode === constants.DARKEST || + mode === constants.LIGHTEST || + mode === constants.ADD || + mode === constants.BLEND || + mode === constants.SUBTRACT || + mode === constants.SCREEN || + mode === constants.EXCLUSION || + mode === constants.REPLACE || + mode === constants.MULTIPLY || + mode === constants.REMOVE + ) + this.states.setValue("curBlendMode", mode); + else if ( + mode === constants.BURN || + mode === constants.OVERLAY || + mode === constants.HARD_LIGHT || + mode === constants.SOFT_LIGHT || + mode === constants.DODGE + ) { + console.warn( + "BURN, OVERLAY, HARD_LIGHT, SOFT_LIGHT, and DODGE only work for blendMode in 2D mode." 
+ ); + } + } + + erase(opacityFill, opacityStroke) { + if (!this._isErasing) { + this.preEraseBlend = this.states.curBlendMode; + this._isErasing = true; + this.blendMode(constants.REMOVE); + this._cachedFillStyle = this.states.curFillColor.slice(); + this.states.setValue("curFillColor", [1, 1, 1, opacityFill / 255]); + this._cachedStrokeStyle = this.states.curStrokeColor.slice(); + this.states.setValue("curStrokeColor", [1, 1, 1, opacityStroke / 255]); + } + } + + noErase() { + if (this._isErasing) { + // Restore colors + this.states.setValue("curFillColor", this._cachedFillStyle.slice()); + this.states.setValue("curStrokeColor", this._cachedStrokeStyle.slice()); + // Restore blend mode + this.states.setValue("curBlendMode", this.preEraseBlend); + this.blendMode(this.preEraseBlend); + // Ensure that _applyBlendMode() sets preEraseBlend back to the original blend mode + this._isErasing = false; + this._applyBlendMode(); + } + } + + drawTarget() { + return this.activeFramebuffers[this.activeFramebuffers.length - 1] || this; + } + + beginClip(options = {}) { + super.beginClip(options); + + this.drawTarget()._isClipApplied = true; + + this._applyClip(); + + this.push(); + this.resetShader(); + if (this.states.fillColor) this.fill(0, 0); + if (this.states.strokeColor) this.stroke(0, 0); + } + + endClip() { + this.pop(); + + this._unapplyClip(); + + // Mark the depth at which the clip has been applied so that we can clear it + // when we pop past this depth + this._clipDepths.push(this._pushPopDepth); + + super.endClip(); + } + + _clearClip() { + this._clearClipBuffer(); + if (this._clipDepths.length > 0) { + this._clipDepths.pop(); + } + this.drawTarget()._isClipApplied = false; + } + + /** + * @private + * @returns {p5.Framebuffer} A p5.Framebuffer set to match the size and settings + * of the renderer's canvas. It will be created if it does not yet exist, and + * reused if it does. 
+ */ + _getTempFramebuffer() { + if (!this._tempFramebuffer) { + this._tempFramebuffer = new Framebuffer(this, { + format: constants.UNSIGNED_BYTE, + useDepth: this._pInst._glAttributes.depth, + depthFormat: constants.UNSIGNED_INT, + antialias: this._pInst._glAttributes.antialias, + }); + } + return this._tempFramebuffer; + } + + ////////////////////////////////////////////// + // HASH | for geometry + ////////////////////////////////////////////// + + geometryInHash(gid) { + return this.geometryBufferCache.isCached(gid); + } + + /** + * [resize description] + * @private + * @param {Number} w [description] + * @param {Number} h [description] + */ + resize(w, h) { + super.resize(w, h); + + // save canvas properties + const props = {}; + for (const key in this.drawingContext) { + const val = this.drawingContext[key]; + if (typeof val !== "object" && typeof val !== "function") { + props[key] = val; + } + } + + const dimensions = this._adjustDimensions(w, h); + w = dimensions.adjustedWidth; + h = dimensions.adjustedHeight; + + this.width = w; + this.height = h; + + this.canvas.width = w * this._pixelDensity; + this.canvas.height = h * this._pixelDensity; + this.canvas.style.width = `${w}px`; + this.canvas.style.height = `${h}px`; + this._updateViewport(); + + this.states.curCamera._resize(); + + //resize pixels buffer + if (typeof this.pixels !== "undefined") { + this._createPixelsArray(); + } + + for (const framebuffer of this.framebuffers) { + // Notify framebuffers of the resize so that any auto-sized framebuffers + // can also update their size + framebuffer._canvasSizeChanged(); + } + + // reset canvas properties + for (const savedKey in props) { + try { + this.drawingContext[savedKey] = props[savedKey]; + } catch (err) { + // ignore read-only property errors + } + } + } + + applyMatrix(a, b, c, d, e, f) { + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + if (arguments.length === 16) { + // this.states.uModelMatrix.apply(arguments); + Matrix.prototype.apply.apply(this.states.uModelMatrix, arguments); + } else { + this.states.uModelMatrix.apply([ + a, + b, + 0, + 0, + c, + d, + 0, + 0, + 0, + 0, + 1, + 0, + e, + f, + 0, + 1, + ]); + } + } + + /** + * [translate description] + * @private + * @param {Number} x [description] + * @param {Number} y [description] + * @param {Number} z [description] + * @chainable + * @todo implement handle for components or vector as args + */ + translate(x, y, z) { + if (x instanceof Vector) { + z = x.z; + y = x.y; + x = x.x; + } + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + this.states.uModelMatrix.translate([x, y, z]); + return this; + } + + /** + * Scales the Model View Matrix by a vector + * @private + * @param {Number | p5.Vector | Array} x [description] + * @param {Number} [y] y-axis scalar + * @param {Number} [z] z-axis scalar + * @chainable + */ + scale(x, y, z) { + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + this.states.uModelMatrix.scale(x, y, z); + return this; + } + + rotate(rad, axis) { + if (typeof axis === "undefined") { + return this.rotateZ(rad); + } + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + Matrix.prototype.rotate4x4.apply(this.states.uModelMatrix, arguments); + return this; + } + + rotateX(rad) { + this.rotate(rad, 1, 0, 0); + return this; + } + + rotateY(rad) { + this.rotate(rad, 0, 1, 0); + return this; + } + + rotateZ(rad) { + this.rotate(rad, 0, 0, 1); + return this; + } + + pop(...args) { + if ( + 
this._clipDepths.length > 0 && + this._pushPopDepth === this._clipDepths[this._clipDepths.length - 1] + ) { + this._clearClip(); + if (!this._userEnabledStencil) { + this._internalDisable.call(this.GL, this.GL.STENCIL_TEST); + } + + // Reset saved state + // this._userEnabledStencil = this._savedStencilTestState; + } + super.pop(...args); + this._applyStencilTestIfClipping(); + } + + resetMatrix() { + this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); + this.states.uModelMatrix.reset(); + this.states.setValue("uViewMatrix", this.states.uViewMatrix.clone()); + this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); + return this; + } + + ////////////////////////////////////////////// + // SHADER + ////////////////////////////////////////////// + + _getStrokeShader() { + // select the stroke shader to use + const stroke = this.states.userStrokeShader; + if (stroke) { + return stroke; + } + return this._getLineShader(); + } + + /* + * This method will handle both image shaders and + * fill shaders, returning the appropriate shader + * depending on the current context (image or shape). + */ + _getFillShader() { + // If drawing an image, check for user-defined image shader and filters + if (this._drawingImage) { + // Use user-defined image shader if available and no filter is applied + if (this.states.userImageShader && !this._drawingFilter) { + return this.states.userImageShader; + } else { + return this._getLightShader(); // Fallback to light shader + } + } + // If user has defined a fill shader, return that + else if (this.states.userFillShader) { + return this.states.userFillShader; + } + // Use normal shader if normal material is active + else if (this.states._useNormalMaterial) { + return this._getNormalShader(); + } + // Use light shader if lighting or textures are enabled + else if (this.states.enableLighting || this.states._tex) { + return this._getLightShader(); + } + // Default to color shader if no other conditions are met + return this._getColorShader(); + } + + _getPointShader() { + // select the point shader to use + const point = this.states.userPointShader; + if (!point || !point.isPointShader()) { + return this._getPointShader(); + } + return point; + } + + baseMaterialShader() { + return this._getLightShader(); + } + + baseNormalShader() { + return this._getNormalShader(); + } + + baseColorShader() { + return this._getColorShader(); + } + + /** + * TODO(dave): un-private this when there is a way to actually override the + * shader used for points + * + * Get the shader used when drawing points with `point()`. + * + * You can call `pointShader().modify()` + * and change any of the following hooks: + * - `void beforeVertex`: Called at the start of the vertex shader. + * - `vec3 getLocalPosition`: Update the position of vertices before transforms are applied. It takes in `vec3 position` and must return a modified version. + * - `vec3 getWorldPosition`: Update the position of vertices after transforms are applied. It takes in `vec3 position` and pust return a modified version. + * - `float getPointSize`: Update the size of the point. It takes in `float size` and must return a modified version. + * - `void afterVertex`: Called at the end of the vertex shader. + * - `void beforeFragment`: Called at the start of the fragment shader. + * - `bool shouldDiscard`: Points are drawn inside a square, with the corners discarded in the fragment shader to create a circle. Use this to change this logic. 
It takes in a `bool willDiscard` and must return a modified version. + * - `vec4 getFinalColor`: Update the final color after mixing. It takes in a `vec4 color` and must return a modified version. + * - `void afterFragment`: Called at the end of the fragment shader. + * + * Call `pointShader().inspectHooks()` to see all the possible hooks and + * their default implementations. + * + * @returns {p5.Shader} The `point()` shader + * @private() + */ + pointShader() { + return this._getPointShader(); + } + + baseStrokeShader() { + return this._getLineShader(); + } + + /** + * @private + * @returns {p5.Framebuffer|null} The currently active framebuffer, or null if + * the main canvas is the current draw target. + */ + activeFramebuffer() { + return this.activeFramebuffers[this.activeFramebuffers.length - 1] || null; + } + + createFramebuffer(options) { + return new Framebuffer(this, options); + } + + _setGlobalUniforms(shader) { + const modelMatrix = this.states.uModelMatrix; + const viewMatrix = this.states.uViewMatrix; + const projectionMatrix = this.states.uPMatrix; + const modelViewMatrix = modelMatrix.copy().mult(viewMatrix); + + shader.setUniform( + "uPerspective", + this.states.curCamera.useLinePerspective ? 1 : 0 + ); + shader.setUniform("uViewMatrix", viewMatrix.mat4); + shader.setUniform("uProjectionMatrix", projectionMatrix.mat4); + shader.setUniform("uModelMatrix", modelMatrix.mat4); + shader.setUniform("uModelViewMatrix", modelViewMatrix.mat4); + if (shader.uniforms.uModelViewProjectionMatrix) { + const modelViewProjectionMatrix = modelViewMatrix.copy(); + modelViewProjectionMatrix.mult(projectionMatrix); + shader.setUniform( + "uModelViewProjectionMatrix", + modelViewProjectionMatrix.mat4 + ); + } + if (shader.uniforms.uNormalMatrix) { + this.scratchMat3.inverseTranspose4x4(modelViewMatrix); + shader.setUniform("uNormalMatrix", this.scratchMat3.mat3); + } + if (shader.uniforms.uModelNormalMatrix) { + this.scratchMat3.inverseTranspose4x4(this.states.uModelMatrix); + shader.setUniform("uModelNormalMatrix", this.scratchMat3.mat3); + } + if (shader.uniforms.uCameraNormalMatrix) { + this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); + shader.setUniform("uCameraNormalMatrix", this.scratchMat3.mat3); + } + if (shader.uniforms.uCameraRotation) { + this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); + shader.setUniform("uCameraRotation", this.scratchMat3.mat3); + } + shader.setUniform("uViewport", this._viewport); + } + + _setStrokeUniforms(strokeShader) { + // set the uniform values + strokeShader.setUniform("uSimpleLines", this._simpleLines); + strokeShader.setUniform("uUseLineColor", this._useLineColor); + strokeShader.setUniform("uMaterialColor", this.states.curStrokeColor); + strokeShader.setUniform("uStrokeWeight", this.states.strokeWeight); + strokeShader.setUniform("uStrokeCap", STROKE_CAP_ENUM[this.curStrokeCap]); + strokeShader.setUniform( + "uStrokeJoin", + STROKE_JOIN_ENUM[this.curStrokeJoin] + ); + } + + _setFillUniforms(fillShader) { + this.mixedSpecularColor = [...this.states.curSpecularColor]; + const empty = this._getEmptyTexture(); + + if (this.states._useMetalness > 0) { + this.mixedSpecularColor = this.mixedSpecularColor.map( + (mixedSpecularColor, index) => + this.states.curFillColor[index] * this.states._useMetalness + + mixedSpecularColor * (1 - this.states._useMetalness) + ); + } + + // TODO: optimize + fillShader.setUniform("uUseVertexColor", this._useVertexColor); + fillShader.setUniform("uMaterialColor", this.states.curFillColor); + 
fillShader.setUniform("isTexture", !!this.states._tex); + // We need to explicitly set uSampler back to an empty texture here. + // In general, we record the last set texture so we can re-apply it + // the next time a shader is used. However, the texture() function + // works differently and is global p5 state. If the p5 state has + // been cleared, we also need to clear the value in uSampler to match. + fillShader.setUniform("uSampler", this.states._tex || empty); + fillShader.setUniform("uTint", this.states.tint); + + fillShader.setUniform("uHasSetAmbient", this.states._hasSetAmbient); + fillShader.setUniform("uAmbientMatColor", this.states.curAmbientColor); + fillShader.setUniform("uSpecularMatColor", this.mixedSpecularColor); + fillShader.setUniform("uEmissiveMatColor", this.states.curEmissiveColor); + fillShader.setUniform("uSpecular", this.states._useSpecularMaterial); + fillShader.setUniform("uEmissive", this.states._useEmissiveMaterial); + fillShader.setUniform("uShininess", this.states._useShininess); + fillShader.setUniform("uMetallic", this.states._useMetalness); + + this._setImageLightUniforms(fillShader); + + fillShader.setUniform("uUseLighting", this.states.enableLighting); + + const pointLightCount = this.states.pointLightDiffuseColors.length / 3; + fillShader.setUniform("uPointLightCount", pointLightCount); + fillShader.setUniform( + "uPointLightLocation", + this.states.pointLightPositions + ); + fillShader.setUniform( + "uPointLightDiffuseColors", + this.states.pointLightDiffuseColors + ); + fillShader.setUniform( + "uPointLightSpecularColors", + this.states.pointLightSpecularColors + ); + + const directionalLightCount = + this.states.directionalLightDiffuseColors.length / 3; + fillShader.setUniform("uDirectionalLightCount", directionalLightCount); + fillShader.setUniform( + "uLightingDirection", + this.states.directionalLightDirections + ); + fillShader.setUniform( + "uDirectionalDiffuseColors", + this.states.directionalLightDiffuseColors + ); + fillShader.setUniform( + "uDirectionalSpecularColors", + this.states.directionalLightSpecularColors + ); + + // TODO: sum these here... 
+ const ambientLightCount = this.states.ambientLightColors.length / 3; + this.mixedAmbientLight = [...this.states.ambientLightColors]; + + if (this.states._useMetalness > 0) { + this.mixedAmbientLight = this.mixedAmbientLight.map((ambientColors) => { + let mixing = ambientColors - this.states._useMetalness; + return Math.max(0, mixing); + }); + } + fillShader.setUniform("uAmbientLightCount", ambientLightCount); + fillShader.setUniform("uAmbientColor", this.mixedAmbientLight); + + const spotLightCount = this.states.spotLightDiffuseColors.length / 3; + fillShader.setUniform("uSpotLightCount", spotLightCount); + fillShader.setUniform("uSpotLightAngle", this.states.spotLightAngle); + fillShader.setUniform("uSpotLightConc", this.states.spotLightConc); + fillShader.setUniform( + "uSpotLightDiffuseColors", + this.states.spotLightDiffuseColors + ); + fillShader.setUniform( + "uSpotLightSpecularColors", + this.states.spotLightSpecularColors + ); + fillShader.setUniform("uSpotLightLocation", this.states.spotLightPositions); + fillShader.setUniform( + "uSpotLightDirection", + this.states.spotLightDirections + ); + + fillShader.setUniform( + "uConstantAttenuation", + this.states.constantAttenuation + ); + fillShader.setUniform("uLinearAttenuation", this.states.linearAttenuation); + fillShader.setUniform( + "uQuadraticAttenuation", + this.states.quadraticAttenuation + ); + } + + // getting called from _setFillUniforms + _setImageLightUniforms(shader) { + //set uniform values + shader.setUniform("uUseImageLight", this.states.activeImageLight != null); + // true + if (this.states.activeImageLight) { + // this.states.activeImageLight has image as a key + // look up the texture from the diffusedTexture map + let diffusedLight = this.getDiffusedTexture(this.states.activeImageLight); + shader.setUniform("environmentMapDiffused", diffusedLight); + let specularLight = this.getSpecularTexture(this.states.activeImageLight); + + shader.setUniform("environmentMapSpecular", specularLight); + } + } + + _setPointUniforms(pointShader) { + // set the uniform values + pointShader.setUniform("uMaterialColor", this.states.curStrokeColor); + // @todo is there an instance where this isn't stroke weight? + // should be they be same var? 
+ pointShader.setUniform( + "uPointSize", + this.states.strokeWeight * this._pixelDensity + ); + } +} diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index 5e46d2d106..7614dc33af 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -1,9 +1,5 @@ import * as constants from "../core/constants"; -import GeometryBuilder from "./GeometryBuilder"; -import { Renderer } from "../core/p5.Renderer"; -import { Matrix } from "../math/p5.Matrix"; -import { Camera } from "./p5.Camera"; -import { Vector } from "../math/p5.Vector"; +import { Renderer3D, lineDefs } from "../core/p5.Renderer3D"; import { RenderBuffer } from "./p5.RenderBuffer"; import { DataArray } from "./p5.DataArray"; import { Shader } from "./p5.Shader"; @@ -12,9 +8,6 @@ import { Texture, MipmapTexture } from "./p5.Texture"; import { Framebuffer } from "./p5.Framebuffer"; import { Graphics } from "../core/p5.Graphics"; import { Element } from "../dom/p5.Element"; -import { ShapeBuilder } from "./ShapeBuilder"; -import { GeometryBufferCache } from "./GeometryBufferCache"; -import { filterParamDefaults } from "../image/const"; import filterBaseVert from "./shaders/filters/base.vert"; import lightingShader from "./shaders/lighting.glsl"; @@ -47,29 +40,6 @@ import filterOpaqueFrag from "./shaders/filters/opaque.frag"; import filterInvertFrag from "./shaders/filters/invert.frag"; import filterThresholdFrag from "./shaders/filters/threshold.frag"; import filterShaderVert from "./shaders/filters/default.vert"; -import { PrimitiveToVerticesConverter } from "../shape/custom_shapes"; -import { Color } from "../color/p5.Color"; - -const STROKE_CAP_ENUM = {}; -const STROKE_JOIN_ENUM = {}; -let lineDefs = ""; -const defineStrokeCapEnum = function (key, val) { - lineDefs += `#define STROKE_CAP_${key} ${val}\n`; - STROKE_CAP_ENUM[constants[key]] = val; -}; -const defineStrokeJoinEnum = function (key, val) { - lineDefs += `#define STROKE_JOIN_${key} ${val}\n`; - STROKE_JOIN_ENUM[constants[key]] = val; -}; - -// Define constants in line shaders for each type of cap/join, and also record -// the values in JS objects -defineStrokeCapEnum("ROUND", 0); -defineStrokeCapEnum("PROJECT", 1); -defineStrokeCapEnum("SQUARE", 2); -defineStrokeJoinEnum("ROUND", 0); -defineStrokeJoinEnum("MITER", 1); -defineStrokeJoinEnum("BEVEL", 2); const defaultShaders = { normalVert, @@ -116,212 +86,15 @@ const filterShaderFrags = { * @todo extend class to include public method for offscreen * rendering (FBO). 
*/ -class RendererGL extends Renderer { - constructor(pInst, w, h, isMainCanvas, elt, attr) { - super(pInst, w, h, isMainCanvas); - - // Create new canvas - this.canvas = this.elt = elt || document.createElement("canvas"); - this._setAttributeDefaults(pInst); - this._initContext(); - // This redundant property is useful in reminding you that you are - // interacting with WebGLRenderingContext, still worth considering future removal - this.GL = this.drawingContext; - - if (this._isMainCanvas) { - // for pixel method sharing with pimage - this._pInst._curElement = this; - this._pInst.canvas = this.canvas; - } else { - // hide if offscreen buffer by default - this.canvas.style.display = "none"; - } - this.elt.id = "defaultCanvas0"; - this.elt.classList.add("p5Canvas"); - - // Set and return p5.Element - this.wrappedElt = new Element(this.elt, this._pInst); - - // Extend renderer with methods of p5.Element with getters - for (const p of Object.getOwnPropertyNames(Element.prototype)) { - if (p !== 'constructor' && p[0] !== '_') { - Object.defineProperty(this, p, { - get() { - return this.wrappedElt[p]; - } - }) - } - } - - const dimensions = this._adjustDimensions(w, h); - w = dimensions.adjustedWidth; - h = dimensions.adjustedHeight; - - this.width = w; - this.height = h; - - // Set canvas size - this.elt.width = w * this._pixelDensity; - this.elt.height = h * this._pixelDensity; - this.elt.style.width = `${w}px`; - this.elt.style.height = `${h}px`; - this._origViewport = { - width: this.GL.drawingBufferWidth, - height: this.GL.drawingBufferHeight, - }; - this.viewport(this._origViewport.width, this._origViewport.height); - - // Attach canvas element to DOM - if (this._pInst._userNode) { - // user input node case - this._pInst._userNode.appendChild(this.elt); - } else { - //create main element - if (document.getElementsByTagName("main").length === 0) { - let m = document.createElement("main"); - document.body.appendChild(m); - } - //append canvas to main - document.getElementsByTagName("main")[0].appendChild(this.elt); - } +class RendererGL extends Renderer3D { + constructor(pInst, w, h, isMainCanvas, elt) { + super(pInst, w, h, isMainCanvas, elt); - this.isP3D = true; //lets us know we're in 3d mode - - // When constructing a new Geometry, this will represent the builder - this.geometryBuilder = undefined; - - // Push/pop state - this.states.uModelMatrix = new Matrix(4); - this.states.uViewMatrix = new Matrix(4); - this.states.uPMatrix = new Matrix(4); - - this.states.curCamera = new Camera(this); - this.states.uPMatrix.set(this.states.curCamera.projMatrix); - this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); - - this.states.enableLighting = false; - this.states.ambientLightColors = []; - this.states.specularColors = [1, 1, 1]; - this.states.directionalLightDirections = []; - this.states.directionalLightDiffuseColors = []; - this.states.directionalLightSpecularColors = []; - this.states.pointLightPositions = []; - this.states.pointLightDiffuseColors = []; - this.states.pointLightSpecularColors = []; - this.states.spotLightPositions = []; - this.states.spotLightDirections = []; - this.states.spotLightDiffuseColors = []; - this.states.spotLightSpecularColors = []; - this.states.spotLightAngle = []; - this.states.spotLightConc = []; - this.states.activeImageLight = null; - - this.states.curFillColor = [1, 1, 1, 1]; - this.states.curAmbientColor = [1, 1, 1, 1]; - this.states.curSpecularColor = [0, 0, 0, 0]; - this.states.curEmissiveColor = [0, 0, 0, 0]; - this.states.curStrokeColor 
= [0, 0, 0, 1]; - - this.states.curBlendMode = constants.BLEND; - - this.states._hasSetAmbient = false; - this.states._useSpecularMaterial = false; - this.states._useEmissiveMaterial = false; - this.states._useNormalMaterial = false; - this.states._useShininess = 1; - this.states._useMetalness = 0; - - this.states.tint = [255, 255, 255, 255]; - - this.states.constantAttenuation = 1; - this.states.linearAttenuation = 0; - this.states.quadraticAttenuation = 0; - - this.states._currentNormal = new Vector(0, 0, 1); - - this.states.drawMode = constants.FILL; - - this.states._tex = null; - this.states.textureMode = constants.IMAGE; - this.states.textureWrapX = constants.CLAMP; - this.states.textureWrapY = constants.CLAMP; - - // erasing - this._isErasing = false; - - // simple lines - this._simpleLines = false; - - // clipping - this._clipDepths = []; - this._isClipApplied = false; - this._stencilTestOn = false; - - this.mixedAmbientLight = []; - this.mixedSpecularColor = []; - - // p5.framebuffer for this are calculated in getDiffusedTexture function - this.diffusedTextures = new Map(); - // p5.framebuffer for this are calculated in getSpecularTexture function - this.specularTextures = new Map(); - - this.preEraseBlend = undefined; - this._cachedBlendMode = undefined; - this._cachedFillStyle = [1, 1, 1, 1]; - this._cachedStrokeStyle = [0, 0, 0, 1]; if (this.webglVersion === constants.WEBGL2) { this.blendExt = this.GL; } else { this.blendExt = this.GL.getExtension("EXT_blend_minmax"); } - this._isBlending = false; - - this._useLineColor = false; - this._useVertexColor = false; - - this.registerEnabled = new Set(); - - // Camera - this.states.curCamera._computeCameraDefaultSettings(); - this.states.curCamera._setDefaultCamera(); - - // FilterCamera - this.filterCamera = new Camera(this); - this.filterCamera._computeCameraDefaultSettings(); - this.filterCamera._setDefaultCamera(); - // Information about the previous frame's touch object - // for executing orbitControl() - this.prevTouches = []; - // Velocity variable for use with orbitControl() - this.zoomVelocity = 0; - this.rotateVelocity = new Vector(0, 0); - this.moveVelocity = new Vector(0, 0); - // Flags for recording the state of zooming, rotation and moving - this.executeZoom = false; - this.executeRotateAndMove = false; - - this._drawingFilter = false; - this._drawingImage = false; - - this.specularShader = undefined; - this.sphereMapping = undefined; - this.diffusedShader = undefined; - this._baseFilterShader = undefined; - this._defaultLightShader = undefined; - this._defaultImmediateModeShader = undefined; - this._defaultNormalShader = undefined; - this._defaultColorShader = undefined; - this._defaultPointShader = undefined; - - this.states.userFillShader = undefined; - this.states.userStrokeShader = undefined; - this.states.userPointShader = undefined; - this.states.userImageShader = undefined; - - this.states.curveDetail = 1 / 4; - - // Used by beginShape/endShape functions to construct a p5.Geometry - this.shapeBuilder = new ShapeBuilder(this); this.buffers = { fill: [ @@ -407,32 +180,6 @@ class RendererGL extends Renderer { user: [], }; - this.geometryBufferCache = new GeometryBufferCache(this); - - this.curStrokeCap = constants.ROUND; - this.curStrokeJoin = constants.ROUND; - - // map of texture sources to textures created in this gl context via this.getTexture(src) - this.textures = new Map(); - - // set of framebuffers in use - this.framebuffers = new Set(); - // stack of active framebuffers - this.activeFramebuffers = []; - - 
// for post processing step - this.states.filterShader = undefined; - this.filterLayer = undefined; - this.filterLayerTemp = undefined; - this.defaultFilterShaders = {}; - - this.fontInfos = {}; - - this._curShader = undefined; - this.drawShapeCount = 1; - - this.scratchMat3 = new Matrix(3); - this._userEnabledStencil = false; // Store original methods for internal use this._internalEnable = this.drawingContext.enable; @@ -457,160 +204,12 @@ class RendererGL extends Renderer { }; } - remove() { - this.wrappedElt.remove(); - this.wrappedElt = null; - this.canvas = null; - this.elt = null; - } - - ////////////////////////////////////////////// - // Geometry Building - ////////////////////////////////////////////// - - /** - * Starts creating a new p5.Geometry. Subsequent shapes drawn will be added - * to the geometry and then returned when - * endGeometry() is called. One can also use - * buildGeometry() to pass a function that - * draws shapes. - * - * If you need to draw complex shapes every frame which don't change over time, - * combining them upfront with `beginGeometry()` and `endGeometry()` and then - * drawing that will run faster than repeatedly drawing the individual pieces. - * @private - */ - beginGeometry() { - if (this.geometryBuilder) { - throw new Error( - "It looks like `beginGeometry()` is being called while another p5.Geometry is already being build." - ); - } - this.geometryBuilder = new GeometryBuilder(this); - this.geometryBuilder.prevFillColor = this.states.fillColor; - this.fill(new Color([-1, -1, -1, -1])); - } - - /** - * Finishes creating a new p5.Geometry that was - * started using beginGeometry(). One can also - * use buildGeometry() to pass a function that - * draws shapes. - * @private - * - * @returns {p5.Geometry} The model that was built. - */ - endGeometry() { - if (!this.geometryBuilder) { - throw new Error( - "Make sure you call beginGeometry() before endGeometry()!" - ); - } - const geometry = this.geometryBuilder.finish(); - this.fill(this.geometryBuilder.prevFillColor); - this.geometryBuilder = undefined; - return geometry; - } - - /** - * Creates a new p5.Geometry that contains all - * the shapes drawn in a provided callback function. The returned combined shape - * can then be drawn all at once using model(). - * - * If you need to draw complex shapes every frame which don't change over time, - * combining them with `buildGeometry()` once and then drawing that will run - * faster than repeatedly drawing the individual pieces. - * - * One can also draw shapes directly between - * beginGeometry() and - * endGeometry() instead of using a callback - * function. - * @param {Function} callback A function that draws shapes. - * @returns {p5.Geometry} The model that was built from the callback function. 
- */ - buildGeometry(callback) { - this.beginGeometry(); - callback(); - return this.endGeometry(); - } - - ////////////////////////////////////////////// - // Shape drawing - ////////////////////////////////////////////// - - beginShape(...args) { - super.beginShape(...args); - // TODO remove when shape refactor is complete - // this.shapeBuilder.beginShape(...args); - } - - curveDetail(d) { - if (d === undefined) { - return this.states.curveDetail; - } else { - this.states.setValue("curveDetail", d); - } - } - - drawShape(shape) { - const visitor = new PrimitiveToVerticesConverter({ - curveDetail: this.states.curveDetail, - }); - shape.accept(visitor); - this.shapeBuilder.constructFromContours(shape, visitor.contours); - - if (this.geometryBuilder) { - this.geometryBuilder.addImmediate( - this.shapeBuilder.geometry, - this.shapeBuilder.shapeMode - ); - } else if (this.states.fillColor || this.states.strokeColor) { - if (this.shapeBuilder.shapeMode === constants.POINTS) { - this._drawPoints( - this.shapeBuilder.geometry.vertices, - this.buffers.point - ); - } else { - this._drawGeometry(this.shapeBuilder.geometry, { - mode: this.shapeBuilder.shapeMode, - count: this.drawShapeCount, - }); - } - } - this.drawShapeCount = 1; - } - - endShape(mode, count) { - this.drawShapeCount = count; - super.endShape(mode, count); - } - - vertexProperty(...args) { - this.currentShape.vertexProperty(...args); - } - - normal(xorv, y, z) { - if (xorv instanceof Vector) { - this.states.setValue("_currentNormal", xorv); - } else { - this.states.setValue("_currentNormal", new Vector(xorv, y, z)); - } - this.updateShapeVertexProperties(); - } - - model(model, count = 1) { - if (model.vertices.length > 0) { - if (this.geometryBuilder) { - this.geometryBuilder.addRetained(model); - } else { - if (!this.geometryInHash(model.gid)) { - model._edgesToVertices(); - this._getOrMakeCachedBuffers(model); - } - - this._drawGeometry(model, { count }); - } - } + setupContext() { + this._setAttributeDefaults(this._pInst); + this._initContext(); + // This redundant property is useful in reminding you that you are + // interacting with WebGLRenderingContext, still worth considering future removal + this.GL = this.drawingContext; } ////////////////////////////////////////////// @@ -646,22 +245,6 @@ class RendererGL extends Renderer { this.buffers.user = []; } - _drawGeometryScaled(model, scaleX, scaleY, scaleZ) { - let originalModelMatrix = this.states.uModelMatrix; - this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); - try { - this.states.uModelMatrix.scale(scaleX, scaleY, scaleZ); - - if (this.geometryBuilder) { - this.geometryBuilder.addRetained(model); - } else { - this._drawGeometry(model); - } - } finally { - this.states.setValue("uModelMatrix", originalModelMatrix); - } - } - _drawFills(geometry, { count, mode } = {}) { this._useVertexColor = geometry.vertexColors.length > 0; @@ -1004,433 +587,15 @@ class RendererGL extends Renderer { } } - _update() { - // reset model view and apply initial camera transform - // (containing only look at info; no projection). - this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); - this.states.uModelMatrix.reset(); - this.states.setValue("uViewMatrix", this.states.uViewMatrix.clone()); - this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); - - // reset light data for new frame. 
- - this.states.setValue("ambientLightColors", []); - this.states.setValue("specularColors", [1, 1, 1]); - - this.states.setValue("directionalLightDirections", []); - this.states.setValue("directionalLightDiffuseColors", []); - this.states.setValue("directionalLightSpecularColors", []); - - this.states.setValue("pointLightPositions", []); - this.states.setValue("pointLightDiffuseColors", []); - this.states.setValue("pointLightSpecularColors", []); - - this.states.setValue("spotLightPositions", []); - this.states.setValue("spotLightDirections", []); - this.states.setValue("spotLightDiffuseColors", []); - this.states.setValue("spotLightSpecularColors", []); - this.states.setValue("spotLightAngle", []); - this.states.setValue("spotLightConc", []); - - this.states.setValue("enableLighting", false); - - //reset tint value for new frame - this.states.setValue("tint", [255, 255, 255, 255]); - - //Clear depth every frame + _resetBuffersBeforeDraw() { this.GL.clearStencil(0); this.GL.clear(this.GL.DEPTH_BUFFER_BIT | this.GL.STENCIL_BUFFER_BIT); if (!this._userEnabledStencil) { this._internalDisable.call(this.GL, this.GL.STENCIL_TEST); } - - } - - /** - * [background description] - */ - background(...args) { - const _col = this._pInst.color(...args); - this.clear(..._col._getRGBA()); - } - - ////////////////////////////////////////////// - // Positioning - ////////////////////////////////////////////// - - get uModelMatrix() { - return this.states.uModelMatrix; - } - - get uViewMatrix() { - return this.states.uViewMatrix; - } - - get uPMatrix() { - return this.states.uPMatrix; - } - - get uMVMatrix() { - const m = this.uModelMatrix.copy(); - m.mult(this.uViewMatrix); - return m; - } - - /** - * Get a matrix from world-space to screen-space - */ - getWorldToScreenMatrix() { - const modelMatrix = this.states.uModelMatrix; - const viewMatrix = this.states.uViewMatrix; - const projectionMatrix = this.states.uPMatrix; - const projectedToScreenMatrix = new Matrix(4); - projectedToScreenMatrix.scale(this.width, this.height, 1); - projectedToScreenMatrix.translate([0.5, 0.5, 0.5]); - projectedToScreenMatrix.scale(0.5, -0.5, 0.5); - - const modelViewMatrix = modelMatrix.copy().mult(viewMatrix); - const modelViewProjectionMatrix = modelViewMatrix.mult(projectionMatrix); - const worldToScreenMatrix = modelViewProjectionMatrix.mult(projectedToScreenMatrix); - return worldToScreenMatrix; - } - - ////////////////////////////////////////////// - // COLOR - ////////////////////////////////////////////// - /** - * Basic fill material for geometry with a given color - * @param {Number|Number[]|String|p5.Color} v1 gray value, - * red or hue value (depending on the current color mode), - * or color Array, or CSS color string - * @param {Number} [v2] green or saturation value - * @param {Number} [v3] blue or brightness value - * @param {Number} [a] opacity - * @chainable - * @example - *
- * - * function setup() { - * createCanvas(200, 200, WEBGL); - * } - * - * function draw() { - * background(0); - * noStroke(); - * fill(100, 100, 240); - * rotateX(frameCount * 0.01); - * rotateY(frameCount * 0.01); - * box(75, 75, 75); - * } - * - *
- * - * @alt - * black canvas with purple cube spinning - */ - fill(...args) { - super.fill(...args); - //see material.js for more info on color blending in webgl - // const color = fn.color.apply(this._pInst, arguments); - const color = this.states.fillColor; - this.states.setValue("curFillColor", color._array); - this.states.setValue("drawMode", constants.FILL); - this.states.setValue("_useNormalMaterial", false); - this.states.setValue("_tex", null); - } - - /** - * Basic stroke material for geometry with a given color - * @param {Number|Number[]|String|p5.Color} v1 gray value, - * red or hue value (depending on the current color mode), - * or color Array, or CSS color string - * @param {Number} [v2] green or saturation value - * @param {Number} [v3] blue or brightness value - * @param {Number} [a] opacity - * @example - *
- * - * function setup() { - * createCanvas(200, 200, WEBGL); - * } - * - * function draw() { - * background(0); - * stroke(240, 150, 150); - * fill(100, 100, 240); - * rotateX(frameCount * 0.01); - * rotateY(frameCount * 0.01); - * box(75, 75, 75); - * } - * - *
- * - * @alt - * black canvas with purple cube with pink outline spinning - */ - stroke(...args) { - super.stroke(...args); - // const color = fn.color.apply(this._pInst, arguments); - this.states.setValue("curStrokeColor", this.states.strokeColor._array); - } - - getCommonVertexProperties() { - return { - ...super.getCommonVertexProperties(), - stroke: this.states.strokeColor, - fill: this.states.fillColor, - normal: this.states._currentNormal, - }; - } - - getSupportedIndividualVertexProperties() { - return { - textureCoordinates: true, - }; - } - - strokeCap(cap) { - this.curStrokeCap = cap; - } - - strokeJoin(join) { - this.curStrokeJoin = join; - } - getFilterLayer() { - if (!this.filterLayer) { - this.filterLayer = new Framebuffer(this); - } - return this.filterLayer; } - getFilterLayerTemp() { - if (!this.filterLayerTemp) { - this.filterLayerTemp = new Framebuffer(this); - } - return this.filterLayerTemp; - } - matchSize(fboToMatch, target) { - if ( - fboToMatch.width !== target.width || - fboToMatch.height !== target.height - ) { - fboToMatch.resize(target.width, target.height); - } - - if (fboToMatch.pixelDensity() !== target.pixelDensity()) { - fboToMatch.pixelDensity(target.pixelDensity()); - } - } - filter(...args) { - let fbo = this.getFilterLayer(); - - // use internal shader for filter constants BLUR, INVERT, etc - let filterParameter = undefined; - let operation = undefined; - if (typeof args[0] === "string") { - operation = args[0]; - let useDefaultParam = - operation in filterParamDefaults && args[1] === undefined; - filterParameter = useDefaultParam - ? filterParamDefaults[operation] - : args[1]; - - // Create and store shader for constants once on initial filter call. - // Need to store multiple in case user calls different filters, - // eg. filter(BLUR) then filter(GRAY) - if (!(operation in this.defaultFilterShaders)) { - this.defaultFilterShaders[operation] = new Shader( - fbo.renderer, - filterShaderVert, - filterShaderFrags[operation] - ); - } - this.states.setValue( - "filterShader", - this.defaultFilterShaders[operation] - ); - } - // use custom user-supplied shader - else { - this.states.setValue("filterShader", args[0]); - } - - // Setting the target to the framebuffer when applying a filter to a framebuffer. - - const target = this.activeFramebuffer() || this; - - // Resize the framebuffer 'fbo' and adjust its pixel density if it doesn't match the target. - this.matchSize(fbo, target); - - fbo.draw(() => this.clear()); // prevent undesirable feedback effects accumulating secretly. - - let texelSize = [ - 1 / (target.width * target.pixelDensity()), - 1 / (target.height * target.pixelDensity()), - ]; - - // apply blur shader with multiple passes. - if (operation === constants.BLUR) { - // Treating 'tmp' as a framebuffer. - const tmp = this.getFilterLayerTemp(); - // Resize the framebuffer 'tmp' and adjust its pixel density if it doesn't match the target. 
- this.matchSize(tmp, target); - // setup - this.push(); - this.states.setValue("strokeColor", null); - this.blendMode(constants.BLEND); - - // draw main to temp buffer - this.shader(this.states.filterShader); - this.states.filterShader.setUniform("texelSize", texelSize); - this.states.filterShader.setUniform("canvasSize", [ - target.width, - target.height, - ]); - this.states.filterShader.setUniform( - "radius", - Math.max(1, filterParameter) - ); - - // Horiz pass: draw `target` to `tmp` - tmp.draw(() => { - this.states.filterShader.setUniform("direction", [1, 0]); - this.states.filterShader.setUniform("tex0", target); - this.clear(); - this.shader(this.states.filterShader); - this.noLights(); - this.plane(target.width, target.height); - }); - - // Vert pass: draw `tmp` to `fbo` - fbo.draw(() => { - this.states.filterShader.setUniform("direction", [0, 1]); - this.states.filterShader.setUniform("tex0", tmp); - this.clear(); - this.shader(this.states.filterShader); - this.noLights(); - this.plane(target.width, target.height); - }); - - this.pop(); - } - // every other non-blur shader uses single pass - else { - fbo.draw(() => { - this.states.setValue("strokeColor", null); - this.blendMode(constants.BLEND); - this.shader(this.states.filterShader); - this.states.filterShader.setUniform("tex0", target); - this.states.filterShader.setUniform("texelSize", texelSize); - this.states.filterShader.setUniform("canvasSize", [ - target.width, - target.height, - ]); - // filterParameter uniform only used for POSTERIZE, and THRESHOLD - // but shouldn't hurt to always set - this.states.filterShader.setUniform("filterParameter", filterParameter); - this.noLights(); - this.plane(target.width, target.height); - }); - } - // draw fbo contents onto main renderer. - this.push(); - this.states.setValue("strokeColor", null); - this.clear(); - this.push(); - this.states.setValue("imageMode", constants.CORNER); - this.blendMode(constants.BLEND); - target.filterCamera._resize(); - this.setCamera(target.filterCamera); - this.resetMatrix(); - this._drawingFilter = true; - this.image( - fbo, - 0, - 0, - this.width, - this.height, - -target.width / 2, - -target.height / 2, - target.width, - target.height - ); - this._drawingFilter = false; - this.clearDepth(); - this.pop(); - this.pop(); - } - - // Pass this off to the host instance so that we can treat a renderer and a - // framebuffer the same in filter() - - pixelDensity(newDensity) { - if (newDensity) { - return this._pInst.pixelDensity(newDensity); - } - return this._pInst.pixelDensity(); - } - - blendMode(mode) { - if ( - mode === constants.DARKEST || - mode === constants.LIGHTEST || - mode === constants.ADD || - mode === constants.BLEND || - mode === constants.SUBTRACT || - mode === constants.SCREEN || - mode === constants.EXCLUSION || - mode === constants.REPLACE || - mode === constants.MULTIPLY || - mode === constants.REMOVE - ) - this.states.setValue("curBlendMode", mode); - else if ( - mode === constants.BURN || - mode === constants.OVERLAY || - mode === constants.HARD_LIGHT || - mode === constants.SOFT_LIGHT || - mode === constants.DODGE - ) { - console.warn( - "BURN, OVERLAY, HARD_LIGHT, SOFT_LIGHT, and DODGE only work for blendMode in 2D mode." 
- ); - } - } - - erase(opacityFill, opacityStroke) { - if (!this._isErasing) { - this.preEraseBlend = this.states.curBlendMode; - this._isErasing = true; - this.blendMode(constants.REMOVE); - this._cachedFillStyle = this.states.curFillColor.slice(); - this.states.setValue("curFillColor", [1, 1, 1, opacityFill / 255]); - this._cachedStrokeStyle = this.states.curStrokeColor.slice(); - this.states.setValue("curStrokeColor", [1, 1, 1, opacityStroke / 255]); - } - } - - noErase() { - if (this._isErasing) { - // Restore colors - this.states.setValue("curFillColor", this._cachedFillStyle.slice()); - this.states.setValue("curStrokeColor", this._cachedStrokeStyle.slice()); - // Restore blend mode - this.states.setValue("curBlendMode", this.preEraseBlend); - this.blendMode(this.preEraseBlend); - // Ensure that _applyBlendMode() sets preEraseBlend back to the original blend mode - this._isErasing = false; - this._applyBlendMode(); - } - } - - drawTarget() { - return this.activeFramebuffers[this.activeFramebuffers.length - 1] || this; - } - - beginClip(options = {}) { - super.beginClip(options); - - this.drawTarget()._isClipApplied = true; + _applyClip() { const gl = this.GL; gl.clearStencil(0); gl.clear(gl.STENCIL_BUFFER_BIT); @@ -1447,16 +612,9 @@ class RendererGL extends Renderer { gl.REPLACE // what to do if both tests pass ); gl.disable(gl.DEPTH_TEST); - - this.push(); - this.resetShader(); - if (this.states.fillColor) this.fill(0, 0); - if (this.states.strokeColor) this.stroke(0, 0); } - endClip() { - this.pop(); - + _unapplyClip() { const gl = this.GL; gl.stencilOp( gl.KEEP, // what to do if the stencil test fails @@ -1469,21 +627,11 @@ class RendererGL extends Renderer { 0xff // mask ); gl.enable(gl.DEPTH_TEST); - - // Mark the depth at which the clip has been applied so that we can clear it - // when we pop past this depth - this._clipDepths.push(this._pushPopDepth); - - super.endClip(); } - _clearClip() { + _clearClipBuffer() { this.GL.clearStencil(1); this.GL.clear(this.GL.STENCIL_BUFFER_BIT); - if (this._clipDepths.length > 0) { - this._clipDepths.pop(); - } - this.drawTarget()._isClipApplied = false; } // x,y are canvas-relative (pre-scaled by _pixelDensity) @@ -1558,95 +706,25 @@ class RendererGL extends Renderer { this.GL.clear(this.GL.DEPTH_BUFFER_BIT); } - /** - * @private - * @returns {p5.Framebuffer} A p5.Framebuffer set to match the size and settings - * of the renderer's canvas. It will be created if it does not yet exist, and - * reused if it does. 
- */ - _getTempFramebuffer() { - if (!this._tempFramebuffer) { - this._tempFramebuffer = new Framebuffer(this, { - format: constants.UNSIGNED_BYTE, - useDepth: this._pInst._glAttributes.depth, - depthFormat: constants.UNSIGNED_INT, - antialias: this._pInst._glAttributes.antialias, - }); - } - return this._tempFramebuffer; - } - ////////////////////////////////////////////// - // HASH | for geometry - ////////////////////////////////////////////// - - geometryInHash(gid) { - return this.geometryBufferCache.isCached(gid); - } viewport(w, h) { this._viewport = [0, 0, w, h]; this.GL.viewport(0, 0, w, h); } - /** - * [resize description] - * @private - * @param {Number} w [description] - * @param {Number} h [description] - */ - resize(w, h) { - super.resize(w, h); - - // save canvas properties - const props = {}; - for (const key in this.drawingContext) { - const val = this.drawingContext[key]; - if (typeof val !== "object" && typeof val !== "function") { - props[key] = val; - } - } - - const dimensions = this._adjustDimensions(w, h); - w = dimensions.adjustedWidth; - h = dimensions.adjustedHeight; - - this.width = w; - this.height = h; - - this.canvas.width = w * this._pixelDensity; - this.canvas.height = h * this._pixelDensity; - this.canvas.style.width = `${w}px`; - this.canvas.style.height = `${h}px`; + _updateViewport() { this._origViewport = { width: this.GL.drawingBufferWidth, height: this.GL.drawingBufferHeight, }; this.viewport(this._origViewport.width, this._origViewport.height); + } - this.states.curCamera._resize(); - - //resize pixels buffer - if (typeof this.pixels !== "undefined") { - this.pixels = new Uint8Array( - this.GL.drawingBufferWidth * this.GL.drawingBufferHeight * 4 - ); - } - - for (const framebuffer of this.framebuffers) { - // Notify framebuffers of the resize so that any auto-sized framebuffers - // can also update their size - framebuffer._canvasSizeChanged(); - } - - // reset canvas properties - for (const savedKey in props) { - try { - this.drawingContext[savedKey] = props[savedKey]; - } catch (err) { - // ignore read-only property errors - } - } + _createPixelsArray() { + this.pixels = new Uint8Array( + this.GL.drawingBufferWidth * this.GL.drawingBufferHeight * 4 + ); } /** @@ -1693,107 +771,6 @@ class RendererGL extends Renderer { this.GL.clear(this.GL.DEPTH_BUFFER_BIT); } - applyMatrix(a, b, c, d, e, f) { - this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); - if (arguments.length === 16) { - // this.states.uModelMatrix.apply(arguments); - Matrix.prototype.apply.apply(this.states.uModelMatrix, arguments); - } else { - this.states.uModelMatrix.apply([ - a, - b, - 0, - 0, - c, - d, - 0, - 0, - 0, - 0, - 1, - 0, - e, - f, - 0, - 1, - ]); - } - } - - /** - * [translate description] - * @private - * @param {Number} x [description] - * @param {Number} y [description] - * @param {Number} z [description] - * @chainable - * @todo implement handle for components or vector as args - */ - translate(x, y, z) { - if (x instanceof Vector) { - z = x.z; - y = x.y; - x = x.x; - } - this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); - this.states.uModelMatrix.translate([x, y, z]); - return this; - } - - /** - * Scales the Model View Matrix by a vector - * @private - * @param {Number | p5.Vector | Array} x [description] - * @param {Number} [y] y-axis scalar - * @param {Number} [z] z-axis scalar - * @chainable - */ - scale(x, y, z) { - this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); - 
this.states.uModelMatrix.scale(x, y, z); - return this; - } - - rotate(rad, axis) { - if (typeof axis === "undefined") { - return this.rotateZ(rad); - } - this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); - Matrix.prototype.rotate4x4.apply(this.states.uModelMatrix, arguments); - return this; - } - - rotateX(rad) { - this.rotate(rad, 1, 0, 0); - return this; - } - - rotateY(rad) { - this.rotate(rad, 0, 1, 0); - return this; - } - - rotateZ(rad) { - this.rotate(rad, 0, 0, 1); - return this; - } - - pop(...args) { - if ( - this._clipDepths.length > 0 && - this._pushPopDepth === this._clipDepths[this._clipDepths.length - 1] - ) { - this._clearClip(); - if (!this._userEnabledStencil) { - this._internalDisable.call(this.GL, this.GL.STENCIL_TEST); - } - - // Reset saved state - // this._userEnabledStencil = this._savedStencilTestState; - } - super.pop(...args); - this._applyStencilTestIfClipping(); - } _applyStencilTestIfClipping() { const drawTarget = this.drawTarget(); if (drawTarget._isClipApplied !== this._stencilTestOn) { @@ -1808,13 +785,7 @@ class RendererGL extends Renderer { } } } - resetMatrix() { - this.states.setValue("uModelMatrix", this.states.uModelMatrix.clone()); - this.states.uModelMatrix.reset(); - this.states.setValue("uViewMatrix", this.states.uViewMatrix.clone()); - this.states.uViewMatrix.set(this.states.curCamera.cameraMatrix); - return this; - } + ////////////////////////////////////////////// // SHADER @@ -1826,15 +797,7 @@ class RendererGL extends Renderer { * and the shader must be valid in that context. */ - _getStrokeShader() { - // select the stroke shader to use - const stroke = this.states.userStrokeShader; - if (stroke) { - return stroke; - } - return this._getLineShader(); - } - + // TODO move to super class _getSphereMapping(img) { if (!this.sphereMapping) { this.sphereMapping = this._pInst.createFilterShader(sphereMapping); @@ -1848,53 +811,13 @@ class RendererGL extends Renderer { return this.sphereMapping; } - /* - * This method will handle both image shaders and - * fill shaders, returning the appropriate shader - * depending on the current context (image or shape). - */ - _getFillShader() { - // If drawing an image, check for user-defined image shader and filters - if (this._drawingImage) { - // Use user-defined image shader if available and no filter is applied - if (this.states.userImageShader && !this._drawingFilter) { - return this.states.userImageShader; - } else { - return this._getLightShader(); // Fallback to light shader - } - } - // If user has defined a fill shader, return that - else if (this.states.userFillShader) { - return this.states.userFillShader; - } - // Use normal shader if normal material is active - else if (this.states._useNormalMaterial) { - return this._getNormalShader(); - } - // Use light shader if lighting or textures are enabled - else if (this.states.enableLighting || this.states._tex) { - return this._getLightShader(); - } - // Default to color shader if no other conditions are met - return this._getColorShader(); - } - - _getPointShader() { - // select the point shader to use - const point = this.states.userPointShader; - if (!point || !point.isPointShader()) { - return this._getPointShader(); - } - return point; - } - baseMaterialShader() { if (!this._pInst._glAttributes.perPixelLighting) { throw new Error( "The material shader does not support hooks without perPixelLighting. Try turning it back on." 
); } - return this._getLightShader(); + return super.baseMaterialShader(); } _getLightShader() { @@ -1945,10 +868,6 @@ class RendererGL extends Renderer { return this._defaultLightShader; } - baseNormalShader() { - return this._getNormalShader(); - } - _getNormalShader() { if (!this._defaultNormalShader) { this._defaultNormalShader = new Shader( @@ -1977,10 +896,6 @@ class RendererGL extends Renderer { return this._defaultNormalShader; } - baseColorShader() { - return this._getColorShader(); - } - _getColorShader() { if (!this._defaultColorShader) { this._defaultColorShader = new Shader( @@ -2009,34 +924,6 @@ class RendererGL extends Renderer { return this._defaultColorShader; } - /** - * TODO(dave): un-private this when there is a way to actually override the - * shader used for points - * - * Get the shader used when drawing points with `point()`. - * - * You can call `pointShader().modify()` - * and change any of the following hooks: - * - `void beforeVertex`: Called at the start of the vertex shader. - * - `vec3 getLocalPosition`: Update the position of vertices before transforms are applied. It takes in `vec3 position` and must return a modified version. - * - `vec3 getWorldPosition`: Update the position of vertices after transforms are applied. It takes in `vec3 position` and pust return a modified version. - * - `float getPointSize`: Update the size of the point. It takes in `float size` and must return a modified version. - * - `void afterVertex`: Called at the end of the vertex shader. - * - `void beforeFragment`: Called at the start of the fragment shader. - * - `bool shouldDiscard`: Points are drawn inside a square, with the corners discarded in the fragment shader to create a circle. Use this to change this logic. It takes in a `bool willDiscard` and must return a modified version. - * - `vec4 getFinalColor`: Update the final color after mixing. It takes in a `vec4 color` and must return a modified version. - * - `void afterFragment`: Called at the end of the fragment shader. - * - * Call `pointShader().inspectHooks()` to see all the possible hooks and - * their default implementations. - * - * @returns {p5.Shader} The `point()` shader - * @private() - */ - pointShader() { - return this._getPointShader(); - } - _getPointShader() { if (!this._defaultPointShader) { this._defaultPointShader = new Shader( @@ -2065,10 +952,6 @@ class RendererGL extends Renderer { return this._defaultPointShader; } - baseStrokeShader() { - return this._getLineShader(); - } - _getLineShader() { if (!this._defaultLineShader) { this._defaultLineShader = new Shader( @@ -2186,6 +1069,8 @@ class RendererGL extends Renderer { this.textures.set(src, tex); return tex; } + + // TODO move to super class /* * used in imageLight, * To create a blurry image from the input non blurry img, if it doesn't already exist @@ -2228,6 +1113,7 @@ class RendererGL extends Renderer { return newFramebuffer; } + // TODO move to super class /* * used in imageLight, * To create a texture from the input non blurry image, if it doesn't already exist @@ -2286,210 +1172,6 @@ class RendererGL extends Renderer { return tex; } - /** - * @private - * @returns {p5.Framebuffer|null} The currently active framebuffer, or null if - * the main canvas is the current draw target. 
- */ - activeFramebuffer() { - return this.activeFramebuffers[this.activeFramebuffers.length - 1] || null; - } - - createFramebuffer(options) { - return new Framebuffer(this, options); - } - - _setGlobalUniforms(shader) { - const modelMatrix = this.states.uModelMatrix; - const viewMatrix = this.states.uViewMatrix; - const projectionMatrix = this.states.uPMatrix; - const modelViewMatrix = modelMatrix.copy().mult(viewMatrix); - - shader.setUniform( - "uPerspective", - this.states.curCamera.useLinePerspective ? 1 : 0 - ); - shader.setUniform("uViewMatrix", viewMatrix.mat4); - shader.setUniform("uProjectionMatrix", projectionMatrix.mat4); - shader.setUniform("uModelMatrix", modelMatrix.mat4); - shader.setUniform("uModelViewMatrix", modelViewMatrix.mat4); - if (shader.uniforms.uModelViewProjectionMatrix) { - const modelViewProjectionMatrix = modelViewMatrix.copy(); - modelViewProjectionMatrix.mult(projectionMatrix); - shader.setUniform( - "uModelViewProjectionMatrix", - modelViewProjectionMatrix.mat4 - ); - } - if (shader.uniforms.uNormalMatrix) { - this.scratchMat3.inverseTranspose4x4(modelViewMatrix); - shader.setUniform("uNormalMatrix", this.scratchMat3.mat3); - } - if (shader.uniforms.uModelNormalMatrix) { - this.scratchMat3.inverseTranspose4x4(this.states.uModelMatrix); - shader.setUniform("uModelNormalMatrix", this.scratchMat3.mat3); - } - if (shader.uniforms.uCameraNormalMatrix) { - this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); - shader.setUniform("uCameraNormalMatrix", this.scratchMat3.mat3); - } - if (shader.uniforms.uCameraRotation) { - this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); - shader.setUniform("uCameraRotation", this.scratchMat3.mat3); - } - shader.setUniform("uViewport", this._viewport); - } - - _setStrokeUniforms(strokeShader) { - // set the uniform values - strokeShader.setUniform("uSimpleLines", this._simpleLines); - strokeShader.setUniform("uUseLineColor", this._useLineColor); - strokeShader.setUniform("uMaterialColor", this.states.curStrokeColor); - strokeShader.setUniform("uStrokeWeight", this.states.strokeWeight); - strokeShader.setUniform("uStrokeCap", STROKE_CAP_ENUM[this.curStrokeCap]); - strokeShader.setUniform( - "uStrokeJoin", - STROKE_JOIN_ENUM[this.curStrokeJoin] - ); - } - - _setFillUniforms(fillShader) { - this.mixedSpecularColor = [...this.states.curSpecularColor]; - const empty = this._getEmptyTexture(); - - if (this.states._useMetalness > 0) { - this.mixedSpecularColor = this.mixedSpecularColor.map( - (mixedSpecularColor, index) => - this.states.curFillColor[index] * this.states._useMetalness + - mixedSpecularColor * (1 - this.states._useMetalness) - ); - } - - // TODO: optimize - fillShader.setUniform("uUseVertexColor", this._useVertexColor); - fillShader.setUniform("uMaterialColor", this.states.curFillColor); - fillShader.setUniform("isTexture", !!this.states._tex); - // We need to explicitly set uSampler back to an empty texture here. - // In general, we record the last set texture so we can re-apply it - // the next time a shader is used. However, the texture() function - // works differently and is global p5 state. If the p5 state has - // been cleared, we also need to clear the value in uSampler to match. 
- fillShader.setUniform("uSampler", this.states._tex || empty); - fillShader.setUniform("uTint", this.states.tint); - - fillShader.setUniform("uHasSetAmbient", this.states._hasSetAmbient); - fillShader.setUniform("uAmbientMatColor", this.states.curAmbientColor); - fillShader.setUniform("uSpecularMatColor", this.mixedSpecularColor); - fillShader.setUniform("uEmissiveMatColor", this.states.curEmissiveColor); - fillShader.setUniform("uSpecular", this.states._useSpecularMaterial); - fillShader.setUniform("uEmissive", this.states._useEmissiveMaterial); - fillShader.setUniform("uShininess", this.states._useShininess); - fillShader.setUniform("uMetallic", this.states._useMetalness); - - this._setImageLightUniforms(fillShader); - - fillShader.setUniform("uUseLighting", this.states.enableLighting); - - const pointLightCount = this.states.pointLightDiffuseColors.length / 3; - fillShader.setUniform("uPointLightCount", pointLightCount); - fillShader.setUniform( - "uPointLightLocation", - this.states.pointLightPositions - ); - fillShader.setUniform( - "uPointLightDiffuseColors", - this.states.pointLightDiffuseColors - ); - fillShader.setUniform( - "uPointLightSpecularColors", - this.states.pointLightSpecularColors - ); - - const directionalLightCount = - this.states.directionalLightDiffuseColors.length / 3; - fillShader.setUniform("uDirectionalLightCount", directionalLightCount); - fillShader.setUniform( - "uLightingDirection", - this.states.directionalLightDirections - ); - fillShader.setUniform( - "uDirectionalDiffuseColors", - this.states.directionalLightDiffuseColors - ); - fillShader.setUniform( - "uDirectionalSpecularColors", - this.states.directionalLightSpecularColors - ); - - // TODO: sum these here... - const ambientLightCount = this.states.ambientLightColors.length / 3; - this.mixedAmbientLight = [...this.states.ambientLightColors]; - - if (this.states._useMetalness > 0) { - this.mixedAmbientLight = this.mixedAmbientLight.map((ambientColors) => { - let mixing = ambientColors - this.states._useMetalness; - return Math.max(0, mixing); - }); - } - fillShader.setUniform("uAmbientLightCount", ambientLightCount); - fillShader.setUniform("uAmbientColor", this.mixedAmbientLight); - - const spotLightCount = this.states.spotLightDiffuseColors.length / 3; - fillShader.setUniform("uSpotLightCount", spotLightCount); - fillShader.setUniform("uSpotLightAngle", this.states.spotLightAngle); - fillShader.setUniform("uSpotLightConc", this.states.spotLightConc); - fillShader.setUniform( - "uSpotLightDiffuseColors", - this.states.spotLightDiffuseColors - ); - fillShader.setUniform( - "uSpotLightSpecularColors", - this.states.spotLightSpecularColors - ); - fillShader.setUniform("uSpotLightLocation", this.states.spotLightPositions); - fillShader.setUniform( - "uSpotLightDirection", - this.states.spotLightDirections - ); - - fillShader.setUniform( - "uConstantAttenuation", - this.states.constantAttenuation - ); - fillShader.setUniform("uLinearAttenuation", this.states.linearAttenuation); - fillShader.setUniform( - "uQuadraticAttenuation", - this.states.quadraticAttenuation - ); - } - - // getting called from _setFillUniforms - _setImageLightUniforms(shader) { - //set uniform values - shader.setUniform("uUseImageLight", this.states.activeImageLight != null); - // true - if (this.states.activeImageLight) { - // this.states.activeImageLight has image as a key - // look up the texture from the diffusedTexture map - let diffusedLight = this.getDiffusedTexture(this.states.activeImageLight); - 
shader.setUniform("environmentMapDiffused", diffusedLight); - let specularLight = this.getSpecularTexture(this.states.activeImageLight); - - shader.setUniform("environmentMapSpecular", specularLight); - } - } - - _setPointUniforms(pointShader) { - // set the uniform values - pointShader.setUniform("uMaterialColor", this.states.curStrokeColor); - // @todo is there an instance where this isn't stroke weight? - // should be they be same var? - pointShader.setUniform( - "uPointSize", - this.states.strokeWeight * this._pixelDensity - ); - } - /* Binds a buffer to the drawing context * when passed more than two arguments it also updates or initializes * the data associated with the buffer @@ -2508,6 +1190,14 @@ class RendererGL extends Renderer { } } + _makeFilterShader(renderer, operation) { + return new Shader( + renderer, + filterShaderVert, + filterShaderFrags[operation] + ); + } + /////////////////////////////// //// UTILITY FUNCTIONS ////////////////////////////// @@ -2614,7 +1304,7 @@ function rendererGL(p5, fn) { * } * * - * + * *
* * // Now with the antialias attribute set to true. diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js new file mode 100644 index 0000000000..268e597af3 --- /dev/null +++ b/src/webgpu/p5.RendererWebGPU.js @@ -0,0 +1,27 @@ +import { Renderer3D } from '../core/p5.Renderer3D'; + +class RendererWebGPU extends Renderer3D { + constructor(pInst, w, h, isMainCanvas, elt) { + super(pInst, w, h, isMainCanvas, elt) + } + + setupContext() { + // TODO + } + + _resetBuffersBeforeDraw() { + // TODO + } + + ////////////////////////////////////////////// + // Setting + ////////////////////////////////////////////// + _adjustDimensions(width, height) { + // TODO: find max texture size + return { adjustedWidth: width, adjustedHeight: height }; + } + + _applyStencilTestIfClipping() { + // TODO + } +} From 3af1624fb8a7c6dabec6e69e0da9d55036a233a7 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Fri, 23 May 2025 19:01:27 -0400 Subject: [PATCH 02/69] Get background() working --- preview/index.html | 22 ++--- src/core/constants.js | 7 ++ src/core/p5.Renderer3D.js | 55 ++++++------ src/core/rendering.js | 6 +- src/webgl/3d_primitives.js | 44 ++++----- src/webgl/GeometryBufferCache.js | 61 +++---------- src/webgl/light.js | 20 ++--- src/webgl/material.js | 30 +++---- src/webgl/p5.Camera.js | 18 ++-- src/webgl/p5.Framebuffer.js | 2 +- src/webgl/p5.RendererGL.js | 148 ++++++++++--------------------- src/webgl/text.js | 8 +- src/webgl/utils.js | 99 +++++++++++++++++++++ src/webgpu/p5.RendererWebGPU.js | 125 +++++++++++++++++++++++++- 14 files changed, 390 insertions(+), 255 deletions(-) create mode 100644 src/webgl/utils.js diff --git a/preview/index.html b/preview/index.html index d0f3b329ae..2cc9391628 100644 --- a/preview/index.html +++ b/preview/index.html @@ -18,29 +18,21 @@ - \ No newline at end of file + diff --git a/src/core/p5.Renderer3D.js b/src/core/p5.Renderer3D.js index fb26a639c8..a2e40d83c5 100644 --- a/src/core/p5.Renderer3D.js +++ b/src/core/p5.Renderer3D.js @@ -557,13 +557,12 @@ export class Renderer3D extends Renderer { geometry.hasFillTransparency() ); - this._drawBuffers(geometry, { mode, count }); + this._drawBuffers(geometry, { mode, count }, false); shader.unbindShader(); } _drawStrokes(geometry, { count } = {}) { - const gl = this.GL; this._useLineColor = geometry.vertexStrokeColors.length > 0; @@ -584,22 +583,7 @@ export class Renderer3D extends Renderer { geometry.hasStrokeTransparency() ); - if (count === 1) { - gl.drawArrays(gl.TRIANGLES, 0, geometry.lineVertices.length / 3); - } else { - try { - gl.drawArraysInstanced( - gl.TRIANGLES, - 0, - geometry.lineVertices.length / 3, - count - ); - } catch (e) { - console.log( - "🌸 p5.js says: Instancing is only supported in WebGL2 mode" - ); - } - } + this._drawBuffers(geometry, {count}, true) shader.unbindShader(); } @@ -1430,7 +1414,7 @@ export class Renderer3D extends Renderer { this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); shader.setUniform("uCameraRotation", this.scratchMat3.mat3); } - shader.setUniform("uViewport", this._viewport); + shader.setUniform("uViewport", [0, 0, 400, 400]); } _setStrokeUniforms(strokeShader) { diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index 7ef7e61bf7..e033e3b1c2 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -270,9 +270,33 @@ class RendererGL extends Renderer3D { } } + // Stroke version for now: + // +// { +// const gl = this.GL; +// // move this to _drawBuffers ? 
+// if (count === 1) { +// gl.drawArrays(gl.TRIANGLES, 0, geometry.lineVertices.length / 3); +// } else { +// try { + // gl.drawArraysInstanced( + // gl.TRIANGLES, + // 0, + // geometry.lineVertices.length / 3, + // count + // ); + // } catch (e) { + // console.log( + // "🌸 p5.js says: Instancing is only supported in WebGL2 mode" + // ); + // } + // } + // } + _drawBuffers(geometry, { mode = constants.TRIANGLES, count }) { const gl = this.GL; const glBuffers = this.geometryBufferCache.getCached(geometry); + //console.log(glBuffers); if (!glBuffers) return; @@ -1157,7 +1181,7 @@ class RendererGL extends Renderer3D { if (indices) { const buffer = gl.createBuffer(); - this.renderer._bindBuffer(buffer, gl.ELEMENT_ARRAY_BUFFER, indices, indexType); + this._bindBuffer(buffer, gl.ELEMENT_ARRAY_BUFFER, indices, indexType); buffers.indexBuffer = buffer; @@ -1478,9 +1502,9 @@ class RendererGL extends Renderer3D { createTexture({ width, height, format, dataType }) { const gl = this.GL; const tex = gl.createTexture(); - this.gl.bindTexture(gl.TEXTURE_2D, tex); - this.gl.texImage2D(gl.TEXTURE_2D, 0, this.gl.RGBA, width, height, 0, - gl.RGBA, this.gl.UNSIGNED_BYTE, null); + gl.bindTexture(gl.TEXTURE_2D, tex); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, + gl.RGBA, gl.UNSIGNED_BYTE, null); // TODO use format and data type return { texture: tex, glFormat: gl.RGBA, glDataType: gl.UNSIGNED_BYTE }; } diff --git a/src/webgl/shaders/line.vert b/src/webgl/shaders/line.vert index de422ad6b6..a00bf94ba8 100644 --- a/src/webgl/shaders/line.vert +++ b/src/webgl/shaders/line.vert @@ -271,6 +271,7 @@ void main() { } } else { vec2 tangent = aTangentIn == vec3(0.) ? tangentOut : tangentIn; + vTangent = tangent; vec2 normal = vec2(-tangent.y, tangent.x); diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index a58720d66f..ffb012e1f1 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -2,6 +2,7 @@ import { Renderer3D } from '../core/p5.Renderer3D'; import { Shader } from '../webgl/p5.Shader'; import * as constants from '../core/constants'; import { colorVertexShader, colorFragmentShader } from './shaders/color'; +import { lineVertexShader, lineFragmentShader} from './shaders/line'; class RendererWebGPU extends Renderer3D { constructor(pInst, w, h, isMainCanvas, elt) { @@ -248,7 +249,6 @@ class RendererWebGPU extends Renderer3D { .filter(u => !u.isSampler) .reduce((sum, u) => sum + u.alignedBytes, 0); shader._uniformData = new Float32Array(uniformSize / 4); - shader._uniformBuffer = this.device.createBuffer({ size: uniformSize, usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST, @@ -453,7 +453,6 @@ class RendererWebGPU extends Renderer3D { for (const attrName in shader.attributes) { const attr = shader.attributes[attrName]; if (!attr || attr.location === -1) continue; - // Get the vertex buffer info associated with this attribute const renderBuffer = this.buffers[shader.shaderType].find(buf => buf.attr === attrName) || @@ -477,7 +476,6 @@ class RendererWebGPU extends Renderer3D { ], }); } - return layouts; } @@ -512,7 +510,9 @@ class RendererWebGPU extends Renderer3D { _useShader(shader, options) {} - _updateViewport() {} + _updateViewport() { + this._viewport = [0, 0, this.width, this.height]; + } zClipRange() { return [0, 1]; @@ -548,13 +548,14 @@ class RendererWebGPU extends Renderer3D { // Rendering ////////////////////////////////////////////// - _drawBuffers(geometry, { mode = constants.TRIANGLES, count = 1 }) { + 
_drawBuffers(geometry, { mode = constants.TRIANGLES, count = 1 }, stroke) { const buffers = this.geometryBufferCache.getCached(geometry); if (!buffers) return; const commandEncoder = this.device.createCommandEncoder(); + const currentTexture = this.drawingContext.getCurrentTexture(); const colorAttachment = { - view: this.drawingContext.getCurrentTexture().createView(), + view: currentTexture.createView(), loadOp: "load", storeOp: "store", }; @@ -578,7 +579,6 @@ class RendererWebGPU extends Renderer3D { const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor); passEncoder.setPipeline(this._curShader.getPipeline(this._shaderOptions({ mode }))); - // Bind vertex buffers for (const buffer of this._getVertexBuffers(this._curShader)) { passEncoder.setVertexBuffer( @@ -587,9 +587,9 @@ class RendererWebGPU extends Renderer3D { 0 ); } - // Bind uniforms this._packUniforms(this._curShader); + console.log(this._curShader); this.device.queue.writeBuffer( this._curShader._uniformBuffer, 0, @@ -621,18 +621,21 @@ class RendererWebGPU extends Renderer3D { layout, entries: bgEntries, }); - passEncoder.setBindGroup(group, bindGroup); } + if (buffers.lineVerticesBuffer && geometry.lineVertices && stroke) { + passEncoder.draw(geometry.lineVertices.length / 3, count, 0, 0); + } // Bind index buffer and issue draw + if (!stroke) { if (buffers.indexBuffer) { const indexFormat = buffers.indexFormat || "uint16"; passEncoder.setIndexBuffer(buffers.indexBuffer, indexFormat); passEncoder.drawIndexed(geometry.faces.length * 3, count, 0, 0, 0); } else { passEncoder.draw(geometry.vertices.length, count, 0, 0); - } + }} passEncoder.end(); this.queue.submit([commandEncoder.finish()]); @@ -644,6 +647,7 @@ class RendererWebGPU extends Renderer3D { _packUniforms(shader) { let offset = 0; + let i = 0; for (const name in shader.uniforms) { const uniform = shader.uniforms[name]; if (uniform.isSampler) continue; @@ -661,7 +665,7 @@ class RendererWebGPU extends Renderer3D { new RegExp(`struct\\s+${structName}\\s*\\{([^\\}]+)\\}`) ); if (!structMatch) { - throw new Error(`Can't find a struct definition for ${structName}`); + throw new Error(`Can't find a struct defnition for ${structName}`); } const structBody = structMatch[1]; @@ -716,7 +720,6 @@ class RendererWebGPU extends Renderer3D { } const structType = uniformVarMatch[2]; const uniforms = this._parseStruct(shader.vertSrc(), structType); - // Extract samplers from group bindings const samplers = []; const samplerRegex = /@group\((\d+)\)\s*@binding\((\d+)\)\s*var\s+(\w+)\s*:\s*(\w+);/g; @@ -835,6 +838,28 @@ class RendererWebGPU extends Renderer3D { return this._defaultColorShader; } + _getLineShader() { + if (!this._defaultLineShader) { + this._defaultLineShader = new Shader( + this, + lineVertexShader, + lineFragmentShader, + { + vertex: { + "void beforeVertex": "() {}", + "Vertex getObjectInputs": "(inputs: Vertex) { return inputs; }", + "Vertex getWorldInputs": "(inputs: Vertex) { return inputs; }", + "Vertex getCameraInputs": "(inputs: Vertex) { return inputs; }", + }, + fragment: { + "vec4 getFinalColor": "(color: vec4) { return color; }" + }, + } + ); + } + return this._defaultLineShader; + } + ////////////////////////////////////////////// // Setting ////////////////////////////////////////////// @@ -921,7 +946,7 @@ class RendererWebGPU extends Renderer3D { } } - console.log(preMain + '\n' + defines + hooks + main + postMain) + //console.log(preMain + '\n' + defines + hooks + main + postMain) return preMain + '\n' + defines + hooks + main + postMain; } 
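The WGSL sources added below use `// @p5 ifdef <hook>`, `// @p5 ifndef <hook>`, and `// @p5 endif` comment markers, which are presumably resolved during shader assembly (the step above that concatenates `preMain`, `defines`, `hooks`, and `main`) based on which hooks a user has augmented. The actual preprocessor is not shown in this excerpt; the following is only a rough sketch of how such conditional regions could be stripped.

    // Illustrative only: keep "// @p5 ifdef X" regions when hook X is augmented,
    // keep "// @p5 ifndef X" regions when it is not, and drop the marker lines.
    function resolveHookConditionals(source, augmentedHooks) {
      const kept = [];
      const keepStack = [true];
      for (const line of source.split('\n')) {
        const open = line.match(/^\s*\/\/ @p5 (ifdef|ifndef) (.+?)\s*$/);
        if (open) {
          const has = augmentedHooks.has(open[2]);
          keepStack.push(
            keepStack[keepStack.length - 1] && (open[1] === 'ifdef' ? has : !has)
          );
        } else if (/^\s*\/\/ @p5 endif\s*$/.test(line)) {
          keepStack.pop();
        } else if (keepStack[keepStack.length - 1]) {
          kept.push(line);
        }
      }
      return kept.join('\n');
    }

    // e.g. resolveHookConditionals(lineVertexShader, new Set(['Vertex getWorldInputs']))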
} diff --git a/src/webgpu/shaders/line.js b/src/webgpu/shaders/line.js new file mode 100644 index 0000000000..a42e7b178e --- /dev/null +++ b/src/webgpu/shaders/line.js @@ -0,0 +1,317 @@ +import { getTexture } from './utils' + +const uniforms = ` +struct Uniforms { +// @p5 ifdef Vertex getWorldInputs + uModelMatrix: mat4x4, + uViewMatrix: mat4x4, +// @p5 endif +// @p5 ifndef Vertex getWorldInputs + uModelViewMatrix: mat4x4, +// @p5 endif + uMaterialColor: vec4, + uProjectionMatrix: mat4x4, + uStrokeWeight: f32, + uUseLineColor: f32, + uSimpleLines: f32, + uViewport: vec4, + uPerspective: i32, + uStrokeJoin: i32, +} +`; + +export const lineVertexShader = ` +struct StrokeVertexInput { + @location(0) aPosition: vec3, + @location(1) aSide: f32, + @location(2) aTangentIn: vec3, + @location(3) aTangentOut: vec3, + @location(4) aVertexColor: vec4, +}; + +struct StrokeVertexOutput { + @builtin(position) Position: vec4, + @location(0) vColor: vec4, + @location(1) vTangent: vec2, + @location(2) vCenter: vec2, + @location(3) vPosition: vec2, + @location(4) vMaxDist: f32, + @location(5) vCap: f32, + @location(6) vJoin: f32, + @location(7) vStrokeWeight: f32, +}; + +${uniforms} +@group(0) @binding(0) var uniforms: Uniforms; + +struct Vertex { + position: vec3, + tangentIn: vec3, + tangentOut: vec3, + color: vec4, + weight: f32, +} + +fn lineIntersection(aPoint: vec2f, aDir: vec2f, bPoint: vec2f, bDir: vec2f) -> vec2f { + // Rotate and translate so a starts at the origin and goes out to the right + var bMutPoint = bPoint; + bMutPoint -= aPoint; + var rotatedBFrom = vec2( + bMutPoint.x*aDir.x + bMutPoint.y*aDir.y, + bMutPoint.y*aDir.x - bMutPoint.x*aDir.y + ); + var bTo = bMutPoint + bDir; + var rotatedBTo = vec2( + bTo.x*aDir.x + bTo.y*aDir.y, + bTo.y*aDir.x - bTo.x*aDir.y + ); + var intersectionDistance = + rotatedBTo.x + (rotatedBFrom.x - rotatedBTo.x) * rotatedBTo.y / + (rotatedBTo.y - rotatedBFrom.y); + return aPoint + aDir * intersectionDistance; +} + +@vertex +fn main(input: StrokeVertexInput) -> StrokeVertexOutput { + HOOK_beforeVertex(); + var output: StrokeVertexOutput; + let viewport = vec4(0.,0.,400.,400.); + let simpleLines = (uniforms.uSimpleLines != 0.); + if (!simpleLines) { + if (all(input.aTangentIn == vec3()) != all(input.aTangentOut == vec3())) { + output.vCap = 1.; + } else { + output.vCap = 0.; + } + let conditionA = any(input.aTangentIn != vec3()); + let conditionB = any(input.aTangentOut != vec3()); + let conditionC = any(input.aTangentIn != input.aTangentOut); + if (conditionA && conditionB && conditionC) { + output.vJoin = 1.; + } else { + output.vJoin = 0.; + } + } + var lineColor: vec4; + if (uniforms.uUseLineColor != 0.) 
{ + lineColor = input.aVertexColor; + } else { + lineColor = uniforms.uMaterialColor; + } + var inputs = Vertex( + input.aPosition.xyz, + input.aTangentIn, + input.aTangentOut, + lineColor, + uniforms.uStrokeWeight + ); + +// @p5 ifdef Vertex getObjectInputs + inputs = HOOK_getObjectInputs(inputs); +// @p5 endif + +// @p5 ifdef Vertex getWorldInputs + inputs.position = (uModelMatrix * vec4(inputs.position, 1.)).xyz; + inputs.tangentIn = (uModelMatrix * vec4(input.aTangentIn, 1.)).xyz; + inputs.tangentOut = (uModelMatrix * vec4(input.aTangentOut, 1.)).xyz; +// @p5 endif + +// @p5 ifdef Vertex getWorldInputs + // Already multiplied by the model matrix, just apply view + inputs.position = (uniforms.uViewMatrix * vec4(inputs.position, 1.)).xyz; + inputs.tangentIn = (uniforms.uViewMatrix * vec4(input.aTangentIn, 0.)).xyz; + inputs.tangentOut = (uniforms.uViewMatrix * vec4(input.aTangentOut, 0.)).xyz; +// @p5 endif +// @p5 ifndef Vertex getWorldInputs + // Apply both at once + inputs.position = (uniforms.uModelViewMatrix * vec4(inputs.position, 1.)).xyz; + inputs.tangentIn = (uniforms.uModelViewMatrix * vec4(input.aTangentIn, 0.)).xyz; + inputs.tangentOut = (uniforms.uModelViewMatrix * vec4(input.aTangentOut, 0.)).xyz; +// @p5 endif +// @p5 ifdef Vertex getCameraInputs + inputs = HOOK_getCameraInputs(inputs); +// @p5 endif + + var posp = vec4(inputs.position, 1.); + var posqIn = vec4(inputs.position + inputs.tangentIn, 1.); + var posqOut = vec4(inputs.position + inputs.tangentOut, 1.); + output.vStrokeWeight = inputs.weight; + + var facingCamera = pow( + // The word space tangent's z value is 0 if it's facing the camera + abs(normalize(posqIn-posp).z), + + // Using pow() here to ramp 'facingCamera' up from 0 to 1 really quickly + // so most lines get scaled and don't get clipped + 0.25 + ); + + // Moving vertices slightly toward the camera + // to avoid depth-fighting with the fill triangles. + // A mix of scaling and offsetting is used based on distance + // Discussion here: + // https://github.com/processing/p5.js/issues/7200 + + // using a scale <1 moves the lines towards nearby camera + // in order to prevent popping effects due to half of + // the line disappearing behind the geometry faces. 
+ var zDistance = -posp.z; + var distanceFactor = smoothstep(0., 800., zDistance); + + // Discussed here: + // http://www.opengl.org/discussion_boards/ubbthreads.php?ubb=showflat&Number=252848 + var scale = mix(1., 0.995, facingCamera); + var dynamicScale = mix(scale, 1.0, distanceFactor); // Closer = more scale, farther = less + + posp = vec4(posp.xyz * dynamicScale, posp.w); + posqIn = vec4(posqIn.xyz * dynamicScale, posqIn.w); + posqOut= vec4(posqOut.xyz * dynamicScale, posqOut.w); + + // Moving vertices slightly toward camera when far away + // https://github.com/processing/p5.js/issues/6956 + var zOffset = mix(0., -1., facingCamera); + var dynamicZAdjustment = mix(0., zOffset, distanceFactor); // Closer = less zAdjustment, farther = more + + posp.z -= dynamicZAdjustment; + posqIn.z -= dynamicZAdjustment; + posqOut.z -= dynamicZAdjustment; + + var p = uniforms.uProjectionMatrix * posp; + var qIn = uniforms.uProjectionMatrix * posqIn; + var qOut = uniforms.uProjectionMatrix * posqOut; + + var tangentIn = normalize((qIn.xy * p.w - p.xy * qIn.w) * viewport.zw); + var tangentOut = normalize((qOut.xy * p.w - p.xy * qOut.w) * viewport.zw); + + var curPerspScale = vec2(); + if (uniforms.uPerspective == 1) { + // Perspective --- + // convert from world to clip by multiplying with projection scaling factor + // to get the right thickness (see https://github.com/processing/processing/issues/5182) + + // The y value of the projection matrix may be flipped if rendering to a Framebuffer. + // Multiplying again by its sign here negates the flip to get just the scale. + curPerspScale = (uniforms.uProjectionMatrix * vec4(1., sign(uniforms.uProjectionMatrix[1][1]), 0., 0.)).xy; + } else { + // No Perspective --- + // multiply by W (to cancel out division by W later in the pipeline) and + // convert from screen to clip (derived from clip to screen above) + curPerspScale = p.w / (0.5 * viewport.zw); + } + + var offset = vec2(); + if (output.vJoin == 1. && !simpleLines) { + output.vTangent = normalize(tangentIn + tangentOut); + var normalIn = vec2(-tangentIn.y, tangentIn.x); + var normalOut = vec2(-tangentOut.y, tangentOut.x); + var side = sign(input.aSide); + var sideEnum = abs(input.aSide); + + // We generate vertices for joins on either side of the centerline, but + // the "elbow" side is the only one needing a join. By not setting the + // offset for the other side, all its vertices will end up in the same + // spot and not render, effectively discarding it. + if (sign(dot(tangentOut, vec2(-tangentIn.y, tangentIn.x))) != side) { + // Side enums: + // 1: the side going into the join + // 2: the middle of the join + // 3: the side going out of the join + if (sideEnum == 2.) { + // Calculate the position + tangent on either side of the join, and + // find where the lines intersect to find the elbow of the join + var c = (posp.xy / posp.w + vec2(1.)) * 0.5 * viewport.zw; + + var intersection = lineIntersection( + c + (side * normalIn * inputs.weight / 2.), + tangentIn, + c + (side * normalOut * inputs.weight / 2.), + tangentOut + ); + offset = intersection - c; + + + // When lines are thick and the angle of the join approaches 180, the + // elbow might be really far from the center. We'll apply a limit to + // the magnitude to avoid lines going across the whole screen when this + // happens. + var mag = length(offset); + var maxMag = 3 * inputs.weight; + if (mag > maxMag) { + offset = vec2(maxMag / mag); + } else if (sideEnum == 1.) 
{ + offset = side * normalIn * inputs.weight / 2.; + } else if (sideEnum == 3.) { + offset = side * normalOut * inputs.weight / 2.; + } + } + } + if (uniforms.uStrokeJoin == 2) { + var avgNormal = vec2(-output.vTangent.y, output.vTangent.x); + output.vMaxDist = abs(dot(avgNormal, normalIn * inputs.weight / 2.)); + } else { + output.vMaxDist = inputs.weight / 2.; + } + } else { + var tangent: vec2; + if (all(input.aTangentIn == vec3())) { + tangent = tangentOut; + } else { + tangent = tangentIn; + } + output.vTangent = tangent; + var normal = vec2(-tangent.y, tangent.y); + + var normalOffset = sign(input.aSide); + // Caps will have side values of -2 or 2 on the edge of the cap that + // extends out from the line + var tangentOffset = abs(input.aSide) - 1.; + offset = (normal * normalOffset + tangent * tangentOffset) * + inputs.weight * 0.5; + output.vMaxDist = inputs.weight / 2.; + } + output.vCenter = p.xy; + output.vPosition = output.vCenter + offset; + output.vColor = inputs.color; + + output.Position = vec4( + p.xy + offset.xy * curPerspScale, + p.zy + ); + var clip_pos: vec4; + if (input.aSide == 1.0) { + clip_pos = vec4(-0.1, 0.1, 0.5, 1.); + } else if (input.aSide == -1.0) { + clip_pos = vec4(-0.5, 0.5, 0.5, 1.0); + } else { + clip_pos = vec4(0.0, -0.5, 0.5 ,1.0); + } + output.Position = clip_pos; + return output; +} + + +`; + +export const lineFragmentShader = ` +struct StrokeFragmentInput { + @location(0) vColor: vec4, + @location(1) vTangent: vec2, + @location(2) vCenter: vec2, + @location(3) vPosition: vec2, + @location(4) vMaxDist: f32, + @location(5) vCap: f32, + @location(6) vJoin: f32, + @location(7) vStrokeWeight: f32, +} + +${uniforms} +@group(0) @binding(0) var uniforms: Uniforms; + +${getTexture} + +@fragment +fn main(input: StrokeFragmentInput) -> @location(0) vec4 { + return vec4(1., 1., 1., 1.); +} +`; + From ae2c56685161418ecdfdc95478aef66cf4bd5efd Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 15 Jun 2025 16:40:17 -0400 Subject: [PATCH 11/69] Add material shader --- preview/index.html | 7 +- src/core/p5.Renderer3D.js | 23 ++- src/webgl/light.js | 16 +- src/webgl/p5.Shader.js | 7 +- src/webgl/shaders/basic.frag | 3 +- src/webgl/shaders/lighting.glsl | 2 - src/webgl/shaders/phong.frag | 5 +- src/webgl/shaders/phong.vert | 11 - src/webgpu/p5.RendererWebGPU.js | 160 ++++++++++++--- src/webgpu/shaders/color.js | 9 +- src/webgpu/shaders/material.js | 348 ++++++++++++++++++++++++++++++++ 11 files changed, 528 insertions(+), 63 deletions(-) create mode 100644 src/webgpu/shaders/material.js diff --git a/preview/index.html b/preview/index.html index 01a65cd11d..26a9c7196b 100644 --- a/preview/index.html +++ b/preview/index.html @@ -27,7 +27,7 @@ let sh; p.setup = async function () { await p.createCanvas(400, 400, p.WEBGPU); - sh = p.baseColorShader().modify({ + sh = p.baseMaterialShader().modify({ uniforms: { 'f32 time': () => p.millis(), }, @@ -44,6 +44,11 @@ p.background(200); p.noStroke(); p.shader(sh); + p.ambientLight(50); + p.directionalLight(100, 100, 100, 0, 1, -1); + p.pointLight(155, 155, 155, 0, -200, 500); + p.specularMaterial(255); + p.shininess(300); for (const [i, c] of ['red', 'lime', 'blue'].entries()) { p.push(); p.fill(c); diff --git a/src/core/p5.Renderer3D.js b/src/core/p5.Renderer3D.js index fb26a639c8..c8ed32ea8a 100644 --- a/src/core/p5.Renderer3D.js +++ b/src/core/p5.Renderer3D.js @@ -1515,17 +1515,20 @@ export class Renderer3D extends Renderer { ); // TODO: sum these here... 
- const ambientLightCount = this.states.ambientLightColors.length / 3; - this.mixedAmbientLight = [...this.states.ambientLightColors]; - - if (this.states._useMetalness > 0) { - this.mixedAmbientLight = this.mixedAmbientLight.map((ambientColors) => { - let mixing = ambientColors - this.states._useMetalness; - return Math.max(0, mixing); - }); + let mixedAmbientLight = [0, 0, 0]; + for (let i = 0; i < this.states.ambientLightColors.length; i += 3) { + for (let off = 0; off < 3; off++) { + if (this.states._useMetalness > 0) { + mixedAmbientLight[off] += Math.max( + 0, + this.states.ambientLightColors[i + off] - this.states._useMetalness + ); + } else { + mixedAmbientLight[off] += this.states.ambientLightColors[i + off]; + } + } } - fillShader.setUniform("uAmbientLightCount", ambientLightCount); - fillShader.setUniform("uAmbientColor", this.mixedAmbientLight); + fillShader.setUniform("uAmbientColor", mixedAmbientLight); const spotLightCount = this.states.spotLightDiffuseColors.length / 3; fillShader.setUniform("uSpotLightCount", spotLightCount); diff --git a/src/webgl/light.js b/src/webgl/light.js index 1c714ae02a..e57dd09686 100644 --- a/src/webgl/light.js +++ b/src/webgl/light.js @@ -1620,6 +1620,8 @@ function light(p5, fn){ angle, concentration ) { + if (this.states.spotLightDiffuseColors.length / 3 >= 4) return; + let color, position, direction; const length = arguments.length; @@ -1777,18 +1779,26 @@ function light(p5, fn){ return; } this.states.setValue('spotLightDiffuseColors', [ + ...this.states.spotLightDiffuseColors, color._array[0], color._array[1], color._array[2] ]); this.states.setValue('spotLightSpecularColors', [ + ...this.states.spotLightSpecularColors, ...this.states.specularColors ]); - this.states.setValue('spotLightPositions', [position.x, position.y, position.z]); + this.states.setValue('spotLightPositions', [ + ...this.states.spotLightPositions, + position.x, + position.y, + position.z + ]); direction.normalize(); this.states.setValue('spotLightDirections', [ + ...this.states.spotLightDirections, direction.x, direction.y, direction.z @@ -1808,8 +1818,8 @@ function light(p5, fn){ } angle = this._pInst._toRadians(angle); - this.states.setValue('spotLightAngle', [Math.cos(angle)]); - this.states.setValue('spotLightConc', [concentration]); + this.states.setValue('spotLightAngle', [...this.states.spotLightAngle, Math.cos(angle)]); + this.states.setValue('spotLightConc', [...this.states.spotLightConc, concentration]); this.states.setValue('enableLighting', true); } diff --git a/src/webgl/p5.Shader.js b/src/webgl/p5.Shader.js index 3f4015311c..fc3745e394 100644 --- a/src/webgl/p5.Shader.js +++ b/src/webgl/p5.Shader.js @@ -976,14 +976,17 @@ class Shader { * *
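The `setUniform` change below threads every value through an optional `_mapUniformData` hook on the renderer before it is cached, giving a backend a chance to coerce values into the representation its shaders expect. The WebGPU implementation of that hook is not part of this excerpt; the following is only a hypothetical example of the kind of mapping such a hook might perform.

    // Hypothetical mapping hook (illustrative, not the actual implementation):
    // unwrap matrix wrappers and convert booleans so values can be written
    // straight into a typed uniform buffer.
    function mapUniformData(uniform, rawData) {
      if (rawData && rawData.mat4) return rawData.mat4; // matrix -> Float32Array
      if (rawData && rawData.mat3) return rawData.mat3;
      if (typeof rawData === 'boolean') return rawData ? 1 : 0;
      return rawData;
    }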
*/ - setUniform(uniformName, data) { + setUniform(uniformName, rawData) { this.init(); const uniform = this.uniforms[uniformName]; if (!uniform) { return; } - const gl = this._renderer.GL; + + const data = this._renderer._mapUniformData + ? this._renderer._mapUniformData(uniform, rawData) + : rawData; if (uniform.isArray) { if ( diff --git a/src/webgl/shaders/basic.frag b/src/webgl/shaders/basic.frag index e583955d36..1406964ca9 100644 --- a/src/webgl/shaders/basic.frag +++ b/src/webgl/shaders/basic.frag @@ -1,6 +1,7 @@ IN vec4 vColor; void main(void) { HOOK_beforeFragment(); - OUT_COLOR = HOOK_getFinalColor(vec4(vColor.rgb, 1.) * vColor.a); + OUT_COLOR = HOOK_getFinalColor(vColor); + OUT_COLOR.rgb *= OUT_COLOR.a; // Premultiply alpha before rendering HOOK_afterFragment(); } diff --git a/src/webgl/shaders/lighting.glsl b/src/webgl/shaders/lighting.glsl index b66ac083d1..85a4c79684 100644 --- a/src/webgl/shaders/lighting.glsl +++ b/src/webgl/shaders/lighting.glsl @@ -7,8 +7,6 @@ uniform mat4 uViewMatrix; uniform bool uUseLighting; -uniform int uAmbientLightCount; -uniform vec3 uAmbientColor[5]; uniform mat3 uCameraRotation; uniform int uDirectionalLightCount; uniform vec3 uLightingDirection[5]; diff --git a/src/webgl/shaders/phong.frag b/src/webgl/shaders/phong.frag index a424c6220c..78cfb76163 100644 --- a/src/webgl/shaders/phong.frag +++ b/src/webgl/shaders/phong.frag @@ -2,6 +2,7 @@ precision highp int; uniform bool uHasSetAmbient; +uniform vec3 uAmbientColor; uniform vec4 uSpecularMatColor; uniform vec4 uAmbientMatColor; uniform vec4 uEmissiveMatColor; @@ -13,7 +14,6 @@ uniform bool isTexture; IN vec3 vNormal; IN vec2 vTexCoord; IN vec3 vViewPosition; -IN vec3 vAmbientColor; IN vec4 vColor; struct ColorComponents { @@ -45,7 +45,7 @@ void main(void) { Inputs inputs; inputs.normal = normalize(vNormal); inputs.texCoord = vTexCoord; - inputs.ambientLight = vAmbientColor; + inputs.ambientLight = uAmbientColor; inputs.color = isTexture ? TEXTURE(uSampler, vTexCoord) * (vec4(uTint.rgb/255., 1.) * uTint.a/255.) : vColor; @@ -67,7 +67,6 @@ void main(void) { // Calculating final color as result of all lights (plus emissive term). 
- vec2 texCoord = inputs.texCoord; vec4 baseColor = inputs.color; ColorComponents c; c.opacity = baseColor.a; diff --git a/src/webgl/shaders/phong.vert b/src/webgl/shaders/phong.vert index 670da028c1..49a10933fc 100644 --- a/src/webgl/shaders/phong.vert +++ b/src/webgl/shaders/phong.vert @@ -7,8 +7,6 @@ IN vec3 aNormal; IN vec2 aTexCoord; IN vec4 aVertexColor; -uniform vec3 uAmbientColor[5]; - #ifdef AUGMENTED_HOOK_getWorldInputs uniform mat4 uModelMatrix; uniform mat4 uViewMatrix; @@ -19,7 +17,6 @@ uniform mat4 uModelViewMatrix; uniform mat3 uNormalMatrix; #endif uniform mat4 uProjectionMatrix; -uniform int uAmbientLightCount; uniform bool uUseVertexColor; uniform vec4 uMaterialColor; @@ -74,14 +71,6 @@ void main(void) { vNormal = inputs.normal; vColor = inputs.color; - // TODO: this should be a uniform - vAmbientColor = vec3(0.0); - for (int i = 0; i < 5; i++) { - if (i < uAmbientLightCount) { - vAmbientColor += uAmbientColor[i]; - } - } - gl_Position = uProjectionMatrix * vec4(inputs.position, 1.); HOOK_afterVertex(); } diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index a58720d66f..a2bcaca885 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -2,6 +2,7 @@ import { Renderer3D } from '../core/p5.Renderer3D'; import { Shader } from '../webgl/p5.Shader'; import * as constants from '../core/constants'; import { colorVertexShader, colorFragmentShader } from './shaders/color'; +import { materialVertexShader, materialFragmentShader } from './shaders/material'; class RendererWebGPU extends Renderer3D { constructor(pInst, w, h, isMainCanvas, elt) { @@ -244,13 +245,16 @@ class RendererWebGPU extends Renderer3D { } _finalizeShader(shader) { - const uniformSize = Object.values(shader.uniforms) - .filter(u => !u.isSampler) - .reduce((sum, u) => sum + u.alignedBytes, 0); - shader._uniformData = new Float32Array(uniformSize / 4); + const rawSize = Math.max( + 0, + ...Object.values(shader.uniforms).map(u => u.offsetEnd) + ); + const alignedSize = Math.ceil(rawSize / 16) * 16; + shader._uniformData = new Float32Array(alignedSize / 4); + shader._uniformDataView = new DataView(shader._uniformData.buffer); shader._uniformBuffer = this.device.createBuffer({ - size: uniformSize, + size: alignedSize, usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST, }); @@ -643,16 +647,16 @@ class RendererWebGPU extends Renderer3D { ////////////////////////////////////////////// _packUniforms(shader) { - let offset = 0; for (const name in shader.uniforms) { const uniform = shader.uniforms[name]; if (uniform.isSampler) continue; - if (uniform.size === 1) { - shader._uniformData.set([uniform._cachedData], offset); + if (uniform.type === 'u32') { + shader._uniformDataView.setUint32(uniform.offset, uniform._cachedData, true); + } else if (uniform.size === 4) { + shader._uniformData.set([uniform._cachedData], uniform.offset / 4); } else { - shader._uniformData.set(uniform._cachedData, offset); + shader._uniformData.set(uniform._cachedData, uniform.offset / 4); } - offset += uniform.alignedBytes / 4; } } @@ -668,33 +672,101 @@ class RendererWebGPU extends Renderer3D { const elements = {}; let match; let index = 0; + let offset = 0; const elementRegex = - /(?:@location\((\d+)\)\s+)?(\w+):\s+((?:mat[234]x[234]|vec[234]|float|int|uint|bool|f32|i32|u32|bool)(?:)?)/g + /(?:@location\((\d+)\)\s+)?(\w+):\s*([^\n]+?),?\n/g + + const baseAlignAndSize = (type) => { + if (['f32', 'i32', 'u32', 'bool'].includes(type)) { + return { align: 4, size: 4, items: 1 }; + } + if 
(/^vec[2-4](|f)$/.test(type)) { + const n = parseInt(type.match(/^vec([2-4])/)[1]); + const size = 4 * n; + const align = n === 2 ? 8 : 16; + return { align, size, items: n }; + } + if (/^mat[2-4](?:x[2-4])?(|f)$/.test(type)) { + if (type[4] === 'x' && type[3] !== type[5]) { + throw new Error('Non-square matrices not implemented yet'); + } + const dim = parseInt(type[3]); + const align = dim === 2 ? 8 : 16; + // Each column must be aligned + const size = Math.ceil(dim * 4 / align) * align * dim; + const pack = dim === 3 + ? (data) => [ + ...data.slice(0, 3), + 0, + ...data.slice(3, 6), + 0, + ...data.slice(6, 9), + 0 + ] + : undefined; + return { align, size, pack, items: dim * dim }; + } + if (/^array<.+>$/.test(type)) { + const [, subtype, rawLength] = type.match(/^array<(.+),\s*(\d+)>/); + const length = parseInt(rawLength); + const { + align: elemAlign, + size: elemSize, + items: elemItems, + pack: elemPack = (data) => [...data] + } = baseAlignAndSize(subtype); + const stride = Math.ceil(elemSize / elemAlign) * elemAlign; + const pack = (data) => { + const result = []; + for (let i = 0; i < data.length; i += elemItems) { + const elemData = elemPack(data.slice(i, elemItems)) + result.push(...elemData); + for (let j = 0; j < stride / 4 - elemData.length; j++) { + result.push(0); + } + } + return result; + }; + return { + align: elemAlign, + size: stride * length, + items: elemItems * length, + pack, + }; + } + throw new Error(`Unknown type in WGSL struct: ${type}`); + }; + while ((match = elementRegex.exec(structBody)) !== null) { const [_, location, name, type] = match; - const size = type.startsWith('vec') - ? parseInt(type[3]) - : type.startsWith('mat') - ? Math.pow(parseInt(type[3]), 2) - : 1; - const bytes = 4 * size; // TODO handle non 32 bit sizes? - const alignedBytes = Math.ceil(bytes / 16) * 16; + const { size, align, pack } = baseAlignAndSize(type); + offset = Math.ceil(offset / align) * align; + const offsetEnd = offset + size; elements[name] = { name, location: location ? 
parseInt(location) : undefined, index, type, size, - bytes, - alignedBytes, + offset, + offsetEnd, + pack }; index++; + offset = offsetEnd; } return elements; } + _mapUniformData(uniform, data) { + if (uniform.pack) { + return uniform.pack(data); + } + return data; + } + _getShaderAttributes(shader) { const mainMatch = /fn main\(.+:\s*(\S+)\s*\)/.exec(shader._vertSrc); if (!mainMatch) throw new Error("Can't find `fn main` in vertex shader source"); @@ -810,6 +882,40 @@ class RendererWebGPU extends Renderer3D { gpuTexture.destroy(); } + _getLightShader() { + if (!this._defaultLightShader) { + this._defaultLightShader = new Shader( + this, + materialVertexShader, + materialFragmentShader, + { + vertex: { + "void beforeVertex": "() {}", + "Vertex getObjectInputs": "(inputs: Vertex) { return inputs; }", + "Vertex getWorldInputs": "(inputs: Vertex) { return inputs; }", + "Vertex getCameraInputs": "(inputs: Vertex) { return inputs; }", + "void afterVertex": "() {}", + }, + fragment: { + "void beforeFragment": "() {}", + "Inputs getPixelInputs": "(inputs: Inputs) { return inputs; }", + "vec4f combineColors": `(components: ColorComponents) { + var rgb = vec3(0.0); + rgb += components.diffuse * components.baseColor; + rgb += components.ambient * components.ambientColor; + rgb += components.specular * components.specularColor; + rgb += components.emissive; + return vec4(rgb, components.opacity); + }`, + "vec4f getFinalColor": "(color: vec4) { return color; }", + "void afterFragment": "() {}", + }, + } + ); + } + return this._defaultLightShader; + } + _getColorShader() { if (!this._defaultColorShader) { this._defaultColorShader = new Shader( @@ -858,18 +964,23 @@ class RendererWebGPU extends Renderer3D { // way to add code if a hook is augmented. e.g.: // struct Uniforms { // // @p5 ifdef Vertex getWorldInputs - // uModelMatrix: mat4, - // uViewMatrix: mat4, + // uModelMatrix: mat4f, + // uViewMatrix: mat4f, // // @p5 endif // // @p5 ifndef Vertex getWorldInputs - // uModelViewMatrix: mat4, + // uModelViewMatrix: mat4f, // // @p5 endif // } src = src.replace( /\/\/ @p5 (ifdef|ifndef) (\w+)\s+(\w+)\n((?:(?!\/\/ @p5)(?:.|\n))*)\/\/ @p5 endif/g, (_, condition, hookType, hookName, body) => { const target = condition === 'ifdef'; - if (!!shader.hooks.modified[shaderType][`${hookType} ${hookName}`] === target) { + if ( + ( + shader.hooks.modified.vertex[`${hookType} ${hookName}`] || + shader.hooks.modified.fragment[`${hookType} ${hookName}`] + ) === target + ) { return body; } else { return ''; @@ -921,7 +1032,6 @@ class RendererWebGPU extends Renderer3D { } } - console.log(preMain + '\n' + defines + hooks + main + postMain) return preMain + '\n' + defines + hooks + main + postMain; } } diff --git a/src/webgpu/shaders/color.js b/src/webgpu/shaders/color.js index aa82c347b6..b22818efa2 100644 --- a/src/webgpu/shaders/color.js +++ b/src/webgpu/shaders/color.js @@ -14,7 +14,7 @@ struct Uniforms { // @p5 endif uProjectionMatrix: mat4x4, uMaterialColor: vec4, - uUseVertexColor: f32, + uUseVertexColor: u32, }; `; @@ -48,7 +48,7 @@ fn main(input: VertexInput) -> VertexOutput { HOOK_beforeVertex(); var output: VertexOutput; - let useVertexColor = (uniforms.uUseVertexColor != 0.0); + let useVertexColor = (uniforms.uUseVertexColor != 0); var inputs = Vertex( input.aPosition, input.aNormal, @@ -107,9 +107,8 @@ ${getTexture} @fragment fn main(input: FragmentInput) -> @location(0) vec4 { HOOK_beforeFragment(); - var outColor = HOOK_getFinalColor( - vec4(input.vColor.rgb * input.vColor.a, input.vColor.a) - ); + var 
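  // Worked example (comment only) of the layout metadata computed by
  // baseAlignAndSize above, for a hypothetical struct
  // { uCount: i32, uTint: vec3, uWeight: f32 } — offsets in bytes:
  //   uCount  -> align 4,  size 4,  offset 0,  offsetEnd 4
  //   uTint   -> align 16, size 12, offset 16, offsetEnd 28
  //   uWeight -> align 4,  size 4,  offset 28, offsetEnd 32
  //   rawSize 32 is already a multiple of 16, so the uniform buffer is 32 bytes.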
outColor = HOOK_getFinalColor(input.vColor); + outColor = vec4(outColor.rgb * outColor.a, outColor.a); HOOK_afterFragment(); return outColor; } diff --git a/src/webgpu/shaders/material.js b/src/webgpu/shaders/material.js new file mode 100644 index 0000000000..9722daad06 --- /dev/null +++ b/src/webgpu/shaders/material.js @@ -0,0 +1,348 @@ +import { getTexture } from './utils'; + +const uniforms = ` +struct Uniforms { +// @p5 ifdef Vertex getWorldInputs + uModelMatrix: mat4x4, + uModelNormalMatrix: mat3x3, + uCameraNormalMatrix: mat3x3, +// @p5 endif +// @p5 ifndef Vertex getWorldInputs + uModelViewMatrix: mat4x4, + uNormalMatrix: mat3x3, +// @p5 endif + uViewMatrix: mat4x4, + uProjectionMatrix: mat4x4, + uMaterialColor: vec4, + uUseVertexColor: u32, + + uHasSetAmbient: u32, + uAmbientColor: vec3, + uSpecularMatColor: vec4, + uAmbientMatColor: vec4, + uEmissiveMatColor: vec4, + + uTint: vec4, + isTexture: u32, + + uCameraRotation: mat3x3, + + uDirectionalLightCount: i32, + uLightingDirection: array, 5>, + uDirectionalDiffuseColors: array, 5>, + uDirectionalSpecularColors: array, 5>, + + uPointLightCount: i32, + uPointLightLocation: array, 5>, + uPointLightDiffuseColors: array, 5>, + uPointLightSpecularColors: array, 5>, + + uSpotLightCount: i32, + uSpotLightAngle: vec4, + uSpotLightConc: vec4, + uSpotLightDiffuseColors: array, 4>, + uSpotLightSpecularColors: array, 4>, + uSpotLightLocation: array, 4>, + uSpotLightDirection: array, 4>, + + uSpecular: u32, + uShininess: f32, + uMetallic: f32, + + uConstantAttenuation: f32, + uLinearAttenuation: f32, + uQuadraticAttenuation: f32, + + uUseImageLight: u32, + uUseLighting: u32, +}; +`; + +export const materialVertexShader = ` +struct VertexInput { + @location(0) aPosition: vec3, + @location(1) aNormal: vec3, + @location(2) aTexCoord: vec2, + @location(3) aVertexColor: vec4, +}; + +struct VertexOutput { + @builtin(position) Position: vec4, + @location(0) vNormal: vec3, + @location(1) vTexCoord: vec2, + @location(2) vViewPosition: vec3, + @location(4) vColor: vec4, +}; + +${uniforms} +@group(0) @binding(0) var uniforms: Uniforms; + +struct Vertex { + position: vec3, + normal: vec3, + texCoord: vec2, + color: vec4, +} + +@vertex +fn main(input: VertexInput) -> VertexOutput { + HOOK_beforeVertex(); + var output: VertexOutput; + + let useVertexColor = (uniforms.uUseVertexColor != 0); + var inputs = Vertex( + input.aPosition, + input.aNormal, + input.aTexCoord, + select(uniforms.uMaterialColor, input.aVertexColor, useVertexColor) + ); + +// @p5 ifdef Vertex getObjectInputs + inputs = HOOK_getObjectInputs(inputs); +// @p5 endif + +// @p5 ifdef Vertex getWorldInputs + inputs.position = (uniforms.uModelMatrix * vec4(inputs.position, 1.0)).xyz; + inputs.normal = uniforms.uModelNormalMatrix * inputs.normal; + inputs = HOOK_getWorldInputs(inputs); +// @p5 endif + +// @p5 ifdef Vertex getWorldInputs + // Already multiplied by the model matrix, just apply view + inputs.position = (uniforms.uViewMatrix * vec4(inputs.position, 1.0)).xyz; + inputs.normal = uniforms.uCameraNormalMatrix * inputs.normal; +// @p5 endif +// @p5 ifndef Vertex getWorldInputs + // Apply both at once + inputs.position = (uniforms.uModelViewMatrix * vec4(inputs.position, 1.0)).xyz; + inputs.normal = uniforms.uNormalMatrix * inputs.normal; +// @p5 endif + +// @p5 ifdef Vertex getCameraInputs + inputs = HOOK_getCameraInputs(inputs); +// @p5 endif + + output.vViewPosition = inputs.position; + output.vTexCoord = inputs.texCoord; + output.vNormal = normalize(inputs.normal); + output.vColor = 
inputs.color; + + output.Position = uniforms.uProjectionMatrix * vec4(inputs.position, 1.0); + + HOOK_afterVertex(); + return output; +} +`; + +export const materialFragmentShader = ` +struct FragmentInput { + @location(0) vNormal: vec3, + @location(1) vTexCoord: vec2, + @location(2) vViewPosition: vec3, + @location(4) vColor: vec4, +}; + +${uniforms} +@group(0) @binding(0) var uniforms: Uniforms; + +struct ColorComponents { + baseColor: vec3, + opacity: f32, + ambientColor: vec3, + specularColor: vec3, + diffuse: vec3, + ambient: vec3, + specular: vec3, + emissive: vec3, +} + +struct Inputs { + normal: vec3, + texCoord: vec2, + ambientLight: vec3, + ambientMaterial: vec3, + specularMaterial: vec3, + emissiveMaterial: vec3, + color: vec4, + shininess: f32, + metalness: f32, +} + +${getTexture} + +struct LightResult { + diffuse: vec3, + specular: vec3, +} +struct LightIntensityResult { + diffuse: f32, + specular: f32, +} + +const specularFactor = 2.0; +const diffuseFactor = 0.73; + +fn phongSpecular( + lightDirection: vec3, + viewDirection: vec3, + surfaceNormal: vec3, + shininess: f32 +) -> f32 { + let R = reflect(lightDirection, surfaceNormal); + return pow(max(0.0, dot(R, viewDirection)), shininess); +} + +fn lambertDiffuse(lightDirection: vec3, surfaceNormal: vec3) -> f32 { + return max(0.0, dot(-lightDirection, surfaceNormal)); +} + +fn singleLight( + viewDirection: vec3, + normal: vec3, + lightVector: vec3, + shininess: f32, + metallic: f32 +) -> LightIntensityResult { + let lightDir = normalize(lightVector); + let specularIntensity = mix(1.0, 0.4, metallic); + let diffuseIntensity = mix(1.0, 0.1, metallic); + let diffuse = lambertDiffuse(lightDir, normal) * diffuseIntensity; + let specular = select( + 0., + phongSpecular(lightDir, viewDirection, normal, shininess) * specularIntensity, + uniforms.uSpecular == 1 + ); + return LightIntensityResult(diffuse, specular); +} + +fn totalLight( + modelPosition: vec3, + normal: vec3, + shininess: f32, + metallic: f32 +) -> LightResult { + var totalSpecular = vec3(0.0, 0.0, 0.0); + var totalDiffuse = vec3(0.0, 0.0, 0.0); + + if (uniforms.uUseLighting == 0) { + return LightResult(vec3(1.0, 1.0, 1.0), totalSpecular); + } + + let viewDirection = normalize(-modelPosition); + + for (var j = 0; j < 5; j++) { + if (j < uniforms.uDirectionalLightCount) { + let lightVector = (uniforms.uViewMatrix * vec4( + uniforms.uLightingDirection[j], + 0.0 + )).xyz; + let lightColor = uniforms.uDirectionalDiffuseColors[j]; + let specularColor = uniforms.uDirectionalSpecularColors[j]; + let result = singleLight(viewDirection, normal, lightVector, shininess, metallic); + totalDiffuse += result.diffuse * lightColor; + totalSpecular += result.specular * specularColor; + } + + if (j < uniforms.uPointLightCount) { + let lightPosition = (uniforms.uViewMatrix * vec4( + uniforms.uPointLightLocation[j], + 1.0 + )).xyz; + let lightVector = modelPosition - lightPosition; + let lightDistance = length(lightVector); + let lightFalloff = 1.0 / ( + uniforms.uConstantAttenuation + + lightDistance * uniforms.uLinearAttenuation + + lightDistance * lightDistance * uniforms.uQuadraticAttenuation + ); + let lightColor = uniforms.uPointLightDiffuseColors[j] * lightFalloff; + let specularColor = uniforms.uPointLightSpecularColors[j] * lightFalloff; + let result = singleLight(viewDirection, normal, lightVector, shininess, metallic); + totalDiffuse += result.diffuse * lightColor; + totalSpecular += result.specular * specularColor; + } + + if (j < uniforms.uSpotLightCount) { + let lightPosition 
= (uniforms.uViewMatrix * vec4( + uniforms.uSpotLightLocation[j], + 1.0 + )).xyz; + let lightVector = modelPosition - lightPosition; + let lightDistance = length(lightVector); + var lightFalloff = 1.0 / ( + uniforms.uConstantAttenuation + + lightDistance * uniforms.uLinearAttenuation + + lightDistance * lightDistance * uniforms.uQuadraticAttenuation + ); + let lightDirection = (uniforms.uViewMatrix * vec4( + uniforms.uSpotLightDirection[j], + 0.0 + )).xyz; + let spotDot = dot(normalize(lightVector), normalize(lightDirection)); + let spotFalloff = select( + 0.0, + pow(spotDot, uniforms.uSpotLightConc[j]), + spotDot < uniforms.uSpotLightAngle[j] + ); + lightFalloff *= spotFalloff; + let lightColor = uniforms.uSpotLightDiffuseColors[j]; + let specularColor = uniforms.uSpotLightSpecularColors[j]; + let result = singleLight(viewDirection, normal, lightVector, shininess, metallic); + totalDiffuse += result.diffuse * lightColor; + totalSpecular += result.specular * specularColor; + } + } + + // TODO: image light + + return LightResult( + totalDiffuse * diffuseFactor, + totalSpecular * specularFactor + ); +} + +@fragment +fn main(input: FragmentInput) -> @location(0) vec4 { + HOOK_beforeFragment(); + + let color = input.vColor; // TODO: check isTexture and apply tint + var inputs = Inputs( + normalize(input.vNormal), + input.vTexCoord, + uniforms.uAmbientColor, + select(color.rgb, uniforms.uAmbientMatColor.rgb, uniforms.uHasSetAmbient == 1), + uniforms.uSpecularMatColor.rgb, + uniforms.uEmissiveMatColor.rgb, + color, + uniforms.uShininess, + uniforms.uMetallic + ); + inputs = HOOK_getPixelInputs(inputs); + + let light = totalLight( + input.vViewPosition, + inputs.normal, + inputs.shininess, + inputs.metalness + ); + + let baseColor = inputs.color; + let components = ColorComponents( + baseColor.rgb, + baseColor.a, + inputs.ambientMaterial, + inputs.specularMaterial, + light.diffuse, + inputs.ambientLight, + light.specular, + inputs.emissiveMaterial + ); + + var outColor = HOOK_getFinalColor( + HOOK_combineColors(components) + ); + outColor = vec4(outColor.rgb * outColor.a, outColor.a); + HOOK_afterFragment(); + return outColor; +} +`; From 5db2d3331476e490e40a48ff9431212c019f7147 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 12:40:27 +0100 Subject: [PATCH 12/69] changed drawbuffers to draw stroke and fill buffers depending on current shader --- src/core/p5.Renderer3D.js | 4 +-- src/webgpu/p5.RendererWebGPU.js | 46 +++++++++++++++++---------------- 2 files changed, 26 insertions(+), 24 deletions(-) diff --git a/src/core/p5.Renderer3D.js b/src/core/p5.Renderer3D.js index a2e40d83c5..223382f141 100644 --- a/src/core/p5.Renderer3D.js +++ b/src/core/p5.Renderer3D.js @@ -557,7 +557,7 @@ export class Renderer3D extends Renderer { geometry.hasFillTransparency() ); - this._drawBuffers(geometry, { mode, count }, false); + this._drawBuffers(geometry, { mode, count }); shader.unbindShader(); } @@ -583,7 +583,7 @@ export class Renderer3D extends Renderer { geometry.hasStrokeTransparency() ); - this._drawBuffers(geometry, {count}, true) + this._drawBuffers(geometry, {count}) shader.unbindShader(); } diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index ffb012e1f1..5d1439c855 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -548,7 +548,7 @@ class RendererWebGPU extends Renderer3D { // Rendering ////////////////////////////////////////////// - _drawBuffers(geometry, { mode = constants.TRIANGLES, count = 1 }, stroke) { + 
_drawBuffers(geometry, { mode = constants.TRIANGLES, count = 1 }) { const buffers = this.geometryBufferCache.getCached(geometry); if (!buffers) return; @@ -578,33 +578,33 @@ class RendererWebGPU extends Renderer3D { }; const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor); - passEncoder.setPipeline(this._curShader.getPipeline(this._shaderOptions({ mode }))); + const currentShader = this._curShader; + passEncoder.setPipeline(currentShader.getPipeline(this._shaderOptions({ mode }))); // Bind vertex buffers - for (const buffer of this._getVertexBuffers(this._curShader)) { + for (const buffer of this._getVertexBuffers(currentShader)) { passEncoder.setVertexBuffer( - this._curShader.attributes[buffer.attr].location, + currentShader.attributes[buffer.attr].location, buffers[buffer.dst], 0 ); } // Bind uniforms this._packUniforms(this._curShader); - console.log(this._curShader); this.device.queue.writeBuffer( - this._curShader._uniformBuffer, + currentShader._uniformBuffer, 0, - this._curShader._uniformData.buffer, - this._curShader._uniformData.byteOffset, - this._curShader._uniformData.byteLength + currentShader._uniformData.buffer, + currentShader._uniformData.byteOffset, + currentShader._uniformData.byteLength ); // Bind sampler/texture uniforms - for (const [group, entries] of this._curShader._groupEntries) { + for (const [group, entries] of currentShader._groupEntries) { const bgEntries = entries.map(entry => { if (group === 0 && entry.binding === 0) { return { binding: 0, - resource: { buffer: this._curShader._uniformBuffer }, + resource: { buffer: currentShader._uniformBuffer }, }; } @@ -616,7 +616,7 @@ class RendererWebGPU extends Renderer3D { }; }); - const layout = this._curShader._bindGroupLayouts[group]; + const layout = currentShader._bindGroupLayouts[group]; const bindGroup = this.device.createBindGroup({ layout, entries: bgEntries, @@ -624,18 +624,20 @@ class RendererWebGPU extends Renderer3D { passEncoder.setBindGroup(group, bindGroup); } - if (buffers.lineVerticesBuffer && geometry.lineVertices && stroke) { + if (currentShader.shaderType === "fill") { + // Bind index buffer and issue draw + if (buffers.indexBuffer) { + const indexFormat = buffers.indexFormat || "uint16"; + passEncoder.setIndexBuffer(buffers.indexBuffer, indexFormat); + passEncoder.drawIndexed(geometry.faces.length * 3, count, 0, 0, 0); + } else { + passEncoder.draw(geometry.vertices.length, count, 0, 0); + } + } + + if (buffers.lineVerticesBuffer && currentShader.shaderType === "stroke") { passEncoder.draw(geometry.lineVertices.length / 3, count, 0, 0); } - // Bind index buffer and issue draw - if (!stroke) { - if (buffers.indexBuffer) { - const indexFormat = buffers.indexFormat || "uint16"; - passEncoder.setIndexBuffer(buffers.indexBuffer, indexFormat); - passEncoder.drawIndexed(geometry.faces.length * 3, count, 0, 0, 0); - } else { - passEncoder.draw(geometry.vertices.length, count, 0, 0); - }} passEncoder.end(); this.queue.submit([commandEncoder.finish()]); From a116e6fa7b7df59fb695b4a59e4faac55ba7836d Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 12:42:46 +0100 Subject: [PATCH 13/69] fixed uViewport uniform (uniform problem fixed in upstream) --- src/core/p5.Renderer3D.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/core/p5.Renderer3D.js b/src/core/p5.Renderer3D.js index 223382f141..a487d975f9 100644 --- a/src/core/p5.Renderer3D.js +++ b/src/core/p5.Renderer3D.js @@ -1414,7 +1414,7 @@ export class Renderer3D extends Renderer { 
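    // Note (assumption, comment only): this._viewport is expected to hold
    // [x, y, width, height] for the drawing buffer in device pixels, so a
    // 400x400 canvas at pixelDensity(2) would pass [0, 0, 800, 800] here
    // instead of the previously hardcoded [0, 0, 400, 400]; the stroke shader
    // reads the size from uViewport.zw.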
this.scratchMat3.inverseTranspose4x4(this.states.uViewMatrix); shader.setUniform("uCameraRotation", this.scratchMat3.mat3); } - shader.setUniform("uViewport", [0, 0, 400, 400]); + shader.setUniform("uViewport", this._viewport); } _setStrokeUniforms(strokeShader) { From ddfeb05839ab830bed84fc2105a95af4e8637652 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 12:43:24 +0100 Subject: [PATCH 14/69] remove console log --- src/webgpu/p5.RendererWebGPU.js | 1 - 1 file changed, 1 deletion(-) diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 5d1439c855..acbfa20661 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -948,7 +948,6 @@ class RendererWebGPU extends Renderer3D { } } - //console.log(preMain + '\n' + defines + hooks + main + postMain) return preMain + '\n' + defines + hooks + main + postMain; } } From 14f1857fddbf30d66c890158664a6f7639c5e348 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 12:44:09 +0100 Subject: [PATCH 15/69] remove unused variable --- src/webgl/p5.Shader.js | 1 - 1 file changed, 1 deletion(-) diff --git a/src/webgl/p5.Shader.js b/src/webgl/p5.Shader.js index 3f4015311c..b95066a70d 100644 --- a/src/webgl/p5.Shader.js +++ b/src/webgl/p5.Shader.js @@ -983,7 +983,6 @@ class Shader { if (!uniform) { return; } - const gl = this._renderer.GL; if (uniform.isArray) { if ( From 2c19cdcaa0abaf05476e8413ec9130d1392f14de Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 12:45:30 +0100 Subject: [PATCH 16/69] remove hardcoded viewport (uniform issue fixed upstream) --- src/webgpu/shaders/line.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/webgpu/shaders/line.js b/src/webgpu/shaders/line.js index a42e7b178e..15c3684886 100644 --- a/src/webgpu/shaders/line.js +++ b/src/webgpu/shaders/line.js @@ -75,7 +75,6 @@ fn lineIntersection(aPoint: vec2f, aDir: vec2f, bPoint: vec2f, bDir: vec2f) -> v fn main(input: StrokeVertexInput) -> StrokeVertexOutput { HOOK_beforeVertex(); var output: StrokeVertexOutput; - let viewport = vec4(0.,0.,400.,400.); let simpleLines = (uniforms.uSimpleLines != 0.); if (!simpleLines) { if (all(input.aTangentIn == vec3()) != all(input.aTangentOut == vec3())) { @@ -219,7 +218,7 @@ fn main(input: StrokeVertexInput) -> StrokeVertexOutput { if (sideEnum == 2.) 
{ // Calculate the position + tangent on either side of the join, and // find where the lines intersect to find the elbow of the join - var c = (posp.xy / posp.w + vec2(1.)) * 0.5 * viewport.zw; + var c = (posp.xy / posp.w + vec2(1.)) * 0.5 * uniforms.uViewport.zw; var intersection = lineIntersection( c + (side * normalIn * inputs.weight / 2.), From e7696f067a45c3f7afc9c5ccdcc9b386790fa1de Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 12:46:01 +0100 Subject: [PATCH 17/69] fix stroke shader bugs from porting process) --- src/webgpu/shaders/line.js | 29 ++++++++++------------------- 1 file changed, 10 insertions(+), 19 deletions(-) diff --git a/src/webgpu/shaders/line.js b/src/webgpu/shaders/line.js index 15c3684886..96402aa1ee 100644 --- a/src/webgpu/shaders/line.js +++ b/src/webgpu/shaders/line.js @@ -179,8 +179,8 @@ fn main(input: StrokeVertexInput) -> StrokeVertexOutput { var qIn = uniforms.uProjectionMatrix * posqIn; var qOut = uniforms.uProjectionMatrix * posqOut; - var tangentIn = normalize((qIn.xy * p.w - p.xy * qIn.w) * viewport.zw); - var tangentOut = normalize((qOut.xy * p.w - p.xy * qOut.w) * viewport.zw); + var tangentIn = normalize((qIn.xy * p.w - p.xy * qIn.w) * uniforms.uViewport.zw); + var tangentOut = normalize((qOut.xy * p.w - p.xy * qOut.w) * uniforms.uViewport.zw); var curPerspScale = vec2(); if (uniforms.uPerspective == 1) { @@ -195,7 +195,7 @@ fn main(input: StrokeVertexInput) -> StrokeVertexOutput { // No Perspective --- // multiply by W (to cancel out division by W later in the pipeline) and // convert from screen to clip (derived from clip to screen above) - curPerspScale = p.w / (0.5 * viewport.zw); + curPerspScale = p.w / (0.5 * uniforms.uViewport.zw); } var offset = vec2(); @@ -234,14 +234,14 @@ fn main(input: StrokeVertexInput) -> StrokeVertexOutput { // the magnitude to avoid lines going across the whole screen when this // happens. var mag = length(offset); - var maxMag = 3 * inputs.weight; + var maxMag = 3. * inputs.weight; if (mag > maxMag) { - offset = vec2(maxMag / mag); - } else if (sideEnum == 1.) { + offset *= maxMag / mag; + } + } else if (sideEnum == 1.) { offset = side * normalIn * inputs.weight / 2.; - } else if (sideEnum == 3.) { + } else if (sideEnum == 3.) 
{ offset = side * normalOut * inputs.weight / 2.; - } } } if (uniforms.uStrokeJoin == 2) { @@ -258,7 +258,7 @@ fn main(input: StrokeVertexInput) -> StrokeVertexOutput { tangent = tangentIn; } output.vTangent = tangent; - var normal = vec2(-tangent.y, tangent.y); + var normal = vec2(-tangent.y, tangent.x); var normalOffset = sign(input.aSide); // Caps will have side values of -2 or 2 on the edge of the cap that @@ -274,17 +274,8 @@ fn main(input: StrokeVertexInput) -> StrokeVertexOutput { output.Position = vec4( p.xy + offset.xy * curPerspScale, - p.zy + p.zw ); - var clip_pos: vec4; - if (input.aSide == 1.0) { - clip_pos = vec4(-0.1, 0.1, 0.5, 1.); - } else if (input.aSide == -1.0) { - clip_pos = vec4(-0.5, 0.5, 0.5, 1.0); - } else { - clip_pos = vec4(0.0, -0.5, 0.5 ,1.0); - } - output.Position = clip_pos; return output; } From ed88f919539a6cdbeb8179bc512198421d6f4413 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 12:46:45 +0100 Subject: [PATCH 18/69] stroke test --- preview/index.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/preview/index.html b/preview/index.html index 016dd60172..5b141511c1 100644 --- a/preview/index.html +++ b/preview/index.html @@ -36,10 +36,10 @@ // p.noStroke(); for (const [i, c] of ['red'].entries()) { p.stroke(0); - p.strokeWeight(10); + p.strokeWeight(2); p.push(); p.fill(c); - p.sphere(60, 4, 2); + p.sphere(60); p.pop(); } }; From ce4b31e9b1fcb1b3952462250a721df826a35520 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Mon, 16 Jun 2025 09:15:35 -0400 Subject: [PATCH 19/69] Coerce modified hooks to boolean --- src/webgpu/p5.RendererWebGPU.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index a2bcaca885..fa738f852c 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -977,8 +977,8 @@ class RendererWebGPU extends Renderer3D { const target = condition === 'ifdef'; if ( ( - shader.hooks.modified.vertex[`${hookType} ${hookName}`] || - shader.hooks.modified.fragment[`${hookType} ${hookName}`] + !!shader.hooks.modified.vertex[`${hookType} ${hookName}`] || + !!shader.hooks.modified.fragment[`${hookType} ${hookName}`] ) === target ) { return body; From 4dc493dbe6ae986c5ef738cef6119c2986f5b7f1 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 16:19:29 +0100 Subject: [PATCH 20/69] add stroke to preview --- preview/index.html | 49 +++++++++++++++++++++++++++++++++++++--------- 1 file changed, 40 insertions(+), 9 deletions(-) diff --git a/preview/index.html b/preview/index.html index 5b141511c1..99a5bc38d5 100644 --- a/preview/index.html +++ b/preview/index.html @@ -25,21 +25,52 @@ const sketch = function (p) { let fbo; let sh; + let ssh; + p.setup = async function () { await p.createCanvas(400, 400, p.WEBGPU); + sh = p.baseMaterialShader().modify({ + uniforms: { + 'f32 time': () => p.millis(), + }, + 'Vertex getWorldInputs': `(inputs: Vertex) { + var result = inputs; + result.position.y += 40.0 * sin(uniforms.time * 0.01); + return result; + }`, + }) + ssh = p.baseStrokeShader().modify({ + uniforms: { + 'f32 time': () => p.millis(), + }, + 'StrokeVertex getWorldInputs': `(inputs: StrokeVertex) { + var result = inputs; + result.position.y += 40.0 * sin(uniforms.time * 0.01); + return result; + }`, + }) }; - p.disableFriendlyErrors = true; + p.draw = function () { - p.orbitControl() const t = p.millis() * 0.008; - p.background(0); - // p.noStroke(); - for (const [i, c] of ['red'].entries()) 
{ - p.stroke(0); - p.strokeWeight(2); - p.push(); + p.background(200); + p.shader(sh); + p.strokeShader(ssh) + p.ambientLight(50); + p.directionalLight(100, 100, 100, 0, 1, -1); + p.pointLight(155, 155, 155, 0, -200, 500); + p.specularMaterial(255); + p.shininess(300); + p.stroke('white') + for (const [i, c] of ['red', 'lime', 'blue'].entries()) { + p.push(); p.fill(c); - p.sphere(60); + p.translate( + p.width/3 * p.sin(t + i * Math.E), + 0, //p.width/3 * p.sin(t * 0.9 + i * Math.E + 0.2), + p.width/3 * p.sin(t * 1.2 + i * Math.E + 0.3), + ) + p.sphere(30); p.pop(); } }; From d88fba92a6ab643d0205b02e41e107230ad3b647 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 16:21:44 +0100 Subject: [PATCH 21/69] change stroke shader constants/ preprocessor to be compatible with webgpu and webgl --- src/core/p5.Renderer3D.js | 8 ++++---- src/webgl/p5.RendererGL.js | 2 +- src/webgpu/p5.RendererWebGPU.js | 6 +++++- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/src/core/p5.Renderer3D.js b/src/core/p5.Renderer3D.js index 394462f3d7..c437f34725 100644 --- a/src/core/p5.Renderer3D.js +++ b/src/core/p5.Renderer3D.js @@ -16,16 +16,16 @@ import { RenderBuffer } from "../webgl/p5.RenderBuffer"; import { Image } from "../image/p5.Image"; import { Texture } from "../webgl/p5.Texture"; -export function getStrokeDefs() { +export function getStrokeDefs(shaderConstant) { const STROKE_CAP_ENUM = {}; const STROKE_JOIN_ENUM = {}; let lineDefs = ""; const defineStrokeCapEnum = function (key, val) { - lineDefs += `#define STROKE_CAP_${key} ${val}\n`; + lineDefs += shaderConstant(`STROKE_CAP_${key}`, `${val}`, 'u32'); STROKE_CAP_ENUM[constants[key]] = val; }; const defineStrokeJoinEnum = function (key, val) { - lineDefs += `#define STROKE_JOIN_${key} ${val}\n`; + lineDefs += shaderConstant(`STROKE_JOIN_${key}`, `${val}`, 'u32'); STROKE_JOIN_ENUM[constants[key]] = val; }; @@ -41,7 +41,7 @@ export function getStrokeDefs() { return { STROKE_CAP_ENUM, STROKE_JOIN_ENUM, lineDefs }; } -const { STROKE_CAP_ENUM, STROKE_JOIN_ENUM } = getStrokeDefs(); +const { STROKE_CAP_ENUM, STROKE_JOIN_ENUM } = getStrokeDefs(()=>""); export class Renderer3D extends Renderer { constructor(pInst, w, h, isMainCanvas, elt) { diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index e033e3b1c2..6cc8273a66 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -39,7 +39,7 @@ import filterInvertFrag from "./shaders/filters/invert.frag"; import filterThresholdFrag from "./shaders/filters/threshold.frag"; import filterShaderVert from "./shaders/filters/default.vert"; -const { lineDefs } = getStrokeDefs(); +const { lineDefs } = getStrokeDefs((n, v) => `#define ${n} ${v};\n`); const defaultShaders = { normalVert, diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 61e4254675..9af8bb4256 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -1,10 +1,14 @@ -import { Renderer3D } from '../core/p5.Renderer3D'; +import { Renderer3D, getStrokeDefs } from '../core/p5.Renderer3D'; import { Shader } from '../webgl/p5.Shader'; import * as constants from '../core/constants'; + + import { colorVertexShader, colorFragmentShader } from './shaders/color'; import { lineVertexShader, lineFragmentShader} from './shaders/line'; import { materialVertexShader, materialFragmentShader } from './shaders/material'; +const { lineDefs } = getStrokeDefs((n, v, t) => `const ${n}: ${t} = ${v};\n`); + class RendererWebGPU extends Renderer3D { 
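  // Illustrative sketch (comment only): lineDefs above prepends backend-specific
  // constants from the shared getStrokeDefs factory. For defineStrokeCapEnum("ROUND", 0):
  //   WebGL callback  (n, v)    => `#define ${n} ${v};\n`      -> "#define STROKE_CAP_ROUND 0;"
  //   WebGPU callback (n, v, t) => `const ${n}: ${t} = ${v};\n` -> "const STROKE_CAP_ROUND: u32 = 0;"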
constructor(pInst, w, h, isMainCanvas, elt) { super(pInst, w, h, isMainCanvas, elt) From c48977aa13eb23f9422399783f93f584784f68dc Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 16:24:56 +0100 Subject: [PATCH 22/69] rename fillHooks to populateHooks (ambiguous with fill/stroke) --- src/webgl/p5.RendererGL.js | 2 +- src/webgl/p5.Shader.js | 2 +- src/webgpu/p5.RendererWebGPU.js | 19 ++++--- src/webgpu/shaders/line.js | 94 +++++++++++++++++++++++++-------- 4 files changed, 87 insertions(+), 30 deletions(-) diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index 6cc8273a66..e05e6839b1 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -1683,7 +1683,7 @@ class RendererGL extends Renderer3D { ////////////////////////////////////////////// // Shader hooks ////////////////////////////////////////////// - fillHooks(shader, src, shaderType) { + populateHooks(shader, src, shaderType) { const main = 'void main'; if (!src.includes(main)) return src; diff --git a/src/webgl/p5.Shader.js b/src/webgl/p5.Shader.js index fc3745e394..0134701c9c 100644 --- a/src/webgl/p5.Shader.js +++ b/src/webgl/p5.Shader.js @@ -123,7 +123,7 @@ class Shader { } shaderSrc(src, shaderType) { - return this._renderer.fillHooks(this, src, shaderType); + return this._renderer.populateHooks(this, src, shaderType); } /** diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 9af8bb4256..5d023e266f 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -954,17 +954,22 @@ class RendererWebGPU extends Renderer3D { if (!this._defaultLineShader) { this._defaultLineShader = new Shader( this, - lineVertexShader, - lineFragmentShader, + lineDefs + lineVertexShader, + lineDefs + lineFragmentShader, { vertex: { "void beforeVertex": "() {}", - "Vertex getObjectInputs": "(inputs: Vertex) { return inputs; }", - "Vertex getWorldInputs": "(inputs: Vertex) { return inputs; }", - "Vertex getCameraInputs": "(inputs: Vertex) { return inputs; }", + "StrokeVertex getObjectInputs": "(inputs: StrokeVertex) { return inputs; }", + "StrokeVertex getWorldInputs": "(inputs: StrokeVertex) { return inputs; }", + "StrokeVertex getCameraInputs": "(inputs: StrokeVertex) { return inputs; }", + "void afterVertex": "() {}", }, fragment: { - "vec4 getFinalColor": "(color: vec4) { return color; }" + "void beforeFragment": "() {}", + "Inputs getPixelInputs": "(inputs: Inputs) { return inputs; }", + "vec4 getFinalColor": "(color: vec4) { return color; }", + "bool shouldDiscard": "(outside: bool) { return outside; };", + "void afterFragment": "() {}", }, } ); @@ -987,7 +992,7 @@ class RendererWebGPU extends Renderer3D { ////////////////////////////////////////////// // Shader hooks ////////////////////////////////////////////// - fillHooks(shader, src, shaderType) { + populateHooks(shader, src, shaderType) { if (!src.includes('fn main')) return src; // Apply some p5-specific preprocessing. 
WGSL doesn't have preprocessor diff --git a/src/webgpu/shaders/line.js b/src/webgpu/shaders/line.js index 96402aa1ee..0aa9f5e72b 100644 --- a/src/webgpu/shaders/line.js +++ b/src/webgpu/shaders/line.js @@ -2,11 +2,11 @@ import { getTexture } from './utils' const uniforms = ` struct Uniforms { -// @p5 ifdef Vertex getWorldInputs +// @p5 ifdef StrokeVertex getWorldInputs uModelMatrix: mat4x4, uViewMatrix: mat4x4, // @p5 endif -// @p5 ifndef Vertex getWorldInputs +// @p5 ifndef StrokeVertex getWorldInputs uModelViewMatrix: mat4x4, // @p5 endif uMaterialColor: vec4, @@ -15,10 +15,10 @@ struct Uniforms { uUseLineColor: f32, uSimpleLines: f32, uViewport: vec4, - uPerspective: i32, - uStrokeJoin: i32, -} -`; + uPerspective: u32, + uStrokeCap: u32, + uStrokeJoin: u32, +}`; export const lineVertexShader = ` struct StrokeVertexInput { @@ -44,7 +44,7 @@ struct StrokeVertexOutput { ${uniforms} @group(0) @binding(0) var uniforms: Uniforms; -struct Vertex { +struct StrokeVertex { position: vec3, tangentIn: vec3, tangentOut: vec3, @@ -97,7 +97,7 @@ fn main(input: StrokeVertexInput) -> StrokeVertexOutput { } else { lineColor = uniforms.uMaterialColor; } - var inputs = Vertex( + var inputs = StrokeVertex( input.aPosition.xyz, input.aTangentIn, input.aTangentOut, @@ -105,29 +105,30 @@ fn main(input: StrokeVertexInput) -> StrokeVertexOutput { uniforms.uStrokeWeight ); -// @p5 ifdef Vertex getObjectInputs +// @p5 ifdef StrokeVertex getObjectInputs inputs = HOOK_getObjectInputs(inputs); // @p5 endif -// @p5 ifdef Vertex getWorldInputs - inputs.position = (uModelMatrix * vec4(inputs.position, 1.)).xyz; - inputs.tangentIn = (uModelMatrix * vec4(input.aTangentIn, 1.)).xyz; - inputs.tangentOut = (uModelMatrix * vec4(input.aTangentOut, 1.)).xyz; +// @p5 ifdef StrokeVertex getWorldInputs + inputs.position = (uniforms.uModelMatrix * vec4(inputs.position, 1.)).xyz; + inputs.tangentIn = (uniforms.uModelMatrix * vec4(input.aTangentIn, 1.)).xyz; + inputs.tangentOut = (uniforms.uModelMatrix * vec4(input.aTangentOut, 1.)).xyz; + inputs = HOOK_getWorldInputs(inputs); // @p5 endif -// @p5 ifdef Vertex getWorldInputs +// @p5 ifdef StrokeVertex getWorldInputs // Already multiplied by the model matrix, just apply view inputs.position = (uniforms.uViewMatrix * vec4(inputs.position, 1.)).xyz; inputs.tangentIn = (uniforms.uViewMatrix * vec4(input.aTangentIn, 0.)).xyz; inputs.tangentOut = (uniforms.uViewMatrix * vec4(input.aTangentOut, 0.)).xyz; // @p5 endif -// @p5 ifndef Vertex getWorldInputs +// @p5 ifndef StrokeVertex getWorldInputs // Apply both at once inputs.position = (uniforms.uModelViewMatrix * vec4(inputs.position, 1.)).xyz; inputs.tangentIn = (uniforms.uModelViewMatrix * vec4(input.aTangentIn, 0.)).xyz; inputs.tangentOut = (uniforms.uModelViewMatrix * vec4(input.aTangentOut, 0.)).xyz; // @p5 endif -// @p5 ifdef Vertex getCameraInputs +// @p5 ifdef StrokeVertex getCameraInputs inputs = HOOK_getCameraInputs(inputs); // @p5 endif @@ -276,11 +277,9 @@ fn main(input: StrokeVertexInput) -> StrokeVertexOutput { p.xy + offset.xy * curPerspScale, p.zw ); + HOOK_afterVertex(); return output; -} - - -`; +}`; export const lineFragmentShader = ` struct StrokeFragmentInput { @@ -299,9 +298,62 @@ ${uniforms} ${getTexture} +fn distSquared(a: vec2, b: vec2) -> f32 { + return dot(b - a, b - a); +} + +struct Inputs { + color: vec4, + tangent: vec2, + center: vec2, + position: vec2, + strokeWeight: f32, +} + @fragment fn main(input: StrokeFragmentInput) -> @location(0) vec4 { - return vec4(1., 1., 1., 1.); + HOOK_beforeFragment(); + + var 
inputs: Inputs; + inputs.color = input.vColor; + inputs.tangent = input.vTangent; + inputs.center = input.vCenter; + inputs.position = input.vPosition; + inputs.strokeWeight = input.vStrokeWeight; + inputs = HOOK_getPixelInputs(inputs); + + if (input.vCap > 0.) { + if ( + uniforms.uStrokeCap == STROKE_CAP_ROUND && + HOOK_shouldDiscard(distSquared(inputs.position, inputs.center) > inputs.strokeWeight * inputs.strokeWeight * 0.25) + ) { + discard; + } else if ( + uniforms.uStrokeCap == STROKE_CAP_SQUARE && + HOOK_shouldDiscard(dot(inputs.position - inputs.center, inputs.tangent) > 0.) + ) { + discard; + } else if (HOOK_shouldDiscard(false)) { + discard; + } + } else if (input.vJoin > 0.) { + if ( + uniforms.uStrokeJoin == STROKE_JOIN_ROUND && + HOOK_shouldDiscard(distSquared(inputs.position, inputs.center) > inputs.strokeWeight * inputs.strokeWeight * 0.25) + ) { + discard; + } else if (uniforms.uStrokeJoin == STROKE_JOIN_BEVEL) { + let normal = vec2(-inputs.tangent.y, -inputs.tangent.x); + if (HOOK_shouldDiscard(abs(dot(inputs.position - inputs.center, normal)) > input.vMaxDist)) { + discard; + } + } else if (HOOK_shouldDiscard(false)) { + discard; + } + } + var col = HOOK_getFinalColor(vec4(inputs.color.rgb, 1.) * inputs.color.a); + HOOK_afterFragment(); + return vec4(col); } `; From f0875d795be9297bcdf9b312dffcafeda4741536 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 17:40:17 +0100 Subject: [PATCH 23/69] add strokes back to WebGL mode --- src/webgl/p5.RendererGL.js | 42 +++++++++++++++----------------------- 1 file changed, 17 insertions(+), 25 deletions(-) diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index e05e6839b1..70adb68c16 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -270,37 +270,29 @@ class RendererGL extends Renderer3D { } } - // Stroke version for now: - // -// { -// const gl = this.GL; -// // move this to _drawBuffers ? 
-// if (count === 1) { -// gl.drawArrays(gl.TRIANGLES, 0, geometry.lineVertices.length / 3); -// } else { -// try { - // gl.drawArraysInstanced( - // gl.TRIANGLES, - // 0, - // geometry.lineVertices.length / 3, - // count - // ); - // } catch (e) { - // console.log( - // "🌸 p5.js says: Instancing is only supported in WebGL2 mode" - // ); - // } - // } - // } - _drawBuffers(geometry, { mode = constants.TRIANGLES, count }) { const gl = this.GL; const glBuffers = this.geometryBufferCache.getCached(geometry); - //console.log(glBuffers); if (!glBuffers) return; - if (glBuffers.indexBuffer) { + if (this._curShader.shaderType === 'stroke'){ + if (count === 1) { + gl.drawArrays(gl.TRIANGLES, 0, geometry.lineVertices.length / 3); + } else { + try { + gl.drawArraysInstanced( + gl.TRIANGLES, + 0, + geometry.lineVertices.length / 3, + count + ); + } catch (e) { + console.log( + "🌸 p5.js says: Instancing is only supported in WebGL2 mode" + ); + } + } else if (glBuffers.indexBuffer) { this._bindBuffer(glBuffers.indexBuffer, gl.ELEMENT_ARRAY_BUFFER); // If this model is using a Uint32Array we need to ensure the From e8bedfc470d01df99d2ebaf8d26a0e1f4afd5ad2 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 17:40:17 +0100 Subject: [PATCH 24/69] add strokes back to WebGL mode --- src/webgl/p5.RendererGL.js | 43 ++++++++++++++++---------------------- 1 file changed, 18 insertions(+), 25 deletions(-) diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index e05e6839b1..01dc2d035e 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -270,37 +270,30 @@ class RendererGL extends Renderer3D { } } - // Stroke version for now: - // -// { -// const gl = this.GL; -// // move this to _drawBuffers ? -// if (count === 1) { -// gl.drawArrays(gl.TRIANGLES, 0, geometry.lineVertices.length / 3); -// } else { -// try { - // gl.drawArraysInstanced( - // gl.TRIANGLES, - // 0, - // geometry.lineVertices.length / 3, - // count - // ); - // } catch (e) { - // console.log( - // "🌸 p5.js says: Instancing is only supported in WebGL2 mode" - // ); - // } - // } - // } - _drawBuffers(geometry, { mode = constants.TRIANGLES, count }) { const gl = this.GL; const glBuffers = this.geometryBufferCache.getCached(geometry); - //console.log(glBuffers); if (!glBuffers) return; - if (glBuffers.indexBuffer) { + if (this._curShader.shaderType === 'stroke') { + if (count === 1) { + gl.drawArrays(gl.TRIANGLES, 0, geometry.lineVertices.length / 3); + } else { + try { + gl.drawArraysInstanced( + gl.TRIANGLES, + 0, + geometry.lineVertices.length / 3, + count + ); + } catch (e) { + console.log( + "🌸 p5.js says: Instancing is only supported in WebGL2 mode" + ); + } + } + } else if (glBuffers.indexBuffer) { this._bindBuffer(glBuffers.indexBuffer, gl.ELEMENT_ARRAY_BUFFER); // If this model is using a Uint32Array we need to ensure the From 0af8df9e37e94f189bbeb8945f581872fb9639b4 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 18:07:51 +0100 Subject: [PATCH 25/69] typo in string --- src/webgpu/p5.RendererWebGPU.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 5d023e266f..0b9dd6fcfa 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -675,7 +675,7 @@ class RendererWebGPU extends Renderer3D { new RegExp(`struct\\s+${structName}\\s*\\{([^\\}]+)\\}`) ); if (!structMatch) { - throw new Error(`Can't find a struct defnition for ${structName}`); + throw new Error(`Can't 
find a struct definition for ${structName}`); } const structBody = structMatch[1]; From f120ad95d83ed0bc007af275264e39a4caba7c14 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 18:07:51 +0100 Subject: [PATCH 26/69] typo in string --- src/webgl/p5.RendererGL.js | 7 ------- src/webgpu/p5.RendererWebGPU.js | 2 +- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index 676cb3b6e7..f683075df1 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -276,11 +276,7 @@ class RendererGL extends Renderer3D { if (!glBuffers) return; -<<<<<<< HEAD - if (this._curShader.shaderType === 'stroke') { -======= if (this._curShader.shaderType === 'stroke'){ ->>>>>>> f0875d795be9297bcdf9b312dffcafeda4741536 if (count === 1) { gl.drawArrays(gl.TRIANGLES, 0, geometry.lineVertices.length / 3); } else { @@ -296,10 +292,7 @@ class RendererGL extends Renderer3D { "🌸 p5.js says: Instancing is only supported in WebGL2 mode" ); } -<<<<<<< HEAD } -======= ->>>>>>> f0875d795be9297bcdf9b312dffcafeda4741536 } else if (glBuffers.indexBuffer) { this._bindBuffer(glBuffers.indexBuffer, gl.ELEMENT_ARRAY_BUFFER); diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 5d023e266f..0b9dd6fcfa 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -675,7 +675,7 @@ class RendererWebGPU extends Renderer3D { new RegExp(`struct\\s+${structName}\\s*\\{([^\\}]+)\\}`) ); if (!structMatch) { - throw new Error(`Can't find a struct defnition for ${structName}`); + throw new Error(`Can't find a struct definition for ${structName}`); } const structBody = structMatch[1]; From 468b6384c0bc8e6cd3b64a7e52f52c210739e085 Mon Sep 17 00:00:00 2001 From: lukeplowden Date: Mon, 16 Jun 2025 18:15:18 +0100 Subject: [PATCH 27/69] multiply alpha after hook --- src/webgpu/shaders/line.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/webgpu/shaders/line.js b/src/webgpu/shaders/line.js index 0aa9f5e72b..5c01ddd1bf 100644 --- a/src/webgpu/shaders/line.js +++ b/src/webgpu/shaders/line.js @@ -351,7 +351,8 @@ fn main(input: StrokeFragmentInput) -> @location(0) vec4 { discard; } } - var col = HOOK_getFinalColor(vec4(inputs.color.rgb, 1.) 
* inputs.color.a); + var col = HOOK_getFinalColor(inputs.color); + col = vec4(col.rgb, 1.0) * col.a; HOOK_afterFragment(); return vec4(col); } From 01a7c1c69d8ecc6e6e21b0cf9b00f0f39a663cbe Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 17 Jun 2025 19:26:36 -0400 Subject: [PATCH 28/69] Fix some RendererGL + filterRenderer2D tests --- src/image/filterRenderer2D.js | 173 ++++++++++++++- src/shape/custom_shapes.js | 64 +++--- src/webgl/3d_primitives.js | 8 +- src/webgl/p5.Framebuffer.js | 20 +- src/webgl/p5.Geometry.js | 16 +- src/webgl/p5.RendererGL.js | 366 ++----------------------------- src/webgl/p5.Shader.js | 14 +- src/webgl/p5.Texture.js | 7 +- src/webgl/shaders/line.vert | 2 +- src/webgl/utils.js | 351 +++++++++++++++++++++++++++++ test/unit/webgl/p5.RendererGL.js | 3 +- vitest.workspace.mjs | 3 +- 12 files changed, 628 insertions(+), 399 deletions(-) diff --git a/src/image/filterRenderer2D.js b/src/image/filterRenderer2D.js index 97eed42671..cfdf10eb8d 100644 --- a/src/image/filterRenderer2D.js +++ b/src/image/filterRenderer2D.js @@ -1,6 +1,13 @@ import { Shader } from "../webgl/p5.Shader"; import { Texture } from "../webgl/p5.Texture"; import { Image } from "./p5.Image"; +import { + getWebGLShaderAttributes, + getWebGLUniformMetadata, + populateGLSLHooks, + setWebGLTextureParams, + setWebGLUniformValue +} from "../webgl/utils"; import * as constants from '../core/constants'; import filterGrayFrag from '../webgl/shaders/filters/gray.frag'; @@ -42,6 +49,9 @@ class FilterRenderer2D { console.error("WebGL not supported, cannot apply filter."); return; } + + this.textures = new Map(); + // Minimal renderer object required by p5.Shader and p5.Texture this._renderer = { GL: this.gl, @@ -62,6 +72,167 @@ class FilterRenderer2D { } return this._emptyTexture; }, + _initShader: (shader) => { + const gl = this.gl; + + const vertShader = gl.createShader(gl.VERTEX_SHADER); + gl.shaderSource(vertShader, shader.vertSrc()); + gl.compileShader(vertShader); + if (!gl.getShaderParameter(vertShader, gl.COMPILE_STATUS)) { + throw new Error(`Yikes! An error occurred compiling the vertex shader: ${ + gl.getShaderInfoLog(vertShader) + }`); + } + + const fragShader = gl.createShader(gl.FRAGMENT_SHADER); + gl.shaderSource(fragShader, shader.fragSrc()); + gl.compileShader(fragShader); + if (!gl.getShaderParameter(fragShader, gl.COMPILE_STATUS)) { + throw new Error(`Darn! An error occurred compiling the fragment shader: ${ + gl.getShaderInfoLog(fragShader) + }`); + } + + const program = gl.createProgram(); + gl.attachShader(program, vertShader); + gl.attachShader(program, fragShader); + gl.linkProgram(program); + + if (!gl.getProgramParameter(program, gl.LINK_STATUS)) { + throw new Error( + `Snap! 
Error linking shader program: ${gl.getProgramInfoLog(program)}` + ); + } + + shader._glProgram = program; + shader._vertShader = vertShader; + shader._fragShader = fragShader; + }, + getTexture: (input) => { + let src = input; + if (src instanceof Framebuffer) { + src = src.color; + } + + const texture = this.textures.get(src); + if (texture) { + return texture; + } + + const tex = new Texture(this._renderer, src); + this.textures.set(src, tex); + return tex; + }, + populateHooks: (shader, src, shaderType) => { + return populateGLSLHooks(shader, src, shaderType); + }, + _getShaderAttributes: (shader) => { + return getWebGLShaderAttributes(shader, this.gl); + }, + getUniformMetadata: (shader) => { + return getWebGLUniformMetadata(shader, this.gl); + }, + _finalizeShader: () => {}, + _useShader: (shader) => { + this.gl.useProgram(shader._glProgram); + }, + bindTexture: (tex) => { + // bind texture using gl context + glTarget and + // generated gl texture object + this.gl.bindTexture(this.gl.TEXTURE_2D, tex.getTexture().texture); + }, + unbindTexture: () => { + // unbind per above, disable texturing on glTarget + this.gl.bindTexture(this.gl.TEXTURE_2D, null); + }, + _unbindFramebufferTexture: (uniform) => { + // Make sure an empty texture is bound to the slot so that we don't + // accidentally leave a framebuffer bound, causing a feedback loop + // when something else tries to write to it + const gl = this.gl; + const empty = this._getEmptyTexture(); + gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); + empty.bindTexture(); + gl.uniform1i(uniform.location, uniform.samplerIndex); + }, + createTexture: ({ width, height, format, dataType }) => { + const gl = this.gl; + const tex = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, tex); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, + gl.RGBA, gl.UNSIGNED_BYTE, null); + // TODO use format and data type + return { texture: tex, glFormat: gl.RGBA, glDataType: gl.UNSIGNED_BYTE }; + }, + uploadTextureFromSource: ({ texture, glFormat, glDataType }, source) => { + const gl = this.gl; + gl.bindTexture(gl.TEXTURE_2D, texture); + gl.texImage2D(gl.TEXTURE_2D, 0, glFormat, glFormat, glDataType, source); + }, + uploadTextureFromData: ({ texture, glFormat, glDataType }, data, width, height) => { + const gl = this.gl; + gl.bindTexture(gl.TEXTURE_2D, texture); + gl.texImage2D( + gl.TEXTURE_2D, + 0, + glFormat, + width, + height, + 0, + glFormat, + glDataType, + data + ); + }, + setTextureParams: (texture) => { + return setWebGLTextureParams(texture, this.gl, this._renderer.webglVersion); + }, + updateUniformValue: (shader, uniform, data) => { + return setWebGLUniformValue( + shader, + uniform, + data, + (tex) => this._renderer.getTexture(tex), + this.gl + ); + }, + _enableAttrib: (_shader, attr, size, type, normalized, stride, offset) => { + const loc = attr.location; + const gl = this.gl; + // Enable register even if it is disabled + if (!this._renderer.registerEnabled.has(loc)) { + gl.enableVertexAttribArray(loc); + // Record register availability + this._renderer.registerEnabled.add(loc); + } + gl.vertexAttribPointer( + loc, + size, + type || gl.FLOAT, + normalized || false, + stride || 0, + offset || 0 + ); + }, + _disableRemainingAttributes: (shader) => { + for (const location of this._renderer.registerEnabled.values()) { + if ( + !Object.keys(shader.attributes).some( + key => shader.attributes[key].location === location + ) + ) { + this.gl.disableVertexAttribArray(location); + this._renderer.registerEnabled.delete(location); + } + } + 
}, + _updateTexture: (uniform, tex) => { + const gl = this.gl; + gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); + tex.bindTexture(); + tex.update(); + gl.uniform1i(uniform.location, uniform.samplerIndex); + } }; this._baseFilterShader = undefined; @@ -257,7 +428,7 @@ class FilterRenderer2D { this._shader.enableAttrib(this._shader.attributes.aTexCoord, 2); this._shader.bindTextures(); - this._shader.disableRemainingAttributes(); + this._renderer._disableRemainingAttributes(this._shader); // Draw the quad gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); diff --git a/src/shape/custom_shapes.js b/src/shape/custom_shapes.js index 9e22f0b75c..3049094125 100644 --- a/src/shape/custom_shapes.js +++ b/src/shape/custom_shapes.js @@ -1170,10 +1170,12 @@ class PrimitiveToPath2DConverter extends PrimitiveVisitor { class PrimitiveToVerticesConverter extends PrimitiveVisitor { contours = []; curveDetail; + pointsToLines; - constructor({ curveDetail = 1 } = {}) { + constructor({ curveDetail = 1, pointsToLines = true } = {}) { super(); this.curveDetail = curveDetail; + this.pointsToLines = pointsToLines; } lastContour() { @@ -1246,7 +1248,11 @@ class PrimitiveToVerticesConverter extends PrimitiveVisitor { } } visitPoint(point) { - this.contours.push(point.vertices.slice()); + if (this.pointsToLines) { + this.contours.push(...point.vertices.map(v => [v, v])); + } else { + this.contours.push(point.vertices.slice()); + } } visitLine(line) { this.contours.push(line.vertices.slice()); @@ -1592,11 +1598,11 @@ function customShapes(p5, fn) { * one call to bezierVertex(), before * a number of `bezierVertex()` calls that is a multiple of the parameter * set by bezierOrder(...) (default 3). - * + * * Each curve of order 3 requires three calls to `bezierVertex`, so * 2 curves would need 7 calls to `bezierVertex()`: * (1 one initial anchor point, two sets of 3 curves describing the curves) - * With `bezierOrder(2)`, two curves would need 5 calls: 1 + 2 + 2. + * With `bezierOrder(2)`, two curves would need 5 calls: 1 + 2 + 2. * * Bézier curves can also be drawn in 3D using WebGL mode. * @@ -1605,7 +1611,7 @@ function customShapes(p5, fn) { * * @method bezierOrder * @param {Number} order The new order to set. Can be either 2 or 3, by default 3 - * + * * @example *
* @@ -1619,7 +1625,7 @@ function customShapes(p5, fn) { * * // Start drawing the shape. * beginShape(); - * + * * // set the order to 2 for a quadratic Bézier curve * bezierOrder(2); * @@ -2059,11 +2065,11 @@ function customShapes(p5, fn) { /** * Sets the property of a curve. - * + * * For example, set tightness, * use `splineProperty('tightness', t)`, with `t` between 0 and 1, * at 0 as default. - * + * * Spline curves are like cables that are attached to a set of points. * Adjusting tightness adjusts how tightly the cable is * attached to the points. The parameter, tightness, determines @@ -2072,33 +2078,33 @@ function customShapes(p5, fn) { * `splineProperty('tightness', 1)`, connects the curve's points * using straight lines. Values in the range from –5 to 5 * deform curves while leaving them recognizable. - * + * * This function can also be used to set 'ends' property * (see also: the curveDetail() example), * such as: `splineProperty('ends', EXCLUDE)` to exclude * vertices, or `splineProperty('ends', INCLUDE)` to include them. - * + * * @method splineProperty * @param {String} property * @param value Value to set the given property to. - * + * * @example *
* * // Move the mouse left and right to see the curve change. - * + * * function setup() { * createCanvas(100, 100); * describe('A black curve forms a sideways U shape. The curve deforms as the user moves the mouse from left to right'); * } - * + * * function draw() { * background(200); - * + * * // Set the curve's tightness using the mouse. * let t = map(mouseX, 0, 100, -5, 5, true); * splineProperty('tightness', t); - * + * * // Draw the curve. * noFill(); * beginShape(); @@ -2124,11 +2130,11 @@ function customShapes(p5, fn) { /** * Get or set multiple spline properties at once. - * + * * Similar to splineProperty(): * `splineProperty('tightness', t)` is the same as * `splineProperties({'tightness': t})` - * + * * @method splineProperties * @param {Object} properties An object containing key-value pairs to set. */ @@ -2307,7 +2313,7 @@ function customShapes(p5, fn) { * } * *
- * + * *
* * let vid; @@ -2315,28 +2321,28 @@ function customShapes(p5, fn) { * // Load a video and create a p5.MediaElement object. * vid = createVideo('/assets/fingers.mov'); * createCanvas(100, 100, WEBGL); - * + * * // Hide the video. * vid.hide(); - * + * * // Set the video to loop. * vid.loop(); - * + * * describe('A rectangle with video as texture'); * } - * + * * function draw() { * background(0); - * + * * // Rotate around the y-axis. * rotateY(frameCount * 0.01); - * + * * // Set the texture mode. * textureMode(NORMAL); - * + * * // Apply the video as a texture. * texture(vid); - * + * * // Draw a custom shape using uv coordinates. * beginShape(); * vertex(-40, -40, 0, 0); @@ -2489,7 +2495,7 @@ function customShapes(p5, fn) { }; /** - * Stops creating a hole within a flat shape. + * Stops creating a hole within a flat shape. * * The beginContour() and `endContour()` * functions allow for creating negative space within custom shapes that are @@ -2499,10 +2505,10 @@ function customShapes(p5, fn) { * called between beginShape() and * endShape(). * - * By default, + * By default, * the controur has an `OPEN` end, and to close it, * call `endContour(CLOSE)`. The CLOSE contour mode closes splines smoothly. - * + * * Transformations such as translate(), * rotate(), and scale() * don't work between beginContour() and diff --git a/src/webgl/3d_primitives.js b/src/webgl/3d_primitives.js index 38c0b426e2..8c29e3ea2d 100644 --- a/src/webgl/3d_primitives.js +++ b/src/webgl/3d_primitives.js @@ -1667,11 +1667,9 @@ function primitives3D(p5, fn){ *
*/ Renderer3D.prototype.point = function(x, y, z = 0) { - - const _vertex = []; - _vertex.push(new Vector(x, y, z)); - // TODO - // this._drawPoints(_vertex, this.buffers.point); + this.beginShape(constants.POINTS); + this.vertex(x, y, z); + this.endShape(); return this; }; diff --git a/src/webgl/p5.Framebuffer.js b/src/webgl/p5.Framebuffer.js index d04d14839f..0ebb3c0daa 100644 --- a/src/webgl/p5.Framebuffer.js +++ b/src/webgl/p5.Framebuffer.js @@ -52,7 +52,8 @@ class FramebufferTexture { } rawTexture() { - return this.framebuffer[this.property]; + // TODO: handle webgpu texture handle + return { texture: this.framebuffer[this.property] }; } } @@ -586,7 +587,7 @@ class Framebuffer { if (this.useDepth) { this.depth = new FramebufferTexture(this, 'depthTexture'); - const depthFilter = gl.NEAREST; + const depthFilter = constants.NEAREST; this.depthP5Texture = new Texture( this.renderer, this.depth, @@ -600,8 +601,8 @@ class Framebuffer { this.color = new FramebufferTexture(this, 'colorTexture'); const filter = this.textureFiltering === constants.LINEAR - ? gl.LINEAR - : gl.NEAREST; + ? constants.LINEAR + : constants.NEAREST; this.colorP5Texture = new Texture( this.renderer, this.color, @@ -921,7 +922,7 @@ class Framebuffer { */ _deleteTexture(texture) { const gl = this.gl; - gl.deleteTexture(texture.rawTexture()); + gl.deleteTexture(texture.rawTexture().texture); this.renderer.textures.delete(texture); } @@ -1115,12 +1116,17 @@ class Framebuffer { gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this.aaFramebuffer); gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this.framebuffer); const partsToCopy = { - colorTexture: [gl.COLOR_BUFFER_BIT, this.colorP5Texture.glMagFilter], + colorTexture: [ + gl.COLOR_BUFFER_BIT, + // TODO: move to renderer + this.colorP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST + ], }; if (this.useDepth) { partsToCopy.depthTexture = [ gl.DEPTH_BUFFER_BIT, - this.depthP5Texture.glMagFilter + // TODO: move to renderer + this.depthP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST ]; } const [flag, filter] = partsToCopy[property]; diff --git a/src/webgl/p5.Geometry.js b/src/webgl/p5.Geometry.js index 22e3a481c4..b74fe4c827 100644 --- a/src/webgl/p5.Geometry.js +++ b/src/webgl/p5.Geometry.js @@ -1419,6 +1419,7 @@ class Geometry { for (let i = 0; i < this.edges.length; i++) { const prevEdge = this.edges[i - 1]; const currEdge = this.edges[i]; + const isPoint = currEdge[0] === currEdge[1]; const begin = this.vertices[currEdge[0]]; const end = this.vertices[currEdge[1]]; const prevColor = (this.vertexStrokeColors.length > 0 && prevEdge) @@ -1439,10 +1440,12 @@ class Geometry { (currEdge[1] + 1) * 4 ) : [0, 0, 0, 0]; - const dir = end - .copy() - .sub(begin) - .normalize(); + const dir = isPoint + ? 
new Vector(0, 1, 0) + : end + .copy() + .sub(begin) + .normalize(); const dirOK = dir.magSq() > 0; if (dirOK) { this._addSegment(begin, end, fromColor, toColor, dir); @@ -1462,6 +1465,9 @@ class Geometry { this._addJoin(begin, lastValidDir, dir, fromColor); } } + } else if (isPoint) { + this._addCap(begin, dir.copy().mult(-1), fromColor); + this._addCap(begin, dir, fromColor); } else { // Start a new line if (dirOK && !connected.has(currEdge[0])) { @@ -1483,7 +1489,7 @@ class Geometry { }); } } - if (lastValidDir && !connected.has(prevEdge[1])) { + if (!isPoint && lastValidDir && !connected.has(prevEdge[1])) { const existingCap = potentialCaps.get(prevEdge[1]); if (existingCap) { this._addJoin( diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index f683075df1..c0d9ef244a 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -1,5 +1,13 @@ import * as constants from "../core/constants"; -import { readPixelsWebGL, readPixelWebGL } from './utils'; +import { + getWebGLShaderAttributes, + getWebGLUniformMetadata, + populateGLSLHooks, + readPixelsWebGL, + readPixelWebGL, + setWebGLTextureParams, + setWebGLUniformValue +} from './utils'; import { Renderer3D, getStrokeDefs } from "../core/p5.Renderer3D"; import { Shader } from "./p5.Shader"; import { Texture, MipmapTexture } from "./p5.Texture"; @@ -39,7 +47,7 @@ import filterInvertFrag from "./shaders/filters/invert.frag"; import filterThresholdFrag from "./shaders/filters/threshold.frag"; import filterShaderVert from "./shaders/filters/default.vert"; -const { lineDefs } = getStrokeDefs((n, v) => `#define ${n} ${v};\n`); +const { lineDefs } = getStrokeDefs((n, v) => `#define ${n} ${v}\n`); const defaultShaders = { normalVert, @@ -1219,7 +1227,7 @@ class RendererGL extends Renderer3D { if (!gl.getShaderParameter(vertShader, gl.COMPILE_STATUS)) { throw new Error(`Yikes! An error occurred compiling the vertex shader: ${ gl.getShaderInfoLog(vertShader) - }`); + } in:\n\n${shader.vertSrc()}`); } const fragShader = gl.createShader(gl.FRAGMENT_SHADER); @@ -1250,216 +1258,21 @@ class RendererGL extends Renderer3D { _finalizeShader() {} _getShaderAttributes(shader) { - const attributes = {}; - - const gl = this.GL; - - const numAttributes = gl.getProgramParameter( - shader._glProgram, - gl.ACTIVE_ATTRIBUTES - ); - for (let i = 0; i < numAttributes; ++i) { - const attributeInfo = gl.getActiveAttrib(shader._glProgram, i); - const name = attributeInfo.name; - const location = gl.getAttribLocation(shader._glProgram, name); - const attribute = {}; - attribute.name = name; - attribute.location = location; - attribute.index = i; - attribute.type = attributeInfo.type; - attribute.size = attributeInfo.size; - attributes[name] = attribute; - } - - return attributes; + return getWebGLShaderAttributes(shader, this.GL); } getUniformMetadata(shader) { - const gl = this.GL; - const program = shader._glProgram; - - const numUniforms = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS); - const result = []; - - let samplerIndex = 0; - - for (let i = 0; i < numUniforms; ++i) { - const uniformInfo = gl.getActiveUniform(program, i); - const uniform = {}; - uniform.location = gl.getUniformLocation( - program, - uniformInfo.name - ); - uniform.size = uniformInfo.size; - let uniformName = uniformInfo.name; - //uniforms that are arrays have their name returned as - //someUniform[0] which is a bit silly so we trim it - //off here. 
The size property tells us that its an array - //so we dont lose any information by doing this - if (uniformInfo.size > 1) { - uniformName = uniformName.substring(0, uniformName.indexOf('[0]')); - } - uniform.name = uniformName; - uniform.type = uniformInfo.type; - uniform._cachedData = undefined; - if (uniform.type === gl.SAMPLER_2D) { - uniform.isSampler = true; - uniform.samplerIndex = samplerIndex; - samplerIndex++; - } - - uniform.isArray = - uniformInfo.size > 1 || - uniform.type === gl.FLOAT_MAT3 || - uniform.type === gl.FLOAT_MAT4 || - uniform.type === gl.FLOAT_VEC2 || - uniform.type === gl.FLOAT_VEC3 || - uniform.type === gl.FLOAT_VEC4 || - uniform.type === gl.INT_VEC2 || - uniform.type === gl.INT_VEC4 || - uniform.type === gl.INT_VEC3; - - result.push(uniform); - } - - return result; + return getWebGLUniformMetadata(shader, this.GL); } updateUniformValue(shader, uniform, data) { - const gl = this.GL; - const location = uniform.location; - shader.useProgram(); - - switch (uniform.type) { - case gl.BOOL: - if (data === true) { - gl.uniform1i(location, 1); - } else { - gl.uniform1i(location, 0); - } - break; - case gl.INT: - if (uniform.size > 1) { - data.length && gl.uniform1iv(location, data); - } else { - gl.uniform1i(location, data); - } - break; - case gl.FLOAT: - if (uniform.size > 1) { - data.length && gl.uniform1fv(location, data); - } else { - gl.uniform1f(location, data); - } - break; - case gl.FLOAT_MAT3: - gl.uniformMatrix3fv(location, false, data); - break; - case gl.FLOAT_MAT4: - gl.uniformMatrix4fv(location, false, data); - break; - case gl.FLOAT_VEC2: - if (uniform.size > 1) { - data.length && gl.uniform2fv(location, data); - } else { - gl.uniform2f(location, data[0], data[1]); - } - break; - case gl.FLOAT_VEC3: - if (uniform.size > 1) { - data.length && gl.uniform3fv(location, data); - } else { - gl.uniform3f(location, data[0], data[1], data[2]); - } - break; - case gl.FLOAT_VEC4: - if (uniform.size > 1) { - data.length && gl.uniform4fv(location, data); - } else { - gl.uniform4f(location, data[0], data[1], data[2], data[3]); - } - break; - case gl.INT_VEC2: - if (uniform.size > 1) { - data.length && gl.uniform2iv(location, data); - } else { - gl.uniform2i(location, data[0], data[1]); - } - break; - case gl.INT_VEC3: - if (uniform.size > 1) { - data.length && gl.uniform3iv(location, data); - } else { - gl.uniform3i(location, data[0], data[1], data[2]); - } - break; - case gl.INT_VEC4: - if (uniform.size > 1) { - data.length && gl.uniform4iv(location, data); - } else { - gl.uniform4i(location, data[0], data[1], data[2], data[3]); - } - break; - case gl.SAMPLER_2D: - if (typeof data == 'number') { - if ( - data < gl.TEXTURE0 || - data > gl.TEXTURE31 || - data !== Math.ceil(data) - ) { - console.log( - '🌸 p5.js says: ' + - "You're trying to use a number as the data for a texture." + - 'Please use a texture.' - ); - return this; - } - gl.activeTexture(data); - gl.uniform1i(location, data); - } else { - gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); - uniform.texture = - data instanceof Texture ? 
data : this._renderer.getTexture(data); - gl.uniform1i(location, uniform.samplerIndex); - if (uniform.texture.src.gifProperties) { - uniform.texture.src._animateGif(this._renderer._pInst); - } - } - break; - case gl.SAMPLER_CUBE: - case gl.SAMPLER_3D: - case gl.SAMPLER_2D_SHADOW: - case gl.SAMPLER_2D_ARRAY: - case gl.SAMPLER_2D_ARRAY_SHADOW: - case gl.SAMPLER_CUBE_SHADOW: - case gl.INT_SAMPLER_2D: - case gl.INT_SAMPLER_3D: - case gl.INT_SAMPLER_CUBE: - case gl.INT_SAMPLER_2D_ARRAY: - case gl.UNSIGNED_INT_SAMPLER_2D: - case gl.UNSIGNED_INT_SAMPLER_3D: - case gl.UNSIGNED_INT_SAMPLER_CUBE: - case gl.UNSIGNED_INT_SAMPLER_2D_ARRAY: - if (typeof data !== 'number') { - break; - } - if ( - data < gl.TEXTURE0 || - data > gl.TEXTURE31 || - data !== Math.ceil(data) - ) { - console.log( - '🌸 p5.js says: ' + - "You're trying to use a number as the data for a texture." + - 'Please use a texture.' - ); - break; - } - gl.activeTexture(data); - gl.uniform1i(location, data); - break; - //@todo complete all types - } + return setWebGLUniformValue( + shader, + uniform, + data, + (tex) => this.getTexture(tex), + this.GL + ); } _updateTexture(uniform, tex) { @@ -1473,7 +1286,7 @@ class RendererGL extends Renderer3D { bindTexture(tex) { // bind texture using gl context + glTarget and // generated gl texture object - this.GL.bindTexture(this.GL.TEXTURE_2D, tex.getTexture()); + this.GL.bindTexture(this.GL.TEXTURE_2D, tex.getTexture().texture); } unbindTexture() { @@ -1486,7 +1299,7 @@ class RendererGL extends Renderer3D { // accidentally leave a framebuffer bound, causing a feedback loop // when something else tries to write to it const gl = this.GL; - const empty = this._renderer._getEmptyTexture(); + const empty = this._getEmptyTexture(); gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); empty.bindTexture(); gl.uniform1i(uniform.location, uniform.samplerIndex); @@ -1504,13 +1317,11 @@ class RendererGL extends Renderer3D { uploadTextureFromSource({ texture, glFormat, glDataType }, source) { const gl = this.GL; - gl.bindTexture(gl.TEXTURE_2D, texture); gl.texImage2D(gl.TEXTURE_2D, 0, glFormat, glFormat, glDataType, source); } uploadTextureFromData({ texture, glFormat, glDataType }, data, width, height) { const gl = this.GL; - gl.bindTexture(gl.TEXTURE_2D, texture); gl.texImage2D( gl.TEXTURE_2D, 0, @@ -1537,95 +1348,7 @@ class RendererGL extends Renderer3D { } setTextureParams(texture) { - const gl = this.GL; - texture.bindTexture(); - const glMinFilter = texture.minFilter === constants.NEAREST ? gl.NEAREST : gl.LINEAR; - const glMagFilter = texture.magFilter === constants.NEAREST ? 
gl.NEAREST : gl.LINEAR; - - // for webgl 1 we need to check if the texture is power of two - // if it isn't we will set the wrap mode to CLAMP - // webgl2 will support npot REPEAT and MIRROR but we don't check for it yet - const isPowerOfTwo = x => (x & (x - 1)) === 0; - const textureData = texture._getTextureDataFromSource(); - - let wrapWidth; - let wrapHeight; - - if (textureData.naturalWidth && textureData.naturalHeight) { - wrapWidth = textureData.naturalWidth; - wrapHeight = textureData.naturalHeight; - } else { - wrapWidth = this.width; - wrapHeight = this.height; - } - - const widthPowerOfTwo = isPowerOfTwo(wrapWidth); - const heightPowerOfTwo = isPowerOfTwo(wrapHeight); - let glWrapS, glWrapT; - - if (texture.wrapS === constants.REPEAT) { - if ( - this.webglVersion === constants.WEBGL2 || - (widthPowerOfTwo && heightPowerOfTwo) - ) { - glWrapS = gl.REPEAT; - } else { - console.warn( - 'You tried to set the wrap mode to REPEAT but the texture size is not a power of two. Setting to CLAMP instead' - ); - glWrapS = gl.CLAMP_TO_EDGE; - } - } else if (texture.wrapS === constants.MIRROR) { - if ( - this.webglVersion === constants.WEBGL2 || - (widthPowerOfTwo && heightPowerOfTwo) - ) { - glWrapS = gl.MIRRORED_REPEAT; - } else { - console.warn( - 'You tried to set the wrap mode to MIRROR but the texture size is not a power of two. Setting to CLAMP instead' - ); - glWrapS = gl.CLAMP_TO_EDGE; - } - } else { - // falling back to default if didn't get a proper mode - glWrapS = gl.CLAMP_TO_EDGE; - } - - if (texture.wrapT === constants.REPEAT) { - if ( - this._renderer.webglVersion === constants.WEBGL2 || - (widthPowerOfTwo && heightPowerOfTwo) - ) { - glWrapT = gl.REPEAT; - } else { - console.warn( - 'You tried to set the wrap mode to REPEAT but the texture size is not a power of two. Setting to CLAMP instead' - ); - glWrapT = gl.CLAMP_TO_EDGE; - } - } else if (texture.wrapT === constants.MIRROR) { - if ( - this._renderer.webglVersion === constants.WEBGL2 || - (widthPowerOfTwo && heightPowerOfTwo) - ) { - glWrapT = gl.MIRRORED_REPEAT; - } else { - console.warn( - 'You tried to set the wrap mode to MIRROR but the texture size is not a power of two. 
Setting to CLAMP instead' - ); - glWrapT = gl.CLAMP_TO_EDGE; - } - } else { - // falling back to default if didn't get a proper mode - glWrapT = gl.CLAMP_TO_EDGE; - } - - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, glMinFilter); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, glMagFilter); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, glWrapS); - gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, glWrapT); - texture.unbindTexture(); + return setWebGLTextureParams(texture, this.GL, this.webglVersion); } deleteTexture({ texture }) { @@ -1677,48 +1400,7 @@ class RendererGL extends Renderer3D { // Shader hooks ////////////////////////////////////////////// populateHooks(shader, src, shaderType) { - const main = 'void main'; - if (!src.includes(main)) return src; - - let [preMain, postMain] = src.split(main); - - let hooks = ''; - let defines = ''; - for (const key in shader.hooks.uniforms) { - hooks += `uniform ${key};\n`; - } - if (shader.hooks.declarations) { - hooks += shader.hooks.declarations + '\n'; - } - if (shader.hooks[shaderType].declarations) { - hooks += shader.hooks[shaderType].declarations + '\n'; - } - for (const hookDef in shader.hooks.helpers) { - hooks += `${hookDef}${shader.hooks.helpers[hookDef]}\n`; - } - for (const hookDef in shader.hooks[shaderType]) { - if (hookDef === 'declarations') continue; - const [hookType, hookName] = hookDef.split(' '); - - // Add a #define so that if the shader wants to use preprocessor directives to - // optimize away the extra function calls in main, it can do so - if (shader.hooks.modified[shaderType][hookDef]) { - defines += '#define AUGMENTED_HOOK_' + hookName + '\n'; - } - - hooks += - hookType + ' HOOK_' + hookName + shader.hooks[shaderType][hookDef] + '\n'; - } - - // Allow shaders to specify the location of hook #define statements. Normally these - // go after function definitions, but one might want to have them defined earlier - // in order to only conditionally make uniforms. - if (preMain.indexOf('#define HOOK_DEFINES') !== -1) { - preMain = preMain.replace('#define HOOK_DEFINES', '\n' + defines + '\n'); - defines = ''; - } - - return preMain + '\n' + defines + hooks + main + postMain; + return populateGLSLHooks(shader, src, shaderType); } } diff --git a/src/webgl/p5.Shader.js b/src/webgl/p5.Shader.js index 0134701c9c..9a506a2801 100644 --- a/src/webgl/p5.Shader.js +++ b/src/webgl/p5.Shader.js @@ -709,7 +709,17 @@ class Shader { for (const uniform of this.samplers) { let tex = uniform.texture; - if (tex === undefined) { + if ( + tex === undefined || + ( + // Make sure we unbind a framebuffer uniform if it's the same + // framebuffer that is actvely being drawn to in order to + // prevent a feedback cycle + tex.isFramebufferTexture && + !tex.src.framebuffer.antialias && + tex.src.framebuffer === this._renderer.activeFramebuffer() + ) + ) { // user hasn't yet supplied a texture for this slot. // (or there may not be one--maybe just lighting), // so we supply a default texture instead. 
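The guard added to `bindTextures()` above substitutes the renderer's empty texture whenever a sampler would otherwise read from the framebuffer that is currently being drawn into, because sampling a texture that is also the active render target creates a feedback loop. A minimal sketch of that decision, using only the fields referenced in the diff (`isFramebufferTexture`, `src.framebuffer`, `activeFramebuffer()`, `_getEmptyTexture()`); the function name itself is illustrative:

    // Sketch: pick a safe texture to bind for a sampler uniform.
    function resolveSamplerTexture(renderer, tex) {
      const isActiveDrawTarget =
        tex !== undefined &&
        tex.isFramebufferTexture &&
        // Antialiased framebuffers draw into a separate multisampled buffer
        // first, so sampling their color texture is not a direct feedback loop.
        !tex.src.framebuffer.antialias &&
        tex.src.framebuffer === renderer.activeFramebuffer();
      if (tex === undefined || isActiveDrawTarget) {
        // No texture supplied, or it is the current draw target: fall back to
        // an empty placeholder so the sampler slot stays bound to something valid.
        return renderer._getEmptyTexture();
      }
      return tex;
    }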
@@ -1026,7 +1036,7 @@ class Shader { } if (attr.location !== -1) { - this._renderer._enableAttrib(attr, size, type, normalized, stride, offset); + this._renderer._enableAttrib(this, attr, size, type, normalized, stride, offset); } } return this; diff --git a/src/webgl/p5.Texture.js b/src/webgl/p5.Texture.js index b9519cb606..e7103bd13c 100644 --- a/src/webgl/p5.Texture.js +++ b/src/webgl/p5.Texture.js @@ -130,7 +130,7 @@ class Texture { }); } - this._renderer.setTextureParams(this.textureHandle, { + this._renderer.setTextureParams(this, { minFilter: this.minFilter, magFilter: this.magFilter, wrapS: this.wrapS, @@ -179,14 +179,13 @@ class Texture { if (this._shouldUpdate(textureData)) { this.bindTexture(); this._renderer.uploadTextureFromSource(this.textureHandle, textureData); - this.unbindTexture(); updated = true; } return updated; } - shouldUpdate(textureData) { + _shouldUpdate(textureData) { const data = this.src; if (data.width === 0 || data.height === 0) { return false; // nothing to do! @@ -280,7 +279,7 @@ class Texture { if (this.isFramebufferTexture) { return this.src.rawTexture(); } else { - return this.glTex; + return this.textureHandle; } } diff --git a/src/webgl/shaders/line.vert b/src/webgl/shaders/line.vert index a00bf94ba8..65cd9502c6 100644 --- a/src/webgl/shaders/line.vert +++ b/src/webgl/shaders/line.vert @@ -127,7 +127,7 @@ void main() { inputs.tangentOut = (uModelViewMatrix * vec4(aTangentOut, 0.)).xyz; #endif #ifdef AUGMENTED_HOOK_getCameraInputs - inputs = hook_getCameraInputs(inputs); + inputs = HOOK_getCameraInputs(inputs); #endif vec4 posp = vec4(inputs.position, 1.); diff --git a/src/webgl/utils.js b/src/webgl/utils.js index b891e96d0b..70766ac522 100644 --- a/src/webgl/utils.js +++ b/src/webgl/utils.js @@ -1,3 +1,6 @@ +import * as constants from '../core/constants'; +import { Texture } from './p5.Texture'; + /** * @private * @param {Uint8Array|Float32Array|undefined} pixels An existing pixels array to reuse if the size is the same @@ -97,3 +100,351 @@ export function readPixelWebGL(gl, framebuffer, x, y, format, type, flipY) { return Array.from(pixels); } + +export function setWebGLTextureParams(texture, gl, webglVersion) { + texture.bindTexture(); + const glMinFilter = texture.minFilter === constants.NEAREST ? gl.NEAREST : gl.LINEAR; + const glMagFilter = texture.magFilter === constants.NEAREST ? gl.NEAREST : gl.LINEAR; + + // for webgl 1 we need to check if the texture is power of two + // if it isn't we will set the wrap mode to CLAMP + // webgl2 will support npot REPEAT and MIRROR but we don't check for it yet + const isPowerOfTwo = x => (x & (x - 1)) === 0; + const textureData = texture._getTextureDataFromSource(); + + let wrapWidth; + let wrapHeight; + + if (textureData.naturalWidth && textureData.naturalHeight) { + wrapWidth = textureData.naturalWidth; + wrapHeight = textureData.naturalHeight; + } else { + wrapWidth = texture.width; + wrapHeight = texture.height; + } + + const widthPowerOfTwo = isPowerOfTwo(wrapWidth); + const heightPowerOfTwo = isPowerOfTwo(wrapHeight); + let glWrapS, glWrapT; + + if (texture.wrapS === constants.REPEAT) { + if ( + webglVersion === constants.WEBGL2 || + (widthPowerOfTwo && heightPowerOfTwo) + ) { + glWrapS = gl.REPEAT; + } else { + console.warn( + 'You tried to set the wrap mode to REPEAT but the texture size is not a power of two. 
Setting to CLAMP instead' + ); + glWrapS = gl.CLAMP_TO_EDGE; + } + } else if (texture.wrapS === constants.MIRROR) { + if ( + webglVersion === constants.WEBGL2 || + (widthPowerOfTwo && heightPowerOfTwo) + ) { + glWrapS = gl.MIRRORED_REPEAT; + } else { + console.warn( + 'You tried to set the wrap mode to MIRROR but the texture size is not a power of two. Setting to CLAMP instead' + ); + glWrapS = gl.CLAMP_TO_EDGE; + } + } else { + // falling back to default if didn't get a proper mode + glWrapS = gl.CLAMP_TO_EDGE; + } + + if (texture.wrapT === constants.REPEAT) { + if ( + webglVersion === constants.WEBGL2 || + (widthPowerOfTwo && heightPowerOfTwo) + ) { + glWrapT = gl.REPEAT; + } else { + console.warn( + 'You tried to set the wrap mode to REPEAT but the texture size is not a power of two. Setting to CLAMP instead' + ); + glWrapT = gl.CLAMP_TO_EDGE; + } + } else if (texture.wrapT === constants.MIRROR) { + if ( + webglVersion === constants.WEBGL2 || + (widthPowerOfTwo && heightPowerOfTwo) + ) { + glWrapT = gl.MIRRORED_REPEAT; + } else { + console.warn( + 'You tried to set the wrap mode to MIRROR but the texture size is not a power of two. Setting to CLAMP instead' + ); + glWrapT = gl.CLAMP_TO_EDGE; + } + } else { + // falling back to default if didn't get a proper mode + glWrapT = gl.CLAMP_TO_EDGE; + } + + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, glMinFilter); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, glMagFilter); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, glWrapS); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, glWrapT); + texture.unbindTexture(); +} + +export function setWebGLUniformValue(shader, uniform, data, getTexture, gl) { + const location = uniform.location; + shader.useProgram(); + + switch (uniform.type) { + case gl.BOOL: + if (data === true) { + gl.uniform1i(location, 1); + } else { + gl.uniform1i(location, 0); + } + break; + case gl.INT: + if (uniform.size > 1) { + data.length && gl.uniform1iv(location, data); + } else { + gl.uniform1i(location, data); + } + break; + case gl.FLOAT: + if (uniform.size > 1) { + data.length && gl.uniform1fv(location, data); + } else { + gl.uniform1f(location, data); + } + break; + case gl.FLOAT_MAT3: + gl.uniformMatrix3fv(location, false, data); + break; + case gl.FLOAT_MAT4: + gl.uniformMatrix4fv(location, false, data); + break; + case gl.FLOAT_VEC2: + if (uniform.size > 1) { + data.length && gl.uniform2fv(location, data); + } else { + gl.uniform2f(location, data[0], data[1]); + } + break; + case gl.FLOAT_VEC3: + if (uniform.size > 1) { + data.length && gl.uniform3fv(location, data); + } else { + gl.uniform3f(location, data[0], data[1], data[2]); + } + break; + case gl.FLOAT_VEC4: + if (uniform.size > 1) { + data.length && gl.uniform4fv(location, data); + } else { + gl.uniform4f(location, data[0], data[1], data[2], data[3]); + } + break; + case gl.INT_VEC2: + if (uniform.size > 1) { + data.length && gl.uniform2iv(location, data); + } else { + gl.uniform2i(location, data[0], data[1]); + } + break; + case gl.INT_VEC3: + if (uniform.size > 1) { + data.length && gl.uniform3iv(location, data); + } else { + gl.uniform3i(location, data[0], data[1], data[2]); + } + break; + case gl.INT_VEC4: + if (uniform.size > 1) { + data.length && gl.uniform4iv(location, data); + } else { + gl.uniform4i(location, data[0], data[1], data[2], data[3]); + } + break; + case gl.SAMPLER_2D: + if (typeof data == 'number') { + if ( + data < gl.TEXTURE0 || + data > gl.TEXTURE31 || + data !== Math.ceil(data) + ) { + console.log( + '🌸 
p5.js says: ' + + "You're trying to use a number as the data for a texture." + + 'Please use a texture.' + ); + return this; + } + gl.activeTexture(data); + gl.uniform1i(location, data); + } else { + gl.activeTexture(gl.TEXTURE0 + uniform.samplerIndex); + uniform.texture = + data instanceof Texture ? data : getTexture(data); + gl.uniform1i(location, uniform.samplerIndex); + if (uniform.texture.src.gifProperties) { + uniform.texture.src._animateGif(this._pInst); + } + } + break; + case gl.SAMPLER_CUBE: + case gl.SAMPLER_3D: + case gl.SAMPLER_2D_SHADOW: + case gl.SAMPLER_2D_ARRAY: + case gl.SAMPLER_2D_ARRAY_SHADOW: + case gl.SAMPLER_CUBE_SHADOW: + case gl.INT_SAMPLER_2D: + case gl.INT_SAMPLER_3D: + case gl.INT_SAMPLER_CUBE: + case gl.INT_SAMPLER_2D_ARRAY: + case gl.UNSIGNED_INT_SAMPLER_2D: + case gl.UNSIGNED_INT_SAMPLER_3D: + case gl.UNSIGNED_INT_SAMPLER_CUBE: + case gl.UNSIGNED_INT_SAMPLER_2D_ARRAY: + if (typeof data !== 'number') { + break; + } + if ( + data < gl.TEXTURE0 || + data > gl.TEXTURE31 || + data !== Math.ceil(data) + ) { + console.log( + '🌸 p5.js says: ' + + "You're trying to use a number as the data for a texture." + + 'Please use a texture.' + ); + break; + } + gl.activeTexture(data); + gl.uniform1i(location, data); + break; + //@todo complete all types + } +} + +export function getWebGLUniformMetadata(shader, gl) { + const program = shader._glProgram; + + const numUniforms = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS); + const result = []; + + let samplerIndex = 0; + + for (let i = 0; i < numUniforms; ++i) { + const uniformInfo = gl.getActiveUniform(program, i); + const uniform = {}; + uniform.location = gl.getUniformLocation( + program, + uniformInfo.name + ); + uniform.size = uniformInfo.size; + let uniformName = uniformInfo.name; + //uniforms that are arrays have their name returned as + //someUniform[0] which is a bit silly so we trim it + //off here. 
The size property tells us that its an array + //so we dont lose any information by doing this + if (uniformInfo.size > 1) { + uniformName = uniformName.substring(0, uniformName.indexOf('[0]')); + } + uniform.name = uniformName; + uniform.type = uniformInfo.type; + uniform._cachedData = undefined; + if (uniform.type === gl.SAMPLER_2D) { + uniform.isSampler = true; + uniform.samplerIndex = samplerIndex; + samplerIndex++; + } + + uniform.isArray = + uniformInfo.size > 1 || + uniform.type === gl.FLOAT_MAT3 || + uniform.type === gl.FLOAT_MAT4 || + uniform.type === gl.FLOAT_VEC2 || + uniform.type === gl.FLOAT_VEC3 || + uniform.type === gl.FLOAT_VEC4 || + uniform.type === gl.INT_VEC2 || + uniform.type === gl.INT_VEC4 || + uniform.type === gl.INT_VEC3; + + result.push(uniform); + } + + return result; +} + +export function getWebGLShaderAttributes(shader, gl) { + const attributes = {}; + + const numAttributes = gl.getProgramParameter( + shader._glProgram, + gl.ACTIVE_ATTRIBUTES + ); + for (let i = 0; i < numAttributes; ++i) { + const attributeInfo = gl.getActiveAttrib(shader._glProgram, i); + const name = attributeInfo.name; + const location = gl.getAttribLocation(shader._glProgram, name); + const attribute = {}; + attribute.name = name; + attribute.location = location; + attribute.index = i; + attribute.type = attributeInfo.type; + attribute.size = attributeInfo.size; + attributes[name] = attribute; + } + + return attributes; +} + +export function populateGLSLHooks(shader, src, shaderType) { + const main = 'void main'; + if (!src.includes(main)) return src; + + let [preMain, postMain] = src.split(main); + + let hooks = ''; + let defines = ''; + for (const key in shader.hooks.uniforms) { + hooks += `uniform ${key};\n`; + } + if (shader.hooks.declarations) { + hooks += shader.hooks.declarations + '\n'; + } + if (shader.hooks[shaderType].declarations) { + hooks += shader.hooks[shaderType].declarations + '\n'; + } + for (const hookDef in shader.hooks.helpers) { + hooks += `${hookDef}${shader.hooks.helpers[hookDef]}\n`; + } + for (const hookDef in shader.hooks[shaderType]) { + if (hookDef === 'declarations') continue; + const [hookType, hookName] = hookDef.split(' '); + + // Add a #define so that if the shader wants to use preprocessor directives to + // optimize away the extra function calls in main, it can do so + if ( + shader.hooks.modified.vertex[hookDef] || + shader.hooks.modified.fragment[hookDef] + ) { + defines += '#define AUGMENTED_HOOK_' + hookName + '\n'; + } + + hooks += + hookType + ' HOOK_' + hookName + shader.hooks[shaderType][hookDef] + '\n'; + } + + // Allow shaders to specify the location of hook #define statements. Normally these + // go after function definitions, but one might want to have them defined earlier + // in order to only conditionally make uniforms. 
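  // As a rough illustration (the hook name and body here are made up): given
  //   src = 'void main() { gl_FragColor = HOOK_getFinalColor(vec4(1.)); }'
  // and a modified fragment hook
  //   'vec4 getFinalColor': '(vec4 c) { return c * 0.5; }'
  // the returned source becomes approximately
  //   #define AUGMENTED_HOOK_getFinalColor
  //   vec4 HOOK_getFinalColor(vec4 c) { return c * 0.5; }
  //   void main() { gl_FragColor = HOOK_getFinalColor(vec4(1.)); }
  // unless the original source contains '#define HOOK_DEFINES', in which case
  // the #define lines are moved up to that marker instead, as handled below.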
+ if (preMain.indexOf('#define HOOK_DEFINES') !== -1) { + preMain = preMain.replace('#define HOOK_DEFINES', '\n' + defines + '\n'); + defines = ''; + } + + return preMain + '\n' + defines + hooks + main + postMain; +} diff --git a/test/unit/webgl/p5.RendererGL.js b/test/unit/webgl/p5.RendererGL.js index 93d66c790a..34b64abdfd 100644 --- a/test/unit/webgl/p5.RendererGL.js +++ b/test/unit/webgl/p5.RendererGL.js @@ -107,10 +107,9 @@ suite('p5.RendererGL', function() { // Make a red texture const tex = myp5.createFramebuffer(); tex.draw(() => myp5.background('red')); - console.log(tex.get().canvas.toDataURL()); myp5.shader(myShader); - myp5.fill('red') + myp5.fill('blue') myp5.noStroke(); myShader.setUniform('myTex', tex); diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 14bac25ce4..7dfe0e6e82 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -5,7 +5,8 @@ const plugins = [ vitePluginString({ include: [ 'src/webgl/shaders/**/*' - ] + ], + compress: false, }) ]; From aef17d5c6f2ef15dddc5c2663a2304ae982cd4ec Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 17 Jun 2025 19:57:12 -0400 Subject: [PATCH 29/69] Fix the rest of the tests! --- src/image/filterRenderer2D.js | 4 +- src/webgl/p5.Texture.js | 4 +- test/unit/webgl/light.js | 1 + test/unit/webgl/p5.Framebuffer.js | 12 +++--- test/unit/webgl/p5.Shader.js | 1 - test/unit/webgl/p5.Texture.js | 64 +++++++++++++++++-------------- 6 files changed, 47 insertions(+), 39 deletions(-) diff --git a/src/image/filterRenderer2D.js b/src/image/filterRenderer2D.js index cfdf10eb8d..e2a5aa1f5d 100644 --- a/src/image/filterRenderer2D.js +++ b/src/image/filterRenderer2D.js @@ -60,8 +60,8 @@ class FilterRenderer2D { _emptyTexture: null, webglVersion, states: { - textureWrapX: this.gl.CLAMP_TO_EDGE, - textureWrapY: this.gl.CLAMP_TO_EDGE, + textureWrapX: constants.CLAMP, + textureWrapY: constants.CLAMP, }, _arraysEqual: (a, b) => JSON.stringify(a) === JSON.stringify(b), _getEmptyTexture: () => { diff --git a/src/webgl/p5.Texture.js b/src/webgl/p5.Texture.js index e7103bd13c..c88389bb8e 100644 --- a/src/webgl/p5.Texture.js +++ b/src/webgl/p5.Texture.js @@ -23,8 +23,8 @@ class Texture { this.format = settings.format || 'rgba8unorm'; this.minFilter = settings.minFilter || constants.LINEAR; this.magFilter = settings.magFilter || constants.LINEAR; - this.wrapS = settings.wrapS || constants.CLAMP; - this.wrapT = settings.wrapT || constants.CLAMP; + this.wrapS = settings.wrapS || renderer.states.textureWrapX; + this.wrapT = settings.wrapT || renderer.states.textureWrapY; this.dataType = settings.dataType || 'uint8'; this.textureHandle = null; diff --git a/test/unit/webgl/light.js b/test/unit/webgl/light.js index 3f8785a5c9..38aa248003 100644 --- a/test/unit/webgl/light.js +++ b/test/unit/webgl/light.js @@ -67,6 +67,7 @@ suite('light', function() { }); suite('spotlight inputs', function() { + beforeEach(() => myp5.noLights()); let angle = Math.PI / 4; let defaultAngle = Math.cos(Math.PI / 3); let cosAngle = Math.cos(angle); diff --git a/test/unit/webgl/p5.Framebuffer.js b/test/unit/webgl/p5.Framebuffer.js index 8d52a1668c..f97cb6b57d 100644 --- a/test/unit/webgl/p5.Framebuffer.js +++ b/test/unit/webgl/p5.Framebuffer.js @@ -156,7 +156,7 @@ suite('p5.Framebuffer', function() { expect(fbo.density).to.equal(1); // The texture should not be recreated - expect(fbo.color.rawTexture()).to.equal(oldTexture); + expect(fbo.color.rawTexture().texture).to.equal(oldTexture.texture); }); test('manually-sized framebuffers can be made auto-sized', 
function() { @@ -216,7 +216,7 @@ suite('p5.Framebuffer', function() { expect(fbo.density).to.equal(2); // The texture should not be recreated - expect(fbo.color.rawTexture()).to.equal(oldTexture); + expect(fbo.color.rawTexture().texture).to.equal(oldTexture.texture); }); test('resizes the framebuffer by createFramebuffer based on max texture size', function() { @@ -638,10 +638,10 @@ suite('p5.Framebuffer', function() { }); assert.equal( - fbo.color.framebuffer.colorP5Texture.glMinFilter, fbo.gl.NEAREST + fbo.color.framebuffer.colorP5Texture.minFilter, myp5.NEAREST ); assert.equal( - fbo.color.framebuffer.colorP5Texture.glMagFilter, fbo.gl.NEAREST + fbo.color.framebuffer.colorP5Texture.magFilter, myp5.NEAREST ); }); test('can create a framebuffer that uses LINEAR texture filtering', @@ -651,10 +651,10 @@ suite('p5.Framebuffer', function() { const fbo = myp5.createFramebuffer({}); assert.equal( - fbo.color.framebuffer.colorP5Texture.glMinFilter, fbo.gl.LINEAR + fbo.color.framebuffer.colorP5Texture.minFilter, myp5.LINEAR ); assert.equal( - fbo.color.framebuffer.colorP5Texture.glMagFilter, fbo.gl.LINEAR + fbo.color.framebuffer.colorP5Texture.magFilter, myp5.LINEAR ); }); }); diff --git a/test/unit/webgl/p5.Shader.js b/test/unit/webgl/p5.Shader.js index 00d4d00847..7a7d35021b 100644 --- a/test/unit/webgl/p5.Shader.js +++ b/test/unit/webgl/p5.Shader.js @@ -67,7 +67,6 @@ suite('p5.Shader', function() { 'uModelViewMatrix', 'uProjectionMatrix', 'uNormalMatrix', - 'uAmbientLightCount', 'uDirectionalLightCount', 'uPointLightCount', 'uAmbientColor', diff --git a/test/unit/webgl/p5.Texture.js b/test/unit/webgl/p5.Texture.js index 80512f0e49..60058b302d 100644 --- a/test/unit/webgl/p5.Texture.js +++ b/test/unit/webgl/p5.Texture.js @@ -67,6 +67,13 @@ suite('p5.Texture', function() { }; suite('p5.Texture', function() { + let texParamSpy; + beforeEach(() => { + texParamSpy = vi.spyOn(myp5._renderer.GL, 'texParameteri'); + }); + afterEach(() => { + vi.restoreAllMocks(); + }); test('Create and cache a single texture with p5.Image', function() { testTextureSet(texImg1); }); @@ -79,56 +86,57 @@ suite('p5.Texture', function() { test('Set filter mode to linear', function() { var tex = myp5._renderer.getTexture(texImg2); tex.setInterpolation(myp5.LINEAR, myp5.LINEAR); - assert.deepEqual(tex.glMinFilter, myp5._renderer.GL.LINEAR); - assert.deepEqual(tex.glMagFilter, myp5._renderer.GL.LINEAR); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_MIN_FILTER, myp5._renderer.GL.LINEAR); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_MAG_FILTER, myp5._renderer.GL.LINEAR); }); test('Set filter mode to nearest', function() { var tex = myp5._renderer.getTexture(texImg2); tex.setInterpolation(myp5.NEAREST, myp5.NEAREST); - assert.deepEqual(tex.glMinFilter, myp5._renderer.GL.NEAREST); - assert.deepEqual(tex.glMagFilter, myp5._renderer.GL.NEAREST); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_MIN_FILTER, myp5._renderer.GL.NEAREST); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_MAG_FILTER, myp5._renderer.GL.NEAREST); }); test('Set wrap mode to clamp', function() { var tex = myp5._renderer.getTexture(texImg2); tex.setWrapMode(myp5.CLAMP, myp5.CLAMP); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.CLAMP_TO_EDGE); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.CLAMP_TO_EDGE); + 
expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.CLAMP_TO_EDGE); }); test('Set wrap mode to repeat', function() { var tex = myp5._renderer.getTexture(texImg2); tex.setWrapMode(myp5.REPEAT, myp5.REPEAT); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.REPEAT); }); test('Set wrap mode to mirror', function() { var tex = myp5._renderer.getTexture(texImg2); tex.setWrapMode(myp5.MIRROR, myp5.MIRROR); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.MIRRORED_REPEAT); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.MIRRORED_REPEAT); }); test('Set wrap mode REPEAT if src dimensions is powerOfTwo', function() { const tex = myp5._renderer.getTexture(imgElementPowerOfTwo); tex.setWrapMode(myp5.REPEAT, myp5.REPEAT); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.REPEAT); }); test( 'Set default wrap mode REPEAT if WEBGL2 and src dimensions != powerOfTwo', function() { const tex = myp5._renderer.getTexture(imgElementNotPowerOfTwo); tex.setWrapMode(myp5.REPEAT, myp5.REPEAT); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.REPEAT); } ); test( 'Set default wrap mode CLAMP if WEBGL1 and src dimensions != powerOfTwo', function() { myp5.setAttributes({ version: 1 }); + texParamSpy = vi.spyOn(myp5._renderer.GL, 'texParameteri'); const tex = myp5._renderer.getTexture(imgElementNotPowerOfTwo); tex.setWrapMode(myp5.REPEAT, myp5.REPEAT); - assert.deepEqual(tex.glWrapS, myp5._renderer.GL.CLAMP_TO_EDGE); - assert.deepEqual(tex.glWrapT, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.CLAMP_TO_EDGE); } ); test('Set textureMode to NORMAL', function() { @@ -143,28 +151,28 @@ suite('p5.Texture', function() { myp5.textureWrap(myp5.CLAMP); var tex1 = myp5._renderer.getTexture(texImg1); var tex2 = myp5._renderer.getTexture(texImg2); - assert.deepEqual(tex1.glWrapS, myp5._renderer.GL.CLAMP_TO_EDGE); - 
assert.deepEqual(tex1.glWrapT, myp5._renderer.GL.CLAMP_TO_EDGE); - assert.deepEqual(tex2.glWrapS, myp5._renderer.GL.CLAMP_TO_EDGE); - assert.deepEqual(tex2.glWrapT, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.CLAMP_TO_EDGE); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.CLAMP_TO_EDGE); }); test('Set global wrap mode to repeat', function() { myp5.textureWrap(myp5.REPEAT); var tex1 = myp5._renderer.getTexture(texImg1); var tex2 = myp5._renderer.getTexture(texImg2); - assert.deepEqual(tex1.glWrapS, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex1.glWrapT, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex2.glWrapS, myp5._renderer.GL.REPEAT); - assert.deepEqual(tex2.glWrapT, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.REPEAT); }); test('Set global wrap mode to mirror', function() { myp5.textureWrap(myp5.MIRROR); var tex1 = myp5._renderer.getTexture(texImg1); var tex2 = myp5._renderer.getTexture(texImg2); - assert.deepEqual(tex1.glWrapS, myp5._renderer.GL.MIRRORED_REPEAT); - assert.deepEqual(tex1.glWrapT, myp5._renderer.GL.MIRRORED_REPEAT); - assert.deepEqual(tex2.glWrapS, myp5._renderer.GL.MIRRORED_REPEAT); - assert.deepEqual(tex2.glWrapT, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_S, myp5._renderer.GL.MIRRORED_REPEAT); + expect(texParamSpy).toHaveBeenCalledWith(myp5._renderer.GL.TEXTURE_2D, myp5._renderer.GL.TEXTURE_WRAP_T, myp5._renderer.GL.MIRRORED_REPEAT); }); test('Handles changes to p5.Image size', function() { const tex = myp5._renderer.getTexture(texImg2); From abc40743d912e10aac4db2307fd42ddea1f36d13 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Wed, 25 Jun 2025 19:50:06 -0400 Subject: [PATCH 30/69] Get textures working --- preview/index.html | 34 +++++++++--- src/core/p5.Renderer3D.js | 16 ++++++ src/webgl/p5.RendererGL.js | 16 ------ src/webgl/p5.Shader.js | 1 + src/webgpu/p5.RendererWebGPU.js | 95 +++++++++++++++++++++++---------- src/webgpu/shaders/material.js | 9 +++- 6 files changed, 119 insertions(+), 52 deletions(-) diff --git a/preview/index.html b/preview/index.html index 99a5bc38d5..6e4915ab34 100644 --- a/preview/index.html +++ b/preview/index.html @@ -26,42 +26,59 @@ let fbo; let sh; let ssh; + let tex; p.setup = async 
function () { await p.createCanvas(400, 400, p.WEBGPU); + + tex = p.createImage(100, 100); + tex.loadPixels(); + for (let x = 0; x < tex.width; x++) { + for (let y = 0; y < tex.height; y++) { + const off = (x + y * tex.width) * 4; + tex.pixels[off] = p.round((x / tex.width) * 255); + tex.pixels[off + 1] = p.round((y / tex.height) * 255); + tex.pixels[off + 2] = 0; + tex.pixels[off + 3] = 255; + } + } + tex.updatePixels(); + sh = p.baseMaterialShader().modify({ uniforms: { 'f32 time': () => p.millis(), }, 'Vertex getWorldInputs': `(inputs: Vertex) { var result = inputs; - result.position.y += 40.0 * sin(uniforms.time * 0.01); + result.position.y += 40.0 * sin(uniforms.time * 0.005); return result; }`, }) - ssh = p.baseStrokeShader().modify({ + /*ssh = p.baseStrokeShader().modify({ uniforms: { 'f32 time': () => p.millis(), }, 'StrokeVertex getWorldInputs': `(inputs: StrokeVertex) { var result = inputs; - result.position.y += 40.0 * sin(uniforms.time * 0.01); + result.position.y += 40.0 * sin(uniforms.time * 0.005); return result; }`, - }) + })*/ }; p.draw = function () { - const t = p.millis() * 0.008; + p.orbitControl(); + const t = p.millis() * 0.002; p.background(200); p.shader(sh); - p.strokeShader(ssh) - p.ambientLight(50); + // p.strokeShader(ssh) + p.ambientLight(150); p.directionalLight(100, 100, 100, 0, 1, -1); p.pointLight(155, 155, 155, 0, -200, 500); p.specularMaterial(255); p.shininess(300); - p.stroke('white') + //p.stroke('white'); + p.noStroke(); for (const [i, c] of ['red', 'lime', 'blue'].entries()) { p.push(); p.fill(c); @@ -70,6 +87,7 @@ 0, //p.width/3 * p.sin(t * 0.9 + i * Math.E + 0.2), p.width/3 * p.sin(t * 1.2 + i * Math.E + 0.3), ) + p.texture(tex) p.sphere(30); p.pop(); } diff --git a/src/core/p5.Renderer3D.js b/src/core/p5.Renderer3D.js index c437f34725..f43326b908 100644 --- a/src/core/p5.Renderer3D.js +++ b/src/core/p5.Renderer3D.js @@ -1586,6 +1586,22 @@ export class Renderer3D extends Renderer { return this._emptyTexture; } + getTexture(input) { + let src = input; + if (src instanceof Framebuffer) { + src = src.color; + } + + const texture = this.textures.get(src); + if (texture) { + return texture; + } + + const tex = new Texture(this, src); + this.textures.set(src, tex); + return tex; + } + ////////////////////////////////////////////// // Buffers ////////////////////////////////////////////// diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index c0d9ef244a..9025c0d31a 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -970,22 +970,6 @@ class RendererGL extends Renderer3D { return code; } - getTexture(input) { - let src = input; - if (src instanceof Framebuffer) { - src = src.color; - } - - const texture = this.textures.get(src); - if (texture) { - return texture; - } - - const tex = new Texture(this, src); - this.textures.set(src, tex); - return tex; - } - // TODO move to super class /* * used in imageLight, diff --git a/src/webgl/p5.Shader.js b/src/webgl/p5.Shader.js index 9a506a2801..2a95af1f17 100644 --- a/src/webgl/p5.Shader.js +++ b/src/webgl/p5.Shader.js @@ -708,6 +708,7 @@ class Shader { const empty = this._renderer._getEmptyTexture(); for (const uniform of this.samplers) { + if (uniform.noData) continue; let tex = uniform.texture; if ( tex === undefined || diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 0b9dd6fcfa..8d434cc725 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -1,5 +1,6 @@ import { Renderer3D, getStrokeDefs } from 
'../core/p5.Renderer3D'; import { Shader } from '../webgl/p5.Shader'; +import { Texture } from '../webgl/p5.Texture'; import * as constants from '../core/constants'; @@ -252,7 +253,7 @@ class RendererWebGPU extends Renderer3D { _finalizeShader(shader) { const rawSize = Math.max( 0, - ...Object.values(shader.uniforms).map(u => u.offsetEnd) + ...Object.values(shader.uniforms).filter(u => !u.isSampler).map(u => u.offsetEnd) ); const alignedSize = Math.ceil(rawSize / 16) * 16; shader._uniformData = new Float32Array(alignedSize / 4); @@ -277,14 +278,17 @@ class RendererWebGPU extends Renderer3D { for (const sampler of shader.samplers) { const group = sampler.group; const entries = groupEntries.get(group) || []; + if (!['sampler', 'texture_2d'].includes(sampler.type)) { + throw new Error(`Unsupported texture type: ${sampler.type}`); + } entries.push({ binding: sampler.binding, - visibility: GPUShaderStage.FRAGMENT, + visibility: sampler.visibility, sampler: sampler.type === 'sampler' ? { type: 'filtering' } : undefined, - texture: sampler.type === 'texture' + texture: sampler.type === 'texture_2d' ? { sampleType: 'float', viewDimension: '2d' } : undefined, uniform: sampler, @@ -300,6 +304,7 @@ class RendererWebGPU extends Renderer3D { } shader._groupEntries = groupEntries; + console.log(shader._groupEntries); shader._bindGroupLayouts = [...bindGroupLayouts.values()]; shader._pipelineLayout = this.device.createPipelineLayout({ bindGroupLayouts: shader._bindGroupLayouts, @@ -617,11 +622,17 @@ class RendererWebGPU extends Renderer3D { }; } + if (!entry.uniform.isSampler) { + throw new Error( + 'All non-texture/sampler uniforms should be in the uniform struct!' + ); + } + return { binding: entry.binding, - resource: sampler.type === 'sampler' - ? sampler.uniform._cachedData.getSampler() - : sampler.uniform.textureHandle.view, + resource: entry.uniform.type === 'sampler' + ? 
(entry.uniform.textureSource.texture || this._getEmptyTexture()).getSampler() + : (entry.uniform.texture || this._getEmptyTexture()).textureHandle.view, }; }); @@ -799,29 +810,59 @@ class RendererWebGPU extends Renderer3D { const structType = uniformVarMatch[2]; const uniforms = this._parseStruct(shader.vertSrc(), structType); // Extract samplers from group bindings - const samplers = []; - const samplerRegex = /@group\((\d+)\)\s*@binding\((\d+)\)\s*var\s+(\w+)\s*:\s*(\w+);/g; - let match; - while ((match = samplerRegex.exec(shader._vertSrc)) !== null) { - const [_, group, binding, name, type] = match; - const groupIndex = parseInt(group); - // We're currently reserving group 0 for non-sampler stuff, which we parse - // above, so we can skip it here while we grab the remaining sampler - // uniforms - if (groupIndex === 0) continue; - - samplers.push({ - group: groupIndex, - binding: parseInt(binding), - name, - type, // e.g., 'sampler', 'texture_2d' - sampler: true, - }); + const samplers = {}; + // TODO: support other texture types + const samplerRegex = /@group\((\d+)\)\s*@binding\((\d+)\)\s*var\s+(\w+)\s*:\s*(texture_2d|sampler);/g; + for (const [src, visibility] of [ + [shader._vertSrc, GPUShaderStage.VERTEX], + [shader._fragSrc, GPUShaderStage.FRAGMENT] + ]) { + let match; + while ((match = samplerRegex.exec(src)) !== null) { + const [_, group, binding, name, type] = match; + const groupIndex = parseInt(group); + const bindingIndex = parseInt(binding); + // We're currently reserving group 0 for non-sampler stuff, which we parse + // above, so we can skip it here while we grab the remaining sampler + // uniforms + if (groupIndex === 0 && bindingIndex === 0) continue; + + const key = `${groupIndex},${bindingIndex}`; + samplers[key] = { + visibility: (samplers[key]?.visibility || 0) | visibility, + group: groupIndex, + binding: bindingIndex, + name, + type, + isSampler: true, + noData: type === 'sampler', + }; + } + + for (const sampler of Object.values(samplers)) { + if (sampler.type.startsWith('texture')) { + const samplerName = sampler.name + '_sampler'; + const samplerNode = Object + .values(samplers) + .find((s) => s.name === samplerName); + if (!samplerNode) { + throw new Error( + `Every shader texture needs an accompanying sampler. Could not find sampler ${samplerName} for texture ${sampler.name}` + ); + } + samplerNode.textureSource = sampler; + } + } } - return [...Object.values(uniforms).sort((a, b) => a.index - b.index), ...samplers]; + return [...Object.values(uniforms).sort((a, b) => a.index - b.index), ...Object.values(samplers)]; } - updateUniformValue(_shader, _uniform, _data) {} + updateUniformValue(_shader, uniform, data) { + if (uniform.isSampler) { + uniform.texture = + data instanceof Texture ? 
data : this.getTexture(data); + } + } _updateTexture(uniform, tex) { tex.update(); @@ -879,7 +920,7 @@ class RendererWebGPU extends Renderer3D { magFilter: constantMapping[texture.magFilter], minFilter: constantMapping[texture.minFilter], addressModeU: constantMapping[texture.wrapS], - addressModeV: constantMapping[params.addressModeV], + addressModeV: constantMapping[texture.wrapT], }); this.samplers.set(key, sampler); return sampler; diff --git a/src/webgpu/shaders/material.js b/src/webgpu/shaders/material.js index 9722daad06..774f131bce 100644 --- a/src/webgpu/shaders/material.js +++ b/src/webgpu/shaders/material.js @@ -145,6 +145,9 @@ struct FragmentInput { ${uniforms} @group(0) @binding(0) var uniforms: Uniforms; +@group(0) @binding(1) var uSampler: texture_2d; +@group(0) @binding(2) var uSampler_sampler: sampler; + struct ColorComponents { baseColor: vec3, opacity: f32, @@ -305,7 +308,11 @@ fn totalLight( fn main(input: FragmentInput) -> @location(0) vec4 { HOOK_beforeFragment(); - let color = input.vColor; // TODO: check isTexture and apply tint + let color = select( + input.vColor, + getTexture(uSampler, uSampler_sampler, input.vTexCoord) * (uniforms.uTint/255.0), + uniforms.isTexture == 1 + ); // TODO: check isTexture and apply tint var inputs = Inputs( normalize(input.vNormal), input.vTexCoord, From 397c1d82a7deafcfd1881b80dc6b36873d51dd33 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Thu, 26 Jun 2025 07:53:18 -0400 Subject: [PATCH 31/69] Add visual tests --- src/webgpu/p5.RendererWebGPU.js | 3 +- test/unit/visual/cases/webgpu.js | 108 ++++++++++++++++++ .../Shaders/Shader hooks can be used/000.png | Bin 0 -> 474 bytes .../Shader hooks can be used/metadata.json | 3 + .../000.png | Bin 0 -> 427 bytes .../metadata.json | 3 + .../000.png | Bin 0 -> 1707 bytes .../metadata.json | 3 + .../000.png | Bin 0 -> 510 bytes .../metadata.json | 3 + 10 files changed, 122 insertions(+), 1 deletion(-) create mode 100644 test/unit/visual/cases/webgpu.js create mode 100644 test/unit/visual/screenshots/WebGPU/Shaders/Shader hooks can be used/000.png create mode 100644 test/unit/visual/screenshots/WebGPU/Shaders/Shader hooks can be used/metadata.json create mode 100644 test/unit/visual/screenshots/WebGPU/Shaders/The color shader runs successfully/000.png create mode 100644 test/unit/visual/screenshots/WebGPU/Shaders/The color shader runs successfully/metadata.json create mode 100644 test/unit/visual/screenshots/WebGPU/Shaders/The material shader runs successfully/000.png create mode 100644 test/unit/visual/screenshots/WebGPU/Shaders/The material shader runs successfully/metadata.json create mode 100644 test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/000.png create mode 100644 test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/metadata.json diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 8d434cc725..8e8852ee41 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -304,7 +304,6 @@ class RendererWebGPU extends Renderer3D { } shader._groupEntries = groupEntries; - console.log(shader._groupEntries); shader._bindGroupLayouts = [...bindGroupLayouts.values()]; shader._pipelineLayout = this.device.createPipelineLayout({ bindGroupLayouts: shader._bindGroupLayouts, @@ -886,6 +885,7 @@ class RendererWebGPU extends Renderer3D { } uploadTextureFromSource({ gpuTexture }, source) { + this.uploadedTexture = true; this.queue.copyExternalImageToTexture( { source }, { texture: gpuTexture 
}, @@ -894,6 +894,7 @@ class RendererWebGPU extends Renderer3D { } uploadTextureFromData({ gpuTexture }, data, width, height) { + this.uploadedTexture = true; this.queue.writeTexture( { texture: gpuTexture }, data, diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js new file mode 100644 index 0000000000..7593c437e8 --- /dev/null +++ b/test/unit/visual/cases/webgpu.js @@ -0,0 +1,108 @@ +import { vi } from 'vitest'; +import p5 from '../../../../src/app'; +import { visualSuite, visualTest } from '../visualTest'; +import rendererWebGPU from '../../../../src/webgpu/p5.RendererWebGPU'; + +p5.registerAddon(rendererWebGPU); + +visualSuite('WebGPU', function() { + visualSuite('Shaders', function() { + visualTest('The color shader runs successfully', async function(p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background('white'); + for (const [i, color] of ['red', 'lime', 'blue'].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.fill(color); + p5.translate(15, 0); + p5.noStroke(); + p5.circle(0, 0, 20); + p5.pop(); + } + screenshot(); + }); + + visualTest('The stroke shader runs successfully', async function(p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background('white'); + for (const [i, color] of ['red', 'lime', 'blue'].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.translate(15, 0); + p5.stroke(color); + p5.strokeWeight(2); + p5.circle(0, 0, 20); + p5.pop(); + } + screenshot(); + }); + + visualTest('The material shader runs successfully', async function(p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background('white'); + p5.ambientLight(50); + p5.directionalLight(100, 100, 100, 0, 1, -1); + p5.pointLight(155, 155, 155, 0, -200, 500); + p5.specularMaterial(255); + p5.shininess(300); + for (const [i, color] of ['red', 'lime', 'blue'].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.fill(color); + p5.translate(15, 0); + p5.noStroke(); + p5.sphere(10); + p5.pop(); + } + screenshot(); + }); + + visualTest('Shader hooks can be used', async function(p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + const myFill = p5.baseMaterialShader().modify({ + 'Vertex getWorldInputs': `(inputs: Vertex) { + var result = inputs; + result.position.y += 10.0 * sin(inputs.position.x * 0.25); + return result; + }`, + }); + const myStroke = p5.baseStrokeShader().modify({ + 'StrokeVertex getWorldInputs': `(inputs: StrokeVertex) { + var result = inputs; + result.position.y += 10.0 * sin(inputs.position.x * 0.25); + return result; + }`, + }); + p5.background('black'); + p5.shader(myFill); + p5.strokeShader(myStroke); + p5.fill('red'); + p5.stroke('white'); + p5.strokeWeight(5); + p5.circle(0, 0, 30); + screenshot(); + }); + + // TODO: turns out textures are only available in the next animation frame! + // need to figure out a workaround before uncommenting this test. 
+ /*visualTest('Textures in the material shader work', async function(p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + const tex = p5.createImage(50, 50); + tex.loadPixels(); + for (let x = 0; x < tex.width; x++) { + for (let y = 0; y < tex.height; y++) { + const off = (x + y * tex.width) * 4; + tex.pixels[off] = p5.round((x / tex.width) * 255); + tex.pixels[off + 1] = p5.round((y / tex.height) * 255); + tex.pixels[off + 2] = 0; + tex.pixels[off + 3] = 255; + } + } + tex.updatePixels(); + p5.texture(tex); + p5.plane(p5.width, p5.height); + + screenshot(); + });*/ + }); +}); diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/Shader hooks can be used/000.png b/test/unit/visual/screenshots/WebGPU/Shaders/Shader hooks can be used/000.png new file mode 100644 index 0000000000000000000000000000000000000000..f883a461b5b4ee71f22494a5cbbf31b750dee77d GIT binary patch literal 474 zcmV<00VV#4P)Px$lu1NERA@u(n$fz#APj@q`#&`2bIOq^vrU_pZR$0S{U#{|TuLdmI6t*MAF(Wp z^J%uP-)41z^bq|kWCOWCHV{pTN$PMwMs}*TWU68Y8LIJO@65EAKr|4IDGMeX4*|dLY^P=mJm4$iE_I z2AOIrrXo{8nCr;(Kqls6UG0VW#Rc&ULEp4Bd#%C`L~Npj0YK2XinNJYS;_`!TUB~> z%K{ngwW%=5X0bmn`*=U{_Hx20JEb^)YN#@D?-H;p0yj()2wH$xAkreZxgqmjy@jkl zU_R8=LKSJ_S7p$0Y_Qc21W^c9#bB#&MOwvE+skOHJdk@IE(>W?BLxIfn^;&>S#)DtqGLgIqK55fOilIs^#PPx$Wl2OqRA@u(nb8u2AP7bE|3BKDn$2m;;$A@ADS9sRc!1DyDW#+bTsc5eN;#iZ z&hkqeX-)nrGDkvDAPmF?Hwp#@(LjuZfj}$}qhQ_x;$dTi712n5G-`6s3Km8*?O7NB zRxLscSftY}aA;kX%9{a1Bgd&dVD2*hgT)`kY60R)3+VZ@M{ z0m8y`huAf7l#yc@GH-RQHoQPICAdE01@RrwWF$b?WCK<#i~x&te@o;@dwooymDBmC zu_WRs*~>~Pi3Vv+{_>=wK-6zlZIJQG9L1WDk(mk*QRQ6((wWNk2owNEp$@*L#wE-OM$*Tz1{4u9zKs znWL2Kj{>p6^tQ3i4>3|}wIakuK+qtpnNbibke}Ypy46)A_0=O(XgR*ueW=HPd;qUo VGls!tF>?R_002ovPDHLkV1gA*uPx*Wl2OqRA@uxnOlfeWf+E^Z^mponH|-XvGQ;kq8$Y3q8lNIKnlFDf)FDkNP@6} zP=kuP(CEqv2MPi&BD(0Jyb6SfMCjTB^77WbA z-fOStf4}ej4$DFaf&ai||9cR#*MX(L6^Wjw3E`ne^zB*^v$ufTf!V;7zzm=d_zU1TjFVHV&Kx{)jN6gz(rUfvF8*_71QRxE8oNhSX(1 zKj0weqACJ84g3TgBZT!mhp7dU=fh&)24GeUsR5u9h*UWP{0jU493q6xJ%yd{STL87krRL`G8RF8kE$FegeRH>F?&9`)6Ipjnwf$0_03@S?s=?Tt9(d7 zJiLAez6TBx!mB-lsZ_*lYs?L);Z7yX6^IO2%=|n*HWwEuI05vEz8GEg4`QHWWRDP4=o2NR(209si9<7 z-}PN|t-ZBCJ}}FENmWXhjzuQEZiBA*JH9=MYb7DbMi=sMG)}lxaZyeov6 z-w|Ys8Ox)=%NPC5IwD9Z}jd zq>6av$wIt?Q+N0HbU@lL`p>I-n(P8`>!lLn6*CqAH^!iKt);V?pBerA* z=bNBsF9f90f+{}nh#7YQb7NR3V2c(qd)6$*#>P0)YGoBl0qc{Fhu2Bqt2({bg+6en z?>}Y6k`hyIPdTu33%&`EA_A#W#9d2MjuiR&LKLwb2DirA=&hDK@fd_>>aCL>D6!uZq?jYAYqjT2 z_+&cvYi}D2#u-boAk$Cxb2=_*{tbkurv_SOmf5cgqS_N55PXQ%ZS|LqYsG>%3Vhiu zh`n$5R!BQKj{^^84`jC$>3#yjqf^7&XO^`mzV=JE0Yyc;XYig{=kAuN{T3p)<@1v#fhd3u19(^G$%Ck#CpPU+?b*#NIF_nxZ<3MsIv&^jb6? 
zhOl{R7)5B>!k4+0FPU^kYFQj2>jA5_DA|YJwf64Xk1nZP0|NuJTCH-A%oiwxC#wxW z>Igt$`;Najk4T*qz)M0Uh`nfJy+pcbOm+|bGy55zGfv;}KEiltFHyXY@bY^A!fVwq zdN2W%U1b3ELAFJga%(9L4gIisa5Z~|_vDJCgM*Lknoqxlu&o}*bP8{^B3q1!Ufc6y z^2U^%i+8f|&_=?!L=ol5zJtyQ)Sa-U2WjUy0tH3_oD@;7b*QTa_3DO+n5lie4M z!w^1d5@eOZZOK|oAU-(cuSX`oLiD|jC%eK*TLQV(-UdOkg1Fl9;kxY2Tc^lWW<6|QB)04Xn70Y>Fu1?a48@DqJT`Njp*ffrmaXXA8I=H{u^VKFr>YG>b(E}002ovPDHLkV1lD> BBNhMv literal 0 HcmV?d00001 diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/The material shader runs successfully/metadata.json b/test/unit/visual/screenshots/WebGPU/Shaders/The material shader runs successfully/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Shaders/The material shader runs successfully/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/000.png b/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/000.png new file mode 100644 index 0000000000000000000000000000000000000000..cd8d8130754b37606c09f97debb75999a83fabc0 GIT binary patch literal 510 zcmVPx$xJg7oRA@u(m|G45AqYiv|BJS<(Fw64uj@?H+OH;~$l;2hxs+1U5B#zhBxj!f ziO`rqD8S5N>H9A6O(4}tOSG-yBamtkuT$Q6Rwxxn)S2jf4U`Ciasp+CgZd8;bQIC{ zs5(nU&qr;<(RV4*0XT;j-$jrbR!gJER^Hb3 zPi;v6qtVIu=PSEGMNZU29Z?mca}{_dRD@JmruO%?US%ryyQ)4hSE{3vgaxkJ07Wu1!U>eB4M_}wB}KDYIz~9;>jt4P^H-rL81`bU@UEX zPh3l0uotU#%1v$UzF48u5LFo)IQUi9cuGk{QXLrhKYpim2O*n+}YSB3_s~l(>A; zDneoU|Ik&}QS??*)LUyb5_)_9Lzr@8!`e`jFHIM#hLY?t*Z=?k07*qoM6N<$f}W_} A{{R30 literal 0 HcmV?d00001 diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/metadata.json b/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Shaders/The stroke shader runs successfully/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file From e7c1d6b83f9ed40eb43883472dd625a42cb8a53e Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Thu, 26 Jun 2025 18:35:29 -0400 Subject: [PATCH 32/69] Fix usage of textures in setup --- src/webgpu/p5.RendererWebGPU.js | 15 +++++++++++++-- test/unit/visual/cases/webgpu.js | 6 ++---- .../Textures in the material shader work/000.png | Bin 0 -> 275 bytes .../metadata.json | 3 +++ 4 files changed, 18 insertions(+), 6 deletions(-) create mode 100644 test/unit/visual/screenshots/WebGPU/Shaders/Textures in the material shader work/000.png create mode 100644 test/unit/visual/screenshots/WebGPU/Shaders/Textures in the material shader work/metadata.json diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 8e8852ee41..9ded1277d2 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -36,6 +36,7 @@ class RendererWebGPU extends Renderer3D { // TODO disablable stencil this.depthFormat = 'depth24plus-stencil8'; this._updateSize(); + this._update(); } _updateSize() { @@ -662,6 +663,15 @@ class RendererWebGPU extends Renderer3D { this.queue.submit([commandEncoder.finish()]); } + async ensureTexture(source) { + await this.queue.onSubmittedWorkDone(); + await new Promise((res) => requestAnimationFrame(res)); + const tex = this.getTexture(source); + tex.update(); + await this.queue.onSubmittedWorkDone(); + await new Promise((res) => requestAnimationFrame(res)); + } + 
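The ensureTexture() helper added above flushes any already-submitted GPU work, waits a frame, forces an upload of the source's texture via getTexture().update(), and then waits again so the upload has landed before the texture is sampled. A minimal usage sketch, assuming an instance-mode sketch on a WEBGPU canvas where `img` is an illustrative p5.Image already filled via updatePixels():

    // Hedged sketch: make sure img's GPU texture is uploaded before drawing with it
    await p5.ensureTexture(img);
    p5.texture(img);
    p5.plane(p5.width, p5.height);
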
////////////////////////////////////////////// // SHADER ////////////////////////////////////////////// @@ -885,7 +895,6 @@ class RendererWebGPU extends Renderer3D { } uploadTextureFromSource({ gpuTexture }, source) { - this.uploadedTexture = true; this.queue.copyExternalImageToTexture( { source }, { texture: gpuTexture }, @@ -894,7 +903,6 @@ class RendererWebGPU extends Renderer3D { } uploadTextureFromData({ gpuTexture }, data, width, height) { - this.uploadedTexture = true; this.queue.writeTexture( { texture: gpuTexture }, data, @@ -1118,6 +1126,9 @@ function rendererWebGPU(p5, fn) { p5.RendererWebGPU = RendererWebGPU; p5.renderers[constants.WEBGPU] = p5.RendererWebGPU; + fn.ensureTexture = function(source) { + return this._renderer.ensureTexture(source); + } } export default rendererWebGPU; diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index 7593c437e8..efd8cc7e93 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -83,9 +83,7 @@ visualSuite('WebGPU', function() { screenshot(); }); - // TODO: turns out textures are only available in the next animation frame! - // need to figure out a workaround before uncommenting this test. - /*visualTest('Textures in the material shader work', async function(p5, screenshot) { + visualTest('Textures in the material shader work', async function(p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); const tex = p5.createImage(50, 50); tex.loadPixels(); @@ -103,6 +101,6 @@ visualSuite('WebGPU', function() { p5.plane(p5.width, p5.height); screenshot(); - });*/ + }); }); }); diff --git a/test/unit/visual/screenshots/WebGPU/Shaders/Textures in the material shader work/000.png b/test/unit/visual/screenshots/WebGPU/Shaders/Textures in the material shader work/000.png new file mode 100644 index 0000000000000000000000000000000000000000..b0e4b614b344a0205eb32d901c4f48631d7d2fda GIT binary patch literal 275 zcmeAS@N?(olHy`uVBq!ia0vp^Mj*_=1|;R|J2nC-#^NA%Cx&(BWL^R}XFXjULo%H2 zo);8mN@QTU@cu!QO6MdMPY}`olUTWn(q{c;J=@$mH?}g6YsnIaQ(Q|HIjrJZvdrNX*OG+}v$&Qlb-2Z~WU<38 zt|iMIesQHNaA@O7S>kYvD`k Date: Mon, 28 Jul 2025 11:04:11 -0400 Subject: [PATCH 33/69] Refactor framebuffers --- src/image/pixels.js | 2 +- src/webgl/p5.Framebuffer.js | 540 ++------------------------ src/webgl/p5.RendererGL.js | 597 ++++++++++++++++++++++++++++- src/webgl/p5.Texture.js | 21 - src/webgl/utils.js | 21 + src/webgpu/p5.RendererWebGPU.js | 382 ++++++++++++++++++ test/unit/visual/cases/webgpu.js | 164 ++++++++ test/unit/webgl/p5.RendererGL.js | 3 +- test/unit/webgpu/p5.Framebuffer.js | 247 ++++++++++++ 9 files changed, 1454 insertions(+), 523 deletions(-) create mode 100644 test/unit/webgpu/p5.Framebuffer.js diff --git a/src/image/pixels.js b/src/image/pixels.js index ebea101273..6c2ea58115 100644 --- a/src/image/pixels.js +++ b/src/image/pixels.js @@ -933,7 +933,7 @@ function pixels(p5, fn){ */ fn.loadPixels = function(...args) { // p5._validateParameters('loadPixels', args); - this._renderer.loadPixels(); + return this._renderer.loadPixels(); }; /** diff --git a/src/webgl/p5.Framebuffer.js b/src/webgl/p5.Framebuffer.js index 0ebb3c0daa..af2ab279b5 100644 --- a/src/webgl/p5.Framebuffer.js +++ b/src/webgl/p5.Framebuffer.js @@ -5,11 +5,9 @@ import * as constants from '../core/constants'; import { RGB, RGBA } from '../color/creating_reading'; -import { checkWebGLCapabilities } from './p5.Texture'; -import { readPixelsWebGL, readPixelWebGL } from './utils'; +import { checkWebGLCapabilities } from './utils'; import { Camera } 
from './p5.Camera'; import { Texture } from './p5.Texture'; -import { Image } from '../image/p5.Image'; const constrain = (n, low, high) => Math.max(Math.min(n, high), low); @@ -52,7 +50,6 @@ class FramebufferTexture { } rawTexture() { - // TODO: handle webgpu texture handle return { texture: this.framebuffer[this.property] }; } } @@ -87,13 +84,11 @@ class Framebuffer { this.antialiasSamples = settings.antialias ? 2 : 0; } this.antialias = this.antialiasSamples > 0; - if (this.antialias && this.renderer.webglVersion !== constants.WEBGL2) { - console.warn('Antialiasing is unsupported in a WebGL 1 context'); + if (this.antialias && !this.renderer.supportsFramebufferAntialias()) { + console.warn('Framebuffer antialiasing is unsupported in this context'); this.antialias = false; } this.density = settings.density || this.renderer._pixelDensity; - const gl = this.renderer.GL; - this.gl = gl; if (settings.width && settings.height) { const dimensions = this.renderer._adjustDimensions(settings.width, settings.height); @@ -112,7 +107,8 @@ class Framebuffer { this.height = this.renderer.height; this._autoSized = true; } - this._checkIfFormatsAvailable(); + // Let renderer validate and adjust formats for this context + this.renderer.validateFramebufferFormats(this); if (settings.stencil && !this.useDepth) { console.warn('A stencil buffer can only be used if also using depth. Since the framebuffer has no depth buffer, the stencil buffer will be ignored.'); @@ -120,16 +116,8 @@ class Framebuffer { this.useStencil = this.useDepth && (settings.stencil === undefined ? true : settings.stencil); - this.framebuffer = gl.createFramebuffer(); - if (!this.framebuffer) { - throw new Error('Unable to create a framebuffer'); - } - if (this.antialias) { - this.aaFramebuffer = gl.createFramebuffer(); - if (!this.aaFramebuffer) { - throw new Error('Unable to create a framebuffer for antialiasing'); - } - } + // Let renderer create framebuffer resources with antialiasing support + this.renderer.createFramebufferResources(this); this._recreateTextures(); @@ -466,6 +454,10 @@ class Framebuffer { } } + _deleteTextures() { + this.renderer.deleteFramebufferTextures(this); + } + /** * Creates new textures and renderbuffers given the current size of the * framebuffer. 
@@ -473,117 +465,10 @@ class Framebuffer { * @private */ _recreateTextures() { - const gl = this.gl; - this._updateSize(); - const prevBoundTexture = gl.getParameter(gl.TEXTURE_BINDING_2D); - const prevBoundFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING); - - const colorTexture = gl.createTexture(); - if (!colorTexture) { - throw new Error('Unable to create color texture'); - } - gl.bindTexture(gl.TEXTURE_2D, colorTexture); - const colorFormat = this._glColorFormat(); - gl.texImage2D( - gl.TEXTURE_2D, - 0, - colorFormat.internalFormat, - this.width * this.density, - this.height * this.density, - 0, - colorFormat.format, - colorFormat.type, - null - ); - this.colorTexture = colorTexture; - gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer); - gl.framebufferTexture2D( - gl.FRAMEBUFFER, - gl.COLOR_ATTACHMENT0, - gl.TEXTURE_2D, - colorTexture, - 0 - ); - - if (this.useDepth) { - // Create the depth texture - const depthTexture = gl.createTexture(); - if (!depthTexture) { - throw new Error('Unable to create depth texture'); - } - const depthFormat = this._glDepthFormat(); - gl.bindTexture(gl.TEXTURE_2D, depthTexture); - gl.texImage2D( - gl.TEXTURE_2D, - 0, - depthFormat.internalFormat, - this.width * this.density, - this.height * this.density, - 0, - depthFormat.format, - depthFormat.type, - null - ); - - gl.framebufferTexture2D( - gl.FRAMEBUFFER, - this.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, - gl.TEXTURE_2D, - depthTexture, - 0 - ); - this.depthTexture = depthTexture; - } - - // Create separate framebuffer for antialiasing - if (this.antialias) { - this.colorRenderbuffer = gl.createRenderbuffer(); - gl.bindRenderbuffer(gl.RENDERBUFFER, this.colorRenderbuffer); - gl.renderbufferStorageMultisample( - gl.RENDERBUFFER, - Math.max( - 0, - Math.min(this.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) - ), - colorFormat.internalFormat, - this.width * this.density, - this.height * this.density - ); - - if (this.useDepth) { - const depthFormat = this._glDepthFormat(); - this.depthRenderbuffer = gl.createRenderbuffer(); - gl.bindRenderbuffer(gl.RENDERBUFFER, this.depthRenderbuffer); - gl.renderbufferStorageMultisample( - gl.RENDERBUFFER, - Math.max( - 0, - Math.min(this.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) - ), - depthFormat.internalFormat, - this.width * this.density, - this.height * this.density - ); - } - - gl.bindFramebuffer(gl.FRAMEBUFFER, this.aaFramebuffer); - gl.framebufferRenderbuffer( - gl.FRAMEBUFFER, - gl.COLOR_ATTACHMENT0, - gl.RENDERBUFFER, - this.colorRenderbuffer - ); - if (this.useDepth) { - gl.framebufferRenderbuffer( - gl.FRAMEBUFFER, - this.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, - gl.RENDERBUFFER, - this.depthRenderbuffer - ); - } - } + // Let renderer handle texture creation and framebuffer setup + this.renderer.recreateFramebufferTextures(this); if (this.useDepth) { this.depth = new FramebufferTexture(this, 'depthTexture'); @@ -612,131 +497,6 @@ class Framebuffer { } ); this.renderer.textures.set(this.color, this.colorP5Texture); - - gl.bindTexture(gl.TEXTURE_2D, prevBoundTexture); - gl.bindFramebuffer(gl.FRAMEBUFFER, prevBoundFramebuffer); - } - - /** - * To create a WebGL texture, one needs to supply three pieces of information: - * the type (the data type each channel will be stored as, e.g. int or float), - * the format (the color channels that will each be stored in the previously - * specified type, e.g. 
rgb or rgba), and the internal format (the specifics - * of how data for each channel, in the aforementioned type, will be packed - * together, such as how many bits to use, e.g. RGBA32F or RGB565.) - * - * The format and channels asked for by the user hint at what these values - * need to be, and the WebGL version affects what options are avaiable. - * This method returns the values for these three properties, given the - * framebuffer's settings. - * - * @private - */ - _glColorFormat() { - let type, format, internalFormat; - const gl = this.gl; - - if (this.format === constants.FLOAT) { - type = gl.FLOAT; - } else if (this.format === constants.HALF_FLOAT) { - type = this.renderer.webglVersion === constants.WEBGL2 - ? gl.HALF_FLOAT - : gl.getExtension('OES_texture_half_float').HALF_FLOAT_OES; - } else { - type = gl.UNSIGNED_BYTE; - } - - if (this.channels === RGBA) { - format = gl.RGBA; - } else { - format = gl.RGB; - } - - if (this.renderer.webglVersion === constants.WEBGL2) { - // https://webgl2fundamentals.org/webgl/lessons/webgl-data-textures.html - const table = { - [gl.FLOAT]: { - [gl.RGBA]: gl.RGBA32F - // gl.RGB32F is not available in Firefox without an alpha channel - }, - [gl.HALF_FLOAT]: { - [gl.RGBA]: gl.RGBA16F - // gl.RGB16F is not available in Firefox without an alpha channel - }, - [gl.UNSIGNED_BYTE]: { - [gl.RGBA]: gl.RGBA8, // gl.RGBA4 - [gl.RGB]: gl.RGB8 // gl.RGB565 - } - }; - internalFormat = table[type][format]; - } else if (this.format === constants.HALF_FLOAT) { - internalFormat = gl.RGBA; - } else { - internalFormat = format; - } - - return { internalFormat, format, type }; - } - - /** - * To create a WebGL texture, one needs to supply three pieces of information: - * the type (the data type each channel will be stored as, e.g. int or float), - * the format (the color channels that will each be stored in the previously - * specified type, e.g. rgb or rgba), and the internal format (the specifics - * of how data for each channel, in the aforementioned type, will be packed - * together, such as how many bits to use, e.g. RGBA32F or RGB565.) - * - * This method takes into account the settings asked for by the user and - * returns values for these three properties that can be used for the - * texture storing depth information. 
- * - * @private - */ - _glDepthFormat() { - let type, format, internalFormat; - const gl = this.gl; - - if (this.useStencil) { - if (this.depthFormat === constants.FLOAT) { - type = gl.FLOAT_32_UNSIGNED_INT_24_8_REV; - } else if (this.renderer.webglVersion === constants.WEBGL2) { - type = gl.UNSIGNED_INT_24_8; - } else { - type = gl.getExtension('WEBGL_depth_texture').UNSIGNED_INT_24_8_WEBGL; - } - } else { - if (this.depthFormat === constants.FLOAT) { - type = gl.FLOAT; - } else { - type = gl.UNSIGNED_INT; - } - } - - if (this.useStencil) { - format = gl.DEPTH_STENCIL; - } else { - format = gl.DEPTH_COMPONENT; - } - - if (this.useStencil) { - if (this.depthFormat === constants.FLOAT) { - internalFormat = gl.DEPTH32F_STENCIL8; - } else if (this.renderer.webglVersion === constants.WEBGL2) { - internalFormat = gl.DEPTH24_STENCIL8; - } else { - internalFormat = gl.DEPTH_STENCIL; - } - } else if (this.renderer.webglVersion === constants.WEBGL2) { - if (this.depthFormat === constants.FLOAT) { - internalFormat = gl.DEPTH_COMPONENT32F; - } else { - internalFormat = gl.DEPTH_COMPONENT24; - } - } else { - internalFormat = gl.DEPTH_COMPONENT; - } - - return { internalFormat, format, type }; } /** @@ -775,17 +535,7 @@ class Framebuffer { * @private */ _handleResize() { - const oldColor = this.color; - const oldDepth = this.depth; - const oldColorRenderbuffer = this.colorRenderbuffer; - const oldDepthRenderbuffer = this.depthRenderbuffer; - - this._deleteTexture(oldColor); - if (oldDepth) this._deleteTexture(oldDepth); - const gl = this.gl; - if (oldColorRenderbuffer) gl.deleteRenderbuffer(oldColorRenderbuffer); - if (oldDepthRenderbuffer) gl.deleteRenderbuffer(oldDepthRenderbuffer); - + this._deleteTextures(); this._recreateTextures(); this.defaultCamera._resize(); } @@ -913,20 +663,6 @@ class Framebuffer { return cam; } - /** - * Given a raw texture wrapper, delete its stored texture from WebGL memory, - * and remove it from p5's list of active textures. - * - * @param {p5.FramebufferTexture} texture - * @private - */ - _deleteTexture(texture) { - const gl = this.gl; - gl.deleteTexture(texture.rawTexture().texture); - - this.renderer.textures.delete(texture); - } - /** * Deletes the framebuffer from GPU memory. * @@ -996,19 +732,11 @@ class Framebuffer { *
*/ remove() { - const gl = this.gl; - this._deleteTexture(this.color); - if (this.depth) this._deleteTexture(this.depth); - gl.deleteFramebuffer(this.framebuffer); - if (this.aaFramebuffer) { - gl.deleteFramebuffer(this.aaFramebuffer); - } - if (this.depthRenderbuffer) { - gl.deleteRenderbuffer(this.depthRenderbuffer); - } - if (this.colorRenderbuffer) { - gl.deleteRenderbuffer(this.colorRenderbuffer); - } + this._deleteTextures(); + + // Let renderer clean up framebuffer resources + this.renderer.deleteFramebufferResources(this); + this.renderer.framebuffers.delete(this); } @@ -1095,14 +823,7 @@ class Framebuffer { * @private */ _framebufferToBind() { - if (this.antialias) { - // If antialiasing, draw to an antialiased renderbuffer rather - // than directly to the texture. In end() we will copy from the - // renderbuffer to the texture. - return this.aaFramebuffer; - } else { - return this.framebuffer; - } + return this.renderer.getFramebufferToBind(this); } /** @@ -1111,45 +832,9 @@ class Framebuffer { * @property {'colorTexutre'|'depthTexture'} property The property to update */ _update(property) { - if (this.dirty[property] && this.antialias) { - const gl = this.gl; - gl.bindFramebuffer(gl.READ_FRAMEBUFFER, this.aaFramebuffer); - gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, this.framebuffer); - const partsToCopy = { - colorTexture: [ - gl.COLOR_BUFFER_BIT, - // TODO: move to renderer - this.colorP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST - ], - }; - if (this.useDepth) { - partsToCopy.depthTexture = [ - gl.DEPTH_BUFFER_BIT, - // TODO: move to renderer - this.depthP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST - ]; - } - const [flag, filter] = partsToCopy[property]; - gl.blitFramebuffer( - 0, - 0, - this.width * this.density, - this.height * this.density, - 0, - 0, - this.width * this.density, - this.height * this.density, - flag, - filter - ); + if (this.dirty[property]) { + this.renderer.updateFramebufferTexture(this, property); this.dirty[property] = false; - - const activeFbo = this.renderer.activeFramebuffer(); - if (activeFbo) { - gl.bindFramebuffer(gl.FRAMEBUFFER, activeFbo._framebufferToBind()); - } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); - } } } @@ -1159,8 +844,7 @@ class Framebuffer { * @private */ _beforeBegin() { - const gl = this.gl; - gl.bindFramebuffer(gl.FRAMEBUFFER, this._framebufferToBind()); + this.renderer.bindFramebuffer(this); this.renderer.viewport( this.width * this.density, this.height * this.density @@ -1236,7 +920,7 @@ class Framebuffer { if (this.prevFramebuffer) { this.prevFramebuffer._beforeBegin(); } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); + this.renderer.bindFramebuffer(null); this.renderer.viewport( this.renderer._origViewport.width, this.renderer._origViewport.height @@ -1355,25 +1039,19 @@ class Framebuffer { */ loadPixels() { this._update('colorTexture'); - const gl = this.gl; - const prevFramebuffer = this.renderer.activeFramebuffer(); - gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer); - const colorFormat = this._glColorFormat(); - this.pixels = readPixelsWebGL( - this.pixels, - gl, - this.framebuffer, - 0, - 0, - this.width * this.density, - this.height * this.density, - colorFormat.format, - colorFormat.type - ); - if (prevFramebuffer) { - gl.bindFramebuffer(gl.FRAMEBUFFER, prevFramebuffer._framebufferToBind()); + const result = this.renderer.readFramebufferPixels(this); + + // Check if renderer returned a Promise (WebGPU) or data directly (WebGL) + if (result && typeof result.then === 
'function') { + // WebGPU async case - return Promise + return result.then(pixels => { + this.pixels = pixels; + return pixels; + }); } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); + // WebGL sync case - assign directly + this.pixels = result; + return result; } } @@ -1415,7 +1093,7 @@ class Framebuffer { get(x, y, w, h) { this._update('colorTexture'); // p5._validateParameters('p5.Framebuffer.get', arguments); - const colorFormat = this._glColorFormat(); + if (x === undefined && y === undefined) { x = 0; y = 0; @@ -1430,14 +1108,7 @@ class Framebuffer { y = constrain(y, 0, this.height - 1); } - return readPixelWebGL( - this.gl, - this.framebuffer, - x * this.density, - y * this.density, - colorFormat.format, - colorFormat.type - ); + return this.renderer.readFramebufferPixel(this, x * this.density, y * this.density); } x = constrain(x, 0, this.width - 1); @@ -1445,60 +1116,7 @@ class Framebuffer { w = constrain(w, 1, this.width - x); h = constrain(h, 1, this.height - y); - const rawData = readPixelsWebGL( - undefined, - this.gl, - this.framebuffer, - x * this.density, - y * this.density, - w * this.density, - h * this.density, - colorFormat.format, - colorFormat.type - ); - // Framebuffer data might be either a Uint8Array or Float32Array - // depending on its format, and it may or may not have an alpha channel. - // To turn it into an image, we have to normalize the data into a - // Uint8ClampedArray with alpha. - const fullData = new Uint8ClampedArray( - w * h * this.density * this.density * 4 - ); - - // Default channels that aren't in the framebuffer (e.g. alpha, if the - // framebuffer is in RGB mode instead of RGBA) to 255 - fullData.fill(255); - - const channels = colorFormat.type === this.gl.RGB ? 3 : 4; - for (let y = 0; y < h * this.density; y++) { - for (let x = 0; x < w * this.density; x++) { - for (let channel = 0; channel < 4; channel++) { - const idx = (y * w * this.density + x) * 4 + channel; - if (channel < channels) { - // Find the index of this pixel in `rawData`, which might have a - // different number of channels - const rawDataIdx = channels === 4 - ? idx - : (y * w * this.density + x) * channels + channel; - fullData[idx] = rawData[rawDataIdx]; - } - } - } - } - - // Create an image from the data - const region = new Image(w * this.density, h * this.density); - region.imageData = region.canvas.getContext('2d').createImageData( - region.width, - region.height - ); - region.imageData.data.set(fullData); - region.pixels = region.imageData.data; - region.updatePixels(); - if (this.density !== 1) { - // TODO: support get() at a pixel density > 1 - region.resize(w, h); - } - return region; + return this.renderer.readFramebufferRegion(this, x, y, w, h); } /** @@ -1550,85 +1168,9 @@ class Framebuffer { * */ updatePixels() { - const gl = this.gl; - this.colorP5Texture.bindTexture(); - const colorFormat = this._glColorFormat(); - - const channels = colorFormat.format === gl.RGBA ? 4 : 3; - const len = - this.width * this.height * this.density * this.density * channels; - const TypedArrayClass = colorFormat.type === gl.UNSIGNED_BYTE - ? Uint8Array - : Float32Array; - if ( - !(this.pixels instanceof TypedArrayClass) || this.pixels.length !== len - ) { - throw new Error( - 'The pixels array has not been set correctly. Please call loadPixels() before updatePixels().' 
- ); - } - - gl.texImage2D( - gl.TEXTURE_2D, - 0, - colorFormat.internalFormat, - this.width * this.density, - this.height * this.density, - 0, - colorFormat.format, - colorFormat.type, - this.pixels - ); - this.colorP5Texture.unbindTexture(); + // Let renderer handle the pixel update process + this.renderer.updateFramebufferPixels(this); this.dirty.colorTexture = false; - - const prevFramebuffer = this.renderer.activeFramebuffer(); - if (this.antialias) { - // We need to make sure the antialiased framebuffer also has the updated - // pixels so that if more is drawn to it, it goes on top of the updated - // pixels instead of replacing them. - // We can't blit the framebuffer to the multisampled antialias - // framebuffer to leave both in the same state, so instead we have - // to use image() to put the framebuffer texture onto the antialiased - // framebuffer. - this.begin(); - this.renderer.push(); - // this.renderer.imageMode(constants.CENTER); - this.renderer.states.setValue('imageMode', constants.CORNER); - this.renderer.setCamera(this.filterCamera); - this.renderer.resetMatrix(); - this.renderer.states.setValue('strokeColor', null); - this.renderer.clear(); - this.renderer._drawingFilter = true; - this.renderer.image( - this, - 0, 0, - this.width, this.height, - -this.renderer.width / 2, -this.renderer.height / 2, - this.renderer.width, this.renderer.height - ); - this.renderer._drawingFilter = false; - this.renderer.pop(); - if (this.useDepth) { - gl.clearDepth(1); - gl.clear(gl.DEPTH_BUFFER_BIT); - } - this.end(); - } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer); - if (this.useDepth) { - gl.clearDepth(1); - gl.clear(gl.DEPTH_BUFFER_BIT); - } - if (prevFramebuffer) { - gl.bindFramebuffer( - gl.FRAMEBUFFER, - prevFramebuffer._framebufferToBind() - ); - } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); - } - } } } diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index 9025c0d31a..5a9482baa2 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -6,14 +6,17 @@ import { readPixelsWebGL, readPixelWebGL, setWebGLTextureParams, - setWebGLUniformValue + setWebGLUniformValue, + checkWebGLCapabilities } from './utils'; import { Renderer3D, getStrokeDefs } from "../core/p5.Renderer3D"; import { Shader } from "./p5.Shader"; import { Texture, MipmapTexture } from "./p5.Texture"; import { Framebuffer } from "./p5.Framebuffer"; import { Graphics } from "../core/p5.Graphics"; +import { RGB, RGBA } from '../color/creating_reading'; import { Element } from "../dom/p5.Element"; +import { Image } from '../image/p5.Image'; import filterBaseVert from "./shaders/filters/base.vert"; import lightingShader from "./shaders/lighting.glsl"; @@ -1386,6 +1389,598 @@ class RendererGL extends Renderer3D { populateHooks(shader, src, shaderType) { return populateGLSLHooks(shader, src, shaderType); } + + ////////////////////////////////////////////// + // Framebuffer methods + ////////////////////////////////////////////// + + supportsFramebufferAntialias() { + return this.webglVersion === constants.WEBGL2; + } + + createFramebufferResources(framebuffer) { + const gl = this.GL; + + framebuffer.framebuffer = gl.createFramebuffer(); + if (!framebuffer.framebuffer) { + throw new Error('Unable to create a framebuffer'); + } + + if (framebuffer.antialias) { + framebuffer.aaFramebuffer = gl.createFramebuffer(); + if (!framebuffer.aaFramebuffer) { + throw new Error('Unable to create a framebuffer for antialiasing'); + } + } + } + + 
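createFramebufferResources() gives each renderer control over how the backing framebuffer objects are allocated, and the validateFramebufferFormats() hook that follows lets it downgrade settings it cannot honor for the current context. A short sketch-level example of options that exercise that validation, hedged in that it only relies on the existing createFramebuffer() settings:

    // In WebGL 1 the antialias flag is dropped (supportsFramebufferAntialias() is false),
    // FLOAT falls back to UNSIGNED_BYTE when float textures are unsupported, and
    // RGB channels are promoted to RGBA for float formats.
    const fbo = p5.createFramebuffer({
      format: p5.FLOAT,
      channels: p5.RGB,
      antialias: true
    });
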
validateFramebufferFormats(framebuffer) { + const gl = this.GL; + + if ( + framebuffer.useDepth && + this.webglVersion === constants.WEBGL && + !gl.getExtension('WEBGL_depth_texture') + ) { + console.warn( + 'Unable to create depth textures in this environment. Falling back ' + + 'to a framebuffer without depth.' + ); + framebuffer.useDepth = false; + } + + if ( + framebuffer.useDepth && + this.webglVersion === constants.WEBGL && + framebuffer.depthFormat === constants.FLOAT + ) { + console.warn( + 'FLOAT depth format is unavailable in WebGL 1. ' + + 'Defaulting to UNSIGNED_INT.' + ); + framebuffer.depthFormat = constants.UNSIGNED_INT; + } + + if (![ + constants.UNSIGNED_BYTE, + constants.FLOAT, + constants.HALF_FLOAT + ].includes(framebuffer.format)) { + console.warn( + 'Unknown Framebuffer format. ' + + 'Please use UNSIGNED_BYTE, FLOAT, or HALF_FLOAT. ' + + 'Defaulting to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + if (framebuffer.useDepth && ![ + constants.UNSIGNED_INT, + constants.FLOAT + ].includes(framebuffer.depthFormat)) { + console.warn( + 'Unknown Framebuffer depth format. ' + + 'Please use UNSIGNED_INT or FLOAT. Defaulting to FLOAT.' + ); + framebuffer.depthFormat = constants.FLOAT; + } + + const support = checkWebGLCapabilities(this); + if (!support.float && framebuffer.format === constants.FLOAT) { + console.warn( + 'This environment does not support FLOAT textures. ' + + 'Falling back to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + if ( + framebuffer.useDepth && + !support.float && + framebuffer.depthFormat === constants.FLOAT + ) { + console.warn( + 'This environment does not support FLOAT depth textures. ' + + 'Falling back to UNSIGNED_INT.' + ); + framebuffer.depthFormat = constants.UNSIGNED_INT; + } + if (!support.halfFloat && framebuffer.format === constants.HALF_FLOAT) { + console.warn( + 'This environment does not support HALF_FLOAT textures. ' + + 'Falling back to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + + if ( + framebuffer.channels === RGB && + [constants.FLOAT, constants.HALF_FLOAT].includes(framebuffer.format) + ) { + console.warn( + 'FLOAT and HALF_FLOAT formats do not work cross-platform with only ' + + 'RGB channels. Falling back to RGBA.' 
+ ); + framebuffer.channels = RGBA; + } + } + + recreateFramebufferTextures(framebuffer) { + const gl = this.GL; + + const prevBoundTexture = gl.getParameter(gl.TEXTURE_BINDING_2D); + const prevBoundFramebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING); + + const colorTexture = gl.createTexture(); + if (!colorTexture) { + throw new Error('Unable to create color texture'); + } + gl.bindTexture(gl.TEXTURE_2D, colorTexture); + const colorFormat = this._getFramebufferColorFormat(framebuffer); + gl.texImage2D( + gl.TEXTURE_2D, + 0, + colorFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + colorFormat.format, + colorFormat.type, + null + ); + framebuffer.colorTexture = colorTexture; + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.framebuffer); + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + gl.COLOR_ATTACHMENT0, + gl.TEXTURE_2D, + colorTexture, + 0 + ); + + if (framebuffer.useDepth) { + // Create the depth texture + const depthTexture = gl.createTexture(); + if (!depthTexture) { + throw new Error('Unable to create depth texture'); + } + const depthFormat = this._getFramebufferDepthFormat(framebuffer); + gl.bindTexture(gl.TEXTURE_2D, depthTexture); + gl.texImage2D( + gl.TEXTURE_2D, + 0, + depthFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + depthFormat.format, + depthFormat.type, + null + ); + + gl.framebufferTexture2D( + gl.FRAMEBUFFER, + framebuffer.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, + gl.TEXTURE_2D, + depthTexture, + 0 + ); + framebuffer.depthTexture = depthTexture; + } + + // Create separate framebuffer for antialiasing + if (framebuffer.antialias) { + framebuffer.colorRenderbuffer = gl.createRenderbuffer(); + gl.bindRenderbuffer(gl.RENDERBUFFER, framebuffer.colorRenderbuffer); + gl.renderbufferStorageMultisample( + gl.RENDERBUFFER, + Math.max( + 0, + Math.min(framebuffer.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) + ), + colorFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density + ); + + if (framebuffer.useDepth) { + const depthFormat = this._getFramebufferDepthFormat(framebuffer); + framebuffer.depthRenderbuffer = gl.createRenderbuffer(); + gl.bindRenderbuffer(gl.RENDERBUFFER, framebuffer.depthRenderbuffer); + gl.renderbufferStorageMultisample( + gl.RENDERBUFFER, + Math.max( + 0, + Math.min(framebuffer.antialiasSamples, gl.getParameter(gl.MAX_SAMPLES)) + ), + depthFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density + ); + } + + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.aaFramebuffer); + gl.framebufferRenderbuffer( + gl.FRAMEBUFFER, + gl.COLOR_ATTACHMENT0, + gl.RENDERBUFFER, + framebuffer.colorRenderbuffer + ); + if (framebuffer.useDepth) { + gl.framebufferRenderbuffer( + gl.FRAMEBUFFER, + framebuffer.useStencil ? gl.DEPTH_STENCIL_ATTACHMENT : gl.DEPTH_ATTACHMENT, + gl.RENDERBUFFER, + framebuffer.depthRenderbuffer + ); + } + } + + gl.bindTexture(gl.TEXTURE_2D, prevBoundTexture); + gl.bindFramebuffer(gl.FRAMEBUFFER, prevBoundFramebuffer); + } + + /** + * To create a WebGL texture, one needs to supply three pieces of information: + * the type (the data type each channel will be stored as, e.g. int or float), + * the format (the color channels that will each be stored in the previously + * specified type, e.g. 
rgb or rgba), and the internal format (the specifics + * of how data for each channel, in the aforementioned type, will be packed + * together, such as how many bits to use, e.g. RGBA32F or RGB565.) + * + * The format and channels asked for by the user hint at what these values + * need to be, and the WebGL version affects what options are avaiable. + * This method returns the values for these three properties, given the + * framebuffer's settings. + * + * @private + */ + _getFramebufferColorFormat(framebuffer) { + let type, format, internalFormat; + const gl = this.GL; + + if (framebuffer.format === constants.FLOAT) { + type = gl.FLOAT; + } else if (framebuffer.format === constants.HALF_FLOAT) { + type = this.webglVersion === constants.WEBGL2 + ? gl.HALF_FLOAT + : gl.getExtension('OES_texture_half_float').HALF_FLOAT_OES; + } else { + type = gl.UNSIGNED_BYTE; + } + + if (framebuffer.channels === RGBA) { + format = gl.RGBA; + } else { + format = gl.RGB; + } + + if (this.webglVersion === constants.WEBGL2) { + // https://webgl2fundamentals.org/webgl/lessons/webgl-data-textures.html + const table = { + [gl.FLOAT]: { + [gl.RGBA]: gl.RGBA32F + // gl.RGB32F is not available in Firefox without an alpha channel + }, + [gl.HALF_FLOAT]: { + [gl.RGBA]: gl.RGBA16F + // gl.RGB16F is not available in Firefox without an alpha channel + }, + [gl.UNSIGNED_BYTE]: { + [gl.RGBA]: gl.RGBA8, // gl.RGBA4 + [gl.RGB]: gl.RGB8 // gl.RGB565 + } + }; + internalFormat = table[type][format]; + } else if (framebuffer.format === constants.HALF_FLOAT) { + internalFormat = gl.RGBA; + } else { + internalFormat = format; + } + + return { internalFormat, format, type }; + } + + /** + * To create a WebGL texture, one needs to supply three pieces of information: + * the type (the data type each channel will be stored as, e.g. int or float), + * the format (the color channels that will each be stored in the previously + * specified type, e.g. rgb or rgba), and the internal format (the specifics + * of how data for each channel, in the aforementioned type, will be packed + * together, such as how many bits to use, e.g. RGBA32F or RGB565.) + * + * This method takes into account the settings asked for by the user and + * returns values for these three properties that can be used for the + * texture storing depth information. 
+ * + * @private + */ + _getFramebufferDepthFormat(framebuffer) { + let type, format, internalFormat; + const gl = this.GL; + + if (framebuffer.useStencil) { + if (framebuffer.depthFormat === constants.FLOAT) { + type = gl.FLOAT_32_UNSIGNED_INT_24_8_REV; + } else if (this.webglVersion === constants.WEBGL2) { + type = gl.UNSIGNED_INT_24_8; + } else { + type = gl.getExtension('WEBGL_depth_texture').UNSIGNED_INT_24_8_WEBGL; + } + } else { + if (framebuffer.depthFormat === constants.FLOAT) { + type = gl.FLOAT; + } else { + type = gl.UNSIGNED_INT; + } + } + + if (framebuffer.useStencil) { + format = gl.DEPTH_STENCIL; + } else { + format = gl.DEPTH_COMPONENT; + } + + if (framebuffer.useStencil) { + if (framebuffer.depthFormat === constants.FLOAT) { + internalFormat = gl.DEPTH32F_STENCIL8; + } else if (this.webglVersion === constants.WEBGL2) { + internalFormat = gl.DEPTH24_STENCIL8; + } else { + internalFormat = gl.DEPTH_STENCIL; + } + } else if (this.webglVersion === constants.WEBGL2) { + if (framebuffer.depthFormat === constants.FLOAT) { + internalFormat = gl.DEPTH_COMPONENT32F; + } else { + internalFormat = gl.DEPTH_COMPONENT24; + } + } else { + internalFormat = gl.DEPTH_COMPONENT; + } + + return { internalFormat, format, type }; + } + + _deleteFramebufferTexture(texture) { + const gl = this.GL; + gl.deleteTexture(texture.rawTexture().texture); + this.textures.delete(texture); + } + + deleteFramebufferTextures(framebuffer) { + this._deleteFramebufferTexture(framebuffer.color) + if (framebuffer.depth) this._deleteFramebufferTexture(framebuffer.depth); + const gl = this.GL; + if (framebuffer.colorRenderbuffer) gl.deleteRenderbuffer(framebuffer.colorRenderbuffer); + if (framebuffer.depthRenderbuffer) gl.deleteRenderbuffer(framebuffer.depthRenderbuffer); + } + + deleteFramebufferResources(framebuffer) { + const gl = this.GL; + gl.deleteFramebuffer(framebuffer.framebuffer); + if (framebuffer.aaFramebuffer) { + gl.deleteFramebuffer(framebuffer.aaFramebuffer); + } + if (framebuffer.depthRenderbuffer) { + gl.deleteRenderbuffer(framebuffer.depthRenderbuffer); + } + if (framebuffer.colorRenderbuffer) { + gl.deleteRenderbuffer(framebuffer.colorRenderbuffer); + } + } + + getFramebufferToBind(framebuffer) { + if (framebuffer.antialias) { + return framebuffer.aaFramebuffer; + } else { + return framebuffer.framebuffer; + } + } + + updateFramebufferTexture(framebuffer, property) { + if (framebuffer.antialias) { + const gl = this.GL; + gl.bindFramebuffer(gl.READ_FRAMEBUFFER, framebuffer.aaFramebuffer); + gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, framebuffer.framebuffer); + const partsToCopy = { + colorTexture: [ + gl.COLOR_BUFFER_BIT, + framebuffer.colorP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST + ], + }; + if (framebuffer.useDepth) { + partsToCopy.depthTexture = [ + gl.DEPTH_BUFFER_BIT, + framebuffer.depthP5Texture.magFilter === constants.LINEAR ? gl.LINEAR : gl.NEAREST + ]; + } + const [flag, filter] = partsToCopy[property]; + gl.blitFramebuffer( + 0, + 0, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + 0, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + flag, + filter + ); + + const activeFbo = this.activeFramebuffer(); + this.bindFramebuffer(activeFbo); + } + } + + bindFramebuffer(framebuffer) { + const gl = this.GL; + gl.bindFramebuffer( + gl.FRAMEBUFFER, + framebuffer + ? 
this.getFramebufferToBind(framebuffer) + : null + ); + } + + readFramebufferPixels(framebuffer) { + const gl = this.GL; + const prevFramebuffer = this.activeFramebuffer(); + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.framebuffer); + const colorFormat = this._getFramebufferColorFormat(framebuffer); + const pixels = readPixelsWebGL( + framebuffer.pixels, + gl, + framebuffer.framebuffer, + 0, + 0, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + colorFormat.format, + colorFormat.type + ); + this.bindFramebuffer(prevFramebuffer); + return pixels; + } + + readFramebufferPixel(framebuffer, x, y) { + const colorFormat = this._getFramebufferColorFormat(framebuffer); + return readPixelWebGL( + this.GL, + framebuffer.framebuffer, + x, + y, + colorFormat.format, + colorFormat.type + ); + } + + readFramebufferRegion(framebuffer, x, y, w, h) { + const gl = this.GL; + const colorFormat = this._getFramebufferColorFormat(framebuffer); + + const rawData = readPixelsWebGL( + undefined, + gl, + framebuffer.framebuffer, + x * framebuffer.density, + y * framebuffer.density, + w * framebuffer.density, + h * framebuffer.density, + colorFormat.format, + colorFormat.type + ); + + // Framebuffer data might be either a Uint8Array or Float32Array + // depending on its format, and it may or may not have an alpha channel. + // To turn it into an image, we have to normalize the data into a + // Uint8ClampedArray with alpha. + const fullData = new Uint8ClampedArray( + w * h * framebuffer.density * framebuffer.density * 4 + ); + // Default channels that aren't in the framebuffer (e.g. alpha, if the + // framebuffer is in RGB mode instead of RGBA) to 255 + fullData.fill(255); + + const channels = colorFormat.format === gl.RGB ? 3 : 4; + for (let yPos = 0; yPos < h * framebuffer.density; yPos++) { + for (let xPos = 0; xPos < w * framebuffer.density; xPos++) { + for (let channel = 0; channel < 4; channel++) { + const idx = (yPos * w * framebuffer.density + xPos) * 4 + channel; + if (channel < channels) { + // Find the index of this pixel in `rawData`, which might have a + // different number of channels + const rawDataIdx = channels === 4 + ? idx + : (yPos * w * framebuffer.density + xPos) * channels + channel; + fullData[idx] = rawData[rawDataIdx]; + } + } + } + } + + // Create image from data + const region = new Image(w * framebuffer.density, h * framebuffer.density); + region.imageData = region.canvas.getContext('2d').createImageData( + region.width, + region.height + ); + region.imageData.data.set(fullData); + region.pixels = region.imageData.data; + region.updatePixels(); + if (framebuffer.density !== 1) { + region.pixelDensity(framebuffer.density); + } + return region; + } + + updateFramebufferPixels(framebuffer) { + const gl = this.GL; + framebuffer.colorP5Texture.bindTexture(); + const colorFormat = this._getFramebufferColorFormat(framebuffer); + + const channels = colorFormat.format === gl.RGBA ? 4 : 3; + const len = framebuffer.width * framebuffer.height * framebuffer.density * framebuffer.density * channels; + const TypedArrayClass = colorFormat.type === gl.UNSIGNED_BYTE ? Uint8Array : Float32Array; + + if (!(framebuffer.pixels instanceof TypedArrayClass) || framebuffer.pixels.length !== len) { + throw new Error( + 'The pixels array has not been set correctly. Please call loadPixels() before updatePixels().' 
+ ); + } + + gl.texImage2D( + gl.TEXTURE_2D, + 0, + colorFormat.internalFormat, + framebuffer.width * framebuffer.density, + framebuffer.height * framebuffer.density, + 0, + colorFormat.format, + colorFormat.type, + framebuffer.pixels + ); + framebuffer.colorP5Texture.unbindTexture(); + + const prevFramebuffer = this.activeFramebuffer(); + if (framebuffer.antialias) { + // We need to make sure the antialiased framebuffer also has the updated + // pixels so that if more is drawn to it, it goes on top of the updated + // pixels instead of replacing them. + // We can't blit the framebuffer to the multisampled antialias + // framebuffer to leave both in the same state, so instead we have + // to use image() to put the framebuffer texture onto the antialiased + // framebuffer. + framebuffer.begin(); + this.push(); + this.states.setValue('imageMode', constants.CORNER); + this.setCamera(framebuffer.filterCamera); + this.resetMatrix(); + this.states.setValue('strokeColor', null); + this.clear(); + this._drawingFilter = true; + this.image( + framebuffer, + 0, 0, + framebuffer.width, framebuffer.height, + -this.width / 2, -this.height / 2, + this.width, this.height + ); + this._drawingFilter = false; + this.pop(); + if (framebuffer.useDepth) { + gl.clearDepth(1); + gl.clear(gl.DEPTH_BUFFER_BIT); + } + framebuffer.end(); + } else { + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer.framebuffer); + if (framebuffer.useDepth) { + gl.clearDepth(1); + gl.clear(gl.DEPTH_BUFFER_BIT); + } + this.bindFramebuffer(prevFramebuffer); + } + } } function rendererGL(p5, fn) { diff --git a/src/webgl/p5.Texture.js b/src/webgl/p5.Texture.js index c88389bb8e..4ea07a1fba 100644 --- a/src/webgl/p5.Texture.js +++ b/src/webgl/p5.Texture.js @@ -382,27 +382,6 @@ function texture(p5, fn){ p5.MipmapTexture = MipmapTexture; } -export function checkWebGLCapabilities({ GL, webglVersion }) { - const gl = GL; - const supportsFloat = webglVersion === constants.WEBGL2 - ? (gl.getExtension('EXT_color_buffer_float') && - gl.getExtension('EXT_float_blend')) - : gl.getExtension('OES_texture_float'); - const supportsFloatLinear = supportsFloat && - gl.getExtension('OES_texture_float_linear'); - const supportsHalfFloat = webglVersion === constants.WEBGL2 - ? gl.getExtension('EXT_color_buffer_float') - : gl.getExtension('OES_texture_half_float'); - const supportsHalfFloatLinear = supportsHalfFloat && - gl.getExtension('OES_texture_half_float_linear'); - return { - float: supportsFloat, - floatLinear: supportsFloatLinear, - halfFloat: supportsHalfFloat, - halfFloatLinear: supportsHalfFloatLinear - }; -} - export default texture; export { Texture, MipmapTexture }; diff --git a/src/webgl/utils.js b/src/webgl/utils.js index 70766ac522..0727e91e1f 100644 --- a/src/webgl/utils.js +++ b/src/webgl/utils.js @@ -448,3 +448,24 @@ export function populateGLSLHooks(shader, src, shaderType) { return preMain + '\n' + defines + hooks + main + postMain; } + +export function checkWebGLCapabilities({ GL, webglVersion }) { + const gl = GL; + const supportsFloat = webglVersion === constants.WEBGL2 + ? (gl.getExtension('EXT_color_buffer_float') && + gl.getExtension('EXT_float_blend')) + : gl.getExtension('OES_texture_float'); + const supportsFloatLinear = supportsFloat && + gl.getExtension('OES_texture_float_linear'); + const supportsHalfFloat = webglVersion === constants.WEBGL2 + ? 
gl.getExtension('EXT_color_buffer_float') + : gl.getExtension('OES_texture_half_float'); + const supportsHalfFloatLinear = supportsHalfFloat && + gl.getExtension('OES_texture_half_float_linear'); + return { + float: supportsFloat, + floatLinear: supportsFloatLinear, + halfFloat: supportsHalfFloat, + halfFloatLinear: supportsHalfFloatLinear + }; +} diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 9ded1277d2..a8732d22dd 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -1,6 +1,8 @@ import { Renderer3D, getStrokeDefs } from '../core/p5.Renderer3D'; import { Shader } from '../webgl/p5.Shader'; import { Texture } from '../webgl/p5.Texture'; +import { Image } from '../image/p5.Image'; +import { RGB, RGBA } from '../color/creating_reading'; import * as constants from '../core/constants'; @@ -17,6 +19,10 @@ class RendererWebGPU extends Renderer3D { this.renderPass = {}; this.samplers = new Map(); + + // Single reusable staging buffer for pixel reading + this.pixelReadBuffer = null; + this.pixelReadBufferSize = 0; } async setupContext() { @@ -1120,6 +1126,382 @@ class RendererWebGPU extends Renderer3D { return preMain + '\n' + defines + hooks + main + postMain; } + + ////////////////////////////////////////////// + // Buffer management for pixel reading + ////////////////////////////////////////////// + + _ensurePixelReadBuffer(requiredSize) { + // Create or resize staging buffer if needed + if (!this.pixelReadBuffer || this.pixelReadBufferSize < requiredSize) { + // Clean up old buffer + if (this.pixelReadBuffer) { + this.pixelReadBuffer.destroy(); + } + + // Create new buffer with padding to avoid frequent recreations + // Scale by 2 to ensure integer size and reasonable headroom + const bufferSize = Math.max(requiredSize, this.pixelReadBufferSize * 2); + this.pixelReadBuffer = this.device.createBuffer({ + size: bufferSize, + usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ, + }); + this.pixelReadBufferSize = bufferSize; + } + return this.pixelReadBuffer; + } + + ////////////////////////////////////////////// + // Framebuffer methods + ////////////////////////////////////////////// + + supportsFramebufferAntialias() { + return true; + } + + createFramebufferResources(framebuffer) { + } + + validateFramebufferFormats(framebuffer) { + if (![ + constants.UNSIGNED_BYTE, + constants.FLOAT, + constants.HALF_FLOAT + ].includes(framebuffer.format)) { + console.warn( + 'Unknown Framebuffer format. ' + + 'Please use UNSIGNED_BYTE, FLOAT, or HALF_FLOAT. ' + + 'Defaulting to UNSIGNED_BYTE.' + ); + framebuffer.format = constants.UNSIGNED_BYTE; + } + + if (framebuffer.useDepth && ![ + constants.UNSIGNED_INT, + constants.FLOAT + ].includes(framebuffer.depthFormat)) { + console.warn( + 'Unknown Framebuffer depth format. ' + + 'Please use UNSIGNED_INT or FLOAT. Defaulting to FLOAT.' 
+ ); + framebuffer.depthFormat = constants.FLOAT; + } + } + + recreateFramebufferTextures(framebuffer) { + if (framebuffer.colorTexture && framebuffer.colorTexture.destroy) { + framebuffer.colorTexture.destroy(); + } + if (framebuffer.depthTexture && framebuffer.depthTexture.destroy) { + framebuffer.depthTexture.destroy(); + } + + const colorTextureDescriptor = { + size: { + width: framebuffer.width * framebuffer.density, + height: framebuffer.height * framebuffer.density, + depthOrArrayLayers: 1, + }, + format: this._getWebGPUColorFormat(framebuffer), + usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_SRC, + sampleCount: framebuffer.antialias ? framebuffer.antialiasSamples : 1, + }; + + framebuffer.colorTexture = this.device.createTexture(colorTextureDescriptor); + + if (framebuffer.useDepth) { + const depthTextureDescriptor = { + size: { + width: framebuffer.width * framebuffer.density, + height: framebuffer.height * framebuffer.density, + depthOrArrayLayers: 1, + }, + format: this._getWebGPUDepthFormat(framebuffer), + usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING, + sampleCount: framebuffer.antialias ? framebuffer.antialiasSamples : 1, + }; + + framebuffer.depthTexture = this.device.createTexture(depthTextureDescriptor); + } + } + + _getWebGPUColorFormat(framebuffer) { + if (framebuffer.format === constants.FLOAT) { + return framebuffer.channels === RGBA ? 'rgba32float' : 'rgba32float'; + } else if (framebuffer.format === constants.HALF_FLOAT) { + return framebuffer.channels === RGBA ? 'rgba16float' : 'rgba16float'; + } else { + return framebuffer.channels === RGBA ? 'rgba8unorm' : 'rgba8unorm'; + } + } + + _getWebGPUDepthFormat(framebuffer) { + if (framebuffer.useStencil) { + return framebuffer.depthFormat === constants.FLOAT ? 'depth32float-stencil8' : 'depth24plus-stencil8'; + } else { + return framebuffer.depthFormat === constants.FLOAT ? 
'depth32float' : 'depth24plus'; + } + } + + _deleteFramebufferTexture(texture) { + const handle = texture.rawTexture(); + if (handle.texture && handle.texture.destroy) { + handle.texture.destroy(); + } + this.textures.delete(texture); + } + + deleteFramebufferTextures(framebuffer) { + this._deleteFramebufferTexture(framebuffer.color) + if (framebuffer.depth) this._deleteFramebufferTexture(framebuffer.depth); + } + + deleteFramebufferResources(framebuffer) { + if (framebuffer.colorTexture && framebuffer.colorTexture.destroy) { + framebuffer.colorTexture.destroy(); + } + if (framebuffer.depthTexture && framebuffer.depthTexture.destroy) { + framebuffer.depthTexture.destroy(); + } + } + + getFramebufferToBind(framebuffer) { + } + + updateFramebufferTexture(framebuffer, property) { + // No-op for WebGPU since antialiasing is handled at pipeline level + } + + bindFramebuffer(framebuffer) {} + + async readFramebufferPixels(framebuffer) { + const width = framebuffer.width * framebuffer.density; + const height = framebuffer.height * framebuffer.density; + const bytesPerPixel = 4; + const bufferSize = width * height * bytesPerPixel; + + const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { texture: framebuffer.colorTexture }, + { buffer: stagingBuffer, bytesPerRow: width * bytesPerPixel }, + { width, height, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); + const result = new Uint8Array(mappedRange.slice(0, bufferSize)); + + stagingBuffer.unmap(); + return result; + } + + async readFramebufferPixel(framebuffer, x, y) { + const bytesPerPixel = 4; + const stagingBuffer = this._ensurePixelReadBuffer(bytesPerPixel); + + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { + texture: framebuffer.colorTexture, + origin: { x, y, z: 0 } + }, + { buffer: stagingBuffer, bytesPerRow: bytesPerPixel }, + { width: 1, height: 1, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bytesPerPixel); + const mappedRange = stagingBuffer.getMappedRange(0, bytesPerPixel); + const pixelData = new Uint8Array(mappedRange); + const result = [pixelData[0], pixelData[1], pixelData[2], pixelData[3]]; + + stagingBuffer.unmap(); + return result; + } + + async readFramebufferRegion(framebuffer, x, y, w, h) { + const width = w * framebuffer.density; + const height = h * framebuffer.density; + const bytesPerPixel = 4; + const bufferSize = width * height * bytesPerPixel; + + const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { + texture: framebuffer.colorTexture, + origin: { x: x * framebuffer.density, y: y * framebuffer.density, z: 0 } + }, + { buffer: stagingBuffer, bytesPerRow: width * bytesPerPixel }, + { width, height, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); + const pixelData = new Uint8Array(mappedRange.slice(0, bufferSize)); + + // WebGPU doesn't need vertical flipping unlike WebGL + const region = new Image(width, height); + 
region.imageData = region.canvas.getContext('2d').createImageData(width, height); + region.imageData.data.set(pixelData); + region.pixels = region.imageData.data; + region.updatePixels(); + + if (framebuffer.density !== 1) { + region.pixelDensity(framebuffer.density); + } + + stagingBuffer.unmap(); + return region; + } + + updateFramebufferPixels(framebuffer) { + const width = framebuffer.width * framebuffer.density; + const height = framebuffer.height * framebuffer.density; + const bytesPerPixel = 4; + + const expectedLength = width * height * bytesPerPixel; + if (!framebuffer.pixels || framebuffer.pixels.length !== expectedLength) { + throw new Error( + 'The pixels array has not been set correctly. Please call loadPixels() before updatePixels().' + ); + } + + this.device.queue.writeTexture( + { texture: framebuffer.colorTexture }, + framebuffer.pixels, + { + bytesPerRow: width * bytesPerPixel, + rowsPerImage: height + }, + { width, height, depthOrArrayLayers: 1 } + ); + } + + ////////////////////////////////////////////// + // Main canvas pixel methods + ////////////////////////////////////////////// + + async loadPixels() { + const width = this.width * this._pixelDensity; + const height = this.height * this._pixelDensity; + const bytesPerPixel = 4; + const bufferSize = width * height * bytesPerPixel; + + const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + + // Get the current canvas texture + const canvasTexture = this.drawingContext.getCurrentTexture(); + + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { texture: canvasTexture }, + { buffer: stagingBuffer, bytesPerRow: width * bytesPerPixel }, + { width, height, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); + this.pixels = new Uint8Array(mappedRange.slice(0, bufferSize)); + + stagingBuffer.unmap(); + return this.pixels; + } + + async _getPixel(x, y) { + const bytesPerPixel = 4; + const stagingBuffer = this._ensurePixelReadBuffer(bytesPerPixel); + + const canvasTexture = this.drawingContext.getCurrentTexture(); + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { + texture: canvasTexture, + origin: { x, y, z: 0 } + }, + { buffer: stagingBuffer, bytesPerRow: bytesPerPixel }, + { width: 1, height: 1, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bytesPerPixel); + const mappedRange = stagingBuffer.getMappedRange(0, bytesPerPixel); + const pixelData = new Uint8Array(mappedRange); + const result = [pixelData[0], pixelData[1], pixelData[2], pixelData[3]]; + + stagingBuffer.unmap(); + return result; + } + + async get(x, y, w, h) { + const pd = this._pixelDensity; + + if (typeof x === 'undefined' && typeof y === 'undefined') { + // get() - return entire canvas + x = y = 0; + w = this.width; + h = this.height; + } else { + x *= pd; + y *= pd; + + if (typeof w === 'undefined' && typeof h === 'undefined') { + // get(x,y) - single pixel + if (x < 0 || y < 0 || x >= this.width * pd || y >= this.height * pd) { + return [0, 0, 0, 0]; + } + + return this._getPixel(x, y); + } + // get(x,y,w,h) - region + } + + // Read region and create p5.Image + const width = w * pd; + const height = h * pd; + const bytesPerPixel = 4; + const bufferSize = width * height * bytesPerPixel; + + 
const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + + const canvasTexture = this.drawingContext.getCurrentTexture(); + const commandEncoder = this.device.createCommandEncoder(); + commandEncoder.copyTextureToBuffer( + { + texture: canvasTexture, + origin: { x, y, z: 0 } + }, + { buffer: stagingBuffer, bytesPerRow: width * bytesPerPixel }, + { width, height, depthOrArrayLayers: 1 } + ); + + this.device.queue.submit([commandEncoder.finish()]); + + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); + const pixelData = new Uint8Array(mappedRange.slice(0, bufferSize)); + + const region = new Image(width, height); + region.pixelDensity(pd); + region.imageData = region.canvas.getContext('2d').createImageData(width, height); + region.imageData.data.set(pixelData); + region.pixels = region.imageData.data; + region.updatePixels(); + + stagingBuffer.unmap(); + return region; + } } function rendererWebGPU(p5, fn) { diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index efd8cc7e93..363626807a 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -103,4 +103,168 @@ visualSuite('WebGPU', function() { screenshot(); }); }); + + visualSuite('Framebuffers', function() { + visualTest('Basic framebuffer draw to canvas', async function(p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create a framebuffer + const fbo = p5.createFramebuffer({ width: 25, height: 25 }); + + // Draw to the framebuffer + fbo.draw(() => { + p5.background(255, 0, 0); // Red background + p5.fill(0, 255, 0); // Green circle + p5.noStroke(); + p5.circle(12.5, 12.5, 20); + }); + + // Draw the framebuffer to the main canvas + p5.background(0, 0, 255); // Blue background + p5.texture(fbo); + p5.noStroke(); + p5.plane(25, 25); + + screenshot(); + }); + + visualTest('Framebuffer with different sizes', async function(p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create two different sized framebuffers + const fbo1 = p5.createFramebuffer({ width: 20, height: 20 }); + const fbo2 = p5.createFramebuffer({ width: 15, height: 15 }); + + // Draw to first framebuffer + fbo1.draw(() => { + p5.background(255, 100, 100); + p5.fill(255, 255, 0); + p5.noStroke(); + p5.rect(5, 5, 10, 10); + }); + + // Draw to second framebuffer + fbo2.draw(() => { + p5.background(100, 255, 100); + p5.fill(255, 0, 255); + p5.noStroke(); + p5.circle(7.5, 7.5, 10); + }); + + // Draw both to main canvas + p5.background(50); + p5.push(); + p5.translate(-12.5, -12.5); + p5.texture(fbo1); + p5.noStroke(); + p5.plane(20, 20); + p5.pop(); + + p5.push(); + p5.translate(12.5, 12.5); + p5.texture(fbo2); + p5.noStroke(); + p5.plane(15, 15); + p5.pop(); + + screenshot(); + }); + + visualTest('Auto-sized framebuffer', async function(p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create auto-sized framebuffer (should match canvas size) + const fbo = p5.createFramebuffer(); + + // Draw to the framebuffer + fbo.draw(() => { + p5.background(0); + p5.stroke(255); + p5.strokeWeight(2); + p5.noFill(); + // Draw a grid pattern to verify size + for (let x = 0; x < 50; x += 10) { + p5.line(x, 0, x, 50); + } + for (let y = 0; y < 50; y += 10) { + p5.line(0, y, 50, y); + } + p5.fill(255, 0, 0); + p5.noStroke(); + p5.circle(25, 25, 15); + }); + + // Draw the framebuffer to fill the main canvas + p5.texture(fbo); + p5.noStroke(); + p5.plane(50, 50); + + screenshot(); + }); + + 
visualTest('Auto-sized framebuffer after canvas resize', async function(p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create auto-sized framebuffer + const fbo = p5.createFramebuffer(); + + // Resize the canvas (framebuffer should auto-resize) + p5.resizeCanvas(30, 30); + + // Draw to the framebuffer after resize + fbo.draw(() => { + p5.background(100, 0, 100); + p5.fill(0, 255, 255); + p5.noStroke(); + // Draw a shape that fills the new size + p5.rect(5, 5, 20, 20); + p5.fill(255, 255, 0); + p5.circle(15, 15, 10); + }); + + // Draw the framebuffer to the main canvas + p5.texture(fbo); + p5.noStroke(); + p5.plane(30, 30); + + screenshot(); + }); + + visualTest('Fixed-size framebuffer after manual resize', async function(p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create fixed-size framebuffer + const fbo = p5.createFramebuffer({ width: 20, height: 20 }); + + // Draw initial content + fbo.draw(() => { + p5.background(255, 200, 100); + p5.fill(0, 100, 200); + p5.noStroke(); + p5.circle(10, 10, 15); + }); + + // Manually resize the framebuffer + fbo.resize(35, 25); + + // Draw new content to the resized framebuffer + fbo.draw(() => { + p5.background(200, 255, 100); + p5.fill(200, 0, 100); + p5.noStroke(); + // Draw content that uses the new size + p5.rect(5, 5, 25, 15); + p5.fill(0, 0, 255); + p5.circle(17.5, 12.5, 8); + }); + + // Draw the resized framebuffer to the main canvas + p5.background(50); + p5.texture(fbo); + p5.noStroke(); + p5.plane(35, 25); + + screenshot(); + }); + }); }); diff --git a/test/unit/webgl/p5.RendererGL.js b/test/unit/webgl/p5.RendererGL.js index 34b64abdfd..f437ac4c20 100644 --- a/test/unit/webgl/p5.RendererGL.js +++ b/test/unit/webgl/p5.RendererGL.js @@ -1098,7 +1098,7 @@ suite('p5.RendererGL', function() { assert.isTrue(img.length === 4); }); - test('updatePixels() matches 2D mode', function() { + test.only('updatePixels() matches 2D mode', function() { myp5.createCanvas(20, 20); myp5.pixelDensity(1); const getColors = function(mode) { @@ -1120,6 +1120,7 @@ suite('p5.RendererGL', function() { }; const p2d = getColors(myp5.P2D); + debugger const webgl = getColors(myp5.WEBGL); myp5.image(p2d, 0, 0); myp5.blendMode(myp5.DIFFERENCE); diff --git a/test/unit/webgpu/p5.Framebuffer.js b/test/unit/webgpu/p5.Framebuffer.js new file mode 100644 index 0000000000..9fec2f070d --- /dev/null +++ b/test/unit/webgpu/p5.Framebuffer.js @@ -0,0 +1,247 @@ +import p5 from '../../../src/app.js'; + +suite('WebGPU p5.Framebuffer', function() { + let myp5; + let prevPixelRatio; + + beforeAll(async function() { + prevPixelRatio = window.devicePixelRatio; + window.devicePixelRatio = 1; + myp5 = new p5(function(p) { + p.setup = function() {}; + p.draw = function() {}; + }); + }); + + afterAll(function() { + myp5.remove(); + window.devicePixelRatio = prevPixelRatio; + }); + + suite('Creation and basic properties', function() { + test('framebuffers can be created with WebGPU renderer', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + expect(fbo).to.be.an('object'); + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + expect(fbo.autoSized()).to.equal(true); + }); + + test('framebuffers can be created with custom dimensions', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer({ width: 20, height: 30 }); + + expect(fbo.width).to.equal(20); + expect(fbo.height).to.equal(30); + expect(fbo.autoSized()).to.equal(false); + 
}); + + test('framebuffers have color texture', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + expect(fbo.color).to.be.an('object'); + expect(fbo.color.rawTexture).to.be.a('function'); + }); + + test('framebuffers can specify different formats', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer({ + format: 'float', + channels: 'rgb' + }); + + expect(fbo).to.be.an('object'); + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + }); + }); + + suite('Auto-sizing behavior', function() { + test('auto-sized framebuffers change size with canvas', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer(); + + expect(fbo.autoSized()).to.equal(true); + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + expect(fbo.density).to.equal(1); + + myp5.resizeCanvas(15, 20); + myp5.pixelDensity(2); + expect(fbo.width).to.equal(15); + expect(fbo.height).to.equal(20); + expect(fbo.density).to.equal(2); + }); + + test('manually-sized framebuffers do not change size with canvas', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + myp5.pixelDensity(3); + const fbo = myp5.createFramebuffer({ width: 25, height: 30, density: 1 }); + + expect(fbo.autoSized()).to.equal(false); + expect(fbo.width).to.equal(25); + expect(fbo.height).to.equal(30); + expect(fbo.density).to.equal(1); + + myp5.resizeCanvas(5, 15); + myp5.pixelDensity(2); + expect(fbo.width).to.equal(25); + expect(fbo.height).to.equal(30); + expect(fbo.density).to.equal(1); + }); + + test('manually-sized framebuffers can be made auto-sized', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer({ width: 25, height: 30, density: 2 }); + + expect(fbo.autoSized()).to.equal(false); + expect(fbo.width).to.equal(25); + expect(fbo.height).to.equal(30); + expect(fbo.density).to.equal(2); + + // Make it auto-sized + fbo.autoSized(true); + expect(fbo.autoSized()).to.equal(true); + + myp5.resizeCanvas(8, 12); + myp5.pixelDensity(3); + expect(fbo.width).to.equal(8); + expect(fbo.height).to.equal(12); + expect(fbo.density).to.equal(3); + }); + }); + + suite('Manual resizing', function() { + test('framebuffers can be manually resized', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer(); + + expect(fbo.width).to.equal(10); + expect(fbo.height).to.equal(10); + expect(fbo.density).to.equal(1); + + fbo.resize(20, 25); + expect(fbo.width).to.equal(20); + expect(fbo.height).to.equal(25); + expect(fbo.autoSized()).to.equal(false); + }); + + test('resizing affects pixel density', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + myp5.pixelDensity(1); + const fbo = myp5.createFramebuffer(); + + fbo.pixelDensity(3); + expect(fbo.density).to.equal(3); + + fbo.resize(15, 20); + fbo.pixelDensity(2); + expect(fbo.width).to.equal(15); + expect(fbo.height).to.equal(20); + expect(fbo.density).to.equal(2); + }); + }); + + suite('Drawing functionality', function() { + test('can draw to framebuffer with draw() method', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + let drawCallbackExecuted = false; + fbo.draw(() => { + drawCallbackExecuted = true; + myp5.background(255, 0, 0); + myp5.fill(0, 255, 0); + myp5.noStroke(); + 
myp5.circle(5, 5, 8); + }); + + expect(drawCallbackExecuted).to.equal(true); + }); + + test('can use framebuffer as texture', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(255, 0, 0); + }); + + // Should not throw when used as texture + expect(() => { + myp5.texture(fbo); + myp5.plane(10, 10); + }).to.not.throw(); + }); + }); + + suite('Pixel access', function() { + test('loadPixels returns a promise in WebGPU', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(255, 0, 0); + }); + + const result = fbo.loadPixels(); + expect(result).to.be.a('promise'); + + const pixels = await result; + expect(pixels).to.be.an('array'); + expect(pixels.length).to.equal(10 * 10 * 4); + }); + + test('pixels property is set after loadPixels resolves', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(100, 150, 200); + }); + + const pixels = await fbo.loadPixels(); + expect(fbo.pixels).to.equal(pixels); + expect(fbo.pixels.length).to.equal(10 * 10 * 4); + }); + + test('get() returns a promise for single pixel in WebGPU', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(100, 150, 200); + }); + + const result = fbo.get(5, 5); + expect(result).to.be.a('promise'); + + const color = await result; + expect(color).to.be.an('array'); + expect(color).to.have.length(4); + }); + + test('get() returns a promise for region in WebGPU', async function() { + await myp5.createCanvas(10, 10, myp5.WEBGPU); + const fbo = myp5.createFramebuffer(); + + fbo.draw(() => { + myp5.background(100, 150, 200); + }); + + const result = fbo.get(2, 2, 4, 4); + expect(result).to.be.a('promise'); + + const region = await result; + expect(region).to.be.an('object'); // Should be a p5.Image + expect(region.width).to.equal(4); + expect(region.height).to.equal(4); + }); + }); +}); From f94f8981f051cfdc3299058500e48e8b1b59d2a3 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Mon, 28 Jul 2025 11:15:28 -0400 Subject: [PATCH 34/69] Fix ordering of dirty flag --- src/webgl/p5.Framebuffer.js | 1 - src/webgl/p5.RendererGL.js | 1 + test/unit/webgl/p5.RendererGL.js | 3 +-- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/webgl/p5.Framebuffer.js b/src/webgl/p5.Framebuffer.js index af2ab279b5..297e3dd09f 100644 --- a/src/webgl/p5.Framebuffer.js +++ b/src/webgl/p5.Framebuffer.js @@ -1170,7 +1170,6 @@ class Framebuffer { updatePixels() { // Let renderer handle the pixel update process this.renderer.updateFramebufferPixels(this); - this.dirty.colorTexture = false; } } diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index 5a9482baa2..507839e1fe 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -1940,6 +1940,7 @@ class RendererGL extends Renderer3D { framebuffer.pixels ); framebuffer.colorP5Texture.unbindTexture(); + framebuffer.dirty.colorTexture = false; const prevFramebuffer = this.activeFramebuffer(); if (framebuffer.antialias) { diff --git a/test/unit/webgl/p5.RendererGL.js b/test/unit/webgl/p5.RendererGL.js index f437ac4c20..34b64abdfd 100644 --- a/test/unit/webgl/p5.RendererGL.js +++ b/test/unit/webgl/p5.RendererGL.js @@ -1098,7 +1098,7 @@ suite('p5.RendererGL', function() { assert.isTrue(img.length === 4); }); - 
test.only('updatePixels() matches 2D mode', function() { + test('updatePixels() matches 2D mode', function() { myp5.createCanvas(20, 20); myp5.pixelDensity(1); const getColors = function(mode) { @@ -1120,7 +1120,6 @@ suite('p5.RendererGL', function() { }; const p2d = getColors(myp5.P2D); - debugger const webgl = getColors(myp5.WEBGL); myp5.image(p2d, 0, 0); myp5.blendMode(myp5.DIFFERENCE); From 5dfcd2456eee86e63ae2b8332b9c363bcab1dbd6 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Mon, 28 Jul 2025 17:04:02 -0400 Subject: [PATCH 35/69] Make sure textures are cleared at start --- preview/index.html | 7 +- src/webgl/p5.Framebuffer.js | 4 +- src/webgl/p5.RendererGL.js | 15 +++ src/webgl/p5.Texture.js | 2 + src/webgpu/p5.RendererWebGPU.js | 218 +++++++++++++++++++++++++++---- test/unit/visual/cases/webgpu.js | 72 +++++----- test/unit/visual/visualTest.js | 94 ++++++------- 7 files changed, 304 insertions(+), 108 deletions(-) diff --git a/preview/index.html b/preview/index.html index 6e4915ab34..4092992316 100644 --- a/preview/index.html +++ b/preview/index.html @@ -30,6 +30,7 @@ p.setup = async function () { await p.createCanvas(400, 400, p.WEBGPU); + fbo = p.createFramebuffer(); tex = p.createImage(100, 100); tex.loadPixels(); @@ -43,6 +44,10 @@ } } tex.updatePixels(); + fbo.draw(() => { + p.imageMode(p.CENTER); + p.image(tex, 0, 0, p.width, p.height); + }); sh = p.baseMaterialShader().modify({ uniforms: { @@ -87,7 +92,7 @@ 0, //p.width/3 * p.sin(t * 0.9 + i * Math.E + 0.2), p.width/3 * p.sin(t * 1.2 + i * Math.E + 0.3), ) - p.texture(tex) + p.texture(fbo) p.sphere(30); p.pop(); } diff --git a/src/webgl/p5.Framebuffer.js b/src/webgl/p5.Framebuffer.js index 297e3dd09f..0fb5504d25 100644 --- a/src/webgl/p5.Framebuffer.js +++ b/src/webgl/p5.Framebuffer.js @@ -67,7 +67,7 @@ class Framebuffer { this.format = settings.format || constants.UNSIGNED_BYTE; this.channels = settings.channels || ( - this.renderer._pInst._glAttributes.alpha + this.renderer.defaultFramebufferAlpha() ? RGBA : RGB ); @@ -75,7 +75,7 @@ class Framebuffer { this.depthFormat = settings.depthFormat || constants.FLOAT; this.textureFiltering = settings.textureFiltering || constants.LINEAR; if (settings.antialias === undefined) { - this.antialiasSamples = this.renderer._pInst._glAttributes.antialias + this.antialiasSamples = this.renderer.defaultFramebufferAntialias() ? 2 : 0; } else if (typeof settings.antialias === 'number') { diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index 507839e1fe..c6fbfa45a6 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -1302,6 +1302,11 @@ class RendererGL extends Renderer3D { return { texture: tex, glFormat: gl.RGBA, glDataType: gl.UNSIGNED_BYTE }; } + createFramebufferTextureHandle(framebufferTexture) { + // For WebGL, framebuffer texture handles are designed to be null + return null; + } + uploadTextureFromSource({ texture, glFormat, glDataType }, source) { const gl = this.GL; gl.texImage2D(gl.TEXTURE_2D, 0, glFormat, glFormat, glDataType, source); @@ -1394,6 +1399,16 @@ class RendererGL extends Renderer3D { // Framebuffer methods ////////////////////////////////////////////// + defaultFramebufferAlpha() { + return this._pInst._glAttributes.alpha; + } + + defaultFramebufferAntialias() { + return this.supportsFramebufferAntialias() + ? 
this._pInst._glAttributes.antialias + : false; + } + supportsFramebufferAntialias() { return this.webglVersion === constants.WEBGL2; } diff --git a/src/webgl/p5.Texture.js b/src/webgl/p5.Texture.js index 4ea07a1fba..d1c45b84f1 100644 --- a/src/webgl/p5.Texture.js +++ b/src/webgl/p5.Texture.js @@ -128,6 +128,8 @@ class Texture { width: textureData.width, height: textureData.height, }); + } else { + this.textureHandle = this._renderer.createFramebufferTextureHandle(this.src); } this._renderer.setTextureParams(this, { diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index a8732d22dd..29ffcbf40d 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -27,7 +27,10 @@ class RendererWebGPU extends Renderer3D { async setupContext() { this.adapter = await navigator.gpu?.requestAdapter(); - this.device = await this.adapter?.requestDevice(); + this.device = await this.adapter?.requestDevice({ + // Todo: check support + requiredFeatures: ['depth32float-stencil8'] + }); if (!this.device) { throw new Error('Your browser does not support WebGPU.'); } @@ -36,7 +39,8 @@ class RendererWebGPU extends Renderer3D { this.presentationFormat = navigator.gpu.getPreferredCanvasFormat(); this.drawingContext.configure({ device: this.device, - format: this.presentationFormat + format: this.presentationFormat, + usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_SRC, }); // TODO disablable stencil @@ -193,12 +197,34 @@ class RendererWebGPU extends Renderer3D { freeDefs(this.renderer.buffers.user); } + _getValidSampleCount(requestedCount) { + // WebGPU supports sample counts of 1, 4 (and sometimes 8) + if (requestedCount <= 1) return 1; + if (requestedCount <= 4) return 4; + return 4; // Cap at 4 for broader compatibility + } + _shaderOptions({ mode }) { + const activeFramebuffer = this.activeFramebuffer(); + const format = activeFramebuffer ? + this._getWebGPUColorFormat(activeFramebuffer) : + this.presentationFormat; + + const requestedSampleCount = activeFramebuffer ? + (activeFramebuffer.antialias ? activeFramebuffer.antialiasSamples : 1) : + (this.antialias || 1); + const sampleCount = this._getValidSampleCount(requestedSampleCount); + + const depthFormat = activeFramebuffer && activeFramebuffer.useDepth ? + this._getWebGPUDepthFormat(activeFramebuffer) : + this.depthFormat; + return { topology: mode === constants.TRIANGLE_STRIP ? 
'triangle-strip' : 'triangle-list', blendMode: this.states.curBlendMode, - sampleCount: (this.activeFramebuffer() || this).antialias || 1, // TODO - format: this.activeFramebuffer()?.format || this.presentationFormat, // TODO + sampleCount, + format, + depthFormat, } } @@ -209,8 +235,8 @@ class RendererWebGPU extends Renderer3D { shader.fragModule = device.createShaderModule({ code: shader.fragSrc() }); shader._pipelineCache = new Map(); - shader.getPipeline = ({ topology, blendMode, sampleCount, format }) => { - const key = `${topology}_${blendMode}_${sampleCount}_${format}`; + shader.getPipeline = ({ topology, blendMode, sampleCount, format, depthFormat }) => { + const key = `${topology}_${blendMode}_${sampleCount}_${format}_${depthFormat}`; if (!shader._pipelineCache.has(key)) { const pipeline = device.createRenderPipeline({ layout: shader._pipelineLayout, @@ -230,7 +256,7 @@ class RendererWebGPU extends Renderer3D { primitive: { topology }, multisample: { count: sampleCount }, depthStencil: { - format: this.depthFormat, + format: depthFormat, depthWriteEnabled: true, depthCompare: 'less', stencilFront: { @@ -531,9 +557,15 @@ class RendererWebGPU extends Renderer3D { _useShader(shader, options) {} _updateViewport() { + this._origViewport = { + width: this.width, + height: this.height, + }; this._viewport = [0, 0, this.width, this.height]; } + viewport() {} + zClipRange() { return [0, 1]; } @@ -573,14 +605,27 @@ class RendererWebGPU extends Renderer3D { if (!buffers) return; const commandEncoder = this.device.createCommandEncoder(); - const currentTexture = this.drawingContext.getCurrentTexture(); + + // Use framebuffer texture if active, otherwise use canvas texture + const activeFramebuffer = this.activeFramebuffer(); + const colorTexture = activeFramebuffer ? + (activeFramebuffer.aaColorTexture || activeFramebuffer.colorTexture) : + this.drawingContext.getCurrentTexture(); + const colorAttachment = { - view: currentTexture.createView(), + view: colorTexture.createView(), loadOp: "load", storeOp: "store", + // If using multisampled texture, resolve to non-multisampled texture + resolveTarget: activeFramebuffer && activeFramebuffer.aaColorTexture ? + activeFramebuffer.colorTexture.createView() : undefined, }; - const depthTextureView = this.depthTexture?.createView(); + // Use framebuffer depth texture if active, otherwise use canvas depth texture + const depthTexture = activeFramebuffer ? 
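+      // When MSAA is on, the pass must render into the multisampled depth texture so
+      // the depth attachment's sample count matches the color attachment; otherwise
+      // fall back to the framebuffer's plain depth texture.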
+ (activeFramebuffer.aaDepthTexture || activeFramebuffer.depthTexture) : + this.depthTexture; + const depthTextureView = depthTexture?.createView(); const renderPassDescriptor = { colorAttachments: [colorAttachment], depthStencilAttachment: depthTextureView @@ -1155,6 +1200,14 @@ class RendererWebGPU extends Renderer3D { // Framebuffer methods ////////////////////////////////////////////// + defaultFramebufferAlpha() { + return true + } + + defaultFramebufferAntialias() { + return true; + } + supportsFramebufferAntialias() { return true; } @@ -1189,40 +1242,138 @@ class RendererWebGPU extends Renderer3D { } recreateFramebufferTextures(framebuffer) { + // Clean up existing textures if (framebuffer.colorTexture && framebuffer.colorTexture.destroy) { framebuffer.colorTexture.destroy(); } + if (framebuffer.aaColorTexture && framebuffer.aaColorTexture.destroy) { + framebuffer.aaColorTexture.destroy(); + } if (framebuffer.depthTexture && framebuffer.depthTexture.destroy) { framebuffer.depthTexture.destroy(); } + if (framebuffer.aaDepthTexture && framebuffer.aaDepthTexture.destroy) { + framebuffer.aaDepthTexture.destroy(); + } + // Clear cached views when recreating textures + framebuffer._colorTextureView = null; - const colorTextureDescriptor = { + const baseDescriptor = { size: { width: framebuffer.width * framebuffer.density, height: framebuffer.height * framebuffer.density, depthOrArrayLayers: 1, }, format: this._getWebGPUColorFormat(framebuffer), - usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_SRC, - sampleCount: framebuffer.antialias ? framebuffer.antialiasSamples : 1, }; + // Create non-multisampled texture for texture binding (always needed) + const colorTextureDescriptor = { + ...baseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_SRC, + sampleCount: 1, + }; framebuffer.colorTexture = this.device.createTexture(colorTextureDescriptor); + // Create multisampled texture for rendering if antialiasing is enabled + if (framebuffer.antialias) { + const aaColorTextureDescriptor = { + ...baseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT, + sampleCount: this._getValidSampleCount(framebuffer.antialiasSamples), + }; + framebuffer.aaColorTexture = this.device.createTexture(aaColorTextureDescriptor); + } + if (framebuffer.useDepth) { - const depthTextureDescriptor = { + const depthBaseDescriptor = { size: { width: framebuffer.width * framebuffer.density, height: framebuffer.height * framebuffer.density, depthOrArrayLayers: 1, }, format: this._getWebGPUDepthFormat(framebuffer), - usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING, - sampleCount: framebuffer.antialias ? 
framebuffer.antialiasSamples : 1, }; + // Create non-multisampled depth texture for texture binding (always needed) + const depthTextureDescriptor = { + ...depthBaseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING, + sampleCount: 1, + }; framebuffer.depthTexture = this.device.createTexture(depthTextureDescriptor); + + // Create multisampled depth texture for rendering if antialiasing is enabled + if (framebuffer.antialias) { + const aaDepthTextureDescriptor = { + ...depthBaseDescriptor, + usage: GPUTextureUsage.RENDER_ATTACHMENT, + sampleCount: this._getValidSampleCount(framebuffer.antialiasSamples), + }; + framebuffer.aaDepthTexture = this.device.createTexture(aaDepthTextureDescriptor); + } + } + + // Clear the framebuffer textures after creation + this._clearFramebufferTextures(framebuffer); + } + + _clearFramebufferTextures(framebuffer) { + const commandEncoder = this.device.createCommandEncoder(); + + // Clear the color texture (and multisampled texture if it exists) + const colorTexture = framebuffer.aaColorTexture || framebuffer.colorTexture; + const colorAttachment = { + view: colorTexture.createView(), + loadOp: "clear", + storeOp: "store", + clearValue: { r: 0, g: 0, b: 0, a: 0 }, + resolveTarget: framebuffer.aaColorTexture ? + framebuffer.colorTexture.createView() : undefined, + }; + + // Clear the depth texture if it exists + const depthTexture = framebuffer.aaDepthTexture || framebuffer.depthTexture; + const depthStencilAttachment = depthTexture ? { + view: depthTexture.createView(), + depthLoadOp: "clear", + depthStoreOp: "store", + depthClearValue: 1.0, + stencilLoadOp: "clear", + stencilStoreOp: "store", + depthReadOnly: false, + stencilReadOnly: false, + } : undefined; + + const renderPassDescriptor = { + colorAttachments: [colorAttachment], + depthStencilAttachment: depthStencilAttachment, + }; + + const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor); + passEncoder.end(); + + this.queue.submit([commandEncoder.finish()]); + } + + _getFramebufferColorTextureView(framebuffer) { + if (!framebuffer._colorTextureView && framebuffer.colorTexture) { + framebuffer._colorTextureView = framebuffer.colorTexture.createView(); } + return framebuffer._colorTextureView; + } + + createFramebufferTextureHandle(framebufferTexture) { + const src = framebufferTexture; + let renderer = this; + return { + get view() { + return renderer._getFramebufferColorTextureView(src.framebuffer); + }, + get gpuTexture() { + return src.framebuffer.colorTexture; + } + }; } _getWebGPUColorFormat(framebuffer) { @@ -1263,6 +1414,9 @@ class RendererWebGPU extends Renderer3D { if (framebuffer.depthTexture && framebuffer.depthTexture.destroy) { framebuffer.depthTexture.destroy(); } + if (framebuffer.aaDepthTexture && framebuffer.aaDepthTexture.destroy) { + framebuffer.aaDepthTexture.destroy(); + } } getFramebufferToBind(framebuffer) { @@ -1275,6 +1429,9 @@ class RendererWebGPU extends Renderer3D { bindFramebuffer(framebuffer) {} async readFramebufferPixels(framebuffer) { + // Ensure all pending GPU work is complete before reading pixels + await this.queue.onSubmittedWorkDone(); + const width = framebuffer.width * framebuffer.density; const height = framebuffer.height * framebuffer.density; const bytesPerPixel = 4; @@ -1284,8 +1441,8 @@ class RendererWebGPU extends Renderer3D { const commandEncoder = this.device.createCommandEncoder(); commandEncoder.copyTextureToBuffer( - { texture: framebuffer.colorTexture }, - { buffer: stagingBuffer, bytesPerRow: width 
* bytesPerPixel }, + { texture: framebuffer.colorTexture, origin: { x: 0, y: 0, z: 0 } }, + { buffer: stagingBuffer, bytesPerRow: width * bytesPerPixel, rowsPerImage: height }, { width, height, depthOrArrayLayers: 1 } ); @@ -1300,6 +1457,9 @@ class RendererWebGPU extends Renderer3D { } async readFramebufferPixel(framebuffer, x, y) { + // Ensure all pending GPU work is complete before reading pixels + await this.queue.onSubmittedWorkDone(); + const bytesPerPixel = 4; const stagingBuffer = this._ensurePixelReadBuffer(bytesPerPixel); @@ -1325,6 +1485,9 @@ class RendererWebGPU extends Renderer3D { } async readFramebufferRegion(framebuffer, x, y, w, h) { + // Ensure all pending GPU work is complete before reading pixels + await this.queue.onSubmittedWorkDone(); + const width = w * framebuffer.density; const height = h * framebuffer.density; const bytesPerPixel = 4; @@ -1391,6 +1554,9 @@ class RendererWebGPU extends Renderer3D { ////////////////////////////////////////////// async loadPixels() { + // Ensure all pending GPU work is complete before reading pixels + await this.queue.onSubmittedWorkDone(); + const width = this.width * this._pixelDensity; const height = this.height * this._pixelDensity; const bytesPerPixel = 4; @@ -1419,6 +1585,9 @@ class RendererWebGPU extends Renderer3D { } async _getPixel(x, y) { + // Ensure all pending GPU work is complete before reading pixels + await this.queue.onSubmittedWorkDone(); + const bytesPerPixel = 4; const stagingBuffer = this._ensurePixelReadBuffer(bytesPerPixel); @@ -1467,6 +1636,9 @@ class RendererWebGPU extends Renderer3D { // get(x,y,w,h) - region } + // Ensure all pending GPU work is complete before reading pixels + await this.queue.onSubmittedWorkDone(); + // Read region and create p5.Image const width = w * pd; const height = h * pd; @@ -1487,17 +1659,19 @@ class RendererWebGPU extends Renderer3D { ); this.device.queue.submit([commandEncoder.finish()]); + await this.queue.onSubmittedWorkDone(); await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); const pixelData = new Uint8Array(mappedRange.slice(0, bufferSize)); + console.log(pixelData) const region = new Image(width, height); region.pixelDensity(pd); - region.imageData = region.canvas.getContext('2d').createImageData(width, height); - region.imageData.data.set(pixelData); - region.pixels = region.imageData.data; - region.updatePixels(); + const ctx = region.canvas.getContext('2d'); + const imageData = ctx.createImageData(width, height); + imageData.data.set(pixelData); + ctx.putImageData(imageData, 0, 0); stagingBuffer.unmap(); return region; diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index 363626807a..9c0502ce39 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -19,7 +19,7 @@ visualSuite('WebGPU', function() { p5.circle(0, 0, 20); p5.pop(); } - screenshot(); + await screenshot(); }); visualTest('The stroke shader runs successfully', async function(p5, screenshot) { @@ -34,7 +34,7 @@ visualSuite('WebGPU', function() { p5.circle(0, 0, 20); p5.pop(); } - screenshot(); + await screenshot(); }); visualTest('The material shader runs successfully', async function(p5, screenshot) { @@ -54,7 +54,7 @@ visualSuite('WebGPU', function() { p5.sphere(10); p5.pop(); } - screenshot(); + await screenshot(); }); visualTest('Shader hooks can be used', async function(p5, screenshot) { @@ -80,7 +80,7 @@ visualSuite('WebGPU', function() { p5.stroke('white'); 
p5.strokeWeight(5); p5.circle(0, 0, 30); - screenshot(); + await screenshot(); }); visualTest('Textures in the material shader work', async function(p5, screenshot) { @@ -100,17 +100,17 @@ visualSuite('WebGPU', function() { p5.texture(tex); p5.plane(p5.width, p5.height); - screenshot(); + await screenshot(); }); }); visualSuite('Framebuffers', function() { visualTest('Basic framebuffer draw to canvas', async function(p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - + // Create a framebuffer const fbo = p5.createFramebuffer({ width: 25, height: 25 }); - + // Draw to the framebuffer fbo.draw(() => { p5.background(255, 0, 0); // Red background @@ -118,23 +118,23 @@ visualSuite('WebGPU', function() { p5.noStroke(); p5.circle(12.5, 12.5, 20); }); - + // Draw the framebuffer to the main canvas p5.background(0, 0, 255); // Blue background p5.texture(fbo); p5.noStroke(); p5.plane(25, 25); - - screenshot(); + + await screenshot(); }); visualTest('Framebuffer with different sizes', async function(p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - + // Create two different sized framebuffers const fbo1 = p5.createFramebuffer({ width: 20, height: 20 }); const fbo2 = p5.createFramebuffer({ width: 15, height: 15 }); - + // Draw to first framebuffer fbo1.draw(() => { p5.background(255, 100, 100); @@ -142,15 +142,15 @@ visualSuite('WebGPU', function() { p5.noStroke(); p5.rect(5, 5, 10, 10); }); - - // Draw to second framebuffer + + // Draw to second framebuffer fbo2.draw(() => { p5.background(100, 255, 100); p5.fill(255, 0, 255); p5.noStroke(); p5.circle(7.5, 7.5, 10); }); - + // Draw both to main canvas p5.background(50); p5.push(); @@ -159,23 +159,23 @@ visualSuite('WebGPU', function() { p5.noStroke(); p5.plane(20, 20); p5.pop(); - + p5.push(); p5.translate(12.5, 12.5); p5.texture(fbo2); p5.noStroke(); p5.plane(15, 15); p5.pop(); - - screenshot(); + + await screenshot(); }); visualTest('Auto-sized framebuffer', async function(p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - + // Create auto-sized framebuffer (should match canvas size) const fbo = p5.createFramebuffer(); - + // Draw to the framebuffer fbo.draw(() => { p5.background(0); @@ -193,24 +193,24 @@ visualSuite('WebGPU', function() { p5.noStroke(); p5.circle(25, 25, 15); }); - + // Draw the framebuffer to fill the main canvas p5.texture(fbo); p5.noStroke(); p5.plane(50, 50); - - screenshot(); + + await screenshot(); }); visualTest('Auto-sized framebuffer after canvas resize', async function(p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - + // Create auto-sized framebuffer const fbo = p5.createFramebuffer(); - + // Resize the canvas (framebuffer should auto-resize) p5.resizeCanvas(30, 30); - + // Draw to the framebuffer after resize fbo.draw(() => { p5.background(100, 0, 100); @@ -221,21 +221,21 @@ visualSuite('WebGPU', function() { p5.fill(255, 255, 0); p5.circle(15, 15, 10); }); - + // Draw the framebuffer to the main canvas p5.texture(fbo); p5.noStroke(); p5.plane(30, 30); - - screenshot(); + + await screenshot(); }); visualTest('Fixed-size framebuffer after manual resize', async function(p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - + // Create fixed-size framebuffer const fbo = p5.createFramebuffer({ width: 20, height: 20 }); - + // Draw initial content fbo.draw(() => { p5.background(255, 200, 100); @@ -243,10 +243,10 @@ visualSuite('WebGPU', function() { p5.noStroke(); p5.circle(10, 10, 15); }); - + // Manually resize the framebuffer fbo.resize(35, 25); - + // Draw new 
content to the resized framebuffer fbo.draw(() => { p5.background(200, 255, 100); @@ -257,14 +257,14 @@ visualSuite('WebGPU', function() { p5.fill(0, 0, 255); p5.circle(17.5, 12.5, 8); }); - + // Draw the resized framebuffer to the main canvas p5.background(50); p5.texture(fbo); p5.noStroke(); p5.plane(35, 25); - - screenshot(); + + await screenshot(); }); }); }); diff --git a/test/unit/visual/visualTest.js b/test/unit/visual/visualTest.js index 120ce79565..7d301d142b 100644 --- a/test/unit/visual/visualTest.js +++ b/test/unit/visual/visualTest.js @@ -89,43 +89,43 @@ export function visualSuite( /** * Image Diff Algorithm for p5.js Visual Tests - * + * * This algorithm addresses the challenge of cross-platform rendering differences in p5.js visual tests. * Different operating systems and browsers render graphics with subtle variations, particularly with * anti-aliasing, text rendering, and sub-pixel positioning. This can cause false negatives in tests * when the visual differences are acceptable rendering variations rather than actual bugs. - * + * * Key components of the approach: - * + * * 1. Initial pixel-by-pixel comparison: * - Uses pixelmatch to identify differences between expected and actual images * - Sets a moderate threshold (0.5) to filter out minor color/intensity variations * - Produces a diff image with red pixels marking differences - * + * * 2. Cluster identification using BFS (Breadth-First Search): * - Groups connected difference pixels into clusters * - Uses a queue-based BFS algorithm to find all connected pixels * - Defines connectivity based on 8-way adjacency (all surrounding pixels) - * + * * 3. Cluster categorization by type: * - Analyzes each pixel's neighborhood characteristics * - Specifically identifies "line shift" clusters - differences that likely represent * the same visual elements shifted by 1px due to platform rendering differences * - Line shifts are identified when >80% of pixels in a cluster have ≤2 neighboring diff pixels - * + * * 4. Intelligent failure criteria: * - Filters out clusters smaller than MIN_CLUSTER_SIZE pixels (noise reduction) * - Applies different thresholds for regular differences vs. line shifts * - Considers both the total number of significant pixels and number of distinct clusters - * - * This approach balances the need to catch genuine visual bugs (like changes to shape geometry, + * + * This approach balances the need to catch genuine visual bugs (like changes to shape geometry, * colors, or positioning) while tolerating acceptable cross-platform rendering variations. - * + * * Parameters: * - MIN_CLUSTER_SIZE: Minimum size for a cluster to be considered significant (default: 4) * - MAX_TOTAL_DIFF_PIXELS: Maximum allowed non-line-shift difference pixels (default: 40) * Note: These can be adjusted for further updation - * + * * Note for contributors: When running tests locally, you may not see these differences as they * mainly appear when tests run on different operating systems or browser rendering engines. 
* However, the same code may produce slightly different renderings on CI environments, particularly @@ -140,7 +140,7 @@ export async function checkMatch(actual, expected, p5) { if (narrow) { scale *= 2; } - + for (const img of [actual, expected]) { img.resize( Math.ceil(img.width * scale), @@ -151,28 +151,28 @@ export async function checkMatch(actual, expected, p5) { // Ensure both images have the same dimensions const width = expected.width; const height = expected.height; - + // Create canvases with background color const actualCanvas = p5.createGraphics(width, height); const expectedCanvas = p5.createGraphics(width, height); actualCanvas.pixelDensity(1); expectedCanvas.pixelDensity(1); - + actualCanvas.background(BG); expectedCanvas.background(BG); - + actualCanvas.image(actual, 0, 0); expectedCanvas.image(expected, 0, 0); - + // Load pixel data actualCanvas.loadPixels(); expectedCanvas.loadPixels(); - + // Create diff output canvas const diffCanvas = p5.createGraphics(width, height); diffCanvas.pixelDensity(1); diffCanvas.loadPixels(); - + // Run pixelmatch const diffCount = pixelmatch( actualCanvas.pixels, @@ -180,13 +180,13 @@ export async function checkMatch(actual, expected, p5) { diffCanvas.pixels, width, height, - { + { threshold: 0.5, includeAA: false, alpha: 0.1 } ); - + // If no differences, return early if (diffCount === 0) { actualCanvas.remove(); @@ -194,19 +194,19 @@ export async function checkMatch(actual, expected, p5) { diffCanvas.updatePixels(); return { ok: true, diff: diffCanvas }; } - + // Post-process to identify and filter out isolated differences const visited = new Set(); const clusterSizes = []; - + for (let y = 0; y < height; y++) { for (let x = 0; x < width; x++) { const pos = (y * width + x) * 4; - + // If this is a diff pixel (red in pixelmatch output) and not yet visited if ( - diffCanvas.pixels[pos] === 255 && - diffCanvas.pixels[pos + 1] === 0 && + diffCanvas.pixels[pos] === 255 && + diffCanvas.pixels[pos + 1] === 0 && diffCanvas.pixels[pos + 2] === 0 && !visited.has(pos) ) { @@ -216,37 +216,37 @@ export async function checkMatch(actual, expected, p5) { } } } - + // Define significance thresholds const MIN_CLUSTER_SIZE = 4; // Minimum pixels in a significant cluster const MAX_TOTAL_DIFF_PIXELS = 40; // Maximum total different pixels // Determine if the differences are significant const nonLineShiftClusters = clusterSizes.filter(c => !c.isLineShift && c.size >= MIN_CLUSTER_SIZE); - + // Calculate significant differences excluding line shifts const significantDiffPixels = nonLineShiftClusters.reduce((sum, c) => sum + c.size, 0); // Update the diff canvas diffCanvas.updatePixels(); - + // Clean up canvases actualCanvas.remove(); expectedCanvas.remove(); - + // Determine test result const ok = ( - diffCount === 0 || + diffCount === 0 || ( - significantDiffPixels === 0 || + significantDiffPixels === 0 || ( - (significantDiffPixels <= MAX_TOTAL_DIFF_PIXELS) && + (significantDiffPixels <= MAX_TOTAL_DIFF_PIXELS) && (nonLineShiftClusters.length <= 2) // Not too many significant clusters ) ) ); - return { + return { ok, diff: diffCanvas, details: { @@ -264,31 +264,31 @@ function findClusterSize(pixels, startX, startY, width, height, radius, visited) const queue = [{x: startX, y: startY}]; let size = 0; const clusterPixels = []; - + while (queue.length > 0) { const {x, y} = queue.shift(); const pos = (y * width + x) * 4; - + // Skip if already visited if (visited.has(pos)) continue; - + // Skip if not a diff pixel if (pixels[pos] !== 255 || pixels[pos + 1] !== 0 || 
pixels[pos + 2] !== 0) continue; - + // Mark as visited visited.add(pos); size++; clusterPixels.push({x, y}); - + // Add neighbors to queue for (let dy = -radius; dy <= radius; dy++) { for (let dx = -radius; dx <= radius; dx++) { const nx = x + dx; const ny = y + dy; - + // Skip if out of bounds if (nx < 0 || nx >= width || ny < 0 || ny >= height) continue; - + // Skip if already visited const npos = (ny * width + nx) * 4; if (!visited.has(npos)) { @@ -302,20 +302,20 @@ function findClusterSize(pixels, startX, startY, width, height, radius, visited) if (clusterPixels.length > 0) { // Count pixels with limited neighbors (line-like characteristic) let linelikePixels = 0; - + for (const {x, y} of clusterPixels) { // Count neighbors let neighbors = 0; for (let dy = -1; dy <= 1; dy++) { for (let dx = -1; dx <= 1; dx++) { if (dx === 0 && dy === 0) continue; // Skip self - + const nx = x + dx; const ny = y + dy; - + // Skip if out of bounds if (nx < 0 || nx >= width || ny < 0 || ny >= height) continue; - + const npos = (ny * width + nx) * 4; // Check if neighbor is a diff pixel if (pixels[npos] === 255 && pixels[npos + 1] === 0 && pixels[npos + 2] === 0) { @@ -323,13 +323,13 @@ function findClusterSize(pixels, startX, startY, width, height, radius, visited) } } } - + // Line-like pixels typically have 1-2 neighbors if (neighbors <= 2) { linelikePixels++; } } - + // If most pixels (>80%) in the cluster have ≤2 neighbors, it's likely a line shift isLineShift = linelikePixels / clusterPixels.length > 0.8; } @@ -407,8 +407,8 @@ export function visualTest( const actual = []; // Generate screenshots - await callback(myp5, () => { - const img = myp5.get(); + await callback(myp5, async () => { + const img = await myp5.get(); img.pixelDensity(1); actual.push(img); }); From 4fd6c19b68fee4632793ba5d23604165547f2eb7 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Mon, 28 Jul 2025 18:29:43 -0400 Subject: [PATCH 36/69] Add fixes for other gl-specific cases --- src/webgl/3d_primitives.js | 2 +- src/webgpu/p5.RendererWebGPU.js | 29 +- test/unit/visual/cases/webgpu.js | 424 ++++++++++++++++-------------- test/unit/webgl/p5.Framebuffer.js | 13 +- 4 files changed, 246 insertions(+), 222 deletions(-) diff --git a/src/webgl/3d_primitives.js b/src/webgl/3d_primitives.js index 8c29e3ea2d..386a64535d 100644 --- a/src/webgl/3d_primitives.js +++ b/src/webgl/3d_primitives.js @@ -1869,7 +1869,7 @@ function primitives3D(p5, fn){ if (typeof args[4] === 'undefined') { // Use the retained mode for drawing rectangle, // if args for rounding rectangle is not provided by user. - const perPixelLighting = this._pInst._glAttributes.perPixelLighting; + const perPixelLighting = this._pInst._glAttributes?.perPixelLighting; const detailX = args[4] || (perPixelLighting ? 1 : 24); const detailY = args[5] || (perPixelLighting ? 1 : 16); const gid = `rect|${detailX}|${detailY}`; diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 29ffcbf40d..cad16a1765 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -210,7 +210,7 @@ class RendererWebGPU extends Renderer3D { this._getWebGPUColorFormat(activeFramebuffer) : this.presentationFormat; - const requestedSampleCount = activeFramebuffer ? + const requestedSampleCount = activeFramebuffer ? (activeFramebuffer.antialias ? 
activeFramebuffer.antialiasSamples : 1) : (this.antialias || 1); const sampleCount = this._getValidSampleCount(requestedSampleCount); @@ -564,6 +564,12 @@ class RendererWebGPU extends Renderer3D { this._viewport = [0, 0, this.width, this.height]; } + _createPixelsArray() { + this.pixels = new Uint8Array( + this.width * this.pixelDensity() * this.height * this.pixelDensity() * 4 + ); + } + viewport() {} zClipRange() { @@ -605,25 +611,25 @@ class RendererWebGPU extends Renderer3D { if (!buffers) return; const commandEncoder = this.device.createCommandEncoder(); - + // Use framebuffer texture if active, otherwise use canvas texture const activeFramebuffer = this.activeFramebuffer(); - const colorTexture = activeFramebuffer ? - (activeFramebuffer.aaColorTexture || activeFramebuffer.colorTexture) : + const colorTexture = activeFramebuffer ? + (activeFramebuffer.aaColorTexture || activeFramebuffer.colorTexture) : this.drawingContext.getCurrentTexture(); - + const colorAttachment = { view: colorTexture.createView(), loadOp: "load", storeOp: "store", // If using multisampled texture, resolve to non-multisampled texture - resolveTarget: activeFramebuffer && activeFramebuffer.aaColorTexture ? + resolveTarget: activeFramebuffer && activeFramebuffer.aaColorTexture ? activeFramebuffer.colorTexture.createView() : undefined, }; // Use framebuffer depth texture if active, otherwise use canvas depth texture - const depthTexture = activeFramebuffer ? - (activeFramebuffer.aaDepthTexture || activeFramebuffer.depthTexture) : + const depthTexture = activeFramebuffer ? + (activeFramebuffer.aaDepthTexture || activeFramebuffer.depthTexture) : this.depthTexture; const depthTextureView = depthTexture?.createView(); const renderPassDescriptor = { @@ -1313,14 +1319,14 @@ class RendererWebGPU extends Renderer3D { framebuffer.aaDepthTexture = this.device.createTexture(aaDepthTextureDescriptor); } } - + // Clear the framebuffer textures after creation this._clearFramebufferTextures(framebuffer); } _clearFramebufferTextures(framebuffer) { const commandEncoder = this.device.createCommandEncoder(); - + // Clear the color texture (and multisampled texture if it exists) const colorTexture = framebuffer.aaColorTexture || framebuffer.colorTexture; const colorAttachment = { @@ -1328,7 +1334,7 @@ class RendererWebGPU extends Renderer3D { loadOp: "clear", storeOp: "store", clearValue: { r: 0, g: 0, b: 0, a: 0 }, - resolveTarget: framebuffer.aaColorTexture ? + resolveTarget: framebuffer.aaColorTexture ? 
framebuffer.colorTexture.createView() : undefined, }; @@ -1664,7 +1670,6 @@ class RendererWebGPU extends Renderer3D { await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); const pixelData = new Uint8Array(mappedRange.slice(0, bufferSize)); - console.log(pixelData) const region = new Image(width, height); region.pixelDensity(pd); diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index 9c0502ce39..334abc1be1 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -1,176 +1,194 @@ -import { vi } from 'vitest'; -import p5 from '../../../../src/app'; -import { visualSuite, visualTest } from '../visualTest'; -import rendererWebGPU from '../../../../src/webgpu/p5.RendererWebGPU'; +import { vi } from "vitest"; +import p5 from "../../../../src/app"; +import { visualSuite, visualTest } from "../visualTest"; +import rendererWebGPU from "../../../../src/webgpu/p5.RendererWebGPU"; p5.registerAddon(rendererWebGPU); -visualSuite('WebGPU', function() { - visualSuite('Shaders', function() { - visualTest('The color shader runs successfully', async function(p5, screenshot) { - await p5.createCanvas(50, 50, p5.WEBGPU); - p5.background('white'); - for (const [i, color] of ['red', 'lime', 'blue'].entries()) { - p5.push(); - p5.rotate(p5.TWO_PI * (i / 3)); - p5.fill(color); - p5.translate(15, 0); - p5.noStroke(); - p5.circle(0, 0, 20); - p5.pop(); - } - await screenshot(); - }); - - visualTest('The stroke shader runs successfully', async function(p5, screenshot) { - await p5.createCanvas(50, 50, p5.WEBGPU); - p5.background('white'); - for (const [i, color] of ['red', 'lime', 'blue'].entries()) { - p5.push(); - p5.rotate(p5.TWO_PI * (i / 3)); - p5.translate(15, 0); - p5.stroke(color); - p5.strokeWeight(2); - p5.circle(0, 0, 20); - p5.pop(); - } - await screenshot(); - }); - - visualTest('The material shader runs successfully', async function(p5, screenshot) { - await p5.createCanvas(50, 50, p5.WEBGPU); - p5.background('white'); - p5.ambientLight(50); - p5.directionalLight(100, 100, 100, 0, 1, -1); - p5.pointLight(155, 155, 155, 0, -200, 500); - p5.specularMaterial(255); - p5.shininess(300); - for (const [i, color] of ['red', 'lime', 'blue'].entries()) { - p5.push(); - p5.rotate(p5.TWO_PI * (i / 3)); - p5.fill(color); - p5.translate(15, 0); - p5.noStroke(); - p5.sphere(10); - p5.pop(); - } - await screenshot(); - }); +visualSuite("WebGPU", function () { + visualSuite("Shaders", function () { + visualTest( + "The color shader runs successfully", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background("white"); + for (const [i, color] of ["red", "lime", "blue"].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.fill(color); + p5.translate(15, 0); + p5.noStroke(); + p5.circle(0, 0, 20); + p5.pop(); + } + await screenshot(); + }, + ); + + visualTest( + "The stroke shader runs successfully", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background("white"); + for (const [i, color] of ["red", "lime", "blue"].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.translate(15, 0); + p5.stroke(color); + p5.strokeWeight(2); + p5.circle(0, 0, 20); + p5.pop(); + } + await screenshot(); + }, + ); + + visualTest( + "The material shader runs successfully", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + p5.background("white"); + p5.ambientLight(50); + 
p5.directionalLight(100, 100, 100, 0, 1, -1); + p5.pointLight(155, 155, 155, 0, -200, 500); + p5.specularMaterial(255); + p5.shininess(300); + for (const [i, color] of ["red", "lime", "blue"].entries()) { + p5.push(); + p5.rotate(p5.TWO_PI * (i / 3)); + p5.fill(color); + p5.translate(15, 0); + p5.noStroke(); + p5.sphere(10); + p5.pop(); + } + await screenshot(); + }, + ); - visualTest('Shader hooks can be used', async function(p5, screenshot) { + visualTest("Shader hooks can be used", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); const myFill = p5.baseMaterialShader().modify({ - 'Vertex getWorldInputs': `(inputs: Vertex) { + "Vertex getWorldInputs": `(inputs: Vertex) { var result = inputs; result.position.y += 10.0 * sin(inputs.position.x * 0.25); return result; }`, }); const myStroke = p5.baseStrokeShader().modify({ - 'StrokeVertex getWorldInputs': `(inputs: StrokeVertex) { + "StrokeVertex getWorldInputs": `(inputs: StrokeVertex) { var result = inputs; result.position.y += 10.0 * sin(inputs.position.x * 0.25); return result; }`, }); - p5.background('black'); + p5.background("black"); p5.shader(myFill); p5.strokeShader(myStroke); - p5.fill('red'); - p5.stroke('white'); + p5.fill("red"); + p5.stroke("white"); p5.strokeWeight(5); p5.circle(0, 0, 30); await screenshot(); }); - visualTest('Textures in the material shader work', async function(p5, screenshot) { - await p5.createCanvas(50, 50, p5.WEBGPU); - const tex = p5.createImage(50, 50); - tex.loadPixels(); - for (let x = 0; x < tex.width; x++) { - for (let y = 0; y < tex.height; y++) { - const off = (x + y * tex.width) * 4; - tex.pixels[off] = p5.round((x / tex.width) * 255); - tex.pixels[off + 1] = p5.round((y / tex.height) * 255); - tex.pixels[off + 2] = 0; - tex.pixels[off + 3] = 255; + visualTest( + "Textures in the material shader work", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + const tex = p5.createImage(50, 50); + tex.loadPixels(); + for (let x = 0; x < tex.width; x++) { + for (let y = 0; y < tex.height; y++) { + const off = (x + y * tex.width) * 4; + tex.pixels[off] = p5.round((x / tex.width) * 255); + tex.pixels[off + 1] = p5.round((y / tex.height) * 255); + tex.pixels[off + 2] = 0; + tex.pixels[off + 3] = 255; + } } - } - tex.updatePixels(); - p5.texture(tex); - p5.plane(p5.width, p5.height); + tex.updatePixels(); + p5.texture(tex); + p5.plane(p5.width, p5.height); - await screenshot(); - }); + await screenshot(); + }, + ); }); - visualSuite('Framebuffers', function() { - visualTest('Basic framebuffer draw to canvas', async function(p5, screenshot) { - await p5.createCanvas(50, 50, p5.WEBGPU); - - // Create a framebuffer - const fbo = p5.createFramebuffer({ width: 25, height: 25 }); - - // Draw to the framebuffer - fbo.draw(() => { - p5.background(255, 0, 0); // Red background - p5.fill(0, 255, 0); // Green circle + visualSuite("Framebuffers", function () { + visualTest( + "Basic framebuffer draw to canvas", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create a framebuffer + const fbo = p5.createFramebuffer({ width: 25, height: 25 }); + + // Draw to the framebuffer + fbo.draw(() => { + p5.background(255, 0, 0); // Red background + p5.fill(0, 255, 0); // Green circle + p5.noStroke(); + p5.circle(12.5, 12.5, 20); + }); + + // Draw the framebuffer to the main canvas + p5.background(0, 0, 255); // Blue background + p5.texture(fbo); p5.noStroke(); - p5.circle(12.5, 12.5, 20); - }); - - // Draw the framebuffer to the main 
canvas - p5.background(0, 0, 255); // Blue background - p5.texture(fbo); - p5.noStroke(); - p5.plane(25, 25); - - await screenshot(); - }); - - visualTest('Framebuffer with different sizes', async function(p5, screenshot) { - await p5.createCanvas(50, 50, p5.WEBGPU); - - // Create two different sized framebuffers - const fbo1 = p5.createFramebuffer({ width: 20, height: 20 }); - const fbo2 = p5.createFramebuffer({ width: 15, height: 15 }); - - // Draw to first framebuffer - fbo1.draw(() => { - p5.background(255, 100, 100); - p5.fill(255, 255, 0); + p5.plane(25, 25); + + await screenshot(); + }, + ); + + visualTest( + "Framebuffer with different sizes", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create two different sized framebuffers + const fbo1 = p5.createFramebuffer({ width: 20, height: 20 }); + const fbo2 = p5.createFramebuffer({ width: 15, height: 15 }); + + // Draw to first framebuffer + fbo1.draw(() => { + p5.background(255, 100, 100); + p5.fill(255, 255, 0); + p5.noStroke(); + p5.rect(5, 5, 10, 10); + }); + + // Draw to second framebuffer + fbo2.draw(() => { + p5.background(100, 255, 100); + p5.fill(255, 0, 255); + p5.noStroke(); + p5.circle(7.5, 7.5, 10); + }); + + // Draw both to main canvas + p5.background(50); + p5.push(); + p5.translate(-12.5, -12.5); + p5.texture(fbo1); p5.noStroke(); - p5.rect(5, 5, 10, 10); - }); + p5.plane(20, 20); + p5.pop(); - // Draw to second framebuffer - fbo2.draw(() => { - p5.background(100, 255, 100); - p5.fill(255, 0, 255); + p5.push(); + p5.translate(12.5, 12.5); + p5.texture(fbo2); p5.noStroke(); - p5.circle(7.5, 7.5, 10); - }); - - // Draw both to main canvas - p5.background(50); - p5.push(); - p5.translate(-12.5, -12.5); - p5.texture(fbo1); - p5.noStroke(); - p5.plane(20, 20); - p5.pop(); + p5.plane(15, 15); + p5.pop(); - p5.push(); - p5.translate(12.5, 12.5); - p5.texture(fbo2); - p5.noStroke(); - p5.plane(15, 15); - p5.pop(); + await screenshot(); + }, + ); - await screenshot(); - }); - - visualTest('Auto-sized framebuffer', async function(p5, screenshot) { + visualTest("Auto-sized framebuffer", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); // Create auto-sized framebuffer (should match canvas size) @@ -202,69 +220,75 @@ visualSuite('WebGPU', function() { await screenshot(); }); - visualTest('Auto-sized framebuffer after canvas resize', async function(p5, screenshot) { - await p5.createCanvas(50, 50, p5.WEBGPU); - - // Create auto-sized framebuffer - const fbo = p5.createFramebuffer(); - - // Resize the canvas (framebuffer should auto-resize) - p5.resizeCanvas(30, 30); - - // Draw to the framebuffer after resize - fbo.draw(() => { - p5.background(100, 0, 100); - p5.fill(0, 255, 255); + visualTest( + "Auto-sized framebuffer after canvas resize", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create auto-sized framebuffer + const fbo = p5.createFramebuffer(); + + // Resize the canvas (framebuffer should auto-resize) + p5.resizeCanvas(30, 30); + + // Draw to the framebuffer after resize + fbo.draw(() => { + p5.background(100, 0, 100); + p5.fill(0, 255, 255); + p5.noStroke(); + // Draw a shape that fills the new size + p5.rect(5, 5, 20, 20); + p5.fill(255, 255, 0); + p5.circle(15, 15, 10); + }); + + // Draw the framebuffer to the main canvas + p5.texture(fbo); p5.noStroke(); - // Draw a shape that fills the new size - p5.rect(5, 5, 20, 20); - p5.fill(255, 255, 0); - p5.circle(15, 15, 10); - }); - - // Draw the framebuffer to the main 
canvas - p5.texture(fbo); - p5.noStroke(); - p5.plane(30, 30); - - await screenshot(); - }); - - visualTest('Fixed-size framebuffer after manual resize', async function(p5, screenshot) { - await p5.createCanvas(50, 50, p5.WEBGPU); - - // Create fixed-size framebuffer - const fbo = p5.createFramebuffer({ width: 20, height: 20 }); - - // Draw initial content - fbo.draw(() => { - p5.background(255, 200, 100); - p5.fill(0, 100, 200); - p5.noStroke(); - p5.circle(10, 10, 15); - }); - - // Manually resize the framebuffer - fbo.resize(35, 25); - - // Draw new content to the resized framebuffer - fbo.draw(() => { - p5.background(200, 255, 100); - p5.fill(200, 0, 100); + p5.plane(30, 30); + + await screenshot(); + }, + ); + + visualTest( + "Fixed-size framebuffer after manual resize", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + + // Create fixed-size framebuffer + const fbo = p5.createFramebuffer({ width: 20, height: 20 }); + + // Draw initial content + fbo.draw(() => { + p5.background(255, 200, 100); + p5.fill(0, 100, 200); + p5.noStroke(); + p5.circle(10, 10, 15); + }); + + // Manually resize the framebuffer + fbo.resize(35, 25); + + // Draw new content to the resized framebuffer + fbo.draw(() => { + p5.background(200, 255, 100); + p5.fill(200, 0, 100); + p5.noStroke(); + // Draw content that uses the new size + p5.rect(5, 5, 25, 15); + p5.fill(0, 0, 255); + p5.circle(17.5, 12.5, 8); + }); + + // Draw the resized framebuffer to the main canvas + p5.background(50); + p5.texture(fbo); p5.noStroke(); - // Draw content that uses the new size - p5.rect(5, 5, 25, 15); - p5.fill(0, 0, 255); - p5.circle(17.5, 12.5, 8); - }); + p5.plane(35, 25); - // Draw the resized framebuffer to the main canvas - p5.background(50); - p5.texture(fbo); - p5.noStroke(); - p5.plane(35, 25); - - await screenshot(); - }); + await screenshot(); + }, + ); }); }); diff --git a/test/unit/webgl/p5.Framebuffer.js b/test/unit/webgl/p5.Framebuffer.js index f97cb6b57d..6a6d556351 100644 --- a/test/unit/webgl/p5.Framebuffer.js +++ b/test/unit/webgl/p5.Framebuffer.js @@ -461,7 +461,7 @@ suite('p5.Framebuffer', function() { } }); - test('get() creates a p5.Image with 1x pixel density', function() { + test('get() creates a p5.Image matching the source pixel density', function() { const mainCanvas = myp5.createCanvas(20, 20, myp5.WEBGL); myp5.pixelDensity(2); const fbo = myp5.createFramebuffer(); @@ -482,22 +482,17 @@ suite('p5.Framebuffer', function() { myp5.pop(); }); const img = fbo.get(); - const p2d = myp5.createGraphics(20, 20); - p2d.pixelDensity(1); myp5.image(fbo, -10, -10); - p2d.image(mainCanvas, 0, 0); fbo.loadPixels(); img.loadPixels(); - p2d.loadPixels(); expect(img.width).to.equal(fbo.width); expect(img.height).to.equal(fbo.height); - expect(img.pixels.length).to.equal(fbo.pixels.length / 4); - // The pixels should be approximately the same in the 1x image as when we - // draw the framebuffer onto a 1x canvas + expect(img.pixels.length).to.equal(fbo.pixels.length); + // The pixels should be approximately the same as the framebuffer's for (let i = 0; i < img.pixels.length; i++) { - expect(img.pixels[i]).to.be.closeTo(p2d.pixels[i], 2); + expect(img.pixels[i]).to.be.closeTo(fbo.pixels[i], 2); } }); }); From edce0299a0a6e89cbc867ad2b9d47f59df4aad57 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Mon, 28 Jul 2025 21:00:19 -0400 Subject: [PATCH 37/69] Fix canvas readback --- src/core/main.js | 4 +- src/webgpu/p5.RendererWebGPU.js | 253 ++++++++++++++++++++++++----- 
test/unit/webgpu/p5.Framebuffer.js | 35 ++-- 3 files changed, 235 insertions(+), 57 deletions(-) diff --git a/src/core/main.js b/src/core/main.js index a5c9a6c93d..f9fc1c6559 100644 --- a/src/core/main.js +++ b/src/core/main.js @@ -468,11 +468,11 @@ for (const k in constants) { * If `setup()` is declared `async` (e.g. `async function setup()`), * execution pauses at each `await` until its promise resolves. * For example, `font = await loadFont(...)` waits for the font asset - * to load because `loadFont()` function returns a promise, and the await + * to load because `loadFont()` function returns a promise, and the await * keyword means the program will wait for the promise to resolve. * This ensures that all assets are fully loaded before the sketch continues. - * + * * loading assets. * * Note: `setup()` doesn’t have to be declared, but it’s common practice to do so. diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index cad16a1765..383cf9f97f 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -23,6 +23,9 @@ class RendererWebGPU extends Renderer3D { // Single reusable staging buffer for pixel reading this.pixelReadBuffer = null; this.pixelReadBufferSize = 0; + + // Lazy readback texture for main canvas pixel reading + this.canvasReadbackTexture = null; } async setupContext() { @@ -62,6 +65,12 @@ class RendererWebGPU extends Renderer3D { format: this.depthFormat, usage: GPUTextureUsage.RENDER_ATTACHMENT, }); + + // Destroy existing readback texture when size changes + if (this.canvasReadbackTexture && this.canvasReadbackTexture.destroy) { + this.canvasReadbackTexture.destroy(); + this.canvasReadbackTexture = null; + } } clear(...args) { @@ -71,16 +80,28 @@ class RendererWebGPU extends Renderer3D { const _a = args[3] || 0; const commandEncoder = this.device.createCommandEncoder(); - const textureView = this.drawingContext.getCurrentTexture().createView(); + + // Use framebuffer texture if active, otherwise use canvas texture + const activeFramebuffer = this.activeFramebuffer(); + const colorTexture = activeFramebuffer ? + (activeFramebuffer.aaColorTexture || activeFramebuffer.colorTexture) : + this.drawingContext.getCurrentTexture(); const colorAttachment = { - view: textureView, + view: colorTexture.createView(), clearValue: { r: _r * _a, g: _g * _a, b: _b * _a, a: _a }, loadOp: 'clear', storeOp: 'store', + // If using multisampled texture, resolve to non-multisampled texture + resolveTarget: activeFramebuffer && activeFramebuffer.aaColorTexture ? + activeFramebuffer.colorTexture.createView() : undefined, }; - const depthTextureView = this.depthTexture?.createView(); + // Use framebuffer depth texture if active, otherwise use canvas depth texture + const depthTexture = activeFramebuffer ? + (activeFramebuffer.aaDepthTexture || activeFramebuffer.depthTexture) : + this.depthTexture; + const depthTextureView = depthTexture?.createView(); const depthAttachment = depthTextureView ? 
{ view: depthTextureView, @@ -1202,6 +1223,11 @@ class RendererWebGPU extends Renderer3D { return this.pixelReadBuffer; } + _alignBytesPerRow(bytesPerRow) { + // WebGPU requires bytesPerRow to be a multiple of 256 bytes for texture-to-buffer copies + return Math.ceil(bytesPerRow / 256) * 256; + } + ////////////////////////////////////////////// // Framebuffer methods ////////////////////////////////////////////// @@ -1435,31 +1461,56 @@ class RendererWebGPU extends Renderer3D { bindFramebuffer(framebuffer) {} async readFramebufferPixels(framebuffer) { - // Ensure all pending GPU work is complete before reading pixels - await this.queue.onSubmittedWorkDone(); - const width = framebuffer.width * framebuffer.density; const height = framebuffer.height * framebuffer.density; const bytesPerPixel = 4; - const bufferSize = width * height * bytesPerPixel; - - const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + const unalignedBytesPerRow = width * bytesPerPixel; + const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow); + const bufferSize = alignedBytesPerRow * height; + + // const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); + const stagingBuffer = this.device.createBuffer({ + size: bufferSize, + usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ, + }); const commandEncoder = this.device.createCommandEncoder(); commandEncoder.copyTextureToBuffer( - { texture: framebuffer.colorTexture, origin: { x: 0, y: 0, z: 0 } }, - { buffer: stagingBuffer, bytesPerRow: width * bytesPerPixel, rowsPerImage: height }, + { + texture: framebuffer.colorTexture, + origin: { x: 0, y: 0, z: 0 }, + mipLevel: 0, + aspect: 'all' + }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow, rowsPerImage: height }, { width, height, depthOrArrayLayers: 1 } ); this.device.queue.submit([commandEncoder.finish()]); + // Wait for the copy operation to complete + // await this.queue.onSubmittedWorkDone(); + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); - const result = new Uint8Array(mappedRange.slice(0, bufferSize)); - stagingBuffer.unmap(); - return result; + // If alignment was needed, extract the actual pixel data + if (alignedBytesPerRow === unalignedBytesPerRow) { + const result = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel)); + stagingBuffer.unmap(); + return result; + } else { + // Need to extract pixel data from aligned buffer + const result = new Uint8Array(width * height * bytesPerPixel); + const mappedData = new Uint8Array(mappedRange); + for (let y = 0; y < height; y++) { + const srcOffset = y * alignedBytesPerRow; + const dstOffset = y * unalignedBytesPerRow; + result.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset); + } + stagingBuffer.unmap(); + return result; + } } async readFramebufferPixel(framebuffer, x, y) { @@ -1467,7 +1518,10 @@ class RendererWebGPU extends Renderer3D { await this.queue.onSubmittedWorkDone(); const bytesPerPixel = 4; - const stagingBuffer = this._ensurePixelReadBuffer(bytesPerPixel); + const alignedBytesPerRow = this._alignBytesPerRow(bytesPerPixel); + const bufferSize = alignedBytesPerRow; + + const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); const commandEncoder = this.device.createCommandEncoder(); commandEncoder.copyTextureToBuffer( @@ -1475,14 +1529,14 @@ class RendererWebGPU extends Renderer3D { texture: framebuffer.colorTexture, origin: { x, y, z: 0 } }, - { buffer: stagingBuffer, 
bytesPerRow: bytesPerPixel }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow }, { width: 1, height: 1, depthOrArrayLayers: 1 } ); this.device.queue.submit([commandEncoder.finish()]); - await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bytesPerPixel); - const mappedRange = stagingBuffer.getMappedRange(0, bytesPerPixel); + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); const pixelData = new Uint8Array(mappedRange); const result = [pixelData[0], pixelData[1], pixelData[2], pixelData[3]]; @@ -1497,7 +1551,9 @@ class RendererWebGPU extends Renderer3D { const width = w * framebuffer.density; const height = h * framebuffer.density; const bytesPerPixel = 4; - const bufferSize = width * height * bytesPerPixel; + const unalignedBytesPerRow = width * bytesPerPixel; + const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow); + const bufferSize = alignedBytesPerRow * height; const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); @@ -1505,9 +1561,10 @@ class RendererWebGPU extends Renderer3D { commandEncoder.copyTextureToBuffer( { texture: framebuffer.colorTexture, + mipLevel: 0, origin: { x: x * framebuffer.density, y: y * framebuffer.density, z: 0 } }, - { buffer: stagingBuffer, bytesPerRow: width * bytesPerPixel }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow }, { width, height, depthOrArrayLayers: 1 } ); @@ -1515,7 +1572,20 @@ class RendererWebGPU extends Renderer3D { await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); - const pixelData = new Uint8Array(mappedRange.slice(0, bufferSize)); + + let pixelData; + if (alignedBytesPerRow === unalignedBytesPerRow) { + pixelData = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel)); + } else { + // Need to extract pixel data from aligned buffer + pixelData = new Uint8Array(width * height * bytesPerPixel); + const mappedData = new Uint8Array(mappedRange); + for (let y = 0; y < height; y++) { + const srcOffset = y * alignedBytesPerRow; + const dstOffset = y * unalignedBytesPerRow; + pixelData.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset); + } + } // WebGPU doesn't need vertical flipping unlike WebGL const region = new Image(width, height); @@ -1559,24 +1629,75 @@ class RendererWebGPU extends Renderer3D { // Main canvas pixel methods ////////////////////////////////////////////// - async loadPixels() { - // Ensure all pending GPU work is complete before reading pixels - await this.queue.onSubmittedWorkDone(); + _ensureCanvasReadbackTexture() { + if (!this.canvasReadbackTexture) { + const width = Math.ceil(this.width * this._pixelDensity); + const height = Math.ceil(this.height * this._pixelDensity); + + this.canvasReadbackTexture = this.device.createTexture({ + size: { width, height, depthOrArrayLayers: 1 }, + format: this.presentationFormat, + usage: GPUTextureUsage.COPY_DST | GPUTextureUsage.COPY_SRC, + }); + } + return this.canvasReadbackTexture; + } + + _copyCanvasToReadbackTexture() { + // Get the current canvas texture BEFORE any awaiting + const canvasTexture = this.drawingContext.getCurrentTexture(); + + // Ensure readback texture exists + const readbackTexture = this._ensureCanvasReadbackTexture(); + + // Copy canvas texture to readback texture immediately + const copyEncoder = this.device.createCommandEncoder(); + copyEncoder.copyTextureToTexture( + { texture: canvasTexture }, + { texture: 
readbackTexture }, + { + width: Math.ceil(this.width * this._pixelDensity), + height: Math.ceil(this.height * this._pixelDensity), + depthOrArrayLayers: 1 + } + ); + this.device.queue.submit([copyEncoder.finish()]); + + return readbackTexture; + } + + _convertBGRtoRGB(pixelData) { + // Convert BGR to RGB by swapping red and blue channels + for (let i = 0; i < pixelData.length; i += 4) { + const temp = pixelData[i]; // Store red + pixelData[i] = pixelData[i + 2]; // Red = Blue + pixelData[i + 2] = temp; // Blue = Red + // Green (i + 1) and Alpha (i + 3) stay the same + } + return pixelData; + } + async loadPixels() { const width = this.width * this._pixelDensity; const height = this.height * this._pixelDensity; + + // Copy canvas to readback texture + const readbackTexture = this._copyCanvasToReadbackTexture(); + + // Now we can safely await + await this.queue.onSubmittedWorkDone(); + const bytesPerPixel = 4; - const bufferSize = width * height * bytesPerPixel; + const unalignedBytesPerRow = width * bytesPerPixel; + const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow); + const bufferSize = alignedBytesPerRow * height; const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); - // Get the current canvas texture - const canvasTexture = this.drawingContext.getCurrentTexture(); - const commandEncoder = this.device.createCommandEncoder(); commandEncoder.copyTextureToBuffer( - { texture: canvasTexture }, - { buffer: stagingBuffer, bytesPerRow: width * bytesPerPixel }, + { texture: readbackTexture }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow }, { width, height, depthOrArrayLayers: 1 } ); @@ -1584,36 +1705,58 @@ class RendererWebGPU extends Renderer3D { await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); - this.pixels = new Uint8Array(mappedRange.slice(0, bufferSize)); + + if (alignedBytesPerRow === unalignedBytesPerRow) { + this.pixels = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel)); + } else { + // Need to extract pixel data from aligned buffer + this.pixels = new Uint8Array(width * height * bytesPerPixel); + const mappedData = new Uint8Array(mappedRange); + for (let y = 0; y < height; y++) { + const srcOffset = y * alignedBytesPerRow; + const dstOffset = y * unalignedBytesPerRow; + this.pixels.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset); + } + } + + // Convert BGR to RGB for main canvas + this._convertBGRtoRGB(this.pixels); stagingBuffer.unmap(); return this.pixels; } async _getPixel(x, y) { - // Ensure all pending GPU work is complete before reading pixels + // Copy canvas to readback texture + const readbackTexture = this._copyCanvasToReadbackTexture(); + + // Now we can safely await await this.queue.onSubmittedWorkDone(); const bytesPerPixel = 4; - const stagingBuffer = this._ensurePixelReadBuffer(bytesPerPixel); + const alignedBytesPerRow = this._alignBytesPerRow(bytesPerPixel); + const bufferSize = alignedBytesPerRow; + + const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); - const canvasTexture = this.drawingContext.getCurrentTexture(); const commandEncoder = this.device.createCommandEncoder(); commandEncoder.copyTextureToBuffer( { - texture: canvasTexture, + texture: readbackTexture, origin: { x, y, z: 0 } }, - { buffer: stagingBuffer, bytesPerRow: bytesPerPixel }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow }, { width: 1, height: 1, depthOrArrayLayers: 1 } ); 
this.device.queue.submit([commandEncoder.finish()]); - await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bytesPerPixel); - const mappedRange = stagingBuffer.getMappedRange(0, bytesPerPixel); + await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); + const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); const pixelData = new Uint8Array(mappedRange); - const result = [pixelData[0], pixelData[1], pixelData[2], pixelData[3]]; + + // Convert BGR to RGB for main canvas - swap red and blue + const result = [pixelData[2], pixelData[1], pixelData[0], pixelData[3]]; stagingBuffer.unmap(); return result; @@ -1642,25 +1785,29 @@ class RendererWebGPU extends Renderer3D { // get(x,y,w,h) - region } - // Ensure all pending GPU work is complete before reading pixels + // Copy canvas to readback texture + const readbackTexture = this._copyCanvasToReadbackTexture(); + + // Now we can safely await await this.queue.onSubmittedWorkDone(); // Read region and create p5.Image const width = w * pd; const height = h * pd; const bytesPerPixel = 4; - const bufferSize = width * height * bytesPerPixel; + const unalignedBytesPerRow = width * bytesPerPixel; + const alignedBytesPerRow = this._alignBytesPerRow(unalignedBytesPerRow); + const bufferSize = alignedBytesPerRow * height; const stagingBuffer = this._ensurePixelReadBuffer(bufferSize); - const canvasTexture = this.drawingContext.getCurrentTexture(); const commandEncoder = this.device.createCommandEncoder(); commandEncoder.copyTextureToBuffer( { - texture: canvasTexture, + texture: readbackTexture, origin: { x, y, z: 0 } }, - { buffer: stagingBuffer, bytesPerRow: width * bytesPerPixel }, + { buffer: stagingBuffer, bytesPerRow: alignedBytesPerRow }, { width, height, depthOrArrayLayers: 1 } ); @@ -1669,7 +1816,23 @@ class RendererWebGPU extends Renderer3D { await stagingBuffer.mapAsync(GPUMapMode.READ, 0, bufferSize); const mappedRange = stagingBuffer.getMappedRange(0, bufferSize); - const pixelData = new Uint8Array(mappedRange.slice(0, bufferSize)); + + let pixelData; + if (alignedBytesPerRow === unalignedBytesPerRow) { + pixelData = new Uint8Array(mappedRange.slice(0, width * height * bytesPerPixel)); + } else { + // Need to extract pixel data from aligned buffer + pixelData = new Uint8Array(width * height * bytesPerPixel); + const mappedData = new Uint8Array(mappedRange); + for (let y = 0; y < height; y++) { + const srcOffset = y * alignedBytesPerRow; + const dstOffset = y * unalignedBytesPerRow; + pixelData.set(mappedData.subarray(srcOffset, srcOffset + unalignedBytesPerRow), dstOffset); + } + } + + // Convert BGR to RGB for main canvas + this._convertBGRtoRGB(pixelData); const region = new Image(width, height); region.pixelDensity(pd); diff --git a/test/unit/webgpu/p5.Framebuffer.js b/test/unit/webgpu/p5.Framebuffer.js index 9fec2f070d..452585b6c8 100644 --- a/test/unit/webgpu/p5.Framebuffer.js +++ b/test/unit/webgpu/p5.Framebuffer.js @@ -1,4 +1,7 @@ import p5 from '../../../src/app.js'; +import rendererWebGPU from "../../../src/webgpu/p5.RendererWebGPU"; + +p5.registerAddon(rendererWebGPU); suite('WebGPU p5.Framebuffer', function() { let myp5; @@ -9,7 +12,6 @@ suite('WebGPU p5.Framebuffer', function() { window.devicePixelRatio = 1; myp5 = new p5(function(p) { p.setup = function() {}; - p.draw = function() {}; }); }); @@ -153,16 +155,26 @@ suite('WebGPU p5.Framebuffer', function() { await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = myp5.createFramebuffer(); - let drawCallbackExecuted = false; + myp5.background(0, 255, 0); + fbo.draw(() => 
{ - drawCallbackExecuted = true; - myp5.background(255, 0, 0); - myp5.fill(0, 255, 0); - myp5.noStroke(); - myp5.circle(5, 5, 8); + myp5.background(0, 0, 255); + // myp5.fill(0, 255, 0); }); - - expect(drawCallbackExecuted).to.equal(true); + await myp5.loadPixels(); + // Drawing should have gone to the framebuffer, leaving the main + // canvas the same + expect([...myp5.pixels.slice(0, 3)]).toEqual([0, 255, 0]); + await fbo.loadPixels(); + // The framebuffer should have content + expect([...fbo.pixels.slice(0, 3)]).toEqual([0, 0, 255]); + + // The content can be drawn back to the main canvas + myp5.imageMode(myp5.CENTER); + myp5.image(fbo, 0, 0); + await myp5.loadPixels(); + expect([...fbo.pixels.slice(0, 3)]).toEqual([0, 0, 255]); + expect([...myp5.pixels.slice(0, 3)]).toEqual([0, 0, 255]); }); test('can use framebuffer as texture', async function() { @@ -194,8 +206,9 @@ suite('WebGPU p5.Framebuffer', function() { expect(result).to.be.a('promise'); const pixels = await result; - expect(pixels).to.be.an('array'); + expect(pixels).toBeInstanceOf(Uint8Array); expect(pixels.length).to.equal(10 * 10 * 4); + expect([...pixels.slice(0, 4)]).toEqual([255, 0, 0, 255]); }); test('pixels property is set after loadPixels resolves', async function() { @@ -225,6 +238,7 @@ suite('WebGPU p5.Framebuffer', function() { const color = await result; expect(color).to.be.an('array'); expect(color).to.have.length(4); + expect([...color]).toEqual([100, 150, 200, 255]); }); test('get() returns a promise for region in WebGPU', async function() { @@ -242,6 +256,7 @@ suite('WebGPU p5.Framebuffer', function() { expect(region).to.be.an('object'); // Should be a p5.Image expect(region.width).to.equal(4); expect(region.height).to.equal(4); + expect([...region.pixels.slice(0, 4)]).toEqual([100, 150, 200, 255]); }); }); }); From 9fc319f996db258fbd6839ec5afb65682970d4f1 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 29 Jul 2025 11:32:18 -0400 Subject: [PATCH 38/69] Start adding tests --- test/unit/visual/cases/webgpu.js | 11 +++++++---- .../000.png | Bin 0 -> 132 bytes .../metadata.json | 3 +++ .../Framebuffers/Auto-sized framebuffer/000.png | Bin 0 -> 396 bytes .../Auto-sized framebuffer/metadata.json | 3 +++ .../Basic framebuffer draw to canvas/000.png | Bin 0 -> 521 bytes .../metadata.json | 3 +++ .../000.png | Bin 0 -> 291 bytes .../metadata.json | 3 +++ .../Framebuffer with different sizes/000.png | Bin 0 -> 402 bytes .../metadata.json | 3 +++ 11 files changed, 22 insertions(+), 4 deletions(-) create mode 100644 test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png create mode 100644 test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/metadata.json create mode 100644 test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/000.png create mode 100644 test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer/metadata.json create mode 100644 test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/000.png create mode 100644 test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/metadata.json create mode 100644 test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/000.png create mode 100644 test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/metadata.json create mode 100644 test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/000.png create mode 
100644 test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/metadata.json diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index 334abc1be1..5613d39091 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -130,7 +130,7 @@ visualSuite("WebGPU", function () { p5.background(255, 0, 0); // Red background p5.fill(0, 255, 0); // Green circle p5.noStroke(); - p5.circle(12.5, 12.5, 20); + p5.circle(0, 0, 20); }); // Draw the framebuffer to the main canvas @@ -157,7 +157,7 @@ visualSuite("WebGPU", function () { p5.background(255, 100, 100); p5.fill(255, 255, 0); p5.noStroke(); - p5.rect(5, 5, 10, 10); + p5.rect(-5, -5, 10, 10); }); // Draw to second framebuffer @@ -165,7 +165,7 @@ visualSuite("WebGPU", function () { p5.background(100, 255, 100); p5.fill(255, 0, 255); p5.noStroke(); - p5.circle(7.5, 7.5, 10); + p5.circle(0, 0, 10); }); // Draw both to main canvas @@ -197,6 +197,7 @@ visualSuite("WebGPU", function () { // Draw to the framebuffer fbo.draw(() => { p5.background(0); + p5.translate(-fbo.width / 2, -fbo.height / 2) p5.stroke(255); p5.strokeWeight(2); p5.noFill(); @@ -234,6 +235,7 @@ visualSuite("WebGPU", function () { // Draw to the framebuffer after resize fbo.draw(() => { p5.background(100, 0, 100); + p5.translate(-fbo.width / 2, -fbo.height / 2) p5.fill(0, 255, 255); p5.noStroke(); // Draw a shape that fills the new size @@ -264,7 +266,7 @@ visualSuite("WebGPU", function () { p5.background(255, 200, 100); p5.fill(0, 100, 200); p5.noStroke(); - p5.circle(10, 10, 15); + p5.circle(0, 0, 15); }); // Manually resize the framebuffer @@ -273,6 +275,7 @@ visualSuite("WebGPU", function () { // Draw new content to the resized framebuffer fbo.draw(() => { p5.background(200, 255, 100); + p5.translate(-fbo.width / 2, -fbo.height / 2) p5.fill(200, 0, 100); p5.noStroke(); // Draw content that uses the new size diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png new file mode 100644 index 0000000000000000000000000000000000000000..972571631e30265fef58735d0666a400049f596e GIT binary patch literal 132 zcmeAS@N?(olHy`uVBq!ia0vp^av;pX1|+Qw)-3{3jKx9jP7LeL$-D$|>^xl@Lp;3S zUf#&dV8Fp*c%we^*B5@*GYkrm?$Px$MoC0LRA@u(Spg1%FbIU7xtBR#Hh;!OQ;k_{hm~q|XJWEMHaWQC(6NXJGgIym zk#M=yxR4uwthDf~&Y>eIJ)t9s%2*2nYJpm@0#?dc<7R4Q-S8q8gCSld*KC?__O-O~B3B zp9{*+kCzz~sWmdwBU6=AG!hoJX>BtCQNX$PH-`fW-6#a29np!lXXSC5T6=)#6w~hm zNUVU1*wyr2cc50lPza@v9p^N*`NR894i`oqb7#+|sC?(bZZSYdw!F`G3hZ zzE)v!ly>ov=?2!_$UDw$MF9|}|1Qyyl<2&BKD1+58M<1l`k q9lxMfp*XGFx5nL1BP|--)|}p3hPVXA^49VI0000Px$!%0LzRA@u(m=DduFc8Jx9N+*PfCF#<4wwUQKyUyKh68W_4!{98z`@8%v#@;M zn(pu2SniT$BxP%V_x7&sA|OpMfQSsFXDXT^Jre|`h$+%h2JF@9D6zF4Yl_rPT}L-o zRwQM`?l>Y;-~Jg$NR;;o$hXlOm^=YqL!qt+Cw`x~_8J$1Odr7F>Y#j~3i6x)=M`{S zIuJq?1px>$zxjdGKnQV)fn*KvcbOnid;-?Ve|1zq>Of?mJ{T2{BnI}o@=E+wi!hrb z5GUV4APzz`2tk-8LC@^=&NB>gkfv&6@GQv$*&Y-?Se2;8VcSHD@4bW|45367VTmOO zOKuxMj}U~O5e2y}90;#%#OmxoX7?Wl$s1l&k+xqgHAMzvcKU)9i$3w~tIB$Z2B$x0 zc$IaeEyL$4mI?@!;G%fkG~1%&)ldQ9po7^o)`jWaHa;^nm7!;Db9eQfOSlE$qPM&N zEV)G|MbUM{SCKNH$JD{lN1S}t#xDoL<~zUl0we<@KoQ7Ke0(UoZiN0PqDW{GTK$zj z&}mIHG_HY!G9=w^ib!WXG@dCE%8+!wDI%Tm(0HauC_~cygHhxIAOktx0yLiA00000 LNkvXXu0mjfpm^EI literal 0 HcmV?d00001 diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to 
canvas/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Basic framebuffer draw to canvas/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/000.png new file mode 100644 index 0000000000000000000000000000000000000000..1fb817b6b53c94fe1a9aa8d3412e4fc2a846cb6d GIT binary patch literal 291 zcmV+;0o?wHP)Px#-AP12RA@u(n9&V_Fc^e?R;mNwJT8GQ;XF8iuB7m2gf`_kLz<6QdRP9tuL;nl zl-|+6)iKDyTBMPK6%sT;Bc=pE%M|QpTN>FMok^H&3>r&m_QAoZDQ3L2U7 zYx|6h5%JJ4DXRO3IDfC&AFbQ8!L|4(+Jf1CV36Ms7taWdRL}-mD`A0F1x1TXK?}50 zhF0{T1zMn$Pv9?Sq?y-v6HO)C{l7;_JDTn@NK@bJCQ2-}{QI@UDwxuU5S~R!BbHbN pQyLM%vq)*g602ZJBSLr<=?C8Wx=p=J5DWkS002ovPDHLkV1j}$d4K=_ literal 0 HcmV?d00001 diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Fixed-size framebuffer after manual resize/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/000.png new file mode 100644 index 0000000000000000000000000000000000000000..155638a0c818aa5432045a7fc8a9c574122ea8e3 GIT binary patch literal 402 zcmV;D0d4+?P)Px$Oi4sRRA@u(n9UJ_Fc5{8GHk%yV*zRb&;=cESMPRU7f^s&0J+BoC<7;qGV;4l zjIzY##*k$4?R#&@B#=^;;K37k2a-$aRF{=fPS?X0s}*68fl{#|n1=?w|B1Ck0kACa z8r>EEdeIn1%UYoVh~DTvsRYQHU5`O<{d-4@XM`02U~93p9;e^lBMN7PGh&$#fbsV0 z&7a-6KGdwmAgqYB2mHHyur39WiL3|_A?j_gBCN=lD!|tIw6HR_ziU>j?#<3*ig*tq zE&{|=RD!3wv{!i2ibRJ9!hP7;c(%_vKx9FNAf8lh7_uOz;Dq*$xQy#DBtbAWp1VJu zkDxcYdYv`MtG!5qm@buU6VzJXfRN9Kkpj!fY`WGM%&h(+uK_XYm`AU5-KE+AA{llx wv^Nd9TV*yT%r+73jhEB07*qoM6N<$f)?JfjsO4v literal 0 HcmV?d00001 diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/metadata.json b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/metadata.json new file mode 100644 index 0000000000..2d4bfe30da --- /dev/null +++ b/test/unit/visual/screenshots/WebGPU/Framebuffers/Framebuffer with different sizes/metadata.json @@ -0,0 +1,3 @@ +{ + "numScreenshots": 1 +} \ No newline at end of file From 76c010898d0623ae5da5e7b383df936698165d82 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 29 Jul 2025 16:03:34 -0400 Subject: [PATCH 39/69] Add another test --- test/unit/visual/cases/webgpu.js | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index 5613d39091..a57caa1ff1 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -116,6 +116,23 @@ visualSuite("WebGPU", function () { ); }); + visualSuite("Canvas Resizing", function () { + visualTest( + "Main canvas drawing after resize", + async function (p5, screenshot) { + await p5.createCanvas(50, 50, p5.WEBGPU); + // Resize the canvas + p5.resizeCanvas(30, 30); + // Draw to the main canvas after resize + p5.background(100, 0, 100); + p5.fill(0, 255, 255); + p5.noStroke(); 
+ p5.circle(0, 0, 20); + await screenshot(); + }, + ); + }); + visualSuite("Framebuffers", function () { visualTest( "Basic framebuffer draw to canvas", @@ -251,6 +268,7 @@ visualSuite("WebGPU", function () { await screenshot(); }, + { focus: true } ); visualTest( From a3daa14cb606b36a67cbb1436cd49043f9be0ad2 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 29 Jul 2025 16:13:43 -0400 Subject: [PATCH 40/69] Fix main canvas not being drawable after resizing --- src/webgpu/p5.RendererWebGPU.js | 11 ++++++----- test/unit/visual/cases/webgpu.js | 1 - .../000.png | Bin 132 -> 167 bytes 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 383cf9f97f..f85fd4607b 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -66,6 +66,9 @@ class RendererWebGPU extends Renderer3D { usage: GPUTextureUsage.RENDER_ATTACHMENT, }); + // Clear the main canvas after resize + this.clear(); + // Destroy existing readback texture when size changes if (this.canvasReadbackTexture && this.canvasReadbackTexture.destroy) { this.canvasReadbackTexture.destroy(); @@ -1287,8 +1290,6 @@ class RendererWebGPU extends Renderer3D { if (framebuffer.aaDepthTexture && framebuffer.aaDepthTexture.destroy) { framebuffer.aaDepthTexture.destroy(); } - // Clear cached views when recreating textures - framebuffer._colorTextureView = null; const baseDescriptor = { size: { @@ -1389,10 +1390,10 @@ class RendererWebGPU extends Renderer3D { } _getFramebufferColorTextureView(framebuffer) { - if (!framebuffer._colorTextureView && framebuffer.colorTexture) { - framebuffer._colorTextureView = framebuffer.colorTexture.createView(); + if (framebuffer.colorTexture) { + return framebuffer.colorTexture.createView(); } - return framebuffer._colorTextureView; + return null; } createFramebufferTextureHandle(framebufferTexture) { diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index a57caa1ff1..28382dda25 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -268,7 +268,6 @@ visualSuite("WebGPU", function () { await screenshot(); }, - { focus: true } ); visualTest( diff --git a/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png b/test/unit/visual/screenshots/WebGPU/Framebuffers/Auto-sized framebuffer after canvas resize/000.png index 972571631e30265fef58735d0666a400049f596e..01be2eb74e88adf3364c6690d614b349cfa91f03 100644 GIT binary patch delta 125 zcmV-@0D}L70jB|wF?L}|L_t(YOJhu7Ncqn&0Dy7SVtR%8;0o$F|7TOx<0*`(80KO1 z@up(Xm%RSPNaswXaO=>fWXttXaPcM_CZ%qbatZDB4YFpu2v>7 fE~Zq?$n!A(ky>$a7wkmT00000NkvXXu0mjfc*i*f delta 90 zcmV-g0Hyz@0fYgNF;hNCL_t(YOYPIK4FE6*1ToluY5MdJMa%#oSx48=^wHgNcugKP w>X?AIVzlpK)TmDwV{wTqCh%We1L^kwA6Dmx82|tP07*qoM6N<$g69P$<^TWy From 01110c9f77923fd1b704b1a87efdd3152fed0567 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 29 Jul 2025 16:13:59 -0400 Subject: [PATCH 41/69] Add screenshots --- .../Main canvas drawing after resize/000.png | Bin 0 -> 225 bytes .../metadata.json | 3 +++ 2 files changed, 3 insertions(+) create mode 100644 test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/000.png create mode 100644 test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/metadata.json diff --git a/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas drawing after resize/000.png b/test/unit/visual/screenshots/WebGPU/Canvas Resizing/Main canvas 
drawing after resize/000.png new file mode 100644 index 0000000000000000000000000000000000000000..96849ce04c21325da234ba7192bc8c1bc63bce67 GIT binary patch literal 225 zcmeAS@N?(olHy`uVBq!ia0vp^av;pX1|+Qw)-3{3jKx9jP7LeL$-D$|W_!9ghIn|t zofgfFo`syt)gx-5OIFIp(nbZ(PLshJD(tw9jF0d>^YO=xU_ Date: Tue, 29 Jul 2025 18:06:51 -0400 Subject: [PATCH 42/69] Try setting different launch options --- vitest.workspace.mjs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 7dfe0e6e82..e11dd11c53 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -38,7 +38,15 @@ export default defineWorkspace([ enabled: true, name: 'chrome', provider: 'webdriverio', - screenshotFailures: false + screenshotFailures: false, + launchOptions: { + args: [ + '--enable-unsafe-webgpu', + '--headless=new', + '--disable-gpu-sandbox', + '--no-sandbox', + ], + }, } } } From 02eef85af474bae627f5d4d8492b0db648b9da19 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 29 Jul 2025 18:13:01 -0400 Subject: [PATCH 43/69] Test different options --- vitest.workspace.mjs | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index e11dd11c53..636e7a8db3 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -39,14 +39,17 @@ export default defineWorkspace([ name: 'chrome', provider: 'webdriverio', screenshotFailures: false, - launchOptions: { - args: [ - '--enable-unsafe-webgpu', - '--headless=new', - '--disable-gpu-sandbox', - '--no-sandbox', - ], - }, + providerOptions: { + capabilities: { + 'goog:chromeOptions': { + args: [ + '--enable-unsafe-webgpu', + '--enable-features=Vulkan', + '--disable-vulkan-fallback-to-gl-for-testing' + ] + } + } + } } } } From 3c6c19506f1543a7f07fa8abd77d586a2ca3e700 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 29 Jul 2025 18:31:00 -0400 Subject: [PATCH 44/69] Try sequential --- test/unit/visual/cases/webgpu.js | 2 +- test/unit/visual/visualTest.js | 5 ++++- test/unit/webgpu/p5.Framebuffer.js | 3 ++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index 28382dda25..dde49a9d16 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -311,4 +311,4 @@ visualSuite("WebGPU", function () { }, ); }); -}); +}, { sequential: true }); diff --git a/test/unit/visual/visualTest.js b/test/unit/visual/visualTest.js index 7d301d142b..7841c5a84d 100644 --- a/test/unit/visual/visualTest.js +++ b/test/unit/visual/visualTest.js @@ -52,7 +52,7 @@ let shiftThreshold = 2; export function visualSuite( name, callback, - { focus = false, skip = false, shiftThreshold: newShiftThreshold } = {} + { focus = false, skip = false, sequential = false, shiftThreshold: newShiftThreshold } = {} ) { let suiteFn = describe; if (focus) { @@ -61,6 +61,9 @@ export function visualSuite( if (skip) { suiteFn = suiteFn.skip; } + if (sequential) { + suiteFn = suiteFn.sequential; + } suiteFn(name, () => { let lastShiftThreshold let lastPrefix; diff --git a/test/unit/webgpu/p5.Framebuffer.js b/test/unit/webgpu/p5.Framebuffer.js index 452585b6c8..08789e92d1 100644 --- a/test/unit/webgpu/p5.Framebuffer.js +++ b/test/unit/webgpu/p5.Framebuffer.js @@ -1,9 +1,10 @@ +import describe from '../../../src/accessibility/describe.js'; import p5 from '../../../src/app.js'; import rendererWebGPU from "../../../src/webgpu/p5.RendererWebGPU"; p5.registerAddon(rendererWebGPU); 
-suite('WebGPU p5.Framebuffer', function() { +suite.sequential('WebGPU p5.Framebuffer', function() { let myp5; let prevPixelRatio; From e4509570a43e8744ab8ab17e68fc6fd464eed227 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 29 Jul 2025 19:15:43 -0400 Subject: [PATCH 45/69] Attempt to install later chrome --- .github/workflows/ci-test.yml | 9 +++++++++ test/unit/visual/cases/webgpu.js | 2 +- test/unit/visual/visualTest.js | 5 +---- test/unit/webgpu/p5.Framebuffer.js | 3 +-- vitest.workspace.mjs | 10 +++++++--- 5 files changed, 19 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index bd3ca55ba0..31d8e36566 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -19,6 +19,15 @@ jobs: uses: actions/setup-node@v1 with: node-version: 20.x + - name: Install Chrome (latest stable) + run: | + sudo apt-get update + sudo apt-get install -y wget gnupg + wget -q -O - https://dl.google.com/linux/linux_signing_key.pub | sudo apt-key add - + sudo sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list' + sudo apt-get update + sudo apt-get install -y google-chrome-stable + which google-chrome - name: Get node modules run: npm ci env: diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index dde49a9d16..28382dda25 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -311,4 +311,4 @@ visualSuite("WebGPU", function () { }, ); }); -}, { sequential: true }); +}); diff --git a/test/unit/visual/visualTest.js b/test/unit/visual/visualTest.js index 7841c5a84d..7d301d142b 100644 --- a/test/unit/visual/visualTest.js +++ b/test/unit/visual/visualTest.js @@ -52,7 +52,7 @@ let shiftThreshold = 2; export function visualSuite( name, callback, - { focus = false, skip = false, sequential = false, shiftThreshold: newShiftThreshold } = {} + { focus = false, skip = false, shiftThreshold: newShiftThreshold } = {} ) { let suiteFn = describe; if (focus) { @@ -61,9 +61,6 @@ export function visualSuite( if (skip) { suiteFn = suiteFn.skip; } - if (sequential) { - suiteFn = suiteFn.sequential; - } suiteFn(name, () => { let lastShiftThreshold let lastPrefix; diff --git a/test/unit/webgpu/p5.Framebuffer.js b/test/unit/webgpu/p5.Framebuffer.js index 08789e92d1..452585b6c8 100644 --- a/test/unit/webgpu/p5.Framebuffer.js +++ b/test/unit/webgpu/p5.Framebuffer.js @@ -1,10 +1,9 @@ -import describe from '../../../src/accessibility/describe.js'; import p5 from '../../../src/app.js'; import rendererWebGPU from "../../../src/webgpu/p5.RendererWebGPU"; p5.registerAddon(rendererWebGPU); -suite.sequential('WebGPU p5.Framebuffer', function() { +suite('WebGPU p5.Framebuffer', function() { let myp5; let prevPixelRatio; diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 636e7a8db3..611754fcdc 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -40,15 +40,19 @@ export default defineWorkspace([ provider: 'webdriverio', screenshotFailures: false, providerOptions: { - capabilities: { + capabilities: process.env.CI ? 
{ 'goog:chromeOptions': { + binary: '/usr/bin/google-chrome', args: [ '--enable-unsafe-webgpu', + '--disable-dawn-features=disallow_unsafe_apis', + '--use-angle=default', '--enable-features=Vulkan', - '--disable-vulkan-fallback-to-gl-for-testing' + '--no-sandbox', + '--disable-dev-shm-usage', ] } - } + } : undefined } } } From f44629bac26cbc86c0eb04aca1549f19056da40e Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 29 Jul 2025 19:23:01 -0400 Subject: [PATCH 46/69] Add some debug info --- .github/workflows/ci-test.yml | 1 + vitest.workspace.mjs | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 31d8e36566..6a1bd0b549 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -28,6 +28,7 @@ jobs: sudo apt-get update sudo apt-get install -y google-chrome-stable which google-chrome + google-chrome --version - name: Get node modules run: npm ci env: diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 611754fcdc..2774fe1286 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -1,5 +1,6 @@ import { defineWorkspace } from 'vitest/config'; import vitePluginString from 'vite-plugin-string'; +console.log(`CI: ${process.env.CI}`) const plugins = [ vitePluginString({ From b281d332b5acfecacc32b3372f0929e2dc7ea226 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 29 Jul 2025 19:39:48 -0400 Subject: [PATCH 47/69] Try different flags --- vitest.workspace.mjs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 2774fe1286..1aa54097e5 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -48,9 +48,10 @@ export default defineWorkspace([ '--enable-unsafe-webgpu', '--disable-dawn-features=disallow_unsafe_apis', '--use-angle=default', - '--enable-features=Vulkan', + '--enable-features=Vulkan,SharedArrayBuffer', '--no-sandbox', '--disable-dev-shm-usage', + '--headless=new', ] } } : undefined From 777334131df1ad9e122aaf809911909e71e91175 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Tue, 29 Jul 2025 19:47:19 -0400 Subject: [PATCH 48/69] Does it work in xvfb? 
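
The underlying question is whether the headless browser under xvfb exposes a WebGPU adapter at all; if it does not, every suite fails at createCanvas regardless of flags. A rough availability probe, illustrative only and not wired into the tests in this patch:

async function probeWebGPU() {
  // navigator.gpu is undefined when WebGPU is disabled entirely; requestAdapter()
  // resolves to null when neither a hardware nor a software adapter is usable.
  const adapter = navigator.gpu ? await navigator.gpu.requestAdapter() : null;
  if (!adapter) {
    console.warn('No WebGPU adapter available in this environment');
  }
  return adapter;
}
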
--- .github/workflows/ci-test.yml | 2 +- vitest.workspace.mjs | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 6a1bd0b549..67684ad745 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -34,7 +34,7 @@ jobs: env: CI: true - name: build and test - run: npm test + run: xvfb-run --auto-servernum --server-args='-screen 0 1920x1080x24' npm test env: CI: true - name: report test coverage diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 1aa54097e5..9540289d24 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -46,12 +46,11 @@ export default defineWorkspace([ binary: '/usr/bin/google-chrome', args: [ '--enable-unsafe-webgpu', - '--disable-dawn-features=disallow_unsafe_apis', - '--use-angle=default', '--enable-features=Vulkan,SharedArrayBuffer', + '--disable-dawn-features=disallow_unsafe_apis', + '--disable-gpu-sandbox', '--no-sandbox', - '--disable-dev-shm-usage', - '--headless=new', + '--disable-dev-shm-usage' ] } } : undefined From cfeac932d62f32ea7cca9c88c52532d4a8432807 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Thu, 31 Jul 2025 20:42:15 -0400 Subject: [PATCH 49/69] Try enabling swiftshader --- vitest.workspace.mjs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 9540289d24..d39212ef8e 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -45,12 +45,14 @@ export default defineWorkspace([ 'goog:chromeOptions': { binary: '/usr/bin/google-chrome', args: [ - '--enable-unsafe-webgpu', - '--enable-features=Vulkan,SharedArrayBuffer', '--disable-dawn-features=disallow_unsafe_apis', '--disable-gpu-sandbox', '--no-sandbox', - '--disable-dev-shm-usage' + '--disable-dev-shm-usage', + + '--enable-unsafe-webgpu', + '--use-angle=swiftshader', + '--enable-features=ReduceOpsTaskSplitting,Vulkan,VulkanFromANGLE,DefaultANGLEVulkan', ] } } : undefined From af5194a25a34ea21d2ba24c5b64ba99149cee4f4 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Thu, 31 Jul 2025 20:46:35 -0400 Subject: [PATCH 50/69] less flags --- vitest.workspace.mjs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index d39212ef8e..7fadc2434e 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -45,11 +45,6 @@ export default defineWorkspace([ 'goog:chromeOptions': { binary: '/usr/bin/google-chrome', args: [ - '--disable-dawn-features=disallow_unsafe_apis', - '--disable-gpu-sandbox', - '--no-sandbox', - '--disable-dev-shm-usage', - '--enable-unsafe-webgpu', '--use-angle=swiftshader', '--enable-features=ReduceOpsTaskSplitting,Vulkan,VulkanFromANGLE,DefaultANGLEVulkan', From 88b4fe4d31f604be349493cbb569f10b40b9d569 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Thu, 31 Jul 2025 20:53:42 -0400 Subject: [PATCH 51/69] Try disabling dawn --- vitest.workspace.mjs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 7fadc2434e..4220f9aa26 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -46,8 +46,13 @@ export default defineWorkspace([ binary: '/usr/bin/google-chrome', args: [ '--enable-unsafe-webgpu', - '--use-angle=swiftshader', - '--enable-features=ReduceOpsTaskSplitting,Vulkan,VulkanFromANGLE,DefaultANGLEVulkan', + '--enable-features=Vulkan', + '--use-cmd-decoder=passthrough', + '--disable-gpu-sandbox', + '--disable-software-rasterizer=false', + 
'--disable-dawn-features=disallow_unsafe_apis', + '--use-angle=vulkan', + '--use-vulkan=swiftshader', ] } } : undefined From ff83226f86cd39817fea848732fc4bcb46cac391 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Fri, 1 Aug 2025 08:05:42 -0400 Subject: [PATCH 52/69] Try installing swiftshader? --- .github/workflows/ci-test.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 67684ad745..4e4c749ca1 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -19,6 +19,17 @@ jobs: uses: actions/setup-node@v1 with: node-version: 20.x + - name: Install Vulkan SwiftShader + run: | + sudo apt-get update + sudo apt-get install -y libvulkan1 vulkan-tools mesa-vulkan-drivers + mkdir -p $HOME/swiftshader + curl -L https://github.com/google/swiftshader/releases/download/latest/Linux.zip -o swiftshader.zip + unzip swiftshader.zip -d $HOME/swiftshader + export VK_ICD_FILENAMES=$HOME/swiftshader/Linux/vk_swiftshader_icd.json + export VK_LAYER_PATH=$HOME/swiftshader/Linux + echo "VK_ICD_FILENAMES=$VK_ICD_FILENAMES" >> $GITHUB_ENV + echo "VK_LAYER_PATH=$VK_LAYER_PATH" >> $GITHUB_ENV - name: Install Chrome (latest stable) run: | sudo apt-get update @@ -34,7 +45,7 @@ jobs: env: CI: true - name: build and test - run: xvfb-run --auto-servernum --server-args='-screen 0 1920x1080x24' npm test + run: npm test env: CI: true - name: report test coverage From 4314cf2d21d860cd0e41ae993b127c1c79bca201 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Fri, 1 Aug 2025 08:11:57 -0400 Subject: [PATCH 53/69] Just vulkan --- .github/workflows/ci-test.yml | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 4e4c749ca1..12a6cde3b9 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -19,17 +19,10 @@ jobs: uses: actions/setup-node@v1 with: node-version: 20.x - - name: Install Vulkan SwiftShader + - name: Install Vulkan run: | sudo apt-get update sudo apt-get install -y libvulkan1 vulkan-tools mesa-vulkan-drivers - mkdir -p $HOME/swiftshader - curl -L https://github.com/google/swiftshader/releases/download/latest/Linux.zip -o swiftshader.zip - unzip swiftshader.zip -d $HOME/swiftshader - export VK_ICD_FILENAMES=$HOME/swiftshader/Linux/vk_swiftshader_icd.json - export VK_LAYER_PATH=$HOME/swiftshader/Linux - echo "VK_ICD_FILENAMES=$VK_ICD_FILENAMES" >> $GITHUB_ENV - echo "VK_LAYER_PATH=$VK_LAYER_PATH" >> $GITHUB_ENV - name: Install Chrome (latest stable) run: | sudo apt-get update From c01dee7285d2a90e5e535c9e2067c3c8b0307b23 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Fri, 1 Aug 2025 08:36:49 -0400 Subject: [PATCH 54/69] Try with xvfb --- .github/workflows/ci-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 12a6cde3b9..5289728040 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -38,7 +38,7 @@ jobs: env: CI: true - name: build and test - run: npm test + run: xvfb-run --auto-servernum --server-args='-screen 0 1280x1024x24' npm test env: CI: true - name: report test coverage From 7b3ed67261ce104e8ec1ad21aa7e5fafb2dc5dd9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?charlotte=20=F0=9F=8C=B8?= Date: Wed, 6 Aug 2025 12:05:41 -0700 Subject: [PATCH 55/69] Test ci flow with warp. 
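
WARP is Windows' software D3D rasterizer, so this run approximates what a GPU-less CI machine can do. For local debugging, a similar setup can be approximated from script by asking WebGPU for its fallback adapter; this snippet is a sketch using the standard requestAdapter option and is not used by the tests here:

// Request a software/fallback adapter, roughly analogous to forcing WARP or
// SwiftShader at the browser level. Resolves to null if none is exposed.
const adapter = await navigator.gpu.requestAdapter({ forceFallbackAdapter: true });
console.log('fallback adapter available:', adapter !== null);
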
--- .github/workflows/ci-test.yml | 22 ++++++---------------- vitest.workspace.mjs | 14 ++++++-------- 2 files changed, 12 insertions(+), 24 deletions(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 5289728040..0d3569d0a4 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -11,34 +11,24 @@ on: jobs: test: - runs-on: ubuntu-latest + runs-on: windows-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Use Node.js 20.x - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: 20.x - - name: Install Vulkan - run: | - sudo apt-get update - sudo apt-get install -y libvulkan1 vulkan-tools mesa-vulkan-drivers - name: Install Chrome (latest stable) run: | - sudo apt-get update - sudo apt-get install -y wget gnupg - wget -q -O - https://dl.google.com/linux/linux_signing_key.pub | sudo apt-key add - - sudo sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list' - sudo apt-get update - sudo apt-get install -y google-chrome-stable - which google-chrome - google-chrome --version + choco install googlechrome + & "C:\Program Files\Google\Chrome\Application\chrome.exe" --version - name: Get node modules run: npm ci env: CI: true - name: build and test - run: xvfb-run --auto-servernum --server-args='-screen 0 1280x1024x24' npm test + run: npm test env: CI: true - name: report test coverage diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 4220f9aa26..943943eabd 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -43,16 +43,14 @@ export default defineWorkspace([ providerOptions: { capabilities: process.env.CI ? { 'goog:chromeOptions': { - binary: '/usr/bin/google-chrome', args: [ '--enable-unsafe-webgpu', - '--enable-features=Vulkan', - '--use-cmd-decoder=passthrough', - '--disable-gpu-sandbox', - '--disable-software-rasterizer=false', - '--disable-dawn-features=disallow_unsafe_apis', - '--use-angle=vulkan', - '--use-vulkan=swiftshader', + '--headless=new', + '--no-sandbox', + '--disable-dev-shm-usage', + '--use-gl=angle', + '--use-angle=d3d11-warp', + '--disable-gpu-sandbox' ] } } : undefined From 22f5294a35bc6a8ec9f6942dd173e15b9ed91a3f Mon Sep 17 00:00:00 2001 From: charlotte Date: Thu, 7 Aug 2025 16:44:09 -0700 Subject: [PATCH 56/69] Fixes for CI. 
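
Among the fixes below, the WGSL helper renames its `sampler` parameter to `texSampler`. The likely reason, inferred from the rename rather than stated in this patch, is that `sampler` is also the WGSL type name, and stricter WGSL compilers (such as Firefox's) reject identifiers that shadow it. The pattern, sketched with made-up names and explicit type parameters:

export const sampleSketch = `
fn sampleAt(tex: texture_2d<f32>, texSampler: sampler, uv: vec2<f32>) -> vec4<f32> {
  // "texSampler" avoids reusing the built-in type name "sampler" as an identifier.
  return textureSample(tex, texSampler, uv);
}
`;
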
--- .github/workflows/ci-test.yml | 41 ++++++++++++++++++---- src/webgpu/p5.RendererWebGPU.js | 2 +- src/webgpu/shaders/utils.js | 4 +-- vitest.workspace.mjs | 61 ++++++++++++++++++++++++++++----- 4 files changed, 90 insertions(+), 18 deletions(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 0d3569d0a4..416d01777d 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -11,27 +11,54 @@ on: jobs: test: - runs-on: windows-latest + strategy: + matrix: + include: + - os: windows-latest + browser: firefox + test-workspace: unit-tests-firefox + - os: ubuntu-latest + browser: chrome + test-workspace: unit-tests-chrome + + runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 + - name: Use Node.js 20.x uses: actions/setup-node@v4 with: node-version: 20.x - - name: Install Chrome (latest stable) + + - name: Verify Firefox (Windows) + if: matrix.os == 'windows-latest' && matrix.browser == 'firefox' run: | - choco install googlechrome - & "C:\Program Files\Google\Chrome\Application\chrome.exe" --version + & "C:\Program Files\Mozilla Firefox\firefox.exe" --version + + - name: Verify Chrome (Ubuntu) + if: matrix.os == 'ubuntu-latest' && matrix.browser == 'chrome' + run: | + google-chrome --version + - name: Get node modules run: npm ci env: CI: true - - name: build and test - run: npm test + + - name: Build and test (Ubuntu) + if: matrix.os == 'ubuntu-latest' + run: npm test -- --project=${{ matrix.test-workspace }} + env: + CI: true + + - name: Build and test (Windows) + if: matrix.os == 'windows-latest' + run: npm test -- --project=${{ matrix.test-workspace }} env: CI: true - - name: report test coverage + + - name: Report test coverage run: bash <(curl -s https://codecov.io/bash) -f coverage/coverage-final.json env: CI: true diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index f85fd4607b..2a4a9b3e8d 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -631,7 +631,7 @@ class RendererWebGPU extends Renderer3D { ////////////////////////////////////////////// _drawBuffers(geometry, { mode = constants.TRIANGLES, count = 1 }) { - const buffers = this.geometryBufferCache.getCached(geometry); + const buffers = this.geometryBufferCache.ensureCached(geometry); if (!buffers) return; const commandEncoder = this.device.createCommandEncoder(); diff --git a/src/webgpu/shaders/utils.js b/src/webgpu/shaders/utils.js index 0a313dfaf8..a6b79426e9 100644 --- a/src/webgpu/shaders/utils.js +++ b/src/webgpu/shaders/utils.js @@ -1,6 +1,6 @@ export const getTexture = ` -fn getTexture(texture: texture_2d, sampler: sampler, coord: vec2) -> vec4 { - let color = textureSample(texture, sampler, coord); +fn getTexture(texture: texture_2d, texSampler: sampler, coord: vec2) -> vec4 { + let color = textureSample(texture, texSampler, coord); let alpha = color.a; return vec4( select(color.rgb / alpha, vec3(0.0), alpha == 0.0), diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 943943eabd..23055b6680 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -23,7 +23,7 @@ export default defineWorkspace([ ], }, test: { - name: 'unit', + name: 'unit-tests-chrome', root: './', include: [ './test/unit/**/*.js', @@ -33,7 +33,7 @@ export default defineWorkspace([ './test/unit/assets/**/*', './test/unit/visual/visualTest.js', ], - testTimeout: 1000, + testTimeout: 10000, globals: true, browser: { enabled: true, @@ -44,18 +44,63 @@ export default defineWorkspace([ capabilities: process.env.CI ? 
{ 'goog:chromeOptions': { args: [ - '--enable-unsafe-webgpu', - '--headless=new', '--no-sandbox', - '--disable-dev-shm-usage', - '--use-gl=angle', - '--use-angle=d3d11-warp', - '--disable-gpu-sandbox' + '--headless=new', + '--use-angle=vulkan', + '--enable-features=Vulkan', + '--disable-vulkan-surface', + '--enable-unsafe-webgpu', ] } } : undefined } } } + }, + { + plugins, + publicDir: './test', + bench: { + name: 'bench', + root: './', + include: [ + './test/bench/**/*.js' + ], + }, + test: { + name: 'unit-tests-firefox', + root: './', + include: [ + './test/unit/**/*.js', + ], + exclude: [ + './test/unit/spec.js', + './test/unit/assets/**/*', + './test/unit/visual/visualTest.js', + ], + testTimeout: 10000, + globals: true, + browser: { + enabled: true, + name: 'firefox', + provider: 'webdriverio', + screenshotFailures: false, + providerOptions: { + capabilities: process.env.CI ? { + 'moz:firefoxOptions': { + args: [ + '--headless', + '--enable-webgpu', + ], + prefs: { + 'dom.webgpu.enabled': true, + 'gfx.webgpu.force-enabled': true, + 'dom.webgpu.testing.assert-on-warnings': false, + } + } + } : undefined + } + } + } } ]); \ No newline at end of file From a8dea1506b25e776d02024855bce3d5dd23c4dcb Mon Sep 17 00:00:00 2001 From: charlotte Date: Thu, 7 Aug 2025 16:48:56 -0700 Subject: [PATCH 57/69] Revert change. --- src/webgpu/p5.RendererWebGPU.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 2a4a9b3e8d..f85fd4607b 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -631,7 +631,7 @@ class RendererWebGPU extends Renderer3D { ////////////////////////////////////////////// _drawBuffers(geometry, { mode = constants.TRIANGLES, count = 1 }) { - const buffers = this.geometryBufferCache.ensureCached(geometry); + const buffers = this.geometryBufferCache.getCached(geometry); if (!buffers) return; const commandEncoder = this.device.createCommandEncoder(); From 9c49827445dfdcd1bf425d5bcda7a23c620331f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?charlotte=20=F0=9F=8C=B8?= Date: Fri, 8 Aug 2025 14:40:02 -0700 Subject: [PATCH 58/69] Add setAttributes api to WebGPU renderer. 
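This gives the WebGPU renderer its own setAttributes() path, mirroring the
existing WebGL API: defaults (forceFallbackAdapter, powerPreference) are
stored on _webgpuAttributes and passed to navigator.gpu.requestAdapter(),
and changing an attribute afterwards rebuilds the context through the
shared Renderer3D._resetContext() helper, which is now parameterized by a
renderer constructor.

A minimal usage sketch, mirroring the updated visual tests in this patch
(only the attribute keys defined here are assumed to exist):

    // In an async WebGPU sketch, request the fallback adapter (useful on CI)
    await p5.createCanvas(50, 50, p5.WEBGPU);
    await p5.setAttributes({ forceFallbackAdapter: true });
    p5.background("white");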
--- src/core/main.js | 1 + src/core/p5.Renderer3D.js | 75 ++++++++++++++++++++++++++++++ src/webgl/p5.RendererGL.js | 73 +---------------------------- src/webgpu/p5.RendererWebGPU.js | 71 +++++++++++++++++++++++++++- test/unit/visual/cases/webgpu.js | 38 +++++++++++++-- test/unit/webgpu/p5.Framebuffer.js | 22 +++------ 6 files changed, 187 insertions(+), 93 deletions(-) diff --git a/src/core/main.js b/src/core/main.js index f9fc1c6559..9a3d929f3f 100644 --- a/src/core/main.js +++ b/src/core/main.js @@ -58,6 +58,7 @@ class p5 { this._curElement = null; this._elements = []; this._glAttributes = null; + this._webgpuAttributes = null; this._requestAnimId = 0; this._isGlobal = false; this._loop = true; diff --git a/src/core/p5.Renderer3D.js b/src/core/p5.Renderer3D.js index e422c2940f..bbf42b330c 100644 --- a/src/core/p5.Renderer3D.js +++ b/src/core/p5.Renderer3D.js @@ -1,4 +1,5 @@ import * as constants from "../core/constants"; +import { Graphics } from "../core/p5.Graphics"; import { Renderer } from './p5.Renderer'; import GeometryBuilder from "../webgl/GeometryBuilder"; import { Matrix } from "../math/p5.Matrix"; @@ -350,6 +351,80 @@ export class Renderer3D extends Renderer { }; } + //This is helper function to reset the context anytime the attributes + //are changed with setAttributes() + + async _resetContext(options, callback, ctor = Renderer3D) { + const w = this.width; + const h = this.height; + const defaultId = this.canvas.id; + const isPGraphics = this._pInst instanceof Graphics; + + // Preserve existing position and styles before recreation + const prevStyle = { + position: this.canvas.style.position, + top: this.canvas.style.top, + left: this.canvas.style.left, + }; + + if (isPGraphics) { + // Handle PGraphics: remove and recreate the canvas + const pg = this._pInst; + pg.canvas.parentNode.removeChild(pg.canvas); + pg.canvas = document.createElement("canvas"); + const node = pg._pInst._userNode || document.body; + node.appendChild(pg.canvas); + Element.call(pg, pg.canvas, pg._pInst); + // Restore previous width and height + pg.width = w; + pg.height = h; + } else { + // Handle main canvas: remove and recreate it + let c = this.canvas; + if (c) { + c.parentNode.removeChild(c); + } + c = document.createElement("canvas"); + c.id = defaultId; + // Attach the new canvas to the correct parent node + if (this._pInst._userNode) { + this._pInst._userNode.appendChild(c); + } else { + document.body.appendChild(c); + } + this._pInst.canvas = c; + this.canvas = c; + + // Restore the saved position + this.canvas.style.position = prevStyle.position; + this.canvas.style.top = prevStyle.top; + this.canvas.style.left = prevStyle.left; + } + + const renderer = new ctor( + this._pInst, + w, + h, + !isPGraphics, + this._pInst.canvas + ); + this._pInst._renderer = renderer; + + renderer._applyDefaults(); + + if (renderer.contextReady) { + await renderer.contextReady + } + + if (typeof callback === "function") { + //setTimeout with 0 forces the task to the back of the queue, this ensures that + //we finish switching out the renderer + setTimeout(() => { + callback.apply(window._renderer, options); + }, 0); + } + } + remove() { this.wrappedElt.remove(); this.wrappedElt = null; diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index c6fbfa45a6..ab15ea3d81 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -13,7 +13,6 @@ import { Renderer3D, getStrokeDefs } from "../core/p5.Renderer3D"; import { Shader } from "./p5.Shader"; import { Texture, MipmapTexture } from 
"./p5.Texture"; import { Framebuffer } from "./p5.Framebuffer"; -import { Graphics } from "../core/p5.Graphics"; import { RGB, RGBA } from '../color/creating_reading'; import { Element } from "../dom/p5.Element"; import { Image } from '../image/p5.Image'; @@ -450,76 +449,6 @@ class RendererGL extends Renderer3D { return { adjustedWidth, adjustedHeight }; } - //This is helper function to reset the context anytime the attributes - //are changed with setAttributes() - - _resetContext(options, callback) { - const w = this.width; - const h = this.height; - const defaultId = this.canvas.id; - const isPGraphics = this._pInst instanceof Graphics; - - // Preserve existing position and styles before recreation - const prevStyle = { - position: this.canvas.style.position, - top: this.canvas.style.top, - left: this.canvas.style.left, - }; - - if (isPGraphics) { - // Handle PGraphics: remove and recreate the canvas - const pg = this._pInst; - pg.canvas.parentNode.removeChild(pg.canvas); - pg.canvas = document.createElement("canvas"); - const node = pg._pInst._userNode || document.body; - node.appendChild(pg.canvas); - Element.call(pg, pg.canvas, pg._pInst); - // Restore previous width and height - pg.width = w; - pg.height = h; - } else { - // Handle main canvas: remove and recreate it - let c = this.canvas; - if (c) { - c.parentNode.removeChild(c); - } - c = document.createElement("canvas"); - c.id = defaultId; - // Attach the new canvas to the correct parent node - if (this._pInst._userNode) { - this._pInst._userNode.appendChild(c); - } else { - document.body.appendChild(c); - } - this._pInst.canvas = c; - this.canvas = c; - - // Restore the saved position - this.canvas.style.position = prevStyle.position; - this.canvas.style.top = prevStyle.top; - this.canvas.style.left = prevStyle.left; - } - - const renderer = new RendererGL( - this._pInst, - w, - h, - !isPGraphics, - this._pInst.canvas - ); - this._pInst._renderer = renderer; - - renderer._applyDefaults(); - - if (typeof callback === "function") { - //setTimeout with 0 forces the task to the back of the queue, this ensures that - //we finish switching out the renderer - setTimeout(() => { - callback.apply(window._renderer, options); - }, 0); - } - } - _resetBuffersBeforeDraw() { this.GL.clearStencil(0); this.GL.clear(this.GL.DEPTH_BUFFER_BIT | this.GL.STENCIL_BUFFER_BIT); @@ -2196,7 +2125,7 @@ function rendererGL(p5, fn) { } } - this._renderer._resetContext(); + this._renderer._resetContext(null, null, RendererGL); if (this._renderer.states.curCamera) { this._renderer.states.curCamera._renderer = this._renderer; diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index f85fd4607b..58c17f35aa 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -9,6 +9,8 @@ import * as constants from '../core/constants'; import { colorVertexShader, colorFragmentShader } from './shaders/color'; import { lineVertexShader, lineFragmentShader} from './shaders/line'; import { materialVertexShader, materialFragmentShader } from './shaders/material'; +import {Graphics} from "../core/p5.Graphics"; +import {Element} from "../dom/p5.Element"; const { lineDefs } = getStrokeDefs((n, v, t) => `const ${n}: ${t} = ${v};\n`); @@ -29,7 +31,25 @@ class RendererWebGPU extends Renderer3D { } async setupContext() { - this.adapter = await navigator.gpu?.requestAdapter(); + this._setAttributeDefaults(this._pInst); + await this._initContext(); + } + + _setAttributeDefaults(pInst) { + const defaults = { + forceFallbackAdapter: 
false, + powerPreference: 'high-performance', + }; + if (pInst._webgpuAttributes === null) { + pInst._webgpuAttributes = defaults; + } else { + pInst._webgpuAttributes = Object.assign(defaults, pInst._webgpuAttributes); + } + return; + } + + async _initContext() { + this.adapter = await navigator.gpu?.requestAdapter(this._webgpuAttributes); this.device = await this.adapter?.requestDevice({ // Todo: check support requiredFeatures: ['depth32float-stencil8'] @@ -1854,6 +1874,55 @@ function rendererWebGPU(p5, fn) { fn.ensureTexture = function(source) { return this._renderer.ensureTexture(source); } + + fn.setAttributes = async function (key, value) { + if (typeof this._webgpuAttributes === "undefined") { + console.log( + "You are trying to use setAttributes on a p5.Graphics object " + + "that does not use a WebGPU renderer." + ); + return; + } + let unchanged = true; + + if (typeof value !== "undefined") { + //first time modifying the attributes + if (this._webgpuAttributes === null) { + this._webgpuAttributes = {}; + } + if (this._webgpuAttributes[key] !== value) { + //changing value of previously altered attribute + this._webgpuAttributes[key] = value; + unchanged = false; + } + //setting all attributes with some change + } else if (key instanceof Object) { + if (this._webgpuAttributes !== key) { + this._webgpuAttributes = key; + unchanged = false; + } + } + //@todo_FES + if (!this._renderer.isP3D || unchanged) { + return; + } + + if (!this._setupDone) { + if (this._renderer.geometryBufferCache.numCached() > 0) { + p5._friendlyError( + "Sorry, Could not set the attributes, you need to call setAttributes() " + + "before calling the other drawing methods in setup()" + ); + return; + } + } + + await this._renderer._resetContext(null, null, RendererWebGPU); + + if (this._renderer.states.curCamera) { + this._renderer.states.curCamera._renderer = this._renderer; + } + } } export default rendererWebGPU; diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index 28382dda25..130dabf83b 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -11,6 +11,9 @@ visualSuite("WebGPU", function () { "The color shader runs successfully", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); + await p5.setAttributes({ + forceFallbackAdapter: true + }); p5.background("white"); for (const [i, color] of ["red", "lime", "blue"].entries()) { p5.push(); @@ -29,6 +32,9 @@ visualSuite("WebGPU", function () { "The stroke shader runs successfully", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); + await p5.setAttributes({ + forceFallbackAdapter: true + }); p5.background("white"); for (const [i, color] of ["red", "lime", "blue"].entries()) { p5.push(); @@ -47,6 +53,9 @@ visualSuite("WebGPU", function () { "The material shader runs successfully", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); + await p5.setAttributes({ + forceFallbackAdapter: true + }); p5.background("white"); p5.ambientLight(50); p5.directionalLight(100, 100, 100, 0, 1, -1); @@ -68,6 +77,9 @@ visualSuite("WebGPU", function () { visualTest("Shader hooks can be used", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); + await p5.setAttributes({ + forceFallbackAdapter: true + }); const myFill = p5.baseMaterialShader().modify({ "Vertex getWorldInputs": `(inputs: Vertex) { var result = inputs; @@ -96,6 +108,9 @@ visualSuite("WebGPU", function () { "Textures in the material shader work", async 
function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); + await p5.setAttributes({ + forceFallbackAdapter: true + }); const tex = p5.createImage(50, 50); tex.loadPixels(); for (let x = 0; x < tex.width; x++) { @@ -121,6 +136,9 @@ visualSuite("WebGPU", function () { "Main canvas drawing after resize", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); + await p5.setAttributes({ + forceFallbackAdapter: true + }); // Resize the canvas p5.resizeCanvas(30, 30); // Draw to the main canvas after resize @@ -138,7 +156,9 @@ visualSuite("WebGPU", function () { "Basic framebuffer draw to canvas", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - + await p5.setAttributes({ + forceFallbackAdapter: true + }); // Create a framebuffer const fbo = p5.createFramebuffer({ width: 25, height: 25 }); @@ -164,7 +184,9 @@ visualSuite("WebGPU", function () { "Framebuffer with different sizes", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - + await p5.setAttributes({ + forceFallbackAdapter: true + }); // Create two different sized framebuffers const fbo1 = p5.createFramebuffer({ width: 20, height: 20 }); const fbo2 = p5.createFramebuffer({ width: 15, height: 15 }); @@ -207,7 +229,9 @@ visualSuite("WebGPU", function () { visualTest("Auto-sized framebuffer", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - + await p5.setAttributes({ + forceFallbackAdapter: true + }); // Create auto-sized framebuffer (should match canvas size) const fbo = p5.createFramebuffer(); @@ -242,7 +266,9 @@ visualSuite("WebGPU", function () { "Auto-sized framebuffer after canvas resize", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - + await p5.setAttributes({ + forceFallbackAdapter: true + }); // Create auto-sized framebuffer const fbo = p5.createFramebuffer(); @@ -274,7 +300,9 @@ visualSuite("WebGPU", function () { "Fixed-size framebuffer after manual resize", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - + await p5.setAttributes({ + forceFallbackAdapter: true + }); // Create fixed-size framebuffer const fbo = p5.createFramebuffer({ width: 20, height: 20 }); diff --git a/test/unit/webgpu/p5.Framebuffer.js b/test/unit/webgpu/p5.Framebuffer.js index 452585b6c8..97cb8a13dd 100644 --- a/test/unit/webgpu/p5.Framebuffer.js +++ b/test/unit/webgpu/p5.Framebuffer.js @@ -15,6 +15,13 @@ suite('WebGPU p5.Framebuffer', function() { }); }); + beforeEach(async function() { + const renderer = await myp5.createCanvas(10, 10, 'webgpu'); + await myp5.setAttributes({ + forceFallbackAdapter: true + }); + }) + afterAll(function() { myp5.remove(); window.devicePixelRatio = prevPixelRatio; @@ -22,7 +29,6 @@ suite('WebGPU p5.Framebuffer', function() { suite('Creation and basic properties', function() { test('framebuffers can be created with WebGPU renderer', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = myp5.createFramebuffer(); expect(fbo).to.be.an('object'); @@ -32,7 +38,6 @@ suite('WebGPU p5.Framebuffer', function() { }); test('framebuffers can be created with custom dimensions', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = myp5.createFramebuffer({ width: 20, height: 30 }); expect(fbo.width).to.equal(20); @@ -41,7 +46,6 @@ suite('WebGPU p5.Framebuffer', function() { }); test('framebuffers have color texture', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = 
myp5.createFramebuffer(); expect(fbo.color).to.be.an('object'); @@ -49,7 +53,6 @@ suite('WebGPU p5.Framebuffer', function() { }); test('framebuffers can specify different formats', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = myp5.createFramebuffer({ format: 'float', channels: 'rgb' @@ -63,7 +66,6 @@ suite('WebGPU p5.Framebuffer', function() { suite('Auto-sizing behavior', function() { test('auto-sized framebuffers change size with canvas', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); myp5.pixelDensity(1); const fbo = myp5.createFramebuffer(); @@ -80,7 +82,6 @@ suite('WebGPU p5.Framebuffer', function() { }); test('manually-sized framebuffers do not change size with canvas', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); myp5.pixelDensity(3); const fbo = myp5.createFramebuffer({ width: 25, height: 30, density: 1 }); @@ -97,7 +98,6 @@ suite('WebGPU p5.Framebuffer', function() { }); test('manually-sized framebuffers can be made auto-sized', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); myp5.pixelDensity(1); const fbo = myp5.createFramebuffer({ width: 25, height: 30, density: 2 }); @@ -120,7 +120,6 @@ suite('WebGPU p5.Framebuffer', function() { suite('Manual resizing', function() { test('framebuffers can be manually resized', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); myp5.pixelDensity(1); const fbo = myp5.createFramebuffer(); @@ -135,7 +134,6 @@ suite('WebGPU p5.Framebuffer', function() { }); test('resizing affects pixel density', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); myp5.pixelDensity(1); const fbo = myp5.createFramebuffer(); @@ -152,7 +150,6 @@ suite('WebGPU p5.Framebuffer', function() { suite('Drawing functionality', function() { test('can draw to framebuffer with draw() method', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = myp5.createFramebuffer(); myp5.background(0, 255, 0); @@ -178,7 +175,6 @@ suite('WebGPU p5.Framebuffer', function() { }); test('can use framebuffer as texture', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = myp5.createFramebuffer(); fbo.draw(() => { @@ -195,7 +191,6 @@ suite('WebGPU p5.Framebuffer', function() { suite('Pixel access', function() { test('loadPixels returns a promise in WebGPU', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = myp5.createFramebuffer(); fbo.draw(() => { @@ -212,7 +207,6 @@ suite('WebGPU p5.Framebuffer', function() { }); test('pixels property is set after loadPixels resolves', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = myp5.createFramebuffer(); fbo.draw(() => { @@ -225,7 +219,6 @@ suite('WebGPU p5.Framebuffer', function() { }); test('get() returns a promise for single pixel in WebGPU', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = myp5.createFramebuffer(); fbo.draw(() => { @@ -242,7 +235,6 @@ suite('WebGPU p5.Framebuffer', function() { }); test('get() returns a promise for region in WebGPU', async function() { - await myp5.createCanvas(10, 10, myp5.WEBGPU); const fbo = myp5.createFramebuffer(); fbo.draw(() => { From 695e9e60a1f4d58ff159d6eccb17ee70cf9654e7 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 09:02:58 -0400 Subject: [PATCH 59/69] Try ignore-blocklist flag --- vitest.workspace.mjs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 
23055b6680..90b4173e2f 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -32,6 +32,7 @@ export default defineWorkspace([ './test/unit/spec.js', './test/unit/assets/**/*', './test/unit/visual/visualTest.js', + './test/unit/visual/cases/webgpu.js', ], testTimeout: 10000, globals: true, @@ -71,7 +72,8 @@ export default defineWorkspace([ name: 'unit-tests-firefox', root: './', include: [ - './test/unit/**/*.js', + './test/unit/visual/cases/webgpu.js', + // './test/unit/**/*.js', ], exclude: [ './test/unit/spec.js', @@ -96,6 +98,7 @@ export default defineWorkspace([ 'dom.webgpu.enabled': true, 'gfx.webgpu.force-enabled': true, 'dom.webgpu.testing.assert-on-warnings': false, + 'gfx.webgpu.ignore-blocklist': true, } } } : undefined From 48ce3209d3d1c22f67164e60924262f4a2d1f784 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 09:34:32 -0400 Subject: [PATCH 60/69] Go back to ubuntu for now, try different swiftshader flags --- .github/workflows/ci-test.yml | 14 -------- vitest.workspace.mjs | 60 ++++------------------------------- 2 files changed, 7 insertions(+), 67 deletions(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 416d01777d..328d090e23 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -14,9 +14,6 @@ jobs: strategy: matrix: include: - - os: windows-latest - browser: firefox - test-workspace: unit-tests-firefox - os: ubuntu-latest browser: chrome test-workspace: unit-tests-chrome @@ -31,11 +28,6 @@ jobs: with: node-version: 20.x - - name: Verify Firefox (Windows) - if: matrix.os == 'windows-latest' && matrix.browser == 'firefox' - run: | - & "C:\Program Files\Mozilla Firefox\firefox.exe" --version - - name: Verify Chrome (Ubuntu) if: matrix.os == 'ubuntu-latest' && matrix.browser == 'chrome' run: | @@ -52,12 +44,6 @@ jobs: env: CI: true - - name: Build and test (Windows) - if: matrix.os == 'windows-latest' - run: npm test -- --project=${{ matrix.test-workspace }} - env: - CI: true - - name: Report test coverage run: bash <(curl -s https://codecov.io/bash) -f coverage/coverage-final.json env: diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 90b4173e2f..bca96ad5a8 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -26,13 +26,14 @@ export default defineWorkspace([ name: 'unit-tests-chrome', root: './', include: [ - './test/unit/**/*.js', + // './test/unit/**/*.js', + './test/unit/visual/cases/webgpu.js', ], exclude: [ './test/unit/spec.js', './test/unit/assets/**/*', './test/unit/visual/visualTest.js', - './test/unit/visual/cases/webgpu.js', + // './test/unit/visual/cases/webgpu.js', ], testTimeout: 10000, globals: true, @@ -47,10 +48,11 @@ export default defineWorkspace([ args: [ '--no-sandbox', '--headless=new', - '--use-angle=vulkan', - '--enable-features=Vulkan', - '--disable-vulkan-surface', '--enable-unsafe-webgpu', + '--enable-features=Vulkan', + '--use-vulkan=swiftshader', + '--use-webgpu-adapter=swiftshader', + '--no-sandbox', ] } } : undefined @@ -58,52 +60,4 @@ export default defineWorkspace([ } } }, - { - plugins, - publicDir: './test', - bench: { - name: 'bench', - root: './', - include: [ - './test/bench/**/*.js' - ], - }, - test: { - name: 'unit-tests-firefox', - root: './', - include: [ - './test/unit/visual/cases/webgpu.js', - // './test/unit/**/*.js', - ], - exclude: [ - './test/unit/spec.js', - './test/unit/assets/**/*', - './test/unit/visual/visualTest.js', - ], - testTimeout: 10000, - globals: true, - browser: { - enabled: true, - name: 'firefox', 
- provider: 'webdriverio', - screenshotFailures: false, - providerOptions: { - capabilities: process.env.CI ? { - 'moz:firefoxOptions': { - args: [ - '--headless', - '--enable-webgpu', - ], - prefs: { - 'dom.webgpu.enabled': true, - 'gfx.webgpu.force-enabled': true, - 'dom.webgpu.testing.assert-on-warnings': false, - 'gfx.webgpu.ignore-blocklist': true, - } - } - } : undefined - } - } - } - } ]); \ No newline at end of file From 47535445966433bbcc36a0db56ca7a32c4dc83a7 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 09:39:31 -0400 Subject: [PATCH 61/69] Different flag --- vitest.workspace.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index bca96ad5a8..6802ae3588 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -49,9 +49,9 @@ export default defineWorkspace([ '--no-sandbox', '--headless=new', '--enable-unsafe-webgpu', - '--enable-features=Vulkan', '--use-vulkan=swiftshader', '--use-webgpu-adapter=swiftshader', + '--use-angle=vulkan', '--no-sandbox', ] } From 9eb541d8ff2be5745a6be13d60d4655a1a8b3db7 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 09:41:45 -0400 Subject: [PATCH 62/69] Check if the adapter is defined --- src/webgpu/p5.RendererWebGPU.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 58c17f35aa..daa7eeffc0 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -50,6 +50,11 @@ class RendererWebGPU extends Renderer3D { async _initContext() { this.adapter = await navigator.gpu?.requestAdapter(this._webgpuAttributes); + console.log('Adapter:'); + console.log(this.adapter); + if (this.adapter) { + console.log([...this.adapter.features]); + } this.device = await this.adapter?.requestDevice({ // Todo: check support requiredFeatures: ['depth32float-stencil8'] From 74c167243661e3e867c104d2717f42800386a556 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 09:58:15 -0400 Subject: [PATCH 63/69] Try without setAttributes since the adapter seems to exist before that --- src/webgpu/p5.RendererWebGPU.js | 2 ++ test/unit/visual/cases/webgpu.js | 33 -------------------------------- 2 files changed, 2 insertions(+), 33 deletions(-) diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index daa7eeffc0..8d3f8c2e24 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -59,6 +59,8 @@ class RendererWebGPU extends Renderer3D { // Todo: check support requiredFeatures: ['depth32float-stencil8'] }); + console.log('Device:'); + console.log(this.device); if (!this.device) { throw new Error('Your browser does not support WebGPU.'); } diff --git a/test/unit/visual/cases/webgpu.js b/test/unit/visual/cases/webgpu.js index 130dabf83b..bffc88c219 100644 --- a/test/unit/visual/cases/webgpu.js +++ b/test/unit/visual/cases/webgpu.js @@ -11,9 +11,6 @@ visualSuite("WebGPU", function () { "The color shader runs successfully", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); p5.background("white"); for (const [i, color] of ["red", "lime", "blue"].entries()) { p5.push(); @@ -32,9 +29,6 @@ visualSuite("WebGPU", function () { "The stroke shader runs successfully", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); p5.background("white"); for (const 
[i, color] of ["red", "lime", "blue"].entries()) { p5.push(); @@ -53,9 +47,6 @@ visualSuite("WebGPU", function () { "The material shader runs successfully", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); p5.background("white"); p5.ambientLight(50); p5.directionalLight(100, 100, 100, 0, 1, -1); @@ -77,9 +68,6 @@ visualSuite("WebGPU", function () { visualTest("Shader hooks can be used", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); const myFill = p5.baseMaterialShader().modify({ "Vertex getWorldInputs": `(inputs: Vertex) { var result = inputs; @@ -108,9 +96,6 @@ visualSuite("WebGPU", function () { "Textures in the material shader work", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); const tex = p5.createImage(50, 50); tex.loadPixels(); for (let x = 0; x < tex.width; x++) { @@ -136,9 +121,6 @@ visualSuite("WebGPU", function () { "Main canvas drawing after resize", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); // Resize the canvas p5.resizeCanvas(30, 30); // Draw to the main canvas after resize @@ -156,9 +138,6 @@ visualSuite("WebGPU", function () { "Basic framebuffer draw to canvas", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); // Create a framebuffer const fbo = p5.createFramebuffer({ width: 25, height: 25 }); @@ -184,9 +163,6 @@ visualSuite("WebGPU", function () { "Framebuffer with different sizes", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); // Create two different sized framebuffers const fbo1 = p5.createFramebuffer({ width: 20, height: 20 }); const fbo2 = p5.createFramebuffer({ width: 15, height: 15 }); @@ -229,9 +205,6 @@ visualSuite("WebGPU", function () { visualTest("Auto-sized framebuffer", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); // Create auto-sized framebuffer (should match canvas size) const fbo = p5.createFramebuffer(); @@ -266,9 +239,6 @@ visualSuite("WebGPU", function () { "Auto-sized framebuffer after canvas resize", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); // Create auto-sized framebuffer const fbo = p5.createFramebuffer(); @@ -300,9 +270,6 @@ visualSuite("WebGPU", function () { "Fixed-size framebuffer after manual resize", async function (p5, screenshot) { await p5.createCanvas(50, 50, p5.WEBGPU); - await p5.setAttributes({ - forceFallbackAdapter: true - }); // Create fixed-size framebuffer const fbo = p5.createFramebuffer({ width: 20, height: 20 }); From 498bb836fd1c2b54a293009a66be78486eb82b10 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 10:05:41 -0400 Subject: [PATCH 64/69] Try installing chrome with swiftshader --- .github/workflows/ci-test.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 328d090e23..d1e98902a2 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -27,6 +27,13 @@ jobs: uses: 
actions/setup-node@v4 with: node-version: 20.x + + - name: Install Chrome with SwiftShader + run: | + sudo apt-get update + curl -sSL https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb -o chrome.deb + sudo apt-get install -y ./chrome.deb + ls -R /opt/google/chrome/swiftshader - name: Verify Chrome (Ubuntu) if: matrix.os == 'ubuntu-latest' && matrix.browser == 'chrome' From bc3501a10b98271888ffe0e68f344caf0bae61f4 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 10:18:44 -0400 Subject: [PATCH 65/69] Revert "Try installing chrome with swiftshader" This reverts commit 498bb836fd1c2b54a293009a66be78486eb82b10. --- .github/workflows/ci-test.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index d1e98902a2..328d090e23 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -27,13 +27,6 @@ jobs: uses: actions/setup-node@v4 with: node-version: 20.x - - - name: Install Chrome with SwiftShader - run: | - sudo apt-get update - curl -sSL https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb -o chrome.deb - sudo apt-get install -y ./chrome.deb - ls -R /opt/google/chrome/swiftshader - name: Verify Chrome (Ubuntu) if: matrix.os == 'ubuntu-latest' && matrix.browser == 'chrome' From 88dec1b8b3a9bffda5b3e8dffdf274d74306fa35 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 10:26:08 -0400 Subject: [PATCH 66/69] Try chrome on windows --- .github/workflows/ci-test.yml | 22 ++++++++++++---- vitest.workspace.mjs | 49 ++++++++++++++++++++++++++++++++++- 2 files changed, 65 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 328d090e23..1f0c7f4ca9 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -14,9 +14,10 @@ jobs: strategy: matrix: include: - - os: ubuntu-latest + #- os: ubuntu-latest + # browser: chrome + - os: windows-latest browser: chrome - test-workspace: unit-tests-chrome runs-on: ${{ matrix.os }} @@ -32,18 +33,29 @@ jobs: if: matrix.os == 'ubuntu-latest' && matrix.browser == 'chrome' run: | google-chrome --version + + - name: Verify Chrome (Windows) + if: matrix.os == 'windows-latest' && matrix.browser == 'chrome' + run: | + & "C:\Program Files\Google\Chrome\Application\chrome.exe" --version - name: Get node modules run: npm ci env: CI: true - + - name: Build and test (Ubuntu) - if: matrix.os == 'ubuntu-latest' - run: npm test -- --project=${{ matrix.test-workspace }} + if: matrix.os == 'windows-latest' + run: npm test -- --project=unit-tests-webgpu env: CI: true + #- name: Build and test (Ubuntu) + # if: matrix.os == 'ubuntu-latest' + # run: npm test -- --project=unit-tests + # env: + # CI: true + - name: Report test coverage run: bash <(curl -s https://codecov.io/bash) -f coverage/coverage-final.json env: diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 6802ae3588..d2d95bb049 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -23,7 +23,54 @@ export default defineWorkspace([ ], }, test: { - name: 'unit-tests-chrome', + name: 'unit-tests', + root: './', + include: [ + './test/unit/**/*.js', + ], + exclude: [ + './test/unit/spec.js', + './test/unit/assets/**/*', + './test/unit/visual/visualTest.js', + './test/unit/visual/cases/webgpu.js', + ], + testTimeout: 10000, + globals: true, + browser: { + enabled: true, + name: 'chrome', + provider: 'webdriverio', + screenshotFailures: false, + providerOptions: { + 
capabilities: process.env.CI ? { + 'goog:chromeOptions': { + args: [ + '--no-sandbox', + '--headless=new', + '--enable-unsafe-webgpu', + '--use-vulkan=swiftshader', + '--use-webgpu-adapter=swiftshader', + '--use-angle=vulkan', + '--no-sandbox', + ] + } + } : undefined + } + } + } + }, + { + plugins, + publicDir: './test', + bench: { + name: 'bench', + root: './', + include: [ + './test/bench/**/*.js' + ], + }, + test: { + name: 'unit-tests-webgpu', root: './', include: [ // './test/unit/**/*.js', From d5f584f4e0fe1ad5f7cc615f2b17dcd2fa745e54 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 10:43:57 -0400 Subject: [PATCH 67/69] Don't run webgpu tests on CI for now --- .github/workflows/ci-test.yml | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index 1f0c7f4ca9..04dcd79306 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -14,11 +14,11 @@ jobs: strategy: matrix: include: - #- os: ubuntu-latest - # browser: chrome - - os: windows-latest + - os: ubuntu-latest browser: chrome - + # - os: windows-latest + # browser: chrome + runs-on: ${{ matrix.os }} steps: @@ -44,18 +44,18 @@ jobs: env: CI: true - - name: Build and test (Ubuntu) - if: matrix.os == 'windows-latest' - run: npm test -- --project=unit-tests-webgpu - env: - CI: true - #- name: Build and test (Ubuntu) - # if: matrix.os == 'ubuntu-latest' - # run: npm test -- --project=unit-tests + # if: matrix.os == 'windows-latest' + # run: npm test -- --project=unit-tests-webgpu # env: # CI: true - + + - name: Build and test (Ubuntu) + if: matrix.os == 'ubuntu-latest' + run: npm test -- --project=unit-tests + env: + CI: true + - name: Report test coverage run: bash <(curl -s https://codecov.io/bash) -f coverage/coverage-final.json env: From 3ea32edaaefffef9d0e8b04f7ce8fa89d7df927b Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 10:48:51 -0400 Subject: [PATCH 68/69] Exclude other webgpu tests --- vitest.workspace.mjs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index d2d95bb049..13dca58dbe 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -33,6 +33,7 @@ export default defineWorkspace([ './test/unit/assets/**/*', './test/unit/visual/visualTest.js', './test/unit/visual/cases/webgpu.js', + './test/unit/webgpu/*.js', ], testTimeout: 10000, globals: true, @@ -75,6 +76,7 @@ export default defineWorkspace([ include: [ // './test/unit/**/*.js', './test/unit/visual/cases/webgpu.js', + './test/unit/webgpu/*.js', ], exclude: [ './test/unit/spec.js', From 724b41a11f6504d071c883036480f938efd64117 Mon Sep 17 00:00:00 2001 From: Dave Pagurek Date: Sun, 14 Sep 2025 11:06:51 -0400 Subject: [PATCH 69/69] Move setAttributes implementation to renderer --- src/webgl/p5.RendererGL.js | 94 +++++++++++++------------- src/webgpu/p5.RendererWebGPU.js | 105 +++++++++++++++-------------- test/unit/webgpu/p5.Framebuffer.js | 5 +- vitest.workspace.mjs | 5 +- 4 files changed, 107 insertions(+), 102 deletions(-) diff --git a/src/webgl/p5.RendererGL.js b/src/webgl/p5.RendererGL.js index ab15ea3d81..d22668cc10 100644 --- a/src/webgl/p5.RendererGL.js +++ b/src/webgl/p5.RendererGL.js @@ -382,6 +382,54 @@ class RendererGL extends Renderer3D { return; } + _setAttributes(key, value) { + if (typeof this._pInst._glAttributes === "undefined") { + console.log( + "You are trying to use setAttributes on a p5.Graphics object " + + "that does not use a 
WEBGL renderer." + ); + return; + } + let unchanged = true; + if (typeof value !== "undefined") { + //first time modifying the attributes + if (this._pInst._glAttributes === null) { + this._pInst._glAttributes = {}; + } + if (this._pInst._glAttributes[key] !== value) { + //changing value of previously altered attribute + this._pInst._glAttributes[key] = value; + unchanged = false; + } + //setting all attributes with some change + } else if (key instanceof Object) { + if (this._pInst._glAttributes !== key) { + this._pInst._glAttributes = key; + unchanged = false; + } + } + //@todo_FES + if (!this.isP3D || unchanged) { + return; + } + + if (!this._pInst._setupDone) { + if (this.geometryBufferCache.numCached() > 0) { + p5._friendlyError( + "Sorry, Could not set the attributes, you need to call setAttributes() " + + "before calling the other drawing methods in setup()" + ); + return; + } + } + + this._resetContext(null, null, RendererGL); + + if (this.states.curCamera) { + this.states.curCamera._renderer = this._renderer; + } + } + _initContext() { if (this._pInst._glAttributes?.version !== 1) { // Unless WebGL1 is explicitly asked for, try to create a WebGL2 context @@ -2085,51 +2133,7 @@ function rendererGL(p5, fn) { * @param {Object} obj object with key-value pairs */ fn.setAttributes = function (key, value) { - if (typeof this._glAttributes === "undefined") { - console.log( - "You are trying to use setAttributes on a p5.Graphics object " + - "that does not use a WEBGL renderer." - ); - return; - } - let unchanged = true; - if (typeof value !== "undefined") { - //first time modifying the attributes - if (this._glAttributes === null) { - this._glAttributes = {}; - } - if (this._glAttributes[key] !== value) { - //changing value of previously altered attribute - this._glAttributes[key] = value; - unchanged = false; - } - //setting all attributes with some change - } else if (key instanceof Object) { - if (this._glAttributes !== key) { - this._glAttributes = key; - unchanged = false; - } - } - //@todo_FES - if (!this._renderer.isP3D || unchanged) { - return; - } - - if (!this._setupDone) { - if (this._renderer.geometryBufferCache.numCached() > 0) { - p5._friendlyError( - "Sorry, Could not set the attributes, you need to call setAttributes() " + - "before calling the other drawing methods in setup()" - ); - return; - } - } - - this._renderer._resetContext(null, null, RendererGL); - - if (this._renderer.states.curCamera) { - this._renderer.states.curCamera._renderer = this._renderer; - } + return this._renderer._setAttributes(key, value); }; /** diff --git a/src/webgpu/p5.RendererWebGPU.js b/src/webgpu/p5.RendererWebGPU.js index 8d3f8c2e24..2bcf39949d 100644 --- a/src/webgpu/p5.RendererWebGPU.js +++ b/src/webgpu/p5.RendererWebGPU.js @@ -50,8 +50,8 @@ class RendererWebGPU extends Renderer3D { async _initContext() { this.adapter = await navigator.gpu?.requestAdapter(this._webgpuAttributes); - console.log('Adapter:'); - console.log(this.adapter); + // console.log('Adapter:'); + // console.log(this.adapter); if (this.adapter) { console.log([...this.adapter.features]); } @@ -59,8 +59,8 @@ class RendererWebGPU extends Renderer3D { // Todo: check support requiredFeatures: ['depth32float-stencil8'] }); - console.log('Device:'); - console.log(this.device); + // console.log('Device:'); + // console.log(this.device); if (!this.device) { throw new Error('Your browser does not support WebGPU.'); } @@ -79,6 +79,55 @@ class RendererWebGPU extends Renderer3D { this._update(); } + async _setAttributes(key, value) 
{ + if (typeof this._pInst._webgpuAttributes === "undefined") { + console.log( + "You are trying to use setAttributes on a p5.Graphics object " + + "that does not use a WebGPU renderer." + ); + return; + } + let unchanged = true; + + if (typeof value !== "undefined") { + //first time modifying the attributes + if (this._pInst._webgpuAttributes === null) { + this._pInst._webgpuAttributes = {}; + } + if (this._pInst._webgpuAttributes[key] !== value) { + //changing value of previously altered attribute + this._webgpuAttributes[key] = value; + unchanged = false; + } + //setting all attributes with some change + } else if (key instanceof Object) { + if (this._pInst._webgpuAttributes !== key) { + this._pInst._webgpuAttributes = key; + unchanged = false; + } + } + //@todo_FES + if (!this.isP3D || unchanged) { + return; + } + + if (!this._pInst._setupDone) { + if (this.geometryBufferCache.numCached() > 0) { + p5._friendlyError( + "Sorry, Could not set the attributes, you need to call setAttributes() " + + "before calling the other drawing methods in setup()" + ); + return; + } + } + + await this._resetContext(null, null, RendererWebGPU); + + if (this.states.curCamera) { + this.states.curCamera._renderer = this._renderer; + } + } + _updateSize() { if (this.depthTexture && this.depthTexture.destroy) { this.depthTexture.destroy(); @@ -1882,53 +1931,9 @@ function rendererWebGPU(p5, fn) { return this._renderer.ensureTexture(source); } + // TODO: move this and the duplicate in the WebGL renderer to another file fn.setAttributes = async function (key, value) { - if (typeof this._webgpuAttributes === "undefined") { - console.log( - "You are trying to use setAttributes on a p5.Graphics object " + - "that does not use a WebGPU renderer." - ); - return; - } - let unchanged = true; - - if (typeof value !== "undefined") { - //first time modifying the attributes - if (this._webgpuAttributes === null) { - this._webgpuAttributes = {}; - } - if (this._webgpuAttributes[key] !== value) { - //changing value of previously altered attribute - this._webgpuAttributes[key] = value; - unchanged = false; - } - //setting all attributes with some change - } else if (key instanceof Object) { - if (this._webgpuAttributes !== key) { - this._webgpuAttributes = key; - unchanged = false; - } - } - //@todo_FES - if (!this._renderer.isP3D || unchanged) { - return; - } - - if (!this._setupDone) { - if (this._renderer.geometryBufferCache.numCached() > 0) { - p5._friendlyError( - "Sorry, Could not set the attributes, you need to call setAttributes() " + - "before calling the other drawing methods in setup()" - ); - return; - } - } - - await this._renderer._resetContext(null, null, RendererWebGPU); - - if (this._renderer.states.curCamera) { - this._renderer.states.curCamera._renderer = this._renderer; - } + return this._renderer._setAttributes(key, value); } } diff --git a/test/unit/webgpu/p5.Framebuffer.js b/test/unit/webgpu/p5.Framebuffer.js index 97cb8a13dd..ccbadbc7a0 100644 --- a/test/unit/webgpu/p5.Framebuffer.js +++ b/test/unit/webgpu/p5.Framebuffer.js @@ -16,10 +16,7 @@ suite('WebGPU p5.Framebuffer', function() { }); beforeEach(async function() { - const renderer = await myp5.createCanvas(10, 10, 'webgpu'); - await myp5.setAttributes({ - forceFallbackAdapter: true - }); + await myp5.createCanvas(10, 10, 'webgpu'); }) afterAll(function() { diff --git a/vitest.workspace.mjs b/vitest.workspace.mjs index 13dca58dbe..a8da776a67 100644 --- a/vitest.workspace.mjs +++ b/vitest.workspace.mjs @@ -1,6 +1,5 @@ import { defineWorkspace } from 
'vitest/config'; import vitePluginString from 'vite-plugin-string'; -console.log(`CI: ${process.env.CI}`) const plugins = [ vitePluginString({ @@ -35,7 +34,7 @@ export default defineWorkspace([ './test/unit/visual/cases/webgpu.js', './test/unit/webgpu/*.js', ], - testTimeout: 10000, + testTimeout: 1000, globals: true, browser: { enabled: true, @@ -84,7 +83,7 @@ export default defineWorkspace([ './test/unit/visual/visualTest.js', // './test/unit/visual/cases/webgpu.js', ], - testTimeout: 10000, + testTimeout: 1000, globals: true, browser: { enabled: true,